text
stringlengths 94
87.1k
| code_tokens
sequence | avg_line_len
float64 7.91
668
| score
sequence |
---|---|---|---|
def logic_subset(self, op=None):
"""Return set of logicnets, filtered by the type(s) of logic op provided as op.
If no op is specified, the full set of logicnets associated with the Block are
returned. This is helpful for getting all memories of a block for example."""
if op is None:
return self.logic
else:
return set(x for x in self.logic if x.op in op) | [
"def",
"logic_subset",
"(",
"self",
",",
"op",
"=",
"None",
")",
":",
"if",
"op",
"is",
"None",
":",
"return",
"self",
".",
"logic",
"else",
":",
"return",
"set",
"(",
"x",
"for",
"x",
"in",
"self",
".",
"logic",
"if",
"x",
".",
"op",
"in",
"op",
")"
] | 46 | [
0.03125,
0.034482758620689655,
0,
0.03488372093023256,
0.046511627906976744,
0.09090909090909091,
0.06896551724137931,
0.15384615384615385,
0.03389830508474576
] |
def get_lattice_type(number):
"""Return the lattice crystal system.
Hexagonal cells are differentiated into rhombohedral and hexagonal
lattices.
Args:
number (int): The international space group number.
Returns:
str: The lattice crystal system.
"""
f = lambda i, j: i <= number <= j
cs = {'triclinic': (1, 2), 'monoclinic': (3, 15),
'orthorhombic': (16, 74), 'tetragonal': (75, 142),
'trigonal': (143, 167), 'hexagonal': (168, 194),
'cubic': (195, 230)}
crystal_system = None
for k, v in cs.items():
if f(*v):
crystal_system = k
break
if number in [146, 148, 155, 160, 161, 166, 167]:
return "rhombohedral"
elif crystal_system == "trigonal":
return "hexagonal"
else:
return crystal_system | [
"def",
"get_lattice_type",
"(",
"number",
")",
":",
"f",
"=",
"lambda",
"i",
",",
"j",
":",
"i",
"<=",
"number",
"<=",
"j",
"cs",
"=",
"{",
"'triclinic'",
":",
"(",
"1",
",",
"2",
")",
",",
"'monoclinic'",
":",
"(",
"3",
",",
"15",
")",
",",
"'orthorhombic'",
":",
"(",
"16",
",",
"74",
")",
",",
"'tetragonal'",
":",
"(",
"75",
",",
"142",
")",
",",
"'trigonal'",
":",
"(",
"143",
",",
"167",
")",
",",
"'hexagonal'",
":",
"(",
"168",
",",
"194",
")",
",",
"'cubic'",
":",
"(",
"195",
",",
"230",
")",
"}",
"crystal_system",
"=",
"None",
"for",
"k",
",",
"v",
"in",
"cs",
".",
"items",
"(",
")",
":",
"if",
"f",
"(",
"*",
"v",
")",
":",
"crystal_system",
"=",
"k",
"break",
"if",
"number",
"in",
"[",
"146",
",",
"148",
",",
"155",
",",
"160",
",",
"161",
",",
"166",
",",
"167",
"]",
":",
"return",
"\"rhombohedral\"",
"elif",
"crystal_system",
"==",
"\"trigonal\"",
":",
"return",
"\"hexagonal\"",
"else",
":",
"return",
"crystal_system"
] | 30.466667 | [
0.034482758620689655,
0.044444444444444446,
0,
0.02702702702702703,
0.11764705882352941,
0,
0.15384615384615385,
0.047619047619047616,
0,
0.125,
0.045454545454545456,
0.18181818181818182,
0.07317073170731707,
0.05263157894736842,
0.046875,
0.04838709677419355,
0.11764705882352941,
0,
0.06896551724137931,
0.06451612903225806,
0.09523809523809523,
0.058823529411764705,
0.09523809523809523,
0,
0.03508771929824561,
0.06060606060606061,
0.047619047619047616,
0.06666666666666667,
0.15384615384615385,
0.06060606060606061
] |
def _get_mapper_params(self):
"""Converts self to model.MapperSpec.params."""
reader_params = self.input_reader_cls.params_to_json(
self.input_reader_params)
# TODO(user): Do the same for writer params.
return {"input_reader": reader_params,
"output_writer": self.output_writer_params} | [
"def",
"_get_mapper_params",
"(",
"self",
")",
":",
"reader_params",
"=",
"self",
".",
"input_reader_cls",
".",
"params_to_json",
"(",
"self",
".",
"input_reader_params",
")",
"# TODO(user): Do the same for writer params.",
"return",
"{",
"\"input_reader\"",
":",
"reader_params",
",",
"\"output_writer\"",
":",
"self",
".",
"output_writer_params",
"}"
] | 45 | [
0.034482758620689655,
0.0392156862745098,
0.05263157894736842,
0.09090909090909091,
0.041666666666666664,
0.07142857142857142,
0.05454545454545454
] |
def fit(self, X, y=None):
"""Compute the minimum and maximum to be used for later scaling.
Parameters
----------
X : array-like, shape [n_samples, n_features]
The data used to compute the per-feature minimum and maximum
used for later scaling along the features axis.
"""
X = check_array(X, copy=self.copy,
dtype=[np.float64, np.float32, np.float16, np.float128])
feature_range = self.feature_range
if feature_range[0] >= feature_range[1]:
raise ValueError("Minimum of desired feature range must be smaller"
" than maximum. Got %s." % str(feature_range))
if self.fit_feature_range is not None:
fit_feature_range = self.fit_feature_range
if fit_feature_range[0] >= fit_feature_range[1]:
raise ValueError("Minimum of desired (fit) feature range must "
"be smaller than maximum. Got %s."
% str(feature_range))
if (fit_feature_range[0] < feature_range[0] or
fit_feature_range[1] > feature_range[1]):
raise ValueError("fit_feature_range must be a subset of "
"feature_range. Got %s, fit %s."
% (str(feature_range),
str(fit_feature_range)))
feature_range = fit_feature_range
data_min = np.min(X, axis=0)
data_range = np.max(X, axis=0) - data_min
# Do not scale constant features
data_range[data_range == 0.0] = 1.0
self.scale_ = (feature_range[1] - feature_range[0]) / data_range
self.min_ = feature_range[0] - data_min * self.scale_
self.data_range = data_range
self.data_min = data_min
return self | [
"def",
"fit",
"(",
"self",
",",
"X",
",",
"y",
"=",
"None",
")",
":",
"X",
"=",
"check_array",
"(",
"X",
",",
"copy",
"=",
"self",
".",
"copy",
",",
"dtype",
"=",
"[",
"np",
".",
"float64",
",",
"np",
".",
"float32",
",",
"np",
".",
"float16",
",",
"np",
".",
"float128",
"]",
")",
"feature_range",
"=",
"self",
".",
"feature_range",
"if",
"feature_range",
"[",
"0",
"]",
">=",
"feature_range",
"[",
"1",
"]",
":",
"raise",
"ValueError",
"(",
"\"Minimum of desired feature range must be smaller\"",
"\" than maximum. Got %s.\"",
"%",
"str",
"(",
"feature_range",
")",
")",
"if",
"self",
".",
"fit_feature_range",
"is",
"not",
"None",
":",
"fit_feature_range",
"=",
"self",
".",
"fit_feature_range",
"if",
"fit_feature_range",
"[",
"0",
"]",
">=",
"fit_feature_range",
"[",
"1",
"]",
":",
"raise",
"ValueError",
"(",
"\"Minimum of desired (fit) feature range must \"",
"\"be smaller than maximum. Got %s.\"",
"%",
"str",
"(",
"feature_range",
")",
")",
"if",
"(",
"fit_feature_range",
"[",
"0",
"]",
"<",
"feature_range",
"[",
"0",
"]",
"or",
"fit_feature_range",
"[",
"1",
"]",
">",
"feature_range",
"[",
"1",
"]",
")",
":",
"raise",
"ValueError",
"(",
"\"fit_feature_range must be a subset of \"",
"\"feature_range. Got %s, fit %s.\"",
"%",
"(",
"str",
"(",
"feature_range",
")",
",",
"str",
"(",
"fit_feature_range",
")",
")",
")",
"feature_range",
"=",
"fit_feature_range",
"data_min",
"=",
"np",
".",
"min",
"(",
"X",
",",
"axis",
"=",
"0",
")",
"data_range",
"=",
"np",
".",
"max",
"(",
"X",
",",
"axis",
"=",
"0",
")",
"-",
"data_min",
"# Do not scale constant features",
"data_range",
"[",
"data_range",
"==",
"0.0",
"]",
"=",
"1.0",
"self",
".",
"scale_",
"=",
"(",
"feature_range",
"[",
"1",
"]",
"-",
"feature_range",
"[",
"0",
"]",
")",
"/",
"data_range",
"self",
".",
"min_",
"=",
"feature_range",
"[",
"0",
"]",
"-",
"data_min",
"*",
"self",
".",
"scale_",
"self",
".",
"data_range",
"=",
"data_range",
"self",
".",
"data_min",
"=",
"data_min",
"return",
"self"
] | 47.666667 | [
0.04,
0.027777777777777776,
0,
0.1111111111111111,
0.1111111111111111,
0.07547169811320754,
0.027777777777777776,
0.03389830508474576,
0.18181818181818182,
0.07142857142857142,
0.0625,
0,
0.047619047619047616,
0.041666666666666664,
0.0379746835443038,
0.05333333333333334,
0.043478260869565216,
0.037037037037037035,
0.03333333333333333,
0.0379746835443038,
0.04477611940298507,
0.07407407407407407,
0.05172413793103448,
0.04918032786885246,
0.0410958904109589,
0.046153846153846156,
0.07272727272727272,
0.05,
0.044444444444444446,
0,
0.05555555555555555,
0.04081632653061224,
0.05,
0.046511627906976744,
0.027777777777777776,
0.03278688524590164,
0.05555555555555555,
0.0625,
0.10526315789473684
] |
def make_label(loss, key):
'''Create a legend label for an optimization run.'''
algo, rate, mu, half, reg = key
slots, args = ['{:.3f}', '{}', 'm={:.3f}'], [loss, algo, mu]
if algo in 'SGD NAG RMSProp Adam ESGD'.split():
slots.append('lr={:.2e}')
args.append(rate)
if algo in 'RMSProp ADADELTA ESGD'.split():
slots.append('rmsh={}')
args.append(half)
slots.append('rmsr={:.2e}')
args.append(reg)
return ' '.join(slots).format(*args) | [
"def",
"make_label",
"(",
"loss",
",",
"key",
")",
":",
"algo",
",",
"rate",
",",
"mu",
",",
"half",
",",
"reg",
"=",
"key",
"slots",
",",
"args",
"=",
"[",
"'{:.3f}'",
",",
"'{}'",
",",
"'m={:.3f}'",
"]",
",",
"[",
"loss",
",",
"algo",
",",
"mu",
"]",
"if",
"algo",
"in",
"'SGD NAG RMSProp Adam ESGD'",
".",
"split",
"(",
")",
":",
"slots",
".",
"append",
"(",
"'lr={:.2e}'",
")",
"args",
".",
"append",
"(",
"rate",
")",
"if",
"algo",
"in",
"'RMSProp ADADELTA ESGD'",
".",
"split",
"(",
")",
":",
"slots",
".",
"append",
"(",
"'rmsh={}'",
")",
"args",
".",
"append",
"(",
"half",
")",
"slots",
".",
"append",
"(",
"'rmsr={:.2e}'",
")",
"args",
".",
"append",
"(",
"reg",
")",
"return",
"' '",
".",
"join",
"(",
"slots",
")",
".",
"format",
"(",
"*",
"args",
")"
] | 37.846154 | [
0.038461538461538464,
0.03571428571428571,
0.05714285714285714,
0.03125,
0.0392156862745098,
0.06060606060606061,
0.08,
0.0425531914893617,
0.06451612903225806,
0.08,
0.05714285714285714,
0.08333333333333333,
0.05
] |
def _fake_references(self, namespace, **params):
"""
Implements a mock WBEM server responder for
:meth:`~pywbem.WBEMConnection.References`
"""
rc = None if params['ResultClass'] is None else \
params['ResultClass'].classname
role = params['Role']
obj_name = params['ObjectName']
classname = obj_name.classname
pl = params['PropertyList']
ico = params['IncludeClassOrigin']
iq = params['IncludeQualifiers']
if isinstance(obj_name, CIMClassName):
rtn_classnames = self._get_reference_classnames(
classname, namespace, rc, role)
# returns list of tuples of (CIMClassname, CIMClass)
return self._return_assoc_class_tuples(rtn_classnames, namespace,
iq, ico, pl)
assert isinstance(obj_name, CIMInstanceName)
ref_paths = self._get_reference_instnames(obj_name, namespace, rc,
role)
rtn_insts = []
for path in ref_paths:
rtn_insts.append(self._get_instance(
path, namespace, None,
params['PropertyList'],
params['IncludeClassOrigin'],
params['IncludeQualifiers']))
for inst in rtn_insts:
if inst.path.host is None:
inst.path.host = self.host
return self._return_assoc_tuple(rtn_insts) | [
"def",
"_fake_references",
"(",
"self",
",",
"namespace",
",",
"*",
"*",
"params",
")",
":",
"rc",
"=",
"None",
"if",
"params",
"[",
"'ResultClass'",
"]",
"is",
"None",
"else",
"params",
"[",
"'ResultClass'",
"]",
".",
"classname",
"role",
"=",
"params",
"[",
"'Role'",
"]",
"obj_name",
"=",
"params",
"[",
"'ObjectName'",
"]",
"classname",
"=",
"obj_name",
".",
"classname",
"pl",
"=",
"params",
"[",
"'PropertyList'",
"]",
"ico",
"=",
"params",
"[",
"'IncludeClassOrigin'",
"]",
"iq",
"=",
"params",
"[",
"'IncludeQualifiers'",
"]",
"if",
"isinstance",
"(",
"obj_name",
",",
"CIMClassName",
")",
":",
"rtn_classnames",
"=",
"self",
".",
"_get_reference_classnames",
"(",
"classname",
",",
"namespace",
",",
"rc",
",",
"role",
")",
"# returns list of tuples of (CIMClassname, CIMClass)",
"return",
"self",
".",
"_return_assoc_class_tuples",
"(",
"rtn_classnames",
",",
"namespace",
",",
"iq",
",",
"ico",
",",
"pl",
")",
"assert",
"isinstance",
"(",
"obj_name",
",",
"CIMInstanceName",
")",
"ref_paths",
"=",
"self",
".",
"_get_reference_instnames",
"(",
"obj_name",
",",
"namespace",
",",
"rc",
",",
"role",
")",
"rtn_insts",
"=",
"[",
"]",
"for",
"path",
"in",
"ref_paths",
":",
"rtn_insts",
".",
"append",
"(",
"self",
".",
"_get_instance",
"(",
"path",
",",
"namespace",
",",
"None",
",",
"params",
"[",
"'PropertyList'",
"]",
",",
"params",
"[",
"'IncludeClassOrigin'",
"]",
",",
"params",
"[",
"'IncludeQualifiers'",
"]",
")",
")",
"for",
"inst",
"in",
"rtn_insts",
":",
"if",
"inst",
".",
"path",
".",
"host",
"is",
"None",
":",
"inst",
".",
"path",
".",
"host",
"=",
"self",
".",
"host",
"return",
"self",
".",
"_return_assoc_tuple",
"(",
"rtn_insts",
")"
] | 39.405405 | [
0.020833333333333332,
0.18181818181818182,
0.0392156862745098,
0.10204081632653061,
0.18181818181818182,
0.03508771929824561,
0.046511627906976744,
0.06896551724137931,
0.05128205128205128,
0.05263157894736842,
0.05714285714285714,
0.047619047619047616,
0.05,
0,
0.043478260869565216,
0.05,
0.06382978723404255,
0.03125,
0.03896103896103896,
0.06349206349206349,
0,
0.038461538461538464,
0.04054054054054054,
0.07272727272727272,
0.09090909090909091,
0.06666666666666667,
0.0625,
0.05263157894736842,
0.05128205128205128,
0.044444444444444446,
0.06666666666666667,
0,
0.06666666666666667,
0.05263157894736842,
0.047619047619047616,
0,
0.04
] |
def sshInstance(self, *args, **kwargs):
"""
Run a command on the instance.
Returns the binary output of the command.
"""
kwargs['collectStdout'] = True
return self.coreSSH(*args, **kwargs) | [
"def",
"sshInstance",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
"[",
"'collectStdout'",
"]",
"=",
"True",
"return",
"self",
".",
"coreSSH",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | 32.857143 | [
0.02564102564102564,
0.18181818181818182,
0.05263157894736842,
0.04081632653061224,
0.18181818181818182,
0.05263157894736842,
0.045454545454545456
] |
def count(self, view, include=None):
"""
Return a ViewCount for a view.
:param include: list of objects to sideload. `Side-loading API Docs
<https://developer.zendesk.com/rest_api/docs/core/side_loading>`__.
:param view: View or view id
"""
return self._get(self._build_url(self.endpoint.count(id=view, include=include))) | [
"def",
"count",
"(",
"self",
",",
"view",
",",
"include",
"=",
"None",
")",
":",
"return",
"self",
".",
"_get",
"(",
"self",
".",
"_build_url",
"(",
"self",
".",
"endpoint",
".",
"count",
"(",
"id",
"=",
"view",
",",
"include",
"=",
"include",
")",
")",
")"
] | 41.666667 | [
0.027777777777777776,
0.18181818181818182,
0.05263157894736842,
0,
0.06578947368421052,
0.06329113924050633,
0.08333333333333333,
0.18181818181818182,
0.03409090909090909
] |
def add_filter_by_review_state(self, query, out_params):
"""Applies the filter by review_state to the search query
"""
self.add_filter_by_wf_state(query=query, out_params=out_params,
wf_id="bika_analysis_workflow",
index="review_state",
title=_("Status")) | [
"def",
"add_filter_by_review_state",
"(",
"self",
",",
"query",
",",
"out_params",
")",
":",
"self",
".",
"add_filter_by_wf_state",
"(",
"query",
"=",
"query",
",",
"out_params",
"=",
"out_params",
",",
"wf_id",
"=",
"\"bika_analysis_workflow\"",
",",
"index",
"=",
"\"review_state\"",
",",
"title",
"=",
"_",
"(",
"\"Status\"",
")",
")"
] | 54.428571 | [
0.017857142857142856,
0.03076923076923077,
0.18181818181818182,
0.04225352112676056,
0.04477611940298507,
0.05263157894736842,
0.07407407407407407
] |
def round(self):
"""
Round coordinates.
"""
x, y = self.anchor
self.anchor = (normalizers.normalizeRounding(x),
normalizers.normalizeRounding(y))
x, y = self.bcpIn
self.bcpIn = (normalizers.normalizeRounding(x),
normalizers.normalizeRounding(y))
x, y = self.bcpOut
self.bcpOut = (normalizers.normalizeRounding(x),
normalizers.normalizeRounding(y)) | [
"def",
"round",
"(",
"self",
")",
":",
"x",
",",
"y",
"=",
"self",
".",
"anchor",
"self",
".",
"anchor",
"=",
"(",
"normalizers",
".",
"normalizeRounding",
"(",
"x",
")",
",",
"normalizers",
".",
"normalizeRounding",
"(",
"y",
")",
")",
"x",
",",
"y",
"=",
"self",
".",
"bcpIn",
"self",
".",
"bcpIn",
"=",
"(",
"normalizers",
".",
"normalizeRounding",
"(",
"x",
")",
",",
"normalizers",
".",
"normalizeRounding",
"(",
"y",
")",
")",
"x",
",",
"y",
"=",
"self",
".",
"bcpOut",
"self",
".",
"bcpOut",
"=",
"(",
"normalizers",
".",
"normalizeRounding",
"(",
"x",
")",
",",
"normalizers",
".",
"normalizeRounding",
"(",
"y",
")",
")"
] | 36.538462 | [
0.0625,
0.18181818181818182,
0.07692307692307693,
0.18181818181818182,
0.07692307692307693,
0.05357142857142857,
0.07142857142857142,
0.08,
0.05454545454545454,
0.07272727272727272,
0.07692307692307693,
0.05357142857142857,
0.07142857142857142
] |
def get_output_matrix(self):
"""
Get a copy of the full output matrix of a Model. This only
works if the model is not quantized.
"""
if self.f.isQuant():
raise ValueError("Can't get quantized Matrix")
return np.array(self.f.getOutputMatrix()) | [
"def",
"get_output_matrix",
"(",
"self",
")",
":",
"if",
"self",
".",
"f",
".",
"isQuant",
"(",
")",
":",
"raise",
"ValueError",
"(",
"\"Can't get quantized Matrix\"",
")",
"return",
"np",
".",
"array",
"(",
"self",
".",
"f",
".",
"getOutputMatrix",
"(",
")",
")"
] | 36.875 | [
0.03571428571428571,
0.18181818181818182,
0.030303030303030304,
0.045454545454545456,
0.18181818181818182,
0.07142857142857142,
0.034482758620689655,
0.04081632653061224
] |
def draw_status(self, writer, idx):
"""
Conditionally draw status bar when output terminal is a tty.
:param writer: callable writes to output stream, receiving unicode.
:param idx: current page position index.
:type idx: int
"""
if self.term.is_a_tty:
writer(self.term.hide_cursor())
style = self.screen.style
writer(self.term.move(self.term.height - 1))
if idx == self.last_page:
last_end = u'(END)'
else:
last_end = u'/{0}'.format(self.last_page)
txt = (u'Page {idx}{last_end} - '
u'{q} to quit, [keys: {keyset}]'
.format(idx=style.attr_minor(u'{0}'.format(idx)),
last_end=style.attr_major(last_end),
keyset=style.attr_major('kjfb12-='),
q=style.attr_minor(u'q')))
writer(self.term.center(txt).rstrip()) | [
"def",
"draw_status",
"(",
"self",
",",
"writer",
",",
"idx",
")",
":",
"if",
"self",
".",
"term",
".",
"is_a_tty",
":",
"writer",
"(",
"self",
".",
"term",
".",
"hide_cursor",
"(",
")",
")",
"style",
"=",
"self",
".",
"screen",
".",
"style",
"writer",
"(",
"self",
".",
"term",
".",
"move",
"(",
"self",
".",
"term",
".",
"height",
"-",
"1",
")",
")",
"if",
"idx",
"==",
"self",
".",
"last_page",
":",
"last_end",
"=",
"u'(END)'",
"else",
":",
"last_end",
"=",
"u'/{0}'",
".",
"format",
"(",
"self",
".",
"last_page",
")",
"txt",
"=",
"(",
"u'Page {idx}{last_end} - '",
"u'{q} to quit, [keys: {keyset}]'",
".",
"format",
"(",
"idx",
"=",
"style",
".",
"attr_minor",
"(",
"u'{0}'",
".",
"format",
"(",
"idx",
")",
")",
",",
"last_end",
"=",
"style",
".",
"attr_major",
"(",
"last_end",
")",
",",
"keyset",
"=",
"style",
".",
"attr_major",
"(",
"'kjfb12-='",
")",
",",
"q",
"=",
"style",
".",
"attr_minor",
"(",
"u'q'",
")",
")",
")",
"writer",
"(",
"self",
".",
"term",
".",
"center",
"(",
"txt",
")",
".",
"rstrip",
"(",
")",
")"
] | 42.391304 | [
0.02857142857142857,
0.18181818181818182,
0.029411764705882353,
0,
0.04,
0.0625,
0.13636363636363635,
0.18181818181818182,
0.06666666666666667,
0.046511627906976744,
0.05405405405405406,
0.03571428571428571,
0.05405405405405406,
0.05714285714285714,
0.11764705882352941,
0.03508771929824561,
0.06666666666666667,
0.058823529411764705,
0.058823529411764705,
0.06349206349206349,
0.06349206349206349,
0.09433962264150944,
0.04
] |
def load_modules_from_python(self, route_list):
"""Load modules from the native python source."""
for name, modpath in route_list:
if ':' in modpath:
path, attr = modpath.split(':', 1)
else:
path, attr = modpath, None
self.commands[name] = ModuleLoader(path, attr=attr) | [
"def",
"load_modules_from_python",
"(",
"self",
",",
"route_list",
")",
":",
"for",
"name",
",",
"modpath",
"in",
"route_list",
":",
"if",
"':'",
"in",
"modpath",
":",
"path",
",",
"attr",
"=",
"modpath",
".",
"split",
"(",
"':'",
",",
"1",
")",
"else",
":",
"path",
",",
"attr",
"=",
"modpath",
",",
"None",
"self",
".",
"commands",
"[",
"name",
"]",
"=",
"ModuleLoader",
"(",
"path",
",",
"attr",
"=",
"attr",
")"
] | 43.25 | [
0.02127659574468085,
0.03508771929824561,
0.05,
0.06666666666666667,
0.04,
0.11764705882352941,
0.047619047619047616,
0.031746031746031744
] |
async def do_api_call(self):
"""Start. Sending and waiting for answer."""
self.pyvlx.connection.register_frame_received_cb(
self.response_rec_callback)
await self.send_frame()
await self.start_timeout()
await self.response_received_or_timeout.wait()
await self.stop_timeout()
self.pyvlx.connection.unregister_frame_received_cb(self.response_rec_callback) | [
"async",
"def",
"do_api_call",
"(",
"self",
")",
":",
"self",
".",
"pyvlx",
".",
"connection",
".",
"register_frame_received_cb",
"(",
"self",
".",
"response_rec_callback",
")",
"await",
"self",
".",
"send_frame",
"(",
")",
"await",
"self",
".",
"start_timeout",
"(",
")",
"await",
"self",
".",
"response_received_or_timeout",
".",
"wait",
"(",
")",
"await",
"self",
".",
"stop_timeout",
"(",
")",
"self",
".",
"pyvlx",
".",
"connection",
".",
"unregister_frame_received_cb",
"(",
"self",
".",
"response_rec_callback",
")"
] | 46 | [
0.03571428571428571,
0.038461538461538464,
0.05263157894736842,
0.07692307692307693,
0.06451612903225806,
0.058823529411764705,
0.037037037037037035,
0.06060606060606061,
0.03488372093023256
] |
def get_targets(self, name=None):
"""
Retrieve all/one target objects
:param name: name of the target to search for, None for everything
:return: A list of target objects
"""
targets = []
for section in self.get_sections():
if section.endswith(u'Target'):
targets += [value for value in self._sections[section]]
if name is None:
return targets
if not isinstance(name, list):
name = [name]
return [target for target in targets if target.name in name] | [
"def",
"get_targets",
"(",
"self",
",",
"name",
"=",
"None",
")",
":",
"targets",
"=",
"[",
"]",
"for",
"section",
"in",
"self",
".",
"get_sections",
"(",
")",
":",
"if",
"section",
".",
"endswith",
"(",
"u'Target'",
")",
":",
"targets",
"+=",
"[",
"value",
"for",
"value",
"in",
"self",
".",
"_sections",
"[",
"section",
"]",
"]",
"if",
"name",
"is",
"None",
":",
"return",
"targets",
"if",
"not",
"isinstance",
"(",
"name",
",",
"list",
")",
":",
"name",
"=",
"[",
"name",
"]",
"return",
"[",
"target",
"for",
"target",
"in",
"targets",
"if",
"target",
".",
"name",
"in",
"name",
"]"
] | 31.5 | [
0.030303030303030304,
0.18181818181818182,
0.05128205128205128,
0.04054054054054054,
0.07317073170731707,
0.18181818181818182,
0.1,
0.046511627906976744,
0.046511627906976744,
0.028169014084507043,
0,
0.08333333333333333,
0.07692307692307693,
0,
0.05263157894736842,
0.08,
0,
0.029411764705882353
] |
def get_preferences_from_user(self):
"""Launches preferences dialog and returns dict with preferences"""
dlg = PreferencesDialog(self.main_window)
change_choice = dlg.ShowModal()
preferences = {}
if change_choice == wx.ID_OK:
for (parameter, _), ctrl in zip(dlg.parameters, dlg.textctrls):
if isinstance(ctrl, wx.Choice):
value = ctrl.GetStringSelection()
if value:
preferences[parameter] = repr(value)
else:
preferences[parameter] = repr(ctrl.Value)
dlg.Destroy()
return preferences | [
"def",
"get_preferences_from_user",
"(",
"self",
")",
":",
"dlg",
"=",
"PreferencesDialog",
"(",
"self",
".",
"main_window",
")",
"change_choice",
"=",
"dlg",
".",
"ShowModal",
"(",
")",
"preferences",
"=",
"{",
"}",
"if",
"change_choice",
"==",
"wx",
".",
"ID_OK",
":",
"for",
"(",
"parameter",
",",
"_",
")",
",",
"ctrl",
"in",
"zip",
"(",
"dlg",
".",
"parameters",
",",
"dlg",
".",
"textctrls",
")",
":",
"if",
"isinstance",
"(",
"ctrl",
",",
"wx",
".",
"Choice",
")",
":",
"value",
"=",
"ctrl",
".",
"GetStringSelection",
"(",
")",
"if",
"value",
":",
"preferences",
"[",
"parameter",
"]",
"=",
"repr",
"(",
"value",
")",
"else",
":",
"preferences",
"[",
"parameter",
"]",
"=",
"repr",
"(",
"ctrl",
".",
"Value",
")",
"dlg",
".",
"Destroy",
"(",
")",
"return",
"preferences"
] | 31.095238 | [
0.027777777777777776,
0.02666666666666667,
0,
0.04081632653061224,
0,
0.05128205128205128,
0,
0.08333333333333333,
0,
0.05405405405405406,
0.02666666666666667,
0.0425531914893617,
0.03773584905660377,
0.06896551724137931,
0.03333333333333333,
0.09523809523809523,
0.03278688524590164,
0,
0.09523809523809523,
0,
0.07692307692307693
] |
def get_report_time(self):
""" Returns time when the L2A processing started and reports was created.
:return: String in a form YYYYMMDDTHHMMSS
:rtype: str
"""
tree = get_xml(self.get_url(AwsConstants.REPORT))
try:
timestamp = tree.find('check/inspection').attrib['execution']
return timestamp.split(',')[0].replace(' ', 'T').replace(':', '').replace('-', '')
except AttributeError:
warnings.warn('Could not obtain the L2A report creation time')
return 'unknown' | [
"def",
"get_report_time",
"(",
"self",
")",
":",
"tree",
"=",
"get_xml",
"(",
"self",
".",
"get_url",
"(",
"AwsConstants",
".",
"REPORT",
")",
")",
"try",
":",
"timestamp",
"=",
"tree",
".",
"find",
"(",
"'check/inspection'",
")",
".",
"attrib",
"[",
"'execution'",
"]",
"return",
"timestamp",
".",
"split",
"(",
"','",
")",
"[",
"0",
"]",
".",
"replace",
"(",
"' '",
",",
"'T'",
")",
".",
"replace",
"(",
"':'",
",",
"''",
")",
".",
"replace",
"(",
"'-'",
",",
"''",
")",
"except",
"AttributeError",
":",
"warnings",
".",
"warn",
"(",
"'Could not obtain the L2A report creation time'",
")",
"return",
"'unknown'"
] | 42.615385 | [
0.038461538461538464,
0.037037037037037035,
0.061224489795918366,
0.15789473684210525,
0.18181818181818182,
0.03508771929824561,
0,
0.16666666666666666,
0.0273972602739726,
0.031914893617021274,
0.06666666666666667,
0.02702702702702703,
0.07142857142857142
] |
def output_colored(code, text, is_bold=False):
"""
Create function to output with color sequence
"""
if is_bold:
code = '1;%s' % code
return '\033[%sm%s\033[0m' % (code, text) | [
"def",
"output_colored",
"(",
"code",
",",
"text",
",",
"is_bold",
"=",
"False",
")",
":",
"if",
"is_bold",
":",
"code",
"=",
"'1;%s'",
"%",
"code",
"return",
"'\\033[%sm%s\\033[0m'",
"%",
"(",
"code",
",",
"text",
")"
] | 24.625 | [
0.021739130434782608,
0.2857142857142857,
0.04081632653061224,
0.2857142857142857,
0.13333333333333333,
0.07142857142857142,
0,
0.044444444444444446
] |
def get_options_from_file(self, options):
"""
Search and read a configuration file to override options.
Available formats are python, yaml and json (file extension rules).
By default, there is no configuration file and this method exits immediately.
To define a configuration file, use:
- variable OPTIONS_FILE,
- optional special parameter --options_file
search order is:
- hardcoded if file is an absolute path,
- hardcoded path in variable OPTIONS_PATH if existing,
- local directory,
- ~/.clingon/,
- /etc/clingon/,
If a configuration file is found, sets the variable
options_file_path to effective_path/effective_file.
"""
options_file = self.python_options.get('options_file') or self._variables.get('OPTIONS_FILE')
if not options_file:
self._variables['options_file_path'] = None
return
options_path = self._variables.get('OPTIONS_PATH')
options_dict, options_file_path = None, None
try:
if options_path or os.path.isabs(options_file):
options_file_path, options_dict = read_configuration(options_file, options_path)
else:
for path in (os.getcwd(), os.path.expanduser('~/.clingon'), '/etc/clingon/'):
try:
options_file_path, options_dict = read_configuration(options_file, path)
break
except RuntimeError as e:
error = e
except (RuntimeError, TypeError) as e:
self._write_error(str(e))
self._variables['options_file_path'] = options_file_path
if options_dict:
for k in list(self.python_options):
default = options_dict.get(k)
if default is not None:
options[k] = self.eval_option_value(default)
else:
self._write_error(str(error)) | [
"def",
"get_options_from_file",
"(",
"self",
",",
"options",
")",
":",
"options_file",
"=",
"self",
".",
"python_options",
".",
"get",
"(",
"'options_file'",
")",
"or",
"self",
".",
"_variables",
".",
"get",
"(",
"'OPTIONS_FILE'",
")",
"if",
"not",
"options_file",
":",
"self",
".",
"_variables",
"[",
"'options_file_path'",
"]",
"=",
"None",
"return",
"options_path",
"=",
"self",
".",
"_variables",
".",
"get",
"(",
"'OPTIONS_PATH'",
")",
"options_dict",
",",
"options_file_path",
"=",
"None",
",",
"None",
"try",
":",
"if",
"options_path",
"or",
"os",
".",
"path",
".",
"isabs",
"(",
"options_file",
")",
":",
"options_file_path",
",",
"options_dict",
"=",
"read_configuration",
"(",
"options_file",
",",
"options_path",
")",
"else",
":",
"for",
"path",
"in",
"(",
"os",
".",
"getcwd",
"(",
")",
",",
"os",
".",
"path",
".",
"expanduser",
"(",
"'~/.clingon'",
")",
",",
"'/etc/clingon/'",
")",
":",
"try",
":",
"options_file_path",
",",
"options_dict",
"=",
"read_configuration",
"(",
"options_file",
",",
"path",
")",
"break",
"except",
"RuntimeError",
"as",
"e",
":",
"error",
"=",
"e",
"except",
"(",
"RuntimeError",
",",
"TypeError",
")",
"as",
"e",
":",
"self",
".",
"_write_error",
"(",
"str",
"(",
"e",
")",
")",
"self",
".",
"_variables",
"[",
"'options_file_path'",
"]",
"=",
"options_file_path",
"if",
"options_dict",
":",
"for",
"k",
"in",
"list",
"(",
"self",
".",
"python_options",
")",
":",
"default",
"=",
"options_dict",
".",
"get",
"(",
"k",
")",
"if",
"default",
"is",
"not",
"None",
":",
"options",
"[",
"k",
"]",
"=",
"self",
".",
"eval_option_value",
"(",
"default",
")",
"else",
":",
"self",
".",
"_write_error",
"(",
"str",
"(",
"error",
")",
")"
] | 45.930233 | [
0.024390243902439025,
0.18181818181818182,
0.03076923076923077,
0.04,
0.03529411764705882,
0.045454545454545456,
0.0625,
0.058823529411764705,
0.125,
0.041666666666666664,
0.03225806451612903,
0.07692307692307693,
0.09090909090909091,
0.125,
0.03389830508474576,
0.03389830508474576,
0.18181818181818182,
0.0297029702970297,
0.07142857142857142,
0.03636363636363636,
0.1111111111111111,
0.034482758620689655,
0.038461538461538464,
0.16666666666666666,
0.03389830508474576,
0.03125,
0.11764705882352941,
0.03225806451612903,
0.08333333333333333,
0.03125,
0.06896551724137931,
0.044444444444444446,
0.06060606060606061,
0.043478260869565216,
0.05405405405405406,
0.03125,
0.08333333333333333,
0.0425531914893617,
0.044444444444444446,
0.05128205128205128,
0.03125,
0.15384615384615385,
0.04878048780487805
] |
def run(self, func, tasks, func2=None):
'''run will send a list of tasks,
a tuple with arguments, through a function.
the arguments should be ordered correctly.
:param func: the function to run with multiprocessing.pool
:param tasks: a list of tasks, each a tuple
of arguments to process
:param func2: filter function to run result
from func through (optional)
'''
# Keep track of some progress for the user
progress = 1
total = len(tasks)
# if we don't have tasks, don't run
if len(tasks) == 0:
return
# If two functions are run per task, double total jobs
if func2 is not None:
total = total * 2
finished = []
level1 = []
results = []
try:
prefix = "[%s/%s]" % (progress, total)
bot.show_progress(0, total, length=35, prefix=prefix)
pool = multiprocessing.Pool(self.workers, init_worker)
self.start()
for task in tasks:
result = pool.apply_async(multi_wrapper,
multi_package(func, [task]))
results.append(result)
level1.append(result._job)
while len(results) > 0:
result = results.pop()
result.wait()
bot.show_progress(progress, total, length=35, prefix=prefix)
progress += 1
prefix = "[%s/%s]" % (progress, total)
# Pass the result through a second function?
if func2 is not None and result._job in level1:
result = pool.apply_async(multi_wrapper,
multi_package(func2,
[(result.get(),)]))
results.append(result)
else:
finished.append(result.get())
self.end()
pool.close()
pool.join()
except (KeyboardInterrupt, SystemExit):
bot.error("Keyboard interrupt detected, terminating workers!")
pool.terminate()
sys.exit(1)
except Exception as e:
bot.error(e)
return finished | [
"def",
"run",
"(",
"self",
",",
"func",
",",
"tasks",
",",
"func2",
"=",
"None",
")",
":",
"# Keep track of some progress for the user",
"progress",
"=",
"1",
"total",
"=",
"len",
"(",
"tasks",
")",
"# if we don't have tasks, don't run",
"if",
"len",
"(",
"tasks",
")",
"==",
"0",
":",
"return",
"# If two functions are run per task, double total jobs",
"if",
"func2",
"is",
"not",
"None",
":",
"total",
"=",
"total",
"*",
"2",
"finished",
"=",
"[",
"]",
"level1",
"=",
"[",
"]",
"results",
"=",
"[",
"]",
"try",
":",
"prefix",
"=",
"\"[%s/%s]\"",
"%",
"(",
"progress",
",",
"total",
")",
"bot",
".",
"show_progress",
"(",
"0",
",",
"total",
",",
"length",
"=",
"35",
",",
"prefix",
"=",
"prefix",
")",
"pool",
"=",
"multiprocessing",
".",
"Pool",
"(",
"self",
".",
"workers",
",",
"init_worker",
")",
"self",
".",
"start",
"(",
")",
"for",
"task",
"in",
"tasks",
":",
"result",
"=",
"pool",
".",
"apply_async",
"(",
"multi_wrapper",
",",
"multi_package",
"(",
"func",
",",
"[",
"task",
"]",
")",
")",
"results",
".",
"append",
"(",
"result",
")",
"level1",
".",
"append",
"(",
"result",
".",
"_job",
")",
"while",
"len",
"(",
"results",
")",
">",
"0",
":",
"result",
"=",
"results",
".",
"pop",
"(",
")",
"result",
".",
"wait",
"(",
")",
"bot",
".",
"show_progress",
"(",
"progress",
",",
"total",
",",
"length",
"=",
"35",
",",
"prefix",
"=",
"prefix",
")",
"progress",
"+=",
"1",
"prefix",
"=",
"\"[%s/%s]\"",
"%",
"(",
"progress",
",",
"total",
")",
"# Pass the result through a second function?",
"if",
"func2",
"is",
"not",
"None",
"and",
"result",
".",
"_job",
"in",
"level1",
":",
"result",
"=",
"pool",
".",
"apply_async",
"(",
"multi_wrapper",
",",
"multi_package",
"(",
"func2",
",",
"[",
"(",
"result",
".",
"get",
"(",
")",
",",
")",
"]",
")",
")",
"results",
".",
"append",
"(",
"result",
")",
"else",
":",
"finished",
".",
"append",
"(",
"result",
".",
"get",
"(",
")",
")",
"self",
".",
"end",
"(",
")",
"pool",
".",
"close",
"(",
")",
"pool",
".",
"join",
"(",
")",
"except",
"(",
"KeyboardInterrupt",
",",
"SystemExit",
")",
":",
"bot",
".",
"error",
"(",
"\"Keyboard interrupt detected, terminating workers!\"",
")",
"pool",
".",
"terminate",
"(",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"except",
"Exception",
"as",
"e",
":",
"bot",
".",
"error",
"(",
"e",
")",
"return",
"finished"
] | 33.691176 | [
0.02564102564102564,
0.04878048780487805,
0.0392156862745098,
0.04,
0.045454545454545456,
0.058823529411764705,
0.06666666666666667,
0.058823529411764705,
0.08,
0.18181818181818182,
0,
0.04,
0.1,
0.07692307692307693,
0,
0.046511627906976744,
0.07407407407407407,
0.1111111111111111,
0,
0.03225806451612903,
0.06896551724137931,
0.06896551724137931,
0,
0.09523809523809523,
0.10526315789473684,
0.1,
0,
0.16666666666666666,
0.04,
0.03076923076923077,
0.030303030303030304,
0,
0.08333333333333333,
0.06666666666666667,
0.05357142857142857,
0.05714285714285714,
0.05263157894736842,
0.047619047619047616,
0,
0.05714285714285714,
0.05263157894736842,
0.06896551724137931,
0.02631578947368421,
0.06896551724137931,
0.037037037037037035,
0,
0.03333333333333333,
0.031746031746031744,
0.05,
0.06060606060606061,
0.0379746835443038,
0.047619047619047616,
0.09523809523809523,
0.04081632653061224,
0,
0.09090909090909091,
0.08333333333333333,
0.08695652173913043,
0,
0.0425531914893617,
0.02702702702702703,
0.07142857142857142,
0.08695652173913043,
0,
0.06666666666666667,
0.08333333333333333,
0,
0.08695652173913043
] |
def _update(self, data):
'''Update the line using the blob of json-parsed data directly from the
API.
'''
self.bullet = data['bullet']
self.level = data['level']
self.text = WikiText(data['text_raw'],
data['text_rendered']) | [
"def",
"_update",
"(",
"self",
",",
"data",
")",
":",
"self",
".",
"bullet",
"=",
"data",
"[",
"'bullet'",
"]",
"self",
".",
"level",
"=",
"data",
"[",
"'level'",
"]",
"self",
".",
"text",
"=",
"WikiText",
"(",
"data",
"[",
"'text_raw'",
"]",
",",
"data",
"[",
"'text_rendered'",
"]",
")"
] | 34.875 | [
0.041666666666666664,
0.025974025974025976,
0.3,
0.2222222222222222,
0.08823529411764706,
0.09375,
0.09090909090909091,
0.08163265306122448
] |
def _call_api(self, method, params=None):
"""
Low-level method to call the Slack API.
Args:
method: {str} method name to call
params: {dict} GET parameters
The token will always be added
"""
url = self.url.format(method=method)
if not params:
params = {'token': self.token}
else:
params['token'] = self.token
logger.debug('Send request to %s', url)
response = requests.get(url, params=params).json()
if self.verify:
if not response['ok']:
msg = 'For {url} API returned this bad response {response}'
raise Exception(msg.format(url=url, response=response))
return response | [
"def",
"_call_api",
"(",
"self",
",",
"method",
",",
"params",
"=",
"None",
")",
":",
"url",
"=",
"self",
".",
"url",
".",
"format",
"(",
"method",
"=",
"method",
")",
"if",
"not",
"params",
":",
"params",
"=",
"{",
"'token'",
":",
"self",
".",
"token",
"}",
"else",
":",
"params",
"[",
"'token'",
"]",
"=",
"self",
".",
"token",
"logger",
".",
"debug",
"(",
"'Send request to %s'",
",",
"url",
")",
"response",
"=",
"requests",
".",
"get",
"(",
"url",
",",
"params",
"=",
"params",
")",
".",
"json",
"(",
")",
"if",
"self",
".",
"verify",
":",
"if",
"not",
"response",
"[",
"'ok'",
"]",
":",
"msg",
"=",
"'For {url} API returned this bad response {response}'",
"raise",
"Exception",
"(",
"msg",
".",
"format",
"(",
"url",
"=",
"url",
",",
"response",
"=",
"response",
")",
")",
"return",
"response"
] | 35.571429 | [
0.024390243902439025,
0.18181818181818182,
0.0425531914893617,
0,
0.15384615384615385,
0.044444444444444446,
0.04878048780487805,
0.043478260869565216,
0.18181818181818182,
0.045454545454545456,
0.09090909090909091,
0.047619047619047616,
0.15384615384615385,
0.05,
0.0425531914893617,
0.034482758620689655,
0.08695652173913043,
0.058823529411764705,
0.02666666666666667,
0.028169014084507043,
0.08695652173913043
] |
def discover(scope, loglevel, capture):
"Discover systems using WS-Discovery"
if loglevel:
level = getattr(logging, loglevel, None)
if not level:
print("Invalid log level '%s'" % loglevel)
return
logger.setLevel(level)
run(scope=scope, capture=capture) | [
"def",
"discover",
"(",
"scope",
",",
"loglevel",
",",
"capture",
")",
":",
"if",
"loglevel",
":",
"level",
"=",
"getattr",
"(",
"logging",
",",
"loglevel",
",",
"None",
")",
"if",
"not",
"level",
":",
"print",
"(",
"\"Invalid log level '%s'\"",
"%",
"loglevel",
")",
"return",
"logger",
".",
"setLevel",
"(",
"level",
")",
"run",
"(",
"scope",
"=",
"scope",
",",
"capture",
"=",
"capture",
")"
] | 27.454545 | [
0.02564102564102564,
0.04878048780487805,
0,
0.125,
0.041666666666666664,
0.09523809523809523,
0.05660377358490566,
0.17647058823529413,
0.06666666666666667,
0,
0.05405405405405406
] |
def collect_expression(sample_frame, ref_targets, ref_sample):
"""Calculates the expression of all rows in the sample_frame relative to
each of the ref_targets. Used in rank_targets.
:param DataFrame sample_frame: A sample data frame.
:param iterable ref_targets: A sequence of targets from the Target column of
the sample frame.
:param string ref_sample: The name of the sample to which expression should
be referenced.
:return: a DataFrame of relative expression; rows represent rows of the
sample_frame and columns represent each of the ref_targets.
:rtype: DataFrame
"""
by_gene = {'Sample': sample_frame['Sample'], 'Target': sample_frame['Target']}
for target in ref_targets:
by_gene[target] = expression_ddcq(sample_frame, target, ref_sample)
return pd.DataFrame(by_gene) | [
"def",
"collect_expression",
"(",
"sample_frame",
",",
"ref_targets",
",",
"ref_sample",
")",
":",
"by_gene",
"=",
"{",
"'Sample'",
":",
"sample_frame",
"[",
"'Sample'",
"]",
",",
"'Target'",
":",
"sample_frame",
"[",
"'Target'",
"]",
"}",
"for",
"target",
"in",
"ref_targets",
":",
"by_gene",
"[",
"target",
"]",
"=",
"expression_ddcq",
"(",
"sample_frame",
",",
"target",
",",
"ref_sample",
")",
"return",
"pd",
".",
"DataFrame",
"(",
"by_gene",
")"
] | 49.294118 | [
0.016129032258064516,
0.02631578947368421,
0.04,
0,
0.05454545454545454,
0.05,
0.08,
0.0379746835443038,
0.09090909090909091,
0.05333333333333334,
0.029850746268656716,
0.14285714285714285,
0.2857142857142857,
0.036585365853658534,
0.06666666666666667,
0.02666666666666667,
0.0625
] |
def boost(dev, target):
""" Gets or sets the boost mode. """
click.echo("Boost: %s" % dev.boost)
if target is not None:
click.echo("Setting boost: %s" % target)
dev.boost = target | [
"def",
"boost",
"(",
"dev",
",",
"target",
")",
":",
"click",
".",
"echo",
"(",
"\"Boost: %s\"",
"%",
"dev",
".",
"boost",
")",
"if",
"target",
"is",
"not",
"None",
":",
"click",
".",
"echo",
"(",
"\"Setting boost: %s\"",
"%",
"target",
")",
"dev",
".",
"boost",
"=",
"target"
] | 33.666667 | [
0.043478260869565216,
0.05,
0.05128205128205128,
0.07692307692307693,
0.041666666666666664,
0.07692307692307693
] |
def _on_motion(self, event):
"""Drag around label if visible."""
if not self._visual_drag.winfo_ismapped():
return
if self._drag_cols and self._dragged_col is not None:
self._drag_col(event)
elif self._drag_rows and self._dragged_row is not None:
self._drag_row(event) | [
"def",
"_on_motion",
"(",
"self",
",",
"event",
")",
":",
"if",
"not",
"self",
".",
"_visual_drag",
".",
"winfo_ismapped",
"(",
")",
":",
"return",
"if",
"self",
".",
"_drag_cols",
"and",
"self",
".",
"_dragged_col",
"is",
"not",
"None",
":",
"self",
".",
"_drag_col",
"(",
"event",
")",
"elif",
"self",
".",
"_drag_rows",
"and",
"self",
".",
"_dragged_row",
"is",
"not",
"None",
":",
"self",
".",
"_drag_row",
"(",
"event",
")"
] | 36.555556 | [
0.03571428571428571,
0.046511627906976744,
0.04,
0.1111111111111111,
0,
0.03278688524590164,
0.06060606060606061,
0.031746031746031744,
0.06060606060606061
] |
def datacenter_configured(name):
'''
Makes sure a datacenter exists.
If the state is run by an ``esxdatacenter`` minion, the name of the
datacenter is retrieved from the proxy details, otherwise the datacenter
has the same name as the state.
Supported proxies: esxdatacenter
name:
Datacenter name. Ignored if the proxytype is ``esxdatacenter``.
'''
proxy_type = __salt__['vsphere.get_proxy_type']()
if proxy_type == 'esxdatacenter':
dc_name = __salt__['esxdatacenter.get_details']()['datacenter']
else:
dc_name = name
log.info('Running datacenter_configured for datacenter \'%s\'', dc_name)
ret = {'name': name,
'changes': {},
'result': None,
'comment': 'Default'}
comments = []
si = None
try:
si = __salt__['vsphere.get_service_instance_via_proxy']()
dcs = __salt__['vsphere.list_datacenters_via_proxy'](
datacenter_names=[dc_name], service_instance=si)
if not dcs:
if __opts__['test']:
comments.append('State will create '
'datacenter \'{0}\'.'.format(dc_name))
else:
log.debug('Creating datacenter \'%s\'', dc_name)
__salt__['vsphere.create_datacenter'](dc_name, si)
comments.append('Created datacenter \'{0}\'.'.format(dc_name))
log.info(comments[-1])
ret['changes'].update({'new': {'name': dc_name}})
else:
comments.append('Datacenter \'{0}\' already exists. Nothing to be '
'done.'.format(dc_name))
log.info(comments[-1])
__salt__['vsphere.disconnect'](si)
ret['comment'] = '\n'.join(comments)
ret['result'] = None if __opts__['test'] and ret['changes'] else True
return ret
except salt.exceptions.CommandExecutionError as exc:
log.error('Error: %s', exc)
if si:
__salt__['vsphere.disconnect'](si)
ret.update({
'result': False if not __opts__['test'] else None,
'comment': six.text_type(exc)})
return ret | [
"def",
"datacenter_configured",
"(",
"name",
")",
":",
"proxy_type",
"=",
"__salt__",
"[",
"'vsphere.get_proxy_type'",
"]",
"(",
")",
"if",
"proxy_type",
"==",
"'esxdatacenter'",
":",
"dc_name",
"=",
"__salt__",
"[",
"'esxdatacenter.get_details'",
"]",
"(",
")",
"[",
"'datacenter'",
"]",
"else",
":",
"dc_name",
"=",
"name",
"log",
".",
"info",
"(",
"'Running datacenter_configured for datacenter \\'%s\\''",
",",
"dc_name",
")",
"ret",
"=",
"{",
"'name'",
":",
"name",
",",
"'changes'",
":",
"{",
"}",
",",
"'result'",
":",
"None",
",",
"'comment'",
":",
"'Default'",
"}",
"comments",
"=",
"[",
"]",
"si",
"=",
"None",
"try",
":",
"si",
"=",
"__salt__",
"[",
"'vsphere.get_service_instance_via_proxy'",
"]",
"(",
")",
"dcs",
"=",
"__salt__",
"[",
"'vsphere.list_datacenters_via_proxy'",
"]",
"(",
"datacenter_names",
"=",
"[",
"dc_name",
"]",
",",
"service_instance",
"=",
"si",
")",
"if",
"not",
"dcs",
":",
"if",
"__opts__",
"[",
"'test'",
"]",
":",
"comments",
".",
"append",
"(",
"'State will create '",
"'datacenter \\'{0}\\'.'",
".",
"format",
"(",
"dc_name",
")",
")",
"else",
":",
"log",
".",
"debug",
"(",
"'Creating datacenter \\'%s\\''",
",",
"dc_name",
")",
"__salt__",
"[",
"'vsphere.create_datacenter'",
"]",
"(",
"dc_name",
",",
"si",
")",
"comments",
".",
"append",
"(",
"'Created datacenter \\'{0}\\'.'",
".",
"format",
"(",
"dc_name",
")",
")",
"log",
".",
"info",
"(",
"comments",
"[",
"-",
"1",
"]",
")",
"ret",
"[",
"'changes'",
"]",
".",
"update",
"(",
"{",
"'new'",
":",
"{",
"'name'",
":",
"dc_name",
"}",
"}",
")",
"else",
":",
"comments",
".",
"append",
"(",
"'Datacenter \\'{0}\\' already exists. Nothing to be '",
"'done.'",
".",
"format",
"(",
"dc_name",
")",
")",
"log",
".",
"info",
"(",
"comments",
"[",
"-",
"1",
"]",
")",
"__salt__",
"[",
"'vsphere.disconnect'",
"]",
"(",
"si",
")",
"ret",
"[",
"'comment'",
"]",
"=",
"'\\n'",
".",
"join",
"(",
"comments",
")",
"ret",
"[",
"'result'",
"]",
"=",
"None",
"if",
"__opts__",
"[",
"'test'",
"]",
"and",
"ret",
"[",
"'changes'",
"]",
"else",
"True",
"return",
"ret",
"except",
"salt",
".",
"exceptions",
".",
"CommandExecutionError",
"as",
"exc",
":",
"log",
".",
"error",
"(",
"'Error: %s'",
",",
"exc",
")",
"if",
"si",
":",
"__salt__",
"[",
"'vsphere.disconnect'",
"]",
"(",
"si",
")",
"ret",
".",
"update",
"(",
"{",
"'result'",
":",
"False",
"if",
"not",
"__opts__",
"[",
"'test'",
"]",
"else",
"None",
",",
"'comment'",
":",
"six",
".",
"text_type",
"(",
"exc",
")",
"}",
")",
"return",
"ret"
] | 38.618182 | [
0.03125,
0.2857142857142857,
0.05714285714285714,
0,
0.04225352112676056,
0.02631578947368421,
0.05714285714285714,
0,
0.05555555555555555,
0,
0.2222222222222222,
0.04225352112676056,
0.2857142857142857,
0.03773584905660377,
0.05405405405405406,
0.028169014084507043,
0.2222222222222222,
0.09090909090909091,
0.02631578947368421,
0.125,
0.12,
0.11538461538461539,
0.125,
0.11764705882352941,
0.15384615384615385,
0.25,
0.03076923076923077,
0.04918032786885246,
0.08333333333333333,
0.10526315789473684,
0.0625,
0.057692307692307696,
0.04285714285714286,
0.11764705882352941,
0.03125,
0.030303030303030304,
0.02564102564102564,
0.058823529411764705,
0.03278688524590164,
0.15384615384615385,
0.0379746835443038,
0.057692307692307696,
0.058823529411764705,
0.047619047619047616,
0.045454545454545456,
0.025974025974025976,
0.1111111111111111,
0.03571428571428571,
0.05714285714285714,
0.14285714285714285,
0.043478260869565216,
0.15,
0.03225806451612903,
0.06976744186046512,
0.1111111111111111
] |
def option(self, *args, **kwargs):
"""
Registers a click.option which falls back to a configmanager Item
if user hasn't provided a value in the command line.
Item must be the last of ``args``.
Examples::
config = Config({'greeting': 'Hello'})
@click.command()
@config.click.option('--greeting', config.greeting)
def say_hello(greeting):
click.echo(greeting)
"""
args, kwargs = _config_parameter(args, kwargs)
return self._click.option(*args, **kwargs) | [
"def",
"option",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"args",
",",
"kwargs",
"=",
"_config_parameter",
"(",
"args",
",",
"kwargs",
")",
"return",
"self",
".",
"_click",
".",
"option",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | 29.789474 | [
0.029411764705882353,
0.18181818181818182,
0.0273972602739726,
0.03333333333333333,
0,
0.07142857142857142,
0,
0.16666666666666666,
0,
0.04,
0,
0.07142857142857142,
0.031746031746031744,
0.05555555555555555,
0.05555555555555555,
0,
0.18181818181818182,
0.037037037037037035,
0.04
] |
def to_pandas(self):
"""Converts Modin DataFrame to Pandas DataFrame.
Returns:
Pandas DataFrame of the DataManager.
"""
df = self.data.to_pandas(is_transposed=self._is_transposed)
if df.empty:
dtype_dict = {
col_name: pandas.Series(dtype=self.dtypes[col_name])
for col_name in self.columns
}
df = pandas.DataFrame(dtype_dict, self.index)
else:
ErrorMessage.catch_bugs_and_request_email(
len(df.index) != len(self.index) or len(df.columns) != len(self.columns)
)
df.index = self.index
df.columns = self.columns
return df | [
"def",
"to_pandas",
"(",
"self",
")",
":",
"df",
"=",
"self",
".",
"data",
".",
"to_pandas",
"(",
"is_transposed",
"=",
"self",
".",
"_is_transposed",
")",
"if",
"df",
".",
"empty",
":",
"dtype_dict",
"=",
"{",
"col_name",
":",
"pandas",
".",
"Series",
"(",
"dtype",
"=",
"self",
".",
"dtypes",
"[",
"col_name",
"]",
")",
"for",
"col_name",
"in",
"self",
".",
"columns",
"}",
"df",
"=",
"pandas",
".",
"DataFrame",
"(",
"dtype_dict",
",",
"self",
".",
"index",
")",
"else",
":",
"ErrorMessage",
".",
"catch_bugs_and_request_email",
"(",
"len",
"(",
"df",
".",
"index",
")",
"!=",
"len",
"(",
"self",
".",
"index",
")",
"or",
"len",
"(",
"df",
".",
"columns",
")",
"!=",
"len",
"(",
"self",
".",
"columns",
")",
")",
"df",
".",
"index",
"=",
"self",
".",
"index",
"df",
".",
"columns",
"=",
"self",
".",
"columns",
"return",
"df"
] | 36 | [
0.047619047619047616,
0.017543859649122806,
1,
0.11764705882352941,
0.02040816326530612,
0.08333333333333333,
0.014705882352941176,
0.14285714285714285,
0.07407407407407407,
0.014492753623188406,
0.022222222222222223,
0.14285714285714285,
0.017241379310344827,
0.21428571428571427,
0.03636363636363636,
0.02247191011235955,
0.14285714285714285,
0.029411764705882353,
0.02631578947368421,
0.11764705882352941
] |
def fit(self, X, y):
"""
Fit the model using X, y as training data.
Parameters
----------
X : {array-like, sparse matrix} of shape [n_samples, n_features]
Training vectors, where n_samples is the number of samples
and n_features is the number of features.
y : array-like of shape [n_samples, n_outputs]
Target values (class labels in classification, real numbers in
regression)
Returns
-------
self : object
Returns an instance of self.
"""
# fit random hidden layer and compute the hidden layer activations
self.hidden_activations_ = self.hidden_layer.fit_transform(X)
# solve the regression from hidden activations to outputs
self._fit_regression(as_float_array(y, copy=True))
return self | [
"def",
"fit",
"(",
"self",
",",
"X",
",",
"y",
")",
":",
"# fit random hidden layer and compute the hidden layer activations",
"self",
".",
"hidden_activations_",
"=",
"self",
".",
"hidden_layer",
".",
"fit_transform",
"(",
"X",
")",
"# solve the regression from hidden activations to outputs",
"self",
".",
"_fit_regression",
"(",
"as_float_array",
"(",
"y",
",",
"copy",
"=",
"True",
")",
")",
"return",
"self"
] | 31.481481 | [
0.05,
0.18181818181818182,
0.04,
0,
0.1111111111111111,
0.1111111111111111,
0.05555555555555555,
0.02857142857142857,
0.03773584905660377,
0,
0.07407407407407407,
0.05405405405405406,
0.13043478260869565,
0,
0.13333333333333333,
0.13333333333333333,
0.14285714285714285,
0,
0.05,
0.18181818181818182,
0.02702702702702703,
0.028985507246376812,
0,
0.03076923076923077,
0.034482758620689655,
0,
0.10526315789473684
] |
def init_app(self, app):
"""Flask application initialization."""
self.init_config(app)
if app.config['LOGGING_FS_LOGFILE'] is None:
return
self.install_handler(app)
app.extensions['invenio-logging-fs'] = self | [
"def",
"init_app",
"(",
"self",
",",
"app",
")",
":",
"self",
".",
"init_config",
"(",
"app",
")",
"if",
"app",
".",
"config",
"[",
"'LOGGING_FS_LOGFILE'",
"]",
"is",
"None",
":",
"return",
"self",
".",
"install_handler",
"(",
"app",
")",
"app",
".",
"extensions",
"[",
"'invenio-logging-fs'",
"]",
"=",
"self"
] | 36.285714 | [
0.041666666666666664,
0.0425531914893617,
0.06896551724137931,
0.038461538461538464,
0.1111111111111111,
0.06060606060606061,
0.0392156862745098
] |
def d8distdowntostream(np, p, fel, src, dist, distancemethod, thresh, workingdir=None,
mpiexedir=None, exedir=None,
log_file=None, runtime_file=None, hostfile=None):
"""Run D8 distance down to stream by different method for distance.
This function is extended from d8hdisttostrm by Liangjun.
Please clone `TauDEM by lreis2415`_ and compile for this program.
.. _TauDEM by lreis2415:
https://github.com/lreis2415/TauDEM
"""
fname = TauDEM.func_name('d8distdowntostream')
return TauDEM.run(FileClass.get_executable_fullpath(fname, exedir),
{'-fel': fel, '-p': p, '-src': src},
workingdir,
{'-thresh': thresh, '-m': TauDEM.convertdistmethod(distancemethod)},
{'-dist': dist},
{'mpipath': mpiexedir, 'hostfile': hostfile, 'n': np},
{'logfile': log_file, 'runtimefile': runtime_file}) | [
"def",
"d8distdowntostream",
"(",
"np",
",",
"p",
",",
"fel",
",",
"src",
",",
"dist",
",",
"distancemethod",
",",
"thresh",
",",
"workingdir",
"=",
"None",
",",
"mpiexedir",
"=",
"None",
",",
"exedir",
"=",
"None",
",",
"log_file",
"=",
"None",
",",
"runtime_file",
"=",
"None",
",",
"hostfile",
"=",
"None",
")",
":",
"fname",
"=",
"TauDEM",
".",
"func_name",
"(",
"'d8distdowntostream'",
")",
"return",
"TauDEM",
".",
"run",
"(",
"FileClass",
".",
"get_executable_fullpath",
"(",
"fname",
",",
"exedir",
")",
",",
"{",
"'-fel'",
":",
"fel",
",",
"'-p'",
":",
"p",
",",
"'-src'",
":",
"src",
"}",
",",
"workingdir",
",",
"{",
"'-thresh'",
":",
"thresh",
",",
"'-m'",
":",
"TauDEM",
".",
"convertdistmethod",
"(",
"distancemethod",
")",
"}",
",",
"{",
"'-dist'",
":",
"dist",
"}",
",",
"{",
"'mpipath'",
":",
"mpiexedir",
",",
"'hostfile'",
":",
"hostfile",
",",
"'n'",
":",
"np",
"}",
",",
"{",
"'logfile'",
":",
"log_file",
",",
"'runtimefile'",
":",
"runtime_file",
"}",
")"
] | 54.736842 | [
0.03488372093023256,
0.09090909090909091,
0.09210526315789473,
0.02666666666666667,
0.03076923076923077,
0,
0.0410958904109589,
0,
0.0625,
0.08695652173913043,
0.18181818181818182,
0.037037037037037035,
0.04,
0.04838709677419355,
0.08108108108108109,
0.0425531914893617,
0.07142857142857142,
0.05,
0.05194805194805195
] |
def kendalltau_dist(params1, params2=None):
r"""Compute the Kendall tau distance between two models.
This function computes the Kendall tau distance between the rankings
induced by two parameter vectors. Let :math:`\sigma_i` be the rank of item
``i`` in the model described by ``params1``, and :math:`\tau_i` be its rank
in the model described by ``params2``. The Kendall tau distance is defined
as the number of pairwise disagreements between the two rankings, i.e.,
.. math::
\sum_{i=1}^N \sum_{j=1}^N
\mathbf{1} \{ \sigma_i > \sigma_j \wedge \tau_i < \tau_j \}
By convention, items with the lowest parameters are ranked first (i.e.,
sorted using the natural order).
If the argument ``params2`` is ``None``, the second model is assumed to
rank the items by their index: item ``0`` has rank 1, item ``1`` has rank
2, etc.
If some values are equal within a parameter vector, all items are given a
distinct rank, corresponding to the order in which the values occur.
Parameters
----------
params1 : array_like
Parameters of the first model.
params2 : array_like, optional
Parameters of the second model.
Returns
-------
dist : float
Kendall tau distance.
"""
assert params2 is None or len(params1) == len(params2)
ranks1 = rankdata(params1, method="ordinal")
if params2 is None:
ranks2 = np.arange(1, len(params1) + 1, dtype=float)
else:
ranks2 = rankdata(params2, method="ordinal")
tau, _ = kendalltau(ranks1, ranks2)
n_items = len(params1)
n_pairs = n_items * (n_items - 1) / 2
return round((n_pairs - n_pairs * tau) / 2) | [
"def",
"kendalltau_dist",
"(",
"params1",
",",
"params2",
"=",
"None",
")",
":",
"assert",
"params2",
"is",
"None",
"or",
"len",
"(",
"params1",
")",
"==",
"len",
"(",
"params2",
")",
"ranks1",
"=",
"rankdata",
"(",
"params1",
",",
"method",
"=",
"\"ordinal\"",
")",
"if",
"params2",
"is",
"None",
":",
"ranks2",
"=",
"np",
".",
"arange",
"(",
"1",
",",
"len",
"(",
"params1",
")",
"+",
"1",
",",
"dtype",
"=",
"float",
")",
"else",
":",
"ranks2",
"=",
"rankdata",
"(",
"params2",
",",
"method",
"=",
"\"ordinal\"",
")",
"tau",
",",
"_",
"=",
"kendalltau",
"(",
"ranks1",
",",
"ranks2",
")",
"n_items",
"=",
"len",
"(",
"params1",
")",
"n_pairs",
"=",
"n_items",
"*",
"(",
"n_items",
"-",
"1",
")",
"/",
"2",
"return",
"round",
"(",
"(",
"n_pairs",
"-",
"n_pairs",
"*",
"tau",
")",
"/",
"2",
")"
] | 36.065217 | [
0.023255813953488372,
0.03333333333333333,
0,
0.027777777777777776,
0.07692307692307693,
0.0759493670886076,
0.038461538461538464,
0.02666666666666667,
0,
0.23076923076923078,
0,
0.22580645161290322,
0.04477611940298507,
0,
0.05333333333333334,
0.08333333333333333,
0,
0.04,
0.03896103896103896,
0.18181818181818182,
0,
0.025974025974025976,
0.027777777777777776,
0,
0.14285714285714285,
0.14285714285714285,
0.125,
0.05263157894736842,
0.08823529411764706,
0.05128205128205128,
0,
0.18181818181818182,
0.18181818181818182,
0.1875,
0.06896551724137931,
0.2857142857142857,
0.034482758620689655,
0.041666666666666664,
0.08695652173913043,
0.03333333333333333,
0.2222222222222222,
0.038461538461538464,
0.05128205128205128,
0.07692307692307693,
0.04878048780487805,
0.0425531914893617
] |
def _center_transform(self, transform):
''''
Works like setupTransform of a version of java nodebox
http://dev.nodebox.net/browser/nodebox-java/branches/rewrite/src/java/net/nodebox/graphics/Grob.java
'''
dx, dy = self._get_center()
t = cairo.Matrix()
t.translate(dx, dy)
t = transform * t
t.translate(-dx, -dy)
return t | [
"def",
"_center_transform",
"(",
"self",
",",
"transform",
")",
":",
"dx",
",",
"dy",
"=",
"self",
".",
"_get_center",
"(",
")",
"t",
"=",
"cairo",
".",
"Matrix",
"(",
")",
"t",
".",
"translate",
"(",
"dx",
",",
"dy",
")",
"t",
"=",
"transform",
"*",
"t",
"t",
".",
"translate",
"(",
"-",
"dx",
",",
"-",
"dy",
")",
"return",
"t"
] | 35.454545 | [
0.02564102564102564,
0.16666666666666666,
0.03225806451612903,
0.037037037037037035,
0.18181818181818182,
0.05714285714285714,
0.07692307692307693,
0.07407407407407407,
0.08,
0.06896551724137931,
0.125
] |
def update_terminal_regions(self, tree, X, y, residual, y_pred,
sample_weight, sample_mask,
learning_rate=1.0, k=0):
"""Least squares does not need to update terminal regions.
But it has to update the predictions.
"""
# update predictions
y_pred[:, k] += learning_rate * tree.predict(X).ravel() | [
"def",
"update_terminal_regions",
"(",
"self",
",",
"tree",
",",
"X",
",",
"y",
",",
"residual",
",",
"y_pred",
",",
"sample_weight",
",",
"sample_mask",
",",
"learning_rate",
"=",
"1.0",
",",
"k",
"=",
"0",
")",
":",
"# update predictions",
"y_pred",
"[",
":",
",",
"k",
"]",
"+=",
"learning_rate",
"*",
"tree",
".",
"predict",
"(",
"X",
")",
".",
"ravel",
"(",
")"
] | 43.444444 | [
0.031746031746031744,
0.03389830508474576,
0.08928571428571429,
0.030303030303030304,
0,
0.044444444444444446,
0.18181818181818182,
0.07142857142857142,
0.031746031746031744
] |
def get_body(self):
"""Return "data" value on self.data
:return: data to send
:rtype: str
"""
if self.default_body:
return self.default_body
data = self.data.get('data')
if isinstance(data, dict):
return json.dumps(data)
return data | [
"def",
"get_body",
"(",
"self",
")",
":",
"if",
"self",
".",
"default_body",
":",
"return",
"self",
".",
"default_body",
"data",
"=",
"self",
".",
"data",
".",
"get",
"(",
"'data'",
")",
"if",
"isinstance",
"(",
"data",
",",
"dict",
")",
":",
"return",
"json",
".",
"dumps",
"(",
"data",
")",
"return",
"data"
] | 25.833333 | [
0.05263157894736842,
0.046511627906976744,
0,
0.10344827586206896,
0.15789473684210525,
0.18181818181818182,
0.06896551724137931,
0.05555555555555555,
0.05555555555555555,
0.058823529411764705,
0.05714285714285714,
0.10526315789473684
] |
def GetPlugins(cls):
"""Retrieves the registered plugins.
Yields:
tuple[str, type]: name and class of the plugin.
"""
for plugin_name, plugin_class in iter(cls._plugin_classes.items()):
yield plugin_name, plugin_class | [
"def",
"GetPlugins",
"(",
"cls",
")",
":",
"for",
"plugin_name",
",",
"plugin_class",
"in",
"iter",
"(",
"cls",
".",
"_plugin_classes",
".",
"items",
"(",
")",
")",
":",
"yield",
"plugin_name",
",",
"plugin_class"
] | 29.875 | [
0.05,
0.05,
0,
0.18181818181818182,
0.05660377358490566,
0.2857142857142857,
0.028169014084507043,
0.08108108108108109
] |
def max_pv_count(self):
"""
Returns the maximum allowed physical volume count.
"""
self.open()
count = lvm_vg_get_max_pv(self.handle)
self.close()
return count | [
"def",
"max_pv_count",
"(",
"self",
")",
":",
"self",
".",
"open",
"(",
")",
"count",
"=",
"lvm_vg_get_max_pv",
"(",
"self",
".",
"handle",
")",
"self",
".",
"close",
"(",
")",
"return",
"count"
] | 26 | [
0.043478260869565216,
0.18181818181818182,
0.034482758620689655,
0.18181818181818182,
0.10526315789473684,
0.043478260869565216,
0.1,
0.1
] |
def tryReduceXor(sig, val):
"""
Return sig and val reduced by ^ operator or None
if it is not possible to statically reduce expression
"""
m = sig._dtype.all_mask()
if not val.vldMask:
return val
if val._isFullVld():
v = val.val
if v == m:
return ~sig
elif v == 0:
return sig | [
"def",
"tryReduceXor",
"(",
"sig",
",",
"val",
")",
":",
"m",
"=",
"sig",
".",
"_dtype",
".",
"all_mask",
"(",
")",
"if",
"not",
"val",
".",
"vldMask",
":",
"return",
"val",
"if",
"val",
".",
"_isFullVld",
"(",
")",
":",
"v",
"=",
"val",
".",
"val",
"if",
"v",
"==",
"m",
":",
"return",
"~",
"sig",
"elif",
"v",
"==",
"0",
":",
"return",
"sig"
] | 23.066667 | [
0.037037037037037035,
0.2857142857142857,
0.038461538461538464,
0.03508771929824561,
0.2857142857142857,
0.06896551724137931,
0.08695652173913043,
0.1111111111111111,
0,
0.08333333333333333,
0.10526315789473684,
0.1111111111111111,
0.08695652173913043,
0.1,
0.09090909090909091
] |
def backup_path(self) -> str:
"""
The path to the backed up optimizer folder.
"""
return "{}/{}/{}{}/optimizer_backup".format(conf.instance.output_path, self.phase_path, self.phase_name,
self.phase_tag) | [
"def",
"backup_path",
"(",
"self",
")",
"->",
"str",
":",
"return",
"\"{}/{}/{}{}/optimizer_backup\"",
".",
"format",
"(",
"conf",
".",
"instance",
".",
"output_path",
",",
"self",
".",
"phase_path",
",",
"self",
".",
"phase_name",
",",
"self",
".",
"phase_tag",
")"
] | 46.833333 | [
0.034482758620689655,
0.18181818181818182,
0.0392156862745098,
0.18181818181818182,
0.03571428571428571,
0.04477611940298507
] |
def trial(request):
"""View for a single trial."""
job_id = request.GET.get("job_id")
trial_id = request.GET.get("trial_id")
recent_trials = TrialRecord.objects \
.filter(job_id=job_id) \
.order_by("-start_time")
recent_results = ResultRecord.objects \
.filter(trial_id=trial_id) \
.order_by("-date")[0:2000]
current_trial = TrialRecord.objects \
.filter(trial_id=trial_id) \
.order_by("-start_time")[0]
context = {
"job_id": job_id,
"trial_id": trial_id,
"current_trial": current_trial,
"recent_results": recent_results,
"recent_trials": recent_trials
}
return render(request, "trial.html", context) | [
"def",
"trial",
"(",
"request",
")",
":",
"job_id",
"=",
"request",
".",
"GET",
".",
"get",
"(",
"\"job_id\"",
")",
"trial_id",
"=",
"request",
".",
"GET",
".",
"get",
"(",
"\"trial_id\"",
")",
"recent_trials",
"=",
"TrialRecord",
".",
"objects",
".",
"filter",
"(",
"job_id",
"=",
"job_id",
")",
".",
"order_by",
"(",
"\"-start_time\"",
")",
"recent_results",
"=",
"ResultRecord",
".",
"objects",
".",
"filter",
"(",
"trial_id",
"=",
"trial_id",
")",
".",
"order_by",
"(",
"\"-date\"",
")",
"[",
"0",
":",
"2000",
"]",
"current_trial",
"=",
"TrialRecord",
".",
"objects",
".",
"filter",
"(",
"trial_id",
"=",
"trial_id",
")",
".",
"order_by",
"(",
"\"-start_time\"",
")",
"[",
"0",
"]",
"context",
"=",
"{",
"\"job_id\"",
":",
"job_id",
",",
"\"trial_id\"",
":",
"trial_id",
",",
"\"current_trial\"",
":",
"current_trial",
",",
"\"recent_results\"",
":",
"recent_results",
",",
"\"recent_trials\"",
":",
"recent_trials",
"}",
"return",
"render",
"(",
"request",
",",
"\"trial.html\"",
",",
"context",
")"
] | 33.52381 | [
0.05263157894736842,
0.058823529411764705,
0.05263157894736842,
0.047619047619047616,
0.04878048780487805,
0.0625,
0.0625,
0.046511627906976744,
0.05555555555555555,
0.058823529411764705,
0.04878048780487805,
0.05555555555555555,
0.05714285714285714,
0.2,
0.08,
0.06896551724137931,
0.05128205128205128,
0.04878048780487805,
0.05263157894736842,
0.6,
0.04081632653061224
] |
def delete_gauge(self, slug):
"""Removes all gauges with the given ``slug``."""
key = self._gauge_key(slug)
self.r.delete(key) # Remove the Gauge
self.r.srem(self._gauge_slugs_key, slug) | [
"def",
"delete_gauge",
"(",
"self",
",",
"slug",
")",
":",
"key",
"=",
"self",
".",
"_gauge_key",
"(",
"slug",
")",
"self",
".",
"r",
".",
"delete",
"(",
"key",
")",
"# Remove the Gauge",
"self",
".",
"r",
".",
"srem",
"(",
"self",
".",
"_gauge_slugs_key",
",",
"slug",
")"
] | 43 | [
0.034482758620689655,
0.03508771929824561,
0.05714285714285714,
0.043478260869565216,
0.041666666666666664
] |
def altitude_encode(self, time_usec, altitude_monotonic, altitude_amsl, altitude_local, altitude_relative, altitude_terrain, bottom_clearance):
'''
The current system altitude.
time_usec : Timestamp (micros since boot or Unix epoch) (uint64_t)
altitude_monotonic : This altitude measure is initialized on system boot and monotonic (it is never reset, but represents the local altitude change). The only guarantee on this field is that it will never be reset and is consistent within a flight. The recommended value for this field is the uncorrected barometric altitude at boot time. This altitude will also drift and vary between flights. (float)
altitude_amsl : This altitude measure is strictly above mean sea level and might be non-monotonic (it might reset on events like GPS lock or when a new QNH value is set). It should be the altitude to which global altitude waypoints are compared to. Note that it is *not* the GPS altitude, however, most GPS modules already output AMSL by default and not the WGS84 altitude. (float)
altitude_local : This is the local altitude in the local coordinate frame. It is not the altitude above home, but in reference to the coordinate origin (0, 0, 0). It is up-positive. (float)
altitude_relative : This is the altitude above the home position. It resets on each change of the current home position. (float)
altitude_terrain : This is the altitude above terrain. It might be fed by a terrain database or an altimeter. Values smaller than -1000 should be interpreted as unknown. (float)
bottom_clearance : This is not the altitude, but the clear space below the system according to the fused clearance estimate. It generally should max out at the maximum range of e.g. the laser altimeter. It is generally a moving target. A negative value indicates no measurement available. (float)
'''
return MAVLink_altitude_message(time_usec, altitude_monotonic, altitude_amsl, altitude_local, altitude_relative, altitude_terrain, bottom_clearance) | [
"def",
"altitude_encode",
"(",
"self",
",",
"time_usec",
",",
"altitude_monotonic",
",",
"altitude_amsl",
",",
"altitude_local",
",",
"altitude_relative",
",",
"altitude_terrain",
",",
"bottom_clearance",
")",
":",
"return",
"MAVLink_altitude_message",
"(",
"time_usec",
",",
"altitude_monotonic",
",",
"altitude_amsl",
",",
"altitude_local",
",",
"altitude_relative",
",",
"altitude_terrain",
",",
"bottom_clearance",
")"
] | 158 | [
0.013986013986013986,
0.10526315789473684,
0.045454545454545456,
0,
0.061224489795918366,
0.011764705882352941,
0.012224938875305624,
0.023148148148148147,
0.02631578947368421,
0.024752475247524754,
0.012461059190031152,
0,
0.10526315789473684,
0.018292682926829267
] |
def GetHashObject(self):
"""Returns a `Hash` object with appropriate fields filled-in."""
hash_object = rdf_crypto.Hash()
hash_object.num_bytes = self._bytes_read
for algorithm in self._hashers:
setattr(hash_object, algorithm, self._hashers[algorithm].digest())
return hash_object | [
"def",
"GetHashObject",
"(",
"self",
")",
":",
"hash_object",
"=",
"rdf_crypto",
".",
"Hash",
"(",
")",
"hash_object",
".",
"num_bytes",
"=",
"self",
".",
"_bytes_read",
"for",
"algorithm",
"in",
"self",
".",
"_hashers",
":",
"setattr",
"(",
"hash_object",
",",
"algorithm",
",",
"self",
".",
"_hashers",
"[",
"algorithm",
"]",
".",
"digest",
"(",
")",
")",
"return",
"hash_object"
] | 42.857143 | [
0.041666666666666664,
0.029411764705882353,
0.05714285714285714,
0.045454545454545456,
0.05714285714285714,
0.041666666666666664,
0.09090909090909091
] |
def envelope(self):
""" returns an :class:`Event` that can be used for site streams """
def enveloped_event(data):
return 'for_user' in data and self._func(data.get('message'))
return self.__class__(enveloped_event, self.__name__) | [
"def",
"envelope",
"(",
"self",
")",
":",
"def",
"enveloped_event",
"(",
"data",
")",
":",
"return",
"'for_user'",
"in",
"data",
"and",
"self",
".",
"_func",
"(",
"data",
".",
"get",
"(",
"'message'",
")",
")",
"return",
"self",
".",
"__class__",
"(",
"enveloped_event",
",",
"self",
".",
"__name__",
")"
] | 37.428571 | [
0.05263157894736842,
0.02666666666666667,
0,
0.058823529411764705,
0.0273972602739726,
0,
0.03278688524590164
] |
def add_ip(self,oid,value,label=None):
"""Short helper to add an IP address value to the MIB subtree."""
self.add_oid_entry(oid,'IPADDRESS',value,label=label) | [
"def",
"add_ip",
"(",
"self",
",",
"oid",
",",
"value",
",",
"label",
"=",
"None",
")",
":",
"self",
".",
"add_oid_entry",
"(",
"oid",
",",
"'IPADDRESS'",
",",
"value",
",",
"label",
"=",
"label",
")"
] | 53.333333 | [
0.10526315789473684,
0.04477611940298507,
0.10909090909090909
] |
def gettext(message):
"""
Translate the 'message' string. It uses the current thread to find the
translation object to use. If no current translation is activated, the
message will be run through the default translation object.
"""
global _default
_default = _default or translation(DEFAULT_LANGUAGE)
translation_object = getattr(_active, 'value', _default)
result = translation_object.gettext(message)
return result | [
"def",
"gettext",
"(",
"message",
")",
":",
"global",
"_default",
"_default",
"=",
"_default",
"or",
"translation",
"(",
"DEFAULT_LANGUAGE",
")",
"translation_object",
"=",
"getattr",
"(",
"_active",
",",
"'value'",
",",
"_default",
")",
"result",
"=",
"translation_object",
".",
"gettext",
"(",
"message",
")",
"return",
"result"
] | 40.545455 | [
0.047619047619047616,
0.2857142857142857,
0.02702702702702703,
0.02702702702702703,
0.031746031746031744,
0.2857142857142857,
0.10526315789473684,
0.03571428571428571,
0.03333333333333333,
0.041666666666666664,
0.11764705882352941
] |
def register_function_compilation(self, func, compilation_cbk, listclass):
"""
Register given compilation method for given function.
:param str path: Function name.
:param callable compilation_cbk: Compilation callback to be called.
:param class listclass: List class to use for lists.
"""
self.compilations_function[func] = {
'callback': compilation_cbk,
'listclass': listclass
} | [
"def",
"register_function_compilation",
"(",
"self",
",",
"func",
",",
"compilation_cbk",
",",
"listclass",
")",
":",
"self",
".",
"compilations_function",
"[",
"func",
"]",
"=",
"{",
"'callback'",
":",
"compilation_cbk",
",",
"'listclass'",
":",
"listclass",
"}"
] | 38.25 | [
0.013513513513513514,
0.18181818181818182,
0.03278688524590164,
0,
0.07692307692307693,
0.04,
0.05,
0.18181818181818182,
0.06818181818181818,
0.04878048780487805,
0.058823529411764705,
0.3333333333333333
] |
def getQwordAtOffset(self, offset):
"""
Returns a C{QWORD} from a given offset.
@type offset: int
@param offset: The offset to get the C{QWORD} from.
@rtype: L{QWORD}
@return: The L{QWORD} obtained at the given offset.
"""
return datatypes.QWORD.parse(utils.ReadData(self.getDataAtOffset(offset, 8))) | [
"def",
"getQwordAtOffset",
"(",
"self",
",",
"offset",
")",
":",
"return",
"datatypes",
".",
"QWORD",
".",
"parse",
"(",
"utils",
".",
"ReadData",
"(",
"self",
".",
"getDataAtOffset",
"(",
"offset",
",",
"8",
")",
")",
")"
] | 34 | [
0.02857142857142857,
0.18181818181818182,
0.0625,
0.25,
0.08,
0.03389830508474576,
0.25,
0.08333333333333333,
0.03389830508474576,
0.18181818181818182,
0.03488372093023256
] |
def _send_merge_commands(self, config, file_config):
"""
Netmiko is being used to push set commands.
"""
if self.loaded is False:
if self._save_backup() is False:
raise MergeConfigException('Error while storing backup '
'config.')
if self.ssh_connection is False:
self._open_ssh()
if file_config:
if isinstance(config, str):
config = config.splitlines()
else:
if isinstance(config, str):
config = str(config).split()
self.ssh_device.send_config_set(config)
self.loaded = True
self.merge_config = True | [
"def",
"_send_merge_commands",
"(",
"self",
",",
"config",
",",
"file_config",
")",
":",
"if",
"self",
".",
"loaded",
"is",
"False",
":",
"if",
"self",
".",
"_save_backup",
"(",
")",
"is",
"False",
":",
"raise",
"MergeConfigException",
"(",
"'Error while storing backup '",
"'config.'",
")",
"if",
"self",
".",
"ssh_connection",
"is",
"False",
":",
"self",
".",
"_open_ssh",
"(",
")",
"if",
"file_config",
":",
"if",
"isinstance",
"(",
"config",
",",
"str",
")",
":",
"config",
"=",
"config",
".",
"splitlines",
"(",
")",
"else",
":",
"if",
"isinstance",
"(",
"config",
",",
"str",
")",
":",
"config",
"=",
"str",
"(",
"config",
")",
".",
"split",
"(",
")",
"self",
".",
"ssh_device",
".",
"send_config_set",
"(",
"config",
")",
"self",
".",
"loaded",
"=",
"True",
"self",
".",
"merge_config",
"=",
"True"
] | 33.380952 | [
0.019230769230769232,
0.18181818181818182,
0.0392156862745098,
0.18181818181818182,
0.0625,
0.045454545454545456,
0.041666666666666664,
0.07547169811320754,
0.05,
0.07142857142857142,
0,
0.08695652173913043,
0.05128205128205128,
0.045454545454545456,
0.15384615384615385,
0.05128205128205128,
0.045454545454545456,
0,
0.0425531914893617,
0.07692307692307693,
0.0625
] |
def _generate_input(options):
"""First send strings from any given file, one string per line, sends
any strings provided on the command line.
:param options: ArgumentParser or equivalent to provide
options.input and options.strings.
:return: string
"""
if options.input:
fp = open(options.input) if options.input != "-" else sys.stdin
for string in fp.readlines():
yield string
if options.strings:
for string in options.strings:
yield string | [
"def",
"_generate_input",
"(",
"options",
")",
":",
"if",
"options",
".",
"input",
":",
"fp",
"=",
"open",
"(",
"options",
".",
"input",
")",
"if",
"options",
".",
"input",
"!=",
"\"-\"",
"else",
"sys",
".",
"stdin",
"for",
"string",
"in",
"fp",
".",
"readlines",
"(",
")",
":",
"yield",
"string",
"if",
"options",
".",
"strings",
":",
"for",
"string",
"in",
"options",
".",
"strings",
":",
"yield",
"string"
] | 32 | [
0.034482758620689655,
0.0273972602739726,
0.044444444444444446,
0,
0.05084745762711865,
0.047619047619047616,
0.15789473684210525,
0,
0.2857142857142857,
0.09523809523809523,
0.028169014084507043,
0.05405405405405406,
0.08333333333333333,
0.08695652173913043,
0.05263157894736842,
0.08333333333333333
] |
def main(argString=None):
"""The main function of this module.
:param argString: the options.
:type argString: list
These are the steps:
1. Runs a plate bias analysis using Plink
(:py:func:`executePlateBiasAnalysis`).
2. Extracts the list of significant markers after plate bias analysis
(:py:func:`extractSignificantSNPs`).
3. Computes the frequency of all significant markers after plate bias
analysis (:py:func:`computeFrequencyOfSignificantSNPs`).
"""
# Getting and checking the options
args = parseArgs(argString)
checkArgs(args)
logger.info("Options used:")
for key, value in vars(args).iteritems():
logger.info(" --{} {}".format(key.replace("_", "-"), value))
# Run plink
logger.info("Running Plink to check the plate bias")
executePlateBiasAnalysis(args)
# Extract significant SNPs
logger.info("Extracting significant SNPs")
assocResults = extractSignificantSNPs(args.out)
# Remove significant SNPs using plink
logger.info("Computing frequency of significant SNPs")
maf = computeFrequencyOfSignificantSNPs(args)
# Create the final summary file
logger.info("Creating the summary file")
createSummaryFile(assocResults, maf, args.out) | [
"def",
"main",
"(",
"argString",
"=",
"None",
")",
":",
"# Getting and checking the options",
"args",
"=",
"parseArgs",
"(",
"argString",
")",
"checkArgs",
"(",
"args",
")",
"logger",
".",
"info",
"(",
"\"Options used:\"",
")",
"for",
"key",
",",
"value",
"in",
"vars",
"(",
"args",
")",
".",
"iteritems",
"(",
")",
":",
"logger",
".",
"info",
"(",
"\" --{} {}\"",
".",
"format",
"(",
"key",
".",
"replace",
"(",
"\"_\"",
",",
"\"-\"",
")",
",",
"value",
")",
")",
"# Run plink",
"logger",
".",
"info",
"(",
"\"Running Plink to check the plate bias\"",
")",
"executePlateBiasAnalysis",
"(",
"args",
")",
"# Extract significant SNPs",
"logger",
".",
"info",
"(",
"\"Extracting significant SNPs\"",
")",
"assocResults",
"=",
"extractSignificantSNPs",
"(",
"args",
".",
"out",
")",
"# Remove significant SNPs using plink",
"logger",
".",
"info",
"(",
"\"Computing frequency of significant SNPs\"",
")",
"maf",
"=",
"computeFrequencyOfSignificantSNPs",
"(",
"args",
")",
"# Create the final summary file",
"logger",
".",
"info",
"(",
"\"Creating the summary file\"",
")",
"createSummaryFile",
"(",
"assocResults",
",",
"maf",
",",
"args",
".",
"out",
")"
] | 31 | [
0.04,
0.05,
0,
0.08823529411764706,
0,
0.12,
0,
0.08333333333333333,
0,
0.044444444444444446,
0.15555555555555556,
0.0273972602739726,
0.16279069767441862,
0.0273972602739726,
0.12698412698412698,
0,
0.2857142857142857,
0.05263157894736842,
0.06451612903225806,
0.10526315789473684,
0,
0.0625,
0.044444444444444446,
0.028985507246376812,
0,
0.13333333333333333,
0.03571428571428571,
0.058823529411764705,
0,
0.06666666666666667,
0.043478260869565216,
0.0392156862745098,
0,
0.04878048780487805,
0.034482758620689655,
0.04081632653061224,
0,
0.05714285714285714,
0.045454545454545456,
0.04
] |
def get_prep_value(self, value):
"""
We need to accomodate queries where a single email,
or list of email addresses is supplied as arguments. For example:
- Email.objects.filter(to='[email protected]')
- Email.objects.filter(to=['[email protected]', '[email protected]'])
"""
if isinstance(value, six.string_types):
return value
else:
return ', '.join(map(lambda s: s.strip(), value)) | [
"def",
"get_prep_value",
"(",
"self",
",",
"value",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"six",
".",
"string_types",
")",
":",
"return",
"value",
"else",
":",
"return",
"', '",
".",
"join",
"(",
"map",
"(",
"lambda",
"s",
":",
"s",
".",
"strip",
"(",
")",
",",
"value",
")",
")"
] | 38.083333 | [
0.03125,
0.18181818181818182,
0.03389830508474576,
0.0273972602739726,
0,
0.03773584905660377,
0.0273972602739726,
0.18181818181818182,
0.0425531914893617,
0.08333333333333333,
0.15384615384615385,
0.03278688524590164
] |
def get(self, sid):
"""
Constructs a MessageContext
:param sid: The unique string that identifies the resource
:returns: twilio.rest.chat.v2.service.channel.message.MessageContext
:rtype: twilio.rest.chat.v2.service.channel.message.MessageContext
"""
return MessageContext(
self._version,
service_sid=self._solution['service_sid'],
channel_sid=self._solution['channel_sid'],
sid=sid,
) | [
"def",
"get",
"(",
"self",
",",
"sid",
")",
":",
"return",
"MessageContext",
"(",
"self",
".",
"_version",
",",
"service_sid",
"=",
"self",
".",
"_solution",
"[",
"'service_sid'",
"]",
",",
"channel_sid",
"=",
"self",
".",
"_solution",
"[",
"'channel_sid'",
"]",
",",
"sid",
"=",
"sid",
",",
")"
] | 32.333333 | [
0.05263157894736842,
0.18181818181818182,
0.05714285714285714,
0,
0.045454545454545456,
0,
0.039473684210526314,
0.04054054054054054,
0.18181818181818182,
0.1,
0.07692307692307693,
0.05555555555555555,
0.05555555555555555,
0.15,
0.3333333333333333
] |
def beginning_of_history(self): # (M-<)
u'''Move to the first line in the history.'''
self.history_cursor = 0
if len(self.history) > 0:
self.l_buffer = self.history[0] | [
"def",
"beginning_of_history",
"(",
"self",
")",
":",
"# (M-<)\r",
"self",
".",
"history_cursor",
"=",
"0",
"if",
"len",
"(",
"self",
".",
"history",
")",
">",
"0",
":",
"self",
".",
"l_buffer",
"=",
"self",
".",
"history",
"[",
"0",
"]"
] | 40.6 | [
0.025,
0.018518518518518517,
0.03125,
0.08823529411764706,
0.046511627906976744
] |
def get_class_method(cls_or_inst, method_name):
"""
Returns a method from a given class or instance. When the method doest not exist, it returns `None`. Also works with
properties and cached properties.
"""
cls = cls_or_inst if isinstance(cls_or_inst, type) else cls_or_inst.__class__
meth = getattr(cls, method_name, None)
if isinstance(meth, property):
meth = meth.fget
elif isinstance(meth, cached_property):
meth = meth.func
return meth | [
"def",
"get_class_method",
"(",
"cls_or_inst",
",",
"method_name",
")",
":",
"cls",
"=",
"cls_or_inst",
"if",
"isinstance",
"(",
"cls_or_inst",
",",
"type",
")",
"else",
"cls_or_inst",
".",
"__class__",
"meth",
"=",
"getattr",
"(",
"cls",
",",
"method_name",
",",
"None",
")",
"if",
"isinstance",
"(",
"meth",
",",
"property",
")",
":",
"meth",
"=",
"meth",
".",
"fget",
"elif",
"isinstance",
"(",
"meth",
",",
"cached_property",
")",
":",
"meth",
"=",
"meth",
".",
"func",
"return",
"meth"
] | 40.083333 | [
0.02127659574468085,
0.2857142857142857,
0.03333333333333333,
0.05405405405405406,
0.2857142857142857,
0.037037037037037035,
0.047619047619047616,
0.058823529411764705,
0.08333333333333333,
0.046511627906976744,
0.08333333333333333,
0.13333333333333333
] |
def capture(self):
"""
Capture the payment of an existing, uncaptured, charge.
This is the second half of the two-step payment flow, where first you
created a charge with the capture option set to False.
See https://stripe.com/docs/api#capture_charge
"""
captured_charge = self.api_retrieve().capture()
return self.__class__.sync_from_stripe_data(captured_charge) | [
"def",
"capture",
"(",
"self",
")",
":",
"captured_charge",
"=",
"self",
".",
"api_retrieve",
"(",
")",
".",
"capture",
"(",
")",
"return",
"self",
".",
"__class__",
".",
"sync_from_stripe_data",
"(",
"captured_charge",
")"
] | 33.727273 | [
0.05555555555555555,
0.6,
0.05263157894736842,
0.04225352112676056,
0.05357142857142857,
0,
0.125,
0.6,
0,
0.061224489795918366,
0.04838709677419355
] |
def index(self, value):
"""
Return index of *value* in self.
Raises ValueError if *value* is not found.
"""
# pylint: disable=arguments-differ
for idx, val in enumerate(self):
if value == val:
return idx
raise ValueError('{0!r} is not in dict'.format(value)) | [
"def",
"index",
"(",
"self",
",",
"value",
")",
":",
"# pylint: disable=arguments-differ",
"for",
"idx",
",",
"val",
"in",
"enumerate",
"(",
"self",
")",
":",
"if",
"value",
"==",
"val",
":",
"return",
"idx",
"raise",
"ValueError",
"(",
"'{0!r} is not in dict'",
".",
"format",
"(",
"value",
")",
")"
] | 30.272727 | [
0.043478260869565216,
0.18181818181818182,
0.1,
0,
0.06,
0.18181818181818182,
0.047619047619047616,
0.05,
0.07142857142857142,
0.07692307692307693,
0.03225806451612903
] |
def smudge(newtype, target):
"""
Smudge magic bytes with a known type
"""
db = smudge_db.get()
magic_bytes = db[newtype]['magic']
magic_offset = db[newtype]['offset']
_backup_bytes(target, magic_offset, len(magic_bytes))
_smudge_bytes(target, magic_offset, magic_bytes) | [
"def",
"smudge",
"(",
"newtype",
",",
"target",
")",
":",
"db",
"=",
"smudge_db",
".",
"get",
"(",
")",
"magic_bytes",
"=",
"db",
"[",
"newtype",
"]",
"[",
"'magic'",
"]",
"magic_offset",
"=",
"db",
"[",
"newtype",
"]",
"[",
"'offset'",
"]",
"_backup_bytes",
"(",
"target",
",",
"magic_offset",
",",
"len",
"(",
"magic_bytes",
")",
")",
"_smudge_bytes",
"(",
"target",
",",
"magic_offset",
",",
"magic_bytes",
")"
] | 24.75 | [
0.03571428571428571,
0.2857142857142857,
0.045454545454545456,
0.2857142857142857,
0,
0.08333333333333333,
0,
0.05263157894736842,
0.05,
0,
0.03508771929824561,
0.038461538461538464
] |
def remove(self, list):
"""
Removes a list from the site.
"""
xml = SP.DeleteList(SP.listName(list.id))
self.opener.post_soap(LIST_WEBSERVICE, xml,
soapaction='http://schemas.microsoft.com/sharepoint/soap/DeleteList')
self.all_lists.remove(list) | [
"def",
"remove",
"(",
"self",
",",
"list",
")",
":",
"xml",
"=",
"SP",
".",
"DeleteList",
"(",
"SP",
".",
"listName",
"(",
"list",
".",
"id",
")",
")",
"self",
".",
"opener",
".",
"post_soap",
"(",
"LIST_WEBSERVICE",
",",
"xml",
",",
"soapaction",
"=",
"'http://schemas.microsoft.com/sharepoint/soap/DeleteList'",
")",
"self",
".",
"all_lists",
".",
"remove",
"(",
"list",
")"
] | 39.5 | [
0.043478260869565216,
0.18181818181818182,
0.05405405405405406,
0.18181818181818182,
0.04081632653061224,
0.058823529411764705,
0.06060606060606061,
0.05714285714285714
] |
def set_hold_temp(self, index, cool_temp, heat_temp,
hold_type="nextTransition"):
''' Set a hold '''
body = {"selection": {
"selectionType": "thermostats",
"selectionMatch": self.thermostats[index]['identifier']},
"functions": [{"type": "setHold", "params": {
"holdType": hold_type,
"coolHoldTemp": int(cool_temp * 10),
"heatHoldTemp": int(heat_temp * 10)
}}]}
log_msg_action = "set hold temp"
return self.make_request(body, log_msg_action) | [
"def",
"set_hold_temp",
"(",
"self",
",",
"index",
",",
"cool_temp",
",",
"heat_temp",
",",
"hold_type",
"=",
"\"nextTransition\"",
")",
":",
"body",
"=",
"{",
"\"selection\"",
":",
"{",
"\"selectionType\"",
":",
"\"thermostats\"",
",",
"\"selectionMatch\"",
":",
"self",
".",
"thermostats",
"[",
"index",
"]",
"[",
"'identifier'",
"]",
"}",
",",
"\"functions\"",
":",
"[",
"{",
"\"type\"",
":",
"\"setHold\"",
",",
"\"params\"",
":",
"{",
"\"holdType\"",
":",
"hold_type",
",",
"\"coolHoldTemp\"",
":",
"int",
"(",
"cool_temp",
"*",
"10",
")",
",",
"\"heatHoldTemp\"",
":",
"int",
"(",
"heat_temp",
"*",
"10",
")",
"}",
"}",
"]",
"}",
"log_msg_action",
"=",
"\"set hold temp\"",
"return",
"self",
".",
"make_request",
"(",
"body",
",",
"log_msg_action",
")"
] | 47.230769 | [
0.038461538461538464,
0.1,
0.07692307692307693,
0.1,
0.0392156862745098,
0.03896103896103896,
0.04918032786885246,
0.047619047619047616,
0.03571428571428571,
0.03636363636363636,
0.15,
0.05,
0.037037037037037035
] |
def get_map(self, create_html=True):
"""Strike data should be a pd.DF from the WWLN data files read by
read_WWLN()"""
strike_data = self.df
num_rows = len(self.df)
if num_rows > 1000:
print("Warning, you have requested lots of data be mapped." /
" Limiting your request to the first 1,000 rows" /
" as this is currently only a preview feature.")
strike_data = self.df[0:1000]
m = folium.Map(location=[0.0, 0.01], zoom_start=2)
marker_cluster = folium.MarkerCluster().add_to(m)
for event in strike_data.index:
self.add_to_map(map_obj=m,
date_time=strike_data.dt[event],
cluster_obj=marker_cluster,
lat=strike_data.geometry[event].y,
lon=strike_data.geometry[event].x,
key=event)
if create_html:
data_date = strike_data.dt[0].split()[0]
m.save('map_{0}.html'.format(data_date))
return m | [
"def",
"get_map",
"(",
"self",
",",
"create_html",
"=",
"True",
")",
":",
"strike_data",
"=",
"self",
".",
"df",
"num_rows",
"=",
"len",
"(",
"self",
".",
"df",
")",
"if",
"num_rows",
">",
"1000",
":",
"print",
"(",
"\"Warning, you have requested lots of data be mapped.\"",
"/",
"\" Limiting your request to the first 1,000 rows\"",
"/",
"\" as this is currently only a preview feature.\"",
")",
"strike_data",
"=",
"self",
".",
"df",
"[",
"0",
":",
"1000",
"]",
"m",
"=",
"folium",
".",
"Map",
"(",
"location",
"=",
"[",
"0.0",
",",
"0.01",
"]",
",",
"zoom_start",
"=",
"2",
")",
"marker_cluster",
"=",
"folium",
".",
"MarkerCluster",
"(",
")",
".",
"add_to",
"(",
"m",
")",
"for",
"event",
"in",
"strike_data",
".",
"index",
":",
"self",
".",
"add_to_map",
"(",
"map_obj",
"=",
"m",
",",
"date_time",
"=",
"strike_data",
".",
"dt",
"[",
"event",
"]",
",",
"cluster_obj",
"=",
"marker_cluster",
",",
"lat",
"=",
"strike_data",
".",
"geometry",
"[",
"event",
"]",
".",
"y",
",",
"lon",
"=",
"strike_data",
".",
"geometry",
"[",
"event",
"]",
".",
"x",
",",
"key",
"=",
"event",
")",
"if",
"create_html",
":",
"data_date",
"=",
"strike_data",
".",
"dt",
"[",
"0",
"]",
".",
"split",
"(",
")",
"[",
"0",
"]",
"m",
".",
"save",
"(",
"'map_{0}.html'",
".",
"format",
"(",
"data_date",
")",
")",
"return",
"m"
] | 46.869565 | [
0.027777777777777776,
0.0273972602739726,
0.13636363636363635,
0.06896551724137931,
0.06451612903225806,
0.07407407407407407,
0.0410958904109589,
0.04411764705882353,
0.06060606060606061,
0.04878048780487805,
0.034482758620689655,
0.03508771929824561,
0.05128205128205128,
0.07894736842105263,
0.05,
0.05454545454545454,
0.04838709677419355,
0.04838709677419355,
0.10526315789473684,
0.08695652173913043,
0.038461538461538464,
0.038461538461538464,
0.125
] |
def create_bzip2 (archive, compression, cmd, verbosity, interactive, filenames):
"""Create a BZIP2 archive with the bz2 Python module."""
if len(filenames) > 1:
raise util.PatoolError('multi-file compression not supported in Python bz2')
try:
with bz2.BZ2File(archive, 'wb') as bz2file:
filename = filenames[0]
with open(filename, 'rb') as srcfile:
data = srcfile.read(READ_SIZE_BYTES)
while data:
bz2file.write(data)
data = srcfile.read(READ_SIZE_BYTES)
except Exception as err:
msg = "error creating %s: %s" % (archive, err)
raise util.PatoolError(msg)
return None | [
"def",
"create_bzip2",
"(",
"archive",
",",
"compression",
",",
"cmd",
",",
"verbosity",
",",
"interactive",
",",
"filenames",
")",
":",
"if",
"len",
"(",
"filenames",
")",
">",
"1",
":",
"raise",
"util",
".",
"PatoolError",
"(",
"'multi-file compression not supported in Python bz2'",
")",
"try",
":",
"with",
"bz2",
".",
"BZ2File",
"(",
"archive",
",",
"'wb'",
")",
"as",
"bz2file",
":",
"filename",
"=",
"filenames",
"[",
"0",
"]",
"with",
"open",
"(",
"filename",
",",
"'rb'",
")",
"as",
"srcfile",
":",
"data",
"=",
"srcfile",
".",
"read",
"(",
"READ_SIZE_BYTES",
")",
"while",
"data",
":",
"bz2file",
".",
"write",
"(",
"data",
")",
"data",
"=",
"srcfile",
".",
"read",
"(",
"READ_SIZE_BYTES",
")",
"except",
"Exception",
"as",
"err",
":",
"msg",
"=",
"\"error creating %s: %s\"",
"%",
"(",
"archive",
",",
"err",
")",
"raise",
"util",
".",
"PatoolError",
"(",
"msg",
")",
"return",
"None"
] | 43.6875 | [
0.0375,
0.03333333333333333,
0.07692307692307693,
0.03571428571428571,
0.25,
0.0392156862745098,
0.05714285714285714,
0.04081632653061224,
0.038461538461538464,
0.07407407407407407,
0.05128205128205128,
0.03571428571428571,
0.07142857142857142,
0.037037037037037035,
0.05714285714285714,
0.13333333333333333
] |
def parseFile(self, filename):
'''
parseFile - Parses a file and creates the DOM tree and indexes
@param filename <str/file> - A string to a filename or a file object. If file object, it will not be closed, you must close.
'''
self.reset()
if isinstance(filename, file):
contents = filename.read()
else:
with codecs.open(filename, 'r', encoding=self.encoding) as f:
contents = f.read()
self.feed(contents) | [
"def",
"parseFile",
"(",
"self",
",",
"filename",
")",
":",
"self",
".",
"reset",
"(",
")",
"if",
"isinstance",
"(",
"filename",
",",
"file",
")",
":",
"contents",
"=",
"filename",
".",
"read",
"(",
")",
"else",
":",
"with",
"codecs",
".",
"open",
"(",
"filename",
",",
"'r'",
",",
"encoding",
"=",
"self",
".",
"encoding",
")",
"as",
"f",
":",
"contents",
"=",
"f",
".",
"read",
"(",
")",
"self",
".",
"feed",
"(",
"contents",
")"
] | 34.266667 | [
0.03333333333333333,
0.18181818181818182,
0.02702702702702703,
0.5,
0.03571428571428571,
0.18181818181818182,
0.1,
0,
0.05263157894736842,
0.05263157894736842,
0.15384615384615385,
0.0273972602739726,
0.05714285714285714,
0,
0.07407407407407407
] |
def move_partition(self, rg_destination, victim_partition):
"""Move partition(victim) from current replication-group to destination
replication-group.
Step 1: Evaluate source and destination broker
Step 2: Move partition from source-broker to destination-broker
"""
# Select best-fit source and destination brokers for partition
# Best-fit is based on partition-count and presence/absence of
# Same topic-partition over brokers
broker_source, broker_destination = self._select_broker_pair(
rg_destination,
victim_partition,
)
# Actual-movement of victim-partition
self.log.debug(
'Moving partition {p_name} from broker {broker_source} to '
'replication-group:broker {rg_dest}:{dest_broker}'.format(
p_name=victim_partition.name,
broker_source=broker_source.id,
dest_broker=broker_destination.id,
rg_dest=rg_destination.id,
),
)
broker_source.move_partition(victim_partition, broker_destination) | [
"def",
"move_partition",
"(",
"self",
",",
"rg_destination",
",",
"victim_partition",
")",
":",
"# Select best-fit source and destination brokers for partition",
"# Best-fit is based on partition-count and presence/absence of",
"# Same topic-partition over brokers",
"broker_source",
",",
"broker_destination",
"=",
"self",
".",
"_select_broker_pair",
"(",
"rg_destination",
",",
"victim_partition",
",",
")",
"# Actual-movement of victim-partition",
"self",
".",
"log",
".",
"debug",
"(",
"'Moving partition {p_name} from broker {broker_source} to '",
"'replication-group:broker {rg_dest}:{dest_broker}'",
".",
"format",
"(",
"p_name",
"=",
"victim_partition",
".",
"name",
",",
"broker_source",
"=",
"broker_source",
".",
"id",
",",
"dest_broker",
"=",
"broker_destination",
".",
"id",
",",
"rg_dest",
"=",
"rg_destination",
".",
"id",
",",
")",
",",
")",
"broker_source",
".",
"move_partition",
"(",
"victim_partition",
",",
"broker_destination",
")"
] | 44.28 | [
0.01694915254237288,
0.02531645569620253,
0.07692307692307693,
0,
0.037037037037037035,
0.028169014084507043,
0.18181818181818182,
0.02857142857142857,
0.02857142857142857,
0.046511627906976744,
0.043478260869565216,
0.07407407407407407,
0.06896551724137931,
0.3333333333333333,
0.044444444444444446,
0.13043478260869565,
0.028169014084507043,
0.04285714285714286,
0.06666666666666667,
0.06382978723404255,
0.06,
0.07142857142857142,
0.21428571428571427,
0.3333333333333333,
0.02702702702702703
] |
def url(self):
'''url of the item
Notes
-----
if remote-adderes was given, then that is used as the base
'''
path = '/web/itemdetails.html?id={}'.format(self.id)
return self.connector.get_url(path, attach_api_key=False) | [
"def",
"url",
"(",
"self",
")",
":",
"path",
"=",
"'/web/itemdetails.html?id={}'",
".",
"format",
"(",
"self",
".",
"id",
")",
"return",
"self",
".",
"connector",
".",
"get_url",
"(",
"path",
",",
"attach_api_key",
"=",
"False",
")"
] | 26.888889 | [
0.07142857142857142,
0.09090909090909091,
0,
0.2222222222222222,
0.2222222222222222,
0.046875,
0.2857142857142857,
0.03571428571428571,
0.03278688524590164
] |
def init(factory):
# simple types
factory.register(BoolPlugin(), ColumnType.Bool)
factory.register(NumericPlugin(QLineEdit), ColumnType.Double)
factory.register(NumericPlugin(QLineEdit), ColumnType.Decimal)
factory.register(NumericPlugin(QLineEdit), ColumnType.Integer)
factory.register(NumericPlugin(QLineEdit), ColumnType.BigInt)
# Enum
factory.register(EnumPlugin(), ColumnType.Enum)
# string types
factory.register(StringPlugin(XLineEdit), ColumnType.String)
factory.register(StringPlugin(XLineEdit), ColumnType.Text)
factory.register(StringPlugin(XLineEdit), ColumnType.Xml)
factory.register(StringPlugin(XLineEdit), ColumnType.Html)
factory.register(StringPlugin(XLineEdit), ColumnType.Color)
factory.register(PasswordPlugin(), ColumnType.Password)
# date/time types
dtimeplug = DateTimePlugin(XDateTimeEdit)
factory.register(DateTimePlugin(XDateEdit), ColumnType.Date)
factory.register(dtimeplug, ColumnType.Datetime)
factory.register(dtimeplug, ColumnType.DatetimeWithTimezone)
factory.register(DateTimePlugin(XTimeEdit), ColumnType.Time)
# data types
'Image' # stores images in the database as binary
'ByteArray' # stores additional binary information
'Dict' # stores python dictionary types
# relation types
factory.register(ForeignKeyPlugin(), ColumnType.ForeignKey) | [
"def",
"init",
"(",
"factory",
")",
":",
"# simple types\r",
"factory",
".",
"register",
"(",
"BoolPlugin",
"(",
")",
",",
"ColumnType",
".",
"Bool",
")",
"factory",
".",
"register",
"(",
"NumericPlugin",
"(",
"QLineEdit",
")",
",",
"ColumnType",
".",
"Double",
")",
"factory",
".",
"register",
"(",
"NumericPlugin",
"(",
"QLineEdit",
")",
",",
"ColumnType",
".",
"Decimal",
")",
"factory",
".",
"register",
"(",
"NumericPlugin",
"(",
"QLineEdit",
")",
",",
"ColumnType",
".",
"Integer",
")",
"factory",
".",
"register",
"(",
"NumericPlugin",
"(",
"QLineEdit",
")",
",",
"ColumnType",
".",
"BigInt",
")",
"# Enum\r",
"factory",
".",
"register",
"(",
"EnumPlugin",
"(",
")",
",",
"ColumnType",
".",
"Enum",
")",
"# string types\r",
"factory",
".",
"register",
"(",
"StringPlugin",
"(",
"XLineEdit",
")",
",",
"ColumnType",
".",
"String",
")",
"factory",
".",
"register",
"(",
"StringPlugin",
"(",
"XLineEdit",
")",
",",
"ColumnType",
".",
"Text",
")",
"factory",
".",
"register",
"(",
"StringPlugin",
"(",
"XLineEdit",
")",
",",
"ColumnType",
".",
"Xml",
")",
"factory",
".",
"register",
"(",
"StringPlugin",
"(",
"XLineEdit",
")",
",",
"ColumnType",
".",
"Html",
")",
"factory",
".",
"register",
"(",
"StringPlugin",
"(",
"XLineEdit",
")",
",",
"ColumnType",
".",
"Color",
")",
"factory",
".",
"register",
"(",
"PasswordPlugin",
"(",
")",
",",
"ColumnType",
".",
"Password",
")",
"# date/time types\r",
"dtimeplug",
"=",
"DateTimePlugin",
"(",
"XDateTimeEdit",
")",
"factory",
".",
"register",
"(",
"DateTimePlugin",
"(",
"XDateEdit",
")",
",",
"ColumnType",
".",
"Date",
")",
"factory",
".",
"register",
"(",
"dtimeplug",
",",
"ColumnType",
".",
"Datetime",
")",
"factory",
".",
"register",
"(",
"dtimeplug",
",",
"ColumnType",
".",
"DatetimeWithTimezone",
")",
"factory",
".",
"register",
"(",
"DateTimePlugin",
"(",
"XTimeEdit",
")",
",",
"ColumnType",
".",
"Time",
")",
"# data types\r",
"# stores images in the database as binary\r",
"'ByteArray'",
"# stores additional binary information\r",
"'Dict'",
"# stores python dictionary types\r",
"# relation types\r",
"factory",
".",
"register",
"(",
"ForeignKeyPlugin",
"(",
")",
",",
"ColumnType",
".",
"ForeignKey",
")"
] | 46.454545 | [
0.05263157894736842,
0.05263157894736842,
0.014492753623188406,
0.014084507042253521,
0.013888888888888888,
0.013888888888888888,
0.014084507042253521,
0.2,
0.09090909090909091,
0.014492753623188406,
0.2,
0.05263157894736842,
0.014084507042253521,
0.014492753623188406,
0.014705882352941176,
0.014492753623188406,
0.014285714285714285,
0.0136986301369863,
0.2,
0.045454545454545456,
0.021739130434782608,
0.014492753623188406,
0.018867924528301886,
0.015384615384615385,
0.014492753623188406,
0.2,
0.058823529411764705,
0.01639344262295082,
0.017241379310344827,
0.019230769230769232,
0.2,
0.047619047619047616,
0.031746031746031744
] |
def extractLargestRegion(actor):
"""Keep only the largest connected part of a mesh and discard all the smaller pieces.
.. hint:: |largestregion.py|_
"""
conn = vtk.vtkConnectivityFilter()
conn.SetExtractionModeToLargestRegion()
conn.ScalarConnectivityOff()
poly = actor.GetMapper().GetInput()
conn.SetInputData(poly)
conn.Update()
epoly = conn.GetOutput()
eact = Actor(epoly)
pr = vtk.vtkProperty()
pr.DeepCopy(actor.GetProperty())
eact.SetProperty(pr)
return eact | [
"def",
"extractLargestRegion",
"(",
"actor",
")",
":",
"conn",
"=",
"vtk",
".",
"vtkConnectivityFilter",
"(",
")",
"conn",
".",
"SetExtractionModeToLargestRegion",
"(",
")",
"conn",
".",
"ScalarConnectivityOff",
"(",
")",
"poly",
"=",
"actor",
".",
"GetMapper",
"(",
")",
".",
"GetInput",
"(",
")",
"conn",
".",
"SetInputData",
"(",
"poly",
")",
"conn",
".",
"Update",
"(",
")",
"epoly",
"=",
"conn",
".",
"GetOutput",
"(",
")",
"eact",
"=",
"Actor",
"(",
"epoly",
")",
"pr",
"=",
"vtk",
".",
"vtkProperty",
"(",
")",
"pr",
".",
"DeepCopy",
"(",
"actor",
".",
"GetProperty",
"(",
")",
")",
"eact",
".",
"SetProperty",
"(",
"pr",
")",
"return",
"eact"
] | 29.941176 | [
0.03125,
0.033707865168539325,
0,
0.15151515151515152,
0.2857142857142857,
0.05263157894736842,
0.046511627906976744,
0.0625,
0.05128205128205128,
0.07407407407407407,
0.11764705882352941,
0.07142857142857142,
0.08695652173913043,
0.07692307692307693,
0.05555555555555555,
0.08333333333333333,
0.13333333333333333
] |
def arnoldi_projected(H, P, k, ortho='mgs'):
"""Compute (perturbed) Arnoldi relation for projected operator.
Assume that you have computed an Arnoldi relation
.. math ::
A V_n = V_{n+1} \\underline{H}_n
where :math:`V_{n+1}\\in\\mathbb{C}^{N,n+1}` has orthogonal columns
(with respect to an inner product :math:`\\langle\\cdot,\\cdot\\rangle`)
and :math:`\\underline{H}_n\\in\\mathbb{C}^{n+1,n}` is an extended
upper Hessenberg matrix.
For :math:`k<n` you choose full rank matrices
:math:`X\\in\\mathbb{C}^{n-1,k}` and :math:`Y\\in\\mathbb{C}^{n,k}` and
define :math:`\\tilde{X}:=A V_{n_1}X = V_n \\underline{H}_{n-1} X` and
:math:`\\tilde{Y}:=V_n Y` such that
:math:`\\langle \\tilde{Y}, \\tilde{X} \\rangle = Y^*\\underline{H}_{n-1} X`
is invertible. Then the projections :math:`P` and :math:`\\tilde{P}`
characterized by
* :math:`\\tilde{P}x = x -
\\tilde{X} \\langle \\tilde{Y},\\tilde{X} \\rangle^{-1}
\\langle\\tilde{Y},x\\rangle`
* :math:`P = I - \\underline{H}_{n-1}X (Y^*\\underline{H}_{n-1}X)^{-1}Y^*`
are well defined and :math:`\\tilde{P}V_{n+1} = [V_n P, v_{n+1}]` holds.
This method computes for :math:`i<n-k` the Arnoldi relation
.. math ::
(\\tilde{P}A + E_i) W_i
= W_{i+1} \\underline{G}_i
where :math:`W_{i+1}=V_n U_{i+1}` has orthogonal columns with respect
to :math:`\\langle\\cdot,\\cdot\\rangle`,
:math:`\\underline{G}_i` is an extended upper Hessenberg matrix
and :math:`E_i x = v_{n+1} F_i \\langle W_i,x\\rangle` with
:math:`F_i=[f_1,\ldots,f_i]\\in\\mathbb{C}^{1,i}`.
The perturbed Arnoldi relation can also be generated with the operator
:math:`P_{V_n} \\tilde{P} A`:
.. math ::
P_{V_n} \\tilde{P} A W_i
= W_{i+1} \\underline{G}_i.
In a sense the perturbed Arnoldi relation is the best prediction for the
behavior of the Krylov subspace :math:`K_i(\\tilde{P}A,\\tilde{P}v_1)`
that can be generated only with the data from :math:`K_{n+1}(A,v_1)` and
without carrying out further matrix-vector multiplications with A.
:param H: the extended upper Hessenberg matrix
:math:`\\underline{H}_n` with ``shape==(n+1,n)``.
:param P: the projection
:math:`P:\\mathbb{C}^n\\longrightarrow\\mathbb{C}^n` (has to be
compatible with :py:meth:`get_linearoperator`).
:param k: the dimension of the null space of P.
:returns: U, G, F where
* U is the coefficient matrix :math:`U_{i+1}` with ``shape==(n,i+1)``,
* G is the extended upper Hessenberg matrix :math:`\\underline{G}_i`
with ``shape==(i+1,i)``,
* F is the error matrix :math:`F_i` with ``shape==(1,i)``.
"""
n = H.shape[1]
dtype = find_common_dtype(H, P)
invariant = H.shape[0] == n
hlast = 0 if invariant else H[-1, -1]
H = get_linearoperator((n, n), H if invariant else H[:-1, :])
P = get_linearoperator((n, n), P)
v = P * numpy.eye(n, 1)
maxiter = n-k+1
F = numpy.zeros((1, maxiter), dtype=dtype)
PH = lambda x: P*(H*x)
PH = LinearOperator((n, n), dtype, PH)
_arnoldi = Arnoldi(PH, v, maxiter=maxiter, ortho=ortho)
while _arnoldi.iter < _arnoldi.maxiter and not _arnoldi.invariant:
u, _ = _arnoldi.get_last()
F[0, _arnoldi.iter] = hlast * u[-1, 0]
_arnoldi.advance()
U, G = _arnoldi.get()
return U, G, F[[0], :_arnoldi.iter] | [
"def",
"arnoldi_projected",
"(",
"H",
",",
"P",
",",
"k",
",",
"ortho",
"=",
"'mgs'",
")",
":",
"n",
"=",
"H",
".",
"shape",
"[",
"1",
"]",
"dtype",
"=",
"find_common_dtype",
"(",
"H",
",",
"P",
")",
"invariant",
"=",
"H",
".",
"shape",
"[",
"0",
"]",
"==",
"n",
"hlast",
"=",
"0",
"if",
"invariant",
"else",
"H",
"[",
"-",
"1",
",",
"-",
"1",
"]",
"H",
"=",
"get_linearoperator",
"(",
"(",
"n",
",",
"n",
")",
",",
"H",
"if",
"invariant",
"else",
"H",
"[",
":",
"-",
"1",
",",
":",
"]",
")",
"P",
"=",
"get_linearoperator",
"(",
"(",
"n",
",",
"n",
")",
",",
"P",
")",
"v",
"=",
"P",
"*",
"numpy",
".",
"eye",
"(",
"n",
",",
"1",
")",
"maxiter",
"=",
"n",
"-",
"k",
"+",
"1",
"F",
"=",
"numpy",
".",
"zeros",
"(",
"(",
"1",
",",
"maxiter",
")",
",",
"dtype",
"=",
"dtype",
")",
"PH",
"=",
"lambda",
"x",
":",
"P",
"*",
"(",
"H",
"*",
"x",
")",
"PH",
"=",
"LinearOperator",
"(",
"(",
"n",
",",
"n",
")",
",",
"dtype",
",",
"PH",
")",
"_arnoldi",
"=",
"Arnoldi",
"(",
"PH",
",",
"v",
",",
"maxiter",
"=",
"maxiter",
",",
"ortho",
"=",
"ortho",
")",
"while",
"_arnoldi",
".",
"iter",
"<",
"_arnoldi",
".",
"maxiter",
"and",
"not",
"_arnoldi",
".",
"invariant",
":",
"u",
",",
"_",
"=",
"_arnoldi",
".",
"get_last",
"(",
")",
"F",
"[",
"0",
",",
"_arnoldi",
".",
"iter",
"]",
"=",
"hlast",
"*",
"u",
"[",
"-",
"1",
",",
"0",
"]",
"_arnoldi",
".",
"advance",
"(",
")",
"U",
",",
"G",
"=",
"_arnoldi",
".",
"get",
"(",
")",
"return",
"U",
",",
"G",
",",
"F",
"[",
"[",
"0",
"]",
",",
":",
"_arnoldi",
".",
"iter",
"]"
] | 38.976744 | [
0.022727272727272728,
0.029850746268656716,
0,
0.03773584905660377,
0,
0.2857142857142857,
0,
0.05,
0,
0.11267605633802817,
0.09210526315789473,
0.11428571428571428,
0.07142857142857142,
0,
0.14285714285714285,
0.16,
0.0945945945945946,
0.15384615384615385,
0.0875,
0.125,
0.1,
0,
0.2,
0.08196721311475409,
0.14285714285714285,
0.1282051282051282,
0,
0.07894736842105263,
0,
0.1111111111111111,
0,
0.2857142857142857,
0,
0.06451612903225806,
0.058823529411764705,
0,
0.0958904109589041,
0.15555555555555556,
0.07462686567164178,
0.1111111111111111,
0.18518518518518517,
0,
0.02702702702702703,
0.15151515151515152,
0,
0.2857142857142857,
0,
0.0625,
0.05714285714285714,
0,
0.02631578947368421,
0.0945945945945946,
0.09210526315789473,
0.02857142857142857,
0,
0.06,
0.14545454545454545,
0.10714285714285714,
0.14492753623188406,
0.16981132075471697,
0.058823529411764705,
0.1111111111111111,
0,
0.11842105263157894,
0.0945945945945946,
0.15625,
0.140625,
0.2857142857142857,
0.1111111111111111,
0.05714285714285714,
0.06451612903225806,
0.04878048780487805,
0.03076923076923077,
0.05405405405405406,
0.07407407407407407,
0.10526315789473684,
0.043478260869565216,
0.11538461538461539,
0.047619047619047616,
0.03389830508474576,
0.02857142857142857,
0.058823529411764705,
0.043478260869565216,
0.07692307692307693,
0.08,
0.05128205128205128
] |
def shear_vel_at_depth(self, y_c):
"""
Get the shear wave velocity at a depth.
:param y_c: float, depth from surface
:return:
"""
sl = self.get_soil_at_depth(y_c)
if y_c <= self.gwl:
saturation = False
else:
saturation = True
if hasattr(sl, "get_shear_vel_at_v_eff_stress"):
v_eff = self.get_v_eff_stress_at_depth(y_c)
vs = sl.get_shear_vel_at_v_eff_stress(v_eff, saturation)
else:
vs = sl.get_shear_vel(saturation)
return vs | [
"def",
"shear_vel_at_depth",
"(",
"self",
",",
"y_c",
")",
":",
"sl",
"=",
"self",
".",
"get_soil_at_depth",
"(",
"y_c",
")",
"if",
"y_c",
"<=",
"self",
".",
"gwl",
":",
"saturation",
"=",
"False",
"else",
":",
"saturation",
"=",
"True",
"if",
"hasattr",
"(",
"sl",
",",
"\"get_shear_vel_at_v_eff_stress\"",
")",
":",
"v_eff",
"=",
"self",
".",
"get_v_eff_stress_at_depth",
"(",
"y_c",
")",
"vs",
"=",
"sl",
".",
"get_shear_vel_at_v_eff_stress",
"(",
"v_eff",
",",
"saturation",
")",
"else",
":",
"vs",
"=",
"sl",
".",
"get_shear_vel",
"(",
"saturation",
")",
"return",
"vs"
] | 30.944444 | [
0.029411764705882353,
0.18181818181818182,
0.0425531914893617,
0,
0.06666666666666667,
0.1875,
0.18181818181818182,
0.05,
0.07407407407407407,
0.06666666666666667,
0.15384615384615385,
0.06896551724137931,
0.03571428571428571,
0.03636363636363636,
0.029411764705882353,
0.15384615384615385,
0.044444444444444446,
0.11764705882352941
] |
def check_calendar_dates(
feed: "Feed", *, as_df: bool = False, include_warnings: bool = False
) -> List:
"""
Analog of :func:`check_agency` for ``feed.calendar_dates``.
"""
table = "calendar_dates"
problems = []
# Preliminary checks
if feed.calendar_dates is None:
return problems
f = feed.calendar_dates.copy()
problems = check_for_required_columns(problems, table, f)
if problems:
return format_problems(problems, as_df=as_df)
if include_warnings:
problems = check_for_invalid_columns(problems, table, f)
# Check service_id
problems = check_column(problems, table, f, "service_id", valid_str)
# Check date
problems = check_column(problems, table, f, "date", valid_date)
# No duplicate (service_id, date) pairs allowed
cond = f[["service_id", "date"]].duplicated()
problems = check_table(
problems, table, f, cond, "Repeated pair (service_id, date)"
)
# Check exception_type
v = lambda x: x in [1, 2]
problems = check_column(problems, table, f, "exception_type", v)
return format_problems(problems, as_df=as_df) | [
"def",
"check_calendar_dates",
"(",
"feed",
":",
"\"Feed\"",
",",
"*",
",",
"as_df",
":",
"bool",
"=",
"False",
",",
"include_warnings",
":",
"bool",
"=",
"False",
")",
"->",
"List",
":",
"table",
"=",
"\"calendar_dates\"",
"problems",
"=",
"[",
"]",
"# Preliminary checks",
"if",
"feed",
".",
"calendar_dates",
"is",
"None",
":",
"return",
"problems",
"f",
"=",
"feed",
".",
"calendar_dates",
".",
"copy",
"(",
")",
"problems",
"=",
"check_for_required_columns",
"(",
"problems",
",",
"table",
",",
"f",
")",
"if",
"problems",
":",
"return",
"format_problems",
"(",
"problems",
",",
"as_df",
"=",
"as_df",
")",
"if",
"include_warnings",
":",
"problems",
"=",
"check_for_invalid_columns",
"(",
"problems",
",",
"table",
",",
"f",
")",
"# Check service_id",
"problems",
"=",
"check_column",
"(",
"problems",
",",
"table",
",",
"f",
",",
"\"service_id\"",
",",
"valid_str",
")",
"# Check date",
"problems",
"=",
"check_column",
"(",
"problems",
",",
"table",
",",
"f",
",",
"\"date\"",
",",
"valid_date",
")",
"# No duplicate (service_id, date) pairs allowed",
"cond",
"=",
"f",
"[",
"[",
"\"service_id\"",
",",
"\"date\"",
"]",
"]",
".",
"duplicated",
"(",
")",
"problems",
"=",
"check_table",
"(",
"problems",
",",
"table",
",",
"f",
",",
"cond",
",",
"\"Repeated pair (service_id, date)\"",
")",
"# Check exception_type",
"v",
"=",
"lambda",
"x",
":",
"x",
"in",
"[",
"1",
",",
"2",
"]",
"problems",
"=",
"check_column",
"(",
"problems",
",",
"table",
",",
"f",
",",
"\"exception_type\"",
",",
"v",
")",
"return",
"format_problems",
"(",
"problems",
",",
"as_df",
"=",
"as_df",
")"
] | 29.263158 | [
0.08,
0.027777777777777776,
0.2,
0.2857142857142857,
0.09523809523809523,
0.2857142857142857,
0.07142857142857142,
0.11764705882352941,
0,
0.08333333333333333,
0.05714285714285714,
0.08695652173913043,
0,
0.058823529411764705,
0.03278688524590164,
0.125,
0.03773584905660377,
0,
0.08333333333333333,
0.03125,
0,
0.09090909090909091,
0.027777777777777776,
0,
0.125,
0.029850746268656716,
0,
0.0392156862745098,
0.04081632653061224,
0.1111111111111111,
0.029411764705882353,
0.6,
0,
0.07692307692307693,
0.10344827586206896,
0.029411764705882353,
0,
0.04081632653061224
] |
def incr(self, key, by=1):
""" :see::meth:RedisMap.incr """
pipe = self._client.pipeline(transaction=False)
pipe.incr(self.get_key(key), by)
if key not in self:
pipe.hincrby(self._bucket_key, self.key_prefix, 1)
result = pipe.execute()
return result[0] | [
"def",
"incr",
"(",
"self",
",",
"key",
",",
"by",
"=",
"1",
")",
":",
"pipe",
"=",
"self",
".",
"_client",
".",
"pipeline",
"(",
"transaction",
"=",
"False",
")",
"pipe",
".",
"incr",
"(",
"self",
".",
"get_key",
"(",
"key",
")",
",",
"by",
")",
"if",
"key",
"not",
"in",
"self",
":",
"pipe",
".",
"hincrby",
"(",
"self",
".",
"_bucket_key",
",",
"self",
".",
"key_prefix",
",",
"1",
")",
"result",
"=",
"pipe",
".",
"execute",
"(",
")",
"return",
"result",
"[",
"0",
"]"
] | 38.125 | [
0.038461538461538464,
0.05,
0.03636363636363636,
0.05,
0.07407407407407407,
0.03225806451612903,
0.06451612903225806,
0.08333333333333333
] |
def insert_at(df, sample_date, values):
""" Insert some values into a bi-temporal dataframe.
This is like what would happen when we get a price correction.
"""
observed_dt = dt(datetime.now())
return multi_index_insert_row(df, [sample_date, observed_dt], values) | [
"def",
"insert_at",
"(",
"df",
",",
"sample_date",
",",
"values",
")",
":",
"observed_dt",
"=",
"dt",
"(",
"datetime",
".",
"now",
"(",
")",
")",
"return",
"multi_index_insert_row",
"(",
"df",
",",
"[",
"sample_date",
",",
"observed_dt",
"]",
",",
"values",
")"
] | 46.833333 | [
0.02564102564102564,
0.03571428571428571,
0.02857142857142857,
0.2857142857142857,
0.05555555555555555,
0.0273972602739726
] |
def clean(ctx, text):
"""
Removes all non-printable characters from a text string
"""
text = conversions.to_string(text, ctx)
return ''.join([c for c in text if ord(c) >= 32]) | [
"def",
"clean",
"(",
"ctx",
",",
"text",
")",
":",
"text",
"=",
"conversions",
".",
"to_string",
"(",
"text",
",",
"ctx",
")",
"return",
"''",
".",
"join",
"(",
"[",
"c",
"for",
"c",
"in",
"text",
"if",
"ord",
"(",
"c",
")",
">=",
"32",
"]",
")"
] | 31.666667 | [
0.047619047619047616,
0.2857142857142857,
0.03389830508474576,
0.2857142857142857,
0.046511627906976744,
0.03773584905660377
] |
def close(self):
""" Shut down, closing any open connections in the pool.
"""
if not self._closed:
self._closed = True
if self._pool is not None:
self._pool.close()
self._pool = None | [
"def",
"close",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"_closed",
":",
"self",
".",
"_closed",
"=",
"True",
"if",
"self",
".",
"_pool",
"is",
"not",
"None",
":",
"self",
".",
"_pool",
".",
"close",
"(",
")",
"self",
".",
"_pool",
"=",
"None"
] | 31.875 | [
0.0625,
0.03125,
0.18181818181818182,
0.07142857142857142,
0.06451612903225806,
0.05263157894736842,
0.058823529411764705,
0.06060606060606061
] |
def _reverse_dependencies(self, cache_keys):
"""
Returns a lookup table of reverse dependencies for all the given cache keys.
Example input:
[('pep8', '1.5.7'),
('flake8', '2.4.0'),
('mccabe', '0.3'),
('pyflakes', '0.8.1')]
Example output:
{'pep8': ['flake8'],
'flake8': [],
'mccabe': ['flake8'],
'pyflakes': ['flake8']}
"""
# First, collect all the dependencies into a sequence of (parent, child) tuples, like [('flake8', 'pep8'),
# ('flake8', 'mccabe'), ...]
return lookup_table((key_from_req(Requirement(dep_name)), name)
for name, version_and_extras in cache_keys
for dep_name in self.cache[name][version_and_extras]) | [
"def",
"_reverse_dependencies",
"(",
"self",
",",
"cache_keys",
")",
":",
"# First, collect all the dependencies into a sequence of (parent, child) tuples, like [('flake8', 'pep8'),",
"# ('flake8', 'mccabe'), ...]",
"return",
"lookup_table",
"(",
"(",
"key_from_req",
"(",
"Requirement",
"(",
"dep_name",
")",
")",
",",
"name",
")",
"for",
"name",
",",
"version_and_extras",
"in",
"cache_keys",
"for",
"dep_name",
"in",
"self",
".",
"cache",
"[",
"name",
"]",
"[",
"version_and_extras",
"]",
")"
] | 34.375 | [
0.022727272727272728,
0.18181818181818182,
0.03571428571428571,
0,
0.09090909090909091,
0,
0.0967741935483871,
0.09090909090909091,
0.0967741935483871,
0.11428571428571428,
0,
0.08695652173913043,
0,
0.09375,
0.11538461538461539,
0.08823529411764706,
0.1111111111111111,
0,
0.18181818181818182,
0.02631578947368421,
0.05555555555555555,
0.04225352112676056,
0.02857142857142857,
0.04938271604938271
] |
def LookupClients(self, keywords):
"""Returns a list of client URNs associated with keywords.
Args:
keywords: The list of keywords to search by.
Returns:
A list of client URNs.
Raises:
ValueError: A string (single keyword) was passed instead of an iterable.
"""
if isinstance(keywords, string_types):
raise ValueError(
"Keywords should be an iterable, not a string (got %s)." % keywords)
start_time, filtered_keywords = self._AnalyzeKeywords(keywords)
keyword_map = data_store.REL_DB.ListClientsForKeywords(
list(map(self._NormalizeKeyword, filtered_keywords)),
start_time=start_time)
results = itervalues(keyword_map)
relevant_set = set(next(results))
for hits in results:
relevant_set &= set(hits)
if not relevant_set:
return []
return sorted(relevant_set) | [
"def",
"LookupClients",
"(",
"self",
",",
"keywords",
")",
":",
"if",
"isinstance",
"(",
"keywords",
",",
"string_types",
")",
":",
"raise",
"ValueError",
"(",
"\"Keywords should be an iterable, not a string (got %s).\"",
"%",
"keywords",
")",
"start_time",
",",
"filtered_keywords",
"=",
"self",
".",
"_AnalyzeKeywords",
"(",
"keywords",
")",
"keyword_map",
"=",
"data_store",
".",
"REL_DB",
".",
"ListClientsForKeywords",
"(",
"list",
"(",
"map",
"(",
"self",
".",
"_NormalizeKeyword",
",",
"filtered_keywords",
")",
")",
",",
"start_time",
"=",
"start_time",
")",
"results",
"=",
"itervalues",
"(",
"keyword_map",
")",
"relevant_set",
"=",
"set",
"(",
"next",
"(",
"results",
")",
")",
"for",
"hits",
"in",
"results",
":",
"relevant_set",
"&=",
"set",
"(",
"hits",
")",
"if",
"not",
"relevant_set",
":",
"return",
"[",
"]",
"return",
"sorted",
"(",
"relevant_set",
")"
] | 26.6875 | [
0.029411764705882353,
0.03225806451612903,
0,
0.2222222222222222,
0.06,
0,
0.16666666666666666,
0.10714285714285714,
0,
0.18181818181818182,
0.05128205128205128,
0.2857142857142857,
0.047619047619047616,
0.17391304347826086,
0.05128205128205128,
0,
0.029850746268656716,
0,
0.05084745762711865,
0.03278688524590164,
0.13333333333333333,
0,
0.05405405405405406,
0.05405405405405406,
0,
0.08333333333333333,
0.0967741935483871,
0,
0.11538461538461539,
0.11764705882352941,
0,
0.06451612903225806
] |
def show(self, ax:plt.Axes, **kwargs):
"Subclass this method if you want to customize the way this `ItemBase` is shown on `ax`."
ax.set_title(str(self)) | [
"def",
"show",
"(",
"self",
",",
"ax",
":",
"plt",
".",
"Axes",
",",
"*",
"*",
"kwargs",
")",
":",
"ax",
".",
"set_title",
"(",
"str",
"(",
"self",
")",
")"
] | 55.333333 | [
0.05263157894736842,
0.030927835051546393,
0.06451612903225806
] |
def apply_gemm(scope, input_name, output_name, container, operator_name=None, alpha=1.0, beta=1.0,
transA=0, transB=0):
"""
Applies operator `gemm <https://github.com/onnx/onnx/blob/master/docs/Operators.md#gemm>`.
"""
name = _create_name_or_use_existing_one(scope, 'Gemm', operator_name)
attrs = {'alpha': alpha, 'beta': beta, 'transA': transA, 'transB': transB}
if container.target_opset < 5:
attrs['op_version'] = 1
attrs['broadcast'] = 1
elif container.target_opset < 7:
attrs['op_version'] = 6
attrs['broadcast'] = 1
else:
attrs['op_version'] = 7
container.add_node('Gemm', input_name, output_name, name=name, **attrs) | [
"def",
"apply_gemm",
"(",
"scope",
",",
"input_name",
",",
"output_name",
",",
"container",
",",
"operator_name",
"=",
"None",
",",
"alpha",
"=",
"1.0",
",",
"beta",
"=",
"1.0",
",",
"transA",
"=",
"0",
",",
"transB",
"=",
"0",
")",
":",
"name",
"=",
"_create_name_or_use_existing_one",
"(",
"scope",
",",
"'Gemm'",
",",
"operator_name",
")",
"attrs",
"=",
"{",
"'alpha'",
":",
"alpha",
",",
"'beta'",
":",
"beta",
",",
"'transA'",
":",
"transA",
",",
"'transB'",
":",
"transB",
"}",
"if",
"container",
".",
"target_opset",
"<",
"5",
":",
"attrs",
"[",
"'op_version'",
"]",
"=",
"1",
"attrs",
"[",
"'broadcast'",
"]",
"=",
"1",
"elif",
"container",
".",
"target_opset",
"<",
"7",
":",
"attrs",
"[",
"'op_version'",
"]",
"=",
"6",
"attrs",
"[",
"'broadcast'",
"]",
"=",
"1",
"else",
":",
"attrs",
"[",
"'op_version'",
"]",
"=",
"7",
"container",
".",
"add_node",
"(",
"'Gemm'",
",",
"input_name",
",",
"output_name",
",",
"name",
"=",
"name",
",",
"*",
"*",
"attrs",
")"
] | 41.117647 | [
0.030612244897959183,
0.17142857142857143,
0.2857142857142857,
0.0851063829787234,
0.2857142857142857,
0.0273972602739726,
0.02564102564102564,
0.058823529411764705,
0.06451612903225806,
0.06666666666666667,
0.05555555555555555,
0.06451612903225806,
0.06666666666666667,
0.2222222222222222,
0.06451612903225806,
0,
0.02666666666666667
] |
def normalizeToTag(val):
"""Converts tags or full names to 2 character tags, case insensitive
# Parameters
_val_: `str`
> A two character string giving the tag or its full name
# Returns
`str`
> The short name of _val_
"""
try:
val = val.upper()
except AttributeError:
raise KeyError("{} is not a tag or name string".format(val))
if val not in tagsAndNameSetUpper:
raise KeyError("{} is not a tag or name string".format(val))
else:
try:
return fullToTagDictUpper[val]
except KeyError:
return val | [
"def",
"normalizeToTag",
"(",
"val",
")",
":",
"try",
":",
"val",
"=",
"val",
".",
"upper",
"(",
")",
"except",
"AttributeError",
":",
"raise",
"KeyError",
"(",
"\"{} is not a tag or name string\"",
".",
"format",
"(",
"val",
")",
")",
"if",
"val",
"not",
"in",
"tagsAndNameSetUpper",
":",
"raise",
"KeyError",
"(",
"\"{} is not a tag or name string\"",
".",
"format",
"(",
"val",
")",
")",
"else",
":",
"try",
":",
"return",
"fullToTagDictUpper",
"[",
"val",
"]",
"except",
"KeyError",
":",
"return",
"val"
] | 22.615385 | [
0.041666666666666664,
0.027777777777777776,
0,
0.125,
0,
0.1875,
0,
0.03333333333333333,
0,
0.15384615384615385,
0,
0.3333333333333333,
0,
0.06896551724137931,
0.2857142857142857,
0.25,
0.08,
0.07692307692307693,
0.029411764705882353,
0.05263157894736842,
0.029411764705882353,
0.2222222222222222,
0.16666666666666666,
0.047619047619047616,
0.08333333333333333,
0.09090909090909091
] |
def from_location(cls, location):
"""Try to create a Ladybug location from a location string.
Args:
locationString: Location string
Usage:
l = Location.from_location(locationString)
"""
if not location:
return cls()
try:
if hasattr(location, 'isLocation'):
# Ladybug location
return location
elif hasattr(location, 'Latitude'):
# Revit's location
return cls(city=str(location.Name.replace(",", " ")),
latitude=location.Latitude,
longitude=location.Longitude)
elif location.startswith('Site:'):
loc, city, latitude, longitude, time_zone, elevation = \
[x.strip() for x in re.findall(r'\r*\n*([^\r\n]*)[,|;]',
location, re.DOTALL)]
else:
try:
city, latitude, longitude, time_zone, elevation = \
[key.split(":")[-1].strip()
for key in location.split(",")]
except ValueError:
# it's just the city name
return cls(city=location)
return cls(city=city, country=None, latitude=latitude,
longitude=longitude, time_zone=time_zone,
elevation=elevation)
except Exception as e:
raise ValueError(
"Failed to create a Location from %s!\n%s" % (location, e)) | [
"def",
"from_location",
"(",
"cls",
",",
"location",
")",
":",
"if",
"not",
"location",
":",
"return",
"cls",
"(",
")",
"try",
":",
"if",
"hasattr",
"(",
"location",
",",
"'isLocation'",
")",
":",
"# Ladybug location",
"return",
"location",
"elif",
"hasattr",
"(",
"location",
",",
"'Latitude'",
")",
":",
"# Revit's location",
"return",
"cls",
"(",
"city",
"=",
"str",
"(",
"location",
".",
"Name",
".",
"replace",
"(",
"\",\"",
",",
"\" \"",
")",
")",
",",
"latitude",
"=",
"location",
".",
"Latitude",
",",
"longitude",
"=",
"location",
".",
"Longitude",
")",
"elif",
"location",
".",
"startswith",
"(",
"'Site:'",
")",
":",
"loc",
",",
"city",
",",
"latitude",
",",
"longitude",
",",
"time_zone",
",",
"elevation",
"=",
"[",
"x",
".",
"strip",
"(",
")",
"for",
"x",
"in",
"re",
".",
"findall",
"(",
"r'\\r*\\n*([^\\r\\n]*)[,|;]'",
",",
"location",
",",
"re",
".",
"DOTALL",
")",
"]",
"else",
":",
"try",
":",
"city",
",",
"latitude",
",",
"longitude",
",",
"time_zone",
",",
"elevation",
"=",
"[",
"key",
".",
"split",
"(",
"\":\"",
")",
"[",
"-",
"1",
"]",
".",
"strip",
"(",
")",
"for",
"key",
"in",
"location",
".",
"split",
"(",
"\",\"",
")",
"]",
"except",
"ValueError",
":",
"# it's just the city name",
"return",
"cls",
"(",
"city",
"=",
"location",
")",
"return",
"cls",
"(",
"city",
"=",
"city",
",",
"country",
"=",
"None",
",",
"latitude",
"=",
"latitude",
",",
"longitude",
"=",
"longitude",
",",
"time_zone",
"=",
"time_zone",
",",
"elevation",
"=",
"elevation",
")",
"except",
"Exception",
"as",
"e",
":",
"raise",
"ValueError",
"(",
"\"Failed to create a Location from %s!\\n%s\"",
"%",
"(",
"location",
",",
"e",
")",
")"
] | 36.72093 | [
0.030303030303030304,
0.029850746268656716,
0,
0.15384615384615385,
0.046511627906976744,
0,
0.14285714285714285,
0,
0.05555555555555555,
0.18181818181818182,
0.08333333333333333,
0.08333333333333333,
0.16666666666666666,
0.0425531914893617,
0.058823529411764705,
0.06451612903225806,
0,
0.0425531914893617,
0.058823529411764705,
0.043478260869565216,
0.07407407407407407,
0.08928571428571429,
0,
0.043478260869565216,
0.027777777777777776,
0.039473684210526314,
0.05555555555555555,
0.11764705882352941,
0.1,
0.028169014084507043,
0.058823529411764705,
0.07142857142857142,
0.058823529411764705,
0.044444444444444446,
0.044444444444444446,
0,
0.045454545454545456,
0.078125,
0.11627906976744186,
0,
0.06666666666666667,
0.10344827586206896,
0.04
] |
def load_glove(filepath, batch_size=1000, limit=None, verbose=True):
r""" Load a pretrained GloVE word vector model
First header line of GloVE text file should look like:
400000 50\n
First vector of GloVE text file should look like:
the .12 .22 .32 .42 ... .42
>>> wv = load_glove(os.path.join(BIGDATA_PATH, 'glove_test.txt'))
>>> wv.most_similar('and')[:3]
[(',', 0.92...),
('.', 0.91...),
('of', 0.86...)]
"""
num_dim = isglove(filepath)
tqdm_prog = tqdm if verbose else no_tqdm
wv = KeyedVectors(num_dim)
if limit:
vocab_size = int(limit)
else:
with ensure_open(filepath) as fin:
for i, line in enumerate(fin):
pass
vocab_size = i + 1
wv.vectors = np.zeros((vocab_size, num_dim), REAL)
with ensure_open(filepath) as fin:
batch, words = [], []
for i, line in enumerate(tqdm_prog(fin, total=vocab_size)):
line = line.split()
word = line[0]
vector = np.array(line[1:]).astype(float)
# words.append(word)
# batch.append(vector)
wv.index2word.append(word)
wv.vocab[word] = Vocab(index=i, count=vocab_size - i)
wv.vectors[i] = vector
if len(words) >= batch_size:
# wv[words] = np.array(batch)
batch, words = [], []
if i >= vocab_size - 1:
break
if words:
wv[words] = np.array(batch)
return wv | [
"def",
"load_glove",
"(",
"filepath",
",",
"batch_size",
"=",
"1000",
",",
"limit",
"=",
"None",
",",
"verbose",
"=",
"True",
")",
":",
"num_dim",
"=",
"isglove",
"(",
"filepath",
")",
"tqdm_prog",
"=",
"tqdm",
"if",
"verbose",
"else",
"no_tqdm",
"wv",
"=",
"KeyedVectors",
"(",
"num_dim",
")",
"if",
"limit",
":",
"vocab_size",
"=",
"int",
"(",
"limit",
")",
"else",
":",
"with",
"ensure_open",
"(",
"filepath",
")",
"as",
"fin",
":",
"for",
"i",
",",
"line",
"in",
"enumerate",
"(",
"fin",
")",
":",
"pass",
"vocab_size",
"=",
"i",
"+",
"1",
"wv",
".",
"vectors",
"=",
"np",
".",
"zeros",
"(",
"(",
"vocab_size",
",",
"num_dim",
")",
",",
"REAL",
")",
"with",
"ensure_open",
"(",
"filepath",
")",
"as",
"fin",
":",
"batch",
",",
"words",
"=",
"[",
"]",
",",
"[",
"]",
"for",
"i",
",",
"line",
"in",
"enumerate",
"(",
"tqdm_prog",
"(",
"fin",
",",
"total",
"=",
"vocab_size",
")",
")",
":",
"line",
"=",
"line",
".",
"split",
"(",
")",
"word",
"=",
"line",
"[",
"0",
"]",
"vector",
"=",
"np",
".",
"array",
"(",
"line",
"[",
"1",
":",
"]",
")",
".",
"astype",
"(",
"float",
")",
"# words.append(word)",
"# batch.append(vector)",
"wv",
".",
"index2word",
".",
"append",
"(",
"word",
")",
"wv",
".",
"vocab",
"[",
"word",
"]",
"=",
"Vocab",
"(",
"index",
"=",
"i",
",",
"count",
"=",
"vocab_size",
"-",
"i",
")",
"wv",
".",
"vectors",
"[",
"i",
"]",
"=",
"vector",
"if",
"len",
"(",
"words",
")",
">=",
"batch_size",
":",
"# wv[words] = np.array(batch)",
"batch",
",",
"words",
"=",
"[",
"]",
",",
"[",
"]",
"if",
"i",
">=",
"vocab_size",
"-",
"1",
":",
"break",
"if",
"words",
":",
"wv",
"[",
"words",
"]",
"=",
"np",
".",
"array",
"(",
"batch",
")",
"return",
"wv"
] | 31.702128 | [
0.014705882352941176,
0.04,
0,
0.034482758620689655,
0.10526315789473684,
0.03773584905660377,
0.05714285714285714,
0,
0.043478260869565216,
0.08823529411764706,
0.15,
0.15,
0.19047619047619047,
0.2857142857142857,
0.06451612903225806,
0.045454545454545456,
0.06666666666666667,
0,
0.15384615384615385,
0.06451612903225806,
0.2222222222222222,
0.047619047619047616,
0.047619047619047616,
0.1,
0.07692307692307693,
0,
0.037037037037037035,
0,
0.05263157894736842,
0.06896551724137931,
0.029850746268656716,
0.06451612903225806,
0.07692307692307693,
0.03773584905660377,
0.0625,
0.058823529411764705,
0.05263157894736842,
0.03076923076923077,
0.058823529411764705,
0.05,
0.044444444444444446,
0.05405405405405406,
0.05714285714285714,
0.09523809523809523,
0.11764705882352941,
0.05128205128205128,
0.15384615384615385
] |
def ops(self, start=None, stop=None, only_virtual_ops=False):
""" Yields all operations (including virtual operations) starting from ``start``.
:param int start: Starting block
:param int stop: Stop at this block
:param str mode: We here have the choice between
* "head": the last block
* "irreversible": the block that is confirmed by 2/3 of all block producers and is thus irreversible!
:param bool only_virtual_ops: Only yield virtual operations
This call returns a list with elements that look like
this and carries only one operation each:::
{'block': 8411453,
'op': ['vote',
{'author': 'dana-edwards',
'permlink': 'church-encoding-numbers-defined-as-functions',
'voter': 'juanmiguelsalas',
'weight': 6000}],
'op_in_trx': 0,
'timestamp': '2017-01-12T12:26:03',
'trx_id': 'e897886e8b7560f37da31eb1a42177c6f236c985',
'trx_in_block': 1,
'virtual_op': 0}
"""
# Let's find out how often blocks are generated!
block_interval = self.config().get("STEEMIT_BLOCK_INTERVAL")
if not start:
start = self.get_current_block_num()
# We are going to loop indefinitely
while True:
# Get chain properies to identify the
head_block = self.get_current_block_num()
# Blocks from start until head block
for block_num in range(start, head_block + 1):
# Get full block
yield from self.steem.rpc.get_ops_in_block(block_num, only_virtual_ops)
# Set new start
start = head_block + 1
if stop and start > stop:
break
# Sleep for one block
time.sleep(block_interval) | [
"def",
"ops",
"(",
"self",
",",
"start",
"=",
"None",
",",
"stop",
"=",
"None",
",",
"only_virtual_ops",
"=",
"False",
")",
":",
"# Let's find out how often blocks are generated!",
"block_interval",
"=",
"self",
".",
"config",
"(",
")",
".",
"get",
"(",
"\"STEEMIT_BLOCK_INTERVAL\"",
")",
"if",
"not",
"start",
":",
"start",
"=",
"self",
".",
"get_current_block_num",
"(",
")",
"# We are going to loop indefinitely",
"while",
"True",
":",
"# Get chain properies to identify the",
"head_block",
"=",
"self",
".",
"get_current_block_num",
"(",
")",
"# Blocks from start until head block",
"for",
"block_num",
"in",
"range",
"(",
"start",
",",
"head_block",
"+",
"1",
")",
":",
"# Get full block",
"yield",
"from",
"self",
".",
"steem",
".",
"rpc",
".",
"get_ops_in_block",
"(",
"block_num",
",",
"only_virtual_ops",
")",
"# Set new start",
"start",
"=",
"head_block",
"+",
"1",
"if",
"stop",
"and",
"start",
">",
"stop",
":",
"break",
"# Sleep for one block",
"time",
".",
"sleep",
"(",
"block_interval",
")"
] | 37.480769 | [
0.01639344262295082,
0.033707865168539325,
0,
0.06818181818181818,
0.06382978723404255,
0.05,
0.07317073170731707,
0.03389830508474576,
0.04225352112676056,
0,
0.03076923076923077,
0.07272727272727272,
0,
0.08823529411764706,
0.12903225806451613,
0.06,
0.047619047619047616,
0.057692307692307696,
0.09523809523809523,
0.09375,
0.057692307692307696,
0.04285714285714286,
0.08571428571428572,
0.12121212121212122,
0,
0.18181818181818182,
0,
0.03571428571428571,
0.029411764705882353,
0,
0.09523809523809523,
0.041666666666666664,
0,
0.046511627906976744,
0.10526315789473684,
0,
0.04081632653061224,
0.03773584905660377,
0,
0.041666666666666664,
0.034482758620689655,
0.0625,
0.034482758620689655,
0,
0.07407407407407407,
0.058823529411764705,
0,
0.05405405405405406,
0.09523809523809523,
0,
0.06060606060606061,
0.05263157894736842
] |
def typical_price(close_data, high_data, low_data):
"""
Typical Price.
Formula:
TPt = (HIGHt + LOWt + CLOSEt) / 3
"""
catch_errors.check_for_input_len_diff(close_data, high_data, low_data)
tp = [(high_data[idx] + low_data[idx] + close_data[idx]) / 3 for idx in range(0, len(close_data))]
return np.array(tp) | [
"def",
"typical_price",
"(",
"close_data",
",",
"high_data",
",",
"low_data",
")",
":",
"catch_errors",
".",
"check_for_input_len_diff",
"(",
"close_data",
",",
"high_data",
",",
"low_data",
")",
"tp",
"=",
"[",
"(",
"high_data",
"[",
"idx",
"]",
"+",
"low_data",
"[",
"idx",
"]",
"+",
"close_data",
"[",
"idx",
"]",
")",
"/",
"3",
"for",
"idx",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"close_data",
")",
")",
"]",
"return",
"np",
".",
"array",
"(",
"tp",
")"
] | 33.1 | [
0.0196078431372549,
0.2857142857142857,
0.1111111111111111,
0,
0.16666666666666666,
0.05405405405405406,
0.2857142857142857,
0.02702702702702703,
0.029411764705882353,
0.08695652173913043
] |
def update_old_to_new(self, col, old_val, new_val):
""" simply updates all rows and sets COL to NEW_VAL where col = old_val
e.g. update_old_to_new("NAME", "The University of Adelaide", "University of Adelaide")
will generate
UPDATE table op SET op.NAME = 'University of Adelaide' WHERE op.NAME = 'The University of Adelaide';
"""
self.sql_text += "UPDATE " + self.fact_table + " SET " + col + " = '" + new_val + "' WHERE " + col + " = '" + old_val + "'; \n" | [
"def",
"update_old_to_new",
"(",
"self",
",",
"col",
",",
"old_val",
",",
"new_val",
")",
":",
"self",
".",
"sql_text",
"+=",
"\"UPDATE \"",
"+",
"self",
".",
"fact_table",
"+",
"\" SET \"",
"+",
"col",
"+",
"\" = '\"",
"+",
"new_val",
"+",
"\"' WHERE \"",
"+",
"col",
"+",
"\" = '\"",
"+",
"old_val",
"+",
"\"'; \\n\""
] | 71.285714 | [
0.0196078431372549,
0.02531645569620253,
0.031914893617021274,
0.09523809523809523,
0.037037037037037035,
0.18181818181818182,
0.022222222222222223
] |
def _addToHosts(self, node, destinationIP=None):
"""
Add an "privateIP hostname" line to the /etc/hosts file. If destinationIP is given,
do this on the remote machine.
Azure VMs sometimes fail to initialize, causing the appliance to fail.
This error is given:
Failed to obtain the IP address for 'l7d41a19b-15a6-442c-8ba1-9678a951d824';
the DNS service may not be able to resolve it: Name or service not known.
This method is a fix.
:param node: Node to add to /etc/hosts.
:param destinationIP: A remote host's address
"""
cmd = "echo %s %s | sudo tee --append /etc/hosts > /dev/null" % (node.privateIP, node.name)
logger.debug("Running command %s on %s" % (cmd, destinationIP))
if destinationIP:
subprocess.Popen(["ssh", "-oStrictHostKeyChecking=no", "core@%s" % destinationIP, cmd])
else:
subprocess.Popen(cmd, shell=True) | [
"def",
"_addToHosts",
"(",
"self",
",",
"node",
",",
"destinationIP",
"=",
"None",
")",
":",
"cmd",
"=",
"\"echo %s %s | sudo tee --append /etc/hosts > /dev/null\"",
"%",
"(",
"node",
".",
"privateIP",
",",
"node",
".",
"name",
")",
"logger",
".",
"debug",
"(",
"\"Running command %s on %s\"",
"%",
"(",
"cmd",
",",
"destinationIP",
")",
")",
"if",
"destinationIP",
":",
"subprocess",
".",
"Popen",
"(",
"[",
"\"ssh\"",
",",
"\"-oStrictHostKeyChecking=no\"",
",",
"\"core@%s\"",
"%",
"destinationIP",
",",
"cmd",
"]",
")",
"else",
":",
"subprocess",
".",
"Popen",
"(",
"cmd",
",",
"shell",
"=",
"True",
")"
] | 47.85 | [
0.020833333333333332,
0.18181818181818182,
0.04395604395604396,
0.05263157894736842,
0,
0.02564102564102564,
0.07142857142857142,
0.05747126436781609,
0.047619047619047616,
0.06896551724137931,
0,
0.0851063829787234,
0.05660377358490566,
0.18181818181818182,
0.030303030303030304,
0.028169014084507043,
0.08,
0.030303030303030304,
0.15384615384615385,
0.044444444444444446
] |
def parse_point_source_node(node, mfd_spacing=0.1):
"""
Returns an "areaSource" node into an instance of the :class:
openquake.hmtk.sources.area.mtkAreaSource
"""
assert "pointSource" in node.tag
pnt_taglist = get_taglist(node)
# Get metadata
point_id, name, trt = (node.attrib["id"],
node.attrib["name"],
node.attrib["tectonicRegion"])
assert point_id # Defensive validation!
# Process geometry
location, upper_depth, lower_depth = node_to_point_geometry(
node.nodes[pnt_taglist.index("pointGeometry")])
# Process scaling relation
msr = node_to_scalerel(node.nodes[pnt_taglist.index("magScaleRel")])
# Process aspect ratio
aspect = float_(node.nodes[pnt_taglist.index("ruptAspectRatio")].text)
# Process MFD
mfd = node_to_mfd(node, pnt_taglist)
# Process nodal planes
npds = node_to_nodal_planes(
node.nodes[pnt_taglist.index("nodalPlaneDist")])
# Process hypocentral depths
hdds = node_to_hdd(node.nodes[pnt_taglist.index("hypoDepthDist")])
return mtkPointSource(point_id, name, trt,
geometry=location,
upper_depth=upper_depth,
lower_depth=lower_depth,
mag_scale_rel=msr,
rupt_aspect_ratio=aspect,
mfd=mfd,
nodal_plane_dist=npds,
hypo_depth_dist=hdds) | [
"def",
"parse_point_source_node",
"(",
"node",
",",
"mfd_spacing",
"=",
"0.1",
")",
":",
"assert",
"\"pointSource\"",
"in",
"node",
".",
"tag",
"pnt_taglist",
"=",
"get_taglist",
"(",
"node",
")",
"# Get metadata",
"point_id",
",",
"name",
",",
"trt",
"=",
"(",
"node",
".",
"attrib",
"[",
"\"id\"",
"]",
",",
"node",
".",
"attrib",
"[",
"\"name\"",
"]",
",",
"node",
".",
"attrib",
"[",
"\"tectonicRegion\"",
"]",
")",
"assert",
"point_id",
"# Defensive validation!",
"# Process geometry",
"location",
",",
"upper_depth",
",",
"lower_depth",
"=",
"node_to_point_geometry",
"(",
"node",
".",
"nodes",
"[",
"pnt_taglist",
".",
"index",
"(",
"\"pointGeometry\"",
")",
"]",
")",
"# Process scaling relation",
"msr",
"=",
"node_to_scalerel",
"(",
"node",
".",
"nodes",
"[",
"pnt_taglist",
".",
"index",
"(",
"\"magScaleRel\"",
")",
"]",
")",
"# Process aspect ratio",
"aspect",
"=",
"float_",
"(",
"node",
".",
"nodes",
"[",
"pnt_taglist",
".",
"index",
"(",
"\"ruptAspectRatio\"",
")",
"]",
".",
"text",
")",
"# Process MFD",
"mfd",
"=",
"node_to_mfd",
"(",
"node",
",",
"pnt_taglist",
")",
"# Process nodal planes",
"npds",
"=",
"node_to_nodal_planes",
"(",
"node",
".",
"nodes",
"[",
"pnt_taglist",
".",
"index",
"(",
"\"nodalPlaneDist\"",
")",
"]",
")",
"# Process hypocentral depths",
"hdds",
"=",
"node_to_hdd",
"(",
"node",
".",
"nodes",
"[",
"pnt_taglist",
".",
"index",
"(",
"\"hypoDepthDist\"",
")",
"]",
")",
"return",
"mtkPointSource",
"(",
"point_id",
",",
"name",
",",
"trt",
",",
"geometry",
"=",
"location",
",",
"upper_depth",
"=",
"upper_depth",
",",
"lower_depth",
"=",
"lower_depth",
",",
"mag_scale_rel",
"=",
"msr",
",",
"rupt_aspect_ratio",
"=",
"aspect",
",",
"mfd",
"=",
"mfd",
",",
"nodal_plane_dist",
"=",
"npds",
",",
"hypo_depth_dist",
"=",
"hdds",
")"
] | 42.457143 | [
0.0196078431372549,
0.2857142857142857,
0.0625,
0.044444444444444446,
0.2857142857142857,
0.05555555555555555,
0.05714285714285714,
0.1111111111111111,
0.06666666666666667,
0.06382978723404255,
0.07017543859649122,
0.045454545454545456,
0.09090909090909091,
0.046875,
0.05454545454545454,
0.06666666666666667,
0.027777777777777776,
0.07692307692307693,
0.02702702702702703,
0.11764705882352941,
0.05,
0.07692307692307693,
0.09375,
0.05357142857142857,
0.0625,
0.02857142857142857,
0.06521739130434782,
0.09090909090909091,
0.08,
0.08,
0.09090909090909091,
0.0784313725490196,
0.11764705882352941,
0.08333333333333333,
0.10638297872340426
] |
def side_by_side(left, right):
r"""Put two boxes next to each other.
Assumes that all lines in the boxes are the same width.
Example:
>>> left = 'A \nC '
>>> right = 'B\nD'
>>> print(side_by_side(left, right))
A B
C D
<BLANKLINE>
"""
left_lines = list(left.split('\n'))
right_lines = list(right.split('\n'))
# Pad the shorter column with whitespace
diff = abs(len(left_lines) - len(right_lines))
if len(left_lines) > len(right_lines):
fill = ' ' * len(right_lines[0])
right_lines += [fill] * diff
elif len(right_lines) > len(left_lines):
fill = ' ' * len(left_lines[0])
left_lines += [fill] * diff
return '\n'.join(a + b for a, b in zip(left_lines, right_lines)) + '\n' | [
"def",
"side_by_side",
"(",
"left",
",",
"right",
")",
":",
"left_lines",
"=",
"list",
"(",
"left",
".",
"split",
"(",
"'\\n'",
")",
")",
"right_lines",
"=",
"list",
"(",
"right",
".",
"split",
"(",
"'\\n'",
")",
")",
"# Pad the shorter column with whitespace",
"diff",
"=",
"abs",
"(",
"len",
"(",
"left_lines",
")",
"-",
"len",
"(",
"right_lines",
")",
")",
"if",
"len",
"(",
"left_lines",
")",
">",
"len",
"(",
"right_lines",
")",
":",
"fill",
"=",
"' '",
"*",
"len",
"(",
"right_lines",
"[",
"0",
"]",
")",
"right_lines",
"+=",
"[",
"fill",
"]",
"*",
"diff",
"elif",
"len",
"(",
"right_lines",
")",
">",
"len",
"(",
"left_lines",
")",
":",
"fill",
"=",
"' '",
"*",
"len",
"(",
"left_lines",
"[",
"0",
"]",
")",
"left_lines",
"+=",
"[",
"fill",
"]",
"*",
"diff",
"return",
"'\\n'",
".",
"join",
"(",
"a",
"+",
"b",
"for",
"a",
",",
"b",
"in",
"zip",
"(",
"left_lines",
",",
"right_lines",
")",
")",
"+",
"'\\n'"
] | 29.692308 | [
0.03333333333333333,
0.04878048780487805,
0,
0.03389830508474576,
0,
0.16666666666666666,
0.1111111111111111,
0.11538461538461539,
0.06818181818181818,
0.18181818181818182,
0.18181818181818182,
0.15789473684210525,
0.2857142857142857,
0.05128205128205128,
0.04878048780487805,
0,
0.045454545454545456,
0.04,
0.047619047619047616,
0.05,
0.05555555555555555,
0.045454545454545456,
0.05128205128205128,
0.05714285714285714,
0,
0.02666666666666667
] |
def handle_not_found(exception, **extra):
"""Custom blueprint exception handler."""
assert isinstance(exception, NotFound)
page = Page.query.filter(db.or_(Page.url == request.path,
Page.url == request.path + "/")).first()
if page:
_add_url_rule(page.url)
return render_template(
[
page.template_name,
current_app.config['PAGES_DEFAULT_TEMPLATE']
],
page=page
)
elif 'wrapped' in extra:
return extra['wrapped'](exception)
else:
return exception | [
"def",
"handle_not_found",
"(",
"exception",
",",
"*",
"*",
"extra",
")",
":",
"assert",
"isinstance",
"(",
"exception",
",",
"NotFound",
")",
"page",
"=",
"Page",
".",
"query",
".",
"filter",
"(",
"db",
".",
"or_",
"(",
"Page",
".",
"url",
"==",
"request",
".",
"path",
",",
"Page",
".",
"url",
"==",
"request",
".",
"path",
"+",
"\"/\"",
")",
")",
".",
"first",
"(",
")",
"if",
"page",
":",
"_add_url_rule",
"(",
"page",
".",
"url",
")",
"return",
"render_template",
"(",
"[",
"page",
".",
"template_name",
",",
"current_app",
".",
"config",
"[",
"'PAGES_DEFAULT_TEMPLATE'",
"]",
"]",
",",
"page",
"=",
"page",
")",
"elif",
"'wrapped'",
"in",
"extra",
":",
"return",
"extra",
"[",
"'wrapped'",
"]",
"(",
"exception",
")",
"else",
":",
"return",
"exception"
] | 29.7 | [
0.024390243902439025,
0.044444444444444446,
0.047619047619047616,
0,
0.04918032786885246,
0.039473684210526314,
0,
0.16666666666666666,
0.06451612903225806,
0.0967741935483871,
0.23076923076923078,
0.05714285714285714,
0.03333333333333333,
0.21428571428571427,
0.14285714285714285,
0.3333333333333333,
0.07142857142857142,
0.047619047619047616,
0.2222222222222222,
0.08333333333333333
] |
def get_slot_position_for(self, instance):
"""Returns the slot where the instance passed in is located. If not
found, returns None
"""
uid = api.get_uid(instance)
slot = filter(lambda s: s['analysis_uid'] == uid, self.getLayout())
if not slot:
return None
return to_int(slot[0]['position']) | [
"def",
"get_slot_position_for",
"(",
"self",
",",
"instance",
")",
":",
"uid",
"=",
"api",
".",
"get_uid",
"(",
"instance",
")",
"slot",
"=",
"filter",
"(",
"lambda",
"s",
":",
"s",
"[",
"'analysis_uid'",
"]",
"==",
"uid",
",",
"self",
".",
"getLayout",
"(",
")",
")",
"if",
"not",
"slot",
":",
"return",
"None",
"return",
"to_int",
"(",
"slot",
"[",
"0",
"]",
"[",
"'position'",
"]",
")"
] | 38.888889 | [
0.023809523809523808,
0.02666666666666667,
0.07407407407407407,
0.18181818181818182,
0.05714285714285714,
0.02666666666666667,
0.1,
0.08695652173913043,
0.047619047619047616
] |
def values(self, **kwargs):
"""Get the view's values"""
result = yield self.get(**kwargs)
if not result['rows']:
raise Return([])
raise Return([x['value'] for x in result['rows']]) | [
"def",
"values",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"result",
"=",
"yield",
"self",
".",
"get",
"(",
"*",
"*",
"kwargs",
")",
"if",
"not",
"result",
"[",
"'rows'",
"]",
":",
"raise",
"Return",
"(",
"[",
"]",
")",
"raise",
"Return",
"(",
"[",
"x",
"[",
"'value'",
"]",
"for",
"x",
"in",
"result",
"[",
"'rows'",
"]",
"]",
")"
] | 27.375 | [
0.037037037037037035,
0.05714285714285714,
0.04878048780487805,
0,
0.06666666666666667,
0.07142857142857142,
0,
0.034482758620689655
] |
def ghuser_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
"""Link to a GitHub user.
Returns 2 part tuple containing list of nodes to insert into the
document and a list of system messages. Both are allowed to be
empty.
:param name: The role name used in the document.
:param rawtext: The entire markup snippet, with role.
:param text: The text marked with the role.
:param lineno: The line number where rawtext appears in the input.
:param inliner: The inliner instance that called us.
:param options: Directive options for customization.
:param content: The directive content for customization.
"""
app = inliner.document.settings.env.app
#app.info('user link %r' % text)
ref = 'https://www.github.com/' + text
node = nodes.reference(rawtext, text, refuri=ref, **options)
return [node], [] | [
"def",
"ghuser_role",
"(",
"name",
",",
"rawtext",
",",
"text",
",",
"lineno",
",",
"inliner",
",",
"options",
"=",
"{",
"}",
",",
"content",
"=",
"[",
"]",
")",
":",
"app",
"=",
"inliner",
".",
"document",
".",
"settings",
".",
"env",
".",
"app",
"#app.info('user link %r' % text)",
"ref",
"=",
"'https://www.github.com/'",
"+",
"text",
"node",
"=",
"nodes",
".",
"reference",
"(",
"rawtext",
",",
"text",
",",
"refuri",
"=",
"ref",
",",
"*",
"*",
"options",
")",
"return",
"[",
"node",
"]",
",",
"[",
"]"
] | 43.15 | [
0.01282051282051282,
0.06896551724137931,
0,
0.029411764705882353,
0.029850746268656716,
0.2,
0,
0.057692307692307696,
0.05263157894736842,
0.06382978723404255,
0.04285714285714286,
0.05357142857142857,
0.05357142857142857,
0.05,
0.2857142857142857,
0.046511627906976744,
0.08333333333333333,
0.047619047619047616,
0.03125,
0.09523809523809523
] |
def __render_onto_sprite(self):
"""Render the font onto a surface and store the surface into a sprite so we can do more complex stuff with it.
(such as rotation and scale)"""
font_surface = self.__font.render(self.__text, True, self.color)
self.__font_sprite.apply_texture(font_surface)
self.__font_sprite.center_origin() | [
"def",
"__render_onto_sprite",
"(",
"self",
")",
":",
"font_surface",
"=",
"self",
".",
"__font",
".",
"render",
"(",
"self",
".",
"__text",
",",
"True",
",",
"self",
".",
"color",
")",
"self",
".",
"__font_sprite",
".",
"apply_texture",
"(",
"font_surface",
")",
"self",
".",
"__font_sprite",
".",
"center_origin",
"(",
")"
] | 59.333333 | [
0.03225806451612903,
0.025423728813559324,
0.07692307692307693,
0.027777777777777776,
0.037037037037037035,
0.047619047619047616
] |
def _build_table_options(self, row):
""" Setup the mostly-non-schema table options, like caching settings """
return dict((o, row.get(o)) for o in self.recognized_table_options if o in row) | [
"def",
"_build_table_options",
"(",
"self",
",",
"row",
")",
":",
"return",
"dict",
"(",
"(",
"o",
",",
"row",
".",
"get",
"(",
"o",
")",
")",
"for",
"o",
"in",
"self",
".",
"recognized_table_options",
"if",
"o",
"in",
"row",
")"
] | 67.666667 | [
0.027777777777777776,
0.0375,
0.034482758620689655
] |
def _generate_csv_header_line(*, header_names, header_prefix='', header=True, sep=',', newline='\n'):
"""
Helper function to generate a CSV header line depending on
the combination of arguments provided.
"""
if isinstance(header, str): # user-provided header line
header_line = header + newline
else:
if not (header is None or isinstance(header, bool)):
raise ValueError(f"Invalid value for argument `header`: {header}")
else:
if header:
header_line = header_prefix + sep.join(header_names) + newline
else:
header_line = ""
return header_line | [
"def",
"_generate_csv_header_line",
"(",
"*",
",",
"header_names",
",",
"header_prefix",
"=",
"''",
",",
"header",
"=",
"True",
",",
"sep",
"=",
"','",
",",
"newline",
"=",
"'\\n'",
")",
":",
"if",
"isinstance",
"(",
"header",
",",
"str",
")",
":",
"# user-provided header line",
"header_line",
"=",
"header",
"+",
"newline",
"else",
":",
"if",
"not",
"(",
"header",
"is",
"None",
"or",
"isinstance",
"(",
"header",
",",
"bool",
")",
")",
":",
"raise",
"ValueError",
"(",
"f\"Invalid value for argument `header`: {header}\"",
")",
"else",
":",
"if",
"header",
":",
"header_line",
"=",
"header_prefix",
"+",
"sep",
".",
"join",
"(",
"header_names",
")",
"+",
"newline",
"else",
":",
"header_line",
"=",
"\"\"",
"return",
"header_line"
] | 40.5 | [
0.019801980198019802,
0.2857142857142857,
0.03225806451612903,
0.047619047619047616,
0.2857142857142857,
0.03333333333333333,
0.05263157894736842,
0.2222222222222222,
0.03333333333333333,
0.02564102564102564,
0.15384615384615385,
0.09090909090909091,
0.02564102564102564,
0.11764705882352941,
0.0625,
0.09090909090909091
] |
def classical(self, **kwargs):
"""
turn classical mode on and off for various categories
turn on all classical modes:
classical()
classical(all=True)
turn on or off specific claassical modes:
e.g.
classical(herd=True)
classical(names=False)
By default all classical modes are off except names.
unknown value in args or key in kwargs rasies
exception: UnknownClasicalModeError
"""
classical_mode = list(def_classical.keys())
if not kwargs:
self.classical_dict = all_classical.copy()
return
if "all" in kwargs:
if kwargs["all"]:
self.classical_dict = all_classical.copy()
else:
self.classical_dict = no_classical.copy()
for k, v in list(kwargs.items()):
if k in classical_mode:
self.classical_dict[k] = v
else:
raise UnknownClassicalModeError | [
"def",
"classical",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"classical_mode",
"=",
"list",
"(",
"def_classical",
".",
"keys",
"(",
")",
")",
"if",
"not",
"kwargs",
":",
"self",
".",
"classical_dict",
"=",
"all_classical",
".",
"copy",
"(",
")",
"return",
"if",
"\"all\"",
"in",
"kwargs",
":",
"if",
"kwargs",
"[",
"\"all\"",
"]",
":",
"self",
".",
"classical_dict",
"=",
"all_classical",
".",
"copy",
"(",
")",
"else",
":",
"self",
".",
"classical_dict",
"=",
"no_classical",
".",
"copy",
"(",
")",
"for",
"k",
",",
"v",
"in",
"list",
"(",
"kwargs",
".",
"items",
"(",
")",
")",
":",
"if",
"k",
"in",
"classical_mode",
":",
"self",
".",
"classical_dict",
"[",
"k",
"]",
"=",
"v",
"else",
":",
"raise",
"UnknownClassicalModeError"
] | 28.970588 | [
0.03333333333333333,
0.18181818181818182,
0.03278688524590164,
0,
0.05555555555555555,
0.10526315789473684,
0.07407407407407407,
0,
0.04081632653061224,
0.16666666666666666,
0.07142857142857142,
0.06666666666666667,
0,
0.03333333333333333,
0,
0.03773584905660377,
0.046511627906976744,
0,
0.18181818181818182,
0.0392156862745098,
0.09090909090909091,
0.037037037037037035,
0.1111111111111111,
0.07407407407407407,
0.06896551724137931,
0.034482758620689655,
0.11764705882352941,
0.03508771929824561,
0,
0.04878048780487805,
0.05714285714285714,
0.047619047619047616,
0.11764705882352941,
0.0425531914893617
] |
def multi_call(*methods, **kwargs):
'''
Invoke multiple Netmiko methods at once, and return their output, as list.
methods
A list of dictionaries with the following keys:
- ``name``: the name of the Netmiko method to be executed.
- ``args``: list of arguments to be sent to the Netmiko method.
- ``kwargs``: dictionary of arguments to be sent to the Netmiko method.
kwargs
Key-value dictionary with the connection details (when not running
under a Proxy Minion).
'''
kwargs = clean_kwargs(**kwargs)
if 'netmiko.conn' in __proxy__:
conn = __proxy__['netmiko.conn']()
else:
conn, kwargs = _prepare_connection(**kwargs)
ret = []
for method in methods:
# Explicit unpacking
method_name = method['name']
method_args = method.get('args', [])
method_kwargs = method.get('kwargs', [])
ret.append(getattr(conn, method_name)(*method_args, **method_kwargs))
if 'netmiko.conn' not in __proxy__:
conn.disconnect()
return ret | [
"def",
"multi_call",
"(",
"*",
"methods",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
"=",
"clean_kwargs",
"(",
"*",
"*",
"kwargs",
")",
"if",
"'netmiko.conn'",
"in",
"__proxy__",
":",
"conn",
"=",
"__proxy__",
"[",
"'netmiko.conn'",
"]",
"(",
")",
"else",
":",
"conn",
",",
"kwargs",
"=",
"_prepare_connection",
"(",
"*",
"*",
"kwargs",
")",
"ret",
"=",
"[",
"]",
"for",
"method",
"in",
"methods",
":",
"# Explicit unpacking",
"method_name",
"=",
"method",
"[",
"'name'",
"]",
"method_args",
"=",
"method",
".",
"get",
"(",
"'args'",
",",
"[",
"]",
")",
"method_kwargs",
"=",
"method",
".",
"get",
"(",
"'kwargs'",
",",
"[",
"]",
")",
"ret",
".",
"append",
"(",
"getattr",
"(",
"conn",
",",
"method_name",
")",
"(",
"*",
"method_args",
",",
"*",
"*",
"method_kwargs",
")",
")",
"if",
"'netmiko.conn'",
"not",
"in",
"__proxy__",
":",
"conn",
".",
"disconnect",
"(",
")",
"return",
"ret"
] | 34.833333 | [
0.02857142857142857,
0.2857142857142857,
0.02564102564102564,
0,
0.18181818181818182,
0.03636363636363636,
0,
0.045454545454545456,
0.04225352112676056,
0.0379746835443038,
0,
0.2,
0.05405405405405406,
0.1,
0.2857142857142857,
0.05714285714285714,
0.05714285714285714,
0.047619047619047616,
0.2222222222222222,
0.038461538461538464,
0.16666666666666666,
0.07692307692307693,
0.07142857142857142,
0.05555555555555555,
0.045454545454545456,
0.041666666666666664,
0.025974025974025976,
0.05128205128205128,
0.08,
0.14285714285714285
] |
def create_milestone(self, title, state=None, description=None,
due_on=None):
"""Create a milestone for this repository.
:param str title: (required), title of the milestone
:param str state: (optional), state of the milestone, accepted
values: ('open', 'closed'), default: 'open'
:param str description: (optional), description of the milestone
:param str due_on: (optional), ISO 8601 formatted due date
:returns: :class:`Milestone <github3.issues.milestone.Milestone>` if
successful, otherwise None
"""
url = self._build_url('milestones', base_url=self._api)
if state not in ('open', 'closed'):
state = None
data = {'title': title, 'state': state,
'description': description, 'due_on': due_on}
self._remove_none(data)
json = None
if data:
json = self._json(self._post(url, data=data), 201)
return Milestone(json, self) if json else None | [
"def",
"create_milestone",
"(",
"self",
",",
"title",
",",
"state",
"=",
"None",
",",
"description",
"=",
"None",
",",
"due_on",
"=",
"None",
")",
":",
"url",
"=",
"self",
".",
"_build_url",
"(",
"'milestones'",
",",
"base_url",
"=",
"self",
".",
"_api",
")",
"if",
"state",
"not",
"in",
"(",
"'open'",
",",
"'closed'",
")",
":",
"state",
"=",
"None",
"data",
"=",
"{",
"'title'",
":",
"title",
",",
"'state'",
":",
"state",
",",
"'description'",
":",
"description",
",",
"'due_on'",
":",
"due_on",
"}",
"self",
".",
"_remove_none",
"(",
"data",
")",
"json",
"=",
"None",
"if",
"data",
":",
"json",
"=",
"self",
".",
"_json",
"(",
"self",
".",
"_post",
"(",
"url",
",",
"data",
"=",
"data",
")",
",",
"201",
")",
"return",
"Milestone",
"(",
"json",
",",
"self",
")",
"if",
"json",
"else",
"None"
] | 46.318182 | [
0.031746031746031744,
0.13157894736842105,
0.04,
0,
0.05,
0.04285714285714286,
0.03636363636363636,
0.041666666666666664,
0.045454545454545456,
0.11842105263157894,
0.05263157894736842,
0.18181818181818182,
0.031746031746031744,
0.046511627906976744,
0.08333333333333333,
0.06382978723404255,
0.04918032786885246,
0.06451612903225806,
0.10526315789473684,
0.125,
0.03225806451612903,
0.037037037037037035
] |
def reset(self):
'''Resets the stream pointer to the beginning of the file.'''
if self.__row_number > self.__sample_size:
self.__parser.reset()
self.__extract_sample()
self.__extract_headers()
self.__row_number = 0 | [
"def",
"reset",
"(",
"self",
")",
":",
"if",
"self",
".",
"__row_number",
">",
"self",
".",
"__sample_size",
":",
"self",
".",
"__parser",
".",
"reset",
"(",
")",
"self",
".",
"__extract_sample",
"(",
")",
"self",
".",
"__extract_headers",
"(",
")",
"self",
".",
"__row_number",
"=",
"0"
] | 38.285714 | [
0.0625,
0.028985507246376812,
0.04,
0.06060606060606061,
0.05714285714285714,
0.05555555555555555,
0.06896551724137931
] |
def main(argv=None):
    """Parse commandline arguments and run the tool.

    :param argv: the commandline arguments; defaults to ``sys.argv[1:]``
                 evaluated at call time (the original default captured
                 ``sys.argv`` once at import time, a known pitfall).
    :type argv: list | None
    :returns: None
    :rtype: None
    :raises: None
    """
    if argv is None:
        argv = sys.argv[1:]
    parser = setup_argparse()
    args = parser.parse_args(argv)
    if args.gendochelp:
        # Delegate to gendoc's own help output under its usual program name.
        sys.argv[0] = 'gendoc.py'
        genparser = gendoc.setup_parser()
        genparser.print_help()
        sys.exit(0)
    # Parenthesized single-argument print works on both Python 2 and 3;
    # the bare-statement form is a SyntaxError on Python 3.
    print('Preparing output directories')
    print('=' * 80)
    for odir in args.output:
        prepare_dir(odir, not args.nodelete)
    print('\nRunning gendoc')
    print('=' * 80)
    for i, idir in enumerate(args.input):
        # Reuse the last output directory when inputs outnumber outputs.
        odir = args.output[i] if i < len(args.output) else args.output[-1]
        run_gendoc(idir, odir, args.gendocargs)
"def",
"main",
"(",
"argv",
"=",
"sys",
".",
"argv",
"[",
"1",
":",
"]",
")",
":",
"parser",
"=",
"setup_argparse",
"(",
")",
"args",
"=",
"parser",
".",
"parse_args",
"(",
"argv",
")",
"if",
"args",
".",
"gendochelp",
":",
"sys",
".",
"argv",
"[",
"0",
"]",
"=",
"'gendoc.py'",
"genparser",
"=",
"gendoc",
".",
"setup_parser",
"(",
")",
"genparser",
".",
"print_help",
"(",
")",
"sys",
".",
"exit",
"(",
"0",
")",
"print",
"'Preparing output directories'",
"print",
"'='",
"*",
"80",
"for",
"odir",
"in",
"args",
".",
"output",
":",
"prepare_dir",
"(",
"odir",
",",
"not",
"args",
".",
"nodelete",
")",
"print",
"'\\nRunning gendoc'",
"print",
"'='",
"*",
"80",
"for",
"i",
",",
"idir",
"in",
"enumerate",
"(",
"args",
".",
"input",
")",
":",
"if",
"i",
">=",
"len",
"(",
"args",
".",
"output",
")",
":",
"odir",
"=",
"args",
".",
"output",
"[",
"-",
"1",
"]",
"else",
":",
"odir",
"=",
"args",
".",
"output",
"[",
"i",
"]",
"run_gendoc",
"(",
"idir",
",",
"odir",
",",
"args",
".",
"gendocargs",
")"
] | 27.928571 | [
0.03571428571428571,
0.0392156862745098,
0,
0.06976744186046512,
0.15,
0.16666666666666666,
0.1875,
0.17647058823529413,
0.2857142857142857,
0.06896551724137931,
0.058823529411764705,
0.08695652173913043,
0.06060606060606061,
0.04878048780487805,
0.06666666666666667,
0.10526315789473684,
0.05,
0.125,
0.07142857142857142,
0.045454545454545456,
0.07142857142857142,
0.125,
0.04878048780487805,
0.06060606060606061,
0.058823529411764705,
0.15384615384615385,
0.06060606060606061,
0.0425531914893617
] |