repo (string, 7-55 chars) | path (string, 4-127 chars) | func_name (string, 1-88 chars) | original_string (string, 75-19.8k chars) | language (1 class: python) | code (string, 75-19.8k chars) | code_tokens (sequence, 20-707 tokens) | docstring (string, 3-17.3k chars) | docstring_tokens (sequence, 3-222 tokens) | sha (string, 40 chars) | url (string, 87-242 chars) | partition (1 class: train) | idx (int64, 0-252k) |
---|---|---|---|---|---|---|---|---|---|---|---|---|
cfobel/si-prefix | si_prefix/__init__.py | si_format | def si_format(value, precision=1, format_str=u'{value} {prefix}',
exp_format_str=u'{value}e{expof10}'):
'''
Format value to string with SI prefix, using the specified precision.
Parameters
----------
value : int, float
Input value.
precision : int
Number of digits after decimal place to include.
format_str : str or unicode
Format string where ``{prefix}`` and ``{value}`` represent the SI
prefix and the value (scaled according to the prefix), respectively.
The default format matches the `SI prefix style`_ format.
exp_format_str : str or unicode
Format string where ``{expof10}`` and ``{value}`` represent the
exponent of 10 and the value (scaled according to the exponent of 10),
respectively. This format is used if the absolute exponent of 10 value
is greater than 24.
Returns
-------
unicode
:data:`value` formatted according to the `SI prefix style`_.
Examples
--------
For example, with `precision=2`:
.. code-block:: python
1e-27 --> 1.00e-27
1.764e-24 --> 1.76 y
7.4088e-23 --> 74.09 y
3.1117e-21 --> 3.11 z
1.30691e-19 --> 130.69 z
5.48903e-18 --> 5.49 a
2.30539e-16 --> 230.54 a
9.68265e-15 --> 9.68 f
4.06671e-13 --> 406.67 f
1.70802e-11 --> 17.08 p
7.17368e-10 --> 717.37 p
3.01295e-08 --> 30.13 n
1.26544e-06 --> 1.27 u
5.31484e-05 --> 53.15 u
0.00223223 --> 2.23 m
0.0937537 --> 93.75 m
3.93766 --> 3.94
165.382 --> 165.38
6946.03 --> 6.95 k
291733 --> 291.73 k
1.22528e+07 --> 12.25 M
5.14617e+08 --> 514.62 M
2.16139e+10 --> 21.61 G
9.07785e+11 --> 907.78 G
3.8127e+13 --> 38.13 T
1.60133e+15 --> 1.60 P
6.7256e+16 --> 67.26 P
2.82475e+18 --> 2.82 E
1.1864e+20 --> 118.64 E
4.98286e+21 --> 4.98 Z
2.0928e+23 --> 209.28 Z
8.78977e+24 --> 8.79 Y
3.6917e+26 --> 369.17 Y
1.55051e+28 --> 15.51e+27
6.51216e+29 --> 651.22e+27
.. versionchanged:: 1.0
Use unicode string for :data:`format_str` and SI value format string to
support micro (i.e., µ) character, and change return type to unicode
string.
.. seealso::
`Issue #4`_.
.. _`Issue #4`: https://github.com/cfobel/si-prefix/issues/4
.. _SI prefix style:
http://physics.nist.gov/cuu/Units/checklist.html
'''
svalue, expof10 = split(value, precision)
value_format = u'%%.%df' % precision
value_str = value_format % svalue
try:
return format_str.format(value=value_str,
prefix=prefix(expof10).strip())
except ValueError:
sign = ''
if expof10 > 0:
sign = "+"
return exp_format_str.format(value=value_str,
expof10=''.join([sign, str(expof10)])) | python | def si_format(value, precision=1, format_str=u'{value} {prefix}',
exp_format_str=u'{value}e{expof10}'):
'''
Format value to string with SI prefix, using the specified precision.
Parameters
----------
value : int, float
Input value.
precision : int
Number of digits after decimal place to include.
format_str : str or unicode
Format string where ``{prefix}`` and ``{value}`` represent the SI
prefix and the value (scaled according to the prefix), respectively.
The default format matches the `SI prefix style`_ format.
exp_format_str : str or unicode
Format string where ``{expof10}`` and ``{value}`` represent the
exponent of 10 and the value (scaled according to the exponent of 10),
respectively. This format is used if the absolute exponent of 10 value
is greater than 24.
Returns
-------
unicode
:data:`value` formatted according to the `SI prefix style`_.
Examples
--------
For example, with `precision=2`:
.. code-block:: python
1e-27 --> 1.00e-27
1.764e-24 --> 1.76 y
7.4088e-23 --> 74.09 y
3.1117e-21 --> 3.11 z
1.30691e-19 --> 130.69 z
5.48903e-18 --> 5.49 a
2.30539e-16 --> 230.54 a
9.68265e-15 --> 9.68 f
4.06671e-13 --> 406.67 f
1.70802e-11 --> 17.08 p
7.17368e-10 --> 717.37 p
3.01295e-08 --> 30.13 n
1.26544e-06 --> 1.27 u
5.31484e-05 --> 53.15 u
0.00223223 --> 2.23 m
0.0937537 --> 93.75 m
3.93766 --> 3.94
165.382 --> 165.38
6946.03 --> 6.95 k
291733 --> 291.73 k
1.22528e+07 --> 12.25 M
5.14617e+08 --> 514.62 M
2.16139e+10 --> 21.61 G
9.07785e+11 --> 907.78 G
3.8127e+13 --> 38.13 T
1.60133e+15 --> 1.60 P
6.7256e+16 --> 67.26 P
2.82475e+18 --> 2.82 E
1.1864e+20 --> 118.64 E
4.98286e+21 --> 4.98 Z
2.0928e+23 --> 209.28 Z
8.78977e+24 --> 8.79 Y
3.6917e+26 --> 369.17 Y
1.55051e+28 --> 15.51e+27
6.51216e+29 --> 651.22e+27
.. versionchanged:: 1.0
Use unicode string for :data:`format_str` and SI value format string to
support micro (i.e., µ) character, and change return type to unicode
string.
.. seealso::
`Issue #4`_.
.. _`Issue #4`: https://github.com/cfobel/si-prefix/issues/4
.. _SI prefix style:
http://physics.nist.gov/cuu/Units/checklist.html
'''
svalue, expof10 = split(value, precision)
value_format = u'%%.%df' % precision
value_str = value_format % svalue
try:
return format_str.format(value=value_str,
prefix=prefix(expof10).strip())
except ValueError:
sign = ''
if expof10 > 0:
sign = "+"
return exp_format_str.format(value=value_str,
expof10=''.join([sign, str(expof10)])) | [
"def",
"si_format",
"(",
"value",
",",
"precision",
"=",
"1",
",",
"format_str",
"=",
"u'{value} {prefix}'",
",",
"exp_format_str",
"=",
"u'{value}e{expof10}'",
")",
":",
"svalue",
",",
"expof10",
"=",
"split",
"(",
"value",
",",
"precision",
")",
"value_format",
"=",
"u'%%.%df'",
"%",
"precision",
"value_str",
"=",
"value_format",
"%",
"svalue",
"try",
":",
"return",
"format_str",
".",
"format",
"(",
"value",
"=",
"value_str",
",",
"prefix",
"=",
"prefix",
"(",
"expof10",
")",
".",
"strip",
"(",
")",
")",
"except",
"ValueError",
":",
"sign",
"=",
"''",
"if",
"expof10",
">",
"0",
":",
"sign",
"=",
"\"+\"",
"return",
"exp_format_str",
".",
"format",
"(",
"value",
"=",
"value_str",
",",
"expof10",
"=",
"''",
".",
"join",
"(",
"[",
"sign",
",",
"str",
"(",
"expof10",
")",
"]",
")",
")"
] | Format value to string with SI prefix, using the specified precision.
Parameters
----------
value : int, float
Input value.
precision : int
Number of digits after decimal place to include.
format_str : str or unicode
Format string where ``{prefix}`` and ``{value}`` represent the SI
prefix and the value (scaled according to the prefix), respectively.
The default format matches the `SI prefix style`_ format.
exp_format_str : str or unicode
Format string where ``{expof10}`` and ``{value}`` represent the
exponent of 10 and the value (scaled according to the exponent of 10),
respectively. This format is used if the absolute exponent of 10 value
is greater than 24.
Returns
-------
unicode
:data:`value` formatted according to the `SI prefix style`_.
Examples
--------
For example, with `precision=2`:
.. code-block:: python
1e-27 --> 1.00e-27
1.764e-24 --> 1.76 y
7.4088e-23 --> 74.09 y
3.1117e-21 --> 3.11 z
1.30691e-19 --> 130.69 z
5.48903e-18 --> 5.49 a
2.30539e-16 --> 230.54 a
9.68265e-15 --> 9.68 f
4.06671e-13 --> 406.67 f
1.70802e-11 --> 17.08 p
7.17368e-10 --> 717.37 p
3.01295e-08 --> 30.13 n
1.26544e-06 --> 1.27 u
5.31484e-05 --> 53.15 u
0.00223223 --> 2.23 m
0.0937537 --> 93.75 m
3.93766 --> 3.94
165.382 --> 165.38
6946.03 --> 6.95 k
291733 --> 291.73 k
1.22528e+07 --> 12.25 M
5.14617e+08 --> 514.62 M
2.16139e+10 --> 21.61 G
9.07785e+11 --> 907.78 G
3.8127e+13 --> 38.13 T
1.60133e+15 --> 1.60 P
6.7256e+16 --> 67.26 P
2.82475e+18 --> 2.82 E
1.1864e+20 --> 118.64 E
4.98286e+21 --> 4.98 Z
2.0928e+23 --> 209.28 Z
8.78977e+24 --> 8.79 Y
3.6917e+26 --> 369.17 Y
1.55051e+28 --> 15.51e+27
6.51216e+29 --> 651.22e+27
.. versionchanged:: 1.0
Use unicode string for :data:`format_str` and SI value format string to
support micro (i.e., µ) character, and change return type to unicode
string.
.. seealso::
`Issue #4`_.
.. _`Issue #4`: https://github.com/cfobel/si-prefix/issues/4
.. _SI prefix style:
http://physics.nist.gov/cuu/Units/checklist.html | [
"Format",
"value",
"to",
"string",
"with",
"SI",
"prefix",
"using",
"the",
"specified",
"precision",
"."
] | 274fdf47f65d87d0b7a2e3c80f267db63d042c59 | https://github.com/cfobel/si-prefix/blob/274fdf47f65d87d0b7a2e3c80f267db63d042c59/si_prefix/__init__.py#L128-L221 | train | 1,000 |
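A minimal usage sketch for the `si_format` function documented in the row above (assuming the `si_prefix` package is installed); the expected outputs are taken from the precision=2 table in its docstring:

```python
# Minimal usage sketch for si_format (assumes the si_prefix package is installed).
# Expected outputs are copied from the precision=2 examples in the docstring above.
from si_prefix import si_format

print(si_format(1.764e-24, precision=2))    # '1.76 y'
print(si_format(6946.03, precision=2))      # '6.95 k'
print(si_format(1.55051e+28, precision=2))  # '15.51e+27' (exponent form beyond the Y prefix)
```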
cfobel/si-prefix | si_prefix/__init__.py | si_parse | def si_parse(value):
'''
Parse a value expressed using SI prefix units to a floating point number.
Parameters
----------
value : str or unicode
Value expressed using SI prefix units (as returned by :func:`si_format`
function).
.. versionchanged:: 1.0
Use unicode string for SI unit to support micro (i.e., µ) character.
.. seealso::
`Issue #4`_.
.. _`Issue #4`: https://github.com/cfobel/si-prefix/issues/4
'''
CRE_10E_NUMBER = re.compile(r'^\s*(?P<integer>[\+\-]?\d+)?'
r'(?P<fraction>.\d+)?\s*([eE]\s*'
r'(?P<expof10>[\+\-]?\d+))?$')
CRE_SI_NUMBER = re.compile(r'^\s*(?P<number>(?P<integer>[\+\-]?\d+)?'
r'(?P<fraction>.\d+)?)\s*'
u'(?P<si_unit>[%s])?\s*$' % SI_PREFIX_UNITS)
match = CRE_10E_NUMBER.match(value)
if match:
# Can be parsed using `float`.
assert(match.group('integer') is not None or
match.group('fraction') is not None)
return float(value)
match = CRE_SI_NUMBER.match(value)
assert(match.group('integer') is not None or
match.group('fraction') is not None)
d = match.groupdict()
si_unit = d['si_unit'] if d['si_unit'] else ' '
prefix_levels = (len(SI_PREFIX_UNITS) - 1) // 2
scale = 10 ** (3 * (SI_PREFIX_UNITS.index(si_unit) - prefix_levels))
return float(d['number']) * scale | python | def si_parse(value):
'''
Parse a value expressed using SI prefix units to a floating point number.
Parameters
----------
value : str or unicode
Value expressed using SI prefix units (as returned by :func:`si_format`
function).
.. versionchanged:: 1.0
Use unicode string for SI unit to support micro (i.e., µ) character.
.. seealso::
`Issue #4`_.
.. _`Issue #4`: https://github.com/cfobel/si-prefix/issues/4
'''
CRE_10E_NUMBER = re.compile(r'^\s*(?P<integer>[\+\-]?\d+)?'
r'(?P<fraction>.\d+)?\s*([eE]\s*'
r'(?P<expof10>[\+\-]?\d+))?$')
CRE_SI_NUMBER = re.compile(r'^\s*(?P<number>(?P<integer>[\+\-]?\d+)?'
r'(?P<fraction>.\d+)?)\s*'
u'(?P<si_unit>[%s])?\s*$' % SI_PREFIX_UNITS)
match = CRE_10E_NUMBER.match(value)
if match:
# Can be parsed using `float`.
assert(match.group('integer') is not None or
match.group('fraction') is not None)
return float(value)
match = CRE_SI_NUMBER.match(value)
assert(match.group('integer') is not None or
match.group('fraction') is not None)
d = match.groupdict()
si_unit = d['si_unit'] if d['si_unit'] else ' '
prefix_levels = (len(SI_PREFIX_UNITS) - 1) // 2
scale = 10 ** (3 * (SI_PREFIX_UNITS.index(si_unit) - prefix_levels))
return float(d['number']) * scale | [
"def",
"si_parse",
"(",
"value",
")",
":",
"CRE_10E_NUMBER",
"=",
"re",
".",
"compile",
"(",
"r'^\\s*(?P<integer>[\\+\\-]?\\d+)?'",
"r'(?P<fraction>.\\d+)?\\s*([eE]\\s*'",
"r'(?P<expof10>[\\+\\-]?\\d+))?$'",
")",
"CRE_SI_NUMBER",
"=",
"re",
".",
"compile",
"(",
"r'^\\s*(?P<number>(?P<integer>[\\+\\-]?\\d+)?'",
"r'(?P<fraction>.\\d+)?)\\s*'",
"u'(?P<si_unit>[%s])?\\s*$'",
"%",
"SI_PREFIX_UNITS",
")",
"match",
"=",
"CRE_10E_NUMBER",
".",
"match",
"(",
"value",
")",
"if",
"match",
":",
"# Can be parse using `float`.",
"assert",
"(",
"match",
".",
"group",
"(",
"'integer'",
")",
"is",
"not",
"None",
"or",
"match",
".",
"group",
"(",
"'fraction'",
")",
"is",
"not",
"None",
")",
"return",
"float",
"(",
"value",
")",
"match",
"=",
"CRE_SI_NUMBER",
".",
"match",
"(",
"value",
")",
"assert",
"(",
"match",
".",
"group",
"(",
"'integer'",
")",
"is",
"not",
"None",
"or",
"match",
".",
"group",
"(",
"'fraction'",
")",
"is",
"not",
"None",
")",
"d",
"=",
"match",
".",
"groupdict",
"(",
")",
"si_unit",
"=",
"d",
"[",
"'si_unit'",
"]",
"if",
"d",
"[",
"'si_unit'",
"]",
"else",
"' '",
"prefix_levels",
"=",
"(",
"len",
"(",
"SI_PREFIX_UNITS",
")",
"-",
"1",
")",
"//",
"2",
"scale",
"=",
"10",
"**",
"(",
"3",
"*",
"(",
"SI_PREFIX_UNITS",
".",
"index",
"(",
"si_unit",
")",
"-",
"prefix_levels",
")",
")",
"return",
"float",
"(",
"d",
"[",
"'number'",
"]",
")",
"*",
"scale"
] | Parse a value expressed using SI prefix units to a floating point number.
Parameters
----------
value : str or unicode
Value expressed using SI prefix units (as returned by :func:`si_format`
function).
.. versionchanged:: 1.0
Use unicode string for SI unit to support micro (i.e., µ) character.
.. seealso::
`Issue #4`_.
.. _`Issue #4`: https://github.com/cfobel/si-prefix/issues/4 | [
"Parse",
"a",
"value",
"expressed",
"using",
"SI",
"prefix",
"units",
"to",
"a",
"floating",
"point",
"number",
"."
] | 274fdf47f65d87d0b7a2e3c80f267db63d042c59 | https://github.com/cfobel/si-prefix/blob/274fdf47f65d87d0b7a2e3c80f267db63d042c59/si_prefix/__init__.py#L224-L263 | train | 1,001 |
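Because `si_parse` is documented as accepting the strings produced by `si_format`, a short round-trip sketch (same assumption: the `si_prefix` package is installed) makes the relationship concrete:

```python
# Hedged round-trip sketch: si_parse should undo si_format up to the chosen precision.
from si_prefix import si_format, si_parse

text = si_format(0.00223223, precision=2)  # '2.23 m' per the docstring table above
print(text)
print(si_parse(text))                      # 0.00223, approximately the original value
print(si_parse('3.11 z'))                  # 3.11e-21
```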
tuomas2/automate | src/automate/extensions/rpc/rpc.py | ExternalApi.set_status | def set_status(self, name, status):
"""
Set sensor ``name`` status to ``status``.
"""
getattr(self.system, name).status = status
return True | python | def set_status(self, name, status):
"""
Set sensor ``name`` status to ``status``.
"""
getattr(self.system, name).status = status
return True | [
"def",
"set_status",
"(",
"self",
",",
"name",
",",
"status",
")",
":",
"getattr",
"(",
"self",
".",
"system",
",",
"name",
")",
".",
"status",
"=",
"status",
"return",
"True"
] | Set sensor ``name`` status to ``status``. | [
"Set",
"sensor",
"name",
"status",
"to",
"status",
"."
] | d8a8cd03cd0da047e033a2d305f3f260f8c4e017 | https://github.com/tuomas2/automate/blob/d8a8cd03cd0da047e033a2d305f3f260f8c4e017/src/automate/extensions/rpc/rpc.py#L31-L36 | train | 1,002 |
tuomas2/automate | src/automate/extensions/rpc/rpc.py | ExternalApi.toggle_object_status | def toggle_object_status(self, objname):
"""
Toggle boolean-valued sensor status between ``True`` and ``False``.
"""
o = getattr(self.system, objname)
o.status = not o.status
self.system.flush()
return o.status | python | def toggle_object_status(self, objname):
"""
Toggle boolean-valued sensor status between ``True`` and ``False``.
"""
o = getattr(self.system, objname)
o.status = not o.status
self.system.flush()
return o.status | [
"def",
"toggle_object_status",
"(",
"self",
",",
"objname",
")",
":",
"o",
"=",
"getattr",
"(",
"self",
".",
"system",
",",
"objname",
")",
"o",
".",
"status",
"=",
"not",
"o",
".",
"status",
"self",
".",
"system",
".",
"flush",
"(",
")",
"return",
"o",
".",
"status"
] | Toggle boolean-valued sensor status between ``True`` and ``False``. | [
"Toggle",
"boolean",
"-",
"valued",
"sensor",
"status",
"between",
"True",
"and",
"False",
"."
] | d8a8cd03cd0da047e033a2d305f3f260f8c4e017 | https://github.com/tuomas2/automate/blob/d8a8cd03cd0da047e033a2d305f3f260f8c4e017/src/automate/extensions/rpc/rpc.py#L52-L59 | train | 1,003 |
tuomas2/automate | src/automate/extensions/rpc/rpc.py | ExternalApi.log | def log(self):
"""
Return recent log entries as a string.
"""
logserv = self.system.request_service('LogStoreService')
return logserv.lastlog(html=False) | python | def log(self):
"""
Return recent log entries as a string.
"""
logserv = self.system.request_service('LogStoreService')
return logserv.lastlog(html=False) | [
"def",
"log",
"(",
"self",
")",
":",
"logserv",
"=",
"self",
".",
"system",
".",
"request_service",
"(",
"'LogStoreService'",
")",
"return",
"logserv",
".",
"lastlog",
"(",
"html",
"=",
"False",
")"
] | Return recent log entries as a string. | [
"Return",
"recent",
"log",
"entries",
"as",
"a",
"string",
"."
] | d8a8cd03cd0da047e033a2d305f3f260f8c4e017 | https://github.com/tuomas2/automate/blob/d8a8cd03cd0da047e033a2d305f3f260f8c4e017/src/automate/extensions/rpc/rpc.py#L93-L98 | train | 1,004 |
tuomas2/automate | src/automate/system.py | System.load_or_create | def load_or_create(cls, filename=None, no_input=False, create_new=False, **kwargs):
"""
Load system from a dump, if dump file exists, or create a new system if it does not exist.
"""
parser = argparse.ArgumentParser()
parser.add_argument('--no_input', action='store_true')
parser.add_argument('--create_new', action='store_true')
args = parser.parse_args()
if args.no_input:
print('Parameter --no_input was given')
no_input = True
if args.create_new:
print('Parameter --create_new was given')
create_new = True
no_input = True
def savefile_more_recent():
time_savefile = os.path.getmtime(filename)
time_program = os.path.getmtime(sys.argv[0])
return time_savefile > time_program
def load_pickle():
with open(filename, 'rb') as of:
statefile_version, data = pickle.load(of)
if statefile_version != STATEFILE_VERSION:
raise RuntimeError(f'Wrong statefile version, please remove state file {filename}')
return data
def load():
print('Loading %s' % filename)
obj_list, config = load_pickle()
system = System(load_state=obj_list, filename=filename, **kwargs)
return system
def create():
print('Creating new system')
config = None
if filename:
try:
obj_list, config = load_pickle()
except FileNotFoundError:
config = None
return cls(filename=filename, load_config=config, **kwargs)
if filename and os.path.isfile(filename):
if savefile_more_recent() and not create_new:
return load()
else:
if no_input:
print('Program file more recent. Loading that instead.')
return create()
while True:
answer = input('Program file more recent. Do you want to load it? (y/n) ')
if answer == 'y':
return create()
elif answer == 'n':
return load()
else:
return create() | python | def load_or_create(cls, filename=None, no_input=False, create_new=False, **kwargs):
"""
Load system from a dump, if dump file exists, or create a new system if it does not exist.
"""
parser = argparse.ArgumentParser()
parser.add_argument('--no_input', action='store_true')
parser.add_argument('--create_new', action='store_true')
args = parser.parse_args()
if args.no_input:
print('Parameter --no_input was given')
no_input = True
if args.create_new:
print('Parameter --create_new was given')
create_new = True
no_input = True
def savefile_more_recent():
time_savefile = os.path.getmtime(filename)
time_program = os.path.getmtime(sys.argv[0])
return time_savefile > time_program
def load_pickle():
with open(filename, 'rb') as of:
statefile_version, data = pickle.load(of)
if statefile_version != STATEFILE_VERSION:
raise RuntimeError(f'Wrong statefile version, please remove state file {filename}')
return data
def load():
print('Loading %s' % filename)
obj_list, config = load_pickle()
system = System(load_state=obj_list, filename=filename, **kwargs)
return system
def create():
print('Creating new system')
config = None
if filename:
try:
obj_list, config = load_pickle()
except FileNotFoundError:
config = None
return cls(filename=filename, load_config=config, **kwargs)
if filename and os.path.isfile(filename):
if savefile_more_recent() and not create_new:
return load()
else:
if no_input:
print('Program file more recent. Loading that instead.')
return create()
while True:
answer = input('Program file more recent. Do you want to load it? (y/n) ')
if answer == 'y':
return create()
elif answer == 'n':
return load()
else:
return create() | [
"def",
"load_or_create",
"(",
"cls",
",",
"filename",
"=",
"None",
",",
"no_input",
"=",
"False",
",",
"create_new",
"=",
"False",
",",
"*",
"*",
"kwargs",
")",
":",
"parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
")",
"parser",
".",
"add_argument",
"(",
"'--no_input'",
",",
"action",
"=",
"'store_true'",
")",
"parser",
".",
"add_argument",
"(",
"'--create_new'",
",",
"action",
"=",
"'store_true'",
")",
"args",
"=",
"parser",
".",
"parse_args",
"(",
")",
"if",
"args",
".",
"no_input",
":",
"print",
"(",
"'Parameter --no_input was given'",
")",
"no_input",
"=",
"True",
"if",
"args",
".",
"create_new",
":",
"print",
"(",
"'Parameter --create_new was given'",
")",
"create_new",
"=",
"True",
"no_input",
"=",
"True",
"def",
"savefile_more_recent",
"(",
")",
":",
"time_savefile",
"=",
"os",
".",
"path",
".",
"getmtime",
"(",
"filename",
")",
"time_program",
"=",
"os",
".",
"path",
".",
"getmtime",
"(",
"sys",
".",
"argv",
"[",
"0",
"]",
")",
"return",
"time_savefile",
">",
"time_program",
"def",
"load_pickle",
"(",
")",
":",
"with",
"open",
"(",
"filename",
",",
"'rb'",
")",
"as",
"of",
":",
"statefile_version",
",",
"data",
"=",
"pickle",
".",
"load",
"(",
"of",
")",
"if",
"statefile_version",
"!=",
"STATEFILE_VERSION",
":",
"raise",
"RuntimeError",
"(",
"f'Wrong statefile version, please remove state file {filename}'",
")",
"return",
"data",
"def",
"load",
"(",
")",
":",
"print",
"(",
"'Loading %s'",
"%",
"filename",
")",
"obj_list",
",",
"config",
"=",
"load_pickle",
"(",
")",
"system",
"=",
"System",
"(",
"load_state",
"=",
"obj_list",
",",
"filename",
"=",
"filename",
",",
"*",
"*",
"kwargs",
")",
"return",
"system",
"def",
"create",
"(",
")",
":",
"print",
"(",
"'Creating new system'",
")",
"config",
"=",
"None",
"if",
"filename",
":",
"try",
":",
"obj_list",
",",
"config",
"=",
"load_pickle",
"(",
")",
"except",
"FileNotFoundError",
":",
"config",
"=",
"None",
"return",
"cls",
"(",
"filename",
"=",
"filename",
",",
"load_config",
"=",
"config",
",",
"*",
"*",
"kwargs",
")",
"if",
"filename",
"and",
"os",
".",
"path",
".",
"isfile",
"(",
"filename",
")",
":",
"if",
"savefile_more_recent",
"(",
")",
"and",
"not",
"create_new",
":",
"return",
"load",
"(",
")",
"else",
":",
"if",
"no_input",
":",
"print",
"(",
"'Program file more recent. Loading that instead.'",
")",
"return",
"create",
"(",
")",
"while",
"True",
":",
"answer",
"=",
"input",
"(",
"'Program file more recent. Do you want to load it? (y/n) '",
")",
"if",
"answer",
"==",
"'y'",
":",
"return",
"create",
"(",
")",
"elif",
"answer",
"==",
"'n'",
":",
"return",
"load",
"(",
")",
"else",
":",
"return",
"create",
"(",
")"
] | Load system from a dump, if dump file exists, or create a new system if it does not exist. | [
"Load",
"system",
"from",
"a",
"dump",
"if",
"dump",
"file",
"exists",
"or",
"create",
"a",
"new",
"system",
"if",
"it",
"does",
"not",
"exist",
"."
] | d8a8cd03cd0da047e033a2d305f3f260f8c4e017 | https://github.com/tuomas2/automate/blob/d8a8cd03cd0da047e033a2d305f3f260f8c4e017/src/automate/system.py#L200-L261 | train | 1,005 |
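A hedged sketch of how the `load_or_create` classmethod above is typically invoked; the state filename is hypothetical, and `System` is assumed to be importable from the top-level `automate` package:

```python
# Hedged sketch only: 'home.dump' is a hypothetical state file, and System is
# assumed to be exported by the top-level automate package (it is defined in
# automate/system.py). In practice this is usually called on a System subclass
# that declares the sensors, actuators and programs.
from automate import System

# Loads the pickled state if it is newer than the program file; otherwise it
# creates a fresh system (prompting on the console unless --no_input or
# --create_new is passed on the command line).
system = System.load_or_create(filename='home.dump')
```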
tuomas2/automate | src/automate/system.py | System.cmd_namespace | def cmd_namespace(self):
"""
A read-only property that gives the namespace of the system for evaluating commands.
"""
import automate
ns = dict(list(automate.__dict__.items()) + list(self.namespace.items()))
return ns | python | def cmd_namespace(self):
"""
A read-only property that gives the namespace of the system for evaluating commands.
"""
import automate
ns = dict(list(automate.__dict__.items()) + list(self.namespace.items()))
return ns | [
"def",
"cmd_namespace",
"(",
"self",
")",
":",
"import",
"automate",
"ns",
"=",
"dict",
"(",
"list",
"(",
"automate",
".",
"__dict__",
".",
"items",
"(",
")",
")",
"+",
"list",
"(",
"self",
".",
"namespace",
".",
"items",
"(",
")",
")",
")",
"return",
"ns"
] | A read-only property that gives the namespace of the system for evaluating commands. | [
"A",
"read",
"-",
"only",
"property",
"that",
"gives",
"the",
"namespace",
"of",
"the",
"system",
"for",
"evaluating",
"commands",
"."
] | d8a8cd03cd0da047e033a2d305f3f260f8c4e017 | https://github.com/tuomas2/automate/blob/d8a8cd03cd0da047e033a2d305f3f260f8c4e017/src/automate/system.py#L287-L293 | train | 1,006 |
tuomas2/automate | src/automate/system.py | System.services_by_name | def services_by_name(self):
"""
A property that gives a dictionary that contains services as values and their names as keys.
"""
srvs = defaultdict(list)
for i in self.services:
srvs[i.__class__.__name__].append(i)
return srvs | python | def services_by_name(self):
"""
A property that gives a dictionary that contains services as values and their names as keys.
"""
srvs = defaultdict(list)
for i in self.services:
srvs[i.__class__.__name__].append(i)
return srvs | [
"def",
"services_by_name",
"(",
"self",
")",
":",
"srvs",
"=",
"defaultdict",
"(",
"list",
")",
"for",
"i",
"in",
"self",
".",
"services",
":",
"srvs",
"[",
"i",
".",
"__class__",
".",
"__name__",
"]",
".",
"append",
"(",
"i",
")",
"return",
"srvs"
] | A property that gives a dictionary that contains services as values and their names as keys. | [
"A",
"property",
"that",
"gives",
"a",
"dictionary",
"that",
"contains",
"services",
"as",
"values",
"and",
"their",
"names",
"as",
"keys",
"."
] | d8a8cd03cd0da047e033a2d305f3f260f8c4e017 | https://github.com/tuomas2/automate/blob/d8a8cd03cd0da047e033a2d305f3f260f8c4e017/src/automate/system.py#L323-L330 | train | 1,007 |
tuomas2/automate | src/automate/system.py | System.name_to_system_object | def name_to_system_object(self, name):
"""
Give SystemObject instance corresponding to the name
"""
if isinstance(name, str):
if self.allow_name_referencing:
name = name
else:
raise NameError('System.allow_name_referencing is set to False, cannot convert string to name')
elif isinstance(name, Object):
name = str(name)
return self.namespace.get(name, None) | python | def name_to_system_object(self, name):
"""
Give SystemObject instance corresponding to the name
"""
if isinstance(name, str):
if self.allow_name_referencing:
name = name
else:
raise NameError('System.allow_name_referencing is set to False, cannot convert string to name')
elif isinstance(name, Object):
name = str(name)
return self.namespace.get(name, None) | [
"def",
"name_to_system_object",
"(",
"self",
",",
"name",
")",
":",
"if",
"isinstance",
"(",
"name",
",",
"str",
")",
":",
"if",
"self",
".",
"allow_name_referencing",
":",
"name",
"=",
"name",
"else",
":",
"raise",
"NameError",
"(",
"'System.allow_name_referencing is set to False, cannot convert string to name'",
")",
"elif",
"isinstance",
"(",
"name",
",",
"Object",
")",
":",
"name",
"=",
"str",
"(",
"name",
")",
"return",
"self",
".",
"namespace",
".",
"get",
"(",
"name",
",",
"None",
")"
] | Give SystemObject instance corresponding to the name | [
"Give",
"SystemObject",
"instance",
"corresponding",
"to",
"the",
"name"
] | d8a8cd03cd0da047e033a2d305f3f260f8c4e017 | https://github.com/tuomas2/automate/blob/d8a8cd03cd0da047e033a2d305f3f260f8c4e017/src/automate/system.py#L345-L356 | train | 1,008 |
tuomas2/automate | src/automate/system.py | System.register_service_functions | def register_service_functions(self, *funcs):
"""
Register function in the system namespace. Called by Services.
"""
for func in funcs:
self.namespace[func.__name__] = func | python | def register_service_functions(self, *funcs):
"""
Register function in the system namespace. Called by Services.
"""
for func in funcs:
self.namespace[func.__name__] = func | [
"def",
"register_service_functions",
"(",
"self",
",",
"*",
"funcs",
")",
":",
"for",
"func",
"in",
"funcs",
":",
"self",
".",
"namespace",
"[",
"func",
".",
"__name__",
"]",
"=",
"func"
] | Register function in the system namespace. Called by Services. | [
"Register",
"function",
"in",
"the",
"system",
"namespace",
".",
"Called",
"by",
"Services",
"."
] | d8a8cd03cd0da047e033a2d305f3f260f8c4e017 | https://github.com/tuomas2/automate/blob/d8a8cd03cd0da047e033a2d305f3f260f8c4e017/src/automate/system.py#L369-L374 | train | 1,009 |
tuomas2/automate | src/automate/system.py | System.register_service | def register_service(self, service):
"""
Register service into the system. Called by Services.
"""
if service not in self.services:
self.services.append(service) | python | def register_service(self, service):
"""
Register service into the system. Called by Services.
"""
if service not in self.services:
self.services.append(service) | [
"def",
"register_service",
"(",
"self",
",",
"service",
")",
":",
"if",
"service",
"not",
"in",
"self",
".",
"services",
":",
"self",
".",
"services",
".",
"append",
"(",
"service",
")"
] | Register service into the system. Called by Services. | [
"Register",
"service",
"into",
"the",
"system",
".",
"Called",
"by",
"Services",
"."
] | d8a8cd03cd0da047e033a2d305f3f260f8c4e017 | https://github.com/tuomas2/automate/blob/d8a8cd03cd0da047e033a2d305f3f260f8c4e017/src/automate/system.py#L376-L381 | train | 1,010 |
tuomas2/automate | src/automate/system.py | System.cleanup | def cleanup(self):
"""
Clean up before quitting
"""
self.pre_exit_trigger = True
self.logger.info("Shutting down %s, please wait a moment.", self.name)
for t in threading.enumerate():
if isinstance(t, TimerClass):
t.cancel()
self.logger.debug('Timers cancelled')
for i in self.objects:
i.cleanup()
self.logger.debug('Sensors etc cleanups done')
for ser in (i for i in self.services if isinstance(i, AbstractUserService)):
ser.cleanup_system()
self.logger.debug('User services cleaned up')
if self.worker_thread.is_alive():
self.worker_thread.stop()
self.logger.debug('Worker thread really stopped')
for ser in (i for i in self.services if isinstance(i, AbstractSystemService)):
ser.cleanup_system()
self.logger.debug('System services cleaned up')
threads = list(t.name for t in threading.enumerate() if t.is_alive() and not t.daemon)
if threads:
self.logger.info('After cleanup, we have still the following threads '
'running: %s', ', '.join(threads)) | python | def cleanup(self):
"""
Clean up before quitting
"""
self.pre_exit_trigger = True
self.logger.info("Shutting down %s, please wait a moment.", self.name)
for t in threading.enumerate():
if isinstance(t, TimerClass):
t.cancel()
self.logger.debug('Timers cancelled')
for i in self.objects:
i.cleanup()
self.logger.debug('Sensors etc cleanups done')
for ser in (i for i in self.services if isinstance(i, AbstractUserService)):
ser.cleanup_system()
self.logger.debug('User services cleaned up')
if self.worker_thread.is_alive():
self.worker_thread.stop()
self.logger.debug('Worker thread really stopped')
for ser in (i for i in self.services if isinstance(i, AbstractSystemService)):
ser.cleanup_system()
self.logger.debug('System services cleaned up')
threads = list(t.name for t in threading.enumerate() if t.is_alive() and not t.daemon)
if threads:
self.logger.info('After cleanup, we have still the following threads '
'running: %s', ', '.join(threads)) | [
"def",
"cleanup",
"(",
"self",
")",
":",
"self",
".",
"pre_exit_trigger",
"=",
"True",
"self",
".",
"logger",
".",
"info",
"(",
"\"Shutting down %s, please wait a moment.\"",
",",
"self",
".",
"name",
")",
"for",
"t",
"in",
"threading",
".",
"enumerate",
"(",
")",
":",
"if",
"isinstance",
"(",
"t",
",",
"TimerClass",
")",
":",
"t",
".",
"cancel",
"(",
")",
"self",
".",
"logger",
".",
"debug",
"(",
"'Timers cancelled'",
")",
"for",
"i",
"in",
"self",
".",
"objects",
":",
"i",
".",
"cleanup",
"(",
")",
"self",
".",
"logger",
".",
"debug",
"(",
"'Sensors etc cleanups done'",
")",
"for",
"ser",
"in",
"(",
"i",
"for",
"i",
"in",
"self",
".",
"services",
"if",
"isinstance",
"(",
"i",
",",
"AbstractUserService",
")",
")",
":",
"ser",
".",
"cleanup_system",
"(",
")",
"self",
".",
"logger",
".",
"debug",
"(",
"'User services cleaned up'",
")",
"if",
"self",
".",
"worker_thread",
".",
"is_alive",
"(",
")",
":",
"self",
".",
"worker_thread",
".",
"stop",
"(",
")",
"self",
".",
"logger",
".",
"debug",
"(",
"'Worker thread really stopped'",
")",
"for",
"ser",
"in",
"(",
"i",
"for",
"i",
"in",
"self",
".",
"services",
"if",
"isinstance",
"(",
"i",
",",
"AbstractSystemService",
")",
")",
":",
"ser",
".",
"cleanup_system",
"(",
")",
"self",
".",
"logger",
".",
"debug",
"(",
"'System services cleaned up'",
")",
"threads",
"=",
"list",
"(",
"t",
".",
"name",
"for",
"t",
"in",
"threading",
".",
"enumerate",
"(",
")",
"if",
"t",
".",
"is_alive",
"(",
")",
"and",
"not",
"t",
".",
"daemon",
")",
"if",
"threads",
":",
"self",
".",
"logger",
".",
"info",
"(",
"'After cleanup, we have still the following threads '",
"'running: %s'",
",",
"', '",
".",
"join",
"(",
"threads",
")",
")"
] | Clean up before quitting | [
"Clean",
"up",
"before",
"quitting"
] | d8a8cd03cd0da047e033a2d305f3f260f8c4e017 | https://github.com/tuomas2/automate/blob/d8a8cd03cd0da047e033a2d305f3f260f8c4e017/src/automate/system.py#L398-L429 | train | 1,011 |
tuomas2/automate | src/automate/system.py | System.cmd_exec | def cmd_exec(self, cmd):
"""
Execute commands in automate namespace
"""
if not cmd:
return
ns = self.cmd_namespace
import copy
rval = True
nscopy = copy.copy(ns)
try:
r = eval(cmd, ns)
if isinstance(r, SystemObject) and not r.system:
r.setup_system(self)
if callable(r):
r = r()
cmd += "()"
self.logger.info("Eval: %s", cmd)
self.logger.info("Result: %s", r)
except SyntaxError:
r = {}
try:
exec (cmd, ns)
self.logger.info("Exec: %s", cmd)
except ExitException:
raise
except Exception as e:
self.logger.info("Failed to exec cmd %s: %s.", cmd, e)
rval = False
for key, value in list(ns.items()):
if key not in nscopy or not value is nscopy[key]:
if key in self.namespace:
del self.namespace[key]
self.namespace[key] = value
r[key] = value
self.logger.info("Set items in namespace: %s", r)
except ExitException:
raise
except Exception as e:
self.logger.info("Failed to eval cmd %s: %s", cmd, e)
return False
return rval | python | def cmd_exec(self, cmd):
"""
Execute commands in automate namespace
"""
if not cmd:
return
ns = self.cmd_namespace
import copy
rval = True
nscopy = copy.copy(ns)
try:
r = eval(cmd, ns)
if isinstance(r, SystemObject) and not r.system:
r.setup_system(self)
if callable(r):
r = r()
cmd += "()"
self.logger.info("Eval: %s", cmd)
self.logger.info("Result: %s", r)
except SyntaxError:
r = {}
try:
exec (cmd, ns)
self.logger.info("Exec: %s", cmd)
except ExitException:
raise
except Exception as e:
self.logger.info("Failed to exec cmd %s: %s.", cmd, e)
rval = False
for key, value in list(ns.items()):
if key not in nscopy or not value is nscopy[key]:
if key in self.namespace:
del self.namespace[key]
self.namespace[key] = value
r[key] = value
self.logger.info("Set items in namespace: %s", r)
except ExitException:
raise
except Exception as e:
self.logger.info("Failed to eval cmd %s: %s", cmd, e)
return False
return rval | [
"def",
"cmd_exec",
"(",
"self",
",",
"cmd",
")",
":",
"if",
"not",
"cmd",
":",
"return",
"ns",
"=",
"self",
".",
"cmd_namespace",
"import",
"copy",
"rval",
"=",
"True",
"nscopy",
"=",
"copy",
".",
"copy",
"(",
"ns",
")",
"try",
":",
"r",
"=",
"eval",
"(",
"cmd",
",",
"ns",
")",
"if",
"isinstance",
"(",
"r",
",",
"SystemObject",
")",
"and",
"not",
"r",
".",
"system",
":",
"r",
".",
"setup_system",
"(",
"self",
")",
"if",
"callable",
"(",
"r",
")",
":",
"r",
"=",
"r",
"(",
")",
"cmd",
"+=",
"\"()\"",
"self",
".",
"logger",
".",
"info",
"(",
"\"Eval: %s\"",
",",
"cmd",
")",
"self",
".",
"logger",
".",
"info",
"(",
"\"Result: %s\"",
",",
"r",
")",
"except",
"SyntaxError",
":",
"r",
"=",
"{",
"}",
"try",
":",
"exec",
"(",
"cmd",
",",
"ns",
")",
"self",
".",
"logger",
".",
"info",
"(",
"\"Exec: %s\"",
",",
"cmd",
")",
"except",
"ExitException",
":",
"raise",
"except",
"Exception",
"as",
"e",
":",
"self",
".",
"logger",
".",
"info",
"(",
"\"Failed to exec cmd %s: %s.\"",
",",
"cmd",
",",
"e",
")",
"rval",
"=",
"False",
"for",
"key",
",",
"value",
"in",
"list",
"(",
"ns",
".",
"items",
"(",
")",
")",
":",
"if",
"key",
"not",
"in",
"nscopy",
"or",
"not",
"value",
"is",
"nscopy",
"[",
"key",
"]",
":",
"if",
"key",
"in",
"self",
".",
"namespace",
":",
"del",
"self",
".",
"namespace",
"[",
"key",
"]",
"self",
".",
"namespace",
"[",
"key",
"]",
"=",
"value",
"r",
"[",
"key",
"]",
"=",
"value",
"self",
".",
"logger",
".",
"info",
"(",
"\"Set items in namespace: %s\"",
",",
"r",
")",
"except",
"ExitException",
":",
"raise",
"except",
"Exception",
"as",
"e",
":",
"self",
".",
"logger",
".",
"info",
"(",
"\"Failed to eval cmd %s: %s\"",
",",
"cmd",
",",
"e",
")",
"return",
"False",
"return",
"rval"
] | Execute commands in automate namespace | [
"Execute",
"commands",
"in",
"automate",
"namespace"
] | d8a8cd03cd0da047e033a2d305f3f260f8c4e017 | https://github.com/tuomas2/automate/blob/d8a8cd03cd0da047e033a2d305f3f260f8c4e017/src/automate/system.py#L431-L474 | train | 1,012 |
tuomas2/automate | src/automate/services/plantumlserv.py | PlantUMLService.write_puml | def write_puml(self, filename=''):
"""
Writes PUML from the system. If filename is given, stores result in the file.
Otherwise returns result as a string.
"""
def get_type(o):
type = 'program'
if isinstance(o, AbstractSensor):
type = 'sensor'
elif isinstance(o, AbstractActuator):
type = 'actuator'
return type
if filename:
s = open(filename, 'w')
else:
s = io.StringIO()
s.write('@startuml\n')
s.write('skinparam state {\n')
for k, v in list(self.background_colors.items()):
s.write('BackGroundColor<<%s>> %s\n' % (k, v))
s.write('}\n')
for o in self.system.objects:
if isinstance(o, DefaultProgram) or o.hide_in_uml:
continue
if isinstance(o, ProgrammableSystemObject):
s.write('state "%s" as %s <<%s>>\n' % (o, o, get_type(o)))
s.write('%s: %s\n' % (o, o.class_name))
if isinstance(o, AbstractActuator):
for p in reversed(o.program_stack):
s.write('%s: %s :: %s\n' % (o, p, o.program_status.get(p, '-')))
elif hasattr(o, 'status'):
s.write('%s: Status: %s\n' % (o, o.status))
if getattr(o, 'is_program', False):
s.write('%s: Priority: %s\n' % (o, o.priority))
for t in o.actual_triggers:
if isinstance(t, DefaultProgram) or t.hide_in_uml:
continue
s.write('%s -[%s]-> %s\n' % (t, self.arrow_colors['trigger'], o))
for t in o.actual_targets:
if t.hide_in_uml:
continue
if o.active:
color = 'active_target'
else:
color = 'inactive_target'
if getattr(t, 'program', None) == o:
color = 'controlled_target'
s.write('%s -[%s]-> %s\n' % (o, self.arrow_colors[color], t))
s.write('@enduml\n')
if filename:
s.close()
else:
return s.getvalue() | python | def write_puml(self, filename=''):
"""
Writes PUML from the system. If filename is given, stores result in the file.
Otherwise returns result as a string.
"""
def get_type(o):
type = 'program'
if isinstance(o, AbstractSensor):
type = 'sensor'
elif isinstance(o, AbstractActuator):
type = 'actuator'
return type
if filename:
s = open(filename, 'w')
else:
s = io.StringIO()
s.write('@startuml\n')
s.write('skinparam state {\n')
for k, v in list(self.background_colors.items()):
s.write('BackGroundColor<<%s>> %s\n' % (k, v))
s.write('}\n')
for o in self.system.objects:
if isinstance(o, DefaultProgram) or o.hide_in_uml:
continue
if isinstance(o, ProgrammableSystemObject):
s.write('state "%s" as %s <<%s>>\n' % (o, o, get_type(o)))
s.write('%s: %s\n' % (o, o.class_name))
if isinstance(o, AbstractActuator):
for p in reversed(o.program_stack):
s.write('%s: %s :: %s\n' % (o, p, o.program_status.get(p, '-')))
elif hasattr(o, 'status'):
s.write('%s: Status: %s\n' % (o, o.status))
if getattr(o, 'is_program', False):
s.write('%s: Priority: %s\n' % (o, o.priority))
for t in o.actual_triggers:
if isinstance(t, DefaultProgram) or t.hide_in_uml:
continue
s.write('%s -[%s]-> %s\n' % (t, self.arrow_colors['trigger'], o))
for t in o.actual_targets:
if t.hide_in_uml:
continue
if o.active:
color = 'active_target'
else:
color = 'inactive_target'
if getattr(t, 'program', None) == o:
color = 'controlled_target'
s.write('%s -[%s]-> %s\n' % (o, self.arrow_colors[color], t))
s.write('@enduml\n')
if filename:
s.close()
else:
return s.getvalue() | [
"def",
"write_puml",
"(",
"self",
",",
"filename",
"=",
"''",
")",
":",
"def",
"get_type",
"(",
"o",
")",
":",
"type",
"=",
"'program'",
"if",
"isinstance",
"(",
"o",
",",
"AbstractSensor",
")",
":",
"type",
"=",
"'sensor'",
"elif",
"isinstance",
"(",
"o",
",",
"AbstractActuator",
")",
":",
"type",
"=",
"'actuator'",
"return",
"type",
"if",
"filename",
":",
"s",
"=",
"open",
"(",
"filename",
",",
"'w'",
")",
"else",
":",
"s",
"=",
"io",
".",
"StringIO",
"(",
")",
"s",
".",
"write",
"(",
"'@startuml\\n'",
")",
"s",
".",
"write",
"(",
"'skinparam state {\\n'",
")",
"for",
"k",
",",
"v",
"in",
"list",
"(",
"self",
".",
"background_colors",
".",
"items",
"(",
")",
")",
":",
"s",
".",
"write",
"(",
"'BackGroundColor<<%s>> %s\\n'",
"%",
"(",
"k",
",",
"v",
")",
")",
"s",
".",
"write",
"(",
"'}\\n'",
")",
"for",
"o",
"in",
"self",
".",
"system",
".",
"objects",
":",
"if",
"isinstance",
"(",
"o",
",",
"DefaultProgram",
")",
"or",
"o",
".",
"hide_in_uml",
":",
"continue",
"if",
"isinstance",
"(",
"o",
",",
"ProgrammableSystemObject",
")",
":",
"s",
".",
"write",
"(",
"'state \"%s\" as %s <<%s>>\\n'",
"%",
"(",
"o",
",",
"o",
",",
"get_type",
"(",
"o",
")",
")",
")",
"s",
".",
"write",
"(",
"'%s: %s\\n'",
"%",
"(",
"o",
",",
"o",
".",
"class_name",
")",
")",
"if",
"isinstance",
"(",
"o",
",",
"AbstractActuator",
")",
":",
"for",
"p",
"in",
"reversed",
"(",
"o",
".",
"program_stack",
")",
":",
"s",
".",
"write",
"(",
"'%s: %s :: %s\\n'",
"%",
"(",
"o",
",",
"p",
",",
"o",
".",
"program_status",
".",
"get",
"(",
"p",
",",
"'-'",
")",
")",
")",
"elif",
"hasattr",
"(",
"o",
",",
"'status'",
")",
":",
"s",
".",
"write",
"(",
"'%s: Status: %s\\n'",
"%",
"(",
"o",
",",
"o",
".",
"status",
")",
")",
"if",
"getattr",
"(",
"o",
",",
"'is_program'",
",",
"False",
")",
":",
"s",
".",
"write",
"(",
"'%s: Priority: %s\\n'",
"%",
"(",
"o",
",",
"o",
".",
"priority",
")",
")",
"for",
"t",
"in",
"o",
".",
"actual_triggers",
":",
"if",
"isinstance",
"(",
"t",
",",
"DefaultProgram",
")",
"or",
"t",
".",
"hide_in_uml",
":",
"continue",
"s",
".",
"write",
"(",
"'%s -[%s]-> %s\\n'",
"%",
"(",
"t",
",",
"self",
".",
"arrow_colors",
"[",
"'trigger'",
"]",
",",
"o",
")",
")",
"for",
"t",
"in",
"o",
".",
"actual_targets",
":",
"if",
"t",
".",
"hide_in_uml",
":",
"continue",
"if",
"o",
".",
"active",
":",
"color",
"=",
"'active_target'",
"else",
":",
"color",
"=",
"'inactive_target'",
"if",
"getattr",
"(",
"t",
",",
"'program'",
",",
"None",
")",
"==",
"o",
":",
"color",
"=",
"'controlled_target'",
"s",
".",
"write",
"(",
"'%s -[%s]-> %s\\n'",
"%",
"(",
"o",
",",
"self",
".",
"arrow_colors",
"[",
"color",
"]",
",",
"t",
")",
")",
"s",
".",
"write",
"(",
"'@enduml\\n'",
")",
"if",
"filename",
":",
"s",
".",
"close",
"(",
")",
"else",
":",
"return",
"s",
".",
"getvalue",
"(",
")"
] | Writes PUML from the system. If filename is given, stores result in the file.
Otherwise returns result as a string. | [
"Writes",
"PUML",
"from",
"the",
"system",
".",
"If",
"filename",
"is",
"given",
"stores",
"result",
"in",
"the",
"file",
".",
"Otherwise",
"returns",
"result",
"as",
"a",
"string",
"."
] | d8a8cd03cd0da047e033a2d305f3f260f8c4e017 | https://github.com/tuomas2/automate/blob/d8a8cd03cd0da047e033a2d305f3f260f8c4e017/src/automate/services/plantumlserv.py#L54-L112 | train | 1,013 |
tuomas2/automate | src/automate/services/plantumlserv.py | PlantUMLService.write_svg | def write_svg(self):
"""
Returns PUML from the system as a SVG image. Requires plantuml library.
"""
import plantuml
puml = self.write_puml()
server = plantuml.PlantUML(url=self.url)
svg = server.processes(puml)
return svg | python | def write_svg(self):
"""
Returns PUML from the system as a SVG image. Requires plantuml library.
"""
import plantuml
puml = self.write_puml()
server = plantuml.PlantUML(url=self.url)
svg = server.processes(puml)
return svg | [
"def",
"write_svg",
"(",
"self",
")",
":",
"import",
"plantuml",
"puml",
"=",
"self",
".",
"write_puml",
"(",
")",
"server",
"=",
"plantuml",
".",
"PlantUML",
"(",
"url",
"=",
"self",
".",
"url",
")",
"svg",
"=",
"server",
".",
"processes",
"(",
"puml",
")",
"return",
"svg"
] | Returns PUML from the system as a SVG image. Requires plantuml library. | [
"Returns",
"PUML",
"from",
"the",
"system",
"as",
"a",
"SVG",
"image",
".",
"Requires",
"plantuml",
"library",
"."
] | d8a8cd03cd0da047e033a2d305f3f260f8c4e017 | https://github.com/tuomas2/automate/blob/d8a8cd03cd0da047e033a2d305f3f260f8c4e017/src/automate/services/plantumlserv.py#L114-L122 | train | 1,014 |
lsanomaly/lsanomaly | lsanomaly/__init__.py | median_kneighbour_distance | def median_kneighbour_distance(X, k=5):
"""
Calculate the median kneighbor distance.
Find the distance between a set of random datapoints and
their kth nearest neighbours. This is a heuristic for setting the
kernel length scale.
"""
N_all = X.shape[0]
k = min(k, N_all)
N_subset = min(N_all, 2000)
sample_idx_train = np.random.permutation(N_all)[:N_subset]
nn = neighbors.NearestNeighbors(k)
nn.fit(X[sample_idx_train, :])
d, idx = nn.kneighbors(X[sample_idx_train, :])
return np.median(d[:, -1]) | python | def median_kneighbour_distance(X, k=5):
"""
Calculate the median kneighbor distance.
Find the distance between a set of random datapoints and
their kth nearest neighbours. This is a heuristic for setting the
kernel length scale.
"""
N_all = X.shape[0]
k = min(k, N_all)
N_subset = min(N_all, 2000)
sample_idx_train = np.random.permutation(N_all)[:N_subset]
nn = neighbors.NearestNeighbors(k)
nn.fit(X[sample_idx_train, :])
d, idx = nn.kneighbors(X[sample_idx_train, :])
return np.median(d[:, -1]) | [
"def",
"median_kneighbour_distance",
"(",
"X",
",",
"k",
"=",
"5",
")",
":",
"N_all",
"=",
"X",
".",
"shape",
"[",
"0",
"]",
"k",
"=",
"min",
"(",
"k",
",",
"N_all",
")",
"N_subset",
"=",
"min",
"(",
"N_all",
",",
"2000",
")",
"sample_idx_train",
"=",
"np",
".",
"random",
".",
"permutation",
"(",
"N_all",
")",
"[",
":",
"N_subset",
"]",
"nn",
"=",
"neighbors",
".",
"NearestNeighbors",
"(",
"k",
")",
"nn",
".",
"fit",
"(",
"X",
"[",
"sample_idx_train",
",",
":",
"]",
")",
"d",
",",
"idx",
"=",
"nn",
".",
"kneighbors",
"(",
"X",
"[",
"sample_idx_train",
",",
":",
"]",
")",
"return",
"np",
".",
"median",
"(",
"d",
"[",
":",
",",
"-",
"1",
"]",
")"
] | Calculate the median kneighbor distance.
Find the distance between a set of random datapoints and
their kth nearest neighbours. This is a heuristic for setting the
kernel length scale. | [
"Calculate",
"the",
"median",
"kneighbor",
"distance",
"."
] | 7680ccbd6eedc14ccdd84d11be56edb6f9fdca2e | https://github.com/lsanomaly/lsanomaly/blob/7680ccbd6eedc14ccdd84d11be56edb6f9fdca2e/lsanomaly/__init__.py#L12-L27 | train | 1,015 |
lsanomaly/lsanomaly | lsanomaly/__init__.py | pair_distance_centile | def pair_distance_centile(X, centile, max_pairs=5000):
"""
Calculate centiles of distances between random pairs in a dataset.
This is an alternative to the median kNN distance for setting the kernel
length scale.
"""
N = X.shape[0]
n_pairs = min(max_pairs, N**2)
# randorder1 = np.random.permutation(N)
# randorder2 = np.random.permutation(N)
dists = np.zeros(n_pairs)
for i in range(n_pairs):
pair = np.random.randint(0, N, 2)
pairdiff = X[pair[0], :]-X[pair[1], :]
dists[i] = np.dot(pairdiff, pairdiff.T)
dists.sort()
out = dists[int(n_pairs*centile/100.)]
return np.sqrt(out) | python | def pair_distance_centile(X, centile, max_pairs=5000):
"""
Calculate centiles of distances between random pairs in a dataset.
This is an alternative to the median kNN distance for setting the kernel
length scale.
"""
N = X.shape[0]
n_pairs = min(max_pairs, N**2)
# randorder1 = np.random.permutation(N)
# randorder2 = np.random.permutation(N)
dists = np.zeros(n_pairs)
for i in range(n_pairs):
pair = np.random.randint(0, N, 2)
pairdiff = X[pair[0], :]-X[pair[1], :]
dists[i] = np.dot(pairdiff, pairdiff.T)
dists.sort()
out = dists[int(n_pairs*centile/100.)]
return np.sqrt(out) | [
"def",
"pair_distance_centile",
"(",
"X",
",",
"centile",
",",
"max_pairs",
"=",
"5000",
")",
":",
"N",
"=",
"X",
".",
"shape",
"[",
"0",
"]",
"n_pairs",
"=",
"min",
"(",
"max_pairs",
",",
"N",
"**",
"2",
")",
"# randorder1 = np.random.permutation(N)",
"# randorder2 = np.random.permutation(N)",
"dists",
"=",
"np",
".",
"zeros",
"(",
"n_pairs",
")",
"for",
"i",
"in",
"range",
"(",
"n_pairs",
")",
":",
"pair",
"=",
"np",
".",
"random",
".",
"randint",
"(",
"0",
",",
"N",
",",
"2",
")",
"pairdiff",
"=",
"X",
"[",
"pair",
"[",
"0",
"]",
",",
":",
"]",
"-",
"X",
"[",
"pair",
"[",
"1",
"]",
",",
":",
"]",
"dists",
"[",
"i",
"]",
"=",
"np",
".",
"dot",
"(",
"pairdiff",
",",
"pairdiff",
".",
"T",
")",
"dists",
".",
"sort",
"(",
")",
"out",
"=",
"dists",
"[",
"int",
"(",
"n_pairs",
"*",
"centile",
"/",
"100.",
")",
"]",
"return",
"np",
".",
"sqrt",
"(",
"out",
")"
] | Calculate centiles of distances between random pairs in a dataset.
This is an alternative to the median kNN distance for setting the kernel
length scale. | [
"Calculate",
"centiles",
"of",
"distances",
"between",
"random",
"pairs",
"in",
"a",
"dataset",
"."
] | 7680ccbd6eedc14ccdd84d11be56edb6f9fdca2e | https://github.com/lsanomaly/lsanomaly/blob/7680ccbd6eedc14ccdd84d11be56edb6f9fdca2e/lsanomaly/__init__.py#L30-L51 | train | 1,016 |
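The two helpers above are heuristics for picking an RBF kernel length scale; a small synthetic-data sketch showing both (assuming `numpy` and the `lsanomaly` package are available):

```python
# Synthetic-data sketch of the two length-scale heuristics defined above.
# Both functions live at the top level of the lsanomaly package (lsanomaly/__init__.py).
import numpy as np
from lsanomaly import median_kneighbour_distance, pair_distance_centile

X = np.random.randn(1000, 4)
print(median_kneighbour_distance(X, k=5))    # median distance to each point's 5th neighbour
print(pair_distance_centile(X, centile=50))  # median distance between random pairs of points
```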
lsanomaly/lsanomaly | lsanomaly/__init__.py | LSAnomaly.fit | def fit(self, X, y=None):
"""
Fit the inlier model given training data.
This function attempts to choose reasonable defaults for parameters
sigma and rho if none are specified, which could then be adjusted
to improve performance.
Parameters
----------
X : array
Examples of inlier data, of dimension N times d (rows are
examples, columns are data dimensions)
y : array, optional
If the inliers have multiple classes, then y contains the class
assignments as a vector of length N. If this is specified then
the model will attempt to assign test data to one of the inlier
classes or to the outlier class.
"""
N = X.shape[0]
if y is None:
y = np.zeros(N)
self.classes = list(set(y))
self.classes.sort()
self.n_classes = len(self.classes)
# If no kernel parameters specified, try to choose some defaults
if not self.sigma:
self.sigma = median_kneighbour_distance(X)
self.gamma = self.sigma**-2
if not self.gamma:
self.gamma = self.sigma**-2
if not self.rho:
self.rho = 0.1
# choose kernel basis centres
if self.kernel_pos is None:
B = min(self.n_kernels_max, N)
kernel_idx = np.random.permutation(N)
self.kernel_pos = X[kernel_idx[:B]]
else:
B = self.kernel_pos.shape[0]
# fit coefficients
Phi = metrics.pairwise.rbf_kernel(X, self.kernel_pos, self.gamma)
theta = {}
Phi_PhiT = np.dot(Phi.T, Phi)
inverse_term = np.linalg.inv(Phi_PhiT + self.rho*np.eye(B))
for c in self.classes:
m = (y == c).astype(int)
theta[c] = np.dot(inverse_term, np.dot(Phi.T, m))
self.theta = theta | python | def fit(self, X, y=None):
"""
Fit the inlier model given training data.
This function attempts to choose reasonable defaults for parameters
sigma and rho if none are specified, which could then be adjusted
to improve performance.
Parameters
----------
X : array
Examples of inlier data, of dimension N times d (rows are
examples, columns are data dimensions)
y : array, optional
If the inliers have multiple classes, then y contains the class
assignments as a vector of length N. If this is specified then
the model will attempt to assign test data to one of the inlier
classes or to the outlier class.
"""
N = X.shape[0]
if y is None:
y = np.zeros(N)
self.classes = list(set(y))
self.classes.sort()
self.n_classes = len(self.classes)
# If no kernel parameters specified, try to choose some defaults
if not self.sigma:
self.sigma = median_kneighbour_distance(X)
self.gamma = self.sigma**-2
if not self.gamma:
self.gamma = self.sigma**-2
if not self.rho:
self.rho = 0.1
# choose kernel basis centres
if self.kernel_pos is None:
B = min(self.n_kernels_max, N)
kernel_idx = np.random.permutation(N)
self.kernel_pos = X[kernel_idx[:B]]
else:
B = self.kernel_pos.shape[0]
# fit coefficients
Phi = metrics.pairwise.rbf_kernel(X, self.kernel_pos, self.gamma)
theta = {}
Phi_PhiT = np.dot(Phi.T, Phi)
inverse_term = np.linalg.inv(Phi_PhiT + self.rho*np.eye(B))
for c in self.classes:
m = (y == c).astype(int)
theta[c] = np.dot(inverse_term, np.dot(Phi.T, m))
self.theta = theta | [
"def",
"fit",
"(",
"self",
",",
"X",
",",
"y",
"=",
"None",
")",
":",
"N",
"=",
"X",
".",
"shape",
"[",
"0",
"]",
"if",
"y",
"is",
"None",
":",
"y",
"=",
"np",
".",
"zeros",
"(",
"N",
")",
"self",
".",
"classes",
"=",
"list",
"(",
"set",
"(",
"y",
")",
")",
"self",
".",
"classes",
".",
"sort",
"(",
")",
"self",
".",
"n_classes",
"=",
"len",
"(",
"self",
".",
"classes",
")",
"# If no kernel parameters specified, try to choose some defaults",
"if",
"not",
"self",
".",
"sigma",
":",
"self",
".",
"sigma",
"=",
"median_kneighbour_distance",
"(",
"X",
")",
"self",
".",
"gamma",
"=",
"self",
".",
"sigma",
"**",
"-",
"2",
"if",
"not",
"self",
".",
"gamma",
":",
"self",
".",
"gamma",
"=",
"self",
".",
"sigma",
"**",
"-",
"2",
"if",
"not",
"self",
".",
"rho",
":",
"self",
".",
"rho",
"=",
"0.1",
"# choose kernel basis centres",
"if",
"self",
".",
"kernel_pos",
"is",
"None",
":",
"B",
"=",
"min",
"(",
"self",
".",
"n_kernels_max",
",",
"N",
")",
"kernel_idx",
"=",
"np",
".",
"random",
".",
"permutation",
"(",
"N",
")",
"self",
".",
"kernel_pos",
"=",
"X",
"[",
"kernel_idx",
"[",
":",
"B",
"]",
"]",
"else",
":",
"B",
"=",
"self",
".",
"kernel_pos",
".",
"shape",
"[",
"0",
"]",
"# fit coefficients",
"Phi",
"=",
"metrics",
".",
"pairwise",
".",
"rbf_kernel",
"(",
"X",
",",
"self",
".",
"kernel_pos",
",",
"self",
".",
"gamma",
")",
"theta",
"=",
"{",
"}",
"Phi_PhiT",
"=",
"np",
".",
"dot",
"(",
"Phi",
".",
"T",
",",
"Phi",
")",
"inverse_term",
"=",
"np",
".",
"linalg",
".",
"inv",
"(",
"Phi_PhiT",
"+",
"self",
".",
"rho",
"*",
"np",
".",
"eye",
"(",
"B",
")",
")",
"for",
"c",
"in",
"self",
".",
"classes",
":",
"m",
"=",
"(",
"y",
"==",
"c",
")",
".",
"astype",
"(",
"int",
")",
"theta",
"[",
"c",
"]",
"=",
"np",
".",
"dot",
"(",
"inverse_term",
",",
"np",
".",
"dot",
"(",
"Phi",
".",
"T",
",",
"m",
")",
")",
"self",
".",
"theta",
"=",
"theta"
] | Fit the inlier model given training data.
This function attempts to choose reasonable defaults for parameters
sigma and rho if none are specified, which could then be adjusted
to improve performance.
Parameters
----------
X : array
Examples of inlier data, of dimension N times d (rows are
examples, columns are data dimensions)
y : array, optional
If the inliers have multiple classes, then y contains the class
assignments as a vector of length N. If this is specified then
the model will attempt to assign test data to one of the inlier
classes or to the outlier class. | [
"Fit",
"the",
"inlier",
"model",
"given",
"training",
"data",
"."
] | 7680ccbd6eedc14ccdd84d11be56edb6f9fdca2e | https://github.com/lsanomaly/lsanomaly/blob/7680ccbd6eedc14ccdd84d11be56edb6f9fdca2e/lsanomaly/__init__.py#L108-L164 | train | 1,017 |
lsanomaly/lsanomaly | lsanomaly/__init__.py | LSAnomaly.predict | def predict(self, X):
"""
Assign classes to test data.
Parameters
----------
X : array
Test data, of dimension N times d (rows are examples, columns
are data dimensions)
Returns
-------
y_predicted : array
A vector of length N containing assigned classes. If no inlier
classes were specified during training, then 0 denotes an inlier
and 1 denotes an outlier. If multiple inlier classes were
specified, then each element of y_predicted is either one of
those inlier classes, or an outlier class (denoted by the
maximum inlier class ID plus 1).
"""
predictions_proba = self.predict_proba(X)
predictions = []
allclasses = copy.copy(self.classes)
allclasses.append('anomaly')
for i in range(X.shape[0]):
predictions.append(allclasses[predictions_proba[i, :].argmax()])
return predictions | python | def predict(self, X):
"""
Assign classes to test data.
Parameters
----------
X : array
Test data, of dimension N times d (rows are examples, columns
are data dimensions)
Returns
-------
y_predicted : array
A vector of length N containing assigned classes. If no inlier
classes were specified during training, then 0 denotes an inlier
and 1 denotes an outlier. If multiple inlier classes were
specified, then each element of y_predicted is either one of
those inlier classes, or an outlier class (denoted by the
maximum inlier class ID plus 1).
"""
predictions_proba = self.predict_proba(X)
predictions = []
allclasses = copy.copy(self.classes)
allclasses.append('anomaly')
for i in range(X.shape[0]):
predictions.append(allclasses[predictions_proba[i, :].argmax()])
return predictions | [
"def",
"predict",
"(",
"self",
",",
"X",
")",
":",
"predictions_proba",
"=",
"self",
".",
"predict_proba",
"(",
"X",
")",
"predictions",
"=",
"[",
"]",
"allclasses",
"=",
"copy",
".",
"copy",
"(",
"self",
".",
"classes",
")",
"allclasses",
".",
"append",
"(",
"'anomaly'",
")",
"for",
"i",
"in",
"range",
"(",
"X",
".",
"shape",
"[",
"0",
"]",
")",
":",
"predictions",
".",
"append",
"(",
"allclasses",
"[",
"predictions_proba",
"[",
"i",
",",
":",
"]",
".",
"argmax",
"(",
")",
"]",
")",
"return",
"predictions"
] | Assign classes to test data.
Parameters
----------
X : array
Test data, of dimension N times d (rows are examples, columns
are data dimensions)
Returns
-------
y_predicted : array
A vector of length N containing assigned classes. If no inlier
classes were specified during training, then 0 denotes an inlier
and 1 denotes an outlier. If multiple inlier classes were
specified, then each element of y_predicted is either one of
those inlier classes, or an outlier class (denoted by the
maximum inlier class ID plus 1). | [
"Assign",
"classes",
"to",
"test",
"data",
"."
] | 7680ccbd6eedc14ccdd84d11be56edb6f9fdca2e | https://github.com/lsanomaly/lsanomaly/blob/7680ccbd6eedc14ccdd84d11be56edb6f9fdca2e/lsanomaly/__init__.py#L166-L192 | train | 1,018 |
lsanomaly/lsanomaly | lsanomaly/__init__.py | LSAnomaly.predict_proba | def predict_proba(self, X):
"""
Calculate posterior probabilities of test data.
Parameters
----------
X : array
Test data, of dimension N times d (rows are examples, columns
are data dimensions)
Returns:
-------
y_prob : array
An array of dimension N times n_inlier_classes+1, containing
the probabilities of each row of X being one of the inlier
classes, or the outlier class (last column).
"""
Phi = metrics.pairwise.rbf_kernel(X, self.kernel_pos, self.gamma)
N = X.shape[0]
predictions = np.zeros((N, self.n_classes+1))
for i in range(N):
post = np.zeros(self.n_classes)
for c in range(self.n_classes):
post[c] = max(0,
np.dot(self.theta[self.classes[c]].T, Phi[i, :]))
post[c] = min(post[c], 1.)
predictions[i, :-1] = post
predictions[i, -1] = max(0, 1-sum(post))
return predictions | python | def predict_proba(self, X):
"""
Calculate posterior probabilities of test data.
Parameters
----------
X : array
Test data, of dimension N times d (rows are examples, columns
are data dimensions)
Returns:
-------
y_prob : array
An array of dimension N times n_inlier_classes+1, containing
the probabilities of each row of X being one of the inlier
classes, or the outlier class (last column).
"""
Phi = metrics.pairwise.rbf_kernel(X, self.kernel_pos, self.gamma)
N = X.shape[0]
predictions = np.zeros((N, self.n_classes+1))
for i in range(N):
post = np.zeros(self.n_classes)
for c in range(self.n_classes):
post[c] = max(0,
np.dot(self.theta[self.classes[c]].T, Phi[i, :]))
post[c] = min(post[c], 1.)
predictions[i, :-1] = post
predictions[i, -1] = max(0, 1-sum(post))
return predictions | [
"def",
"predict_proba",
"(",
"self",
",",
"X",
")",
":",
"Phi",
"=",
"metrics",
".",
"pairwise",
".",
"rbf_kernel",
"(",
"X",
",",
"self",
".",
"kernel_pos",
",",
"self",
".",
"gamma",
")",
"N",
"=",
"X",
".",
"shape",
"[",
"0",
"]",
"predictions",
"=",
"np",
".",
"zeros",
"(",
"(",
"N",
",",
"self",
".",
"n_classes",
"+",
"1",
")",
")",
"for",
"i",
"in",
"range",
"(",
"N",
")",
":",
"post",
"=",
"np",
".",
"zeros",
"(",
"self",
".",
"n_classes",
")",
"for",
"c",
"in",
"range",
"(",
"self",
".",
"n_classes",
")",
":",
"post",
"[",
"c",
"]",
"=",
"max",
"(",
"0",
",",
"np",
".",
"dot",
"(",
"self",
".",
"theta",
"[",
"self",
".",
"classes",
"[",
"c",
"]",
"]",
".",
"T",
",",
"Phi",
"[",
"i",
",",
":",
"]",
")",
")",
"post",
"[",
"c",
"]",
"=",
"min",
"(",
"post",
"[",
"c",
"]",
",",
"1.",
")",
"predictions",
"[",
"i",
",",
":",
"-",
"1",
"]",
"=",
"post",
"predictions",
"[",
"i",
",",
"-",
"1",
"]",
"=",
"max",
"(",
"0",
",",
"1",
"-",
"sum",
"(",
"post",
")",
")",
"return",
"predictions"
] | Calculate posterior probabilities of test data.
Parameters
----------
X : array
Test data, of dimension N times d (rows are examples, columns
are data dimensions)
Returns:
-------
y_prob : array
An array of dimension N times n_inlier_classes+1, containing
the probabilities of each row of X being one of the inlier
classes, or the outlier class (last column). | [
"Calculate",
"posterior",
"probabilities",
"of",
"test",
"data",
"."
] | 7680ccbd6eedc14ccdd84d11be56edb6f9fdca2e | https://github.com/lsanomaly/lsanomaly/blob/7680ccbd6eedc14ccdd84d11be56edb6f9fdca2e/lsanomaly/__init__.py#L194-L223 | train | 1,019 |
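
Continuing the same sketch, the posterior matrix returned by predict_proba can be split into its inlier and outlier columns; the shapes follow the docstring above:

proba = model.predict_proba(X_test)   # shape (N, n_inlier_classes + 1)
inlier_proba = proba[:, :-1]          # one column per inlier class
outlier_proba = proba[:, -1]          # last column is the outlier probability
# Each entry is clipped to [0, 1] independently, so rows are not guaranteed
# to sum to exactly 1.
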
lsanomaly/lsanomaly | lsanomaly/__init__.py | LSAnomaly.decision_function | def decision_function(self, X):
"""
Generate an inlier score for each test data example.
Parameters
----------
X : array
Test data, of dimension N times d (rows are examples, columns
are data dimensions)
Returns:
-------
scores : array
A vector of length N, where each element contains an inlier
score in the range 0-1 (outliers have values close to zero,
inliers have values close to one).
"""
predictions = self.predict_proba(X)
out = np.zeros((predictions.shape[0], 1))
out[:, 0] = 1 - predictions[:, -1]
return out | python | def decision_function(self, X):
"""
Generate an inlier score for each test data example.
Parameters
----------
X : array
Test data, of dimension N times d (rows are examples, columns
are data dimensions)
Returns:
-------
scores : array
A vector of length N, where each element contains an inlier
score in the range 0-1 (outliers have values close to zero,
inliers have values close to one).
"""
predictions = self.predict_proba(X)
out = np.zeros((predictions.shape[0], 1))
out[:, 0] = 1 - predictions[:, -1]
return out | [
"def",
"decision_function",
"(",
"self",
",",
"X",
")",
":",
"predictions",
"=",
"self",
".",
"predict_proba",
"(",
"X",
")",
"out",
"=",
"np",
".",
"zeros",
"(",
"(",
"predictions",
".",
"shape",
"[",
"0",
"]",
",",
"1",
")",
")",
"out",
"[",
":",
",",
"0",
"]",
"=",
"1",
"-",
"predictions",
"[",
":",
",",
"-",
"1",
"]",
"return",
"out"
] | Generate an inlier score for each test data example.
Parameters
----------
X : array
Test data, of dimension N times d (rows are examples, columns
are data dimensions)
Returns:
-------
scores : array
A vector of length N, where each element contains an inlier
score in the range 0-1 (outliers have values close to zero,
inliers have values close to one). | [
"Generate",
"an",
"inlier",
"score",
"for",
"each",
"test",
"data",
"example",
"."
] | 7680ccbd6eedc14ccdd84d11be56edb6f9fdca2e | https://github.com/lsanomaly/lsanomaly/blob/7680ccbd6eedc14ccdd84d11be56edb6f9fdca2e/lsanomaly/__init__.py#L225-L245 | train | 1,020 |
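
Because decision_function returns a single inlier score per example, it is convenient for ranking or thresholding test points; a short sketch (the threshold is illustrative, not a package default):

scores = model.decision_function(X_test)[:, 0]   # higher means more inlier-like
most_suspicious_first = np.argsort(scores)       # indices ordered by anomaly score
flagged = scores < 0.5                           # boolean mask of likely outliers
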
lsanomaly/lsanomaly | lsanomaly/__init__.py | LSAnomaly.score | def score(self, X, y):
"""
Calculate accuracy score.
Needed because of bug in metrics.accuracy_score when comparing
list with numpy array.
"""
predictions = self.predict(X)
true = 0.0
total = 0.0
for i in range(len(predictions)):
total += 1
if predictions[i] == y[i]:
true += 1
return true/total | python | def score(self, X, y):
"""
Calculate accuracy score.
Needed because of bug in metrics.accuracy_score when comparing
list with numpy array.
"""
predictions = self.predict(X)
true = 0.0
total = 0.0
for i in range(len(predictions)):
total += 1
if predictions[i] == y[i]:
true += 1
return true/total | [
"def",
"score",
"(",
"self",
",",
"X",
",",
"y",
")",
":",
"predictions",
"=",
"self",
".",
"predict",
"(",
"X",
")",
"true",
"=",
"0.0",
"total",
"=",
"0.0",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"predictions",
")",
")",
":",
"total",
"+=",
"1",
"if",
"predictions",
"[",
"i",
"]",
"==",
"y",
"[",
"i",
"]",
":",
"true",
"+=",
"1",
"return",
"true",
"/",
"total"
] | Calculate accuracy score.
Needed because of bug in metrics.accuracy_score when comparing
list with numpy array. | [
"Calculate",
"accuracy",
"score",
"."
] | 7680ccbd6eedc14ccdd84d11be56edb6f9fdca2e | https://github.com/lsanomaly/lsanomaly/blob/7680ccbd6eedc14ccdd84d11be56edb6f9fdca2e/lsanomaly/__init__.py#L247-L261 | train | 1,021 |
lsanomaly/lsanomaly | lsanomaly/__init__.py | LSAnomaly.predict_sequence | def predict_sequence(self, X, A, pi, inference='smoothing'):
"""
Calculate class probabilities for a sequence of data.
Parameters
----------
X : array
Test data, of dimension N times d (rows are time frames, columns
are data dimensions)
A : class transition matrix, where A[i,j] contains p(y_t=j|y_{t-1}=i)
pi : vector of initial class probabilities
inference : can be 'smoothing' or 'filtering'.
Returns:
-------
y_prob : array
An array of dimension N times n_inlier_classes+1, containing
the probabilities of each row of X being one of the inlier
classes, or the outlier class (last column).
"""
obsll = self.predict_proba(X)
T, S = obsll.shape
alpha = np.zeros((T, S))
alpha[0, :] = pi
for t in range(1, T):
alpha[t, :] = np.dot(alpha[t-1, :], A)
for s in range(S):
alpha[t, s] *= obsll[t, s]
alpha[t, :] = alpha[t, :]/sum(alpha[t, :])
if inference == 'filtering':
return alpha
else:
beta = np.zeros((T, S))
gamma = np.zeros((T, S))
beta[T-1, :] = np.ones(S)
for t in range(T-2, -1, -1):
for i in range(S):
for j in range(S):
beta[t, i] += A[i, j]*obsll[t+1, j]*beta[t+1, j]
beta[t, :] = beta[t, :]/sum(beta[t, :])
for t in range(T):
gamma[t, :] = alpha[t, :]*beta[t, :]
gamma[t, :] = gamma[t, :]/sum(gamma[t, :])
return gamma | python | def predict_sequence(self, X, A, pi, inference='smoothing'):
"""
Calculate class probabilities for a sequence of data.
Parameters
----------
X : array
Test data, of dimension N times d (rows are time frames, columns
are data dimensions)
A : class transition matrix, where A[i,j] contains p(y_t=j|y_{t-1}=i)
pi : vector of initial class probabilities
inference : can be 'smoothing' or 'filtering'.
Returns:
-------
y_prob : array
An array of dimension N times n_inlier_classes+1, containing
the probabilities of each row of X being one of the inlier
classes, or the outlier class (last column).
"""
obsll = self.predict_proba(X)
T, S = obsll.shape
alpha = np.zeros((T, S))
alpha[0, :] = pi
for t in range(1, T):
alpha[t, :] = np.dot(alpha[t-1, :], A)
for s in range(S):
alpha[t, s] *= obsll[t, s]
alpha[t, :] = alpha[t, :]/sum(alpha[t, :])
if inference == 'filtering':
return alpha
else:
beta = np.zeros((T, S))
gamma = np.zeros((T, S))
beta[T-1, :] = np.ones(S)
for t in range(T-2, -1, -1):
for i in range(S):
for j in range(S):
beta[t, i] += A[i, j]*obsll[t+1, j]*beta[t+1, j]
beta[t, :] = beta[t, :]/sum(beta[t, :])
for t in range(T):
gamma[t, :] = alpha[t, :]*beta[t, :]
gamma[t, :] = gamma[t, :]/sum(gamma[t, :])
return gamma | [
"def",
"predict_sequence",
"(",
"self",
",",
"X",
",",
"A",
",",
"pi",
",",
"inference",
"=",
"'smoothing'",
")",
":",
"obsll",
"=",
"self",
".",
"predict_proba",
"(",
"X",
")",
"T",
",",
"S",
"=",
"obsll",
".",
"shape",
"alpha",
"=",
"np",
".",
"zeros",
"(",
"(",
"T",
",",
"S",
")",
")",
"alpha",
"[",
"0",
",",
":",
"]",
"=",
"pi",
"for",
"t",
"in",
"range",
"(",
"1",
",",
"T",
")",
":",
"alpha",
"[",
"t",
",",
":",
"]",
"=",
"np",
".",
"dot",
"(",
"alpha",
"[",
"t",
"-",
"1",
",",
":",
"]",
",",
"A",
")",
"for",
"s",
"in",
"range",
"(",
"S",
")",
":",
"alpha",
"[",
"t",
",",
"s",
"]",
"*=",
"obsll",
"[",
"t",
",",
"s",
"]",
"alpha",
"[",
"t",
",",
":",
"]",
"=",
"alpha",
"[",
"t",
",",
":",
"]",
"/",
"sum",
"(",
"alpha",
"[",
"t",
",",
":",
"]",
")",
"if",
"inference",
"==",
"'filtering'",
":",
"return",
"alpha",
"else",
":",
"beta",
"=",
"np",
".",
"zeros",
"(",
"(",
"T",
",",
"S",
")",
")",
"gamma",
"=",
"np",
".",
"zeros",
"(",
"(",
"T",
",",
"S",
")",
")",
"beta",
"[",
"T",
"-",
"1",
",",
":",
"]",
"=",
"np",
".",
"ones",
"(",
"S",
")",
"for",
"t",
"in",
"range",
"(",
"T",
"-",
"2",
",",
"-",
"1",
",",
"-",
"1",
")",
":",
"for",
"i",
"in",
"range",
"(",
"S",
")",
":",
"for",
"j",
"in",
"range",
"(",
"S",
")",
":",
"beta",
"[",
"t",
",",
"i",
"]",
"+=",
"A",
"[",
"i",
",",
"j",
"]",
"*",
"obsll",
"[",
"t",
"+",
"1",
",",
"j",
"]",
"*",
"beta",
"[",
"t",
"+",
"1",
",",
"j",
"]",
"beta",
"[",
"t",
",",
":",
"]",
"=",
"beta",
"[",
"t",
",",
":",
"]",
"/",
"sum",
"(",
"beta",
"[",
"t",
",",
":",
"]",
")",
"for",
"t",
"in",
"range",
"(",
"T",
")",
":",
"gamma",
"[",
"t",
",",
":",
"]",
"=",
"alpha",
"[",
"t",
",",
":",
"]",
"*",
"beta",
"[",
"t",
",",
":",
"]",
"gamma",
"[",
"t",
",",
":",
"]",
"=",
"gamma",
"[",
"t",
",",
":",
"]",
"/",
"sum",
"(",
"gamma",
"[",
"t",
",",
":",
"]",
")",
"return",
"gamma"
] | Calculate class probabilities for a sequence of data.
Parameters
----------
X : array
Test data, of dimension N times d (rows are time frames, columns
are data dimensions)
A : class transition matrix, where A[i,j] contains p(y_t=j|y_{t-1}=i)
pi : vector of initial class probabilities
inference : can be 'smoothing' or 'filtering'.
Returns:
-------
y_prob : array
An array of dimension N times n_inlier_classes+1, containing
the probabilities of each row of X being one of the inlier
classes, or the outlier class (last column). | [
"Calculate",
"class",
"probabilities",
"for",
"a",
"sequence",
"of",
"data",
"."
] | 7680ccbd6eedc14ccdd84d11be56edb6f9fdca2e | https://github.com/lsanomaly/lsanomaly/blob/7680ccbd6eedc14ccdd84d11be56edb6f9fdca2e/lsanomaly/__init__.py#L263-L310 | train | 1,022 |
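
For sequential data, the forward-backward pass above needs a transition matrix over the S = n_inlier_classes + 1 states and an initial distribution. An illustrative two-state setup (one inlier class plus the outlier state):

A = np.array([[0.999, 0.001],    # inlier -> inlier, inlier -> outlier
              [0.010, 0.990]])   # outlier -> inlier, outlier -> outlier
pi = np.array([0.5, 0.5])        # uniform prior over the two states
gamma = model.predict_sequence(X_test, A, pi, inference='smoothing')
# gamma[t, -1] is the smoothed probability that frame t is an outlier.
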
tuomas2/automate | src/automate/extensions/webui/djangoapp/views.py | toggle_sensor | def toggle_sensor(request, sensorname):
"""
This is used only if websocket fails
"""
if service.read_only:
service.logger.warning("Could not perform operation: read only mode enabled")
raise Http404
source = request.GET.get('source', 'main')
sensor = service.system.namespace[sensorname]
sensor.status = not sensor.status
service.system.flush()
return HttpResponseRedirect(reverse(source)) | python | def toggle_sensor(request, sensorname):
"""
This is used only if websocket fails
"""
if service.read_only:
service.logger.warning("Could not perform operation: read only mode enabled")
raise Http404
source = request.GET.get('source', 'main')
sensor = service.system.namespace[sensorname]
sensor.status = not sensor.status
service.system.flush()
return HttpResponseRedirect(reverse(source)) | [
"def",
"toggle_sensor",
"(",
"request",
",",
"sensorname",
")",
":",
"if",
"service",
".",
"read_only",
":",
"service",
".",
"logger",
".",
"warning",
"(",
"\"Could not perform operation: read only mode enabled\"",
")",
"raise",
"Http404",
"source",
"=",
"request",
".",
"GET",
".",
"get",
"(",
"'source'",
",",
"'main'",
")",
"sensor",
"=",
"service",
".",
"system",
".",
"namespace",
"[",
"sensorname",
"]",
"sensor",
".",
"status",
"=",
"not",
"sensor",
".",
"status",
"service",
".",
"system",
".",
"flush",
"(",
")",
"return",
"HttpResponseRedirect",
"(",
"reverse",
"(",
"source",
")",
")"
] | This is used only if websocket fails | [
"This",
"is",
"used",
"only",
"if",
"websocket",
"fails"
] | d8a8cd03cd0da047e033a2d305f3f260f8c4e017 | https://github.com/tuomas2/automate/blob/d8a8cd03cd0da047e033a2d305f3f260f8c4e017/src/automate/extensions/webui/djangoapp/views.py#L279-L290 | train | 1,023 |
tuomas2/automate | src/automate/extensions/webui/djangoapp/views.py | toggle_value | def toggle_value(request, name):
"""
For manual shortcut links to perform toggle actions
"""
obj = service.system.namespace.get(name, None)
if not obj or service.read_only:
raise Http404
new_status = obj.status = not obj.status
if service.redirect_from_setters:
return HttpResponseRedirect(reverse('set_ready', args=(name, new_status)))
else:
return set_ready(request, name, new_status) | python | def toggle_value(request, name):
"""
For manual shortcut links to perform toggle actions
"""
obj = service.system.namespace.get(name, None)
if not obj or service.read_only:
raise Http404
new_status = obj.status = not obj.status
if service.redirect_from_setters:
return HttpResponseRedirect(reverse('set_ready', args=(name, new_status)))
else:
return set_ready(request, name, new_status) | [
"def",
"toggle_value",
"(",
"request",
",",
"name",
")",
":",
"obj",
"=",
"service",
".",
"system",
".",
"namespace",
".",
"get",
"(",
"name",
",",
"None",
")",
"if",
"not",
"obj",
"or",
"service",
".",
"read_only",
":",
"raise",
"Http404",
"new_status",
"=",
"obj",
".",
"status",
"=",
"not",
"obj",
".",
"status",
"if",
"service",
".",
"redirect_from_setters",
":",
"return",
"HttpResponseRedirect",
"(",
"reverse",
"(",
"'set_ready'",
",",
"args",
"=",
"(",
"name",
",",
"new_status",
")",
")",
")",
"else",
":",
"return",
"set_ready",
"(",
"request",
",",
"name",
",",
"new_status",
")"
] | For manual shortcut links to perform toggle actions | [
"For",
"manual",
"shortcut",
"links",
"to",
"perform",
"toggle",
"actions"
] | d8a8cd03cd0da047e033a2d305f3f260f8c4e017 | https://github.com/tuomas2/automate/blob/d8a8cd03cd0da047e033a2d305f3f260f8c4e017/src/automate/extensions/webui/djangoapp/views.py#L294-L305 | train | 1,024 |
tuomas2/automate | src/automate/extensions/webui/djangoapp/views.py | set_value | def set_value(request, name, value):
"""
For manual shortcut links to perform set value actions
"""
obj = service.system.namespace.get(name, None)
if not obj or service.read_only:
raise Http404
obj.status = value
if service.redirect_from_setters:
return HttpResponseRedirect(reverse('set_ready', args=(name, value)))
else:
return set_ready(request, name, value) | python | def set_value(request, name, value):
"""
For manual shortcut links to perform set value actions
"""
obj = service.system.namespace.get(name, None)
if not obj or service.read_only:
raise Http404
obj.status = value
if service.redirect_from_setters:
return HttpResponseRedirect(reverse('set_ready', args=(name, value)))
else:
return set_ready(request, name, value) | [
"def",
"set_value",
"(",
"request",
",",
"name",
",",
"value",
")",
":",
"obj",
"=",
"service",
".",
"system",
".",
"namespace",
".",
"get",
"(",
"name",
",",
"None",
")",
"if",
"not",
"obj",
"or",
"service",
".",
"read_only",
":",
"raise",
"Http404",
"obj",
".",
"status",
"=",
"value",
"if",
"service",
".",
"redirect_from_setters",
":",
"return",
"HttpResponseRedirect",
"(",
"reverse",
"(",
"'set_ready'",
",",
"args",
"=",
"(",
"name",
",",
"value",
")",
")",
")",
"else",
":",
"return",
"set_ready",
"(",
"request",
",",
"name",
",",
"value",
")"
] | For manual shortcut links to perform set value actions | [
"For",
"manual",
"shortcut",
"links",
"to",
"perform",
"set",
"value",
"actions"
] | d8a8cd03cd0da047e033a2d305f3f260f8c4e017 | https://github.com/tuomas2/automate/blob/d8a8cd03cd0da047e033a2d305f3f260f8c4e017/src/automate/extensions/webui/djangoapp/views.py#L309-L320 | train | 1,025 |
tuomas2/automate | src/automate/systemobject.py | SystemObject.object_type | def object_type(self):
"""
A read-only property that gives the object type as string; sensor, actuator, program, other.
Used by WEB interface templates.
"""
from .statusobject import AbstractSensor, AbstractActuator
from .program import Program
if isinstance(self, AbstractSensor):
return 'sensor'
elif isinstance(self, AbstractActuator):
return 'actuator'
elif isinstance(self, Program):
return 'program'
else:
return 'other' | python | def object_type(self):
"""
A read-only property that gives the object type as string; sensor, actuator, program, other.
Used by WEB interface templates.
"""
from .statusobject import AbstractSensor, AbstractActuator
from .program import Program
if isinstance(self, AbstractSensor):
return 'sensor'
elif isinstance(self, AbstractActuator):
return 'actuator'
elif isinstance(self, Program):
return 'program'
else:
return 'other' | [
"def",
"object_type",
"(",
"self",
")",
":",
"from",
".",
"statusobject",
"import",
"AbstractSensor",
",",
"AbstractActuator",
"from",
".",
"program",
"import",
"Program",
"if",
"isinstance",
"(",
"self",
",",
"AbstractSensor",
")",
":",
"return",
"'sensor'",
"elif",
"isinstance",
"(",
"self",
",",
"AbstractActuator",
")",
":",
"return",
"'actuator'",
"elif",
"isinstance",
"(",
"self",
",",
"Program",
")",
":",
"return",
"'program'",
"else",
":",
"return",
"'other'"
] | A read-only property that gives the object type as string; sensor, actuator, program, other.
Used by WEB interface templates. | [
"A",
"read",
"-",
"only",
"property",
"that",
"gives",
"the",
"object",
"type",
"as",
"string",
";",
"sensor",
"actuator",
"program",
"other",
".",
"Used",
"by",
"WEB",
"interface",
"templates",
"."
] | d8a8cd03cd0da047e033a2d305f3f260f8c4e017 | https://github.com/tuomas2/automate/blob/d8a8cd03cd0da047e033a2d305f3f260f8c4e017/src/automate/systemobject.py#L115-L130 | train | 1,026 |
tuomas2/automate | src/automate/systemobject.py | SystemObject.get_as_datadict | def get_as_datadict(self):
"""
Get information about this object as a dictionary. Used by WebSocket interface to pass some
relevant information to client applications.
"""
return dict(type=self.__class__.__name__, tags=list(self.tags)) | python | def get_as_datadict(self):
"""
Get information about this object as a dictionary. Used by WebSocket interface to pass some
relevant information to client applications.
"""
return dict(type=self.__class__.__name__, tags=list(self.tags)) | [
"def",
"get_as_datadict",
"(",
"self",
")",
":",
"return",
"dict",
"(",
"type",
"=",
"self",
".",
"__class__",
".",
"__name__",
",",
"tags",
"=",
"list",
"(",
"self",
".",
"tags",
")",
")"
] | Get information about this object as a dictionary. Used by WebSocket interface to pass some
relevant information to client applications. | [
"Get",
"information",
"about",
"this",
"object",
"as",
"a",
"dictionary",
".",
"Used",
"by",
"WebSocket",
"interface",
"to",
"pass",
"some",
"relevant",
"information",
"to",
"client",
"applications",
"."
] | d8a8cd03cd0da047e033a2d305f3f260f8c4e017 | https://github.com/tuomas2/automate/blob/d8a8cd03cd0da047e033a2d305f3f260f8c4e017/src/automate/systemobject.py#L156-L161 | train | 1,027 |
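
Both helpers above are small accessors used by the web UI. Combined with the dict-style namespace access used in the webui views earlier in this section, usage looks roughly like this (the object name and the class name in the comment are illustrative):

obj = system.namespace['porch_light']   # `system` is an automate System instance
if obj.object_type == 'actuator':
    info = obj.get_as_datadict()        # e.g. {'type': 'BoolActuator', 'tags': ['lights']}
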
tuomas2/automate | src/automate/systemobject.py | SystemObject.setup_system | def setup_system(self, system, name_from_system='', **kwargs):
"""
Set system attribute and do some initialization. Used by System.
"""
if not self.system:
self.system = system
name, traits = self._passed_arguments
new_name = self.system.get_unique_name(self, name, name_from_system)
if not self in self.system.reverse:
self.name = new_name
self.logger = self.system.logger.getChild('%s.%s' % (self.__class__.__name__, self.name))
self.logger.setLevel(self.log_level)
if name is None and 'name' in traits: # Only __setstate__ sets name to None. Default is ''.
del traits['name']
for cname in self.callables:
if cname in traits:
c = self._postponed_callables[cname] = traits.pop(cname)
c.setup_callable_system(self.system)
getattr(self, cname).setup_callable_system(self.system)
if not self.traits_inited():
super().__init__(**traits)
self.name_changed_event = True
self.setup() | python | def setup_system(self, system, name_from_system='', **kwargs):
"""
Set system attribute and do some initialization. Used by System.
"""
if not self.system:
self.system = system
name, traits = self._passed_arguments
new_name = self.system.get_unique_name(self, name, name_from_system)
if not self in self.system.reverse:
self.name = new_name
self.logger = self.system.logger.getChild('%s.%s' % (self.__class__.__name__, self.name))
self.logger.setLevel(self.log_level)
if name is None and 'name' in traits: # Only __setstate__ sets name to None. Default is ''.
del traits['name']
for cname in self.callables:
if cname in traits:
c = self._postponed_callables[cname] = traits.pop(cname)
c.setup_callable_system(self.system)
getattr(self, cname).setup_callable_system(self.system)
if not self.traits_inited():
super().__init__(**traits)
self.name_changed_event = True
self.setup() | [
"def",
"setup_system",
"(",
"self",
",",
"system",
",",
"name_from_system",
"=",
"''",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"self",
".",
"system",
":",
"self",
".",
"system",
"=",
"system",
"name",
",",
"traits",
"=",
"self",
".",
"_passed_arguments",
"new_name",
"=",
"self",
".",
"system",
".",
"get_unique_name",
"(",
"self",
",",
"name",
",",
"name_from_system",
")",
"if",
"not",
"self",
"in",
"self",
".",
"system",
".",
"reverse",
":",
"self",
".",
"name",
"=",
"new_name",
"self",
".",
"logger",
"=",
"self",
".",
"system",
".",
"logger",
".",
"getChild",
"(",
"'%s.%s'",
"%",
"(",
"self",
".",
"__class__",
".",
"__name__",
",",
"self",
".",
"name",
")",
")",
"self",
".",
"logger",
".",
"setLevel",
"(",
"self",
".",
"log_level",
")",
"if",
"name",
"is",
"None",
"and",
"'name'",
"in",
"traits",
":",
"# Only __setstate__ sets name to None. Default is ''.",
"del",
"traits",
"[",
"'name'",
"]",
"for",
"cname",
"in",
"self",
".",
"callables",
":",
"if",
"cname",
"in",
"traits",
":",
"c",
"=",
"self",
".",
"_postponed_callables",
"[",
"cname",
"]",
"=",
"traits",
".",
"pop",
"(",
"cname",
")",
"c",
".",
"setup_callable_system",
"(",
"self",
".",
"system",
")",
"getattr",
"(",
"self",
",",
"cname",
")",
".",
"setup_callable_system",
"(",
"self",
".",
"system",
")",
"if",
"not",
"self",
".",
"traits_inited",
"(",
")",
":",
"super",
"(",
")",
".",
"__init__",
"(",
"*",
"*",
"traits",
")",
"self",
".",
"name_changed_event",
"=",
"True",
"self",
".",
"setup",
"(",
")"
] | Set system attribute and do some initialization. Used by System. | [
"Set",
"system",
"attribute",
"and",
"do",
"some",
"initialization",
".",
"Used",
"by",
"System",
"."
] | d8a8cd03cd0da047e033a2d305f3f260f8c4e017 | https://github.com/tuomas2/automate/blob/d8a8cd03cd0da047e033a2d305f3f260f8c4e017/src/automate/systemobject.py#L169-L194 | train | 1,028 |
tuomas2/automate | src/automate/systemobject.py | SystemObject.setup_callables | def setup_callables(self):
"""
Setup Callable attributes that belong to this object.
"""
defaults = self.get_default_callables()
for key, value in list(defaults.items()):
self._postponed_callables.setdefault(key, value)
for key in self.callables:
value = self._postponed_callables.pop(key)
value.setup_callable_system(self.system, init=True)
setattr(self, key, value) | python | def setup_callables(self):
"""
Setup Callable attributes that belong to this object.
"""
defaults = self.get_default_callables()
for key, value in list(defaults.items()):
self._postponed_callables.setdefault(key, value)
for key in self.callables:
value = self._postponed_callables.pop(key)
value.setup_callable_system(self.system, init=True)
setattr(self, key, value) | [
"def",
"setup_callables",
"(",
"self",
")",
":",
"defaults",
"=",
"self",
".",
"get_default_callables",
"(",
")",
"for",
"key",
",",
"value",
"in",
"list",
"(",
"defaults",
".",
"items",
"(",
")",
")",
":",
"self",
".",
"_postponed_callables",
".",
"setdefault",
"(",
"key",
",",
"value",
")",
"for",
"key",
"in",
"self",
".",
"callables",
":",
"value",
"=",
"self",
".",
"_postponed_callables",
".",
"pop",
"(",
"key",
")",
"value",
".",
"setup_callable_system",
"(",
"self",
".",
"system",
",",
"init",
"=",
"True",
")",
"setattr",
"(",
"self",
",",
"key",
",",
"value",
")"
] | Setup Callable attributes that belong to this object. | [
"Setup",
"Callable",
"attributes",
"that",
"belong",
"to",
"this",
"object",
"."
] | d8a8cd03cd0da047e033a2d305f3f260f8c4e017 | https://github.com/tuomas2/automate/blob/d8a8cd03cd0da047e033a2d305f3f260f8c4e017/src/automate/systemobject.py#L196-L206 | train | 1,029 |
ofa/django-bouncy | django_bouncy/utils.py | grab_keyfile | def grab_keyfile(cert_url):
"""
    Function to acquire the keyfile
SNS keys expire and Amazon does not promise they will use the same key
for all SNS requests. So we need to keep a copy of the cert in our
cache
"""
key_cache = caches[getattr(settings, 'BOUNCY_KEY_CACHE', 'default')]
pemfile = key_cache.get(cert_url)
if not pemfile:
response = urlopen(cert_url)
pemfile = response.read()
# Extract the first certificate in the file and confirm it's a valid
# PEM certificate
certificates = pem.parse(smart_bytes(pemfile))
# A proper certificate file will contain 1 certificate
if len(certificates) != 1:
logger.error('Invalid Certificate File: URL %s', cert_url)
raise ValueError('Invalid Certificate File')
key_cache.set(cert_url, pemfile)
return pemfile | python | def grab_keyfile(cert_url):
"""
    Function to acquire the keyfile
SNS keys expire and Amazon does not promise they will use the same key
for all SNS requests. So we need to keep a copy of the cert in our
cache
"""
key_cache = caches[getattr(settings, 'BOUNCY_KEY_CACHE', 'default')]
pemfile = key_cache.get(cert_url)
if not pemfile:
response = urlopen(cert_url)
pemfile = response.read()
# Extract the first certificate in the file and confirm it's a valid
# PEM certificate
certificates = pem.parse(smart_bytes(pemfile))
# A proper certificate file will contain 1 certificate
if len(certificates) != 1:
logger.error('Invalid Certificate File: URL %s', cert_url)
raise ValueError('Invalid Certificate File')
key_cache.set(cert_url, pemfile)
return pemfile | [
"def",
"grab_keyfile",
"(",
"cert_url",
")",
":",
"key_cache",
"=",
"caches",
"[",
"getattr",
"(",
"settings",
",",
"'BOUNCY_KEY_CACHE'",
",",
"'default'",
")",
"]",
"pemfile",
"=",
"key_cache",
".",
"get",
"(",
"cert_url",
")",
"if",
"not",
"pemfile",
":",
"response",
"=",
"urlopen",
"(",
"cert_url",
")",
"pemfile",
"=",
"response",
".",
"read",
"(",
")",
"# Extract the first certificate in the file and confirm it's a valid",
"# PEM certificate",
"certificates",
"=",
"pem",
".",
"parse",
"(",
"smart_bytes",
"(",
"pemfile",
")",
")",
"# A proper certificate file will contain 1 certificate",
"if",
"len",
"(",
"certificates",
")",
"!=",
"1",
":",
"logger",
".",
"error",
"(",
"'Invalid Certificate File: URL %s'",
",",
"cert_url",
")",
"raise",
"ValueError",
"(",
"'Invalid Certificate File'",
")",
"key_cache",
".",
"set",
"(",
"cert_url",
",",
"pemfile",
")",
"return",
"pemfile"
] | Function to acquire the keyfile
SNS keys expire and Amazon does not promise they will use the same key
for all SNS requests. So we need to keep a copy of the cert in our
cache | [
"Function",
"to",
"acqure",
"the",
"keyfile"
] | a386dfa8c4ce59bd18978a3537c03cd6ad07bf06 | https://github.com/ofa/django-bouncy/blob/a386dfa8c4ce59bd18978a3537c03cd6ad07bf06/django_bouncy/utils.py#L67-L91 | train | 1,030 |
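
Because grab_keyfile reads its cache alias from the BOUNCY_KEY_CACHE setting, the downloaded PEM files can be kept in a dedicated cache backend. A settings.py sketch (the 'certs' alias is illustrative):

CACHES = {
    'default': {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'},
    'certs': {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'},
}
BOUNCY_KEY_CACHE = 'certs'   # used by grab_keyfile() when caching certificates
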
ofa/django-bouncy | django_bouncy/utils.py | verify_notification | def verify_notification(data):
"""
Function to verify notification came from a trusted source
    Returns True if verified, False if not verified
"""
pemfile = grab_keyfile(data['SigningCertURL'])
cert = crypto.load_certificate(crypto.FILETYPE_PEM, pemfile)
signature = base64.decodestring(six.b(data['Signature']))
if data['Type'] == "Notification":
hash_format = NOTIFICATION_HASH_FORMAT
else:
hash_format = SUBSCRIPTION_HASH_FORMAT
try:
crypto.verify(
cert, signature, six.b(hash_format.format(**data)), 'sha1')
except crypto.Error:
return False
return True | python | def verify_notification(data):
"""
Function to verify notification came from a trusted source
    Returns True if verified, False if not verified
"""
pemfile = grab_keyfile(data['SigningCertURL'])
cert = crypto.load_certificate(crypto.FILETYPE_PEM, pemfile)
signature = base64.decodestring(six.b(data['Signature']))
if data['Type'] == "Notification":
hash_format = NOTIFICATION_HASH_FORMAT
else:
hash_format = SUBSCRIPTION_HASH_FORMAT
try:
crypto.verify(
cert, signature, six.b(hash_format.format(**data)), 'sha1')
except crypto.Error:
return False
return True | [
"def",
"verify_notification",
"(",
"data",
")",
":",
"pemfile",
"=",
"grab_keyfile",
"(",
"data",
"[",
"'SigningCertURL'",
"]",
")",
"cert",
"=",
"crypto",
".",
"load_certificate",
"(",
"crypto",
".",
"FILETYPE_PEM",
",",
"pemfile",
")",
"signature",
"=",
"base64",
".",
"decodestring",
"(",
"six",
".",
"b",
"(",
"data",
"[",
"'Signature'",
"]",
")",
")",
"if",
"data",
"[",
"'Type'",
"]",
"==",
"\"Notification\"",
":",
"hash_format",
"=",
"NOTIFICATION_HASH_FORMAT",
"else",
":",
"hash_format",
"=",
"SUBSCRIPTION_HASH_FORMAT",
"try",
":",
"crypto",
".",
"verify",
"(",
"cert",
",",
"signature",
",",
"six",
".",
"b",
"(",
"hash_format",
".",
"format",
"(",
"*",
"*",
"data",
")",
")",
",",
"'sha1'",
")",
"except",
"crypto",
".",
"Error",
":",
"return",
"False",
"return",
"True"
] | Function to verify notification came from a trusted source
    Returns True if verified, False if not verified | [
"Function",
"to",
"verify",
"notification",
"came",
"from",
"a",
"trusted",
"source"
] | a386dfa8c4ce59bd18978a3537c03cd6ad07bf06 | https://github.com/ofa/django-bouncy/blob/a386dfa8c4ce59bd18978a3537c03cd6ad07bf06/django_bouncy/utils.py#L94-L114 | train | 1,031 |
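
A sketch of how a webhook view might call verify_notification on a decoded SNS body before trusting it; the view itself is illustrative, while the keys follow the SNS hash templates referenced above:

import json
from django.http import HttpResponse, HttpResponseBadRequest
from django_bouncy.utils import verify_notification

def sns_endpoint(request):
    payload = json.loads(request.body)   # SNS posts JSON containing Type, Signature, SigningCertURL, ...
    if not verify_notification(payload):
        return HttpResponseBadRequest('Signature check failed')
    # ... handle the bounce/complaint notification here ...
    return HttpResponse('OK')
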
ofa/django-bouncy | django_bouncy/utils.py | approve_subscription | def approve_subscription(data):
"""
Function to approve a SNS subscription with Amazon
We don't do a ton of verification here, past making sure that the endpoint
we're told to go to to verify the subscription is on the correct host
"""
url = data['SubscribeURL']
domain = urlparse(url).netloc
pattern = getattr(
settings,
'BOUNCY_SUBSCRIBE_DOMAIN_REGEX',
r"sns.[a-z0-9\-]+.amazonaws.com$"
)
if not re.search(pattern, domain):
logger.error('Invalid Subscription Domain %s', url)
return HttpResponseBadRequest('Improper Subscription Domain')
try:
result = urlopen(url).read()
logger.info('Subscription Request Sent %s', url)
except urllib.HTTPError as error:
result = error.read()
logger.warning('HTTP Error Creating Subscription %s', str(result))
signals.subscription.send(
sender='bouncy_approve_subscription',
result=result,
notification=data
)
# Return a 200 Status Code
return HttpResponse(six.u(result)) | python | def approve_subscription(data):
"""
Function to approve a SNS subscription with Amazon
We don't do a ton of verification here, past making sure that the endpoint
we're told to go to to verify the subscription is on the correct host
"""
url = data['SubscribeURL']
domain = urlparse(url).netloc
pattern = getattr(
settings,
'BOUNCY_SUBSCRIBE_DOMAIN_REGEX',
r"sns.[a-z0-9\-]+.amazonaws.com$"
)
if not re.search(pattern, domain):
logger.error('Invalid Subscription Domain %s', url)
return HttpResponseBadRequest('Improper Subscription Domain')
try:
result = urlopen(url).read()
logger.info('Subscription Request Sent %s', url)
except urllib.HTTPError as error:
result = error.read()
logger.warning('HTTP Error Creating Subscription %s', str(result))
signals.subscription.send(
sender='bouncy_approve_subscription',
result=result,
notification=data
)
# Return a 200 Status Code
return HttpResponse(six.u(result)) | [
"def",
"approve_subscription",
"(",
"data",
")",
":",
"url",
"=",
"data",
"[",
"'SubscribeURL'",
"]",
"domain",
"=",
"urlparse",
"(",
"url",
")",
".",
"netloc",
"pattern",
"=",
"getattr",
"(",
"settings",
",",
"'BOUNCY_SUBSCRIBE_DOMAIN_REGEX'",
",",
"r\"sns.[a-z0-9\\-]+.amazonaws.com$\"",
")",
"if",
"not",
"re",
".",
"search",
"(",
"pattern",
",",
"domain",
")",
":",
"logger",
".",
"error",
"(",
"'Invalid Subscription Domain %s'",
",",
"url",
")",
"return",
"HttpResponseBadRequest",
"(",
"'Improper Subscription Domain'",
")",
"try",
":",
"result",
"=",
"urlopen",
"(",
"url",
")",
".",
"read",
"(",
")",
"logger",
".",
"info",
"(",
"'Subscription Request Sent %s'",
",",
"url",
")",
"except",
"urllib",
".",
"HTTPError",
"as",
"error",
":",
"result",
"=",
"error",
".",
"read",
"(",
")",
"logger",
".",
"warning",
"(",
"'HTTP Error Creating Subscription %s'",
",",
"str",
"(",
"result",
")",
")",
"signals",
".",
"subscription",
".",
"send",
"(",
"sender",
"=",
"'bouncy_approve_subscription'",
",",
"result",
"=",
"result",
",",
"notification",
"=",
"data",
")",
"# Return a 200 Status Code",
"return",
"HttpResponse",
"(",
"six",
".",
"u",
"(",
"result",
")",
")"
] | Function to approve a SNS subscription with Amazon
We don't do a ton of verification here, past making sure that the endpoint
we're told to go to to verify the subscription is on the correct host | [
"Function",
"to",
"approve",
"a",
"SNS",
"subscription",
"with",
"Amazon"
] | a386dfa8c4ce59bd18978a3537c03cd6ad07bf06 | https://github.com/ofa/django-bouncy/blob/a386dfa8c4ce59bd18978a3537c03cd6ad07bf06/django_bouncy/utils.py#L117-L150 | train | 1,032 |
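
The domain check above can be tightened through the BOUNCY_SUBSCRIBE_DOMAIN_REGEX setting; an illustrative settings.py override pinning subscriptions to a single region:

# Only accept SubscribeURL hosts from one region (illustrative value; the
# default pattern is shown in approve_subscription above).
BOUNCY_SUBSCRIBE_DOMAIN_REGEX = r"sns\.us-east-1\.amazonaws\.com$"
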
ofa/django-bouncy | django_bouncy/utils.py | clean_time | def clean_time(time_string):
"""Return a datetime from the Amazon-provided datetime string"""
# Get a timezone-aware datetime object from the string
time = dateutil.parser.parse(time_string)
if not settings.USE_TZ:
# If timezone support is not active, convert the time to UTC and
# remove the timezone field
time = time.astimezone(timezone.utc).replace(tzinfo=None)
return time | python | def clean_time(time_string):
"""Return a datetime from the Amazon-provided datetime string"""
# Get a timezone-aware datetime object from the string
time = dateutil.parser.parse(time_string)
if not settings.USE_TZ:
# If timezone support is not active, convert the time to UTC and
# remove the timezone field
time = time.astimezone(timezone.utc).replace(tzinfo=None)
return time | [
"def",
"clean_time",
"(",
"time_string",
")",
":",
"# Get a timezone-aware datetime object from the string",
"time",
"=",
"dateutil",
".",
"parser",
".",
"parse",
"(",
"time_string",
")",
"if",
"not",
"settings",
".",
"USE_TZ",
":",
"# If timezone support is not active, convert the time to UTC and",
"# remove the timezone field",
"time",
"=",
"time",
".",
"astimezone",
"(",
"timezone",
".",
"utc",
")",
".",
"replace",
"(",
"tzinfo",
"=",
"None",
")",
"return",
"time"
] | Return a datetime from the Amazon-provided datetime string | [
"Return",
"a",
"datetime",
"from",
"the",
"Amazon",
"-",
"provided",
"datetime",
"string"
] | a386dfa8c4ce59bd18978a3537c03cd6ad07bf06 | https://github.com/ofa/django-bouncy/blob/a386dfa8c4ce59bd18978a3537c03cd6ad07bf06/django_bouncy/utils.py#L153-L161 | train | 1,033 |
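
The effect of the USE_TZ branch can be seen with a concrete timestamp; the values below are worked out by hand, so treat them as a sketch:

ts = clean_time('2015-01-01T12:00:00.000-05:00')
# USE_TZ = True  -> aware datetime, 2015-01-01 12:00:00-05:00
# USE_TZ = False -> converted to UTC and made naive: datetime(2015, 1, 1, 17, 0)
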
bruth/django-preserialize | preserialize/utils.py | parse_selectors | def parse_selectors(model, fields=None, exclude=None, key_map=None, **options):
"""Validates fields are valid and maps pseudo-fields to actual fields
for a given model class.
"""
fields = fields or DEFAULT_SELECTORS
exclude = exclude or ()
key_map = key_map or {}
validated = []
for alias in fields:
# Map the output key name to the actual field/accessor name for
# the model
actual = key_map.get(alias, alias)
# Validate the field exists
cleaned = resolver.get_field(model, actual)
if cleaned is None:
raise AttributeError('The "{0}" attribute could not be found '
'on the model "{1}"'.format(actual, model))
# Mapped value, so use the original name listed in `fields`
if type(cleaned) is list:
validated.extend(cleaned)
elif alias != actual:
validated.append(alias)
else:
validated.append(cleaned)
return tuple([x for x in validated if x not in exclude]) | python | def parse_selectors(model, fields=None, exclude=None, key_map=None, **options):
"""Validates fields are valid and maps pseudo-fields to actual fields
for a given model class.
"""
fields = fields or DEFAULT_SELECTORS
exclude = exclude or ()
key_map = key_map or {}
validated = []
for alias in fields:
# Map the output key name to the actual field/accessor name for
# the model
actual = key_map.get(alias, alias)
# Validate the field exists
cleaned = resolver.get_field(model, actual)
if cleaned is None:
raise AttributeError('The "{0}" attribute could not be found '
'on the model "{1}"'.format(actual, model))
# Mapped value, so use the original name listed in `fields`
if type(cleaned) is list:
validated.extend(cleaned)
elif alias != actual:
validated.append(alias)
else:
validated.append(cleaned)
return tuple([x for x in validated if x not in exclude]) | [
"def",
"parse_selectors",
"(",
"model",
",",
"fields",
"=",
"None",
",",
"exclude",
"=",
"None",
",",
"key_map",
"=",
"None",
",",
"*",
"*",
"options",
")",
":",
"fields",
"=",
"fields",
"or",
"DEFAULT_SELECTORS",
"exclude",
"=",
"exclude",
"or",
"(",
")",
"key_map",
"=",
"key_map",
"or",
"{",
"}",
"validated",
"=",
"[",
"]",
"for",
"alias",
"in",
"fields",
":",
"# Map the output key name to the actual field/accessor name for",
"# the model",
"actual",
"=",
"key_map",
".",
"get",
"(",
"alias",
",",
"alias",
")",
"# Validate the field exists",
"cleaned",
"=",
"resolver",
".",
"get_field",
"(",
"model",
",",
"actual",
")",
"if",
"cleaned",
"is",
"None",
":",
"raise",
"AttributeError",
"(",
"'The \"{0}\" attribute could not be found '",
"'on the model \"{1}\"'",
".",
"format",
"(",
"actual",
",",
"model",
")",
")",
"# Mapped value, so use the original name listed in `fields`",
"if",
"type",
"(",
"cleaned",
")",
"is",
"list",
":",
"validated",
".",
"extend",
"(",
"cleaned",
")",
"elif",
"alias",
"!=",
"actual",
":",
"validated",
".",
"append",
"(",
"alias",
")",
"else",
":",
"validated",
".",
"append",
"(",
"cleaned",
")",
"return",
"tuple",
"(",
"[",
"x",
"for",
"x",
"in",
"validated",
"if",
"x",
"not",
"in",
"exclude",
"]",
")"
] | Validates fields are valid and maps pseudo-fields to actual fields
for a given model class. | [
"Validates",
"fields",
"are",
"valid",
"and",
"maps",
"pseudo",
"-",
"fields",
"to",
"actual",
"fields",
"for",
"a",
"given",
"model",
"class",
"."
] | d772c224bd8c2c9e9ff997d82c54fe6ebb9444b6 | https://github.com/bruth/django-preserialize/blob/d772c224bd8c2c9e9ff997d82c54fe6ebb9444b6/preserialize/utils.py#L96-L125 | train | 1,034 |
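
A rough illustration of what the selector parsing produces for a stock Django model. The key_map entry maps an output alias onto a model method; the exact return value depends on the resolver, so this is a sketch:

from django.contrib.auth.models import User
from preserialize.utils import parse_selectors

selectors = parse_selectors(
    User,
    fields=['username', 'full_name'],
    key_map={'full_name': 'get_full_name'},   # alias -> real accessor on User
)
# -> roughly ('username', 'full_name'); the alias is kept because it maps to
#    a valid accessor, while an unknown name would raise AttributeError.
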
bruth/django-preserialize | preserialize/utils.py | ModelFieldResolver._get_local_fields | def _get_local_fields(self, model):
"Return the names of all locally defined fields on the model class."
local = [f for f in model._meta.fields]
m2m = [f for f in model._meta.many_to_many]
fields = local + m2m
names = tuple([x.name for x in fields])
return {
':local': dict(list(zip(names, fields))),
} | python | def _get_local_fields(self, model):
"Return the names of all locally defined fields on the model class."
local = [f for f in model._meta.fields]
m2m = [f for f in model._meta.many_to_many]
fields = local + m2m
names = tuple([x.name for x in fields])
return {
':local': dict(list(zip(names, fields))),
} | [
"def",
"_get_local_fields",
"(",
"self",
",",
"model",
")",
":",
"local",
"=",
"[",
"f",
"for",
"f",
"in",
"model",
".",
"_meta",
".",
"fields",
"]",
"m2m",
"=",
"[",
"f",
"for",
"f",
"in",
"model",
".",
"_meta",
".",
"many_to_many",
"]",
"fields",
"=",
"local",
"+",
"m2m",
"names",
"=",
"tuple",
"(",
"[",
"x",
".",
"name",
"for",
"x",
"in",
"fields",
"]",
")",
"return",
"{",
"':local'",
":",
"dict",
"(",
"list",
"(",
"zip",
"(",
"names",
",",
"fields",
")",
")",
")",
",",
"}"
] | Return the names of all locally defined fields on the model class. | [
"Return",
"the",
"names",
"of",
"all",
"locally",
"defined",
"fields",
"on",
"the",
"model",
"class",
"."
] | d772c224bd8c2c9e9ff997d82c54fe6ebb9444b6 | https://github.com/bruth/django-preserialize/blob/d772c224bd8c2c9e9ff997d82c54fe6ebb9444b6/preserialize/utils.py#L42-L51 | train | 1,035 |
bruth/django-preserialize | preserialize/utils.py | ModelFieldResolver._get_related_fields | def _get_related_fields(self, model):
"Returns the names of all related fields for model class."
reverse_fk = self._get_all_related_objects(model)
reverse_m2m = self._get_all_related_many_to_many_objects(model)
fields = tuple(reverse_fk + reverse_m2m)
names = tuple([x.get_accessor_name() for x in fields])
return {
':related': dict(list(zip(names, fields))),
} | python | def _get_related_fields(self, model):
"Returns the names of all related fields for model class."
reverse_fk = self._get_all_related_objects(model)
reverse_m2m = self._get_all_related_many_to_many_objects(model)
fields = tuple(reverse_fk + reverse_m2m)
names = tuple([x.get_accessor_name() for x in fields])
return {
':related': dict(list(zip(names, fields))),
} | [
"def",
"_get_related_fields",
"(",
"self",
",",
"model",
")",
":",
"reverse_fk",
"=",
"self",
".",
"_get_all_related_objects",
"(",
"model",
")",
"reverse_m2m",
"=",
"self",
".",
"_get_all_related_many_to_many_objects",
"(",
"model",
")",
"fields",
"=",
"tuple",
"(",
"reverse_fk",
"+",
"reverse_m2m",
")",
"names",
"=",
"tuple",
"(",
"[",
"x",
".",
"get_accessor_name",
"(",
")",
"for",
"x",
"in",
"fields",
"]",
")",
"return",
"{",
"':related'",
":",
"dict",
"(",
"list",
"(",
"zip",
"(",
"names",
",",
"fields",
")",
")",
")",
",",
"}"
] | Returns the names of all related fields for model class. | [
"Returns",
"the",
"names",
"of",
"all",
"related",
"fields",
"for",
"model",
"class",
"."
] | d772c224bd8c2c9e9ff997d82c54fe6ebb9444b6 | https://github.com/bruth/django-preserialize/blob/d772c224bd8c2c9e9ff997d82c54fe6ebb9444b6/preserialize/utils.py#L53-L63 | train | 1,036 |
cokelaer/reports | reports/htmltable.py | HTMLTable.to_html | def to_html(self, index=False, escape=False, header=True,
collapse_table=True, class_outer="table_outer", **kargs):
"""Return HTML version of the table
This is a wrapper of the to_html method of the pandas dataframe.
:param bool index: do not include the index
:param bool escape: do not escape special characters
:param bool header: include header
        :param bool collapse_table: long tables are shortened with a scroll bar
:param kargs: any parameter accepted by
:meth:`pandas.DataFrame.to_html`
"""
_buffer = {}
for k, v in self.pd_options.items():
# save the current option
_buffer[k] = pd.get_option(k)
# set with user value
pd.set_option(k, v)
# class sortable is to use the sorttable javascript
# note that the class has one t and the javascript library has 2
# as in the original version of sorttable.js
table = self.df.to_html(escape=escape, header=header, index=index,
classes='sortable', **kargs)
# get back to default options
for k, v in _buffer.items():
pd.set_option(k, v)
        # We wrap the table in a dedicated class/div named table_scroller
# that users must define.
return '<div class="%s">' % class_outer + table+"</div>" | python | def to_html(self, index=False, escape=False, header=True,
collapse_table=True, class_outer="table_outer", **kargs):
"""Return HTML version of the table
This is a wrapper of the to_html method of the pandas dataframe.
:param bool index: do not include the index
:param bool escape: do not escape special characters
:param bool header: include header
        :param bool collapse_table: long tables are shortened with a scroll bar
:param kargs: any parameter accepted by
:meth:`pandas.DataFrame.to_html`
"""
_buffer = {}
for k, v in self.pd_options.items():
# save the current option
_buffer[k] = pd.get_option(k)
# set with user value
pd.set_option(k, v)
# class sortable is to use the sorttable javascript
# note that the class has one t and the javascript library has 2
# as in the original version of sorttable.js
table = self.df.to_html(escape=escape, header=header, index=index,
classes='sortable', **kargs)
# get back to default options
for k, v in _buffer.items():
pd.set_option(k, v)
        # We wrap the table in a dedicated class/div named table_scroller
# that users must define.
return '<div class="%s">' % class_outer + table+"</div>" | [
"def",
"to_html",
"(",
"self",
",",
"index",
"=",
"False",
",",
"escape",
"=",
"False",
",",
"header",
"=",
"True",
",",
"collapse_table",
"=",
"True",
",",
"class_outer",
"=",
"\"table_outer\"",
",",
"*",
"*",
"kargs",
")",
":",
"_buffer",
"=",
"{",
"}",
"for",
"k",
",",
"v",
"in",
"self",
".",
"pd_options",
".",
"items",
"(",
")",
":",
"# save the current option",
"_buffer",
"[",
"k",
"]",
"=",
"pd",
".",
"get_option",
"(",
"k",
")",
"# set with user value",
"pd",
".",
"set_option",
"(",
"k",
",",
"v",
")",
"# class sortable is to use the sorttable javascript",
"# note that the class has one t and the javascript library has 2",
"# as in the original version of sorttable.js",
"table",
"=",
"self",
".",
"df",
".",
"to_html",
"(",
"escape",
"=",
"escape",
",",
"header",
"=",
"header",
",",
"index",
"=",
"index",
",",
"classes",
"=",
"'sortable'",
",",
"*",
"*",
"kargs",
")",
"# get back to default options",
"for",
"k",
",",
"v",
"in",
"_buffer",
".",
"items",
"(",
")",
":",
"pd",
".",
"set_option",
"(",
"k",
",",
"v",
")",
"# We wrap the table in a dedicated class/div nammed table_scroller",
"# that users must define.",
"return",
"'<div class=\"%s\">'",
"%",
"class_outer",
"+",
"table",
"+",
"\"</div>\""
] | Return HTML version of the table
This is a wrapper of the to_html method of the pandas dataframe.
:param bool index: do not include the index
:param bool escape: do not escape special characters
:param bool header: include header
        :param bool collapse_table: long tables are shortened with a scroll bar
:param kargs: any parameter accepted by
:meth:`pandas.DataFrame.to_html` | [
"Return",
"HTML",
"version",
"of",
"the",
"table"
] | 7703b1e27d440c3193ee6cc90bfecd78cc98b737 | https://github.com/cokelaer/reports/blob/7703b1e27d440c3193ee6cc90bfecd78cc98b737/reports/htmltable.py#L75-L108 | train | 1,037 |
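
Assuming HTMLTable can be built directly from a pandas DataFrame (it stores it as self.df and exposes pd_options) and imported from the package root, a minimal rendering sketch:

import pandas as pd
from reports import HTMLTable   # import path is an assumption

df = pd.DataFrame({'name': ['geneA', 'geneB'], 'score': [0.5123, 2.0]})
table = HTMLTable(df)
html = table.to_html(index=False, class_outer='table_outer')
# html is a '<div class="table_outer">' wrapping a sortable <table>.
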
cokelaer/reports | reports/htmltable.py | HTMLTable.add_bgcolor | def add_bgcolor(self, colname, cmap='copper', mode='absmax',
threshold=2):
"""Change column content into HTML paragraph with background color
:param colname:
:param cmap: a colormap (matplotlib) or created using
colormap package (from pypi).
:param mode: type of normalisation in 'absmax', 'max', 'clip'
(see details below)
:param threshold: used if mode is set to 'clip'
        Colormaps have values between 0 and 1 so we need to normalise the data
        between 0 and 1. There are 3 modes to normalise the data so far.
If mode is set to 'absmax', negatives and positives values are
expected to be found in a range from -inf to inf. Values are
scaled in between [0,1] X' = (X / M +1) /2. where m is the absolute
maximum. Ideally a colormap should be made of 3 colors, the first
color used for negative values, the second for zeros and third color
for positive values.
If mode is set to 'clip', values are clipped to a max value (parameter
        *threshold*) and values are normalised by that same threshold.
If mode is set to 'max', values are normalised by the max.
"""
try:
# if a cmap is provided, it may be just a known cmap name
cmap = cmap_builder(cmap)
except:
pass
data = self.df[colname].values
if len(data) == 0:
return
if mode == 'clip':
data = [min(x, threshold)/float(threshold) for x in data]
elif mode == 'absmax':
m = abs(data.min())
M = abs(data.max())
M = max([m, M])
if M != 0:
data = (data / M + 1)/2.
elif mode == 'max':
if data.max() != 0:
data = data / float(data.max())
# the expected RGB values for a given data point
rgbcolors = [cmap(x)[0:3] for x in data]
hexcolors = [rgb2hex(*x, normalised=True) for x in rgbcolors]
# need to read original data again
data = self.df[colname].values
# need to set precision since this is going to be a text not a number
# so pandas will not use the precision for those cases:
def prec(x):
try:
# this may fail if for instance x is nan or inf
x = easydev.precision(x, self.pd_options['precision'])
return x
except:
return x
data = [prec(x) for x in data]
html_formatter = '<p style="background-color:{0}">{1}</p>'
self.df[colname] = [html_formatter.format(x, y)
for x, y in zip(hexcolors, data)] | python | def add_bgcolor(self, colname, cmap='copper', mode='absmax',
threshold=2):
"""Change column content into HTML paragraph with background color
:param colname:
:param cmap: a colormap (matplotlib) or created using
colormap package (from pypi).
:param mode: type of normalisation in 'absmax', 'max', 'clip'
(see details below)
:param threshold: used if mode is set to 'clip'
        Colormaps have values between 0 and 1 so we need to normalise the data
        between 0 and 1. There are 3 modes to normalise the data so far.
If mode is set to 'absmax', negatives and positives values are
expected to be found in a range from -inf to inf. Values are
scaled in between [0,1] X' = (X / M +1) /2. where m is the absolute
maximum. Ideally a colormap should be made of 3 colors, the first
color used for negative values, the second for zeros and third color
for positive values.
If mode is set to 'clip', values are clipped to a max value (parameter
        *threshold*) and values are normalised by that same threshold.
If mode is set to 'max', values are normalised by the max.
"""
try:
# if a cmap is provided, it may be just a known cmap name
cmap = cmap_builder(cmap)
except:
pass
data = self.df[colname].values
if len(data) == 0:
return
if mode == 'clip':
data = [min(x, threshold)/float(threshold) for x in data]
elif mode == 'absmax':
m = abs(data.min())
M = abs(data.max())
M = max([m, M])
if M != 0:
data = (data / M + 1)/2.
elif mode == 'max':
if data.max() != 0:
data = data / float(data.max())
# the expected RGB values for a given data point
rgbcolors = [cmap(x)[0:3] for x in data]
hexcolors = [rgb2hex(*x, normalised=True) for x in rgbcolors]
# need to read original data again
data = self.df[colname].values
# need to set precision since this is going to be a text not a number
# so pandas will not use the precision for those cases:
def prec(x):
try:
# this may fail if for instance x is nan or inf
x = easydev.precision(x, self.pd_options['precision'])
return x
except:
return x
data = [prec(x) for x in data]
html_formatter = '<p style="background-color:{0}">{1}</p>'
self.df[colname] = [html_formatter.format(x, y)
for x, y in zip(hexcolors, data)] | [
"def",
"add_bgcolor",
"(",
"self",
",",
"colname",
",",
"cmap",
"=",
"'copper'",
",",
"mode",
"=",
"'absmax'",
",",
"threshold",
"=",
"2",
")",
":",
"try",
":",
"# if a cmap is provided, it may be just a known cmap name",
"cmap",
"=",
"cmap_builder",
"(",
"cmap",
")",
"except",
":",
"pass",
"data",
"=",
"self",
".",
"df",
"[",
"colname",
"]",
".",
"values",
"if",
"len",
"(",
"data",
")",
"==",
"0",
":",
"return",
"if",
"mode",
"==",
"'clip'",
":",
"data",
"=",
"[",
"min",
"(",
"x",
",",
"threshold",
")",
"/",
"float",
"(",
"threshold",
")",
"for",
"x",
"in",
"data",
"]",
"elif",
"mode",
"==",
"'absmax'",
":",
"m",
"=",
"abs",
"(",
"data",
".",
"min",
"(",
")",
")",
"M",
"=",
"abs",
"(",
"data",
".",
"max",
"(",
")",
")",
"M",
"=",
"max",
"(",
"[",
"m",
",",
"M",
"]",
")",
"if",
"M",
"!=",
"0",
":",
"data",
"=",
"(",
"data",
"/",
"M",
"+",
"1",
")",
"/",
"2.",
"elif",
"mode",
"==",
"'max'",
":",
"if",
"data",
".",
"max",
"(",
")",
"!=",
"0",
":",
"data",
"=",
"data",
"/",
"float",
"(",
"data",
".",
"max",
"(",
")",
")",
"# the expected RGB values for a given data point",
"rgbcolors",
"=",
"[",
"cmap",
"(",
"x",
")",
"[",
"0",
":",
"3",
"]",
"for",
"x",
"in",
"data",
"]",
"hexcolors",
"=",
"[",
"rgb2hex",
"(",
"*",
"x",
",",
"normalised",
"=",
"True",
")",
"for",
"x",
"in",
"rgbcolors",
"]",
"# need to read original data again",
"data",
"=",
"self",
".",
"df",
"[",
"colname",
"]",
".",
"values",
"# need to set precision since this is going to be a text not a number",
"# so pandas will not use the precision for those cases:",
"def",
"prec",
"(",
"x",
")",
":",
"try",
":",
"# this may fail if for instance x is nan or inf",
"x",
"=",
"easydev",
".",
"precision",
"(",
"x",
",",
"self",
".",
"pd_options",
"[",
"'precision'",
"]",
")",
"return",
"x",
"except",
":",
"return",
"x",
"data",
"=",
"[",
"prec",
"(",
"x",
")",
"for",
"x",
"in",
"data",
"]",
"html_formatter",
"=",
"'<p style=\"background-color:{0}\">{1}</p>'",
"self",
".",
"df",
"[",
"colname",
"]",
"=",
"[",
"html_formatter",
".",
"format",
"(",
"x",
",",
"y",
")",
"for",
"x",
",",
"y",
"in",
"zip",
"(",
"hexcolors",
",",
"data",
")",
"]"
] | Change column content into HTML paragraph with background color
:param colname:
:param cmap: a colormap (matplotlib) or created using
colormap package (from pypi).
:param mode: type of normalisation in 'absmax', 'max', 'clip'
(see details below)
:param threshold: used if mode is set to 'clip'
Colormap have values between 0 and 1 so we need to normalised the data
between 0 and 1. There are 3 mode to normalise the data so far.
If mode is set to 'absmax', negatives and positives values are
expected to be found in a range from -inf to inf. Values are
scaled in between [0,1] X' = (X / M +1) /2. where m is the absolute
maximum. Ideally a colormap should be made of 3 colors, the first
color used for negative values, the second for zeros and third color
for positive values.
If mode is set to 'clip', values are clipped to a max value (parameter
        *threshold*) and values are normalised by that same threshold.
If mode is set to 'max', values are normalised by the max. | [
"Change",
"column",
"content",
"into",
"HTML",
"paragraph",
"with",
"background",
"color"
] | 7703b1e27d440c3193ee6cc90bfecd78cc98b737 | https://github.com/cokelaer/reports/blob/7703b1e27d440c3193ee6cc90bfecd78cc98b737/reports/htmltable.py#L110-L180 | train | 1,038 |
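
Continuing the previous sketch, a column can be coloured in place before rendering; 'copper' is a standard matplotlib colormap name accepted by cmap_builder:

table.add_bgcolor('score', cmap='copper', mode='clip', threshold=2)
html = table.to_html()
# Cells of 'score' are now rendered as
# <p style="background-color:#rrggbb">value</p>, which is why escape stays off.
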
makinacorpus/django-tracking-fields | tracking_fields/admin.py | TrackingEventAdmin.changelist_view | def changelist_view(self, request, extra_context=None):
""" Get object currently tracked and add a button to get back to it """
extra_context = extra_context or {}
if 'object' in request.GET.keys():
value = request.GET['object'].split(':')
content_type = get_object_or_404(
ContentType,
id=value[0],
)
tracked_object = get_object_or_404(
content_type.model_class(),
id=value[1],
)
extra_context['tracked_object'] = tracked_object
extra_context['tracked_object_opts'] = tracked_object._meta
return super(TrackingEventAdmin, self).changelist_view(
request, extra_context) | python | def changelist_view(self, request, extra_context=None):
""" Get object currently tracked and add a button to get back to it """
extra_context = extra_context or {}
if 'object' in request.GET.keys():
value = request.GET['object'].split(':')
content_type = get_object_or_404(
ContentType,
id=value[0],
)
tracked_object = get_object_or_404(
content_type.model_class(),
id=value[1],
)
extra_context['tracked_object'] = tracked_object
extra_context['tracked_object_opts'] = tracked_object._meta
return super(TrackingEventAdmin, self).changelist_view(
request, extra_context) | [
"def",
"changelist_view",
"(",
"self",
",",
"request",
",",
"extra_context",
"=",
"None",
")",
":",
"extra_context",
"=",
"extra_context",
"or",
"{",
"}",
"if",
"'object'",
"in",
"request",
".",
"GET",
".",
"keys",
"(",
")",
":",
"value",
"=",
"request",
".",
"GET",
"[",
"'object'",
"]",
".",
"split",
"(",
"':'",
")",
"content_type",
"=",
"get_object_or_404",
"(",
"ContentType",
",",
"id",
"=",
"value",
"[",
"0",
"]",
",",
")",
"tracked_object",
"=",
"get_object_or_404",
"(",
"content_type",
".",
"model_class",
"(",
")",
",",
"id",
"=",
"value",
"[",
"1",
"]",
",",
")",
"extra_context",
"[",
"'tracked_object'",
"]",
"=",
"tracked_object",
"extra_context",
"[",
"'tracked_object_opts'",
"]",
"=",
"tracked_object",
".",
"_meta",
"return",
"super",
"(",
"TrackingEventAdmin",
",",
"self",
")",
".",
"changelist_view",
"(",
"request",
",",
"extra_context",
")"
] | Get object currently tracked and add a button to get back to it | [
"Get",
"object",
"currently",
"tracked",
"and",
"add",
"a",
"button",
"to",
"get",
"back",
"to",
"it"
] | 463313d0f9c0f8107a0413f4d418d1a8c2311981 | https://github.com/makinacorpus/django-tracking-fields/blob/463313d0f9c0f8107a0413f4d418d1a8c2311981/tracking_fields/admin.py#L118-L134 | train | 1,039 |
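
The changelist_view above resolves an ``object`` query parameter of the form ``<content_type_id>:<object_id>``. A hedged sketch of building such a link for a tracked instance (the helper name is made up, and the URL name assumes Django's default ``admin:<app_label>_<model>_changelist`` pattern):

.. code-block:: python

    from django.contrib.contenttypes.models import ContentType
    from django.urls import reverse

    def tracking_events_url(instance):
        # Build ".../trackingevent/?object=<content_type_id>:<pk>" so that the
        # changelist view above can look the tracked object back up.
        ct = ContentType.objects.get_for_model(instance)
        url = reverse('admin:tracking_fields_trackingevent_changelist')
        return '{}?object={}:{}'.format(url, ct.pk, instance.pk)
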
qacafe/cdrouter.py | cdrouter/configs.py | ConfigsService.list | def list(self, filter=None, type=None, sort=None, limit=None, page=None): # pylint: disable=redefined-builtin
"""Get a list of configs.
:param filter: (optional) Filters to apply as a string list.
:param type: (optional) `union` or `inter` as string.
:param sort: (optional) Sort fields to apply as string list.
:param limit: (optional) Limit returned list length.
:param page: (optional) Page to return.
:return: :class:`configs.Page <configs.Page>` object
"""
schema = self.LIST_SCHEMA
resp = self.service.list(self.base, filter, type, sort, limit, page)
cs, l = self.service.decode(schema, resp, many=True, links=True)
return Page(cs, l) | python | def list(self, filter=None, type=None, sort=None, limit=None, page=None): # pylint: disable=redefined-builtin
"""Get a list of configs.
:param filter: (optional) Filters to apply as a string list.
:param type: (optional) `union` or `inter` as string.
:param sort: (optional) Sort fields to apply as string list.
:param limit: (optional) Limit returned list length.
:param page: (optional) Page to return.
:return: :class:`configs.Page <configs.Page>` object
"""
schema = self.LIST_SCHEMA
resp = self.service.list(self.base, filter, type, sort, limit, page)
cs, l = self.service.decode(schema, resp, many=True, links=True)
return Page(cs, l) | [
"def",
"list",
"(",
"self",
",",
"filter",
"=",
"None",
",",
"type",
"=",
"None",
",",
"sort",
"=",
"None",
",",
"limit",
"=",
"None",
",",
"page",
"=",
"None",
")",
":",
"# pylint: disable=redefined-builtin",
"schema",
"=",
"self",
".",
"LIST_SCHEMA",
"resp",
"=",
"self",
".",
"service",
".",
"list",
"(",
"self",
".",
"base",
",",
"filter",
",",
"type",
",",
"sort",
",",
"limit",
",",
"page",
")",
"cs",
",",
"l",
"=",
"self",
".",
"service",
".",
"decode",
"(",
"schema",
",",
"resp",
",",
"many",
"=",
"True",
",",
"links",
"=",
"True",
")",
"return",
"Page",
"(",
"cs",
",",
"l",
")"
] | Get a list of configs.
:param filter: (optional) Filters to apply as a string list.
:param type: (optional) `union` or `inter` as string.
:param sort: (optional) Sort fields to apply as string list.
:param limit: (optional) Limit returned list length.
:param page: (optional) Page to return.
:return: :class:`configs.Page <configs.Page>` object | [
"Get",
"a",
"list",
"of",
"configs",
"."
] | aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5 | https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/configs.py#L187-L200 | train | 1,040 |
qacafe/cdrouter.py | cdrouter/configs.py | ConfigsService.iter_list | def iter_list(self, *args, **kwargs):
"""Get a list of configs. Whereas ``list`` fetches a single page of
configs according to its ``limit`` and ``page`` arguments,
``iter_list`` returns all configs by internally making
successive calls to ``list``.
:param args: Arguments that ``list`` takes.
:param kwargs: Optional arguments that ``list`` takes.
:return: :class:`configs.Config <configs.Config>` list
"""
return self.service.iter_list(self.list, *args, **kwargs) | python | def iter_list(self, *args, **kwargs):
"""Get a list of configs. Whereas ``list`` fetches a single page of
configs according to its ``limit`` and ``page`` arguments,
``iter_list`` returns all configs by internally making
successive calls to ``list``.
:param args: Arguments that ``list`` takes.
:param kwargs: Optional arguments that ``list`` takes.
:return: :class:`configs.Config <configs.Config>` list
"""
return self.service.iter_list(self.list, *args, **kwargs) | [
"def",
"iter_list",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"self",
".",
"service",
".",
"iter_list",
"(",
"self",
".",
"list",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | Get a list of configs. Whereas ``list`` fetches a single page of
configs according to its ``limit`` and ``page`` arguments,
``iter_list`` returns all configs by internally making
successive calls to ``list``.
:param args: Arguments that ``list`` takes.
:param kwargs: Optional arguments that ``list`` takes.
:return: :class:`configs.Config <configs.Config>` list | [
"Get",
"a",
"list",
"of",
"configs",
".",
"Whereas",
"list",
"fetches",
"a",
"single",
"page",
"of",
"configs",
"according",
"to",
"its",
"limit",
"and",
"page",
"arguments",
"iter_list",
"returns",
"all",
"configs",
"by",
"internally",
"making",
"successive",
"calls",
"to",
"list",
"."
] | aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5 | https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/configs.py#L202-L213 | train | 1,041 |
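
A hedged usage sketch for the two listing calls above, ``list`` for a single page and ``iter_list`` to walk every page; the host, API token, sort expression and printed attributes are placeholders or assumptions rather than values taken from the records:

.. code-block:: python

    from cdrouter import CDRouter

    c = CDRouter('http://localhost', token='<api-token>')   # placeholders

    # One page with the ten newest configs (returned as a configs.Page):
    page = c.configs.list(sort=['-id'], limit=10)

    # Or let iter_list make the successive list() calls internally:
    for cfg in c.configs.iter_list(sort=['-id']):
        print(cfg.id, cfg.name)
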
qacafe/cdrouter.py | cdrouter/configs.py | ConfigsService.get_plaintext | def get_plaintext(self, id): # pylint: disable=invalid-name,redefined-builtin
"""Get a config as plaintext.
:param id: Config ID as an int.
:rtype: string
"""
return self.service.get_id(self.base, id, params={'format': 'text'}).text | python | def get_plaintext(self, id): # pylint: disable=invalid-name,redefined-builtin
"""Get a config as plaintext.
:param id: Config ID as an int.
:rtype: string
"""
return self.service.get_id(self.base, id, params={'format': 'text'}).text | [
"def",
"get_plaintext",
"(",
"self",
",",
"id",
")",
":",
"# pylint: disable=invalid-name,redefined-builtin",
"return",
"self",
".",
"service",
".",
"get_id",
"(",
"self",
".",
"base",
",",
"id",
",",
"params",
"=",
"{",
"'format'",
":",
"'text'",
"}",
")",
".",
"text"
] | Get a config as plaintext.
:param id: Config ID as an int.
:rtype: string | [
"Get",
"a",
"config",
"as",
"plaintext",
"."
] | aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5 | https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/configs.py#L233-L239 | train | 1,042 |
qacafe/cdrouter.py | cdrouter/configs.py | ConfigsService.create | def create(self, resource):
"""Create a new config.
:param resource: :class:`configs.Config <configs.Config>` object
:return: :class:`configs.Config <configs.Config>` object
:rtype: configs.Config
"""
schema = self.CREATE_SCHEMA
json = self.service.encode(schema, resource)
schema = self.GET_SCHEMA
resp = self.service.create(self.base, json)
return self.service.decode(schema, resp) | python | def create(self, resource):
"""Create a new config.
:param resource: :class:`configs.Config <configs.Config>` object
:return: :class:`configs.Config <configs.Config>` object
:rtype: configs.Config
"""
schema = self.CREATE_SCHEMA
json = self.service.encode(schema, resource)
schema = self.GET_SCHEMA
resp = self.service.create(self.base, json)
return self.service.decode(schema, resp) | [
"def",
"create",
"(",
"self",
",",
"resource",
")",
":",
"schema",
"=",
"self",
".",
"CREATE_SCHEMA",
"json",
"=",
"self",
".",
"service",
".",
"encode",
"(",
"schema",
",",
"resource",
")",
"schema",
"=",
"self",
".",
"GET_SCHEMA",
"resp",
"=",
"self",
".",
"service",
".",
"create",
"(",
"self",
".",
"base",
",",
"json",
")",
"return",
"self",
".",
"service",
".",
"decode",
"(",
"schema",
",",
"resp",
")"
] | Create a new config.
:param resource: :class:`configs.Config <configs.Config>` object
:return: :class:`configs.Config <configs.Config>` object
:rtype: configs.Config | [
"Create",
"a",
"new",
"config",
"."
] | aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5 | https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/configs.py#L253-L265 | train | 1,043 |
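
A hedged sketch of creating a config through the service above; the keyword-constructor style is assumed from this module's resource classes, and the config name and contents are purely illustrative:

.. code-block:: python

    from cdrouter import CDRouter
    from cdrouter.configs import Config

    c = CDRouter('http://localhost', token='<api-token>')   # placeholders

    new = c.configs.create(Config(
        name='basic-lan',
        contents='testvar lanIp 192.168.1.1\n',   # illustrative contents
    ))
    print(new.id, new.name)
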
qacafe/cdrouter.py | cdrouter/configs.py | ConfigsService.edit | def edit(self, resource):
"""Edit a config.
:param resource: :class:`configs.Config <configs.Config>` object
:return: :class:`configs.Config <configs.Config>` object
:rtype: configs.Config
"""
schema = self.EDIT_SCHEMA
json = self.service.encode(schema, resource)
schema = self.GET_SCHEMA
resp = self.service.edit(self.base, resource.id, json)
return self.service.decode(schema, resp) | python | def edit(self, resource):
"""Edit a config.
:param resource: :class:`configs.Config <configs.Config>` object
:return: :class:`configs.Config <configs.Config>` object
:rtype: configs.Config
"""
schema = self.EDIT_SCHEMA
json = self.service.encode(schema, resource)
schema = self.GET_SCHEMA
resp = self.service.edit(self.base, resource.id, json)
return self.service.decode(schema, resp) | [
"def",
"edit",
"(",
"self",
",",
"resource",
")",
":",
"schema",
"=",
"self",
".",
"EDIT_SCHEMA",
"json",
"=",
"self",
".",
"service",
".",
"encode",
"(",
"schema",
",",
"resource",
")",
"schema",
"=",
"self",
".",
"GET_SCHEMA",
"resp",
"=",
"self",
".",
"service",
".",
"edit",
"(",
"self",
".",
"base",
",",
"resource",
".",
"id",
",",
"json",
")",
"return",
"self",
".",
"service",
".",
"decode",
"(",
"schema",
",",
"resp",
")"
] | Edit a config.
:param resource: :class:`configs.Config <configs.Config>` object
:return: :class:`configs.Config <configs.Config>` object
:rtype: configs.Config | [
"Edit",
"a",
"config",
"."
] | aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5 | https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/configs.py#L267-L279 | train | 1,044 |
qacafe/cdrouter.py | cdrouter/configs.py | ConfigsService.edit_shares | def edit_shares(self, id, user_ids): # pylint: disable=invalid-name,redefined-builtin
"""Edit shares for a config.
:param id: Config ID as an int.
:param user_ids: User IDs as int list.
:return: :class:`cdrouter.Share <cdrouter.Share>` list
"""
return self.service.edit_shares(self.base, id, user_ids) | python | def edit_shares(self, id, user_ids): # pylint: disable=invalid-name,redefined-builtin
"""Edit shares for a config.
:param id: Config ID as an int.
:param user_ids: User IDs as int list.
:return: :class:`cdrouter.Share <cdrouter.Share>` list
"""
return self.service.edit_shares(self.base, id, user_ids) | [
"def",
"edit_shares",
"(",
"self",
",",
"id",
",",
"user_ids",
")",
":",
"# pylint: disable=invalid-name,redefined-builtin",
"return",
"self",
".",
"service",
".",
"edit_shares",
"(",
"self",
".",
"base",
",",
"id",
",",
"user_ids",
")"
] | Edit shares for a config.
:param id: Config ID as an int.
:param user_ids: User IDs as int list.
:return: :class:`cdrouter.Share <cdrouter.Share>` list | [
"Edit",
"shares",
"for",
"a",
"config",
"."
] | aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5 | https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/configs.py#L296-L303 | train | 1,045 |
qacafe/cdrouter.py | cdrouter/configs.py | ConfigsService.check_config | def check_config(self, contents):
"""Process config contents with cdrouter-cli -check-config.
:param contents: Config contents as string.
:return: :class:`configs.CheckConfig <configs.CheckConfig>` object
:rtype: configs.CheckConfig
"""
schema = CheckConfigSchema()
resp = self.service.post(self.base,
params={'process': 'check'}, json={'contents': contents})
return self.service.decode(schema, resp) | python | def check_config(self, contents):
"""Process config contents with cdrouter-cli -check-config.
:param contents: Config contents as string.
:return: :class:`configs.CheckConfig <configs.CheckConfig>` object
:rtype: configs.CheckConfig
"""
schema = CheckConfigSchema()
resp = self.service.post(self.base,
params={'process': 'check'}, json={'contents': contents})
return self.service.decode(schema, resp) | [
"def",
"check_config",
"(",
"self",
",",
"contents",
")",
":",
"schema",
"=",
"CheckConfigSchema",
"(",
")",
"resp",
"=",
"self",
".",
"service",
".",
"post",
"(",
"self",
".",
"base",
",",
"params",
"=",
"{",
"'process'",
":",
"'check'",
"}",
",",
"json",
"=",
"{",
"'contents'",
":",
"contents",
"}",
")",
"return",
"self",
".",
"service",
".",
"decode",
"(",
"schema",
",",
"resp",
")"
] | Process config contents with cdrouter-cli -check-config.
:param contents: Config contents as string.
:return: :class:`configs.CheckConfig <configs.CheckConfig>` object
:rtype: configs.CheckConfig | [
"Process",
"config",
"contents",
"with",
"cdrouter",
"-",
"cli",
"-",
"check",
"-",
"config",
"."
] | aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5 | https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/configs.py#L313-L323 | train | 1,046 |
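
check_config above lends itself to validating contents before saving them. A hedged sketch; the host, token and file name are placeholders, and the ``errors`` attribute on the returned CheckConfig object is an assumption:

.. code-block:: python

    from cdrouter import CDRouter

    c = CDRouter('http://localhost', token='<api-token>')   # placeholders

    with open('lan.conf') as f:                              # any local config file
        contents = f.read()

    check = c.configs.check_config(contents)
    # `errors` is assumed to be a list on the decoded CheckConfig;
    # an empty list would mean the contents parsed cleanly.
    print(check.errors)
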
qacafe/cdrouter.py | cdrouter/configs.py | ConfigsService.upgrade_config | def upgrade_config(self, contents):
"""Process config contents with cdrouter-cli -upgrade-config.
:param contents: Config contents as string.
:return: :class:`configs.UpgradeConfig <configs.UpgradeConfig>` object
:rtype: configs.UpgradeConfig
"""
schema = UpgradeConfigSchema()
resp = self.service.post(self.base,
params={'process': 'upgrade'}, json={'contents': contents})
return self.service.decode(schema, resp) | python | def upgrade_config(self, contents):
"""Process config contents with cdrouter-cli -upgrade-config.
:param contents: Config contents as string.
:return: :class:`configs.UpgradeConfig <configs.UpgradeConfig>` object
:rtype: configs.UpgradeConfig
"""
schema = UpgradeConfigSchema()
resp = self.service.post(self.base,
params={'process': 'upgrade'}, json={'contents': contents})
return self.service.decode(schema, resp) | [
"def",
"upgrade_config",
"(",
"self",
",",
"contents",
")",
":",
"schema",
"=",
"UpgradeConfigSchema",
"(",
")",
"resp",
"=",
"self",
".",
"service",
".",
"post",
"(",
"self",
".",
"base",
",",
"params",
"=",
"{",
"'process'",
":",
"'upgrade'",
"}",
",",
"json",
"=",
"{",
"'contents'",
":",
"contents",
"}",
")",
"return",
"self",
".",
"service",
".",
"decode",
"(",
"schema",
",",
"resp",
")"
] | Process config contents with cdrouter-cli -upgrade-config.
:param contents: Config contents as string.
:return: :class:`configs.UpgradeConfig <configs.UpgradeConfig>` object
:rtype: configs.UpgradeConfig | [
"Process",
"config",
"contents",
"with",
"cdrouter",
"-",
"cli",
"-",
"upgrade",
"-",
"config",
"."
] | aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5 | https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/configs.py#L325-L335 | train | 1,047 |
qacafe/cdrouter.py | cdrouter/configs.py | ConfigsService.get_networks | def get_networks(self, contents):
"""Process config contents with cdrouter-cli -print-networks-json.
:param contents: Config contents as string.
:return: :class:`configs.Networks <configs.Networks>` object
:rtype: configs.Networks
"""
schema = NetworksSchema()
resp = self.service.post(self.base,
params={'process': 'networks'}, json={'contents': contents})
return self.service.decode(schema, resp) | python | def get_networks(self, contents):
"""Process config contents with cdrouter-cli -print-networks-json.
:param contents: Config contents as string.
:return: :class:`configs.Networks <configs.Networks>` object
:rtype: configs.Networks
"""
schema = NetworksSchema()
resp = self.service.post(self.base,
params={'process': 'networks'}, json={'contents': contents})
return self.service.decode(schema, resp) | [
"def",
"get_networks",
"(",
"self",
",",
"contents",
")",
":",
"schema",
"=",
"NetworksSchema",
"(",
")",
"resp",
"=",
"self",
".",
"service",
".",
"post",
"(",
"self",
".",
"base",
",",
"params",
"=",
"{",
"'process'",
":",
"'networks'",
"}",
",",
"json",
"=",
"{",
"'contents'",
":",
"contents",
"}",
")",
"return",
"self",
".",
"service",
".",
"decode",
"(",
"schema",
",",
"resp",
")"
] | Process config contents with cdrouter-cli -print-networks-json.
:param contents: Config contents as string.
:return: :class:`configs.Networks <configs.Networks>` object
:rtype: configs.Networks | [
"Process",
"config",
"contents",
"with",
"cdrouter",
"-",
"cli",
"-",
"print",
"-",
"networks",
"-",
"json",
"."
] | aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5 | https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/configs.py#L337-L347 | train | 1,048 |
qacafe/cdrouter.py | cdrouter/configs.py | ConfigsService.bulk_copy | def bulk_copy(self, ids):
"""Bulk copy a set of configs.
:param ids: Int list of config IDs.
:return: :class:`configs.Config <configs.Config>` list
"""
schema = self.GET_SCHEMA
return self.service.bulk_copy(self.base, self.RESOURCE, ids, schema) | python | def bulk_copy(self, ids):
"""Bulk copy a set of configs.
:param ids: Int list of config IDs.
:return: :class:`configs.Config <configs.Config>` list
"""
schema = self.GET_SCHEMA
return self.service.bulk_copy(self.base, self.RESOURCE, ids, schema) | [
"def",
"bulk_copy",
"(",
"self",
",",
"ids",
")",
":",
"schema",
"=",
"self",
".",
"GET_SCHEMA",
"return",
"self",
".",
"service",
".",
"bulk_copy",
"(",
"self",
".",
"base",
",",
"self",
".",
"RESOURCE",
",",
"ids",
",",
"schema",
")"
] | Bulk copy a set of configs.
:param ids: Int list of config IDs.
:return: :class:`configs.Config <configs.Config>` list | [
"Bulk",
"copy",
"a",
"set",
"of",
"configs",
"."
] | aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5 | https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/configs.py#L357-L364 | train | 1,049 |
qacafe/cdrouter.py | cdrouter/configs.py | ConfigsService.bulk_edit | def bulk_edit(self, _fields, ids=None, filter=None, type=None, all=False, testvars=None): # pylint: disable=redefined-builtin
"""Bulk edit a set of configs.
:param _fields: :class:`configs.Config <configs.Config>` object
:param ids: (optional) Int list of config IDs.
:param filter: (optional) String list of filters.
:param type: (optional) `union` or `inter` as string.
:param all: (optional) Apply to all if bool `True`.
:param testvars: (optional) :class:`configs.ConfigTestvars <configs.ConfigTestvars>` list
"""
schema = self.EDIT_SCHEMA
_fields = self.service.encode(schema, _fields, skip_none=True)
return self.service.bulk_edit(self.base, self.RESOURCE,
_fields, ids=ids, filter=filter, type=type, all=all, testvars=testvars) | python | def bulk_edit(self, _fields, ids=None, filter=None, type=None, all=False, testvars=None): # pylint: disable=redefined-builtin
"""Bulk edit a set of configs.
:param _fields: :class:`configs.Config <configs.Config>` object
:param ids: (optional) Int list of config IDs.
:param filter: (optional) String list of filters.
:param type: (optional) `union` or `inter` as string.
:param all: (optional) Apply to all if bool `True`.
:param testvars: (optional) :class:`configs.ConfigTestvars <configs.ConfigTestvars>` list
"""
schema = self.EDIT_SCHEMA
_fields = self.service.encode(schema, _fields, skip_none=True)
return self.service.bulk_edit(self.base, self.RESOURCE,
_fields, ids=ids, filter=filter, type=type, all=all, testvars=testvars) | [
"def",
"bulk_edit",
"(",
"self",
",",
"_fields",
",",
"ids",
"=",
"None",
",",
"filter",
"=",
"None",
",",
"type",
"=",
"None",
",",
"all",
"=",
"False",
",",
"testvars",
"=",
"None",
")",
":",
"# pylint: disable=redefined-builtin",
"schema",
"=",
"self",
".",
"EDIT_SCHEMA",
"_fields",
"=",
"self",
".",
"service",
".",
"encode",
"(",
"schema",
",",
"_fields",
",",
"skip_none",
"=",
"True",
")",
"return",
"self",
".",
"service",
".",
"bulk_edit",
"(",
"self",
".",
"base",
",",
"self",
".",
"RESOURCE",
",",
"_fields",
",",
"ids",
"=",
"ids",
",",
"filter",
"=",
"filter",
",",
"type",
"=",
"type",
",",
"all",
"=",
"all",
",",
"testvars",
"=",
"testvars",
")"
] | Bulk edit a set of configs.
:param _fields: :class:`configs.Config <configs.Config>` object
:param ids: (optional) Int list of config IDs.
:param filter: (optional) String list of filters.
:param type: (optional) `union` or `inter` as string.
:param all: (optional) Apply to all if bool `True`.
:param testvars: (optional) :class:`configs.ConfigTestvars <configs.ConfigTestvars>` list | [
"Bulk",
"edit",
"a",
"set",
"of",
"configs",
"."
] | aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5 | https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/configs.py#L366-L379 | train | 1,050 |
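
A hedged sketch of the bulk-edit call above, applying the same note and tags to several configs in one request; the IDs are placeholders and the ``note``/``tags`` fields are assumptions about the Config schema:

.. code-block:: python

    from cdrouter import CDRouter
    from cdrouter.configs import Config

    c = CDRouter('http://localhost', token='<api-token>')   # placeholders

    # Only the fields set on `_fields` are applied to the selected configs.
    c.configs.bulk_edit(Config(note='nightly runs', tags=['nightly']),
                        ids=[101, 102, 103])
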
qacafe/cdrouter.py | cdrouter/configs.py | ConfigsService.bulk_delete | def bulk_delete(self, ids=None, filter=None, type=None, all=False): # pylint: disable=redefined-builtin
"""Bulk delete a set of configs.
:param ids: (optional) Int list of config IDs.
:param filter: (optional) String list of filters.
:param type: (optional) `union` or `inter` as string.
:param all: (optional) Apply to all if bool `True`.
"""
return self.service.bulk_delete(self.base, self.RESOURCE,
ids=ids, filter=filter, type=type, all=all) | python | def bulk_delete(self, ids=None, filter=None, type=None, all=False): # pylint: disable=redefined-builtin
"""Bulk delete a set of configs.
:param ids: (optional) Int list of config IDs.
:param filter: (optional) String list of filters.
:param type: (optional) `union` or `inter` as string.
:param all: (optional) Apply to all if bool `True`.
"""
return self.service.bulk_delete(self.base, self.RESOURCE,
ids=ids, filter=filter, type=type, all=all) | [
"def",
"bulk_delete",
"(",
"self",
",",
"ids",
"=",
"None",
",",
"filter",
"=",
"None",
",",
"type",
"=",
"None",
",",
"all",
"=",
"False",
")",
":",
"# pylint: disable=redefined-builtin",
"return",
"self",
".",
"service",
".",
"bulk_delete",
"(",
"self",
".",
"base",
",",
"self",
".",
"RESOURCE",
",",
"ids",
"=",
"ids",
",",
"filter",
"=",
"filter",
",",
"type",
"=",
"type",
",",
"all",
"=",
"all",
")"
] | Bulk delete a set of configs.
:param ids: (optional) Int list of config IDs.
:param filter: (optional) String list of filters.
:param type: (optional) `union` or `inter` as string.
:param all: (optional) Apply to all if bool `True`. | [
"Bulk",
"delete",
"a",
"set",
"of",
"configs",
"."
] | aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5 | https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/configs.py#L381-L390 | train | 1,051 |
inveniosoftware/invenio-oauthclient | invenio_oauthclient/handlers.py | response_token_setter | def response_token_setter(remote, resp):
"""Extract token from response and set it for the user.
:param remote: The remote application.
:param resp: The response.
:raises invenio_oauthclient.errors.OAuthClientError: If authorization with
remote service failed.
:raises invenio_oauthclient.errors.OAuthResponseError: In case of bad
authorized request.
:returns: The token.
"""
if resp is None:
raise OAuthRejectedRequestError('User rejected request.', remote, resp)
else:
if 'access_token' in resp:
return oauth2_token_setter(remote, resp)
elif 'oauth_token' in resp and 'oauth_token_secret' in resp:
return oauth1_token_setter(remote, resp)
elif 'error' in resp:
# Only OAuth2 specifies how to send error messages
raise OAuthClientError(
'Authorization with remote service failed.', remote, resp,
)
raise OAuthResponseError('Bad OAuth authorized request', remote, resp) | python | def response_token_setter(remote, resp):
"""Extract token from response and set it for the user.
:param remote: The remote application.
:param resp: The response.
:raises invenio_oauthclient.errors.OAuthClientError: If authorization with
remote service failed.
:raises invenio_oauthclient.errors.OAuthResponseError: In case of bad
authorized request.
:returns: The token.
"""
if resp is None:
raise OAuthRejectedRequestError('User rejected request.', remote, resp)
else:
if 'access_token' in resp:
return oauth2_token_setter(remote, resp)
elif 'oauth_token' in resp and 'oauth_token_secret' in resp:
return oauth1_token_setter(remote, resp)
elif 'error' in resp:
# Only OAuth2 specifies how to send error messages
raise OAuthClientError(
'Authorization with remote service failed.', remote, resp,
)
raise OAuthResponseError('Bad OAuth authorized request', remote, resp) | [
"def",
"response_token_setter",
"(",
"remote",
",",
"resp",
")",
":",
"if",
"resp",
"is",
"None",
":",
"raise",
"OAuthRejectedRequestError",
"(",
"'User rejected request.'",
",",
"remote",
",",
"resp",
")",
"else",
":",
"if",
"'access_token'",
"in",
"resp",
":",
"return",
"oauth2_token_setter",
"(",
"remote",
",",
"resp",
")",
"elif",
"'oauth_token'",
"in",
"resp",
"and",
"'oauth_token_secret'",
"in",
"resp",
":",
"return",
"oauth1_token_setter",
"(",
"remote",
",",
"resp",
")",
"elif",
"'error'",
"in",
"resp",
":",
"# Only OAuth2 specifies how to send error messages",
"raise",
"OAuthClientError",
"(",
"'Authorization with remote service failed.'",
",",
"remote",
",",
"resp",
",",
")",
"raise",
"OAuthResponseError",
"(",
"'Bad OAuth authorized request'",
",",
"remote",
",",
"resp",
")"
] | Extract token from response and set it for the user.
:param remote: The remote application.
:param resp: The response.
:raises invenio_oauthclient.errors.OAuthClientError: If authorization with
remote service failed.
:raises invenio_oauthclient.errors.OAuthResponseError: In case of bad
authorized request.
:returns: The token. | [
"Extract",
"token",
"from",
"response",
"and",
"set",
"it",
"for",
"the",
"user",
"."
] | 2500dc6935738107617aeade79e050d7608004bb | https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/handlers.py#L69-L92 | train | 1,052 |
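
response_token_setter above dispatches purely on the shape of the provider's response dict. For reference, minimal examples of the shapes it distinguishes; every value is a placeholder:

.. code-block:: python

    # OAuth2 providers hand back an access token directly:
    oauth2_resp = {'access_token': '<token>', 'token_type': 'bearer'}

    # OAuth1 providers hand back a token/secret pair:
    oauth1_resp = {'oauth_token': '<token>', 'oauth_token_secret': '<secret>'}

    # response_token_setter(remote, oauth2_resp)   -> oauth2_token_setter(...)
    # response_token_setter(remote, oauth1_resp)   -> oauth1_token_setter(...)
    # response_token_setter(remote, None)          -> raises OAuthRejectedRequestError
    # response_token_setter(remote, {'error': 'access_denied'})
    #                                              -> raises OAuthClientError
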
inveniosoftware/invenio-oauthclient | invenio_oauthclient/handlers.py | oauth1_token_setter | def oauth1_token_setter(remote, resp, token_type='', extra_data=None):
"""Set an OAuth1 token.
:param remote: The remote application.
:param resp: The response.
:param token_type: The token type. (Default: ``''``)
:param extra_data: Extra information. (Default: ``None``)
:returns: A :class:`invenio_oauthclient.models.RemoteToken` instance.
"""
return token_setter(
remote,
resp['oauth_token'],
secret=resp['oauth_token_secret'],
extra_data=extra_data,
token_type=token_type,
) | python | def oauth1_token_setter(remote, resp, token_type='', extra_data=None):
"""Set an OAuth1 token.
:param remote: The remote application.
:param resp: The response.
:param token_type: The token type. (Default: ``''``)
:param extra_data: Extra information. (Default: ``None``)
:returns: A :class:`invenio_oauthclient.models.RemoteToken` instance.
"""
return token_setter(
remote,
resp['oauth_token'],
secret=resp['oauth_token_secret'],
extra_data=extra_data,
token_type=token_type,
) | [
"def",
"oauth1_token_setter",
"(",
"remote",
",",
"resp",
",",
"token_type",
"=",
"''",
",",
"extra_data",
"=",
"None",
")",
":",
"return",
"token_setter",
"(",
"remote",
",",
"resp",
"[",
"'oauth_token'",
"]",
",",
"secret",
"=",
"resp",
"[",
"'oauth_token_secret'",
"]",
",",
"extra_data",
"=",
"extra_data",
",",
"token_type",
"=",
"token_type",
",",
")"
] | Set an OAuth1 token.
:param remote: The remote application.
:param resp: The response.
:param token_type: The token type. (Default: ``''``)
:param extra_data: Extra information. (Default: ``None``)
:returns: A :class:`invenio_oauthclient.models.RemoteToken` instance. | [
"Set",
"an",
"OAuth1",
"token",
"."
] | 2500dc6935738107617aeade79e050d7608004bb | https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/handlers.py#L95-L110 | train | 1,053 |
inveniosoftware/invenio-oauthclient | invenio_oauthclient/handlers.py | oauth2_token_setter | def oauth2_token_setter(remote, resp, token_type='', extra_data=None):
"""Set an OAuth2 token.
The refresh_token can be used to obtain a new access_token after
the old one is expired. It is saved in the database for long term use.
A refresh_token will be present only if `access_type=offline` is included
in the authorization code request.
:param remote: The remote application.
:param resp: The response.
:param token_type: The token type. (Default: ``''``)
:param extra_data: Extra information. (Default: ``None``)
:returns: A :class:`invenio_oauthclient.models.RemoteToken` instance.
"""
return token_setter(
remote,
resp['access_token'],
secret='',
token_type=token_type,
extra_data=extra_data,
) | python | def oauth2_token_setter(remote, resp, token_type='', extra_data=None):
"""Set an OAuth2 token.
The refresh_token can be used to obtain a new access_token after
the old one is expired. It is saved in the database for long term use.
A refresh_token will be present only if `access_type=offline` is included
in the authorization code request.
:param remote: The remote application.
:param resp: The response.
:param token_type: The token type. (Default: ``''``)
:param extra_data: Extra information. (Default: ``None``)
:returns: A :class:`invenio_oauthclient.models.RemoteToken` instance.
"""
return token_setter(
remote,
resp['access_token'],
secret='',
token_type=token_type,
extra_data=extra_data,
) | [
"def",
"oauth2_token_setter",
"(",
"remote",
",",
"resp",
",",
"token_type",
"=",
"''",
",",
"extra_data",
"=",
"None",
")",
":",
"return",
"token_setter",
"(",
"remote",
",",
"resp",
"[",
"'access_token'",
"]",
",",
"secret",
"=",
"''",
",",
"token_type",
"=",
"token_type",
",",
"extra_data",
"=",
"extra_data",
",",
")"
] | Set an OAuth2 token.
The refresh_token can be used to obtain a new access_token after
the old one is expired. It is saved in the database for long term use.
A refresh_token will be present only if `access_type=offline` is included
in the authorization code request.
:param remote: The remote application.
:param resp: The response.
:param token_type: The token type. (Default: ``''``)
:param extra_data: Extra information. (Default: ``None``)
:returns: A :class:`invenio_oauthclient.models.RemoteToken` instance. | [
"Set",
"an",
"OAuth2",
"token",
"."
] | 2500dc6935738107617aeade79e050d7608004bb | https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/handlers.py#L113-L133 | train | 1,054 |
inveniosoftware/invenio-oauthclient | invenio_oauthclient/handlers.py | token_setter | def token_setter(remote, token, secret='', token_type='', extra_data=None,
user=None):
"""Set token for user.
:param remote: The remote application.
:param token: The token to set.
:param token_type: The token type. (Default: ``''``)
:param extra_data: Extra information. (Default: ``None``)
:param user: The user owner of the remote token. If it's not defined,
the current user is used automatically. (Default: ``None``)
:returns: A :class:`invenio_oauthclient.models.RemoteToken` instance or
``None``.
"""
session[token_session_key(remote.name)] = (token, secret)
user = user or current_user
# Save token if user is not anonymous (user exists but may not be active at
# this moment)
if not user.is_anonymous:
uid = user.id
cid = remote.consumer_key
# Check for already existing token
t = RemoteToken.get(uid, cid, token_type=token_type)
if t:
t.update_token(token, secret)
else:
t = RemoteToken.create(
uid, cid, token, secret,
token_type=token_type, extra_data=extra_data
)
return t
return None | python | def token_setter(remote, token, secret='', token_type='', extra_data=None,
user=None):
"""Set token for user.
:param remote: The remote application.
:param token: The token to set.
:param token_type: The token type. (Default: ``''``)
:param extra_data: Extra information. (Default: ``None``)
:param user: The user owner of the remote token. If it's not defined,
the current user is used automatically. (Default: ``None``)
:returns: A :class:`invenio_oauthclient.models.RemoteToken` instance or
``None``.
"""
session[token_session_key(remote.name)] = (token, secret)
user = user or current_user
# Save token if user is not anonymous (user exists but may not be active at
# this moment)
if not user.is_anonymous:
uid = user.id
cid = remote.consumer_key
# Check for already existing token
t = RemoteToken.get(uid, cid, token_type=token_type)
if t:
t.update_token(token, secret)
else:
t = RemoteToken.create(
uid, cid, token, secret,
token_type=token_type, extra_data=extra_data
)
return t
return None | [
"def",
"token_setter",
"(",
"remote",
",",
"token",
",",
"secret",
"=",
"''",
",",
"token_type",
"=",
"''",
",",
"extra_data",
"=",
"None",
",",
"user",
"=",
"None",
")",
":",
"session",
"[",
"token_session_key",
"(",
"remote",
".",
"name",
")",
"]",
"=",
"(",
"token",
",",
"secret",
")",
"user",
"=",
"user",
"or",
"current_user",
"# Save token if user is not anonymous (user exists but can be not active at",
"# this moment)",
"if",
"not",
"user",
".",
"is_anonymous",
":",
"uid",
"=",
"user",
".",
"id",
"cid",
"=",
"remote",
".",
"consumer_key",
"# Check for already existing token",
"t",
"=",
"RemoteToken",
".",
"get",
"(",
"uid",
",",
"cid",
",",
"token_type",
"=",
"token_type",
")",
"if",
"t",
":",
"t",
".",
"update_token",
"(",
"token",
",",
"secret",
")",
"else",
":",
"t",
"=",
"RemoteToken",
".",
"create",
"(",
"uid",
",",
"cid",
",",
"token",
",",
"secret",
",",
"token_type",
"=",
"token_type",
",",
"extra_data",
"=",
"extra_data",
")",
"return",
"t",
"return",
"None"
] | Set token for user.
:param remote: The remote application.
:param token: The token to set.
:param token_type: The token type. (Default: ``''``)
:param extra_data: Extra information. (Default: ``None``)
:param user: The user owner of the remote token. If it's not defined,
the current user is used automatically. (Default: ``None``)
:returns: A :class:`invenio_oauthclient.models.RemoteToken` instance or
``None``. | [
"Set",
"token",
"for",
"user",
"."
] | 2500dc6935738107617aeade79e050d7608004bb | https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/handlers.py#L136-L169 | train | 1,055 |
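
A hedged usage sketch for token_setter above, storing a token on behalf of a specific (non-current) user. The remote-app name, e-mail address and extra_data are placeholders, and a Flask application context is assumed:

.. code-block:: python

    from flask import current_app
    from invenio_accounts.models import User
    from invenio_oauthclient.handlers import token_setter

    oauth = current_app.extensions['oauthlib.client']
    remote = oauth.remote_apps['github']            # placeholder remote-app name

    user = User.query.filter_by(email='info@example.org').one()   # placeholder
    t = token_setter(remote, '<access-token>', user=user,
                     extra_data={'login': 'example'})
    # `t` is the created or updated RemoteToken; it is None only when the
    # resolved user is anonymous.
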
inveniosoftware/invenio-oauthclient | invenio_oauthclient/handlers.py | token_getter | def token_getter(remote, token=''):
"""Retrieve OAuth access token.
Used by flask-oauthlib to get the access token when making requests.
:param remote: The remote application.
:param token: Type of token to get. Data passed from ``oauth.request()`` to
identify which token to retrieve. (Default: ``''``)
:returns: The token.
"""
session_key = token_session_key(remote.name)
if session_key not in session and current_user.is_authenticated:
# Fetch key from token store if user is authenticated, and the key
# isn't already cached in the session.
remote_token = RemoteToken.get(
current_user.get_id(),
remote.consumer_key,
token_type=token,
)
if remote_token is None:
return None
# Store token and secret in session
session[session_key] = remote_token.token()
return session.get(session_key, None) | python | def token_getter(remote, token=''):
"""Retrieve OAuth access token.
Used by flask-oauthlib to get the access token when making requests.
:param remote: The remote application.
:param token: Type of token to get. Data passed from ``oauth.request()`` to
identify which token to retrieve. (Default: ``''``)
:returns: The token.
"""
session_key = token_session_key(remote.name)
if session_key not in session and current_user.is_authenticated:
# Fetch key from token store if user is authenticated, and the key
# isn't already cached in the session.
remote_token = RemoteToken.get(
current_user.get_id(),
remote.consumer_key,
token_type=token,
)
if remote_token is None:
return None
# Store token and secret in session
session[session_key] = remote_token.token()
return session.get(session_key, None) | [
"def",
"token_getter",
"(",
"remote",
",",
"token",
"=",
"''",
")",
":",
"session_key",
"=",
"token_session_key",
"(",
"remote",
".",
"name",
")",
"if",
"session_key",
"not",
"in",
"session",
"and",
"current_user",
".",
"is_authenticated",
":",
"# Fetch key from token store if user is authenticated, and the key",
"# isn't already cached in the session.",
"remote_token",
"=",
"RemoteToken",
".",
"get",
"(",
"current_user",
".",
"get_id",
"(",
")",
",",
"remote",
".",
"consumer_key",
",",
"token_type",
"=",
"token",
",",
")",
"if",
"remote_token",
"is",
"None",
":",
"return",
"None",
"# Store token and secret in session",
"session",
"[",
"session_key",
"]",
"=",
"remote_token",
".",
"token",
"(",
")",
"return",
"session",
".",
"get",
"(",
"session_key",
",",
"None",
")"
] | Retrieve OAuth access token.
Used by flask-oauthlib to get the access token when making requests.
:param remote: The remote application.
:param token: Type of token to get. Data passed from ``oauth.request()`` to
identify which token to retrieve. (Default: ``''``)
:returns: The token. | [
"Retrieve",
"OAuth",
"access",
"token",
"."
] | 2500dc6935738107617aeade79e050d7608004bb | https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/handlers.py#L172-L199 | train | 1,056 |
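
Because token_getter above is what flask-oauthlib consults for the access token, authenticated API calls need no explicit token handling. A hedged sketch; the remote-app name and endpoint are placeholders, and a request context with a logged-in user (or a token already in the session) is assumed:

.. code-block:: python

    from flask import current_app

    oauth = current_app.extensions['oauthlib.client']   # same lookup used above
    remote = oauth.remote_apps['github']                # placeholder remote-app name

    # flask-oauthlib calls token_getter(remote) behind the scenes to sign this:
    resp = remote.get('user')
    print(resp.status, resp.data)
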
inveniosoftware/invenio-oauthclient | invenio_oauthclient/handlers.py | token_delete | def token_delete(remote, token=''):
"""Remove OAuth access tokens from session.
:param remote: The remote application.
:param token: Type of token to get. Data passed from ``oauth.request()`` to
identify which token to retrieve. (Default: ``''``)
:returns: The token.
"""
session_key = token_session_key(remote.name)
return session.pop(session_key, None) | python | def token_delete(remote, token=''):
"""Remove OAuth access tokens from session.
:param remote: The remote application.
:param token: Type of token to get. Data passed from ``oauth.request()`` to
identify which token to retrieve. (Default: ``''``)
:returns: The token.
"""
session_key = token_session_key(remote.name)
return session.pop(session_key, None) | [
"def",
"token_delete",
"(",
"remote",
",",
"token",
"=",
"''",
")",
":",
"session_key",
"=",
"token_session_key",
"(",
"remote",
".",
"name",
")",
"return",
"session",
".",
"pop",
"(",
"session_key",
",",
"None",
")"
] | Remove OAuth access tokens from session.
:param remote: The remote application.
:param token: Type of token to get. Data passed from ``oauth.request()`` to
identify which token to retrieve. (Default: ``''``)
:returns: The token. | [
"Remove",
"OAuth",
"access",
"tokens",
"from",
"session",
"."
] | 2500dc6935738107617aeade79e050d7608004bb | https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/handlers.py#L202-L211 | train | 1,057 |
inveniosoftware/invenio-oauthclient | invenio_oauthclient/handlers.py | oauth_error_handler | def oauth_error_handler(f):
"""Decorator to handle exceptions."""
@wraps(f)
def inner(*args, **kwargs):
# OAuthErrors should not happen, so they are not caught here. Hence
# they will result in a 500 Internal Server Error which is what we
# are interested in.
try:
return f(*args, **kwargs)
except OAuthClientError as e:
current_app.logger.warning(e.message, exc_info=True)
return oauth2_handle_error(
e.remote, e.response, e.code, e.uri, e.description
)
except OAuthCERNRejectedAccountError as e:
current_app.logger.warning(e.message, exc_info=True)
flash(_('CERN account not allowed.'),
category='danger')
return redirect('/')
except OAuthRejectedRequestError:
flash(_('You rejected the authentication request.'),
category='info')
return redirect('/')
except AlreadyLinkedError:
flash(_('External service is already linked to another account.'),
category='danger')
return redirect(url_for('invenio_oauthclient_settings.index'))
return inner | python | def oauth_error_handler(f):
"""Decorator to handle exceptions."""
@wraps(f)
def inner(*args, **kwargs):
# OAuthErrors should not happen, so they are not caught here. Hence
# they will result in a 500 Internal Server Error which is what we
# are interested in.
try:
return f(*args, **kwargs)
except OAuthClientError as e:
current_app.logger.warning(e.message, exc_info=True)
return oauth2_handle_error(
e.remote, e.response, e.code, e.uri, e.description
)
except OAuthCERNRejectedAccountError as e:
current_app.logger.warning(e.message, exc_info=True)
flash(_('CERN account not allowed.'),
category='danger')
return redirect('/')
except OAuthRejectedRequestError:
flash(_('You rejected the authentication request.'),
category='info')
return redirect('/')
except AlreadyLinkedError:
flash(_('External service is already linked to another account.'),
category='danger')
return redirect(url_for('invenio_oauthclient_settings.index'))
return inner | [
"def",
"oauth_error_handler",
"(",
"f",
")",
":",
"@",
"wraps",
"(",
"f",
")",
"def",
"inner",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"# OAuthErrors should not happen, so they are not caught here. Hence",
"# they will result in a 500 Internal Server Error which is what we",
"# are interested in.",
"try",
":",
"return",
"f",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"except",
"OAuthClientError",
"as",
"e",
":",
"current_app",
".",
"logger",
".",
"warning",
"(",
"e",
".",
"message",
",",
"exc_info",
"=",
"True",
")",
"return",
"oauth2_handle_error",
"(",
"e",
".",
"remote",
",",
"e",
".",
"response",
",",
"e",
".",
"code",
",",
"e",
".",
"uri",
",",
"e",
".",
"description",
")",
"except",
"OAuthCERNRejectedAccountError",
"as",
"e",
":",
"current_app",
".",
"logger",
".",
"warning",
"(",
"e",
".",
"message",
",",
"exc_info",
"=",
"True",
")",
"flash",
"(",
"_",
"(",
"'CERN account not allowed.'",
")",
",",
"category",
"=",
"'danger'",
")",
"return",
"redirect",
"(",
"'/'",
")",
"except",
"OAuthRejectedRequestError",
":",
"flash",
"(",
"_",
"(",
"'You rejected the authentication request.'",
")",
",",
"category",
"=",
"'info'",
")",
"return",
"redirect",
"(",
"'/'",
")",
"except",
"AlreadyLinkedError",
":",
"flash",
"(",
"_",
"(",
"'External service is already linked to another account.'",
")",
",",
"category",
"=",
"'danger'",
")",
"return",
"redirect",
"(",
"url_for",
"(",
"'invenio_oauthclient_settings.index'",
")",
")",
"return",
"inner"
] | Decorator to handle exceptions. | [
"Decorator",
"to",
"handle",
"exceptions",
"."
] | 2500dc6935738107617aeade79e050d7608004bb | https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/handlers.py#L217-L244 | train | 1,058 |
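
A hedged sketch of a custom authorized handler wrapped with oauth_error_handler above. Because response_token_setter raises OAuthRejectedRequestError or OAuthClientError on bad responses, the decorator turns those into a flash message plus redirect and the handler body needs no try/except; the handler itself is illustrative, not part of the package:

.. code-block:: python

    from flask import redirect

    from invenio_oauthclient.handlers import (oauth_error_handler,
                                              response_token_setter)

    @oauth_error_handler
    def authorized_custom_handler(resp, remote, *args, **kwargs):
        # Raising is enough: the decorator converts the OAuth exceptions
        # into user-facing redirects as shown above.
        response_token_setter(remote, resp)
        return redirect('/')
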
inveniosoftware/invenio-oauthclient | invenio_oauthclient/handlers.py | authorized_default_handler | def authorized_default_handler(resp, remote, *args, **kwargs):
"""Store access token in session.
Default authorized handler.
:param remote: The remote application.
:param resp: The response.
:returns: Redirect response.
"""
response_token_setter(remote, resp)
db.session.commit()
return redirect(url_for('invenio_oauthclient_settings.index')) | python | def authorized_default_handler(resp, remote, *args, **kwargs):
"""Store access token in session.
Default authorized handler.
:param remote: The remote application.
:param resp: The response.
:returns: Redirect response.
"""
response_token_setter(remote, resp)
db.session.commit()
return redirect(url_for('invenio_oauthclient_settings.index')) | [
"def",
"authorized_default_handler",
"(",
"resp",
",",
"remote",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"response_token_setter",
"(",
"remote",
",",
"resp",
")",
"db",
".",
"session",
".",
"commit",
"(",
")",
"return",
"redirect",
"(",
"url_for",
"(",
"'invenio_oauthclient_settings.index'",
")",
")"
] | Store access token in session.
Default authorized handler.
:param remote: The remote application.
:param resp: The response.
:returns: Redirect response. | [
"Store",
"access",
"token",
"in",
"session",
"."
] | 2500dc6935738107617aeade79e050d7608004bb | https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/handlers.py#L251-L262 | train | 1,059 |
inveniosoftware/invenio-oauthclient | invenio_oauthclient/handlers.py | signup_handler | def signup_handler(remote, *args, **kwargs):
"""Handle extra signup information.
:param remote: The remote application.
:returns: Redirect response or the template rendered.
"""
# User already authenticated so move on
if current_user.is_authenticated:
return redirect('/')
# Retrieve token from session
oauth_token = token_getter(remote)
if not oauth_token:
return redirect('/')
session_prefix = token_session_key(remote.name)
# Test to see if this is coming from an authorized request
if not session.get(session_prefix + '_autoregister', False):
return redirect(url_for('.login', remote_app=remote.name))
form = create_registrationform(request.form)
if form.validate_on_submit():
account_info = session.get(session_prefix + '_account_info')
response = session.get(session_prefix + '_response')
# Register user
user = oauth_register(form)
if user is None:
raise OAuthError('Could not create user.', remote)
# Remove session key
session.pop(session_prefix + '_autoregister', None)
# Link account and set session data
token = token_setter(remote, oauth_token[0], secret=oauth_token[1],
user=user)
handlers = current_oauthclient.signup_handlers[remote.name]
if token is None:
raise OAuthError('Could not create token for user.', remote)
if not token.remote_account.extra_data:
account_setup = handlers['setup'](token, response)
account_setup_received.send(
remote, token=token, response=response,
account_setup=account_setup
)
# Registration has been finished
db.session.commit()
account_setup_committed.send(remote, token=token)
else:
# Registration has been finished
db.session.commit()
# Authenticate the user
if not oauth_authenticate(remote.consumer_key, user,
require_existing_link=False):
# Redirect the user after registration (which doesn't include the
# activation), waiting for user to confirm his email.
return redirect(url_for('security.login'))
# Remove account info from session
session.pop(session_prefix + '_account_info', None)
session.pop(session_prefix + '_response', None)
# Redirect to next
next_url = get_session_next_url(remote.name)
if next_url:
return redirect(next_url)
else:
return redirect('/')
# Pre-fill form
account_info = session.get(session_prefix + '_account_info')
if not form.is_submitted():
form = fill_form(form, account_info['user'])
return render_template(
current_app.config['OAUTHCLIENT_SIGNUP_TEMPLATE'],
form=form,
remote=remote,
app_title=current_app.config['OAUTHCLIENT_REMOTE_APPS'][
remote.name].get('title', ''),
app_description=current_app.config['OAUTHCLIENT_REMOTE_APPS'][
remote.name].get('description', ''),
app_icon=current_app.config['OAUTHCLIENT_REMOTE_APPS'][
remote.name].get('icon', None),
) | python | def signup_handler(remote, *args, **kwargs):
"""Handle extra signup information.
:param remote: The remote application.
:returns: Redirect response or the template rendered.
"""
# User already authenticated so move on
if current_user.is_authenticated:
return redirect('/')
# Retrieve token from session
oauth_token = token_getter(remote)
if not oauth_token:
return redirect('/')
session_prefix = token_session_key(remote.name)
# Test to see if this is coming from an authorized request
if not session.get(session_prefix + '_autoregister', False):
return redirect(url_for('.login', remote_app=remote.name))
form = create_registrationform(request.form)
if form.validate_on_submit():
account_info = session.get(session_prefix + '_account_info')
response = session.get(session_prefix + '_response')
# Register user
user = oauth_register(form)
if user is None:
raise OAuthError('Could not create user.', remote)
# Remove session key
session.pop(session_prefix + '_autoregister', None)
# Link account and set session data
token = token_setter(remote, oauth_token[0], secret=oauth_token[1],
user=user)
handlers = current_oauthclient.signup_handlers[remote.name]
if token is None:
raise OAuthError('Could not create token for user.', remote)
if not token.remote_account.extra_data:
account_setup = handlers['setup'](token, response)
account_setup_received.send(
remote, token=token, response=response,
account_setup=account_setup
)
# Registration has been finished
db.session.commit()
account_setup_committed.send(remote, token=token)
else:
# Registration has been finished
db.session.commit()
# Authenticate the user
if not oauth_authenticate(remote.consumer_key, user,
require_existing_link=False):
# Redirect the user after registration (which doesn't include the
# activation), waiting for user to confirm his email.
return redirect(url_for('security.login'))
# Remove account info from session
session.pop(session_prefix + '_account_info', None)
session.pop(session_prefix + '_response', None)
# Redirect to next
next_url = get_session_next_url(remote.name)
if next_url:
return redirect(next_url)
else:
return redirect('/')
# Pre-fill form
account_info = session.get(session_prefix + '_account_info')
if not form.is_submitted():
form = fill_form(form, account_info['user'])
return render_template(
current_app.config['OAUTHCLIENT_SIGNUP_TEMPLATE'],
form=form,
remote=remote,
app_title=current_app.config['OAUTHCLIENT_REMOTE_APPS'][
remote.name].get('title', ''),
app_description=current_app.config['OAUTHCLIENT_REMOTE_APPS'][
remote.name].get('description', ''),
app_icon=current_app.config['OAUTHCLIENT_REMOTE_APPS'][
remote.name].get('icon', None),
) | [
"def",
"signup_handler",
"(",
"remote",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"# User already authenticated so move on",
"if",
"current_user",
".",
"is_authenticated",
":",
"return",
"redirect",
"(",
"'/'",
")",
"# Retrieve token from session",
"oauth_token",
"=",
"token_getter",
"(",
"remote",
")",
"if",
"not",
"oauth_token",
":",
"return",
"redirect",
"(",
"'/'",
")",
"session_prefix",
"=",
"token_session_key",
"(",
"remote",
".",
"name",
")",
"# Test to see if this is coming from on authorized request",
"if",
"not",
"session",
".",
"get",
"(",
"session_prefix",
"+",
"'_autoregister'",
",",
"False",
")",
":",
"return",
"redirect",
"(",
"url_for",
"(",
"'.login'",
",",
"remote_app",
"=",
"remote",
".",
"name",
")",
")",
"form",
"=",
"create_registrationform",
"(",
"request",
".",
"form",
")",
"if",
"form",
".",
"validate_on_submit",
"(",
")",
":",
"account_info",
"=",
"session",
".",
"get",
"(",
"session_prefix",
"+",
"'_account_info'",
")",
"response",
"=",
"session",
".",
"get",
"(",
"session_prefix",
"+",
"'_response'",
")",
"# Register user",
"user",
"=",
"oauth_register",
"(",
"form",
")",
"if",
"user",
"is",
"None",
":",
"raise",
"OAuthError",
"(",
"'Could not create user.'",
",",
"remote",
")",
"# Remove session key",
"session",
".",
"pop",
"(",
"session_prefix",
"+",
"'_autoregister'",
",",
"None",
")",
"# Link account and set session data",
"token",
"=",
"token_setter",
"(",
"remote",
",",
"oauth_token",
"[",
"0",
"]",
",",
"secret",
"=",
"oauth_token",
"[",
"1",
"]",
",",
"user",
"=",
"user",
")",
"handlers",
"=",
"current_oauthclient",
".",
"signup_handlers",
"[",
"remote",
".",
"name",
"]",
"if",
"token",
"is",
"None",
":",
"raise",
"OAuthError",
"(",
"'Could not create token for user.'",
",",
"remote",
")",
"if",
"not",
"token",
".",
"remote_account",
".",
"extra_data",
":",
"account_setup",
"=",
"handlers",
"[",
"'setup'",
"]",
"(",
"token",
",",
"response",
")",
"account_setup_received",
".",
"send",
"(",
"remote",
",",
"token",
"=",
"token",
",",
"response",
"=",
"response",
",",
"account_setup",
"=",
"account_setup",
")",
"# Registration has been finished",
"db",
".",
"session",
".",
"commit",
"(",
")",
"account_setup_committed",
".",
"send",
"(",
"remote",
",",
"token",
"=",
"token",
")",
"else",
":",
"# Registration has been finished",
"db",
".",
"session",
".",
"commit",
"(",
")",
"# Authenticate the user",
"if",
"not",
"oauth_authenticate",
"(",
"remote",
".",
"consumer_key",
",",
"user",
",",
"require_existing_link",
"=",
"False",
")",
":",
"# Redirect the user after registration (which doesn't include the",
"# activation), waiting for user to confirm his email.",
"return",
"redirect",
"(",
"url_for",
"(",
"'security.login'",
")",
")",
"# Remove account info from session",
"session",
".",
"pop",
"(",
"session_prefix",
"+",
"'_account_info'",
",",
"None",
")",
"session",
".",
"pop",
"(",
"session_prefix",
"+",
"'_response'",
",",
"None",
")",
"# Redirect to next",
"next_url",
"=",
"get_session_next_url",
"(",
"remote",
".",
"name",
")",
"if",
"next_url",
":",
"return",
"redirect",
"(",
"next_url",
")",
"else",
":",
"return",
"redirect",
"(",
"'/'",
")",
"# Pre-fill form",
"account_info",
"=",
"session",
".",
"get",
"(",
"session_prefix",
"+",
"'_account_info'",
")",
"if",
"not",
"form",
".",
"is_submitted",
"(",
")",
":",
"form",
"=",
"fill_form",
"(",
"form",
",",
"account_info",
"[",
"'user'",
"]",
")",
"return",
"render_template",
"(",
"current_app",
".",
"config",
"[",
"'OAUTHCLIENT_SIGNUP_TEMPLATE'",
"]",
",",
"form",
"=",
"form",
",",
"remote",
"=",
"remote",
",",
"app_title",
"=",
"current_app",
".",
"config",
"[",
"'OAUTHCLIENT_REMOTE_APPS'",
"]",
"[",
"remote",
".",
"name",
"]",
".",
"get",
"(",
"'title'",
",",
"''",
")",
",",
"app_description",
"=",
"current_app",
".",
"config",
"[",
"'OAUTHCLIENT_REMOTE_APPS'",
"]",
"[",
"remote",
".",
"name",
"]",
".",
"get",
"(",
"'description'",
",",
"''",
")",
",",
"app_icon",
"=",
"current_app",
".",
"config",
"[",
"'OAUTHCLIENT_REMOTE_APPS'",
"]",
"[",
"remote",
".",
"name",
"]",
".",
"get",
"(",
"'icon'",
",",
"None",
")",
",",
")"
] | Handle extra signup information.
:param remote: The remote application.
:returns: Redirect response or the template rendered. | [
"Handle",
"extra",
"signup",
"information",
"."
] | 2500dc6935738107617aeade79e050d7608004bb | https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/handlers.py#L376-L466 | train | 1,060 |
inveniosoftware/invenio-oauthclient | invenio_oauthclient/handlers.py | oauth_logout_handler | def oauth_logout_handler(sender_app, user=None):
"""Remove all access tokens from session on logout."""
oauth = current_app.extensions['oauthlib.client']
for remote in oauth.remote_apps.values():
token_delete(remote)
db.session.commit() | python | def oauth_logout_handler(sender_app, user=None):
"""Remove all access tokens from session on logout."""
oauth = current_app.extensions['oauthlib.client']
for remote in oauth.remote_apps.values():
token_delete(remote)
db.session.commit() | [
"def",
"oauth_logout_handler",
"(",
"sender_app",
",",
"user",
"=",
"None",
")",
":",
"oauth",
"=",
"current_app",
".",
"extensions",
"[",
"'oauthlib.client'",
"]",
"for",
"remote",
"in",
"oauth",
".",
"remote_apps",
".",
"values",
"(",
")",
":",
"token_delete",
"(",
"remote",
")",
"db",
".",
"session",
".",
"commit",
"(",
")"
] | Remove all access tokens from session on logout. | [
"Remove",
"all",
"access",
"tokens",
"from",
"session",
"on",
"logout",
"."
] | 2500dc6935738107617aeade79e050d7608004bb | https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/handlers.py#L469-L474 | train | 1,061 |
inveniosoftware/invenio-oauthclient | invenio_oauthclient/handlers.py | make_handler | def make_handler(f, remote, with_response=True):
"""Make a handler for authorized and disconnect callbacks.
:param f: Callable or an import path to a callable
"""
if isinstance(f, six.string_types):
f = import_string(f)
@wraps(f)
def inner(*args, **kwargs):
if with_response:
return f(args[0], remote, *args[1:], **kwargs)
else:
return f(remote, *args, **kwargs)
return inner | python | def make_handler(f, remote, with_response=True):
"""Make a handler for authorized and disconnect callbacks.
:param f: Callable or an import path to a callable
"""
if isinstance(f, six.string_types):
f = import_string(f)
@wraps(f)
def inner(*args, **kwargs):
if with_response:
return f(args[0], remote, *args[1:], **kwargs)
else:
return f(remote, *args, **kwargs)
return inner | [
"def",
"make_handler",
"(",
"f",
",",
"remote",
",",
"with_response",
"=",
"True",
")",
":",
"if",
"isinstance",
"(",
"f",
",",
"six",
".",
"string_types",
")",
":",
"f",
"=",
"import_string",
"(",
"f",
")",
"@",
"wraps",
"(",
"f",
")",
"def",
"inner",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"with_response",
":",
"return",
"f",
"(",
"args",
"[",
"0",
"]",
",",
"remote",
",",
"*",
"args",
"[",
"1",
":",
"]",
",",
"*",
"*",
"kwargs",
")",
"else",
":",
"return",
"f",
"(",
"remote",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"inner"
] | Make a handler for authorized and disconnect callbacks.
:param f: Callable or an import path to a callable | [
"Make",
"a",
"handler",
"for",
"authorized",
"and",
"disconnect",
"callbacks",
"."
] | 2500dc6935738107617aeade79e050d7608004bb | https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/handlers.py#L480-L494 | train | 1,062 |
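
make_handler above accepts either a callable or an import path. A hedged sketch of wrapping both forms for one remote application; the remote-app name is a placeholder, an application context is assumed, and the import path, pointing at this module's default disconnect handler, is an assumption:

.. code-block:: python

    from flask import current_app
    from invenio_oauthclient.handlers import (authorized_default_handler,
                                              make_handler)

    oauth = current_app.extensions['oauthlib.client']
    remote = oauth.remote_apps['github']          # placeholder remote-app name

    # A callable is wrapped directly ...
    authorized = make_handler(authorized_default_handler, remote)

    # ... and an import path is resolved with import_string first:
    disconnect = make_handler('invenio_oauthclient.handlers:disconnect_handler',
                              remote, with_response=False)
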
SylvanasSun/python-common-cache | common_cache/__init__.py | _enable_lock | def _enable_lock(func):
"""
The decorator for ensuring thread-safety when the current cache instance is in concurrent mode.
"""
@functools.wraps(func)
def wrapper(*args, **kwargs):
self = args[0]
if self.is_concurrent:
only_read = kwargs.get('only_read')
if only_read is None or only_read:
with self._rwlock:
return func(*args, **kwargs)
else:
self._rwlock.acquire_writer()
try:
return func(*args, **kwargs)
finally:
self._rwlock.release()
else:
return func(*args, **kwargs)
return wrapper | python | def _enable_lock(func):
"""
The decorator for ensuring thread safety when the current cache instance is in concurrent mode.
"""
@functools.wraps(func)
def wrapper(*args, **kwargs):
self = args[0]
if self.is_concurrent:
only_read = kwargs.get('only_read')
if only_read is None or only_read:
with self._rwlock:
return func(*args, **kwargs)
else:
self._rwlock.acquire_writer()
try:
return func(*args, **kwargs)
finally:
self._rwlock.release()
else:
return func(*args, **kwargs)
return wrapper | [
"def",
"_enable_lock",
"(",
"func",
")",
":",
"@",
"functools",
".",
"wraps",
"(",
"func",
")",
"def",
"wrapper",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
"=",
"args",
"[",
"0",
"]",
"if",
"self",
".",
"is_concurrent",
":",
"only_read",
"=",
"kwargs",
".",
"get",
"(",
"'only_read'",
")",
"if",
"only_read",
"is",
"None",
"or",
"only_read",
":",
"with",
"self",
".",
"_rwlock",
":",
"return",
"func",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"else",
":",
"self",
".",
"_rwlock",
".",
"acquire_writer",
"(",
")",
"try",
":",
"return",
"func",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"finally",
":",
"self",
".",
"_rwlock",
".",
"release",
"(",
")",
"else",
":",
"return",
"func",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"wrapper"
] | The decorator for ensuring thread safety when the current cache instance is in concurrent mode. | [
"The",
"decorator",
"for",
"ensuring",
"thread",
"-",
"safe",
"when",
"current",
"cache",
"instance",
"is",
"concurrent",
"status",
"."
] | f113eb3cd751eed5ab5373e8610a31a444220cf8 | https://github.com/SylvanasSun/python-common-cache/blob/f113eb3cd751eed5ab5373e8610a31a444220cf8/common_cache/__init__.py#L18-L40 | train | 1,063 |
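A simplified, runnable sketch of the locking decorator above, assuming a plain `threading.RLock` in place of the cache's reader/writer lock (so the `only_read` distinction is dropped); class and attribute names are invented for the demo.

```python
# Minimal version of the _enable_lock idea with a single re-entrant lock.
import functools
import threading


def enable_lock(func):
    @functools.wraps(func)
    def wrapper(self, *args, **kwargs):
        if self.is_concurrent:
            with self._lock:
                return func(self, *args, **kwargs)
        return func(self, *args, **kwargs)
    return wrapper


class TinyCache(object):
    def __init__(self, is_concurrent=True):
        self.is_concurrent = is_concurrent
        self._lock = threading.RLock()
        self._data = {}

    @enable_lock
    def put(self, key, value):
        self._data[key] = value

    @enable_lock
    def get(self, key):
        return self._data.get(key)


cache = TinyCache()
cache.put('a', 1)
print(cache.get('a'))  # -> 1
```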
SylvanasSun/python-common-cache | common_cache/__init__.py | _enable_cleanup | def _enable_cleanup(func):
"""
Execute cleanup operation when the decorated function completed.
"""
@functools.wraps(func)
def wrapper(*args, **kwargs):
self = args[0]
result = func(*args, **kwargs)
self.cleanup(self)
return result
return wrapper | python | def _enable_cleanup(func):
"""
Execute cleanup operation when the decorated function completed.
"""
@functools.wraps(func)
def wrapper(*args, **kwargs):
self = args[0]
result = func(*args, **kwargs)
self.cleanup(self)
return result
return wrapper | [
"def",
"_enable_cleanup",
"(",
"func",
")",
":",
"@",
"functools",
".",
"wraps",
"(",
"func",
")",
"def",
"wrapper",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
"=",
"args",
"[",
"0",
"]",
"result",
"=",
"func",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"self",
".",
"cleanup",
"(",
"self",
")",
"return",
"result",
"return",
"wrapper"
] | Execute cleanup operation when the decorated function completed. | [
"Execute",
"cleanup",
"operation",
"when",
"the",
"decorated",
"function",
"completed",
"."
] | f113eb3cd751eed5ab5373e8610a31a444220cf8 | https://github.com/SylvanasSun/python-common-cache/blob/f113eb3cd751eed5ab5373e8610a31a444220cf8/common_cache/__init__.py#L43-L55 | train | 1,064 |
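The cleanup decorator above calls `self.cleanup(self)`, which suggests the cleanup routine is a strategy-style hook stored on the instance rather than a bound method. A small sketch of that arrangement, with hypothetical names:

```python
# Sketch of the _enable_cleanup pattern: run the method, then the cleanup hook.
import functools


def enable_cleanup(func):
    @functools.wraps(func)
    def wrapper(self, *args, **kwargs):
        result = func(self, *args, **kwargs)
        self.cleanup(self)          # strategy-style hook, hence cleanup(self)
        return result
    return wrapper


class Store(object):
    def __init__(self, cleanup):
        self.items = {}
        self.cleaned = 0
        self.cleanup = cleanup      # plain function stored on the instance

    @enable_cleanup
    def put(self, key, value):
        self.items[key] = value


def count_cleanups(store):
    store.cleaned += 1


s = Store(cleanup=count_cleanups)
s.put('x', 1)
print(s.cleaned)  # -> 1
```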
SylvanasSun/python-common-cache | common_cache/__init__.py | _enable_thread_pool | def _enable_thread_pool(func):
"""
Use thread pool for executing a task if self.enable_thread_pool is True.
Return an instance of future when the flag is_async is True; otherwise block
waiting for the result until the timeout and then return the result.
"""
@functools.wraps(func)
def wrapper(*args, **kwargs):
self = args[0]
if self.enable_thread_pool and hasattr(self, 'thread_pool'):
future = self.thread_pool.submit(func, *args, **kwargs)
is_async = kwargs.get('is_async')
if is_async is None or not is_async:
timeout = kwargs.get('timeout')
if timeout is None:
timeout = 2
try:
result = future.result(timeout=timeout)
except TimeoutError as e:
self.logger.exception(e)
result = None
return result
return future
else:
return func(*args, **kwargs)
return wrapper | python | def _enable_thread_pool(func):
"""
Use thread pool for executing a task if self.enable_thread_pool is True.
Return an instance of future when the flag is_async is True; otherwise block
waiting for the result until the timeout and then return the result.
"""
@functools.wraps(func)
def wrapper(*args, **kwargs):
self = args[0]
if self.enable_thread_pool and hasattr(self, 'thread_pool'):
future = self.thread_pool.submit(func, *args, **kwargs)
is_async = kwargs.get('is_async')
if is_async is None or not is_async:
timeout = kwargs.get('timeout')
if timeout is None:
timeout = 2
try:
result = future.result(timeout=timeout)
except TimeoutError as e:
self.logger.exception(e)
result = None
return result
return future
else:
return func(*args, **kwargs)
return wrapper | [
"def",
"_enable_thread_pool",
"(",
"func",
")",
":",
"@",
"functools",
".",
"wraps",
"(",
"func",
")",
"def",
"wrapper",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
"=",
"args",
"[",
"0",
"]",
"if",
"self",
".",
"enable_thread_pool",
"and",
"hasattr",
"(",
"self",
",",
"'thread_pool'",
")",
":",
"future",
"=",
"self",
".",
"thread_pool",
".",
"submit",
"(",
"func",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"is_async",
"=",
"kwargs",
".",
"get",
"(",
"'is_async'",
")",
"if",
"is_async",
"is",
"None",
"or",
"not",
"is_async",
":",
"timeout",
"=",
"kwargs",
".",
"get",
"(",
"'timeout'",
")",
"if",
"timeout",
"is",
"None",
":",
"timeout",
"=",
"2",
"try",
":",
"result",
"=",
"future",
".",
"result",
"(",
"timeout",
"=",
"timeout",
")",
"except",
"TimeoutError",
"as",
"e",
":",
"self",
".",
"logger",
".",
"exception",
"(",
"e",
")",
"result",
"=",
"None",
"return",
"result",
"return",
"future",
"else",
":",
"return",
"func",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"wrapper"
] | Use thread pool for executing a task if self.enable_thread_pool is True.
Return an instance of future when the flag is_async is True; otherwise block
waiting for the result until the timeout and then return the result. | [
"Use",
"thread",
"pool",
"for",
"executing",
"a",
"task",
"if",
"self",
".",
"enable_thread_pool",
"is",
"True",
"."
] | f113eb3cd751eed5ab5373e8610a31a444220cf8 | https://github.com/SylvanasSun/python-common-cache/blob/f113eb3cd751eed5ab5373e8610a31a444220cf8/common_cache/__init__.py#L58-L86 | train | 1,065 |
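Stripped of the cache-specific flags, the decorator above is the standard submit-then-wait-with-timeout idiom from `concurrent.futures`. A stand-alone sketch follows; the helper function and the 2-second timeout mirror the record but are otherwise arbitrary.

```python
# Submit work to a pool, then either keep the Future or wait with a timeout.
from concurrent.futures import ThreadPoolExecutor, TimeoutError
import time


def slow_square(x, delay=0.1):
    time.sleep(delay)
    return x * x


pool = ThreadPoolExecutor(max_workers=2)

# synchronous style: wait up to `timeout` seconds, fall back to None
future = pool.submit(slow_square, 7)
try:
    result = future.result(timeout=2)
except TimeoutError:
    result = None
print(result)  # -> 49

# asynchronous style: the caller keeps the Future and resolves it later
future = pool.submit(slow_square, 8)
print(future.result())  # -> 64
pool.shutdown()
```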
SylvanasSun/python-common-cache | common_cache/__init__.py | Cache.statistic_record | def statistic_record(self, desc=True, timeout=3, is_async=False, only_read=True, *keys):
"""
Returns a list in which each element is a dictionary of the statistic info of a cache item.
"""
if len(keys) == 0:
records = self._generate_statistic_records()
else:
records = self._generate_statistic_records_by_keys(keys)
return sorted(records, key=lambda t: t['hit_counts'], reverse=desc) | python | def statistic_record(self, desc=True, timeout=3, is_async=False, only_read=True, *keys):
"""
Returns a list in which each element is a dictionary of the statistic info of a cache item.
"""
if len(keys) == 0:
records = self._generate_statistic_records()
else:
records = self._generate_statistic_records_by_keys(keys)
return sorted(records, key=lambda t: t['hit_counts'], reverse=desc) | [
"def",
"statistic_record",
"(",
"self",
",",
"desc",
"=",
"True",
",",
"timeout",
"=",
"3",
",",
"is_async",
"=",
"False",
",",
"only_read",
"=",
"True",
",",
"*",
"keys",
")",
":",
"if",
"len",
"(",
"keys",
")",
"==",
"0",
":",
"records",
"=",
"self",
".",
"_generate_statistic_records",
"(",
")",
"else",
":",
"records",
"=",
"self",
".",
"_generate_statistic_records_by_keys",
"(",
"keys",
")",
"return",
"sorted",
"(",
"records",
",",
"key",
"=",
"lambda",
"t",
":",
"t",
"[",
"'hit_counts'",
"]",
",",
"reverse",
"=",
"desc",
")"
] | Returns a list in which each element is a dictionary of the statistic info of a cache item. | [
"Returns",
"a",
"list",
"that",
"each",
"element",
"is",
"a",
"dictionary",
"of",
"the",
"statistic",
"info",
"of",
"the",
"cache",
"item",
"."
] | f113eb3cd751eed5ab5373e8610a31a444220cf8 | https://github.com/SylvanasSun/python-common-cache/blob/f113eb3cd751eed5ab5373e8610a31a444220cf8/common_cache/__init__.py#L589-L597 | train | 1,066 |
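The ranking step at the end of `statistic_record` is an ordinary sort of dictionaries by their hit counter; a tiny illustration with made-up records:

```python
# Sort stat records by hit count, highest first.
records = [
    {'key': 'a', 'hit_counts': 3},
    {'key': 'b', 'hit_counts': 9},
    {'key': 'c', 'hit_counts': 1},
]
by_hits_desc = sorted(records, key=lambda r: r['hit_counts'], reverse=True)
print([r['key'] for r in by_hits_desc])  # -> ['b', 'a', 'c']
```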
dourvaris/nano-python | src/nano/ed25519_blake2.py | signature_unsafe | def signature_unsafe(m, sk, pk, hash_func=H):
"""
Not safe to use with secret keys or secret data.
See module docstring. This function should be used for testing only.
"""
h = hash_func(sk)
a = 2 ** (b - 2) + sum(2 ** i * bit(h, i) for i in range(3, b - 2))
r = Hint(bytearray([h[j] for j in range(b // 8, b // 4)]) + m)
R = scalarmult_B(r)
S = (r + Hint(encodepoint(R) + pk + m) * a) % l
return bytes(encodepoint(R) + encodeint(S)) | python | def signature_unsafe(m, sk, pk, hash_func=H):
"""
Not safe to use with secret keys or secret data.
See module docstring. This function should be used for testing only.
"""
h = hash_func(sk)
a = 2 ** (b - 2) + sum(2 ** i * bit(h, i) for i in range(3, b - 2))
r = Hint(bytearray([h[j] for j in range(b // 8, b // 4)]) + m)
R = scalarmult_B(r)
S = (r + Hint(encodepoint(R) + pk + m) * a) % l
return bytes(encodepoint(R) + encodeint(S)) | [
"def",
"signature_unsafe",
"(",
"m",
",",
"sk",
",",
"pk",
",",
"hash_func",
"=",
"H",
")",
":",
"h",
"=",
"hash_func",
"(",
"sk",
")",
"a",
"=",
"2",
"**",
"(",
"b",
"-",
"2",
")",
"+",
"sum",
"(",
"2",
"**",
"i",
"*",
"bit",
"(",
"h",
",",
"i",
")",
"for",
"i",
"in",
"range",
"(",
"3",
",",
"b",
"-",
"2",
")",
")",
"r",
"=",
"Hint",
"(",
"bytearray",
"(",
"[",
"h",
"[",
"j",
"]",
"for",
"j",
"in",
"range",
"(",
"b",
"//",
"8",
",",
"b",
"//",
"4",
")",
"]",
")",
"+",
"m",
")",
"R",
"=",
"scalarmult_B",
"(",
"r",
")",
"S",
"=",
"(",
"r",
"+",
"Hint",
"(",
"encodepoint",
"(",
"R",
")",
"+",
"pk",
"+",
"m",
")",
"*",
"a",
")",
"%",
"l",
"return",
"bytes",
"(",
"encodepoint",
"(",
"R",
")",
"+",
"encodeint",
"(",
"S",
")",
")"
] | Not safe to use with secret keys or secret data.
See module docstring. This function should be used for testing only. | [
"Not",
"safe",
"to",
"use",
"with",
"secret",
"keys",
"or",
"secret",
"data",
".",
"See",
"module",
"docstring",
".",
"This",
"function",
"should",
"be",
"used",
"for",
"testing",
"only",
"."
] | f26b8bc895b997067780f925049a70e82c0c2479 | https://github.com/dourvaris/nano-python/blob/f26b8bc895b997067780f925049a70e82c0c2479/src/nano/ed25519_blake2.py#L214-L224 | train | 1,067 |
dourvaris/nano-python | src/nano/ed25519_blake2.py | checkvalid | def checkvalid(s, m, pk):
"""
Not safe to use when any argument is secret.
See module docstring. This function should be used only for
verifying public signatures of public messages.
"""
if len(s) != b // 4:
raise ValueError("signature length is wrong")
if len(pk) != b // 8:
raise ValueError("public-key length is wrong")
s = bytearray(s)
m = bytearray(m)
pk = bytearray(pk)
R = decodepoint(s[: b // 8])
A = decodepoint(pk)
S = decodeint(s[b // 8 : b // 4])
h = Hint(encodepoint(R) + pk + m)
(x1, y1, z1, t1) = P = scalarmult_B(S)
(x2, y2, z2, t2) = Q = edwards_add(R, scalarmult(A, h))
if (
not isoncurve(P)
or not isoncurve(Q)
or (x1 * z2 - x2 * z1) % q != 0
or (y1 * z2 - y2 * z1) % q != 0
):
raise SignatureMismatch("signature does not pass verification") | python | def checkvalid(s, m, pk):
"""
Not safe to use when any argument is secret.
See module docstring. This function should be used only for
verifying public signatures of public messages.
"""
if len(s) != b // 4:
raise ValueError("signature length is wrong")
if len(pk) != b // 8:
raise ValueError("public-key length is wrong")
s = bytearray(s)
m = bytearray(m)
pk = bytearray(pk)
R = decodepoint(s[: b // 8])
A = decodepoint(pk)
S = decodeint(s[b // 8 : b // 4])
h = Hint(encodepoint(R) + pk + m)
(x1, y1, z1, t1) = P = scalarmult_B(S)
(x2, y2, z2, t2) = Q = edwards_add(R, scalarmult(A, h))
if (
not isoncurve(P)
or not isoncurve(Q)
or (x1 * z2 - x2 * z1) % q != 0
or (y1 * z2 - y2 * z1) % q != 0
):
raise SignatureMismatch("signature does not pass verification") | [
"def",
"checkvalid",
"(",
"s",
",",
"m",
",",
"pk",
")",
":",
"if",
"len",
"(",
"s",
")",
"!=",
"b",
"//",
"4",
":",
"raise",
"ValueError",
"(",
"\"signature length is wrong\"",
")",
"if",
"len",
"(",
"pk",
")",
"!=",
"b",
"//",
"8",
":",
"raise",
"ValueError",
"(",
"\"public-key length is wrong\"",
")",
"s",
"=",
"bytearray",
"(",
"s",
")",
"m",
"=",
"bytearray",
"(",
"m",
")",
"pk",
"=",
"bytearray",
"(",
"pk",
")",
"R",
"=",
"decodepoint",
"(",
"s",
"[",
":",
"b",
"//",
"8",
"]",
")",
"A",
"=",
"decodepoint",
"(",
"pk",
")",
"S",
"=",
"decodeint",
"(",
"s",
"[",
"b",
"//",
"8",
":",
"b",
"//",
"4",
"]",
")",
"h",
"=",
"Hint",
"(",
"encodepoint",
"(",
"R",
")",
"+",
"pk",
"+",
"m",
")",
"(",
"x1",
",",
"y1",
",",
"z1",
",",
"t1",
")",
"=",
"P",
"=",
"scalarmult_B",
"(",
"S",
")",
"(",
"x2",
",",
"y2",
",",
"z2",
",",
"t2",
")",
"=",
"Q",
"=",
"edwards_add",
"(",
"R",
",",
"scalarmult",
"(",
"A",
",",
"h",
")",
")",
"if",
"(",
"not",
"isoncurve",
"(",
"P",
")",
"or",
"not",
"isoncurve",
"(",
"Q",
")",
"or",
"(",
"x1",
"*",
"z2",
"-",
"x2",
"*",
"z1",
")",
"%",
"q",
"!=",
"0",
"or",
"(",
"y1",
"*",
"z2",
"-",
"y2",
"*",
"z1",
")",
"%",
"q",
"!=",
"0",
")",
":",
"raise",
"SignatureMismatch",
"(",
"\"signature does not pass verification\"",
")"
] | Not safe to use when any argument is secret.
See module docstring. This function should be used only for
verifying public signatures of public messages. | [
"Not",
"safe",
"to",
"use",
"when",
"any",
"argument",
"is",
"secret",
".",
"See",
"module",
"docstring",
".",
"This",
"function",
"should",
"be",
"used",
"only",
"for",
"verifying",
"public",
"signatures",
"of",
"public",
"messages",
"."
] | f26b8bc895b997067780f925049a70e82c0c2479 | https://github.com/dourvaris/nano-python/blob/f26b8bc895b997067780f925049a70e82c0c2479/src/nano/ed25519_blake2.py#L255-L285 | train | 1,068 |
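A hedged usage sketch for the verifier above. It assumes the module is importable as `nano.ed25519_blake2` (matching the path in this record) and that `SignatureMismatch` is exported from the same module; the key and signature bytes are dummy placeholders, so rejection is the expected outcome — the point is only the call-and-catch pattern.

```python
# Verify-and-catch pattern for checkvalid(s, m, pk); dummy inputs, expected to fail.
from nano.ed25519_blake2 import checkvalid, SignatureMismatch

message = b'block digest goes here'
public_key = bytes(32)   # placeholder, not a real Nano account key
signature = bytes(64)    # placeholder, not a real signature

try:
    checkvalid(signature, message, public_key)
    print('signature ok')
except (ValueError, SignatureMismatch) as exc:
    # ValueError covers malformed inputs, SignatureMismatch a failed check
    print('rejected:', exc)
```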
prosegrinder/python-cmudict | cmudict/__init__.py | dict | def dict():
"""
Compatibility with NLTK.
Returns the cmudict lexicon as a dictionary, whose keys are
lowercase words and whose values are lists of pronunciations.
"""
default = defaultdict(list)
for key, value in entries():
default[key].append(value)
return default | python | def dict():
"""
Compatibility with NLTK.
Returns the cmudict lexicon as a dictionary, whose keys are
lowercase words and whose values are lists of pronunciations.
"""
default = defaultdict(list)
for key, value in entries():
default[key].append(value)
return default | [
"def",
"dict",
"(",
")",
":",
"default",
"=",
"defaultdict",
"(",
"list",
")",
"for",
"key",
",",
"value",
"in",
"entries",
"(",
")",
":",
"default",
"[",
"key",
"]",
".",
"append",
"(",
"value",
")",
"return",
"default"
] | Compatibility with NLTK.
Returns the cmudict lexicon as a dictionary, whose keys are
lowercase words and whose values are lists of pronunciations. | [
"Compatibility",
"with",
"NLTK",
".",
"Returns",
"the",
"cmudict",
"lexicon",
"as",
"a",
"dictionary",
"whose",
"keys",
"are",
"lowercase",
"words",
"and",
"whose",
"values",
"are",
"lists",
"of",
"pronunciations",
"."
] | e7af7ae9e923add04e14fa303ad44d5abd0cc20a | https://github.com/prosegrinder/python-cmudict/blob/e7af7ae9e923add04e14fa303ad44d5abd0cc20a/cmudict/__init__.py#L42-L51 | train | 1,069 |
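Typical use of the accessor above (it deliberately shadows the built-in `dict`, mirroring NLTK's interface). This assumes the `cmudict` package is installed; the looked-up word and the shown pronunciation are illustrative.

```python
# Look up a word in the CMU pronouncing dictionary.
import cmudict

pronouncing = cmudict.dict()        # {'word': [[phoneme, ...], ...]}
print(pronouncing['python'])        # e.g. [['P', 'AY1', 'TH', 'AA0', 'N']]
print(len(pronouncing))             # number of distinct lowercase headwords
```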
prosegrinder/python-cmudict | cmudict/__init__.py | symbols | def symbols():
"""Return a list of symbols."""
symbols = []
for line in symbols_stream():
symbols.append(line.decode('utf-8').strip())
return symbols | python | def symbols():
"""Return a list of symbols."""
symbols = []
for line in symbols_stream():
symbols.append(line.decode('utf-8').strip())
return symbols | [
"def",
"symbols",
"(",
")",
":",
"symbols",
"=",
"[",
"]",
"for",
"line",
"in",
"symbols_stream",
"(",
")",
":",
"symbols",
".",
"append",
"(",
"line",
".",
"decode",
"(",
"'utf-8'",
")",
".",
"strip",
"(",
")",
")",
"return",
"symbols"
] | Return a list of symbols. | [
"Return",
"a",
"list",
"of",
"symbols",
"."
] | e7af7ae9e923add04e14fa303ad44d5abd0cc20a | https://github.com/prosegrinder/python-cmudict/blob/e7af7ae9e923add04e14fa303ad44d5abd0cc20a/cmudict/__init__.py#L92-L97 | train | 1,070 |
mcieslik-mctp/papy | src/papy/core.py | Dagger.connect_inputs | def connect_inputs(self, datas):
"""
Connects input ``Pipers`` to "datas" input data in the correct order
determined by the ``Piper.ornament`` attribute and the ``Dagger._cmp``
function.
It is assumed that the input data is in the form of an iterator and
that all inputs have the same number of input items. A pipeline will
**deadlock** otherwise.
Arguments:
- datas (sequence of sequences) An ordered sequence of inputs for
all input ``Pipers``.
"""
start_pipers = self.get_inputs()
self.log.debug('%s trying to connect inputs in the order %s' % \
(repr(self), repr(start_pipers)))
for piper, data in izip(start_pipers, datas):
piper.connect([data])
self.log.debug('%s succesfuly connected inputs' % repr(self)) | python | def connect_inputs(self, datas):
"""
Connects input ``Pipers`` to "datas" input data in the correct order
determined by the ``Piper.ornament`` attribute and the ``Dagger._cmp``
function.
It is assumed that the input data is in the form of an iterator and
that all inputs have the same number of input items. A pipeline will
**deadlock** otherwise.
Arguments:
- datas (sequence of sequences) An ordered sequence of inputs for
all input ``Pipers``.
"""
start_pipers = self.get_inputs()
self.log.debug('%s trying to connect inputs in the order %s' % \
(repr(self), repr(start_pipers)))
for piper, data in izip(start_pipers, datas):
piper.connect([data])
self.log.debug('%s succesfuly connected inputs' % repr(self)) | [
"def",
"connect_inputs",
"(",
"self",
",",
"datas",
")",
":",
"start_pipers",
"=",
"self",
".",
"get_inputs",
"(",
")",
"self",
".",
"log",
".",
"debug",
"(",
"'%s trying to connect inputs in the order %s'",
"%",
"(",
"repr",
"(",
"self",
")",
",",
"repr",
"(",
"start_pipers",
")",
")",
")",
"for",
"piper",
",",
"data",
"in",
"izip",
"(",
"start_pipers",
",",
"datas",
")",
":",
"piper",
".",
"connect",
"(",
"[",
"data",
"]",
")",
"self",
".",
"log",
".",
"debug",
"(",
"'%s succesfuly connected inputs'",
"%",
"repr",
"(",
"self",
")",
")"
] | Connects input ``Pipers`` to "datas" input data in the correct order
determined by the ``Piper.ornament`` attribute and the ``Dagger._cmp``
function.
It is assumed that the input data is in the form of an iterator and
that all inputs have the same number of input items. A pipeline will
**deadlock** otherwise.
Arguments:
- datas (sequence of sequences) An ordered sequence of inputs for
all input ``Pipers``. | [
"Connects",
"input",
"Pipers",
"to",
"datas",
"input",
"data",
"in",
"the",
"correct",
"order",
"determined",
"by",
"the",
"Piper",
".",
"ornament",
"attribute",
"and",
"the",
"Dagger",
".",
"_cmp",
"function",
"."
] | 708e50827b5db46bbea081982cb74b9b0e464064 | https://github.com/mcieslik-mctp/papy/blob/708e50827b5db46bbea081982cb74b9b0e464064/src/papy/core.py#L188-L209 | train | 1,071 |
mcieslik-mctp/papy | src/papy/core.py | Dagger.start | def start(self):
"""
Given the pipeline topology starts ``Pipers`` in the order input ->
output. See ``Piper.start``. ``Pipers`` instances are started in two
stages, which allows them to share ``NuMaps``.
"""
# top - > bottom of pipeline
pipers = self.postorder()
#
for piper in pipers:
piper.start(stages=(0, 1))
for piper in pipers:
piper.start(stages=(2,)) | python | def start(self):
"""
Given the pipeline topology starts ``Pipers`` in the order input ->
output. See ``Piper.start``. ``Pipers`` instances are started in two
stages, which allows them to share ``NuMaps``.
"""
# top - > bottom of pipeline
pipers = self.postorder()
#
for piper in pipers:
piper.start(stages=(0, 1))
for piper in pipers:
piper.start(stages=(2,)) | [
"def",
"start",
"(",
"self",
")",
":",
"# top - > bottom of pipeline",
"pipers",
"=",
"self",
".",
"postorder",
"(",
")",
"# ",
"for",
"piper",
"in",
"pipers",
":",
"piper",
".",
"start",
"(",
"stages",
"=",
"(",
"0",
",",
"1",
")",
")",
"for",
"piper",
"in",
"pipers",
":",
"piper",
".",
"start",
"(",
"stages",
"=",
"(",
"2",
",",
")",
")"
] | Given the pipeline topology starts ``Pipers`` in the order input ->
output. See ``Piper.start``. ``Pipers`` instances are started in two
stages, which allows them to share ``NuMaps``. | [
"Given",
"the",
"pipeline",
"topology",
"starts",
"Pipers",
"in",
"the",
"order",
"input",
"-",
">",
"output",
".",
"See",
"Piper",
".",
"start",
".",
"Pipers",
"instances",
"are",
"started",
"in",
"two",
"stages",
"which",
"allows",
"them",
"to",
"share",
"NuMaps",
"."
] | 708e50827b5db46bbea081982cb74b9b0e464064 | https://github.com/mcieslik-mctp/papy/blob/708e50827b5db46bbea081982cb74b9b0e464064/src/papy/core.py#L234-L247 | train | 1,072 |
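The two-pass start above exists so that `Pipers` sharing a `NuMap` can all finish their early setup before any of them begins working. A toy illustration of that ordering follows; it is not papy itself, and the node names and stage numbers are invented.

```python
# Two-stage start: every node registers before any node begins its final stage.
class Node(object):
    def __init__(self, name, shared):
        self.name = name
        self.shared = shared

    def start(self, stages):
        if 0 in stages:
            self.shared.setdefault('registered', []).append(self.name)
        if 2 in stages:
            print(self.name, 'starts; peers already registered:',
                  self.shared['registered'])


shared_pool = {}
pipeline = [Node('input', shared_pool), Node('filter', shared_pool),
            Node('output', shared_pool)]

for node in pipeline:            # stage 0/1: registration only
    node.start(stages=(0, 1))
for node in pipeline:            # stage 2: actually begin work
    node.start(stages=(2,))
```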
mcieslik-mctp/papy | src/papy/core.py | Dagger.stop | def stop(self):
"""
Stops the ``Pipers`` according to pipeline topology.
"""
self.log.debug('%s begins stopping routine' % repr(self))
self.log.debug('%s triggers stopping in input pipers' % repr(self))
inputs = self.get_inputs()
for piper in inputs:
piper.stop(forced=True)
self.log.debug('%s pulls output pipers until stop' % repr(self))
outputs = self.get_outputs()
while outputs:
for piper in outputs:
try:
# for i in xrange(stride)?
piper.next()
except StopIteration:
outputs.remove(piper)
self.log.debug("%s stopped output piper: %s" % \
(repr(self), repr(piper)))
continue
except Exception, excp:
self.log.debug("%s %s raised an exception: %s" % \
(repr(self), piper, excp))
self.log.debug("%s stops the remaining pipers" % repr(self))
postorder = self.postorder()
for piper in postorder:
if piper not in inputs:
piper.stop(ends=[0])
self.log.debug("%s finishes stopping of input pipers" % repr(self))
for piper in inputs:
if hasattr(piper.imap, 'stop'):
piper.imap.stop(ends=[0])
self.log.debug('%s finishes stopping routine' % repr(self)) | python | def stop(self):
"""
Stops the ``Pipers`` according to pipeline topology.
"""
self.log.debug('%s begins stopping routine' % repr(self))
self.log.debug('%s triggers stopping in input pipers' % repr(self))
inputs = self.get_inputs()
for piper in inputs:
piper.stop(forced=True)
self.log.debug('%s pulls output pipers until stop' % repr(self))
outputs = self.get_outputs()
while outputs:
for piper in outputs:
try:
# for i in xrange(stride)?
piper.next()
except StopIteration:
outputs.remove(piper)
self.log.debug("%s stopped output piper: %s" % \
(repr(self), repr(piper)))
continue
except Exception, excp:
self.log.debug("%s %s raised an exception: %s" % \
(repr(self), piper, excp))
self.log.debug("%s stops the remaining pipers" % repr(self))
postorder = self.postorder()
for piper in postorder:
if piper not in inputs:
piper.stop(ends=[0])
self.log.debug("%s finishes stopping of input pipers" % repr(self))
for piper in inputs:
if hasattr(piper.imap, 'stop'):
piper.imap.stop(ends=[0])
self.log.debug('%s finishes stopping routine' % repr(self)) | [
"def",
"stop",
"(",
"self",
")",
":",
"self",
".",
"log",
".",
"debug",
"(",
"'%s begins stopping routine'",
"%",
"repr",
"(",
"self",
")",
")",
"self",
".",
"log",
".",
"debug",
"(",
"'%s triggers stopping in input pipers'",
"%",
"repr",
"(",
"self",
")",
")",
"inputs",
"=",
"self",
".",
"get_inputs",
"(",
")",
"for",
"piper",
"in",
"inputs",
":",
"piper",
".",
"stop",
"(",
"forced",
"=",
"True",
")",
"self",
".",
"log",
".",
"debug",
"(",
"'%s pulls output pipers until stop'",
"%",
"repr",
"(",
"self",
")",
")",
"outputs",
"=",
"self",
".",
"get_outputs",
"(",
")",
"while",
"outputs",
":",
"for",
"piper",
"in",
"outputs",
":",
"try",
":",
"# for i in xrange(stride)?",
"piper",
".",
"next",
"(",
")",
"except",
"StopIteration",
":",
"outputs",
".",
"remove",
"(",
"piper",
")",
"self",
".",
"log",
".",
"debug",
"(",
"\"%s stopped output piper: %s\"",
"%",
"(",
"repr",
"(",
"self",
")",
",",
"repr",
"(",
"piper",
")",
")",
")",
"continue",
"except",
"Exception",
",",
"excp",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"%s %s raised an exception: %s\"",
"%",
"(",
"repr",
"(",
"self",
")",
",",
"piper",
",",
"excp",
")",
")",
"self",
".",
"log",
".",
"debug",
"(",
"\"%s stops the remaining pipers\"",
"%",
"repr",
"(",
"self",
")",
")",
"postorder",
"=",
"self",
".",
"postorder",
"(",
")",
"for",
"piper",
"in",
"postorder",
":",
"if",
"piper",
"not",
"in",
"inputs",
":",
"piper",
".",
"stop",
"(",
"ends",
"=",
"[",
"0",
"]",
")",
"self",
".",
"log",
".",
"debug",
"(",
"\"%s finishes stopping of input pipers\"",
"%",
"repr",
"(",
"self",
")",
")",
"for",
"piper",
"in",
"inputs",
":",
"if",
"hasattr",
"(",
"piper",
".",
"imap",
",",
"'stop'",
")",
":",
"piper",
".",
"imap",
".",
"stop",
"(",
"ends",
"=",
"[",
"0",
"]",
")",
"self",
".",
"log",
".",
"debug",
"(",
"'%s finishes stopping routine'",
"%",
"repr",
"(",
"self",
")",
")"
] | Stops the ``Pipers`` according to pipeline topology. | [
"Stops",
"the",
"Pipers",
"according",
"to",
"pipeline",
"topology",
"."
] | 708e50827b5db46bbea081982cb74b9b0e464064 | https://github.com/mcieslik-mctp/papy/blob/708e50827b5db46bbea081982cb74b9b0e464064/src/papy/core.py#L249-L283 | train | 1,073 |
mcieslik-mctp/papy | src/papy/core.py | Dagger.del_piper | def del_piper(self, piper, forced=False):
"""
Removes a ``Piper`` from the ``Dagger`` instance.
Arguments:
- piper(``Piper`` or id(``Piper``)) ``Piper`` instance or ``Piper``
instance id.
- forced(bool) [default: ``False``] If "forced" is ``True``, will not
raise a ``DaggerError`` if the ``Piper`` has outgoing pipes and
will also remove it.
"""
self.log.debug('%s trying to delete piper %s' % \
(repr(self), repr(piper)))
try:
piper = self.resolve(piper, forgive=False)
except DaggerError:
self.log.error('%s cannot resolve piper from %s' % \
(repr(self), repr(piper)))
raise DaggerError('%s cannot resolve piper from %s' % \
(repr(self), repr(piper)))
if self.incoming_edges(piper) and not forced:
self.log.error('%s piper %s has down-stream pipers (use forced =True to override)' % \
(repr(self), piper))
raise DaggerError('%s piper %s has down-stream pipers (use forced =True to override)' % \
(repr(self), piper))
self.del_node(piper)
self.log.debug('%s deleted piper %s' % (repr(self), piper)) | python | def del_piper(self, piper, forced=False):
"""
Removes a ``Piper`` from the ``Dagger`` instance.
Arguments:
- piper(``Piper`` or id(``Piper``)) ``Piper`` instance or ``Piper``
instance id.
- forced(bool) [default: ``False``] If "forced" is ``True``, will not
raise a ``DaggerError`` if the ``Piper`` has outgoing pipes and
will also remove it.
"""
self.log.debug('%s trying to delete piper %s' % \
(repr(self), repr(piper)))
try:
piper = self.resolve(piper, forgive=False)
except DaggerError:
self.log.error('%s cannot resolve piper from %s' % \
(repr(self), repr(piper)))
raise DaggerError('%s cannot resolve piper from %s' % \
(repr(self), repr(piper)))
if self.incoming_edges(piper) and not forced:
self.log.error('%s piper %s has down-stream pipers (use forced =True to override)' % \
(repr(self), piper))
raise DaggerError('%s piper %s has down-stream pipers (use forced =True to override)' % \
(repr(self), piper))
self.del_node(piper)
self.log.debug('%s deleted piper %s' % (repr(self), piper)) | [
"def",
"del_piper",
"(",
"self",
",",
"piper",
",",
"forced",
"=",
"False",
")",
":",
"self",
".",
"log",
".",
"debug",
"(",
"'%s trying to delete piper %s'",
"%",
"(",
"repr",
"(",
"self",
")",
",",
"repr",
"(",
"piper",
")",
")",
")",
"try",
":",
"piper",
"=",
"self",
".",
"resolve",
"(",
"piper",
",",
"forgive",
"=",
"False",
")",
"except",
"DaggerError",
":",
"self",
".",
"log",
".",
"error",
"(",
"'%s cannot resolve piper from %s'",
"%",
"(",
"repr",
"(",
"self",
")",
",",
"repr",
"(",
"piper",
")",
")",
")",
"raise",
"DaggerError",
"(",
"'%s cannot resolve piper from %s'",
"%",
"(",
"repr",
"(",
"self",
")",
",",
"repr",
"(",
"piper",
")",
")",
")",
"if",
"self",
".",
"incoming_edges",
"(",
"piper",
")",
"and",
"not",
"forced",
":",
"self",
".",
"log",
".",
"error",
"(",
"'%s piper %s has down-stream pipers (use forced =True to override)'",
"%",
"(",
"repr",
"(",
"self",
")",
",",
"piper",
")",
")",
"raise",
"DaggerError",
"(",
"'%s piper %s has down-stream pipers (use forced =True to override)'",
"%",
"(",
"repr",
"(",
"self",
")",
",",
"piper",
")",
")",
"self",
".",
"del_node",
"(",
"piper",
")",
"self",
".",
"log",
".",
"debug",
"(",
"'%s deleted piper %s'",
"%",
"(",
"repr",
"(",
"self",
")",
",",
"piper",
")",
")"
] | Removes a ``Piper`` from the ``Dagger`` instance.
Arguments:
- piper(``Piper`` or id(``Piper``)) ``Piper`` instance or ``Piper``
instance id.
- forced(bool) [default: ``False``] If "forced" is ``True``, will not
raise a ``DaggerError`` if the ``Piper`` has outgoing pipes and
will also remove it. | [
"Removes",
"a",
"Piper",
"from",
"the",
"Dagger",
"instance",
"."
] | 708e50827b5db46bbea081982cb74b9b0e464064 | https://github.com/mcieslik-mctp/papy/blob/708e50827b5db46bbea081982cb74b9b0e464064/src/papy/core.py#L346-L374 | train | 1,074 |
mcieslik-mctp/papy | src/papy/core.py | Plumber.start | def start(self, datas):
"""
Starts the pipeline by connecting the input ``Pipers`` of the pipeline
to the input data, connecting the pipeline and starting the ``NuMap``
instances.
The order of items in the "datas" argument sequence should correspond
to the order of the input ``Pipers`` defined by ``Dagger._cmp`` and
``Piper.ornament``.
Arguments:
- datas(sequence) A sequence of external input data in the form of
sequences or iterators.
"""
if not self._started.isSet() and \
not self._running.isSet() and \
not self._pausing.isSet():
# Plumber statistics
self.stats = {}
self.stats['start_time'] = None
self.stats['run_time'] = None
# connects input pipers to external data
self.connect_inputs(datas)
# connects pipers within the pipeline
self.connect()
# make pointers to results collected for pipers by imaps
self.stats['pipers_tracked'] = {}
for piper in self.postorder():
if hasattr(piper.imap, '_tasks_tracked') and piper.track:
self.stats['pipers_tracked'][piper] = \
[piper.imap._tasks_tracked[t.task] for t in piper.imap_tasks]
self.stats['start_time'] = time()
# starts the Dagger
# this starts Pipers and NuMaps
super(Plumber, self).start()
# transitioning to started state
self._started.set()
self._finished.clear()
else:
raise PlumberError | python | def start(self, datas):
"""
Starts the pipeline by connecting the input ``Pipers`` of the pipeline
to the input data, connecting the pipeline and starting the ``NuMap``
instances.
The order of items in the "datas" argument sequence should correspond
to the order of the input ``Pipers`` defined by ``Dagger._cmp`` and
``Piper.ornament``.
Arguments:
- datas(sequence) A sequence of external input data in the form of
sequences or iterators.
"""
if not self._started.isSet() and \
not self._running.isSet() and \
not self._pausing.isSet():
# Plumber statistics
self.stats = {}
self.stats['start_time'] = None
self.stats['run_time'] = None
# connects input pipers to external data
self.connect_inputs(datas)
# connects pipers within the pipeline
self.connect()
# make pointers to results collected for pipers by imaps
self.stats['pipers_tracked'] = {}
for piper in self.postorder():
if hasattr(piper.imap, '_tasks_tracked') and piper.track:
self.stats['pipers_tracked'][piper] = \
[piper.imap._tasks_tracked[t.task] for t in piper.imap_tasks]
self.stats['start_time'] = time()
# starts the Dagger
# this starts Pipers and NuMaps
super(Plumber, self).start()
# transitioning to started state
self._started.set()
self._finished.clear()
else:
raise PlumberError | [
"def",
"start",
"(",
"self",
",",
"datas",
")",
":",
"if",
"not",
"self",
".",
"_started",
".",
"isSet",
"(",
")",
"and",
"not",
"self",
".",
"_running",
".",
"isSet",
"(",
")",
"and",
"not",
"self",
".",
"_pausing",
".",
"isSet",
"(",
")",
":",
"# Plumber statistics",
"self",
".",
"stats",
"=",
"{",
"}",
"self",
".",
"stats",
"[",
"'start_time'",
"]",
"=",
"None",
"self",
".",
"stats",
"[",
"'run_time'",
"]",
"=",
"None",
"# connects input pipers to external data",
"self",
".",
"connect_inputs",
"(",
"datas",
")",
"# connects pipers within the pipeline",
"self",
".",
"connect",
"(",
")",
"# make pointers to results collected for pipers by imaps",
"self",
".",
"stats",
"[",
"'pipers_tracked'",
"]",
"=",
"{",
"}",
"for",
"piper",
"in",
"self",
".",
"postorder",
"(",
")",
":",
"if",
"hasattr",
"(",
"piper",
".",
"imap",
",",
"'_tasks_tracked'",
")",
"and",
"piper",
".",
"track",
":",
"self",
".",
"stats",
"[",
"'pipers_tracked'",
"]",
"[",
"piper",
"]",
"=",
"[",
"piper",
".",
"imap",
".",
"_tasks_tracked",
"[",
"t",
".",
"task",
"]",
"for",
"t",
"in",
"piper",
".",
"imap_tasks",
"]",
"self",
".",
"stats",
"[",
"'start_time'",
"]",
"=",
"time",
"(",
")",
"# starts the Dagger",
"# this starts Pipers and NuMaps",
"super",
"(",
"Plumber",
",",
"self",
")",
".",
"start",
"(",
")",
"# transitioning to started state",
"self",
".",
"_started",
".",
"set",
"(",
")",
"self",
".",
"_finished",
".",
"clear",
"(",
")",
"else",
":",
"raise",
"PlumberError"
] | Starts the pipeline by connecting the input ``Pipers`` of the pipeline
to the input data, connecting the pipeline and starting the ``NuMap``
instances.
The order of items in the "datas" argument sequence should correspond
to the order of the input ``Pipers`` defined by ``Dagger._cmp`` and
``Piper.ornament``.
Arguments:
- datas(sequence) A sequence of external input data in the form of
sequences or iterators. | [
"Starts",
"the",
"pipeline",
"by",
"connecting",
"the",
"input",
"Pipers",
"of",
"the",
"pipeline",
"to",
"the",
"input",
"data",
"connecting",
"the",
"pipeline",
"and",
"starting",
"the",
"NuMap",
"instances",
".",
"The",
"order",
"of",
"items",
"in",
"the",
"datas",
"argument",
"sequence",
"should",
"correspond",
"to",
"the",
"order",
"of",
"the",
"input",
"Pipers",
"defined",
"by",
"Dagger",
".",
"_cmp",
"and",
"Piper",
".",
"ornament",
"."
] | 708e50827b5db46bbea081982cb74b9b0e464064 | https://github.com/mcieslik-mctp/papy/blob/708e50827b5db46bbea081982cb74b9b0e464064/src/papy/core.py#L650-L692 | train | 1,075 |
mcieslik-mctp/papy | src/papy/core.py | Plumber.pause | def pause(self):
"""
Pauses a running pipeline. This will stop retrieving results from the
pipeline. Parallel parts of the pipeline will stop after the ``NuMap``
buffer has been filled. A paused pipeline can be run or stopped.
"""
# 1. stop the plumbing thread by raising a StopIteration on a stride
# boundary
if self._started.isSet() and \
self._running.isSet() and \
not self._pausing.isSet():
self._pausing.set()
self._plunger.join()
del self._plunger
self._pausing.clear()
self._running.clear()
else:
raise PlumberError | python | def pause(self):
"""
Pauses a running pipeline. This will stop retrieving results from the
pipeline. Parallel parts of the pipeline will stop after the ``NuMap``
buffer has been filled. A paused pipeline can be run or stopped.
"""
# 1. stop the plumbing thread by raising a StopIteration on a stride
# boundary
if self._started.isSet() and \
self._running.isSet() and \
not self._pausing.isSet():
self._pausing.set()
self._plunger.join()
del self._plunger
self._pausing.clear()
self._running.clear()
else:
raise PlumberError | [
"def",
"pause",
"(",
"self",
")",
":",
"# 1. stop the plumbing thread by raising a StopIteration on a stride ",
"# boundary",
"if",
"self",
".",
"_started",
".",
"isSet",
"(",
")",
"and",
"self",
".",
"_running",
".",
"isSet",
"(",
")",
"and",
"not",
"self",
".",
"_pausing",
".",
"isSet",
"(",
")",
":",
"self",
".",
"_pausing",
".",
"set",
"(",
")",
"self",
".",
"_plunger",
".",
"join",
"(",
")",
"del",
"self",
".",
"_plunger",
"self",
".",
"_pausing",
".",
"clear",
"(",
")",
"self",
".",
"_running",
".",
"clear",
"(",
")",
"else",
":",
"raise",
"PlumberError"
] | Pauses a running pipeline. This will stop retrieving results from the
pipeline. Parallel parts of the pipeline will stop after the ``NuMap``
buffer has been filled. A paused pipeline can be run or stopped. | [
"Pauses",
"a",
"running",
"pipeline",
".",
"This",
"will",
"stop",
"retrieving",
"results",
"from",
"the",
"pipeline",
".",
"Parallel",
"parts",
"of",
"the",
"pipeline",
"will",
"stop",
"after",
"the",
"NuMap",
"buffer",
"is",
"has",
"been",
"filled",
".",
"A",
"paused",
"pipeline",
"can",
"be",
"run",
"or",
"stopped",
"."
] | 708e50827b5db46bbea081982cb74b9b0e464064 | https://github.com/mcieslik-mctp/papy/blob/708e50827b5db46bbea081982cb74b9b0e464064/src/papy/core.py#L737-L755 | train | 1,076 |
mcieslik-mctp/papy | src/papy/core.py | Plumber.stop | def stop(self):
"""
Stops a paused pipeline. This will trigger a ``StopIteration`` in the
inputs of the pipeline. And retrieve the buffered results. This will
stop all ``Pipers`` and ``NuMaps``. Python will not terminate cleanly
if a pipeline is running or paused.
"""
if self._started.isSet() and \
not self._running.isSet() and \
not self._pausing.isSet():
# stops the dagger
super(Plumber, self).stop()
# disconnects all pipers
self.disconnect()
self.stats['run_time'] = time() - self.stats['start_time']
self._started.clear()
else:
raise PlumberError | python | def stop(self):
"""
Stops a paused pipeline. This will trigger a ``StopIteration`` in the
inputs of the pipeline. And retrieve the buffered results. This will
stop all ``Pipers`` and ``NuMaps``. Python will not terminate cleanly
if a pipeline is running or paused.
"""
if self._started.isSet() and \
not self._running.isSet() and \
not self._pausing.isSet():
# stops the dagger
super(Plumber, self).stop()
# disconnects all pipers
self.disconnect()
self.stats['run_time'] = time() - self.stats['start_time']
self._started.clear()
else:
raise PlumberError | [
"def",
"stop",
"(",
"self",
")",
":",
"if",
"self",
".",
"_started",
".",
"isSet",
"(",
")",
"and",
"not",
"self",
".",
"_running",
".",
"isSet",
"(",
")",
"and",
"not",
"self",
".",
"_pausing",
".",
"isSet",
"(",
")",
":",
"# stops the dagger",
"super",
"(",
"Plumber",
",",
"self",
")",
".",
"stop",
"(",
")",
"# disconnects all pipers",
"self",
".",
"disconnect",
"(",
")",
"self",
".",
"stats",
"[",
"'run_time'",
"]",
"=",
"time",
"(",
")",
"-",
"self",
".",
"stats",
"[",
"'start_time'",
"]",
"self",
".",
"_started",
".",
"clear",
"(",
")",
"else",
":",
"raise",
"PlumberError"
] | Stops a paused pipeline. This will trigger a ``StopIteration`` in the
inputs of the pipeline. And retrieve the buffered results. This will
stop all ``Pipers`` and ``NuMaps``. Python will not terminate cleanly
if a pipeline is running or paused. | [
"Stops",
"a",
"paused",
"pipeline",
".",
"This",
"will",
"a",
"trigger",
"a",
"StopIteration",
"in",
"the",
"inputs",
"of",
"the",
"pipeline",
".",
"And",
"retrieve",
"the",
"buffered",
"results",
".",
"This",
"will",
"stop",
"all",
"Pipers",
"and",
"NuMaps",
".",
"Python",
"will",
"not",
"terminate",
"cleanly",
"if",
"a",
"pipeline",
"is",
"running",
"or",
"paused",
"."
] | 708e50827b5db46bbea081982cb74b9b0e464064 | https://github.com/mcieslik-mctp/papy/blob/708e50827b5db46bbea081982cb74b9b0e464064/src/papy/core.py#L757-L775 | train | 1,077 |
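`Plumber.start`, `pause`, and `stop` above all gate their transitions on `threading.Event` flags and raise `PlumberError` otherwise. A generic sketch of that event-guarded state machine follows; it is not the papy implementation, and the class and error names are invented.

```python
# Event-guarded state transitions: start -> run -> pause -> stop.
import threading


class StateError(Exception):
    pass


class Controller(object):
    def __init__(self):
        self._started = threading.Event()
        self._running = threading.Event()

    def start(self):
        if self._started.is_set():
            raise StateError('already started')
        self._started.set()

    def run(self):
        if not self._started.is_set() or self._running.is_set():
            raise StateError('must be started and not running')
        self._running.set()

    def pause(self):
        if not self._running.is_set():
            raise StateError('not running')
        self._running.clear()

    def stop(self):
        if not self._started.is_set() or self._running.is_set():
            raise StateError('stop only from the started, non-running state')
        self._started.clear()


c = Controller()
c.start()
c.run()
c.pause()
c.stop()
print('clean shutdown')
```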
mcieslik-mctp/papy | src/papy/core.py | _Consume.next | def next(self):
"""
Returns the next sequence of results, given stride and n.
"""
try:
results = self._stride_buffer.pop()
except (IndexError, AttributeError):
self._rebuffer()
results = self._stride_buffer.pop()
if not results:
raise StopIteration
return results | python | def next(self):
"""
Returns the next sequence of results, given stride and n.
"""
try:
results = self._stride_buffer.pop()
except (IndexError, AttributeError):
self._rebuffer()
results = self._stride_buffer.pop()
if not results:
raise StopIteration
return results | [
"def",
"next",
"(",
"self",
")",
":",
"try",
":",
"results",
"=",
"self",
".",
"_stride_buffer",
".",
"pop",
"(",
")",
"except",
"(",
"IndexError",
",",
"AttributeError",
")",
":",
"self",
".",
"_rebuffer",
"(",
")",
"results",
"=",
"self",
".",
"_stride_buffer",
".",
"pop",
"(",
")",
"if",
"not",
"results",
":",
"raise",
"StopIteration",
"return",
"results"
] | Returns the next sequence of results, given stride and n. | [
"Returns",
"the",
"next",
"sequence",
"of",
"results",
"given",
"stride",
"and",
"n",
"."
] | 708e50827b5db46bbea081982cb74b9b0e464064 | https://github.com/mcieslik-mctp/papy/blob/708e50827b5db46bbea081982cb74b9b0e464064/src/papy/core.py#L1490-L1502 | train | 1,078 |
mcieslik-mctp/papy | src/papy/core.py | _Chain.next | def next(self):
"""
Returns the next result from the chained iterables given ``"stride"``.
"""
if self.s:
self.s -= 1
else:
self.s = self.stride - 1
self.i = (self.i + 1) % self.l # new iterable
return self.iterables[self.i].next() | python | def next(self):
"""
Returns the next result from the chained iterables given ``"stride"``.
"""
if self.s:
self.s -= 1
else:
self.s = self.stride - 1
self.i = (self.i + 1) % self.l # new iterable
return self.iterables[self.i].next() | [
"def",
"next",
"(",
"self",
")",
":",
"if",
"self",
".",
"s",
":",
"self",
".",
"s",
"-=",
"1",
"else",
":",
"self",
".",
"s",
"=",
"self",
".",
"stride",
"-",
"1",
"self",
".",
"i",
"=",
"(",
"self",
".",
"i",
"+",
"1",
")",
"%",
"self",
".",
"l",
"# new iterable",
"return",
"self",
".",
"iterables",
"[",
"self",
".",
"i",
"]",
".",
"next",
"(",
")"
] | Returns the next result from the chained iterables given ``"stride"``. | [
"Returns",
"the",
"next",
"result",
"from",
"the",
"chained",
"iterables",
"given",
"stride",
"."
] | 708e50827b5db46bbea081982cb74b9b0e464064 | https://github.com/mcieslik-mctp/papy/blob/708e50827b5db46bbea081982cb74b9b0e464064/src/papy/core.py#L1557-L1567 | train | 1,079 |
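A stand-alone version of the strided round-robin that `_Chain.next` performs — take `stride` items from each iterable before moving on to the next one. As a simplification it stops as soon as any iterable runs dry, which matches the equal-length inputs papy expects.

```python
# Strided round-robin over several iterables.
def strided_chain(iterables, stride):
    """Yield `stride` items from each iterable in turn, round-robin."""
    iterators = [iter(it) for it in iterables]
    while True:
        for it in iterators:
            for _ in range(stride):
                try:
                    yield next(it)
                except StopIteration:
                    return   # simplification: stop when any input runs dry


print(list(strided_chain([[1, 2, 3, 4], ['a', 'b', 'c', 'd']], stride=2)))
# -> [1, 2, 'a', 'b', 3, 4, 'c', 'd']
```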
qacafe/cdrouter.py | cdrouter/results.py | ResultsService.list_csv | def list_csv(self, filter=None, type=None, sort=None, limit=None, page=None): # pylint: disable=redefined-builtin
"""Get a list of results as CSV.
:param filter: (optional) Filters to apply as a string list.
:param type: (optional) `union` or `inter` as string.
:param sort: (optional) Sort fields to apply as string list.
:param limit: (optional) Limit returned list length.
:param page: (optional) Page to return.
:rtype: string
"""
return self.service.list(self.base, filter, type, sort, limit, page, format='csv').text | python | def list_csv(self, filter=None, type=None, sort=None, limit=None, page=None): # pylint: disable=redefined-builtin
"""Get a list of results as CSV.
:param filter: (optional) Filters to apply as a string list.
:param type: (optional) `union` or `inter` as string.
:param sort: (optional) Sort fields to apply as string list.
:param limit: (optional) Limit returned list length.
:param page: (optional) Page to return.
:rtype: string
"""
return self.service.list(self.base, filter, type, sort, limit, page, format='csv').text | [
"def",
"list_csv",
"(",
"self",
",",
"filter",
"=",
"None",
",",
"type",
"=",
"None",
",",
"sort",
"=",
"None",
",",
"limit",
"=",
"None",
",",
"page",
"=",
"None",
")",
":",
"# pylint: disable=redefined-builtin",
"return",
"self",
".",
"service",
".",
"list",
"(",
"self",
".",
"base",
",",
"filter",
",",
"type",
",",
"sort",
",",
"limit",
",",
"page",
",",
"format",
"=",
"'csv'",
")",
".",
"text"
] | Get a list of results as CSV.
:param filter: (optional) Filters to apply as a string list.
:param type: (optional) `union` or `inter` as string.
:param sort: (optional) Sort fields to apply as string list.
:param limit: (optional) Limit returned list length.
:param page: (optional) Page to return.
:rtype: string | [
"Get",
"a",
"list",
"of",
"results",
"as",
"CSV",
"."
] | aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5 | https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/results.py#L600-L610 | train | 1,080 |
qacafe/cdrouter.py | cdrouter/results.py | ResultsService.updates | def updates(self, id, update_id=None): # pylint: disable=invalid-name,redefined-builtin
"""Get updates of a running result via long-polling. If no updates are available, CDRouter waits up to 10 seconds before sending an empty response.
:param id: Result ID as an int.
:param update_id: (optional) Update ID as an int.
:return: :class:`results.Update <results.Update>` object
:rtype: results.Update
"""
if update_id is None:
update_id = -1
schema = UpdateSchema()
resp = self.service.get_id(self.base, id, params={'updates': update_id})
return self.service.decode(schema, resp) | python | def updates(self, id, update_id=None): # pylint: disable=invalid-name,redefined-builtin
"""Get updates of a running result via long-polling. If no updates are available, CDRouter waits up to 10 seconds before sending an empty response.
:param id: Result ID as an int.
:param update_id: (optional) Update ID as an int.
:return: :class:`results.Update <results.Update>` object
:rtype: results.Update
"""
if update_id is None:
update_id = -1
schema = UpdateSchema()
resp = self.service.get_id(self.base, id, params={'updates': update_id})
return self.service.decode(schema, resp) | [
"def",
"updates",
"(",
"self",
",",
"id",
",",
"update_id",
"=",
"None",
")",
":",
"# pylint: disable=invalid-name,redefined-builtin",
"if",
"update_id",
"is",
"None",
":",
"update_id",
"=",
"-",
"1",
"schema",
"=",
"UpdateSchema",
"(",
")",
"resp",
"=",
"self",
".",
"service",
".",
"get_id",
"(",
"self",
".",
"base",
",",
"id",
",",
"params",
"=",
"{",
"'updates'",
":",
"update_id",
"}",
")",
"return",
"self",
".",
"service",
".",
"decode",
"(",
"schema",
",",
"resp",
")"
] | Get updates of a running result via long-polling. If no updates are available, CDRouter waits up to 10 seconds before sending an empty response.
:param id: Result ID as an int.
:param update_id: (optional) Update ID as an int.
:return: :class:`results.Update <results.Update>` object
:rtype: results.Update | [
"Get",
"updates",
"of",
"a",
"running",
"result",
"via",
"long",
"-",
"polling",
".",
"If",
"no",
"updates",
"are",
"available",
"CDRouter",
"waits",
"up",
"to",
"10",
"seconds",
"before",
"sending",
"an",
"empty",
"response",
"."
] | aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5 | https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/results.py#L623-L636 | train | 1,081 |
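A polling sketch built on `ResultsService.updates` above. It assumes a client constructed as in the cdrouter.py README (base URL plus API token) and a made-up result ID; the structure of the returned `Update` object is not assumed, so the loop only prints it.

```python
# Poll a running result a few times via the long-polling updates endpoint.
import time

from cdrouter import CDRouter

c = CDRouter('http://localhost', token='deadbeef')   # README-style client
result_id = 20240101000000                            # placeholder result ID

for _ in range(5):                        # poll a handful of times
    update = c.results.updates(result_id)   # server waits up to ~10 s
    print(update)                         # inspect the returned Update object
    time.sleep(1)
```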
qacafe/cdrouter.py | cdrouter/results.py | ResultsService.pause | def pause(self, id, when=None): # pylint: disable=invalid-name,redefined-builtin
"""Pause a running result.
:param id: Result ID as an int.
:param when: Must be string `end-of-test` or `end-of-loop`.
"""
return self.service.post(self.base+str(id)+'/pause/', params={'when': when}) | python | def pause(self, id, when=None): # pylint: disable=invalid-name,redefined-builtin
"""Pause a running result.
:param id: Result ID as an int.
:param when: Must be string `end-of-test` or `end-of-loop`.
"""
return self.service.post(self.base+str(id)+'/pause/', params={'when': when}) | [
"def",
"pause",
"(",
"self",
",",
"id",
",",
"when",
"=",
"None",
")",
":",
"# pylint: disable=invalid-name,redefined-builtin",
"return",
"self",
".",
"service",
".",
"post",
"(",
"self",
".",
"base",
"+",
"str",
"(",
"id",
")",
"+",
"'/pause/'",
",",
"params",
"=",
"{",
"'when'",
":",
"when",
"}",
")"
] | Pause a running result.
:param id: Result ID as an int.
:param when: Must be string `end-of-test` or `end-of-loop`. | [
"Pause",
"a",
"running",
"result",
"."
] | aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5 | https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/results.py#L660-L666 | train | 1,082 |
qacafe/cdrouter.py | cdrouter/results.py | ResultsService.unpause | def unpause(self, id): # pylint: disable=invalid-name,redefined-builtin
"""Unpause a running result.
:param id: Result ID as an int.
"""
return self.service.post(self.base+str(id)+'/unpause/') | python | def unpause(self, id): # pylint: disable=invalid-name,redefined-builtin
"""Unpause a running result.
:param id: Result ID as an int.
"""
return self.service.post(self.base+str(id)+'/unpause/') | [
"def",
"unpause",
"(",
"self",
",",
"id",
")",
":",
"# pylint: disable=invalid-name,redefined-builtin",
"return",
"self",
".",
"service",
".",
"post",
"(",
"self",
".",
"base",
"+",
"str",
"(",
"id",
")",
"+",
"'/unpause/'",
")"
] | Unpause a running result.
:param id: Result ID as an int. | [
"Unpause",
"a",
"running",
"result",
"."
] | aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5 | https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/results.py#L682-L687 | train | 1,083 |
qacafe/cdrouter.py | cdrouter/results.py | ResultsService.export | def export(self, id, exclude_captures=False): # pylint: disable=invalid-name,redefined-builtin
"""Export a result.
:param id: Result ID as an int.
:param exclude_captures: If bool `True`, don't export capture files
:rtype: tuple `(io.BytesIO, 'filename')`
"""
return self.service.export(self.base, id, params={'exclude_captures': exclude_captures}) | python | def export(self, id, exclude_captures=False): # pylint: disable=invalid-name,redefined-builtin
"""Export a result.
:param id: Result ID as an int.
:param exclude_captures: If bool `True`, don't export capture files
:rtype: tuple `(io.BytesIO, 'filename')`
"""
return self.service.export(self.base, id, params={'exclude_captures': exclude_captures}) | [
"def",
"export",
"(",
"self",
",",
"id",
",",
"exclude_captures",
"=",
"False",
")",
":",
"# pylint: disable=invalid-name,redefined-builtin",
"return",
"self",
".",
"service",
".",
"export",
"(",
"self",
".",
"base",
",",
"id",
",",
"params",
"=",
"{",
"'exclude_captures'",
":",
"exclude_captures",
"}",
")"
] | Export a result.
:param id: Result ID as an int.
:param exclude_captures: If bool `True`, don't export capture files
:rtype: tuple `(io.BytesIO, 'filename')` | [
"Export",
"a",
"result",
"."
] | aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5 | https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/results.py#L727-L734 | train | 1,084 |
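Saving an exported result to disk, using the `(io.BytesIO, filename)` return value documented above. Client construction is assumed to follow the cdrouter.py README; the result ID is a placeholder.

```python
# Export a result archive and write it to a local file.
from cdrouter import CDRouter

c = CDRouter('http://localhost', token='deadbeef')   # README-style client

buf, filename = c.results.export(20240101000000, exclude_captures=True)
with open(filename, 'wb') as fh:
    fh.write(buf.getvalue())
print('wrote', filename)
```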
qacafe/cdrouter.py | cdrouter/results.py | ResultsService.bulk_export | def bulk_export(self, ids, exclude_captures=False):
"""Bulk export a set of results.
:param ids: Int list of result IDs.
:rtype: tuple `(io.BytesIO, 'filename')`
"""
return self.service.bulk_export(self.base, ids, params={'exclude_captures': exclude_captures}) | python | def bulk_export(self, ids, exclude_captures=False):
"""Bulk export a set of results.
:param ids: Int list of result IDs.
:rtype: tuple `(io.BytesIO, 'filename')`
"""
return self.service.bulk_export(self.base, ids, params={'exclude_captures': exclude_captures}) | [
"def",
"bulk_export",
"(",
"self",
",",
"ids",
",",
"exclude_captures",
"=",
"False",
")",
":",
"return",
"self",
".",
"service",
".",
"bulk_export",
"(",
"self",
".",
"base",
",",
"ids",
",",
"params",
"=",
"{",
"'exclude_captures'",
":",
"exclude_captures",
"}",
")"
] | Bulk export a set of results.
:param ids: Int list of result IDs.
:rtype: tuple `(io.BytesIO, 'filename')` | [
"Bulk",
"export",
"a",
"set",
"of",
"results",
"."
] | aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5 | https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/results.py#L736-L742 | train | 1,085 |
qacafe/cdrouter.py | cdrouter/results.py | ResultsService.bulk_copy | def bulk_copy(self, ids):
"""Bulk copy a set of results.
:param ids: Int list of result IDs.
:return: :class:`results.Result <results.Result>` list
"""
schema = ResultSchema()
return self.service.bulk_copy(self.base, self.RESOURCE, ids, schema) | python | def bulk_copy(self, ids):
"""Bulk copy a set of results.
:param ids: Int list of result IDs.
:return: :class:`results.Result <results.Result>` list
"""
schema = ResultSchema()
return self.service.bulk_copy(self.base, self.RESOURCE, ids, schema) | [
"def",
"bulk_copy",
"(",
"self",
",",
"ids",
")",
":",
"schema",
"=",
"ResultSchema",
"(",
")",
"return",
"self",
".",
"service",
".",
"bulk_copy",
"(",
"self",
".",
"base",
",",
"self",
".",
"RESOURCE",
",",
"ids",
",",
"schema",
")"
] | Bulk copy a set of results.
:param ids: Int list of result IDs.
:return: :class:`results.Result <results.Result>` list | [
"Bulk",
"copy",
"a",
"set",
"of",
"results",
"."
] | aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5 | https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/results.py#L744-L751 | train | 1,086 |
qacafe/cdrouter.py | cdrouter/results.py | ResultsService.all_stats | def all_stats(self):
"""Compute stats for all results.
:return: :class:`results.AllStats <results.AllStats>` object
:rtype: results.AllStats
"""
schema = AllStatsSchema()
resp = self.service.post(self.base, params={'stats': 'all'})
return self.service.decode(schema, resp) | python | def all_stats(self):
"""Compute stats for all results.
:return: :class:`results.AllStats <results.AllStats>` object
:rtype: results.AllStats
"""
schema = AllStatsSchema()
resp = self.service.post(self.base, params={'stats': 'all'})
return self.service.decode(schema, resp) | [
"def",
"all_stats",
"(",
"self",
")",
":",
"schema",
"=",
"AllStatsSchema",
"(",
")",
"resp",
"=",
"self",
".",
"service",
".",
"post",
"(",
"self",
".",
"base",
",",
"params",
"=",
"{",
"'stats'",
":",
"'all'",
"}",
")",
"return",
"self",
".",
"service",
".",
"decode",
"(",
"schema",
",",
"resp",
")"
] | Compute stats for all results.
:return: :class:`results.AllStats <results.AllStats>` object
:rtype: results.AllStats | [
"Compute",
"stats",
"for",
"all",
"results",
"."
] | aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5 | https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/results.py#L776-L784 | train | 1,087 |
qacafe/cdrouter.py | cdrouter/results.py | ResultsService.set_stats | def set_stats(self, ids):
"""Compute stats for a set of results.
:param id: Result IDs as int list.
:return: :class:`results.SetStats <results.SetStats>` object
:rtype: results.SetStats
"""
schema = SetStatsSchema()
resp = self.service.post(self.base, params={'stats': 'set'}, json=[{'id': str(x)} for x in ids])
return self.service.decode(schema, resp) | python | def set_stats(self, ids):
"""Compute stats for a set of results.
:param id: Result IDs as int list.
:return: :class:`results.SetStats <results.SetStats>` object
:rtype: results.SetStats
"""
schema = SetStatsSchema()
resp = self.service.post(self.base, params={'stats': 'set'}, json=[{'id': str(x)} for x in ids])
return self.service.decode(schema, resp) | [
"def",
"set_stats",
"(",
"self",
",",
"ids",
")",
":",
"schema",
"=",
"SetStatsSchema",
"(",
")",
"resp",
"=",
"self",
".",
"service",
".",
"post",
"(",
"self",
".",
"base",
",",
"params",
"=",
"{",
"'stats'",
":",
"'set'",
"}",
",",
"json",
"=",
"[",
"{",
"'id'",
":",
"str",
"(",
"x",
")",
"}",
"for",
"x",
"in",
"ids",
"]",
")",
"return",
"self",
".",
"service",
".",
"decode",
"(",
"schema",
",",
"resp",
")"
] | Compute stats for a set of results.
:param id: Result IDs as int list.
:return: :class:`results.SetStats <results.SetStats>` object
:rtype: results.SetStats | [
"Compute",
"stats",
"for",
"a",
"set",
"of",
"results",
"."
] | aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5 | https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/results.py#L786-L795 | train | 1,088 |
qacafe/cdrouter.py | cdrouter/results.py | ResultsService.diff_stats | def diff_stats(self, ids):
"""Compute diff stats for a set of results.
:param id: Result IDs as int list.
:return: :class:`results.DiffStats <results.DiffStats>` object
:rtype: results.DiffStats
"""
schema = DiffStatsSchema()
resp = self.service.post(self.base, params={'stats': 'diff'}, json=[{'id': str(x)} for x in ids])
return self.service.decode(schema, resp) | python | def diff_stats(self, ids):
"""Compute diff stats for a set of results.
:param id: Result IDs as int list.
:return: :class:`results.DiffStats <results.DiffStats>` object
:rtype: results.DiffStats
"""
schema = DiffStatsSchema()
resp = self.service.post(self.base, params={'stats': 'diff'}, json=[{'id': str(x)} for x in ids])
return self.service.decode(schema, resp) | [
"def",
"diff_stats",
"(",
"self",
",",
"ids",
")",
":",
"schema",
"=",
"DiffStatsSchema",
"(",
")",
"resp",
"=",
"self",
".",
"service",
".",
"post",
"(",
"self",
".",
"base",
",",
"params",
"=",
"{",
"'stats'",
":",
"'diff'",
"}",
",",
"json",
"=",
"[",
"{",
"'id'",
":",
"str",
"(",
"x",
")",
"}",
"for",
"x",
"in",
"ids",
"]",
")",
"return",
"self",
".",
"service",
".",
"decode",
"(",
"schema",
",",
"resp",
")"
] | Compute diff stats for a set of results.
:param id: Result IDs as int list.
:return: :class:`results.DiffStats <results.DiffStats>` object
:rtype: results.DiffStats | [
"Compute",
"diff",
"stats",
"for",
"a",
"set",
"of",
"results",
"."
] | aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5 | https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/results.py#L797-L806 | train | 1,089 |
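Combining the set-oriented stats calls documented above; same client assumptions as before, with placeholder result IDs.

```python
# Aggregate and compare a set of results.
from cdrouter import CDRouter

c = CDRouter('http://localhost', token='deadbeef')   # README-style client

ids = [20240101000000, 20240102000000, 20240103000000]   # placeholder IDs
combined = c.results.set_stats(ids)    # aggregate stats for the whole set
deltas = c.results.diff_stats(ids)     # differences between the runs
print(combined, deltas)
```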
qacafe/cdrouter.py | cdrouter/results.py | ResultsService.single_stats | def single_stats(self, id): # pylint: disable=invalid-name,redefined-builtin
"""Compute stats for a result.
:param id: Result ID as an int.
:return: :class:`results.SingleStats <results.SingleStats>` object
:rtype: results.SingleStats
"""
schema = SingleStatsSchema()
resp = self.service.get(self.base+str(id)+'/', params={'stats': 'all'})
return self.service.decode(schema, resp) | python | def single_stats(self, id): # pylint: disable=invalid-name,redefined-builtin
"""Compute stats for a result.
:param id: Result ID as an int.
:return: :class:`results.SingleStats <results.SingleStats>` object
:rtype: results.SingleStats
"""
schema = SingleStatsSchema()
resp = self.service.get(self.base+str(id)+'/', params={'stats': 'all'})
return self.service.decode(schema, resp) | [
"def",
"single_stats",
"(",
"self",
",",
"id",
")",
":",
"# pylint: disable=invalid-name,redefined-builtin",
"schema",
"=",
"SingleStatsSchema",
"(",
")",
"resp",
"=",
"self",
".",
"service",
".",
"get",
"(",
"self",
".",
"base",
"+",
"str",
"(",
"id",
")",
"+",
"'/'",
",",
"params",
"=",
"{",
"'stats'",
":",
"'all'",
"}",
")",
"return",
"self",
".",
"service",
".",
"decode",
"(",
"schema",
",",
"resp",
")"
] | Compute stats for a result.
:param id: Result ID as an int.
:return: :class:`results.SingleStats <results.SingleStats>` object
:rtype: results.SingleStats | [
"Compute",
"stats",
"for",
"a",
"result",
"."
] | aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5 | https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/results.py#L808-L817 | train | 1,090 |
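Sketch for single_stats, which takes one result ID rather than a list; the same client assumptions apply.

from cdrouter import CDRouter

c = CDRouter('http://localhost', token='deadbeef')  # hypothetical connection details
stats = c.results.single_stats(20180101000000)      # hypothetical result ID; GET with stats=all
print(stats)  # decoded results.SingleStats object for that one result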
qacafe/cdrouter.py | cdrouter/results.py | ResultsService.progress_stats | def progress_stats(self, id): # pylint: disable=invalid-name,redefined-builtin
"""Compute progress stats for a result.
:param id: Result ID as an int.
:return: :class:`results.Progress <results.Progress>` object
:rtype: results.Progress
"""
schema = ProgressSchema()
resp = self.service.get(self.base+str(id)+'/', params={'stats': 'progress'})
return self.service.decode(schema, resp) | python | def progress_stats(self, id): # pylint: disable=invalid-name,redefined-builtin
"""Compute progress stats for a result.
:param id: Result ID as an int.
:return: :class:`results.Progress <results.Progress>` object
:rtype: results.Progress
"""
schema = ProgressSchema()
resp = self.service.get(self.base+str(id)+'/', params={'stats': 'progress'})
return self.service.decode(schema, resp) | [
"def",
"progress_stats",
"(",
"self",
",",
"id",
")",
":",
"# pylint: disable=invalid-name,redefined-builtin",
"schema",
"=",
"ProgressSchema",
"(",
")",
"resp",
"=",
"self",
".",
"service",
".",
"get",
"(",
"self",
".",
"base",
"+",
"str",
"(",
"id",
")",
"+",
"'/'",
",",
"params",
"=",
"{",
"'stats'",
":",
"'progress'",
"}",
")",
"return",
"self",
".",
"service",
".",
"decode",
"(",
"schema",
",",
"resp",
")"
] | Compute progress stats for a result.
:param id: Result ID as an int.
:return: :class:`results.Progress <results.Progress>` object
:rtype: results.Progress | [
"Compute",
"progress",
"stats",
"for",
"a",
"result",
"."
] | aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5 | https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/results.py#L819-L828 | train | 1,091 |
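progress_stats is useful for polling a result that is still running. A minimal polling sketch under the same client assumptions; the fields of the returned object come from ProgressSchema and are not spelled out here.

import time
from cdrouter import CDRouter

c = CDRouter('http://localhost', token='deadbeef')  # hypothetical connection details
result_id = 20180101000000                          # hypothetical ID of a running result
for _ in range(10):                                 # poll a few times; a real script would stop when done
    progress = c.results.progress_stats(result_id)  # GET with stats=progress
    print(progress)                                 # decoded results.Progress object
    time.sleep(5)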
qacafe/cdrouter.py | cdrouter/results.py | ResultsService.summary_stats | def summary_stats(self, id): # pylint: disable=invalid-name,redefined-builtin
"""Compute summary stats for a result.
:param id: Result ID as an int.
:return: :class:`results.SummaryStats <results.SummaryStats>` object
:rtype: results.SummaryStats
"""
schema = SummaryStatsSchema()
resp = self.service.get(self.base+str(id)+'/', params={'stats': 'summary'})
return self.service.decode(schema, resp) | python | def summary_stats(self, id): # pylint: disable=invalid-name,redefined-builtin
"""Compute summary stats for a result.
:param id: Result ID as an int.
:return: :class:`results.SummaryStats <results.SummaryStats>` object
:rtype: results.SummaryStats
"""
schema = SummaryStatsSchema()
resp = self.service.get(self.base+str(id)+'/', params={'stats': 'summary'})
return self.service.decode(schema, resp) | [
"def",
"summary_stats",
"(",
"self",
",",
"id",
")",
":",
"# pylint: disable=invalid-name,redefined-builtin",
"schema",
"=",
"SummaryStatsSchema",
"(",
")",
"resp",
"=",
"self",
".",
"service",
".",
"get",
"(",
"self",
".",
"base",
"+",
"str",
"(",
"id",
")",
"+",
"'/'",
",",
"params",
"=",
"{",
"'stats'",
":",
"'summary'",
"}",
")",
"return",
"self",
".",
"service",
".",
"decode",
"(",
"schema",
",",
"resp",
")"
] | Compute summary stats for a result.
:param id: Result ID as an int.
:return: :class:`results.SummaryStats <results.SummaryStats>` object
:rtype: results.SummaryStats | [
"Compute",
"summary",
"stats",
"for",
"a",
"result",
"."
] | aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5 | https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/results.py#L830-L839 | train | 1,092 |
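And the matching one-liner for summary_stats, again with hypothetical connection details:

from cdrouter import CDRouter

c = CDRouter('http://localhost', token='deadbeef')  # hypothetical connection details
summary = c.results.summary_stats(20180101000000)   # hypothetical result ID; GET with stats=summary
print(summary)  # decoded results.SummaryStats object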
qacafe/cdrouter.py | cdrouter/results.py | ResultsService.list_logdir | def list_logdir(self, id, filter=None, sort=None): # pylint: disable=invalid-name,redefined-builtin
"""Get a list of logdir files.
:param id: Result ID as an int.
:param filter: Filter to apply as string.
:param sort: Sort field to apply as string.
:return: :class:`results.LogDirFile <results.LogDirFile>` list
"""
schema = LogDirFileSchema()
resp = self.service.list(self.base+str(id)+'/logdir/', filter, sort)
return self.service.decode(schema, resp, many=True) | python | def list_logdir(self, id, filter=None, sort=None): # pylint: disable=invalid-name,redefined-builtin
"""Get a list of logdir files.
:param id: Result ID as an int.
:param filter: Filter to apply as string.
:param sort: Sort field to apply as string.
:return: :class:`results.LogDirFile <results.LogDirFile>` list
"""
schema = LogDirFileSchema()
resp = self.service.list(self.base+str(id)+'/logdir/', filter, sort)
return self.service.decode(schema, resp, many=True) | [
"def",
"list_logdir",
"(",
"self",
",",
"id",
",",
"filter",
"=",
"None",
",",
"sort",
"=",
"None",
")",
":",
"# pylint: disable=invalid-name,redefined-builtin",
"schema",
"=",
"LogDirFileSchema",
"(",
")",
"resp",
"=",
"self",
".",
"service",
".",
"list",
"(",
"self",
".",
"base",
"+",
"str",
"(",
"id",
")",
"+",
"'/logdir/'",
",",
"filter",
",",
"sort",
")",
"return",
"self",
".",
"service",
".",
"decode",
"(",
"schema",
",",
"resp",
",",
"many",
"=",
"True",
")"
] | Get a list of logdir files.
:param id: Result ID as an int.
:param filter: Filter to apply as string.
:param sort: Sort field to apply as string.
:return: :class:`results.LogDirFile <results.LogDirFile>` list | [
"Get",
"a",
"list",
"of",
"logdir",
"files",
"."
] | aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5 | https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/results.py#L841-L851 | train | 1,093 |
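Sketch for list_logdir showing the optional filter and sort arguments. The filter expression syntax below is an assumption; consult the CDRouter Web API documentation for the exact grammar.

from cdrouter import CDRouter

c = CDRouter('http://localhost', token='deadbeef')  # hypothetical connection details
files = c.results.list_logdir(
    20180101000000,                                 # hypothetical result ID
    filter='name~"cap"',                            # hypothetical filter expression
    sort='name',
)
for f in files:
    print(f)  # each item is a decoded results.LogDirFile; exact fields come from LogDirFileSchema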
qacafe/cdrouter.py | cdrouter/results.py | ResultsService.get_logdir_file | def get_logdir_file(self, id, filename): # pylint: disable=invalid-name,redefined-builtin
"""Download a logdir file.
:param id: Result ID as an int.
:param filename: Logdir filename as string.
:rtype: tuple `(io.BytesIO, 'filename')`
"""
resp = self.service.get(self.base+str(id)+'/logdir/'+filename+'/', stream=True)
b = io.BytesIO()
stream.stream_response_to_file(resp, path=b)
resp.close()
b.seek(0)
return (b, self.service.filename(resp)) | python | def get_logdir_file(self, id, filename): # pylint: disable=invalid-name,redefined-builtin
"""Download a logdir file.
:param id: Result ID as an int.
:param filename: Logdir filename as string.
:rtype: tuple `(io.BytesIO, 'filename')`
"""
resp = self.service.get(self.base+str(id)+'/logdir/'+filename+'/', stream=True)
b = io.BytesIO()
stream.stream_response_to_file(resp, path=b)
resp.close()
b.seek(0)
return (b, self.service.filename(resp)) | [
"def",
"get_logdir_file",
"(",
"self",
",",
"id",
",",
"filename",
")",
":",
"# pylint: disable=invalid-name,redefined-builtin",
"resp",
"=",
"self",
".",
"service",
".",
"get",
"(",
"self",
".",
"base",
"+",
"str",
"(",
"id",
")",
"+",
"'/logdir/'",
"+",
"filename",
"+",
"'/'",
",",
"stream",
"=",
"True",
")",
"b",
"=",
"io",
".",
"BytesIO",
"(",
")",
"stream",
".",
"stream_response_to_file",
"(",
"resp",
",",
"path",
"=",
"b",
")",
"resp",
".",
"close",
"(",
")",
"b",
".",
"seek",
"(",
"0",
")",
"return",
"(",
"b",
",",
"self",
".",
"service",
".",
"filename",
"(",
"resp",
")",
")"
] | Download a logdir file.
:param id: Result ID as an int.
:param filename: Logdir filename as string.
:rtype: tuple `(io.BytesIO, 'filename')` | [
"Download",
"a",
"logdir",
"file",
"."
] | aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5 | https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/results.py#L853-L865 | train | 1,094 |
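get_logdir_file returns an (io.BytesIO, filename) tuple with the buffer already rewound to position 0, so it can be copied straight to disk. Connection details, result ID, and the logdir filename below are hypothetical.

import shutil
from cdrouter import CDRouter

c = CDRouter('http://localhost', token='deadbeef')                  # hypothetical connection details
buf, name = c.results.get_logdir_file(20180101000000, 'final.txt')  # hypothetical result ID and filename
with open(name, 'wb') as fd:
    shutil.copyfileobj(buf, fd)  # buf is a BytesIO seeked to 0, ready to read
print('saved', name)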
qacafe/cdrouter.py | cdrouter/results.py | ResultsService.download_logdir_archive | def download_logdir_archive(self, id, format='zip', exclude_captures=False): # pylint: disable=invalid-name,redefined-builtin
"""Download logdir archive in tgz or zip format.
:param id: Result ID as an int.
:param format: (optional) Format to download, must be string `zip` or `tgz`.
:param exclude_captures: (optional) If bool `True`, don't include capture files.
:rtype: tuple `(io.BytesIO, 'filename')`
"""
resp = self.service.get(self.base+str(id)+'/logdir/', params={'format': format, 'exclude_captures': exclude_captures}, stream=True)
b = io.BytesIO()
stream.stream_response_to_file(resp, path=b)
resp.close()
b.seek(0)
return (b, self.service.filename(resp)) | python | def download_logdir_archive(self, id, format='zip', exclude_captures=False): # pylint: disable=invalid-name,redefined-builtin
"""Download logdir archive in tgz or zip format.
:param id: Result ID as an int.
:param format: (optional) Format to download, must be string `zip` or `tgz`.
:param exclude_captures: (optional) If bool `True`, don't include capture files.
:rtype: tuple `(io.BytesIO, 'filename')`
"""
resp = self.service.get(self.base+str(id)+'/logdir/', params={'format': format, 'exclude_captures': exclude_captures}, stream=True)
b = io.BytesIO()
stream.stream_response_to_file(resp, path=b)
resp.close()
b.seek(0)
return (b, self.service.filename(resp)) | [
"def",
"download_logdir_archive",
"(",
"self",
",",
"id",
",",
"format",
"=",
"'zip'",
",",
"exclude_captures",
"=",
"False",
")",
":",
"# pylint: disable=invalid-name,redefined-builtin",
"resp",
"=",
"self",
".",
"service",
".",
"get",
"(",
"self",
".",
"base",
"+",
"str",
"(",
"id",
")",
"+",
"'/logdir/'",
",",
"params",
"=",
"{",
"'format'",
":",
"format",
",",
"'exclude_captures'",
":",
"exclude_captures",
"}",
",",
"stream",
"=",
"True",
")",
"b",
"=",
"io",
".",
"BytesIO",
"(",
")",
"stream",
".",
"stream_response_to_file",
"(",
"resp",
",",
"path",
"=",
"b",
")",
"resp",
".",
"close",
"(",
")",
"b",
".",
"seek",
"(",
"0",
")",
"return",
"(",
"b",
",",
"self",
".",
"service",
".",
"filename",
"(",
"resp",
")",
")"
] | Download logdir archive in tgz or zip format.
:param id: Result ID as an int.
:param format: (optional) Format to download, must be string `zip` or `tgz`.
:param exclude_captures: (optional) If bool `True`, don't include capture files.
:rtype: tuple `(io.BytesIO, 'filename')` | [
"Download",
"logdir",
"archive",
"in",
"tgz",
"or",
"zip",
"format",
"."
] | aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5 | https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/results.py#L867-L880 | train | 1,095 |
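The archive download works the same way but lets you pick the format and skip capture files. A sketch with hypothetical values:

import shutil
from cdrouter import CDRouter

c = CDRouter('http://localhost', token='deadbeef')  # hypothetical connection details
buf, name = c.results.download_logdir_archive(
    20180101000000,          # hypothetical result ID
    format='tgz',            # 'zip' is the default
    exclude_captures=True,   # leave packet captures out to keep the archive small
)
with open(name, 'wb') as fd:
    shutil.copyfileobj(buf, fd)
print('saved', name)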
inveniosoftware/invenio-oauthclient | invenio_oauthclient/contrib/cern.py | logout | def logout():
"""CERN logout view."""
logout_url = REMOTE_APP['logout_url']
apps = current_app.config.get('OAUTHCLIENT_REMOTE_APPS')
if apps:
cern_app = apps.get('cern', REMOTE_APP)
logout_url = cern_app['logout_url']
return redirect(logout_url, code=302) | python | def logout():
"""CERN logout view."""
logout_url = REMOTE_APP['logout_url']
apps = current_app.config.get('OAUTHCLIENT_REMOTE_APPS')
if apps:
cern_app = apps.get('cern', REMOTE_APP)
logout_url = cern_app['logout_url']
return redirect(logout_url, code=302) | [
"def",
"logout",
"(",
")",
":",
"logout_url",
"=",
"REMOTE_APP",
"[",
"'logout_url'",
"]",
"apps",
"=",
"current_app",
".",
"config",
".",
"get",
"(",
"'OAUTHCLIENT_REMOTE_APPS'",
")",
"if",
"apps",
":",
"cern_app",
"=",
"apps",
".",
"get",
"(",
"'cern'",
",",
"REMOTE_APP",
")",
"logout_url",
"=",
"cern_app",
"[",
"'logout_url'",
"]",
"return",
"redirect",
"(",
"logout_url",
",",
"code",
"=",
"302",
")"
] | CERN logout view. | [
"CERN",
"logout",
"view",
"."
] | 2500dc6935738107617aeade79e050d7608004bb | https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/contrib/cern.py#L178-L187 | train | 1,096 |
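A minimal sketch of how the logout view resolves its redirect target from application config. The Flask app, the overridden logout_url, and calling the view directly inside a test request context are illustration-only assumptions.

from flask import Flask
from invenio_oauthclient.contrib.cern import REMOTE_APP, logout

app = Flask(__name__)
# Override the CERN logout target; this mirrors the nested key the view reads.
custom_cern = dict(REMOTE_APP, logout_url='https://login.example.org/logout')  # hypothetical URL
app.config['OAUTHCLIENT_REMOTE_APPS'] = {'cern': custom_cern}

with app.test_request_context('/'):
    resp = logout()
    print(resp.status_code, resp.headers['Location'])  # 302 and the overridden URL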
inveniosoftware/invenio-oauthclient | invenio_oauthclient/contrib/cern.py | find_remote_by_client_id | def find_remote_by_client_id(client_id):
"""Return a remote application based with given client ID."""
for remote in current_oauthclient.oauth.remote_apps.values():
if remote.name == 'cern' and remote.consumer_key == client_id:
return remote | python | def find_remote_by_client_id(client_id):
"""Return a remote application based with given client ID."""
for remote in current_oauthclient.oauth.remote_apps.values():
if remote.name == 'cern' and remote.consumer_key == client_id:
return remote | [
"def",
"find_remote_by_client_id",
"(",
"client_id",
")",
":",
"for",
"remote",
"in",
"current_oauthclient",
".",
"oauth",
".",
"remote_apps",
".",
"values",
"(",
")",
":",
"if",
"remote",
".",
"name",
"==",
"'cern'",
"and",
"remote",
".",
"consumer_key",
"==",
"client_id",
":",
"return",
"remote"
] | Return a remote application based on the given client ID. | [
"Return",
"a",
"remote",
"application",
"based",
"with",
"given",
"client",
"ID",
"."
] | 2500dc6935738107617aeade79e050d7608004bb | https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/contrib/cern.py#L190-L194 | train | 1,097 |
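find_remote_by_client_id needs a fully configured Invenio application (InvenioOAuthClient initialised with a 'cern' remote app); that setup is assumed and not shown. Within such an app context the lookup is just:

from invenio_oauthclient.contrib.cern import find_remote_by_client_id

remote = find_remote_by_client_id('my-cern-client-id')  # hypothetical consumer key
if remote is not None:
    print(remote.name, remote.consumer_key)             # 'cern' and the matching key
else:
    print('no CERN remote app registered with that client ID')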
inveniosoftware/invenio-oauthclient | invenio_oauthclient/contrib/cern.py | fetch_groups | def fetch_groups(groups):
"""Prepare list of allowed group names.
:param groups: The complete list of groups.
:returns: A filtered list of groups.
"""
hidden_groups = current_app.config.get(
'OAUTHCLIENT_CERN_HIDDEN_GROUPS', OAUTHCLIENT_CERN_HIDDEN_GROUPS)
hidden_groups_re = current_app.config.get(
'OAUTHCLIENT_CERN_HIDDEN_GROUPS_RE',
OAUTHCLIENT_CERN_HIDDEN_GROUPS_RE)
groups = [group for group in groups if group not in hidden_groups]
filter_groups = []
for regexp in hidden_groups_re:
for group in groups:
if regexp.match(group):
filter_groups.append(group)
groups = [group for group in groups if group not in filter_groups]
return groups | python | def fetch_groups(groups):
"""Prepare list of allowed group names.
:param groups: The complete list of groups.
:returns: A filtered list of groups.
"""
hidden_groups = current_app.config.get(
'OAUTHCLIENT_CERN_HIDDEN_GROUPS', OAUTHCLIENT_CERN_HIDDEN_GROUPS)
hidden_groups_re = current_app.config.get(
'OAUTHCLIENT_CERN_HIDDEN_GROUPS_RE',
OAUTHCLIENT_CERN_HIDDEN_GROUPS_RE)
groups = [group for group in groups if group not in hidden_groups]
filter_groups = []
for regexp in hidden_groups_re:
for group in groups:
if regexp.match(group):
filter_groups.append(group)
groups = [group for group in groups if group not in filter_groups]
return groups | [
"def",
"fetch_groups",
"(",
"groups",
")",
":",
"hidden_groups",
"=",
"current_app",
".",
"config",
".",
"get",
"(",
"'OAUTHCLIENT_CERN_HIDDEN_GROUPS'",
",",
"OAUTHCLIENT_CERN_HIDDEN_GROUPS",
")",
"hidden_groups_re",
"=",
"current_app",
".",
"config",
".",
"get",
"(",
"'OAUTHCLIENT_CERN_HIDDEN_GROUPS_RE'",
",",
"OAUTHCLIENT_CERN_HIDDEN_GROUPS_RE",
")",
"groups",
"=",
"[",
"group",
"for",
"group",
"in",
"groups",
"if",
"group",
"not",
"in",
"hidden_groups",
"]",
"filter_groups",
"=",
"[",
"]",
"for",
"regexp",
"in",
"hidden_groups_re",
":",
"for",
"group",
"in",
"groups",
":",
"if",
"regexp",
".",
"match",
"(",
"group",
")",
":",
"filter_groups",
".",
"append",
"(",
"group",
")",
"groups",
"=",
"[",
"group",
"for",
"group",
"in",
"groups",
"if",
"group",
"not",
"in",
"filter_groups",
"]",
"return",
"groups"
] | Prepare list of allowed group names.
:param groups: The complete list of groups.
:returns: A filtered list of groups. | [
"Prepare",
"list",
"of",
"allowed",
"group",
"names",
"."
] | 2500dc6935738107617aeade79e050d7608004bb | https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/contrib/cern.py#L197-L216 | train | 1,098 |
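fetch_groups only needs a Flask application context so it can read the two OAUTHCLIENT_CERN_HIDDEN_GROUPS settings. The group names and filter patterns below are hypothetical.

import re
from flask import Flask
from invenio_oauthclient.contrib.cern import fetch_groups

app = Flask(__name__)
app.config['OAUTHCLIENT_CERN_HIDDEN_GROUPS'] = ('cern-internal',)               # hide this exact name
app.config['OAUTHCLIENT_CERN_HIDDEN_GROUPS_RE'] = (re.compile(r'.*-admins$'),)  # hide anything ending in -admins

with app.app_context():
    groups = ['alice-users', 'cern-internal', 'it-admins']
    print(fetch_groups(groups))  # -> ['alice-users']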
inveniosoftware/invenio-oauthclient | invenio_oauthclient/contrib/cern.py | fetch_extra_data | def fetch_extra_data(resource):
"""Return a dict with extra data retrieved from cern oauth."""
person_id = resource.get('PersonID', [None])[0]
identity_class = resource.get('IdentityClass', [None])[0]
department = resource.get('Department', [None])[0]
return dict(
person_id=person_id,
identity_class=identity_class,
department=department
) | python | def fetch_extra_data(resource):
"""Return a dict with extra data retrieved from cern oauth."""
person_id = resource.get('PersonID', [None])[0]
identity_class = resource.get('IdentityClass', [None])[0]
department = resource.get('Department', [None])[0]
return dict(
person_id=person_id,
identity_class=identity_class,
department=department
) | [
"def",
"fetch_extra_data",
"(",
"resource",
")",
":",
"person_id",
"=",
"resource",
".",
"get",
"(",
"'PersonID'",
",",
"[",
"None",
"]",
")",
"[",
"0",
"]",
"identity_class",
"=",
"resource",
".",
"get",
"(",
"'IdentityClass'",
",",
"[",
"None",
"]",
")",
"[",
"0",
"]",
"department",
"=",
"resource",
".",
"get",
"(",
"'Department'",
",",
"[",
"None",
"]",
")",
"[",
"0",
"]",
"return",
"dict",
"(",
"person_id",
"=",
"person_id",
",",
"identity_class",
"=",
"identity_class",
",",
"department",
"=",
"department",
")"
] | Return a dict with extra data retrieved from cern oauth. | [
"Return",
"a",
"dict",
"with",
"extra",
"data",
"retrieved",
"from",
"cern",
"oauth",
"."
] | 2500dc6935738107617aeade79e050d7608004bb | https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/contrib/cern.py#L219-L229 | train | 1,099 |
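fetch_extra_data has no Flask dependency at all; it simply unwraps the first element of three multi-valued attributes. The payload below is a trimmed-down, hypothetical CERN OAuth resource.

from invenio_oauthclient.contrib.cern import fetch_extra_data

resource = {
    'PersonID': ['123456'],               # hypothetical values; each attribute arrives as a list
    'IdentityClass': ['CERN Registered'],
    'Department': ['IT/CDA'],
}
print(fetch_extra_data(resource))
# -> {'person_id': '123456', 'identity_class': 'CERN Registered', 'department': 'IT/CDA'}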