function | label | info
---|---|---|
def get_inet_num(search_term):
"""
Get inetnums for a domain
:param search_term: keyword without dots, no full domains are allowed. domain.com -> invalid |---| domain -> valid
:type search_term: str
:return: iterable of IP/CIDR notation strings, or None
:rtype: set(str) | None
"""
# Disable request logging
requests_log = logging.getLogger("requests")
requests_log.addHandler(logging.NullHandler())
requests_log.propagate = False
# Search the RIPE database
# There is an issue with RIPE. When making a request and including
# the type-filter inetnum, the JSON response also includes other types.
request = requests.get('http://rest.db.ripe.net/search.json', params={
'query-string': search_term,
'type-filter': 'inetnum'
})
json_results = json.loads(request.text)
try:
# Filter any object that doesn't have the type 'inetnum'
ranges = [x['primary-key']['attribute'][0]['value']
for x in json_results['objects']['object']
if x['type'] == 'inetnum']
except __HOLE__:
return None
# Turn the IP range string into CIDR
cidrs = []
for _range in ranges:
_range = _range.split(' - ')
cidrs.append(netaddr.iprange_to_cidrs(_range[0], _range[1]))
results = set()
# Collect the first CIDR of each range
for cidr in cidrs:
results.add(str(cidr[0]))
return results | KeyError | dataset/ETHPy150Open cr0hn/enteletaor/enteletaor_lib/libs/contrib/inetnum.py/get_inet_num |
def __init__(self, title='Plugin Editor', default_path=None, parent=None):
QDockWidget.__init__(self, title, parent)
self.setupUi()
self.thread_manager = ThreadManager(self)
try:
self.rope_project = codeeditor.get_rope_project()
except (__HOLE__, AttributeError): # Might happen when frozen
self.rope_project = None
data_path = QDesktopServices.storageLocation(
QDesktopServices.DataLocation)
self.default_path = default_path or os.getcwd()
self.rope_temp_path = os.path.join(data_path, '.temp')
self.tabs.currentChanged.connect(self._tab_changed)
self.enter_completion = True | IOError | dataset/ETHPy150Open rproepp/spykeviewer/spykeviewer/ui/plugin_editor_dock.py/PluginEditorDock.__init__ |
def _setup_editor(self):
font = QFont('Some font that does not exist')
font.setStyleHint(font.TypeWriter, font.PreferDefault)
editor = codeeditor.CodeEditor(self)
try:
editor.setup_editor(
linenumbers=True, language='py',
scrollflagarea=False,
codecompletion_enter=self.enter_completion,
tab_mode=False, edge_line=False, font=font,
codecompletion_auto=True, go_to_definition=True,
codecompletion_single=True, calltips=True)
except __HOLE__: # codecompletion_single is gone in 2.3.0
editor.setup_editor(
linenumbers=True, language='py',
scrollflagarea=False,
codecompletion_enter=self.enter_completion,
tab_mode=False, edge_line=False, font=font,
codecompletion_auto=True, go_to_definition=True,
calltips=True)
editor.setCursor(Qt.IBeamCursor)
editor.horizontalScrollBar().setCursor(Qt.ArrowCursor)
editor.verticalScrollBar().setCursor(Qt.ArrowCursor)
return editor | TypeError | dataset/ETHPy150Open rproepp/spykeviewer/spykeviewer/ui/plugin_editor_dock.py/PluginEditorDock._setup_editor |
def save_file(self, editor, force_dialog=False):
""" Save the file from an editor object.
:param editor: The editor for which the file should be saved.
:param bool force_dialog: If True, a "Save as..." dialog will be
shown even if a file name is associated with the editor.
"""
if not editor:
return
if force_dialog or not editor.file_name:
d = QFileDialog(
self, 'Choose where to save file',
self.tabs.currentWidget().file_name or self.default_path)
d.setAcceptMode(QFileDialog.AcceptSave)
d.setNameFilter("Python files (*.py)")
d.setDefaultSuffix('py')
if d.exec_():
file_name = unicode(d.selectedFiles()[0])
else:
return False
else:
file_name = editor.file_name
err = self.code_has_errors(editor)
if err:
if QMessageBox.warning(
self, 'Error saving "%s"' % editor.file_name,
'Compile error:\n' + err + '\n\nIf this file contains '
'a plugin, it will disappear from the plugin list.\n'
'Save anyway?',
QMessageBox.Yes | QMessageBox.No) == QMessageBox.No:
return False
try:
f = open(file_name, 'w')
f.write('\n'.join(self.code(editor)).encode('UTF-8'))
f.close()
except __HOLE__, e:
QMessageBox.critical(
self, 'Error saving "%s"' % editor.file_name, str(e))
return False
editor.file_name = file_name
editor.file_was_changed = False
fname = os.path.split(editor.file_name)[1]
self.tabs.setTabText(self.tabs.indexOf(editor), fname)
self.plugin_saved.emit(editor.file_name)
return True | IOError | dataset/ETHPy150Open rproepp/spykeviewer/spykeviewer/ui/plugin_editor_dock.py/PluginEditorDock.save_file |
def check_package(self, package, package_dir):
"""Check namespace packages' __init__ for declare_namespace"""
try:
return self.packages_checked[package]
except __HOLE__:
pass
init_py = orig.build_py.check_package(self, package, package_dir)
self.packages_checked[package] = init_py
if not init_py or not self.distribution.namespace_packages:
return init_py
for pkg in self.distribution.namespace_packages:
if pkg == package or pkg.startswith(package + '.'):
break
else:
return init_py
f = open(init_py, 'rbU')
if 'declare_namespace'.encode() not in f.read():
from distutils.errors import DistutilsError
raise DistutilsError(
"Namespace package problem: %s is a namespace package, but "
"its\n__init__.py does not call declare_namespace()! Please "
'fix it.\n(See the setuptools manual under '
'"Namespace Packages" for details.)\n"' % (package,)
)
f.close()
return init_py | KeyError | dataset/ETHPy150Open chalasr/Flask-P2P/venv/lib/python2.7/site-packages/setuptools/command/build_py.py/build_py.check_package |
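For reference, the line this check scans for is the conventional setuptools namespace declaration; the `__init__.py` of a namespace package contains just:

```python
# Conventional __init__.py of a setuptools namespace package --
# this is the declare_namespace() call that check_package() looks for.
__import__('pkg_resources').declare_namespace(__name__)
```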
def get_rc_exc(rc):
rc = int(rc)
try: return rc_exc_cache[rc]
except __HOLE__: pass
name = "ErrorReturnCode_%d" % rc
exc = type(name, (ErrorReturnCode,), {})
rc_exc_cache[rc] = exc
return exc | KeyError | dataset/ETHPy150Open chrippa/livestreamer/src/livestreamer/packages/pbs.py/get_rc_exc |
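The memoized three-argument `type()` call manufactures one exception subclass per return code, so callers can catch a specific code while every such class still shares a common base. The same pattern standalone (names here are illustrative, not from pbs):

```python
class CommandError(Exception):
    pass

_cache = {}

def error_for_code(rc):
    # Create the subclass once per code, then reuse the cached class.
    if rc not in _cache:
        _cache[rc] = type("CommandError_%d" % rc, (CommandError,), {})
    return _cache[rc]

try:
    raise error_for_code(2)("exit status 2")
except error_for_code(2):
    pass  # caught: the cache returns the same class both times
except CommandError:
    pass  # any other code would still match the shared base
```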
def _format_arg(self, arg):
if IS_PY3:
arg = str(arg)
else:
try:
arg = unicode(arg, DEFAULT_ENCODING).encode(DEFAULT_ENCODING)
except __HOLE__:
arg = unicode(arg).encode(DEFAULT_ENCODING)
if self._partial:
escaped = arg.replace('"', '\\"')
escaped = escaped.replace("$", "\$")
escaped = escaped.replace("`", "\`")
arg = '"{0}"'.format(escaped)
return arg | TypeError | dataset/ETHPy150Open chrippa/livestreamer/src/livestreamer/packages/pbs.py/Command._format_arg |
def bake(self, *args, **kwargs):
fn = Command(self._path)
fn._partial = True
call_args, kwargs = self._extract_call_args(kwargs)
pruned_call_args = call_args
for k,v in Command.call_args.items():
try:
if pruned_call_args[k] == v:
del pruned_call_args[k]
except __HOLE__: continue
fn._partial_call_args.update(self._partial_call_args)
fn._partial_call_args.update(pruned_call_args)
fn._partial_baked_args.extend(self._partial_baked_args)
fn._partial_baked_args.extend(fn._compile_args(args, kwargs))
return fn | KeyError | dataset/ETHPy150Open chrippa/livestreamer/src/livestreamer/packages/pbs.py/Command.bake |
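bake() is partial application for commands: it copies the Command, pre-binds arguments, and prunes call args that merely restate the defaults. A hedged usage sketch, assuming the Command class defined above is importable from this module:

```python
ls = Command("/bin/ls")    # Command as defined in this module
ls_long = ls.bake("-l")    # returns a copy with "-l" pre-bound
ls_long("/tmp")            # behaves like Command("/bin/ls")("-l", "/tmp")
```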
def crawl():
"""
Initialize all crawlers (and indexers).
Start the:
1. GitHub crawler, :class:`crawler.GitHubCrawler`.
2. Bitbucket crawler, :class:`crawler.BitbucketCrawler`.
3. Git indexer, :class:`bitshift.crawler.indexer.GitIndexer`.
"""
_configure_logging()
time.sleep(5)
repo_clone_queue = Queue.Queue(maxsize=MAX_URL_QUEUE_SIZE)
run_event = Event()
run_event.set()
threads = [GitIndexer(repo_clone_queue, run_event)]
if sys.argv[1:]:
names = sys.argv[1:]
ranks = GitHubCrawler.get_ranks(names)
for name in names:
repo = GitRepository("https://github.com/" + name, name, "GitHub",
ranks[name])
repo_clone_queue.put(repo)
else:
threads += [GitHubCrawler(repo_clone_queue, run_event),
BitbucketCrawler(repo_clone_queue, run_event)]
for thread in threads:
thread.start()
try:
while 1:
time.sleep(0.1)
except __HOLE__:
run_event.clear()
with repo_clone_queue.mutex:
repo_clone_queue.queue.clear()
for thread in threads:
thread.join() | KeyboardInterrupt | dataset/ETHPy150Open earwig/bitshift/bitshift/crawler/crawl.py/crawl |
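Shutdown hinges on run_event: clearing it is the signal, and the final join() only returns because each thread's loop polls the flag. A minimal sketch of a cooperating worker (illustrative, not the actual GitIndexer or crawler classes):

```python
import threading
import time

class Worker(threading.Thread):
    def __init__(self, run_event):
        threading.Thread.__init__(self)
        self.run_event = run_event

    def run(self):
        # Do one unit of work, then re-check the flag.
        while self.run_event.is_set():
            time.sleep(0.1)

run_event = threading.Event()
run_event.set()
w = Worker(run_event)
w.start()
run_event.clear()  # signal shutdown
w.join()           # returns promptly because the loop observed the flag
```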
def get_champions_by_id(ids):
"""
Gets a bunch of champions by ID
Args:
ids (list<int>): the IDs of the champions to get
Returns:
list<Champion>: the requested champions
"""
champions = {champ.id: champ for champ in get_champions()}
results = []
for id_ in ids:
try:
champ = champions[id_]
except __HOLE__:
champ = None
results.append(champ)
return results | KeyError | dataset/ETHPy150Open meraki-analytics/cassiopeia/cassiopeia/core/staticdataapi.py/get_champions_by_id |
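The try/except KeyError lookup is equivalent to `dict.get` with a `None` default; the idiom keeps the result list aligned one-to-one with the requested ids. In isolation:

```python
champions = {1: "Annie", 2: "Olaf"}  # stand-in for {champ.id: champ}
ids = [2, 99, 1]
results = [champions.get(id_) for id_ in ids]
print(results)  # ['Olaf', None, 'Annie']: misses become None, order preserved
```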
def get_champions_by_name(names):
"""
Gets a bunch of champions by name
Args:
names (list<str>): the names of the champions to get
Returns:
list<Champion>: the requested champions
"""
indices = {names[i]: i for i in range(len(names))}
champions = get_champions()
results = [None for _ in range(len(names))]
for champion in champions:
try:
index = indices[champion.name]
results[index] = champion
except __HOLE__:
pass
return results | KeyError | dataset/ETHPy150Open meraki-analytics/cassiopeia/cassiopeia/core/staticdataapi.py/get_champions_by_name |
def get_items(ids=None):
"""
Gets a bunch of items (or all of them)
Args:
ids (list<int>): the IDs of the items to get (or None to get all items) (default None)
Returns:
list<Item>: the items
"""
if ids is not None:
items = {item.id: item for item in get_items()}
results = []
for id_ in ids:
try:
item = items[id_]
except __HOLE__:
item = None
results.append(item)
return results
else:
if cassiopeia.core.requests.data_store.has_all(cassiopeia.type.core.staticdata.Item):
return cassiopeia.core.requests.data_store.get_all(cassiopeia.type.core.staticdata.Item)
items = cassiopeia.dto.staticdataapi.get_items()
items = [cassiopeia.type.core.staticdata.Item(item[1]) for item in items.data.items()]
cassiopeia.core.requests.data_store.store(items, [item.id for item in items], [cassiopeia.type.core.staticdata.Item])
return items | KeyError | dataset/ETHPy150Open meraki-analytics/cassiopeia/cassiopeia/core/staticdataapi.py/get_items |
def get_masteries(ids=None):
"""
Gets a bunch of masteries (or all of them)
Args:
ids (list<int>): the IDs of the masteries to get (or None to get all masteries) (default None)
Returns:
list<Mastery>: the masteries
"""
if ids is not None:
masteries = {mastery.id: mastery for mastery in get_masteries()}
results = []
for id_ in ids:
try:
mastery = masteries[id_]
except __HOLE__:
mastery = None
results.append(mastery)
return results
else:
if cassiopeia.core.requests.data_store.has_all(cassiopeia.type.core.staticdata.Mastery):
return cassiopeia.core.requests.data_store.get_all(cassiopeia.type.core.staticdata.Mastery)
masteries = cassiopeia.dto.staticdataapi.get_masteries()
masteries = [cassiopeia.type.core.staticdata.Mastery(mastery[1]) for mastery in masteries.data.items()]
cassiopeia.core.requests.data_store.store(masteries, [mastery.id for mastery in masteries], [cassiopeia.type.core.staticdata.Mastery])
return masteries | KeyError | dataset/ETHPy150Open meraki-analytics/cassiopeia/cassiopeia/core/staticdataapi.py/get_masteries |
def get_runes(ids=None):
"""
Gets a bunch of runes (or all of them)
Args:
ids (list<int>): the IDs of the runes to get (or None to get all runes) (default None)
Returns:
list<Rune>: the runes
"""
if ids is not None:
runes = {rune.id: rune for rune in get_runes()}
results = []
for id_ in ids:
try:
rune = runes[id_]
except __HOLE__:
rune = None
results.append(rune)
return results
else:
if cassiopeia.core.requests.data_store.has_all(cassiopeia.type.core.staticdata.Rune):
return cassiopeia.core.requests.data_store.get_all(cassiopeia.type.core.staticdata.Rune)
runes = cassiopeia.dto.staticdataapi.get_runes()
runes = [cassiopeia.type.core.staticdata.Rune(rune[1]) for rune in runes.data.items()]
cassiopeia.core.requests.data_store.store(runes, [rune.id for rune in runes], [cassiopeia.type.core.staticdata.Rune])
return runes | KeyError | dataset/ETHPy150Open meraki-analytics/cassiopeia/cassiopeia/core/staticdataapi.py/get_runes |
def get_summoner_spells(ids=None):
"""
Gets a bunch of summoner spells (or all of them)
Args:
ids (list<int>): the IDs of the summoner spells to get (or None to get all summoner spells) (default None)
Returns:
list<SummonerSpell>: the summoner spells
"""
if ids is not None:
summoner_spells = {summoner_spell.id: summoner_spell for summoner_spell in get_summoner_spells()}
results = []
for id_ in ids:
try:
summoner_spell = summoner_spells[id_]
except __HOLE__:
summoner_spell = None
results.append(summoner_spell)
return results
else:
if cassiopeia.core.requests.data_store.has_all(cassiopeia.type.core.staticdata.SummonerSpell):
return cassiopeia.core.requests.data_store.get_all(cassiopeia.type.core.staticdata.SummonerSpell)
summoner_spells = cassiopeia.dto.staticdataapi.get_summoner_spells()
summoner_spells = [cassiopeia.type.core.staticdata.SummonerSpell(summoner_spell[1]) for summoner_spell in summoner_spells.data.items()]
cassiopeia.core.requests.data_store.store(summoner_spells, [summoner_spell.id for summoner_spell in summoner_spells], [cassiopeia.type.core.staticdata.SummonerSpell])
return summoner_spells | KeyError | dataset/ETHPy150Open meraki-analytics/cassiopeia/cassiopeia/core/staticdataapi.py/get_summoner_spells |
def run(self, module_config):
if hasattr(self, "__submodules__"):
try:
self.__submodules__[module_config.sub_action]['action'](module_config)
except __HOLE__:
self.__submodules__["default"]['action'](module_config)
else:
raise NotImplementedError("Run method must be overridden")
# ---------------------------------------------------------------------- | KeyError | dataset/ETHPy150Open cr0hn/enteletaor/enteletaor_lib/modules/__init__.py/IModule.run |
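run() above is a name-based dispatch with a fallback: unknown sub-actions fall through to the "default" handler. The same pattern in isolation (names are illustrative):

```python
def dispatch(submodules, sub_action, config):
    # Fall back to the "default" handler when the sub-action is unknown.
    try:
        handler = submodules[sub_action]["action"]
    except KeyError:
        handler = submodules["default"]["action"]
    return handler(config)

submodules = {"default": {"action": lambda cfg: "default ran"}}
print(dispatch(submodules, "missing", None))  # default ran
```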
def find_modules():
"""
Find modules and return a dict of module instances.
:return: dict with module instances in the format: dict(str: IModule)
:rtype: dict(str: IModule)
"""
import os
import os.path
import inspect
base_dir = os.path.abspath(os.path.dirname(__file__))
# Modules found
results = dict()
for root, dirs, files in os.walk(base_dir):
# Check if folder is a package
if "__init__.py" not in files:
continue
# Remove files or path that starts with "_"
if any(True for x in root.split("/") if x.startswith("_")):
continue
for filename in files:
if filename.endswith(".py") and \
not filename.startswith("celery") and \
not filename.startswith("test_"):
if filename.startswith("_"):
if filename != "__init__.py":
continue
# loop_file = os.path.join(root, filename)
loop_file = os.path.join(root, filename) \
.replace(base_dir, '') \
.replace(os.path.sep, '.') \
.replace('.py', '')
loop_file = loop_file[1:] if loop_file.startswith(".") else loop_file
# Load module info
try:
classes = __import__("%s.%s" % (__package__, loop_file), globals=globals(), locals=locals(), level=loop_file.count("."))
except __HOLE__:
classes = __import__(loop_file, globals=globals(), locals=locals(), level=loop_file.count("."))
# Get Modules instances
for m in dir(classes):
_loaded_module = getattr(classes, m)
if inspect.isclass(_loaded_module) \
and _loaded_module.__name__ != "IModule":
# Check if class inherits from IModule
for c in inspect.getmro(_loaded_module):
if c.__name__ == "IModule":
try:
results[_loaded_module.name] = _loaded_module
except AttributeError:
log.warning("Module '%s' has not attribute 'name' and can't be loaded." %
_loaded_module.__name__)
# Found!
break
return results | ImportError | dataset/ETHPy150Open cr0hn/enteletaor/enteletaor_lib/modules/__init__.py/find_modules |
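The inheritance test walks `inspect.getmro()` and compares class names rather than identities, which tolerates the same base class arriving through different import paths. In isolation:

```python
import inspect

class IModule(object):
    pass

class Scanner(IModule):
    name = "scan"

def inherits_imodule(cls):
    # Match by class name, as find_modules() does, not by object identity.
    return any(c.__name__ == "IModule" for c in inspect.getmro(cls))

print(inherits_imodule(Scanner))  # True
print(inherits_imodule(dict))     # False
```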
def get_task_from_tree_view(self, tree_view):
"""returns the task object from the given QTreeView
"""
task = None
selection_model = tree_view.selectionModel()
indexes = selection_model.selectedIndexes()
if indexes:
current_index = indexes[0]
item_model = tree_view.model()
current_item = item_model.itemFromIndex(current_index)
if current_item:
try:
task = current_item.task
except __HOLE__:
pass
return task | AttributeError | dataset/ETHPy150Open eoyilmaz/anima/anima/ui/version_mover.py/VersionMover.get_task_from_tree_view |
def copy_versions(self):
"""copies versions from one task to another
"""
# get from task
from_task = self.get_task_from_tree_view(self.from_task_tree_view)
# get logged in user
logged_in_user = self.get_logged_in_user()
if not from_task:
QtGui.QMessageBox.critical(
self,
'Error',
'Please select a task from <b>From Task</b> list'
)
return
# get to task
to_task = self.get_task_from_tree_view(self.to_task_tree_view)
if not to_task:
QtGui.QMessageBox.critical(
self,
'Error',
'Please select a task from <b>To Task</b> list'
)
return
# check if tasks are the same
if from_task == to_task:
QtGui.QMessageBox.critical(
self,
'Error',
'Please select two different tasks'
)
return
# get take names and related versions
# get distinct take names
from_take_names = map(
lambda x: x[0],
db.DBSession.query(distinct(Version.take_name))
.filter(Version.task == from_task)
.order_by(Version.take_name)
.all()
)
# create versions for each take
answer = QtGui.QMessageBox.question(
self,
'Info',
"Will copy %s versions from take names:<br><br>"
"%s"
"<br><br>"
"Is that Ok?" % (
len(from_take_names),
'<br>'.join(from_take_names)
),
QtGui.QMessageBox.Yes,
QtGui.QMessageBox.No
)
if answer == QtGui.QMessageBox.Yes:
for take_name in from_take_names:
latest_version = Version.query\
.filter_by(task=from_task)\
.filter_by(take_name=take_name)\
.order_by(Version.version_number.desc())\
.first()
# create a new version
new_version = Version(
task=to_task,
take_name=take_name
)
new_version.created_by = logged_in_user
new_version.extension = latest_version.extension
new_version.description = \
'Moved from another task (id=%s) with Version Mover' % \
latest_version.task.id
new_version.created_with = latest_version.created_with
db.DBSession.add(new_version)
db.DBSession.commit()
# update path
new_version.update_paths()
db.DBSession.add(new_version)
db.DBSession.commit()
# now copy the last_version file to the new_version path
try:
os.makedirs(new_version.absolute_path)
except __HOLE__: # path exists
pass
# copy the file there
shutil.copyfile(
latest_version.absolute_full_path,
new_version.absolute_full_path
)
# inform the user
QtGui.QMessageBox.information(
self,
'Success',
'Successfully copied %s versions' % len(from_take_names)
) | OSError | dataset/ETHPy150Open eoyilmaz/anima/anima/ui/version_mover.py/VersionMover.copy_versions |
def destroy_index(self, dirname):
if exists(dirname):
try:
rmtree(dirname)
except __HOLE__, e:
pass | OSError | dataset/ETHPy150Open dokipen/whoosh/tests/test_indexing.py/TestIndexing.destroy_index |
def perform_clustering(self,kwargs):
"""
Does the actual clustering.
"""
cutoff = kwargs["cutoff"]
try:
max_clusters = kwargs["max_clusters"]
except __HOLE__:
max_clusters = sys.maxint
nodes = range(self.condensed_matrix.row_length)
clusters = []
elements_already_clustered = 0
iteration = 0
# Do it while there are nodes left
while not len(nodes) == 0 and not len(clusters) >= max_clusters:
cluster = self.__do_one_iteration(nodes,cutoff)
clusters.append(cluster)
elements_already_clustered = elements_already_clustered + cluster.get_size()
if elements_already_clustered + len(nodes) > self.condensed_matrix.row_length:
print "[ERROR :: GROMOS perform_clustering] ", elements_already_clustered + len(nodes), iteration
exit(1)
iteration = iteration + 1
return Clustering(clusters,details="GROMOS (cutoff = "+str(cutoff)+")") | KeyError | dataset/ETHPy150Open victor-gil-sepulveda/pyProCT/pyproct/clustering/algorithms/gromos/gromosAlgorithm.py/GromosAlgorithm.perform_clustering |
def process(self):
(opts, args) = getopts()
chkopts(opts)
self.up_progress(10)
dev_list = comma_split(opts.dev)
if len(dev_list) < 2:
# TRANSLATORS:
# Not enough devices to create a bonding interface.
raise KssCommandOptException('ERROR: Small device for bonding. - dev=%s' % (opts.dev))
interface_list = get_ifconfig_info()
for dev in dev_list:
if dev not in interface_list:
raise KssCommandOptException('ERROR: Bonding target device not found. - dev=%s' % (dev))
if opts.primary not in dev_list:
raise KssCommandOptException('ERROR: Primary device not found in bonding device. - primary=%s dev=%s' % (opts.primary, opts.dev))
exist_bond_max_num = -1
exist_bond_list = get_ifconfig_info("regex:^bond")
for bond_name in exist_bond_list.keys():
try:
num = int(bond_name.replace("bond",""))
except __HOLE__:
continue
if exist_bond_max_num < num:
exist_bond_max_num = num
self.up_progress(10)
physical_bond_name = "bond%s" % (exist_bond_max_num + 1)
bridge_bond_name = "bondbr%s" % (exist_bond_max_num + 1)
bond_options = '"mode=%s primary=%s miimon=%s"' % (opts.mode, opts.primary, BONDING_CONFIG_MII_DEFAULT)
self.up_progress(10)
dop = DictOp()
ifcfg_parser = ifcfgParser()
modprobe_parser = modprobe_confParser()
dop.addconf("ifcfg", ifcfg_parser.read_conf())
if dop.getconf("ifcfg") == {}:
raise KssCommandException('Failure read network config file.')
dop.addconf("modprobe_conf", modprobe_parser.read_conf())
if dop.getconf("modprobe_conf") == {}:
raise KssCommandException('Failure read modprobe config file.')
self.up_progress(10)
eth_conf_copykey = ["HWADDR",
"BOOTPROTO",
"ONBOOT",
"USERCTL",
]
bond_conf_nocopykey = ["TYPE",
"HWADDR",
"MACADDR",
"ETHTOOL_OPTS",
"ESSID",
"CHANNEL",
]
self.up_progress(10)
for dev in dev_list:
conf = dop.get("ifcfg", dev)
if dev == opts.primary:
primary_conf = copy.deepcopy(conf)
dop.unset("ifcfg", dev)
dop.set("ifcfg", [dev, "DEVICE"], conf["DEVICE"]["value"])
for key in eth_conf_copykey:
if key in conf:
dop.set("ifcfg", [dev, key], conf[key]["value"])
dop.set("ifcfg", [dev, "MASTER"], physical_bond_name)
dop.set("ifcfg", [dev, "SLAVE"], "yes")
dop.set("ifcfg", [dev, "BOOTPROTO"], "none")
if dop.get("ifcfg", "p%s" % (dev)):
hwaddr = dop.get("ifcfg", ["p%s" % (dev), "HWADDR"])
if hwaddr:
dop.set("ifcfg", [dev, "HWADDR"], hwaddr)
dop.unset("ifcfg", "p%s" % (dev))
for key in bond_conf_nocopykey:
if key in primary_conf:
del primary_conf[key]
dop.set("ifcfg", bridge_bond_name, primary_conf)
dop.set("ifcfg", [bridge_bond_name, "DEVICE"], bridge_bond_name)
dop.set("ifcfg", [bridge_bond_name, "TYPE"], "Bridge")
dop.set("ifcfg", [physical_bond_name, "DEVICE"], physical_bond_name)
dop.set("ifcfg", [physical_bond_name, "BRIDGE"], bridge_bond_name)
dop.set("ifcfg", [physical_bond_name, "BOOTPROTO"], "none")
dop.set("ifcfg", [physical_bond_name, "ONBOOT"], dop.get("ifcfg", [bridge_bond_name, "ONBOOT"]))
dop.set("ifcfg", [physical_bond_name, "BONDING_OPTS"], bond_options)
self.up_progress(10)
dop.set("modprobe_conf", ["alias", physical_bond_name], "bonding")
for dev in dev_list:
if os.path.isfile("%s/ifcfg-%s" % (NETWORK_IFCFG_DIR, dev)):
copy_file("%s/ifcfg-%s" % (NETWORK_IFCFG_DIR, dev), VENDOR_DATA_BONDING_EVACUATION_DIR)
if os.path.isfile("%s/ifcfg-p%s" % (NETWORK_IFCFG_DIR, dev)):
move_file("%s/ifcfg-p%s" % (NETWORK_IFCFG_DIR, dev), VENDOR_DATA_BONDING_EVACUATION_DIR)
if ifcfg_parser.write_conf(dop.getconf("ifcfg")) is False:
raise KssCommandException('Failure write network config file.')
if modprobe_parser.write_conf(dop.getconf("modprobe_conf")) is False:
raise KssCommandException('Failure write modprobe config file.')
self.up_progress(10)
#
# Delete bridge device
#
bridge_list = get_bridge_info()
for dev in dev_list:
if dev in bridge_list:
ifdown_cmd = (NETWORK_IFDOWN_COMMAND,
dev,
)
(ifdown_rc, ifdown_res) = execute_command(ifdown_cmd)
if ifdown_rc != 0:
raise KssCommandException('Failure stop interface. interface:%s' % (dev))
for brif in bridge_list[dev]:
brctl_delif_cmd = (NETWORK_BRCTL_COMMAND,
"delif",
dev,
brif,
)
(brctl_rc, brctl_res) = execute_command(brctl_delif_cmd)
if brctl_rc != 0:
raise KssCommandException('Failure delete bridge port. bridge:%s port:%s' % (dev, brif))
brctl_delbr_cmd = (NETWORK_BRCTL_COMMAND,
"delbr",
dev,
)
(brctl_rc, brctl_res) = execute_command(brctl_delbr_cmd)
if brctl_rc != 0:
raise KssCommandException('Failure delete bridge. bridge:%s' % (dev,))
self.up_progress(10)
#
# Restart network
#
network_restart_cmd = (NETWORK_COMMAND,
"restart",
)
(net_rc, net_res) = execute_command(network_restart_cmd)
if net_rc != 0:
raise KssCommandException('Failure restart network.')
self.logger.info("Created bonding device. - dev=%s bond=%s" % (opts.dev, bridge_bond_name))
print >>sys.stdout, _("Created bonding device. - dev=%s bond=%s" % (opts.dev, bridge_bond_name))
return True | ValueError | dataset/ETHPy150Open karesansui/karesansui/bin/add_bonding.py/AddBonding.process |
def __init__(self, counts=None, calledfuncs=None, infile=None,
callers=None, outfile=None):
self.counts = counts
if self.counts is None:
self.counts = {}
self.counter = self.counts.copy() # map (filename, lineno) to count
self.calledfuncs = calledfuncs
if self.calledfuncs is None:
self.calledfuncs = {}
self.calledfuncs = self.calledfuncs.copy()
self.callers = callers
if self.callers is None:
self.callers = {}
self.callers = self.callers.copy()
self.infile = infile
self.outfile = outfile
if self.infile:
# Try to merge existing counts file.
try:
counts, calledfuncs, callers = \
pickle.load(open(self.infile, 'rb'))
self.update(self.__class__(counts, calledfuncs, callers))
except (__HOLE__, EOFError, ValueError), err:
print >> sys.stderr, ("Skipping counts file %r: %s"
% (self.infile, err)) | IOError | dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/trace.py/CoverageResults.__init__ |
def write_results(self, show_missing=True, summary=False, coverdir=None):
"""
@param coverdir: directory where the per-module .cover files are written; defaults to each source file's own directory
"""
if self.calledfuncs:
print
print "functions called:"
calls = self.calledfuncs.keys()
calls.sort()
for filename, modulename, funcname in calls:
print ("filename: %s, modulename: %s, funcname: %s"
% (filename, modulename, funcname))
if self.callers:
print
print "calling relationships:"
calls = self.callers.keys()
calls.sort()
lastfile = lastcfile = ""
for ((pfile, pmod, pfunc), (cfile, cmod, cfunc)) in calls:
if pfile != lastfile:
print
print "***", pfile, "***"
lastfile = pfile
lastcfile = ""
if cfile != pfile and lastcfile != cfile:
print " -->", cfile
lastcfile = cfile
print " %s.%s -> %s.%s" % (pmod, pfunc, cmod, cfunc)
# turn the counts data ("(filename, lineno) = count") into something
# accessible on a per-file basis
per_file = {}
for filename, lineno in self.counts.keys():
lines_hit = per_file[filename] = per_file.get(filename, {})
lines_hit[lineno] = self.counts[(filename, lineno)]
# accumulate summary info, if needed
sums = {}
for filename, count in per_file.iteritems():
# skip some "files" we don't care about...
if filename == "<string>":
continue
if filename.startswith("<doctest "):
continue
if filename.endswith((".pyc", ".pyo")):
filename = filename[:-1]
if coverdir is None:
dir = os.path.dirname(os.path.abspath(filename))
modulename = modname(filename)
else:
dir = coverdir
if not os.path.exists(dir):
os.makedirs(dir)
modulename = fullmodname(filename)
# If desired, get a list of the line numbers which represent
# executable content (returned as a dict for better lookup speed)
if show_missing:
lnotab = find_executable_linenos(filename)
else:
lnotab = {}
source = linecache.getlines(filename)
coverpath = os.path.join(dir, modulename + ".cover")
n_hits, n_lines = self.write_results_file(coverpath, source,
lnotab, count)
if summary and n_lines:
percent = 100 * n_hits // n_lines
sums[modulename] = n_lines, percent, modulename, filename
if summary and sums:
mods = sums.keys()
mods.sort()
print "lines cov% module (path)"
for m in mods:
n_lines, percent, modulename, filename = sums[m]
print "%5d %3d%% %s (%s)" % sums[m]
if self.outfile:
# try and store counts and module info into self.outfile
try:
pickle.dump((self.counts, self.calledfuncs, self.callers),
open(self.outfile, 'wb'), 1)
except __HOLE__, err:
print >> sys.stderr, "Can't save counts files because %s" % err | IOError | dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/trace.py/CoverageResults.write_results |
def write_results_file(self, path, lines, lnotab, lines_hit):
"""Return a coverage results file in path."""
try:
outfile = open(path, "w")
except __HOLE__, err:
print >> sys.stderr, ("trace: Could not open %r for writing: %s"
"- skipping" % (path, err))
return 0, 0
n_lines = 0
n_hits = 0
for i, line in enumerate(lines):
lineno = i + 1
# do the blank/comment match to try to mark more lines
# (help the reader find stuff that hasn't been covered)
if lineno in lines_hit:
outfile.write("%5d: " % lines_hit[lineno])
n_hits += 1
n_lines += 1
elif rx_blank.match(line):
outfile.write(" ")
else:
# lines preceded by no marks weren't hit
# Highlight them if so indicated, unless the line contains
# #pragma: NO COVER
if lineno in lnotab and not PRAGMA_NOCOVER in lines[i]:
outfile.write(">>>>>> ")
n_lines += 1
else:
outfile.write(" ")
outfile.write(lines[i].expandtabs(8))
outfile.close()
return n_hits, n_lines | IOError | dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/trace.py/CoverageResults.write_results_file |
def find_executable_linenos(filename):
"""Return dict where keys are line numbers in the line number table."""
try:
prog = open(filename, "rU").read()
except __HOLE__, err:
print >> sys.stderr, ("Not printing coverage data for %r: %s"
% (filename, err))
return {}
code = compile(prog, filename, "exec")
strs = find_strings(filename)
return find_lines(code, strs) | IOError | dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/trace.py/find_executable_linenos |
def main(argv=None):
import getopt
if argv is None:
argv = sys.argv
try:
opts, prog_argv = getopt.getopt(argv[1:], "tcrRf:d:msC:lTg",
["help", "version", "trace", "count",
"report", "no-report", "summary",
"file=", "missing",
"ignore-module=", "ignore-dir=",
"coverdir=", "listfuncs",
"trackcalls", "timing"])
except getopt.error, msg:
sys.stderr.write("%s: %s\n" % (sys.argv[0], msg))
sys.stderr.write("Try `%s --help' for more information\n"
% sys.argv[0])
sys.exit(1)
trace = 0
count = 0
report = 0
no_report = 0
counts_file = None
missing = 0
ignore_modules = []
ignore_dirs = []
coverdir = None
summary = 0
listfuncs = False
countcallers = False
timing = False
for opt, val in opts:
if opt == "--help":
usage(sys.stdout)
sys.exit(0)
if opt == "--version":
sys.stdout.write("trace 2.0\n")
sys.exit(0)
if opt == "-T" or opt == "--trackcalls":
countcallers = True
continue
if opt == "-l" or opt == "--listfuncs":
listfuncs = True
continue
if opt == "-g" or opt == "--timing":
timing = True
continue
if opt == "-t" or opt == "--trace":
trace = 1
continue
if opt == "-c" or opt == "--count":
count = 1
continue
if opt == "-r" or opt == "--report":
report = 1
continue
if opt == "-R" or opt == "--no-report":
no_report = 1
continue
if opt == "-f" or opt == "--file":
counts_file = val
continue
if opt == "-m" or opt == "--missing":
missing = 1
continue
if opt == "-C" or opt == "--coverdir":
coverdir = val
continue
if opt == "-s" or opt == "--summary":
summary = 1
continue
if opt == "--ignore-module":
for mod in val.split(","):
ignore_modules.append(mod.strip())
continue
if opt == "--ignore-dir":
for s in val.split(os.pathsep):
s = os.path.expandvars(s)
# should I also call expanduser? (after all, could use $HOME)
s = s.replace("$prefix",
os.path.join(sys.prefix, "lib",
"python" + sys.version[:3]))
s = s.replace("$exec_prefix",
os.path.join(sys.exec_prefix, "lib",
"python" + sys.version[:3]))
s = os.path.normpath(s)
ignore_dirs.append(s)
continue
assert 0, "Should never get here"
if listfuncs and (count or trace):
_err_exit("cannot specify both --listfuncs and (--trace or --count)")
if not (count or trace or report or listfuncs or countcallers):
_err_exit("must specify one of --trace, --count, --report, "
"--listfuncs, or --trackcalls")
if report and no_report:
_err_exit("cannot specify both --report and --no-report")
if report and not counts_file:
_err_exit("--report requires a --file")
if no_report and len(prog_argv) == 0:
_err_exit("missing name of file to run")
# everything is ready
if report:
results = CoverageResults(infile=counts_file, outfile=counts_file)
results.write_results(missing, summary=summary, coverdir=coverdir)
else:
sys.argv = prog_argv
progname = prog_argv[0]
sys.path[0] = os.path.split(progname)[0]
t = Trace(count, trace, countfuncs=listfuncs,
countcallers=countcallers, ignoremods=ignore_modules,
ignoredirs=ignore_dirs, infile=counts_file,
outfile=counts_file, timing=timing)
try:
with open(progname) as fp:
code = compile(fp.read(), progname, 'exec')
# try to emulate __main__ namespace as much as possible
globs = {
'__file__': progname,
'__name__': '__main__',
'__package__': None,
'__cached__': None,
}
t.runctx(code, globs, globs)
except __HOLE__, err:
_err_exit("Cannot run file %r because: %s" % (sys.argv[0], err))
except SystemExit:
pass
results = t.results()
if not no_report:
results.write_results(missing, summary=summary, coverdir=coverdir) | IOError | dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/trace.py/main |
def _get_deploy(self, app=None, env=None, number=None, deploy_id=None):
if deploy_id:
return Deploy.query.get(deploy_id)
try:
app = App.query.filter(App.name == app)[0]
except __HOLE__:
return None
try:
return Deploy.query.filter(
Deploy.app_id == app.id,
Deploy.environment == env,
Deploy.number == number,
)[0]
except IndexError:
return None | IndexError | dataset/ETHPy150Open getsentry/freight/freight/api/deploy_details.py/DeployMixin._get_deploy |
def load_model_class(model_path):
"""
Load by import a class by a string path like:
'module.models.MyModel'.
This mechanism allows extension and customization of
the Entry model class.
"""
dot = model_path.rindex('.')
module_name = model_path[:dot]
class_name = model_path[dot + 1:]
try:
_class = getattr(import_module(module_name), class_name)
return _class
except (__HOLE__, AttributeError):
raise ImproperlyConfigured('%s cannot be imported' % model_path) | ImportError | dataset/ETHPy150Open Fantomas42/django-blog-zinnia/zinnia/models_bases/__init__.py/load_model_class |
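Usage splits the dotted path at the last dot: everything before it is the module, the rest is the class name. A quick sketch with a standard-library class (any importable path behaves the same way):

```python
cls = load_model_class('collections.OrderedDict')
entry = cls()            # instantiate the dynamically loaded class
entry['title'] = 'demo'
```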
def spyLCMTraffic():
lc = lcm.LCM()
lc.subscribe('.+', onLCMMessage)
try:
while True:
lc.handle()
except __HOLE__:
pass
print
print
printLCMCatalog() | KeyboardInterrupt | dataset/ETHPy150Open RobotLocomotion/director/src/python/director/lcmspy.py/spyLCMTraffic |
def clean_slug(self):
# Ensure slug is not an integer value for Event.get_by_ident
data = self.cleaned_data['slug']
try:
int(data)
except __HOLE__:
pass
else:
raise forms.ValidationError("Slug must not be an integer-value.")
return data | ValueError | dataset/ETHPy150Open swcarpentry/amy/workshops/forms.py/EventForm.clean_slug |
def _parse_orcid_work(self, work):
if not work:
return {}
biblio = {}
# logger.debug(u"%20s parsing orcid work" % (self.provider_name))
try:
if work["work-citation"]["work-citation-type"].lower()=="bibtex":
biblio = self.bibtex_parser.parse(work["work-citation"]["citation"])[0]
except (KeyError, TypeError, IndexError):
# logger.debug(u"%s missing work citation type" % (self.provider_name))
pass
try:
biblio["year"] = work["publication-date"]["year"]["value"]
biblio["year"] = re.sub("\D", "", biblio["year"])
except (KeyError, TypeError, IndexError):
biblio["year"] = ""
try:
biblio["title"] = work["work-title"]["title"]["value"]
except (KeyError, TypeError, IndexError):
biblio["title"] = ""
try:
biblio["journal"] = work["work-title"]["subtitle"]["value"]
except (__HOLE__, TypeError, IndexError):
biblio["journal"] = ""
try:
biblio["url"] = work["url"]["value"]
except (KeyError, TypeError, IndexError):
if "url" in biblio:
del biblio["genre"]
if not "authors" in biblio:
biblio["authors"] = ""
try:
if work["work-external-identifiers"]["work-external-identifier"][0]['work-external-identifier-type'] == "ISBN":
biblio["isbn"] = work["work-external-identifiers"]["work-external-identifier"][0]["work-external-identifier-id"]['value']
except (KeyError, TypeError, IndexError):
pass
try:
biblio["genre"] = work["work-type"].lower().replace("_", " ")
if biblio["genre"] == "data set":
biblio["genre"] = "dataset"
elif biblio["genre"] == "undefined":
del biblio["genre"]
except (KeyError, TypeError, IndexError):
pass
try:
biblio["full_citation"] = work["work-citation"]["citation"]
biblio["full_citation_type"] = work["work-citation"]["work-citation-type"].lower()
except (KeyError, TypeError, IndexError):
pass
return biblio | KeyError | dataset/ETHPy150Open Impactstory/total-impact-webapp/totalimpact/providers/orcid.py/Orcid._parse_orcid_work |
def _extract_members(self, page, query_string=None):
if 'orcid-profile' not in page:
raise ProviderContentMalformedError("Content does not contain expected text")
data = provider._load_json(page)
members = []
try:
orcid_works = data["orcid-profile"]["orcid-activities"]["orcid-works"]["orcid-work"]
except KeyError:
return []
for work in orcid_works:
new_member = None
try:
ids = work["work-external-identifiers"]["work-external-identifier"]
for myid in ids:
if myid['work-external-identifier-type'] == "DOI":
doi = myid['work-external-identifier-id']['value']
doi = crossref.clean_doi(doi)
if doi:
new_member = ("doi", doi)
if myid['work-external-identifier-type'] == "PMID":
new_member = ("pmid", myid['work-external-identifier-id']['value'])
except __HOLE__:
pass
if not new_member:
# logger.info(u"no external identifiers, try saving whole citation for {orcid}".format(
# orcid=query_string))
biblio = self._parse_orcid_work(work)
new_member = ("biblio", biblio)
if new_member:
members += [new_member]
if not members:
raise ProviderItemNotFoundError
logger.info(u"returning {n} members for {orcid}".format(
n=len(members), orcid=query_string))
return(members) | KeyError | dataset/ETHPy150Open Impactstory/total-impact-webapp/totalimpact/providers/orcid.py/Orcid._extract_members |
def member_items(self,
query_string,
provider_url_template=None,
cache_enabled=True):
logger.debug(u"%s getting member_items for %s" % (self.provider_name, query_string))
if not provider_url_template:
provider_url_template = self.member_items_url_template
query_string = query_string.replace("http://orcid.org/", "")
url = self._get_templated_url(provider_url_template, query_string, "members")
headers = {}
headers["accept"] = "application/json"
# try to get a response from the data provider
# cache FALSE for now because people probably changing ORCIDs a lot
response = self.http_get(url, headers=headers, cache_enabled=False)
if response.status_code != 200:
self.logger.info(u"%s status_code=%i"
% (self.provider_name, response.status_code))
if response.status_code == 404:
raise ProviderItemNotFoundError
elif response.status_code == 303: #redirect
pass
else:
self._get_error(response.status_code, response)
# extract the member ids
try:
members = self._extract_members(response.text, query_string)
except (__HOLE__, TypeError):
members = []
return(members) | AttributeError | dataset/ETHPy150Open Impactstory/total-impact-webapp/totalimpact/providers/orcid.py/Orcid.member_items |
def _get_server_version_info(self, connection):
dbapi_con = connection.connection
version = []
r = re.compile('[.\-]')
for n in r.split(dbapi_con.getinfo(self.dbapi.SQL_DBMS_VER)):
try:
version.append(int(n))
except __HOLE__:
version.append(n)
return tuple(version) | ValueError | dataset/ETHPy150Open RoseOu/flasky/venv/lib/python2.7/site-packages/sqlalchemy/connectors/pyodbc.py/PyODBCConnector._get_server_version_info |
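Concretely, the split-and-coerce loop turns an ODBC version string into a comparable tuple, keeping any non-numeric fragments as strings. A standalone trace (the version string is a made-up example):

```python
import re

r = re.compile(r'[.\-]')
version = []
for n in r.split('09.00.1399-00'):
    try:
        version.append(int(n))
    except ValueError:
        version.append(n)
print(tuple(version))  # (9, 0, 1399, 0)
```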
def _get_node(template, context=Context(), name='subject', block_lookups={}):
try:
return _iter_nodes(template, context, name, block_lookups)
except __HOLE__:
context.template = template.template
return _iter_nodes(template.template, context, name, block_lookups) | TypeError | dataset/ETHPy150Open BradWhittington/django-templated-email/templated_email/utils.py/_get_node |
def exists(self, remote_path):
"""
Validate whether a remote file or directory exists.
:param remote_path: Path to remote file.
:type remote_path: ``str``
:rtype: ``bool``
"""
try:
self.sftp.lstat(remote_path).st_mode
except __HOLE__:
return False
return True | IOError | dataset/ETHPy150Open StackStorm/st2/st2actions/st2actions/runners/ssh/paramiko_ssh.py/ParamikoSSHClient.exists |
def parse_bytes(header):
"""
Parse a Range header into (bytes, list_of_ranges). Note that the
ranges are *inclusive* (like in HTTP, not like in Python
typically).
Will return None if the header is invalid
"""
if not header:
raise TypeError(
"The header must not be empty")
ranges = []
last_end = 0
try:
(units, range) = header.split("=", 1)
units = units.strip().lower()
for item in range.split(","):
if '-' not in item:
raise ValueError()
if item.startswith('-'):
# This is a range asking for a trailing chunk
if last_end < 0:
raise ValueError('too many end ranges')
begin = int(item)
end = None
last_end = -1
else:
(begin, end) = item.split("-", 1)
begin = int(begin)
if begin < last_end or last_end < 0:
print begin, last_end
raise ValueError('begin<last_end, or last_end<0')
if not end.strip():
end = None
else:
end = int(end)
if end is not None and begin > end:
raise ValueError('begin>end')
last_end = end
ranges.append((begin, end))
except __HOLE__, e:
# In this case where the Range header is malformed,
# section 14.16 says to treat the request as if the
# Range header was not present. How do I log this?
print e
return None
return (units, ranges) | ValueError | dataset/ETHPy150Open AppScale/appscale/AppServer/lib/webob_0_9/webob/byterange.py/Range.parse_bytes |
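Tracing the parser on typical Range headers shows the conventions the docstring describes: ends are inclusive, open ends become None, and malformed input yields None (calls shown as plain functions, matching the definition above):

```python
parse_bytes('bytes=0-499')   # -> ('bytes', [(0, 499)])
parse_bytes('bytes=500-')    # -> ('bytes', [(500, None)])
parse_bytes('bytes=-500')    # -> ('bytes', [(-500, None)]), a trailing chunk
parse_bytes('bytes=500-99')  # -> None: begin > end is rejected
```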
def parse(cls, value):
"""
Parse the header. May return None if it cannot parse.
"""
if value is None:
return None
value = value.strip()
if not value.startswith('bytes '):
# Unparseable
return None
value = value[len('bytes '):].strip()
if '/' not in value:
# Invalid, no length given
return None
range, length = value.split('/', 1)
if '-' not in range:
# Invalid, no range
return None
start, end = range.split('-', 1)
try:
start = int(start)
if end == '*':
end = None
else:
end = int(end)
if length == '*':
length = None
else:
length = int(length)
except __HOLE__:
# Parse problem
return None
if end is None:
return cls(start, None, length)
else:
return cls(start, end-1, length) | ValueError | dataset/ETHPy150Open AppScale/appscale/AppServer/lib/webob_0_9/webob/byterange.py/ContentRange.parse |
def ingest( self ):
NAME = [ "Grayscale" ]
# for each channel
for x in range(self.channel):
# label by RGB Channel
self.label ( x+1, NAME[x] )
# for each slice
for sl in range(self.startslice , self.endslice+1, self.batchsz):
imarray = np.zeros ( [self.batchsz,self._yimgsz,self._ximgsz], dtype=np.uint16 )
for b in range ( self.batchsz ):
if ( sl + b < self.endslice ):
# raw data
#filenm = '{}{}_{:0>4}.tif'.format(self.path, self.token.strip('Affine'), sl+b)
filenm = '{}allenAtlasPadded{:0>4}.tif'.format(self.path, sl+b)
# load the image and check the dimension
try:
print "Opening filename: " + filenm
imgdata = cv2.imread(filenm, -1)
#img = Image.open(filenm, 'r')
#imgdata = np.asarray ( img )
#if imgdata == None:
# imgdata = np.zeros((self._yimgsz,self._ximgsz))
imarray[(sl+b-self.startslice)%self.batchsz,0:imgdata.shape[0],0:imgdata.shape[1]] = imgdata
except __HOLE__, e:
print e
# ingset any remaining slices
self.upload( x+1, sl, imarray ) | IOError | dataset/ETHPy150Open neurodata/ndstore/ingest/raju/rajuchannel.py/RajuIngest.ingest |
@override_settings(STATICFILES_STORAGE='tests.tests.test_storage.PipelineNoPathStorage')
@pipeline_settings(JS_COMPRESSOR=None, CSS_COMPRESSOR=None, COMPILERS=['tests.tests.test_storage.DummyCSSCompiler'])
def test_post_process_no_path(self):
"""
Test post_process with a storage that doesn't implement the path method.
"""
staticfiles_storage._setup()
try:
call_command('collectstatic', verbosity=0, interactive=False)
except __HOLE__:
self.fail('Received an error running collectstatic') | NotImplementedError | dataset/ETHPy150Open jazzband/django-pipeline/tests/tests/test_storage.py/StorageTest.test_post_process_no_path |
@utils.enforce_id_param
def get_stream_urls(self, song_id):
"""Returns a list of urls that point to a streamable version of this song.
If you just need the audio and are ok with gmusicapi doing the download,
consider using :func:`get_stream_audio` instead.
This abstracts away the differences between different kinds of tracks:
* normal tracks return a single url
* All Access tracks return multiple urls, which must be combined
:param song_id: a single song id.
While acquiring the urls requires authentication, retrieving the
contents does not.
However, there are limitations on how the stream urls can be used:
* the urls expire after a minute
* only one IP can be streaming music at once.
Other attempts will get an http 403 with
``X-Rejected-Reason: ANOTHER_STREAM_BEING_PLAYED``.
*This is only intended for streaming*. The streamed audio does not contain metadata.
Use :func:`get_song_download_info` or :func:`Musicmanager.download_song
<gmusicapi.clients.Musicmanager.download_song>`
to download files with metadata.
"""
res = self._make_call(webclient.GetStreamUrl, song_id)
try:
return [res['url']]
except __HOLE__:
return res['urls'] | KeyError | dataset/ETHPy150Open simon-weber/gmusicapi/gmusicapi/clients/webclient.py/Webclient.get_stream_urls |
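As the docstring notes, All Access tracks return several short-lived urls whose contents must be concatenated in order. A minimal hand-rolled sketch (get_stream_audio is the built-in equivalent; `api` stands for an authenticated Webclient):

```python
import urllib2  # Python 2, matching this codebase

def fetch_stream_audio(api, song_id):
    # The urls expire after about a minute, so fetch them promptly and in order.
    chunks = [urllib2.urlopen(url).read() for url in api.get_stream_urls(song_id)]
    return ''.join(chunks)
```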
def __delitem__(self, header):
try:
del self._headers[header.lower()]
except __HOLE__:
pass | KeyError | dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.2/django/http/__init__.py/HttpResponse.__delitem__ |
def getTranslation(language):
global g_Translations
if language not in g_Translations:
filename = os.path.join(sys.path[0], 'assets', 'locales', language, 'plexconnect.mo')
try:
fp = open(filename, 'rb')
g_Translations[language] = gettext.GNUTranslations(fp)
fp.close()
except __HOLE__:
g_Translations[language] = gettext.NullTranslations()
return g_Translations[language] | IOError | dataset/ETHPy150Open iBaa/PlexConnect/Localize.py/getTranslation |
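Because the except branch substitutes NullTranslations, callers get a uniform gettext interface whether or not the .mo file exists; NullTranslations simply echoes the input. A hedged usage sketch (Python 2 gettext API; the string is illustrative):

```python
_ = getTranslation('de').ugettext  # identity mapping if no .mo file was found
print(_('Now Playing'))            # translated text, or 'Now Playing' unchanged
```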
@auth.s3_requires_membership(1)
def sms_modem_channel():
"""
RESTful CRUD controller for modem channels
- appears in the administration menu
Multiple Modems can be configured to receive Inbound Messages
"""
try:
import serial
except __HOLE__:
session.error = T("Python Serial module not available within the running Python - this needs installing to activate the Modem")
redirect(URL(c="admin", f="index"))
tablename = "%s_%s" % (module, resourcename)
table = s3db[tablename]
table.modem_port.label = T("Port")
table.modem_baud.label = T("Baud")
table.enabled.label = T("Enabled")
table.modem_port.comment = DIV(_class="tooltip",
_title="%s|%s" % (T("Port"),
T("The serial port at which the modem is connected - /dev/ttyUSB0, etc on linux and com1, com2, etc on Windows")))
table.modem_baud.comment = DIV(_class="tooltip",
_title="%s|%s" % (T("Baud"),
T("Baud rate to use for your modem - The default is safe for most cases")))
table.enabled.comment = DIV(_class="tooltip",
_title="%s|%s" % (T("Enabled"),
T("Unselect to disable the modem")))
# CRUD Strings
s3.crud_strings[tablename] = Storage(
label_create = T("Add Modem Channel"),
title_display = T("Modem Channel Details"),
title_list = T("Modem Channels"),
title_update = T("Edit Modem Channel"),
label_list_button = T("View Modem Channels"),
msg_record_created = T("Modem Channel added"),
msg_record_modified = T("Modem Channel updated"),
msg_record_deleted = T("Modem Channel deleted"),
msg_list_empty = T("No Modem Channels currently defined"))
return s3_rest_controller()
#------------------------------------------------------------------------------ | ImportError | dataset/ETHPy150Open sahana/eden/controllers/msg.py/sms_modem_channel |
def twitter_search():
"""
RESTful CRUD controller to add keywords
for Twitter Search
"""
tablename = "msg_twitter_search"
table = s3db[tablename]
table.is_processed.writable = False
table.is_searched.writable = False
table.is_processed.readable = False
table.is_searched.readable = False
langs = settings.get_L10n_languages().keys()
# Tweak languages to those supported by Twitter
S3Msg = s3base.S3Msg()
try:
import tweepy
except:
session.error = T("tweepy module not available within the running Python - this needs installing for non-Tropo Twitter support!")
redirect(URL(c="msg", f="index"))
twitter_settings = S3Msg.get_twitter_api()
supported_languages = ['fr', 'en', 'ar', 'ja', 'es', 'de', 'it', 'id', 'pt', 'ko', 'tr', 'ru', 'nl', 'fil',
'msa', 'zh-tw', 'zh-cn', 'hi', 'no', 'sv', 'fi', 'da', 'pl', 'hu', 'fa', 'he', 'ur', 'th']
if twitter_settings:
twitter_api = twitter_settings[0]
try:
supported_languages = map(lambda x: str(x["code"]), twitter_api.supported_languages())
except (tweepy.TweepError, __HOLE__):
# List according to Twitter 1.1 API https://dev.twitter.com/docs/api/1.1/get/help/languages
pass
substitute_list = {"en-gb": "en",
"pt-br": "pt"}
new_langs = []
lang_default = current.response.s3.language
for l in langs:
if l in supported_languages:
new_langs.append(l)
else:
supported_substitute = substitute_list.get(l)
if supported_substitute:
if lang_default == l:
lang_default = supported_substitute
if supported_substitute not in langs:
new_langs.append(supported_substitute)
else:
if lang_default == l:
lang_default = 'en'
langs = new_langs
table.lang.requires = IS_IN_SET(langs)
table.lang.default = lang_default
comment = "Add the keywords separated by single spaces."
table.keywords.comment = DIV(_class="tooltip",
_title="%s|%s" % (T("Keywords"),
T(comment)))
# CRUD Strings
s3.crud_strings[tablename] = Storage(
title_display = T("Twitter Search Queries"),
title_list = T("Twitter Search Queries"),
label_create = T("Add Twitter Search Query"),
title_update = T("Edit Twitter Search Query"),
label_list_button = T("View Queries"),
msg_record_created = T("Query added"),
msg_record_deleted = T("Query deleted"),
msg_list_empty = T("No Query currently defined"),
msg_record_modified = T("Query updated")
)
if request.post_vars.get("search_after_save"):
url_after_save = URL(f="twitter_result")
else:
url_after_save = None
s3db.configure(tablename,
listadd=True,
deletable=True,
create_onaccept=action_after_save,
create_next=url_after_save
)
def prep(r):
if r.interactive:
table = s3db.msg_twitter_channel
if not db(table.id > 0).select(table.id,
limitby=(0, 1)).first():
session.error = T("Need to configure Twitter Authentication")
redirect(URL(f="twitter_channel"))
return True
s3.prep = prep
def postp(r, output):
if r.interactive:
# Normal Action Buttons
s3_action_buttons(r)
# Custom Action Buttons
rtable = r.table
query = (rtable.deleted == False) & \
(rtable.is_searched == False)
records = db(query).select(rtable.id)
restrict_s = [str(record.id) for record in records]
query = (rtable.deleted == False) & \
(rtable.is_processed == False)
records = db(query).select(rtable.id)
restrict_k = [str(record.id) for record in records]
# @ToDo: Make these S3Methods rather than additional controllers
from s3 import s3_str
s3.actions += [dict(label=s3_str(T("Search")),
_class="action-btn",
url=URL(args=["[id]", "poll"]),
restrict = restrict_s),
dict(label=s3_str(T("Analyze with KeyGraph")),
_class="action-btn",
url = URL(args=["[id]", "keygraph"]),
restrict = restrict_k),
]
inject_search_after_save(output)
return output
s3.postp = postp
return s3_rest_controller()
# ----------------------------------------------------------------------------- | AttributeError | dataset/ETHPy150Open sahana/eden/controllers/msg.py/twitter_search |
def expiration_datetime(self):
"""Return provider session live seconds. Returns a timedelta ready to
use with session.set_expiry().
If provider returns a timestamp instead of session seconds to live, the
timedelta is inferred from current time (using UTC timezone). None is
returned if there's no value stored or it's invalid.
"""
if self.extra_data and 'expires' in self.extra_data:
try:
expires = int(self.extra_data.get('expires'))
except (__HOLE__, TypeError):
return None
now = datetime.utcnow()
# Detect if expires is a timestamp
if expires > time.mktime(now.timetuple()):
# expires is a datetime
return datetime.fromtimestamp(expires) - now
else:
# expires is a timedelta
return timedelta(seconds=expires) | ValueError | dataset/ETHPy150Open omab/python-social-auth/social/storage/base.py/UserMixin.expiration_datetime |
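The branch test works because an 'expires' value greater than the current Unix time can only be an absolute timestamp; smaller values are taken as seconds-to-live. The decision in isolation:

```python
import time
from datetime import datetime, timedelta

def to_timedelta(expires):
    now = datetime.utcnow()
    if expires > time.mktime(now.timetuple()):
        return datetime.fromtimestamp(expires) - now  # absolute timestamp
    return timedelta(seconds=expires)                 # seconds-to-live

print(to_timedelta(3600))  # ~1:00:00, treated as seconds
# to_timedelta(time.time() + 3600) is also ~1:00:00, treated as a timestamp
```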
def wsgibase(environ, responder):
"""
this is the gluon wsgi application. the first function called when a page
is requested (static or dynamic). it can be called by paste.httpserver
or by apache mod_wsgi.
- fills request with info
- the environment variables, replacing '.' with '_'
- adds web2py path and version info
- compensates for fcgi missing path_info and query_string
- validates the path in url
The url path must be either:
1. for static pages:
- /<application>/static/<file>
2. for dynamic pages:
- /<application>[/<controller>[/<function>[/<sub>]]][.<extension>]
- (sub may go several levels deep, currently 3 levels are supported:
sub1/sub2/sub3)
The naming conventions are:
- application, controller, function and extension may only contain
[a-zA-Z0-9_]
- file and sub may also contain '-', '=', '.' and '/'
"""
eget = environ.get
current.__dict__.clear()
request = Request(environ)
response = Response()
session = Session()
env = request.env
#env.web2py_path = global_settings.applications_parent
env.web2py_version = web2py_version
#env.update(global_settings)
static_file = False
try:
try:
try:
# ##################################################
# handle fcgi missing path_info and query_string
# select rewrite parameters
# rewrite incoming URL
# parse rewritten header variables
# parse rewritten URL
# serve file if static
# ##################################################
fixup_missing_path_info(environ)
(static_file, version, environ) = url_in(request, environ)
response.status = env.web2py_status_code or response.status
if static_file:
if eget('QUERY_STRING', '').startswith('attachment'):
response.headers['Content-Disposition'] \
= 'attachment'
if version:
response.headers['Cache-Control'] = 'max-age=315360000'
response.headers[
'Expires'] = 'Thu, 31 Dec 2037 23:59:59 GMT'
response.stream(static_file, request=request)
# ##################################################
# fill in request items
# ##################################################
app = request.application # must go after url_in!
if not global_settings.local_hosts:
local_hosts = set(['127.0.0.1', '::ffff:127.0.0.1', '::1'])
if not global_settings.web2py_runtime_gae:
try:
fqdn = socket.getfqdn()
local_hosts.add(socket.gethostname())
local_hosts.add(fqdn)
local_hosts.update([
addrinfo[4][0] for addrinfo
in getipaddrinfo(fqdn)])
if env.server_name:
local_hosts.add(env.server_name)
local_hosts.update([
addrinfo[4][0] for addrinfo
in getipaddrinfo(env.server_name)])
except (socket.gaierror, __HOLE__):
pass
global_settings.local_hosts = list(local_hosts)
else:
local_hosts = global_settings.local_hosts
client = get_client(env)
x_req_with = str(env.http_x_requested_with).lower()
request.update(
client = client,
folder = abspath('applications', app) + os.sep,
ajax = x_req_with == 'xmlhttprequest',
cid = env.http_web2py_component_element,
is_local = env.remote_addr in local_hosts,
is_https = env.wsgi_url_scheme in HTTPS_SCHEMES or \
request.env.http_x_forwarded_proto in HTTPS_SCHEMES \
or env.https == 'on'
)
request.compute_uuid() # requires client
request.url = environ['PATH_INFO']
# ##################################################
# access the requested application
# ##################################################
disabled = pjoin(request.folder, 'DISABLED')
if not exists(request.folder):
if app == rwthread.routes.default_application \
and app != 'welcome':
redirect(URL('welcome', 'default', 'index'))
elif rwthread.routes.error_handler:
_handler = rwthread.routes.error_handler
redirect(URL(_handler['application'],
_handler['controller'],
_handler['function'],
args=app))
else:
raise HTTP(404, rwthread.routes.error_message
% 'invalid request',
web2py_error='invalid application')
elif not request.is_local and exists(disabled):
raise HTTP(503, "<html><body><h1>Temporarily down for maintenance</h1></body></html>")
# ##################################################
# build missing folders
# ##################################################
create_missing_app_folders(request)
# ##################################################
# get the GET and POST data
# ##################################################
#parse_get_post_vars(request, environ)
# ##################################################
# expose wsgi hooks for convenience
# ##################################################
request.wsgi = LazyWSGI(environ, request, response)
# ##################################################
# load cookies
# ##################################################
if env.http_cookie:
try:
request.cookies.load(env.http_cookie)
except Cookie.CookieError, e:
pass # invalid cookies
# ##################################################
# try load session or create new session file
# ##################################################
if not env.web2py_disable_session:
session.connect(request, response)
# ##################################################
# run controller
# ##################################################
if global_settings.debugging and app != "admin":
import gluon.debug
# activate the debugger
gluon.debug.dbg.do_debug(mainpyfile=request.folder)
serve_controller(request, response, session)
except HTTP, http_response:
if static_file:
return http_response.to(responder, env=env)
if request.body:
request.body.close()
if hasattr(current,'request'):
# ##################################################
# on success, try store session in database
# ##################################################
session._try_store_in_db(request, response)
# ##################################################
# on success, commit database
# ##################################################
if response.do_not_commit is True:
BaseAdapter.close_all_instances(None)
elif response.custom_commit:
BaseAdapter.close_all_instances(response.custom_commit)
else:
BaseAdapter.close_all_instances('commit')
# ##################################################
# if session not in db try store session on filesystem
# this must be done after trying to commit database!
# ##################################################
session._try_store_in_cookie_or_file(request, response)
# Set header so client can distinguish component requests.
if request.cid:
http_response.headers.setdefault(
'web2py-component-content', 'replace')
if request.ajax:
if response.flash:
http_response.headers['web2py-component-flash'] = \
urllib2.quote(xmlescape(response.flash)\
.replace('\n',''))
if response.js:
http_response.headers['web2py-component-command'] = \
urllib2.quote(response.js.replace('\n',''))
# ##################################################
# store cookies in headers
# ##################################################
session._fixup_before_save()
http_response.cookies2headers(response.cookies)
ticket = None
except RestrictedError, e:
if request.body:
request.body.close()
# ##################################################
# on application error, rollback database
# ##################################################
# log tickets before rollback if not in DB
if not request.tickets_db:
ticket = e.log(request) or 'unknown'
# rollback
if response._custom_rollback:
response._custom_rollback()
else:
BaseAdapter.close_all_instances('rollback')
# if tickets in db, reconnect and store it in db
if request.tickets_db:
ticket = e.log(request) or 'unknown'
http_response = \
HTTP(500, rwthread.routes.error_message_ticket %
dict(ticket=ticket),
web2py_error='ticket %s' % ticket)
except:
if request.body:
request.body.close()
# ##################################################
# on application error, rollback database
# ##################################################
try:
if response._custom_rollback:
response._custom_rollback()
else:
BaseAdapter.close_all_instances('rollback')
except:
pass
e = RestrictedError('Framework', '', '', locals())
ticket = e.log(request) or 'unrecoverable'
http_response = \
HTTP(500, rwthread.routes.error_message_ticket
% dict(ticket=ticket),
web2py_error='ticket %s' % ticket)
finally:
if response and hasattr(response, 'session_file') \
and response.session_file:
response.session_file.close()
session._unlock(response)
http_response, new_environ = try_rewrite_on_error(
http_response, request, environ, ticket)
if not http_response:
return wsgibase(new_environ, responder)
if global_settings.web2py_crontype == 'soft':
newcron.softcron(global_settings.applications_parent).start()
return http_response.to(responder, env=env) | TypeError | dataset/ETHPy150Open uwdata/termite-visualizations/web2py/gluon/main.py/wsgibase |
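The commit/rollback choreography above is an instance of a general transaction pattern: commit only when the handler returns, roll back on any exception, and release resources in finally. A minimal sketch with a hypothetical connection object (not web2py's adapter API):

class FakeConnection(object):
    def commit(self):
        print('committed')
    def rollback(self):
        print('rolled back')
    def close(self):
        print('closed')

def handle(request, conn, handler):
    try:
        response = handler(request)
        conn.commit()        # success: persist the work done by the handler
        return response
    except Exception:
        conn.rollback()      # any failure: undo partial writes
        raise
    finally:
        conn.close()         # runs on both the success and error paths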
def appfactory(wsgiapp=wsgibase,
logfilename='httpserver.log',
profiler_dir=None,
profilerfilename=None):
"""
generates a wsgi application that does logging and profiling and calls
wsgibase
.. function:: gluon.main.appfactory(
[wsgiapp=wsgibase
[, logfilename='httpserver.log'
[, profilerfilename='profiler.log']]])
"""
if profilerfilename is not None:
raise BaseException("Deprecated API")
if profiler_dir:
profiler_dir = abspath(profiler_dir)
logger.warn('profiler is on. will use dir %s', profiler_dir)
if not os.path.isdir(profiler_dir):
try:
os.makedirs(profiler_dir)
except:
raise BaseException("Can't create dir %s" % profiler_dir)
filepath = pjoin(profiler_dir, 'wtest')
try:
filehandle = open( filepath, 'w' )
filehandle.close()
os.unlink(filepath)
except __HOLE__:
raise BaseException("Unable to write to dir %s" % profiler_dir)
def app_with_logging(environ, responder):
"""
a wsgi app that does logging and profiling and calls wsgibase
"""
status_headers = []
def responder2(s, h):
"""
wsgi responder app
"""
status_headers.append(s)
status_headers.append(h)
return responder(s, h)
time_in = time.time()
ret = [0]
if not profiler_dir:
ret[0] = wsgiapp(environ, responder2)
else:
import cProfile
prof = cProfile.Profile()
prof.enable()
ret[0] = wsgiapp(environ, responder2)
prof.disable()
destfile = pjoin(profiler_dir, "req_%s.prof" % web2py_uuid())
prof.dump_stats(destfile)
try:
line = '%s, %s, %s, %s, %s, %s, %f\n' % (
environ['REMOTE_ADDR'],
datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S'),
environ['REQUEST_METHOD'],
environ['PATH_INFO'].replace(',', '%2C'),
environ['SERVER_PROTOCOL'],
(status_headers[0])[:3],
time.time() - time_in,
)
if not logfilename:
sys.stdout.write(line)
elif isinstance(logfilename, str):
write_file(logfilename, line, 'a')
else:
logfilename.write(line)
except:
pass
return ret[0]
return app_with_logging | IOError | dataset/ETHPy150Open uwdata/termite-visualizations/web2py/gluon/main.py/appfactory |
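A minimal sketch of the logging-middleware idea implemented above: wrap a WSGI callable, capture the response status through a responder shim, and append one log line per request. Names and log format here are illustrative, not web2py's:

import time

def make_logging_app(wsgiapp, logfile='httpserver.log'):
    def app(environ, start_response):
        captured = []
        def responder(status, headers):
            captured.append(status)   # remember the status for the log line
            return start_response(status, headers)
        time_in = time.time()
        result = wsgiapp(environ, responder)
        line = '%s %s %s %.4f\n' % (
            environ.get('REQUEST_METHOD', '-'),
            environ.get('PATH_INFO', '-'),
            captured[0][:3] if captured else '???',
            time.time() - time_in)
        with open(logfile, 'a') as fh:
            fh.write(line)
        return result
    return app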
def connect_url(self, url, connect_args={}):
"""Connect to a database using an SqlAlchemy URL.
Args:
url: An SqlAlchemy-style DB connection URL.
connect_args: extra argument to be passed to the underlying
DB-API driver.
Returns:
True if connection was successful.
"""
if self.trans_ctx and self.trans_ctx.transaction.is_active:
print("You have an active transaction, either %commit or "
"%rollback before connecting to a new database.")
return False
try:
parsed_url = sa.engine.url.make_url(str(url))
except sa.exc.ArgumentError as e:
print(e)
return False
safe_url = self.safe_url(parsed_url)
if safe_url:
print("ipydb is connecting to: %s" % safe_url)
try:
self.engine = engine.from_url(parsed_url,
connect_args=connect_args)
except __HOLE__: # pragma: nocover
print("It looks like you don't have a driver for %s.\n"
"See the following URL for supported "
"database drivers:\n\t%s" % (
parsed_url.drivername,
'http://docs.sqlalchemy.org/en/latest/'
'dialects/index.html#included-dialects'))
return False
# force a connect so that we can fail early if the connection url won't
# work
try:
with self.engine.connect():
pass
except sa.exc.OperationalError as e: # pragma: nocover
print(e)
return False
self.connected = True
self.nickname = None
if self.do_reflection:
self.metadata_accessor.get_metadata(self.engine, noisy=True)
return True | ImportError | dataset/ETHPy150Open jaysw/ipydb/ipydb/plugin.py/SqlPlugin.connect_url |
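The fail-early connection check above can be reproduced with public SQLAlchemy APIs (make_url, create_engine); the helper name is made up:

import sqlalchemy as sa

def try_connect(url_string):
    try:
        parsed = sa.engine.url.make_url(url_string)
    except sa.exc.ArgumentError as e:
        print("Bad URL: %s" % e)
        return None
    engine = sa.create_engine(parsed)
    with engine.connect():   # raises here if the server is unreachable
        pass
    return engine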
@uri.setter
def uri(self, value):
try:
self._uri = value.encode("ascii")
except __HOLE__:
raise ValueError("uri value must be an ascii string")
except AttributeError:
raise TypeError("uri value must be a str type") | UnicodeDecodeError | dataset/ETHPy150Open javgh/greenaddress-pos-tools/nfc/ndef/uri_record.py/UriRecord.uri |
def create(self, req, body):
"""Creates a new instance event."""
context = req.environ['nova.context']
authorize(context, action='create')
response_events = []
accepted_events = []
accepted_instances = set()
instances = {}
result = 200
body_events = body.get('events', [])
if not isinstance(body_events, list) or not len(body_events):
raise webob.exc.HTTPBadRequest()
for _event in body_events:
client_event = dict(_event)
event = objects.InstanceExternalEvent(context)
status = client_event.get('status', 'completed')
if status not in external_event_obj.EVENT_STATUSES:
raise webob.exc.HTTPBadRequest(
_('Invalid event status `%s\'') % status)
if client_event.get('name') not in external_event_obj.EVENT_NAMES:
raise webob.exc.HTTPBadRequest(
_('Invalid event name %s') % client_event.get('name'))
try:
event.instance_uuid = client_event.pop('server_uuid')
event.name = client_event.pop('name')
event.status = client_event.pop('status', 'completed')
event.tag = client_event.pop('tag', None)
except __HOLE__ as missing_key:
msg = _('event entity requires key %(key)s') % {'key': missing_key}
raise webob.exc.HTTPBadRequest(explanation=msg)
if client_event:
msg = (_('event entity contains unsupported items: %s') %
', '.join(client_event.keys()))
raise webob.exc.HTTPBadRequest(explanation=msg)
instance = instances.get(event.instance_uuid)
if not instance:
try:
instance = objects.Instance.get_by_uuid(
context, event.instance_uuid)
instances[event.instance_uuid] = instance
except exception.InstanceNotFound:
LOG.debug('Dropping event %(name)s:%(tag)s for unknown '
'instance %(instance_uuid)s',
{'name': event.name, 'tag': event.tag,
'instance_uuid': event.instance_uuid})
_event['status'] = 'failed'
_event['code'] = 404
result = 207
# NOTE: before accepting the event, make sure the instance
# for which the event is sent is assigned to a host; otherwise
# it will not be possible to dispatch the event
if instance:
if instance.host:
accepted_events.append(event)
accepted_instances.add(instance)
LOG.info(_LI('Creating event %(name)s:%(tag)s for '
'instance %(instance_uuid)s'),
{'name': event.name, 'tag': event.tag,
'instance_uuid': event.instance_uuid})
# NOTE: as the event is processed asynchronously verify
# whether 202 is a more suitable response code than 200
_event['status'] = 'completed'
_event['code'] = 200
else:
LOG.debug("Unable to find a host for instance "
"%(instance)s. Dropping event %(event)s",
{'instance': event.instance_uuid,
'event': event.name})
_event['status'] = 'failed'
_event['code'] = 422
result = 207
response_events.append(_event)
if accepted_events:
self.compute_api.external_instance_event(
context, accepted_instances, accepted_events)
else:
msg = _('No instances found for any event')
raise webob.exc.HTTPNotFound(explanation=msg)
# FIXME(cyeoh): This needs some infrastructure support so that
# we have a general way to do this
robj = wsgi.ResponseObject({'events': response_events})
robj._code = result
return robj | KeyError | dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/api/openstack/compute/legacy_v2/contrib/server_external_events.py/ServerExternalEventsController.create |
def has_perm(self, user_obj, perm, obj=None):
"""
This method checks if the user_obj has perm on obj. Returns True or False.
Looks for the rule with code_name = perm and the content_type of obj.
If such a rule exists, returns the value of obj.field_name, or of
obj.field_name() in case the field is a method.
"""
if obj is None:
return False
if not user_obj.is_authenticated():
user_obj = User.objects.get(pk=settings.ANONYMOUS_USER_ID)
# Centralized authorizations
# You need to define a module in settings.CENTRAL_AUTHORIZATIONS that has a
# central_authorizations function inside
if hasattr(settings, 'CENTRAL_AUTHORIZATIONS'):
module = getattr(settings, 'CENTRAL_AUTHORIZATIONS')
try:
mod = import_module(module)
except ImportError, e:
raise RulesError('Error importing central authorizations module %s: "%s"' % (module, e))
try:
central_authorizations = getattr(mod, 'central_authorizations')
except AttributeError:
raise RulesError('Error module %s does not have a central_authorization function"' % (module))
try:
is_authorized = central_authorizations(user_obj, perm)
# If the value returned is a boolean we pass it up and stop checking
# If not, we continue checking
if isinstance(is_authorized, bool):
return is_authorized
except TypeError:
raise RulesError('central_authorizations should receive 2 parameters: (user_obj, perm)')
# Note:
# is_active and is_superuser are checked by default in django.contrib.auth.models
# lines from 301-306 in Django 1.2.3
# If these checks disappear in mainstream, tests will fail, so we won't double-check them :)
ctype = ContentType.objects.get_for_model(obj)
# We get the rule data and return the value of that rule
try:
rule = RulePermission.objects.get(codename = perm, content_type = ctype)
except RulePermission.DoesNotExist:
return False
bound_field = None
try:
bound_field = getattr(obj, rule.field_name)
except __HOLE__:
raise NonexistentFieldName("Field_name %s from rule %s does not longer exist in model %s. \
The rule is obsolete!", (rule.field_name, rule.codename, rule.content_type.model))
if not isinstance(bound_field, bool) and not callable(bound_field):
raise NotBooleanPermission("Attribute %s from model %s on rule %s does not return a boolean value",
(rule.field_name, rule.content_type.model, rule.codename))
if not callable(bound_field):
is_authorized = bound_field
else:
# Otherwise it is a callabe bound_field
# Let's see if we pass or not user_obj as a parameter
if (len(inspect.getargspec(bound_field)[0]) == 2):
is_authorized = bound_field(user_obj)
else:
is_authorized = bound_field()
if not isinstance(is_authorized, bool):
raise NotBooleanPermission("Callable %s from model %s on rule %s does not return a boolean value",
(rule.field_name, rule.content_type.model, rule.codename))
return is_authorized | AttributeError | dataset/ETHPy150Open maraujop/django-rules/django_rules/backends.py/ObjectPermissionBackend.has_perm |
def looks_like_ip(maybe_ip):
"""Does the given str look like an IP address?"""
if not maybe_ip[0].isdigit():
return False
try:
socket.inet_aton(maybe_ip)
return True
except (__HOLE__, UnicodeError):
if IP_RE.match(maybe_ip):
return True
except socket.error:
return False | AttributeError | dataset/ETHPy150Open john-kurkowski/tldextract/tldextract/remote.py/looks_like_ip |
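A self-contained version of the same check; the regex below is an assumption standing in for tldextract's IP_RE, which is defined elsewhere in that module:

import re
import socket

IP_RE = re.compile(r'^(?:\d{1,3}\.){3}\d{1,3}$')

def looks_like_ip(maybe_ip):
    if not maybe_ip[0].isdigit():
        return False
    try:
        socket.inet_aton(maybe_ip)               # fast path where available
        return True
    except (AttributeError, UnicodeError):
        return bool(IP_RE.match(maybe_ip))       # regex fallback
    except socket.error:
        return False

assert looks_like_ip('127.0.0.1')
assert not looks_like_ip('example.com')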
def _verify_third_party_invite(self, event, auth_events):
"""
Validates that the invite event is authorized by a previous third-party invite.
Checks that the public key, and keyserver, match those in the third party invite,
and that the invite event has a signature issued using that public key.
Args:
event: The m.room.member join event being validated.
auth_events: All relevant previous context events which may be used
for authorization decisions.
Return:
True if the event fulfills the expectations of a previous third party
invite event.
"""
if "third_party_invite" not in event.content:
return False
if "signed" not in event.content["third_party_invite"]:
return False
signed = event.content["third_party_invite"]["signed"]
for key in {"mxid", "token"}:
if key not in signed:
return False
token = signed["token"]
invite_event = auth_events.get(
(EventTypes.ThirdPartyInvite, token,)
)
if not invite_event:
return False
if event.user_id != invite_event.user_id:
return False
if signed["mxid"] != event.state_key:
return False
if signed["token"] != token:
return False
for public_key_object in self.get_public_keys(invite_event):
public_key = public_key_object["public_key"]
try:
for server, signature_block in signed["signatures"].items():
for key_name, encoded_signature in signature_block.items():
if not key_name.startswith("ed25519:"):
continue
verify_key = decode_verify_key_bytes(
key_name,
decode_base64(public_key)
)
verify_signed_json(signed, server, verify_key)
# We got the public key from the invite, so we know that the
# correct server signed the signed bundle.
# The caller is responsible for checking that the signing
# server has not revoked that public key.
return True
except (__HOLE__, SignatureVerifyException,):
continue
return False | KeyError | dataset/ETHPy150Open matrix-org/synapse/synapse/api/auth.py/Auth._verify_third_party_invite |
@defer.inlineCallbacks
def get_user_by_req(self, request, allow_guest=False):
""" Get a registered user's ID.
Args:
request - An HTTP request with an access_token query parameter.
Returns:
tuple of:
UserID (str)
Access token ID (str)
Raises:
AuthError if no user by that token exists or the token is invalid.
"""
# Can optionally look elsewhere in the request (e.g. headers)
try:
user_id = yield self._get_appservice_user_id(request.args)
if user_id:
request.authenticated_entity = user_id
defer.returnValue(
Requester(UserID.from_string(user_id), "", False)
)
access_token = request.args["access_token"][0]
user_info = yield self.get_user_by_access_token(access_token)
user = user_info["user"]
token_id = user_info["token_id"]
is_guest = user_info["is_guest"]
ip_addr = self.hs.get_ip_from_request(request)
user_agent = request.requestHeaders.getRawHeaders(
"User-Agent",
default=[""]
)[0]
if user and access_token and ip_addr:
preserve_context_over_fn(
self.store.insert_client_ip,
user=user,
access_token=access_token,
ip=ip_addr,
user_agent=user_agent
)
if is_guest and not allow_guest:
raise AuthError(
403, "Guest access not allowed", errcode=Codes.GUEST_ACCESS_FORBIDDEN
)
request.authenticated_entity = user.to_string()
defer.returnValue(Requester(user, token_id, is_guest))
except __HOLE__:
raise AuthError(
self.TOKEN_NOT_FOUND_HTTP_STATUS, "Missing access token.",
errcode=Codes.MISSING_TOKEN
) | KeyError | dataset/ETHPy150Open matrix-org/synapse/synapse/api/auth.py/Auth.get_user_by_req |
@defer.inlineCallbacks
def get_user_from_macaroon(self, macaroon_str):
try:
macaroon = pymacaroons.Macaroon.deserialize(macaroon_str)
self.validate_macaroon(macaroon, "access", False)
user_prefix = "user_id = "
user = None
guest = False
for caveat in macaroon.caveats:
if caveat.caveat_id.startswith(user_prefix):
user = UserID.from_string(caveat.caveat_id[len(user_prefix):])
elif caveat.caveat_id == "guest = true":
guest = True
if user is None:
raise AuthError(
self.TOKEN_NOT_FOUND_HTTP_STATUS, "No user caveat in macaroon",
errcode=Codes.UNKNOWN_TOKEN
)
if guest:
ret = {
"user": user,
"is_guest": True,
"token_id": None,
}
else:
# This codepath exists so that we can actually return a
# token ID, because we use token IDs in place of device
# identifiers throughout the codebase.
# TODO(daniel): Remove this fallback when device IDs are
# properly implemented.
ret = yield self._look_up_user_by_access_token(macaroon_str)
if ret["user"] != user:
logger.error(
"Macaroon user (%s) != DB user (%s)",
user,
ret["user"]
)
raise AuthError(
self.TOKEN_NOT_FOUND_HTTP_STATUS,
"User mismatch in macaroon",
errcode=Codes.UNKNOWN_TOKEN
)
defer.returnValue(ret)
except (pymacaroons.exceptions.MacaroonException, TypeError, __HOLE__):
raise AuthError(
self.TOKEN_NOT_FOUND_HTTP_STATUS, "Invalid macaroon passed.",
errcode=Codes.UNKNOWN_TOKEN
) | ValueError | dataset/ETHPy150Open matrix-org/synapse/synapse/api/auth.py/Auth.get_user_from_macaroon |
@defer.inlineCallbacks
def get_appservice_by_req(self, request):
try:
token = request.args["access_token"][0]
service = yield self.store.get_app_service_by_token(token)
if not service:
logger.warn("Unrecognised appservice access token: %s" % (token,))
raise AuthError(
self.TOKEN_NOT_FOUND_HTTP_STATUS,
"Unrecognised access token.",
errcode=Codes.UNKNOWN_TOKEN
)
request.authenticated_entity = service.sender
defer.returnValue(service)
except __HOLE__:
raise AuthError(
self.TOKEN_NOT_FOUND_HTTP_STATUS, "Missing access token."
) | KeyError | dataset/ETHPy150Open matrix-org/synapse/synapse/api/auth.py/Auth.get_appservice_by_req |
def error_check(code):
if code == 0:
return
else:
error_string = "code {0}: {1}".format(code, error_buffer.value.decode())
try:
raise _ERRORS[code - 1](error_string)
except __HOLE__:
raise DrmaaException(error_string)
# to check: NO_RUSAGE, NO_MORE_ELEMENTS | IndexError | dataset/ETHPy150Open pygridtools/drmaa-python/drmaa/errors.py/error_check
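The code-to-exception lookup table used above, sketched with hypothetical exception classes (the real drmaa table lists many more entries):

class DrmaaException(Exception):
    pass

class InvalidJobException(DrmaaException):
    pass

# index i holds the exception class for error code i + 1; code 0 is success
_ERRORS = [InvalidJobException]

def error_check(code, detail=''):
    if code == 0:
        return
    message = "code {0}: {1}".format(code, detail)
    try:
        raise _ERRORS[code - 1](message)
    except IndexError:                  # unknown code: generic exception
        raise DrmaaException(message)

error_check(0)                          # no-op
# error_check(99, 'boom')              # would raise DrmaaException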
def _check_content(self, content):
try:
po = polib.pofile(content)
except __HOLE__, e:
logger.warning("Parse error: %s" % e, exc_info=True)
raise PoParseError(unicode(e))
# If the file is empty, the method hangs, so we should bail out.
if not content:
logger.warning("Pofile: File '%s' is empty." % self.filename)
raise PoParseError("Uploaded file is empty.")
# Msgfmt check
if settings.FILECHECKS['POFILE_MSGFMT']:
msgfmt_check(content, self.is_pot)
# Check required header fields
required_metadata = ['Content-Type', 'Content-Transfer-Encoding']
for metadata in required_metadata:
if not metadata in po.metadata:
logger.warning(
"pofile: Required metadata '%s' not found." % metadata
)
raise PoParseError(_(
"Uploaded file header doesn't have '%s' metadata!" % metadata
))
# Save to avoid parsing it again
self._po = po | IOError | dataset/ETHPy150Open rvanlaar/easy-transifex/src/transifex/transifex/resources/formats/pofile.py/GettextHandler._check_content |
def _parse(self, is_source, lang_rules):
"""
Parse a PO file and create a stringset with all PO entries in the file.
"""
if lang_rules:
nplural = len(lang_rules)
else:
nplural = self.language.get_pluralrules_numbers()
if not hasattr(self, '_po'):
self.is_content_valid()
self._parse_copyrights(self.content)
try:
self._po = polib.pofile(self.content)
except __HOLE__, e:
raise PoParseError(unicode(e))
for entry in self._po:
pluralized = False
same_nplural = True
# skip obsolete entries
if entry.obsolete:
continue
# treat fuzzy translation as nonexistent
if "fuzzy" in entry.flags:
if not is_source:
if not entry.msgid_plural:
self._add_suggestion_string(
entry.msgid, entry.msgstr,
context=escape_context(entry.msgctxt) or '',
occurrences=self._serialize_occurrences(entry.occurrences)
)
continue
else:
# Drop fuzzy flag from template
entry.flags.remove("fuzzy")
if entry.msgid_plural:
pluralized = True
if is_source:
nplural_file = len(entry.msgstr_plural.keys())
if nplural_file != 2:
raise PoParseError(_("Your source file is not a POT file and"
" the translation file you're using has more"
" than two plurals which is not supported."))
# English plural rules
messages = [(1, entry.msgstr_plural['0'] or entry.msgid),
(5, entry.msgstr_plural['1'] or entry.msgid_plural)]
plural_keys = [0,1]
else:
message_keys = entry.msgstr_plural.keys()
message_keys.sort()
nplural_file = len(message_keys)
messages = []
if nplural:
if len(nplural) != nplural_file:
logger.error("Passed plural rules has nplurals=%s"
", but '%s' file has nplurals=%s. String '%s'"
"skipped." % (nplural, self.filename, nplural_file,
entry.msgid))
same_nplural = False
else:
same_nplural = False
if not same_nplural:
# Skip half translated plurals
continue
# plural_keys = message_keys
for n, key in enumerate(message_keys):
messages.append((nplural[n], entry.msgstr_plural['%s' % n]))
else:
# pass empty strings for non source files
if not is_source and entry.msgstr in ["", None]:
continue
# Not pluralized, so no plural rules. Use 5 as 'other'.
if is_source:
messages = [(5, entry.msgstr or entry.msgid)]
else:
messages = [(5, entry.msgstr)]
# Add messages with the correct number (plural)
for number, msgstr in enumerate(messages):
if entry.comment:
comment = entry.comment
else:
comment = None
if entry.flags:
flags = ', '.join( f for f in entry.flags)
else:
flags = None
context=escape_context(entry.msgctxt) or ''
self._add_translation_string(
entry.msgid, msgstr[1], context=context,
occurrences=self._serialize_occurrences(entry.occurrences),
rule=msgstr[0], pluralized=pluralized, comment=comment,
flags=flags
)
if is_source:
entry.msgstr = "%(hash)s_tr" % {
'hash': hash_tag(entry.msgid, context)
}
if entry.msgid_plural:
for n, rule in enumerate(plural_keys):
entry.msgstr_plural['%s' % n] = (
"%(hash)s_pl_%(key)s" % {
'hash':hash_tag(entry.msgid, context),
'key':n
}
)
return self._po | IOError | dataset/ETHPy150Open rvanlaar/easy-transifex/src/transifex/transifex/resources/formats/pofile.py/GettextHandler._parse |
def items_for_result(cl, result, form):
"""
Generates the actual list of data.
"""
first = True
pk = cl.lookup_opts.pk.attname
for field_name in cl.list_display:
row_class = ''
try:
f, attr, value = lookup_field(field_name, result, cl.model_admin)
except (__HOLE__, ObjectDoesNotExist):
result_repr = EMPTY_CHANGELIST_VALUE
else:
if f is None:
allow_tags = getattr(attr, 'allow_tags', False)
boolean = getattr(attr, 'boolean', False)
if boolean:
allow_tags = True
result_repr = _boolean_icon(value)
else:
result_repr = smart_unicode(value)
# Strip HTML tags in the resulting text, except if the
# function has an "allow_tags" attribute set to True.
if not allow_tags:
result_repr = escape(result_repr)
else:
result_repr = mark_safe(result_repr)
else:
if value is None:
result_repr = EMPTY_CHANGELIST_VALUE
if isinstance(f.rel, models.ManyToOneRel):
field_val = getattr(result, f.name)
if field_val is None:
result_repr = EMPTY_CHANGELIST_VALUE
else:
result_repr = escape(field_val)
else:
result_repr = display_for_field(value, f)
if isinstance(f, models.DateField) or isinstance(f, models.TimeField):
row_class = ' class="nowrap"'
if force_unicode(result_repr) == '':
result_repr = mark_safe(' ')
# If list_display_links not defined, add the link tag to the first field
if (first and not cl.list_display_links) or field_name in cl.list_display_links:
table_tag = {True:'th', False:'td'}[first]
first = False
url = cl.url_for_result(result)
# Convert the pk to something that can be used in Javascript.
# Problem cases are long ints (23L) and non-ASCII strings.
if cl.to_field:
attr = str(cl.to_field)
else:
attr = pk
value = result.serializable_value(attr)
result_id = repr(force_unicode(value))[1:]
yield mark_safe(u'<%s%s><a href="%s"%s>%s</a></%s>' % \
(table_tag, row_class, url, (cl.is_popup and ' onclick="opener.dismissRelatedLookupPopup(window, %s); return false;"' % result_id or ''), conditional_escape(result_repr), table_tag))
else:
# By default the fields come from ModelAdmin.list_editable, but if we pull
# the fields out of the form instead of list_editable, custom admins
# can provide fields on a per-request basis
if form and field_name in form.fields:
bf = form[field_name]
result_repr = mark_safe(force_unicode(bf.errors) + force_unicode(bf))
else:
result_repr = conditional_escape(result_repr)
yield mark_safe(u'<td%s>%s</td>' % (row_class, result_repr))
if form and not form[cl.model._meta.pk.name].is_hidden:
yield mark_safe(u'<td>%s</td>' % force_unicode(form[cl.model._meta.pk.name])) | AttributeError | dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.2/django/contrib/admin/templatetags/admin_list.py/items_for_result |
def _load_urllib(self, filename, kwargs):
'''(internal) Loading a network file. First download it, save it to a
temporary file, and pass it to _load_local().'''
if PY2:
import urllib2 as urllib_request
def gettype(info):
return info.gettype()
else:
import urllib.request as urllib_request
def gettype(info):
return info.get_content_type()
proto = filename.split(':', 1)[0]
if proto == 'smb':
try:
# note: it's important to load SMBHandler every time,
# otherwise the data is occasionally not loaded
from smb.SMBHandler import SMBHandler
except __HOLE__:
Logger.warning(
'Loader: can not load PySMB: make sure it is installed')
return
import tempfile
data = fd = _out_osfd = None
try:
_out_filename = ''
if proto == 'smb':
# read from samba shares
fd = urllib_request.build_opener(SMBHandler).open(filename)
else:
# read from internet
fd = urllib_request.urlopen(filename)
if '#.' in filename:
# allow extension override from URL fragment
suffix = '.' + filename.split('#.')[-1]
else:
ctype = gettype(fd.info())
suffix = mimetypes.guess_extension(ctype)
if not suffix:
# strip query string and split on path
parts = filename.split('?')[0].split('/')[1:]
while len(parts) > 1 and not parts[0]:
# strip out blanks from '//'
parts = parts[1:]
if len(parts) > 1 and '.' in parts[-1]:
# we don't want '.com', '.net', etc. as the extension
suffix = '.' + parts[-1].split('.')[-1]
_out_osfd, _out_filename = tempfile.mkstemp(
prefix='kivyloader', suffix=suffix)
idata = fd.read()
fd.close()
fd = None
# write to local filename
write(_out_osfd, idata)
close(_out_osfd)
_out_osfd = None
# load data
data = self._load_local(_out_filename, kwargs)
# FIXME create a clean API for that
for imdata in data._data:
imdata.source = filename
except Exception:
Logger.exception('Loader: Failed to load image <%s>' % filename)
# close file when remote file not found or download error
try:
close(_out_osfd)
except OSError:
pass
return self.error_image
finally:
if fd:
fd.close()
if _out_osfd:
close(_out_osfd)
if _out_filename != '':
unlink(_out_filename)
return data | ImportError | dataset/ETHPy150Open kivy/kivy/kivy/loader.py/LoaderBase._load_urllib |
def _update(self, *largs):
'''(internal) Check if a data is loaded, and pass to the client.'''
# want to start it ?
if self._start_wanted:
if not self._running:
self.start()
self._start_wanted = False
# in pause mode, don't unqueue anything.
if self._paused:
self._trigger_update()
return
for x in range(self.max_upload_per_frame):
try:
filename, data = self._q_done.pop()
except __HOLE__:
return
# create the image
image = data # ProxyImage(data)
if not image.nocache:
Cache.append('kv.loader', filename, image)
# update client
for c_filename, client in self._client[:]:
if filename != c_filename:
continue
# got one client to update
client.image = image
client.loaded = True
client.dispatch('on_load')
self._client.remove((c_filename, client))
self._trigger_update() | IndexError | dataset/ETHPy150Open kivy/kivy/kivy/loader.py/LoaderBase._update |
def validate(self):
"""Explicitly validate all the fields."""
for name, field in self:
try:
field.validate_for_object(self)
except __HOLE__ as error:
raise ValidationError(
"Error for field '{name}'.".format(name=name),
error,
) | ValidationError | dataset/ETHPy150Open beregond/jsonmodels/jsonmodels/models.py/Base.validate |
def __repr__(self):
try:
txt = six.text_type(self)
except __HOLE__:
txt = ''
return '<{name}: {text}>'.format(
name=self.__class__.__name__,
text=txt,
) | TypeError | dataset/ETHPy150Open beregond/jsonmodels/jsonmodels/models.py/Base.__repr__ |
def __setattr__(self, name, value):
try:
return super(Base, self).__setattr__(name, value)
except __HOLE__ as error:
raise ValidationError(
"Error for field '{name}'.".format(name=name),
error
) | ValidationError | dataset/ETHPy150Open beregond/jsonmodels/jsonmodels/models.py/Base.__setattr__ |
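All three jsonmodels snippets above wrap a low-level error with the name of the offending field or attribute. A condensed sketch with stand-in Field and Model classes:

class ValidationError(Exception):
    pass

class Field(object):
    def __init__(self, required=False):
        self.required = required
    def validate(self, value):
        if self.required and value is None:
            raise ValidationError("value is required")

class Model(object):
    fields = {'name': Field(required=True)}
    def validate(self):
        for name, field in self.fields.items():
            try:
                field.validate(getattr(self, name, None))
            except ValidationError as error:
                # re-raise with the offending field named, as above
                raise ValidationError(
                    "Error for field '{name}'.".format(name=name), error)

# Model().validate()   # raises: Error for field 'name'.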
def execute(self, arbiter, props):
if 'name' in props:
watcher = self._get_watcher(arbiter, props['name'])
if 'process' in props:
try:
return {
"process": props['process'],
"info": watcher.process_info(props['process'],
props.get('extended')),
}
except __HOLE__:
raise MessageError("process %r not found in %r" % (
props['process'], props['name']))
else:
return {"name": props['name'],
"info": watcher.info(props.get('extended'))}
else:
infos = {}
for watcher in arbiter.watchers:
infos[watcher.name] = watcher.info()
return {"infos": infos} | KeyError | dataset/ETHPy150Open circus-tent/circus/circus/commands/stats.py/Stats.execute |
def emit_java_headers(target, source, env):
"""Create and return lists of Java stub header files that will
be created from a set of class files.
"""
class_suffix = env.get('JAVACLASSSUFFIX', '.class')
classdir = env.get('JAVACLASSDIR')
if not classdir:
try:
s = source[0]
except IndexError:
classdir = '.'
else:
try:
classdir = s.attributes.java_classdir
except __HOLE__:
classdir = '.'
classdir = env.Dir(classdir).rdir()
if str(classdir) == '.':
c_ = None
else:
c_ = str(classdir) + os.sep
slist = []
for src in source:
try:
classname = src.attributes.java_classname
except AttributeError:
classname = str(src)
if c_ and classname[:len(c_)] == c_:
classname = classname[len(c_):]
if class_suffix and classname[-len(class_suffix):] == class_suffix:
classname = classname[:-len(class_suffix)]
classname = SCons.Tool.javac.classname(classname)
s = src.rfile()
s.attributes.java_classname = classname
slist.append(s)
s = source[0].rfile()
if not hasattr(s.attributes, 'java_classdir'):
s.attributes.java_classdir = classdir
if target[0].__class__ is SCons.Node.FS.File:
tlist = target
else:
if not isinstance(target[0], SCons.Node.FS.Dir):
target[0].__class__ = SCons.Node.FS.Dir
target[0]._morph()
tlist = []
for s in source:
fname = s.attributes.java_classname.replace('.', '_') + '.h'
t = target[0].File(fname)
t.attributes.java_lookupdir = target[0]
tlist.append(t)
return tlist, source | AttributeError | dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Tool/javah.py/emit_java_headers |
def JavaHOutFlagGenerator(target, source, env, for_signature):
try:
t = target[0]
except (__HOLE__, IndexError, TypeError):
t = target
try:
return '-d ' + str(t.attributes.java_lookupdir)
except AttributeError:
return '-o ' + str(t) | AttributeError | dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Tool/javah.py/JavaHOutFlagGenerator |
def UpdatePackageInfo(self, pkginfo):
"""Updates an existing PackageInfo entity."""
unattended_install = self.request.get('unattended_install', None)
if unattended_install is not None:
unattended_install = unattended_install == 'on'
unattended_uninstall = self.request.get('unattended_uninstall', None)
if unattended_uninstall is not None:
unattended_uninstall = unattended_uninstall == 'on'
# Parse any force_install_after_date str into a datetime object.
force_install_after_date_str = self.request.get(
'force_install_after_date', None)
force_install_after_date_time_str = self.request.get(
'force_install_after_date_time', None)
if force_install_after_date_str or force_install_after_date_time_str:
date_string = '%s %s' % (
force_install_after_date_str, force_install_after_date_time_str)
try:
force_install_after_date = datetime.datetime.strptime(
date_string, '%Y-%m-%d %H:%M')
except __HOLE__:
self.error(400)
self.Render(
'error.html',
{'message': 'invalid force_install date and/or time format'})
else:
# This will remove force_install_after_date from the plist, as it was
# unset in the UI.
force_install_after_date = ''
kwargs = {
'unattended_install': unattended_install,
'unattended_uninstall': unattended_uninstall,
# get_all() returns an empty list if the key is not set, and has no default value option.
'catalogs': self.request.get_all('catalogs'),
'manifests': self.request.get_all('manifests'),
'install_types': self.request.get_all('install_types'),
'manifest_mod_access': self.request.get_all('manifest_mod_access'),
# get() returns an empty string if not set, so default to None.
'name': self.request.get('name', None),
'description': self.request.get('description', None),
'display_name': self.request.get('display_name', None),
'version': self.request.get('version', None),
'minimum_os_version': self.request.get('minimum_os_version', None),
'maximum_os_version': self.request.get('maximum_os_version', None),
'force_install_after_date': force_install_after_date,
'category': self.request.get('category', None),
'developer': self.request.get('developer', None),
}
try:
pkginfo.Update(**kwargs)
if settings.EMAIL_ON_EVERY_CHANGE:
self.NotifyAdminsOfPackageChange(pkginfo, **kwargs)
except models.PackageInfoLockError:
self.error(302)
self.Render(
'error.html',
{'message': 'PackageInfo was locked; refresh and try again'})
except models.PackageInfoUpdateError as e:
self.error(403)
self.Render(
'error.html', {'message': 'PackageInfoUpdateError: %s' % str(e)})
else:
filename = pkginfo.filename
self.redirect(
'/admin/packages?msg=%s saved.&activepkg=%s#package-%s' % (
filename, filename, filename)) | ValueError | dataset/ETHPy150Open google/simian/src/simian/mac/admin/package.py/Package.UpdatePackageInfo |
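The date handling above reduces to datetime.strptime with a ValueError guard; the helper is hypothetical:

import datetime

def parse_force_install(date_str, time_str):
    if not (date_str or time_str):
        return ''               # empty string clears the stored value
    try:
        return datetime.datetime.strptime(
            '%s %s' % (date_str, time_str), '%Y-%m-%d %H:%M')
    except ValueError:
        return None             # caller renders an error page

print(parse_force_install('2024-01-31', '09:30'))   # 2024-01-31 09:30:00
print(parse_force_install('31/01/2024', '09:30'))   # None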
def process_view(self, request, callback, callback_args, callback_kwargs):
if getattr(request, 'csrf_processing_done', False):
return None
try:
csrf_token = _sanitize_token(
request.COOKIES[settings.CSRF_COOKIE_NAME])
# Use same token next time
request.META['CSRF_COOKIE'] = csrf_token
except __HOLE__:
csrf_token = None
# Generate token and store it in the request, so it's
# available to the view.
request.META["CSRF_COOKIE"] = _get_new_csrf_key()
# Wait until request.META["CSRF_COOKIE"] has been manipulated before
# bailing out, so that get_token still works
if getattr(callback, 'csrf_exempt', False):
return None
# Assume that anything not defined as 'safe' by RFC2616 needs protection
if request.method not in ('GET', 'HEAD', 'OPTIONS', 'TRACE'):
if getattr(request, '_dont_enforce_csrf_checks', False):
# Mechanism to turn off CSRF checks for test suite.
# It comes after the creation of CSRF cookies, so that
# everything else continues to work exactly the same
# (e.g. cookies are sent, etc.), but before any
# branches that call reject().
return self._accept(request)
if request.is_secure():
# Suppose user visits http://example.com/
# An active network attacker (man-in-the-middle, MITM) sends a
# POST form that targets https://example.com/detonate-bomb/ and
# submits it via JavaScript.
#
# The attacker will need to provide a CSRF cookie and token, but
# that's no problem for a MITM and the session-independent
# nonce we're using. So the MITM can circumvent the CSRF
# protection. This is true for any HTTP connection, but anyone
# using HTTPS expects better! For this reason, for
# https://example.com/ we need additional protection that treats
# http://example.com/ as completely untrusted. Under HTTPS,
# Barth et al. found that the Referer header is missing for
# same-domain requests in only about 0.2% of cases or less, so
# we can use strict Referer checking.
referer = request.META.get('HTTP_REFERER')
if referer is None:
logger.warning('Forbidden (%s): %s',
REASON_NO_REFERER, request.path,
extra={
'status_code': 403,
'request': request,
}
)
return self._reject(request, REASON_NO_REFERER)
# Note that request.get_host() includes the port.
good_referer = 'https://%s/' % request.get_host()
if not same_origin(referer, good_referer):
reason = REASON_BAD_REFERER % (referer, good_referer)
logger.warning('Forbidden (%s): %s', reason, request.path,
extra={
'status_code': 403,
'request': request,
}
)
return self._reject(request, reason)
if csrf_token is None:
# No CSRF cookie. For POST requests, we insist on a CSRF cookie,
# and in this way we can avoid all CSRF attacks, including login
# CSRF.
logger.warning('Forbidden (%s): %s',
REASON_NO_CSRF_COOKIE, request.path,
extra={
'status_code': 403,
'request': request,
}
)
return self._reject(request, REASON_NO_CSRF_COOKIE)
# Check non-cookie token for match.
request_csrf_token = ""
if request.method == "POST":
request_csrf_token = request.POST.get('csrfmiddlewaretoken', '')
if request_csrf_token == "":
# Fall back to X-CSRFToken, to make things easier for AJAX,
# and possible for PUT/DELETE.
request_csrf_token = request.META.get('HTTP_X_CSRFTOKEN', '')
if not constant_time_compare(request_csrf_token, csrf_token):
logger.warning('Forbidden (%s): %s',
REASON_BAD_TOKEN, request.path,
extra={
'status_code': 403,
'request': request,
}
)
return self._reject(request, REASON_BAD_TOKEN)
return self._accept(request) | KeyError | dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/middleware/csrf.py/CsrfViewMiddleware.process_view |
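constant_time_compare above exists so the token check takes the same time no matter where the strings differ; the standard library's hmac.compare_digest (Python 2.7.7+/3.3+) gives the same guarantee:

import hmac

def tokens_match(request_token, cookie_token):
    # runs in time independent of the position of the first mismatch
    return hmac.compare_digest(request_token, cookie_token)

assert tokens_match('abc123', 'abc123')
assert not tokens_match('abc123', 'abc124')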
def shell(self, argstr, exitcodes=(0,)):
orig = sys.stdout
orig_stderr = sys.stderr
try:
sys.stdout = six.StringIO()
sys.stderr = six.StringIO()
_shell = shell.ClimateShell()
_shell.initialize_app(argstr.split())
except __HOLE__:
exc_type, exc_value, exc_traceback = sys.exc_info()
self.assertIn(exc_value.code, exitcodes)
finally:
stdout = sys.stdout.getvalue()
sys.stdout.close()
sys.stdout = orig
stderr = sys.stderr.getvalue()
sys.stderr.close()
sys.stderr = orig_stderr
return (stdout, stderr) | SystemExit | dataset/ETHPy150Open openstack/python-blazarclient/climateclient/tests/test_shell.py/ClimateShellTestCase.shell |
def to_python(self, value):
if isinstance(value, six.string_types):
try:
return json.loads(value, **self.load_kwargs)
except __HOLE__:
raise ValidationError(_("Enter valid JSON"))
return value | ValueError | dataset/ETHPy150Open bradjasper/django-jsonfield/jsonfield/fields.py/JSONFormFieldBase.to_python |
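The same guard in one self-contained function; this sketch assumes Python 3, where the six.string_types check collapses to str:

import json

def to_python(value, **load_kwargs):
    if isinstance(value, str):
        try:
            # json.JSONDecodeError subclasses ValueError, so this also
            # covers Python 3's more specific exception
            return json.loads(value, **load_kwargs)
        except ValueError:
            raise ValueError("Enter valid JSON")
    return value

print(to_python('{"a": 1}'))    # {'a': 1}
print(to_python({'a': 1}))      # non-strings pass through unchanged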
def clean(self, value):
if not value and not self.required:
return None
# Trap cleaning errors & bubble them up as JSON errors
try:
return super(JSONFormFieldBase, self).clean(value)
except __HOLE__:
raise ValidationError(_("Enter valid JSON")) | TypeError | dataset/ETHPy150Open bradjasper/django-jsonfield/jsonfield/fields.py/JSONFormFieldBase.clean |
def pre_init(self, value, obj):
"""Convert a string value to JSON only if it needs to be deserialized.
SubfieldBase metaclass has been modified to call this method instead of
to_python so that we can check the obj state and determine if it needs to be
deserialized"""
try:
if obj._state.adding:
# Make sure the primary key actually exists on the object before
# checking if it's empty. This is a special case for South datamigrations
# see: https://github.com/bradjasper/django-jsonfield/issues/52
if getattr(obj, "pk", None) is not None:
if isinstance(value, six.string_types):
try:
return json.loads(value, **self.load_kwargs)
except __HOLE__:
raise ValidationError(_("Enter valid JSON"))
except AttributeError:
# south fake meta class doesn't create proper attributes
# see this:
# https://github.com/bradjasper/django-jsonfield/issues/52
pass
return value | ValueError | dataset/ETHPy150Open bradjasper/django-jsonfield/jsonfield/fields.py/JSONFieldBase.pre_init |
@pytest.mark.skipif(not version(), reason="No KAFKA_VERSION set")
@pytest.mark.parametrize("compression", [None, 'gzip', 'snappy', 'lz4'])
def test_end_to_end(kafka_broker, compression):
if compression == 'lz4':
# LZ4 requires 0.8.2
if version() < (0, 8, 2):
return
# LZ4 python libs don't work on python2.6
elif sys.version_info < (2, 7):
return
connect_str = 'localhost:' + str(kafka_broker.port)
producer = KafkaProducer(bootstrap_servers=connect_str,
retries=5,
max_block_ms=10000,
compression_type=compression,
value_serializer=str.encode)
consumer = KafkaConsumer(bootstrap_servers=connect_str,
group_id=None,
consumer_timeout_ms=10000,
auto_offset_reset='earliest',
value_deserializer=bytes.decode)
topic = random_string(5)
messages = 100
futures = []
for i in range(messages):
futures.append(producer.send(topic, 'msg %d' % i))
ret = [f.get(timeout=30) for f in futures]
assert len(ret) == messages
producer.close()
consumer.subscribe([topic])
msgs = set()
for i in range(messages):
try:
msgs.add(next(consumer).value)
except __HOLE__:
break
assert msgs == set(['msg %d' % i for i in range(messages)]) | StopIteration | dataset/ETHPy150Open dpkp/kafka-python/test/test_producer.py/test_end_to_end |
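The consumption loop above relies on next() raising StopIteration once consumer_timeout_ms expires. A sketch with a plain iterator standing in for KafkaConsumer:

def drain(consumer, expected):
    msgs = set()
    for _ in range(expected):
        try:
            msgs.add(next(consumer))
        except StopIteration:    # stand-in for the consumer timeout
            break
    return msgs

print(drain(iter(['msg 0', 'msg 1']), expected=5))   # {'msg 0', 'msg 1'}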
def load_template(self, template_name, template_dirs=None):
key = template_name
if template_dirs:
# If template directories were specified, use a hash to differentiate
key = '-'.join([template_name, hashlib.sha1('|'.join(template_dirs)).hexdigest()])
if settings.DEBUG or key not in self.template_cache:
if os.path.splitext(template_name)[1] in ('.jade',):
try:
source, display_name = self.load_template_source(template_name, template_dirs)
source = process(source,filename=template_name,compiler=Compiler)
origin = make_origin(display_name, self.load_template_source, template_name, template_dirs)
template = Template(source, origin, template_name)
except __HOLE__:
template, origin = self.find_template(template_name, template_dirs)
else:
template, origin = self.find_template(template_name, template_dirs)
if not hasattr(template, 'render'):
try:
template = Template(process(source,filename=template_name,compiler=Compiler), origin, template_name)
except (TemplateDoesNotExist, UnboundLocalError):
# If compiling the template we found raises TemplateDoesNotExist,
# back off to returning he source and display name for the template
# we were asked to load. This allows for correct identification (later)
# of the actual template that does not exist.
return template, origin
self.template_cache[key] = template
return self.template_cache[key], None
# def _preprocess(self, source, name, filename=None):
# parser = Parser(source,filename=filename)
# block = parser.parse()
# compiler = Compiler(block)
# return compiler.compile().strip() | NotImplementedError | dataset/ETHPy150Open syrusakbary/pyjade/pyjade/ext/django/loader.py/Loader.load_template |
def setUp(self):
os.chdir(os.path.split(os.path.abspath(__file__))[0])
try:
os.remove('app1.db')
os.remove('app2.db')
except OSError:
pass
try:
shutil.rmtree('migrations')
except __HOLE__:
pass | OSError | dataset/ETHPy150Open miguelgrinberg/Flask-Migrate/tests/test_multidb_migrate.py/TestMigrate.setUp |
def tearDown(self):
try:
os.remove('app1.db')
os.remove('app2.db')
except __HOLE__:
pass
try:
shutil.rmtree('migrations')
except OSError:
pass | OSError | dataset/ETHPy150Open miguelgrinberg/Flask-Migrate/tests/test_multidb_migrate.py/TestMigrate.tearDown |
def get_message(self, command=None):
try:
if command is not None and self._Messages[command]:
_msg = __import__(
self._Messages[command],
globals(),
locals(),
[command]
)
# Get the right instance from Import List
_Message = getattr(_msg, command)
if self._connection.auth_token != b'':
token = self._connection.auth_token
else:
token = self._auth_token
message_instance = _Message(self._connection)\
.set_session_token(token)
message_instance._push_callback = self._push_received
return message_instance
except __HOLE__ as e:
self._connection.close()
raise PyOrientBadMethodCallException(
"Unable to find command " + str(e), []
) | KeyError | dataset/ETHPy150Open mogui/pyorient/pyorient/orient.py/OrientDB.get_message |
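The dynamic import in get_message is the classic __import__ plus getattr pattern; a runnable sketch using a standard-library module as the target:

def load_class(module_name, class_name):
    try:
        mod = __import__(module_name, globals(), locals(), [class_name])
        return getattr(mod, class_name)
    except (ImportError, AttributeError) as e:
        raise ValueError("Unable to find command: %s" % e)

OrderedDict = load_class('collections', 'OrderedDict')
print(OrderedDict([('a', 1)]))              # OrderedDict([('a', 1)])
# load_class('collections', 'NoSuchName')  # raises ValueError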
def _action_has_highlight(actions):
for action in actions:
try:
if action.get("set_tweak", None) == "highlight":
return action.get("value", True)
except __HOLE__:
pass
return False | AttributeError | dataset/ETHPy150Open matrix-org/synapse/synapse/handlers/sync.py/_action_has_highlight |
def add_one_time_default(self, field, field_def):
# OK, they want to pick their own one-time default. Who are we to refuse?
print(" ? Please enter Python code for your one-off default value.")
print(" ? The datetime module is available, so you can do e.g. datetime.date.today()")
while True:
code = raw_input(" >>> ")
if not code:
print(" ! Please enter some code, or 'exit' (with no quotes) to exit.")
elif code == "exit":
sys.exit(1)
else:
try:
result = eval(code, {}, {"datetime": datetime_utils})
except (SyntaxError, __HOLE__) as e:
print(" ! Invalid input: %s" % e)
else:
break
# Right, add the default in.
field_def[2]['default'] = value_clean(result) | NameError | dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/South-1.0.2/south/creator/actions.py/_NullIssuesField.add_one_time_default |
def ReadData(self, source):
"""Parses source option and reads appropriate data source.
Args:
source: Source of Appstats data. Either filename if being read from
a file or MEMCACHE if being read from memcache.
Returns:
errormessage: An error message to display to the user if an error occured
while reading data, None if no error occured.
recordlist: A list of Appstats records in RequestStatProto protobuf format
in reverse chronological order (i.e. most recent first).
"""
errormessage = None
recordlist = None
if source == 'MEMCACHE':
recordlist = loader.FromMemcache()
else:
rootdir = self.GetRoot()
if rootdir is None:
errormessage = 'No parent directory has app.yaml!'
return errormessage, recordlist
source_root = os.path.join(rootdir, source)
try:
outfile = mockable_open(source_root, 'rb')
except __HOLE__:
logging.error('Cannot open %s', source)
errormessage = 'Unable to open file!'
return errormessage, recordlist
mtime = os.path.getmtime(source_root)
if cache.IsCached(source, mtime):
logging.info('Using cached records from %s', source)
recordlist = cache.recordlist
else:
logging.info('Reading fresh records from %s', source)
recordlist = loader.UnpickleFromFile(outfile)
cache.Insert(source, mtime, recordlist)
return errormessage, recordlist | IOError | dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/ext/analytics/main.py/StatsPage.ReadData |
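The mtime-based cache used by ReadData, reduced to a dictionary keyed by path; helper and cache names are made up:

import os
import pickle

_cache = {}   # path -> (mtime, records)

def load_records(path):
    mtime = os.path.getmtime(path)
    cached = _cache.get(path)
    if cached and cached[0] == mtime:
        return cached[1]                 # file unchanged: skip the read
    with open(path, 'rb') as fh:
        records = pickle.load(fh)
    _cache[path] = (mtime, records)
    return records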
@staticmethod
def _safeID(*args, **kwargs):
instanceID = (args, tuple( sorted( kwargs.items() ) ))
try:
hash(instanceID)
return instanceID
except __HOLE__:
return hash(_pickle.dumps(instanceID)) | TypeError | dataset/ETHPy150Open OrbitzWorldwide/droned/romeo/lib/romeo/entity.py/ParameterizedSingleton._safeID |
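The hashable-key fallback above, standalone: try hash() first and only pay the pickling cost for unhashable arguments:

import pickle

def safe_key(*args, **kwargs):
    key = (args, tuple(sorted(kwargs.items())))
    try:
        hash(key)                        # fast path: already hashable
        return key
    except TypeError:                    # e.g. a list argument
        return hash(pickle.dumps(key))

print(safe_key(1, name='x'))             # hashable tuple, returned as-is
print(safe_key([1, 2]))                  # integer hash of the pickle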
@staticmethod
def deserialize(buffer, decode='pickle'):
"""This method is used only by the Journal service to reconstruct
serialized objects by calling the custom ``construct(state)``
method of their class.
@param buffer (a ``.read()``-supporting file-like object)
@param decode (str) <pickle|json> default='pickle'
@raise AssertionError
@return Entity instance
"""
state = None
if decode == 'pickle':
state = _pickle.load(buffer)
elif decode == 'json' and _json:
state = _json.load(buffer)
if isinstance(state, type(None)):
raise AssertionError('state not deserialized') #nothing decoded
try:
module = __import__(state['__module__'], fromlist=[state['__class__']])
except __HOLE__:
#python2.4 __import__ implementation doesn't accept **kwargs
module = __import__(state['__module__'], {}, {}, [state['__class__']])
myClass = getattr(module, state['__class__'])
return myClass.construct(state) | TypeError | dataset/ETHPy150Open OrbitzWorldwide/droned/romeo/lib/romeo/entity.py/Entity.deserialize |
@classmethod
def for_domain(cls, dom):
'''Get job info for the domain
Query the libvirt job info for the domain (ie progress
of migration, or snapshot operation)
Returns: a DomainJobInfo instance
'''
if cls._have_job_stats:
try:
stats = dom.jobStats()
return cls(**stats)
except libvirt.libvirtError as ex:
if ex.get_error_code() == libvirt.VIR_ERR_NO_SUPPORT:
# Remote libvirt doesn't support new API
LOG.debug("Missing remote virDomainGetJobStats: %s", ex)
cls._have_job_stats = False
return cls._get_job_stats_compat(dom)
elif ex.get_error_code() in (
libvirt.VIR_ERR_NO_DOMAIN,
libvirt.VIR_ERR_OPERATION_INVALID):
# Transient guest finished migration, so it has gone
# away completely
LOG.debug("Domain has shutdown/gone away: %s", ex)
return cls(type=libvirt.VIR_DOMAIN_JOB_COMPLETED)
else:
LOG.debug("Failed to get job stats: %s", ex)
raise
except __HOLE__ as ex:
# Local python binding doesn't support new API
LOG.debug("Missing local virDomainGetJobStats: %s", ex)
cls._have_job_stats = False
return cls._get_job_stats_compat(dom)
else:
return cls._get_job_stats_compat(dom) | AttributeError | dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/virt/libvirt/host.py/DomainJobInfo.for_domain |
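for_domain uses a feature-detect-and-fall-back idiom: probe the newer API, remember a failure in a class-level flag, and take the compat path from then on. A sketch with placeholder method names (fast_stats and slow_stats are not libvirt APIs):

class Stats(object):
    _have_fast_api = True    # assume the new API until a probe fails

    @classmethod
    def collect(cls, obj):
        if cls._have_fast_api:
            try:
                return obj.fast_stats()
            except AttributeError:       # binding too old: remember that
                cls._have_fast_api = False
        return obj.slow_stats()          # compat path

Once the flag flips, later calls skip the failing probe entirely.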
def _dispatch_events(self):
"""Wait for & dispatch events from native thread
Blocks until native thread indicates some events
are ready. Then dispatches all queued events.
"""
# Wait to be notified that there are some
# events pending
try:
_c = self._event_notify_recv.read(1)
assert _c
except __HOLE__:
return # will be raised when pipe is closed
# Process as many events as possible without
# blocking
last_close_event = None
while not self._event_queue.empty():
try:
event = self._event_queue.get(block=False)
if isinstance(event, virtevent.LifecycleEvent):
# call possibly with delay
self._event_emit_delayed(event)
elif 'conn' in event and 'reason' in event:
last_close_event = event
except native_Queue.Empty:
pass
if last_close_event is None:
return
conn = last_close_event['conn']
# get_new_connection may already have disabled the host,
# in which case _wrapped_conn is None.
with self._wrapped_conn_lock:
if conn == self._wrapped_conn:
reason = str(last_close_event['reason'])
msg = _("Connection to libvirt lost: %s") % reason
self._wrapped_conn = None
if self._conn_event_handler is not None:
self._conn_event_handler(False, msg) | ValueError | dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/virt/libvirt/host.py/Host._dispatch_events |
def _init_events_pipe(self):
"""Create a self-pipe for the native thread to synchronize on.
This code is taken from the eventlet tpool module, under terms
of the Apache License v2.0.
"""
self._event_queue = native_Queue.Queue()
try:
rpipe, wpipe = os.pipe()
self._event_notify_send = greenio.GreenPipe(wpipe, 'wb', 0)
self._event_notify_recv = greenio.GreenPipe(rpipe, 'rb', 0)
except (ImportError, __HOLE__):
# This is Windows compatibility -- use a socket instead
# of a pipe because pipes don't really exist on Windows.
sock = native_socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.bind(('localhost', 0))
sock.listen(50)
csock = native_socket.socket(socket.AF_INET, socket.SOCK_STREAM)
csock.connect(('localhost', sock.getsockname()[1]))
nsock, addr = sock.accept()
self._event_notify_send = nsock.makefile('wb', 0)
gsock = greenio.GreenSocket(csock)
self._event_notify_recv = gsock.makefile('rb', 0) | NotImplementedError | dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/virt/libvirt/host.py/Host._init_events_pipe |
def _get_new_connection(self):
# call with _wrapped_conn_lock held
LOG.debug('Connecting to libvirt: %s', self._uri)
wrapped_conn = None
try:
wrapped_conn = self._connect(self._uri, self._read_only)
finally:
# Enabling the compute service, in case it was disabled
# since the connection was successful.
disable_reason = None
if not wrapped_conn:
disable_reason = 'Failed to connect to libvirt'
if self._conn_event_handler is not None:
self._conn_event_handler(bool(wrapped_conn), disable_reason)
self._wrapped_conn = wrapped_conn
try:
LOG.debug("Registering for lifecycle events %s", self)
wrapped_conn.domainEventRegisterAny(
None,
libvirt.VIR_DOMAIN_EVENT_ID_LIFECYCLE,
self._event_lifecycle_callback,
self)
except Exception as e:
LOG.warning(_LW("URI %(uri)s does not support events: %(error)s"),
{'uri': self._uri, 'error': e})
try:
LOG.debug("Registering for connection events: %s", str(self))
wrapped_conn.registerCloseCallback(self._close_callback, None)
except (__HOLE__, AttributeError) as e:
# NOTE: The registerCloseCallback of python-libvirt 1.0.1+
# is defined with 3 arguments, and the above registerClose-
# Callback succeeds. However, the one of python-libvirt 1.0.0
# is defined with 4 arguments and TypeError happens here.
# Then python-libvirt 0.9 does not define a method register-
# CloseCallback.
LOG.debug("The version of python-libvirt does not support "
"registerCloseCallback or is too old: %s", e)
except libvirt.libvirtError as e:
LOG.warning(_LW("URI %(uri)s does not support connection"
" events: %(error)s"),
{'uri': self._uri, 'error': e})
return wrapped_conn | TypeError | dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/virt/libvirt/host.py/Host._get_new_connection |
def list_instance_domains(self, only_running=True, only_guests=True):
"""Get a list of libvirt.Domain objects for nova instances
:param only_running: True to only return running instances
:param only_guests: True to filter out any host domain (eg Dom-0)
Query libvirt to a get a list of all libvirt.Domain objects
that correspond to nova instances. If the only_running parameter
is true this list will only include active domains, otherwise
inactive domains will be included too. If the only_guests parameter
is true the list will have any "host" domain (aka Xen Domain-0)
filtered out.
:returns: list of libvirt.Domain objects
"""
if not self._skip_list_all_domains:
try:
alldoms = self._list_instance_domains_fast(only_running)
except (libvirt.libvirtError, __HOLE__) as ex:
LOG.info(_LI("Unable to use bulk domain list APIs, "
"falling back to slow code path: %(ex)s"),
{'ex': ex})
self._skip_list_all_domains = True
if self._skip_list_all_domains:
# Old libvirt, or a libvirt driver which doesn't
# implement the new API
alldoms = self._list_instance_domains_slow(only_running)
doms = []
for dom in alldoms:
if only_guests and dom.ID() == 0:
continue
doms.append(dom)
return doms | AttributeError | dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/virt/libvirt/host.py/Host.list_instance_domains |
def is_cpu_control_policy_capable(self):
"""Returns whether kernel configuration CGROUP_SCHED is enabled
CONFIG_CGROUP_SCHED may be disabled in some kernel configs to
improve scheduler latency.
"""
try:
with open("/proc/self/mounts", "r") as fd:
for line in fd.readlines():
# mount options and split options
bits = line.split()[3].split(",")
if "cpu" in bits:
return True
return False
except __HOLE__:
return False | IOError | dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/virt/libvirt/host.py/Host.is_cpu_control_policy_capable |
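Parsing /proc/self/mounts as above: field four of each line holds the comma-separated mount options. A standalone sketch:

def cgroup_cpu_mounted(mounts_path='/proc/self/mounts'):
    try:
        with open(mounts_path) as fh:
            for line in fh:
                options = line.split()[3].split(',')
                if 'cpu' in options:
                    return True
        return False
    except IOError:          # no /proc (non-Linux): be conservative
        return False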
def safe_initialisation(custom_command="", comm=None, nprocs=1):
"""
Wrapper around the init function to handle errors.
Keyword Arguments
-----------------
custom_command : str
testing purposes
comm : MPI.Intracomm
object that handles communication between the processes
nprocs : int
number of processes
"""
try:
cosmo, data, command_line, success = initialise(custom_command)
except io_mp.ConfigurationError as message:
if comm:
for index in range(1, nprocs):
comm.send('failed', dest=index, tag=1)
print str(message)
raise io_mp.ConfigurationError(
"The initialisation was not successful, resulting in a "
"potentially half created `log.param`. Please see the "
"above error message. If you run the exact same command, it"
" will not work. You should solve the problem, and try again.")
except __HOLE__ as e:
if comm:
for index in range(1, nprocs):
comm.send('failed', dest=index, tag=1)
raise io_mp.ConfigurationError(
"You are running in a folder that was created following "
"a non-successful initialisation (wrong parameter name, "
"wrong likelihood, etc...). If you have solved the issue, you "
"should remove completely the output folder, and try again." +
" Alternatively, there could be a problem with "+e.message)
return cosmo, data, command_line, success | KeyError | dataset/ETHPy150Open baudren/montepython_public/montepython/run.py/safe_initialisation |
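The worker-notification loop in isolation, assuming an mpi4py-style communicator as above; the 'failed' sentinel and tag value simply mirror what the worker ranks are expected to poll for.

def notify_workers_of_failure(comm, nprocs, tag=1):
    # Rank 0 tells every other rank to stop waiting on the root.
    for rank in range(1, nprocs):
        comm.send('failed', dest=rank, tag=tag)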
def __init__(self, *args, **kwargs):
super(CustomPayloadParser, self).__init__(*args, **kwargs)
# Remove some built-in tags that we don't want to expose.
# There are no built-in filters we have to worry about.
for tag_name in self.BLACKLISTED_TAGS:
try:
del self.tags[tag_name]
except __HOLE__:
pass | KeyError | dataset/ETHPy150Open reviewboard/reviewboard/reviewboard/notifications/webhooks.py/CustomPayloadParser.__init__ |
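An equivalent way to drop blacklisted tags without the try/except is dict.pop with a default. The tag names below are illustrative, and `parser` stands for any object exposing a Django-template-style `tags` dict.

BLACKLISTED_TAGS = ('load', 'include', 'ssi')  # illustrative names

def strip_blacklisted_tags(parser):
    for tag_name in BLACKLISTED_TAGS:
        # pop() with a default is a no-op when the tag is absent.
        parser.tags.pop(tag_name, None)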
def _info_slots(t):
try:
for line in t.talk_raw(CMD_CLUSTER_NODES).split('\n'):
if len(line) == 0 or 'fail' in line or 'myself' not in line:
continue
node = ClusterNode(*line.split(' '))
return {
'node_id': node.node_id,
'slave': node.role_in_cluster != 'master',
'master_id': node.master_id if node.master_id != '-' else None,
'slots': node.assigned_slots,
'slots_migrating': node.slots_migrating,
}
except (__HOLE__, LookupError, ReplyError):
return {
'node_id': None,
'slave': False,
'master_id': None,
            'slots': [],
            # mirror the success-path keys so callers never hit KeyError
            'slots_migrating': False,
        }
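For context, each CLUSTER NODES line is space-separated in the layout documented by Redis (id, address, flags, master id, ping/pong timestamps, config epoch, link state, then slot ranges). The values below are made up:

line = ("3fc9fd8e5f9d5b7102f5fa1b3a2a5a0b8a4d9c11 127.0.0.1:7000 "
        "myself,master - 0 0 1 connected 0-5460")
fields = line.split(' ')
# fields[2] holds the flags ('myself', 'master', 'fail', ...) and
# fields[8:] the assigned slot ranges.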
def with_ascendants_for_slug(self, slug, **kwargs):
"""
Given a slug, returns a list of pages from ascendants to
descendants, that form the parent/child page relationships
for that slug. The main concern is to do this in a single
database query rather than querying the database for parents
of a given page.
Primarily used in ``PageMiddleware`` to provide the current
page, which in the case of non-page views, won't match the
slug exactly, but will likely match a page that has been
created for linking to the entry point for the app, eg the
blog page when viewing blog posts.
Also used within ``Page.get_ascendants``, which gets called
in the ``pages.views`` view, for building a list of possible
templates that can be used for the page.
If a valid chain of pages is found, we also assign the pages
to the ``page._ascendants`` attr of the main/first/deepest
page, so that when its ``get_ascendants`` method is called,
the ascendants chain can be re-used without querying the
database again. This occurs at least once, given the second
use-case described above.
"""
if slug == "/":
slugs = [home_slug()]
else:
# Create a list of slugs within this slug,
# eg: ['about', 'about/team', 'about/team/mike']
parts = slug.split("/")
slugs = ["/".join(parts[:i]) for i in range(1, len(parts) + 1)]
# Find the deepest page that matches one of our slugs.
# Sorting by "-slug" should ensure that the pages are in
# descendant -> ascendant order.
pages_for_user = self.published(**kwargs)
pages = list(pages_for_user.filter(slug__in=slugs).order_by("-slug"))
if not pages:
return []
# Check to see if the other pages retrieved form a valid path
# in the page tree, i.e. pages[0].parent == pages[1],
# pages[1].parent == pages[2], and so on. If they do, assign
# the ascendants to the main/first/deepest page, so that it
# can be re-used on calls to its get_ascendants method.
pages[0]._ascendants = []
for i, page in enumerate(pages):
try:
parent = pages[i + 1]
except __HOLE__:
# IndexError indicates that this is the last page in
# the list, so it should have no parent.
if page.parent_id:
break # Invalid parent
else:
if page.parent_id != parent.id:
break # Invalid parent
else:
# Valid parents
pages[0]._ascendants = pages[1:]
return pages | IndexError | dataset/ETHPy150Open stephenmcd/mezzanine/mezzanine/pages/managers.py/PageManager.with_ascendants_for_slug |
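The slug-prefix expansion on its own, showing why a single `slug__in` filter can fetch the whole ancestor chain in one query:

parts = "about/team/mike".split("/")
slugs = ["/".join(parts[:i]) for i in range(1, len(parts) + 1)]
assert slugs == ['about', 'about/team', 'about/team/mike']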
def test_basic_json(self):
"""Test the basic JSON project creation with only the required fields"""
        # dataset format = (dataset_name, [ximagesz, yimagesz, zimagesz], [xvoxel, yvoxel, zvoxel], [xoffset, yoffset, zoffset], timerange, scalinglevels, scaling)
dataset = (p.dataset, [2000,2000,30], [1.0,1.0,5.0], None, None, None, None)
# project format = (project_name, token_name, public)
project = (p.token, None, None)
# channel format = { chan1 : (channel_name, datatype, channel_type, data_url, file_name, exceptions, resolution, windowrange, readonly), chan2: ...... }
channels = { p.channels[0] : (p.channels[0], p.datatype, p.channel_type, 'http://127.0.0.1/data/sample_dir/', 'SLICE', 'tif', None, None, None, None) }
metadata = { 'Author': 'Will', 'Animal':'Mouse', 'Date_Collected':'10/2/2015' }
json_file = tempfile.NamedTemporaryFile(mode='w+b')
json_file.write(createJson(dataset, project, channels, metadata=metadata))
json_file.seek(0)
# posting the JSON url and checking if it is successful
response = json.loads(postURL("http://{}/ca/autoIngest/".format(SITE_HOST), json_file).read())
assert('SUCCESS. The ingest process has now started.' == response)
# fetching the JSON info
f = getURL("http://{}/ca/{}/info/".format(SITE_HOST, p.token))
# read the JSON file
proj_info = json.loads(f.read())
assert( proj_info['project']['name'] == p.token )
assert( proj_info['dataset']['imagesize']['0'] == [2000,2000,30])
assert( proj_info['dataset']['cube_dimension']['0'] == [128,128,16])
assert( proj_info['dataset']['scalinglevels'] == 1)
assert( proj_info['channels'][p.channels[0]]['resolution'] == 0)
assert( proj_info['channels'][p.channels[0]]['datatype'] == p.datatype)
try:
assert( proj_info['metadata'][0]['Author'] == 'Will')
except KeyError:
print "LIMS System not working"
except __HOLE__:
print "LIMS System not working" | AssertionError | dataset/ETHPy150Open neurodata/ndstore/test/test_json.py/Test_Project_Json.test_basic_json |
def jump(dir):
w = vim.current.window
check_history(w)
history = list(w.vars[VHIST])
bufnr = vim.current.buffer.number
now = time()
lastbuf = w.vars.get(VLAST, None)
if not lastbuf or (bufnr == lastbuf[0] and
now - lastbuf[1] > vim.vars['vial_bufhist_timeout']):
history = add_to_history(w, bufnr)
if bufnr not in history:
history = add_to_history(w, bufnr)
names = {r.number: (split(r.name)
if r.name
else ['', '[buf-{}]'.format(r.number)])
for r in vim.buffers if vfunc.buflisted(r.number)}
history[:] = filter(lambda r: r in names, history)
dups = True
while dups:
dups = False
for name, g in groupby(sorted(names.iteritems(), key=skey), skey):
g = list(g)
if len(g) > 1:
dups = True
for nr, (path, _) in g:
p, n = split(path)
names[nr] = p, n + '/' + name
width = vim.vars['vial_bufhist_width']
if width < 0:
width += int(vim.eval('&columns')) - 1
try:
idx = history.index(bufnr)
except __HOLE__:
return
idx += dir
if idx < 0:
idx = 0
elif idx >= len(history):
idx = len(history) - 1
anr = history[idx]
active = names[anr][1]
before = ' '.join(names[r][1] for r in history[:idx])
after = ' '.join(names[r][1] for r in history[idx+1:])
half = (width - len(active) - 4) / 2
if len(before) < len(after):
blen = min(half, len(before))
alen = width - len(active) - blen - 4
else:
alen = min(half, len(after))
blen = width - len(active) - alen - 4
if len(before) > blen:
before = '...' + before[3-blen:]
if len(after) > alen:
after = after[:alen-3] + '...'
if before: before += ' '
if after: after = ' ' + after
vim.command('let x=&ruler | let y=&showcmd')
vim.command('set noruler noshowcmd')
redraw()
echon(before)
vim.command('echohl CursorLine')
echon(active)
vim.command('echohl None')
echon(after)
vim.command('let &ruler=x | let &showcmd=y')
if anr != bufnr:
w.vars['vial_bufhist_switch'] = 1
vim.command('silent b {}'.format(anr))
w.vars['vial_bufhist_switch'] = 0
vim.command('augroup vial_bufhist_wait_action')
vim.command('au!')
vim.command('au CursorMoved,CursorHold <buffer> python %s()' % moved.ref)
vim.command('augroup END') | ValueError | dataset/ETHPy150Open baverman/vial/vial/plugins/bufhist/plugin.py/jump |
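The status-line budgeting above extracted into a pure function (same arithmetic, minus the Vim calls), which makes the "shorter side donates its slack" behaviour easy to test in isolation:

def split_budget(width, active, before, after):
    # Four columns are reserved for the padding around the active name.
    half = (width - len(active) - 4) // 2
    if len(before) < len(after):
        blen = min(half, len(before))
        alen = width - len(active) - blen - 4
    else:
        alen = min(half, len(after))
        blen = width - len(active) - alen - 4
    return blen, alen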
def reopen(self, path='', prefix='', midfix=''):
"""
Close and then open log files on path if given otherwise self.path
Use ha in log file name if given
"""
self.close()
if path:
self.path = path
if prefix:
self.prefix = prefix
if midfix:
self.midfix = midfix
prefix = "{0}_".format(self.prefix) if self.prefix else ""
midfix = "{0}_".format(self.midfix) if self.midfix else ""
date = time.strftime('%Y%m%d_%H%M%S', time.gmtime(time.time()))
if self.same and (self.rx or self.tx):
if not self.buffify:
name = "{0}{1}{2}.txt".format(prefix, midfix, date)
path = os.path.join(self.path, name)
try:
log = io.open(path, mode='wb+')
if self.rx:
self.rxLog = log
if self.tx:
self.txLog = log
except IOError:
self.rxLog = self.txLog = None
return False
else:
try:
log = io.BytesIO()
if self.rx:
self.rxLog = log
if self.tx:
self.txLog = log
except IOError:
self.rxLog = self.txLog = None
return False
else:
if self.rx:
if not self.buffify:
name = "{0}{1}{2}_rx.txt".format(prefix, midfix, date)
path = os.path.join(self.path, name)
try:
self.rxLog = io.open(path, mode='wb+')
except __HOLE__:
self.rxLog = None
return False
else:
try:
self.rxLog = io.BytesIO()
except IOError:
self.rxLog = None
return False
if self.tx:
if not self.buffify:
name = "{0}{1}{2}_tx.txt".format(prefix, midfix, date)
path = os.path.join(self.path, name)
try:
self.txLog = io.open(path, mode='wb+')
except IOError:
self.txLog = None
return False
else:
try:
self.txLog = io.BytesIO()
except IOError:
self.txLog = None
return False
return True | IOError | dataset/ETHPy150Open ioflo/ioflo/ioflo/aio/wiring.py/WireLog.reopen |
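The filename scheme on its own: optional prefix and midfix, a UTC timestamp, and a direction suffix. The directory below is just an example.

import os
import time

prefix, midfix = "app_", "peer1_"
date = time.strftime('%Y%m%d_%H%M%S', time.gmtime(time.time()))
path = os.path.join("/tmp/wire", "{0}{1}{2}_rx.txt".format(prefix, midfix, date))
# e.g. "/tmp/wire/app_peer1_20160324_153045_rx.txt" (timestamp varies)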
def reduce_nums(val_1, val_2, val_op):
"""apply arithmetic rules and try to return an integer result"""
    # now perform the operation, making certain a and b are numeric
try: a = 0 + val_1
except TypeError: a = int(val_1)
try: b = 0 + val_2
except __HOLE__: b = int(val_2)
d = val_op
if d == '%': c = a%b
elif d=='+': c = a+b
elif d=='-': c = a-b
elif d=='*': c = a*b
elif d=='/': c = a/b
elif d=='^': c = a^b
elif d=='|': c = a|b
elif d=='||': c = int(a or b)
elif d=='&': c = a&b
elif d=='&&': c = int(a and b)
elif d=='==': c = int(a == b)
elif d=='!=': c = int(a != b)
elif d=='<=': c = int(a <= b)
elif d=='<': c = int(a < b)
elif d=='>': c = int(a > b)
elif d=='>=': c = int(a >= b)
elif d=='<<': c = a<<b
elif d=='>>': c = a>>b
else: c = 0
return c | TypeError | dataset/ETHPy150Open appcelerator-archive/poc-nodejs-desktop/Resources/nodejs/builds/linux/node/lib/node/wafadmin/Tools/preproc.py/reduce_nums |
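An equivalent table-driven sketch using the stdlib operator module; unknown operators fall back to 0 just as the chain above does. The '/' entry uses floordiv on the assumption that the original relies on Python 2 integer division.

import operator

OPS = {
    '%': operator.mod, '+': operator.add, '-': operator.sub,
    '*': operator.mul, '/': operator.floordiv, '^': operator.xor,
    '|': operator.or_, '&': operator.and_,
    '==': operator.eq, '!=': operator.ne,
    '<': operator.lt, '<=': operator.le,
    '>': operator.gt, '>=': operator.ge,
    '<<': operator.lshift, '>>': operator.rshift,
    '||': lambda a, b: int(a or b), '&&': lambda a, b: int(a and b),
}

def reduce_nums_table(a, b, op):
    # Unknown operators fall back to 0, matching the if/elif chain above.
    return int(OPS.get(op, lambda x, y: 0)(a, b))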