function | label | info
---|---|---|
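Each row below pairs a Python function in which one exception type has been masked as `__HOLE__`, a label giving the masked exception class, and an info path locating the function in the ETHPy150Open corpus.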
def get_links_setup(self):
    '''Returns setup for documents related to this doctype.
    This method will return the `links_setup` property in the
    `[doctype]_links.py` file in the doctype folder'''
    try:
        module = load_doctype_module(self.name, suffix='_links')
        return frappe._dict(module.links)
    except __HOLE__:
        return frappe._dict() | ImportError | dataset/ETHPy150Open frappe/frappe/frappe/model/meta.py/Meta.get_links_setup |
def is_single(doctype):
    try:
        return frappe.db.get_value("DocType", doctype, "issingle")
    except __HOLE__:
        raise Exception, 'Cannot determine whether %s is single' % doctype | IndexError | dataset/ETHPy150Open frappe/frappe/frappe/model/meta.py/is_single |
def dis(x=None):
    """Disassemble classes, methods, functions, or code.

    With no argument, disassemble the last traceback.
    """
    if x is None:
        distb()
        return
    if type(x) is types.InstanceType:
        x = x.__class__
    if hasattr(x, 'im_func'):
        x = x.im_func
    if hasattr(x, 'func_code'):
        x = x.func_code
    if hasattr(x, '__dict__'):
        items = x.__dict__.items()
        items.sort()
        for name, x1 in items:
            if type(x1) in (types.MethodType,
                            types.FunctionType,
                            types.CodeType,
                            types.ClassType):
                print "Disassembly of %s:" % name
                try:
                    dis(x1)
                except __HOLE__, msg:
                    print "Sorry:", msg
                print
    elif hasattr(x, 'co_code'):
        disassemble(x)
    elif isinstance(x, str):
        disassemble_string(x)
    else:
        raise TypeError, \
              "don't know how to disassemble %s objects" % \
              type(x).__name__ | TypeError | dataset/ETHPy150Open babble/babble/include/jython/Lib/dis.py/dis |
def distb(tb=None):
    """Disassemble a traceback (default: last traceback)."""
    if tb is None:
        try:
            tb = sys.last_traceback
        except __HOLE__:
            raise RuntimeError, "no last traceback to disassemble"
    while tb.tb_next: tb = tb.tb_next
    disassemble(tb.tb_frame.f_code, tb.tb_lasti) | AttributeError | dataset/ETHPy150Open babble/babble/include/jython/Lib/dis.py/distb |
def parse_py(codelet):
    """
    Adds 'symbols' field to the codelet after parsing the python code.

    :param codelet: The codelet object to be parsed.
    :type code: Codelet
    """
    def strip_encoding(lines):
        """Strips the encoding line from a file, which breaks the parser."""
        it = iter(lines)
        try:
            first = next(it)
            if not encoding_re.match(first):
                yield first
            second = next(it)
            if not encoding_re.match(second):
                yield second
        except __HOLE__:
            return
        for line in it:
            yield line

    try:
        tree = ast.parse("\n".join(strip_encoding(codelet.code.splitlines())))
    except SyntaxError:
        ## TODO: add some logging here?
        return {}

    walker = _TreeWalker()
    walker.visit(tree)
    return walker.symbols | StopIteration | dataset/ETHPy150Open earwig/bitshift/bitshift/parser/python.py/parse_py |
def identifyAspectsUsed(self):
    # relationshipSets are a dts property
    self.relationshipSets = [(arcrole, ELR, linkqname, arcqname)
                             for arcrole, ELR, linkqname, arcqname in self.modelXbrl.baseSets.keys()
                             if ELR and (arcrole.startswith("XBRL-") or (linkqname and arcqname))]

    aspectsUsed = set(f.concept
                      for f in self.modelXbrl.factsInInstance)

    for cntx in self.modelXbrl.contexts.values():
        for dim in cntx.qnameDims.values():
            aspectsUsed.add(dim.dimension)
            if dim.isExplicit:
                aspectsUsed.add(dim.member)
            else:
                aspectsUsed.add(self.modelXbrl.qnameConcepts[dim.typedMember.qname])
    for defaultDimQn, defaultDimMemberQn in self.modelXbrl.qnameDimensionDefaults.items():
        aspectsUsed.add(self.modelXbrl.qnameConcepts[defaultDimQn])
        aspectsUsed.add(self.modelXbrl.qnameConcepts[defaultDimMemberQn])
    for relationshipSetKey in self.relationshipSets:
        relationshipSet = self.modelXbrl.relationshipSet(*relationshipSetKey)
        for rel in relationshipSet.modelRelationships:
            if isinstance(rel.fromModelObject, ModelConcept):
                aspectsUsed.add(rel.fromModelObject)
            if isinstance(rel.toModelObject, ModelConcept):
                aspectsUsed.add(rel.toModelObject)
    try:
        for qn in (XbrlConst.qnXbrliIdentifier, XbrlConst.qnXbrliPeriod, XbrlConst.qnXbrliUnit):
            aspectsUsed.add(self.modelXbrl.qnameConcepts[qn])
    except __HOLE__:
        pass # no DTS

    for roleTypes in (self.modelXbrl.roleTypes.values(), self.modelXbrl.arcroleTypes.values()):
        for roleUriTypes in roleTypes:
            for roleType in roleUriTypes:
                for qn in roleType.usedOns:
                    if qn in self.modelXbrl.qnameConcepts: # qname may be undefined or invalid and still 2.1 legal
                        aspectsUsed.add(self.modelXbrl.qnameConcepts[qn])

    # add aspects referenced by logging entries
    for logEntry in self.loggingEntries:
        for ref in logEntry['refs']:
            modelObject = self.modelXbrl.modelObject(ref.get('objectId',''))
            if isinstance(modelObject, ModelConcept) and modelObject.modelDocument.inDTS:
                aspectsUsed.add(modelObject)

    # add substitution groups
    aspectsUsed |= set(aspect.substitutionGroup
                       for aspect in aspectsUsed
                       if aspect is not None)

    aspectsUsed -= {None}  # remove None if in aspectsUsed
    self.aspectsUsed = aspectsUsed

    typesUsed = set()
    def typeUsed(modelType):
        if modelType is not None and modelType.modelDocument.inDTS: # exclude nonDTS types (schema, etc)
            typesUsed.add(modelType)
            typesDerivedFrom = modelType.typeDerivedFrom
            if isinstance(typesDerivedFrom, list): # union derivation
                for typeDerivedFrom in typesDerivedFrom:
                    if typeDerivedFrom not in typesUsed:
                        typeUsed(typeDerivedFrom)
            else: # single derivation
                if typesDerivedFrom is not None and typesDerivedFrom not in typesUsed:
                    typeUsed(typesDerivedFrom)

    for aspect in aspectsUsed:
        modelType = aspect.type
        if modelType is not None:
            if modelType not in typesUsed:
                typeUsed(modelType)
    self.typesUsed = typesUsed | KeyError | dataset/ETHPy150Open Arelle/Arelle/arelle/plugin/xbrlDB/XbrlSemanticSqlDB.py/XbrlSqlDatabaseConnection.identifyAspectsUsed |
def insertDataPoints(self):
    reportId = self.reportId
    if self.filingPreviouslyInDB:
        self.showStatus("deleting prior data points of this report")
        # remove prior facts
        self.lockTables(("data_point", "entity_identifier", "period", "aspect_value_selection",
                         "aspect_value_selection_set", "unit_measure", "unit",
                         "table_data_points"))
        self.execute("DELETE FROM {0} WHERE {0}.report_id = {1}"
                     .format( self.dbTableName("data_point"), reportId),
                     close=False, fetch=False)
        self.execute("DELETE FROM {0} WHERE {0}.report_id = {1}"
                     .format( self.dbTableName("entity_identifier"), reportId),
                     close=False, fetch=False)
        self.execute("DELETE FROM {0} WHERE {0}.report_id = {1}"
                     .format( self.dbTableName("period"), reportId),
                     close=False, fetch=False)
        self.execute("DELETE from {0} "
                     "USING {1} "
                     "WHERE {1}.report_id = {2} AND {0}.aspect_value_selection_id = {1}.aspect_value_selection_id"
                     .format( self.dbTableName("aspect_value_selection"),
                              self.dbTableName("aspect_value_selection_set"),
                              reportId),
                     close=False, fetch=False)
        self.execute("DELETE FROM {0} WHERE {0}.report_id = {1};"
                     .format( self.dbTableName("aspect_value_selection_set"), reportId),
                     close=False, fetch=False)
        self.execute("DELETE from {0} "
                     "USING {1} "
                     "WHERE {1}.report_id = {2} AND {0}.unit_id = {1}.unit_id"
                     .format( self.dbTableName("unit_measure"),
                              self.dbTableName("unit"),
                              reportId),
                     close=False, fetch=False)
        self.execute("DELETE from {0} WHERE {0}.report_id = {1}"
                     .format( self.dbTableName("unit"), reportId),
                     close=False, fetch=False)
        self.execute("DELETE FROM {0} WHERE {0}.report_id = {1}"
                     .format( self.dbTableName("table_data_points"), reportId),
                     close=False, fetch=False)

    self.showStatus("insert data points")
    # units
    table = self.getTable('unit', 'unit_id',
                          ('report_id', 'xml_id', 'xml_child_seq', 'measures_hash'),
                          ('report_id', 'measures_hash'),
                          tuple((reportId,
                                 unit.id,
                                 elementChildSequence(unit),
                                 unit.md5hash)
                                for unit in dict((unit.md5hash,unit) # deduplicate by md5hash
                                                 for unit in self.modelXbrl.units.values()).values()))
    self.unitId = dict(((_reportId, measuresHash), id)
                       for id, _reportId, measuresHash in table)
    # measures
    table = self.getTable('unit_measure',
                          None,
                          ('unit_id', 'qname', 'is_multiplicand'),
                          ('unit_id', 'qname', 'is_multiplicand'),
                          tuple((self.unitId[(reportId,unit.md5hash)],
                                 measure.clarkNotation,
                                 i == 0)
                                for unit in self.modelXbrl.units.values()
                                for i in range(2)
                                for measure in unit.measures[i]))
    table = self.getTable('entity_identifier', 'entity_identifier_id',
                          ('report_id', 'scheme', 'identifier'),
                          ('report_id', 'scheme', 'identifier'),
                          set((reportId,
                               cntx.entityIdentifier[0],
                               cntx.entityIdentifier[1])
                              for cntx in self.modelXbrl.contexts.values()),
                          checkIfExisting=True) # entities shared across multiple instance/inline docs
    self.entityIdentifierId = dict(((_reportId, entScheme, entIdent), id)
                                   for id, _reportId, entScheme, entIdent in table)
    table = self.getTable('period', 'period_id',
                          ('report_id', 'start_date', 'end_date', 'is_instant', 'is_forever'),
                          ('report_id', 'start_date', 'end_date', 'is_instant', 'is_forever'),
                          set((reportId,
                               cntx.startDatetime if cntx.isStartEndPeriod else None,
                               cntx.endDatetime if (cntx.isStartEndPeriod or cntx.isInstantPeriod) else None,
                               cntx.isInstantPeriod,
                               cntx.isForeverPeriod)
                              for cntx in self.modelXbrl.contexts.values()),
                          checkIfExisting=True) # periods shared across multiple instance/inline docs
    self.periodId = dict(((_reportId, start, end, isInstant, isForever), id)
                         for id, _reportId, start, end, isInstant, isForever in table)

    def cntxDimsSet(cntx):
        return frozenset((self.aspectQnameId[modelDimValue.dimensionQname],
                          self.aspectQnameId.get(modelDimValue.memberQname),
                          modelDimValue.isTyped,
                          modelDimValue.stringValue if modelDimValue.isTyped else None)
                         for modelDimValue in cntx.qnameDims.values()
                         if modelDimValue.dimensionQname in self.aspectQnameId)

    cntxAspectValueSelectionSet = dict((cntx, cntxDimsSet(cntx))
                                       for cntx in self.modelXbrl.contexts.values())
    aspectValueSelections = set(aspectValueSelectionSet
                                for cntx, aspectValueSelectionSet in cntxAspectValueSelectionSet.items()
                                if aspectValueSelectionSet)
    self.lockTables(("aspect_value_selection_set",))
    self.execute("DELETE FROM {0} WHERE report_id = {1}"
                 .format(self.dbTableName("aspect_value_selection_set"), reportId),
                 close=False, fetch=False)
    table = self.getTable('aspect_value_selection_set', 'aspect_value_selection_id',
                          ('report_id', ),
                          ('report_id', ),
                          tuple((reportId,)
                                for aspectValueSelection in aspectValueSelections)
                          )
    # assure we only get single entry per result (above gives cross product)
    table = self.execute("SELECT aspect_value_selection_id, report_id from {0} "
                         "WHERE report_id = {1}"
                         .format(self.dbTableName("aspect_value_selection_set"), reportId))
    aspectValueSelectionSets = dict((aspectValueSelections.pop(), id)
                                    for id, _reportId in table)
    cntxAspectValueSelectionSetId = dict((cntx, aspectValueSelectionSets[_cntxDimsSet])
                                         for cntx, _cntxDimsSet in cntxAspectValueSelectionSet.items()
                                         if _cntxDimsSet)
    table = self.getTable('aspect_value_selection',
                          None,
                          ('aspect_value_selection_id', 'aspect_id', 'aspect_value_id', 'is_typed_value', 'typed_value'),
                          ('aspect_value_selection_id', ),
                          tuple((aspectValueSetId, dimId, dimMbrId, isTyped, typedValue)
                                for aspectValueSelection, aspectValueSetId in aspectValueSelectionSets.items()
                                for dimId, dimMbrId, isTyped, typedValue in aspectValueSelection)
                          )

    # facts
    def insertFactSet(modelFacts, parentDatapointId):
        facts = []
        for fact in modelFacts:
            if fact.concept is not None and getattr(fact, "xValid", UNVALIDATED) >= VALID and fact.qname is not None:
                cntx = fact.context
                documentId = self.documentIds[fact.modelDocument]
                facts.append((reportId,
                              documentId,
                              fact.id,
                              elementChildSequence(fact),
                              fact.sourceline,
                              parentDatapointId, # parent ID
                              self.aspectQnameId.get(fact.qname),
                              fact.contextID,
                              self.entityIdentifierId.get((reportId, cntx.entityIdentifier[0], cntx.entityIdentifier[1]))
                                  if cntx is not None else None,
                              self.periodId.get((reportId,
                                                 cntx.startDatetime if cntx.isStartEndPeriod else None,
                                                 cntx.endDatetime if (cntx.isStartEndPeriod or cntx.isInstantPeriod) else None,
                                                 cntx.isInstantPeriod,
                                                 cntx.isForeverPeriod)) if cntx is not None else None,
                              cntxAspectValueSelectionSetId.get(cntx) if cntx is not None else None,
                              self.unitId.get((reportId,fact.unit.md5hash)) if fact.unit is not None else None,
                              fact.isNil,
                              fact.precision,
                              fact.decimals,
                              roundValue(fact.value, fact.precision, fact.decimals) if fact.isNumeric and not fact.isNil else None,
                              fact.value
                              ))
        table = self.getTable('data_point', 'datapoint_id',
                              ('report_id', 'document_id', 'xml_id', 'xml_child_seq', 'source_line',
                               'parent_datapoint_id', # tuple
                               'aspect_id',
                               'context_xml_id', 'entity_identifier_id', 'period_id', 'aspect_value_selection_id', 'unit_id',
                               'is_nil', 'precision_value', 'decimals_value', 'effective_value', 'value'),
                              ('document_id', 'xml_child_seq'),
                              facts)
        xmlIdDataPointId = dict(((docId, xml_child_seq), datapointId)
                                for datapointId, docId, xml_child_seq in table)
        self.factDataPointId.update(xmlIdDataPointId)
        for fact in modelFacts:
            if fact.isTuple:
                try:
                    insertFactSet(fact.modelTupleFacts,
                                  xmlIdDataPointId[(self.documentIds[fact.modelDocument],
                                                    elementChildSequence(fact))])
                except KeyError:
                    self.modelXbrl.info("xpDB:warning",
                                        _("Loading XBRL DB: tuple's datapoint not found: %(tuple)s"),
                                        modelObject=fact, tuple=fact.qname)

    self.factDataPointId = {}
    insertFactSet(self.modelXbrl.facts, None)
    # hashes
    if self.tableFacts: # if any entries
        tableDataPoints = []
        for roleType, tableCode, fact in self.tableFacts:
            try:
                tableDataPoints.append((reportId,
                                        self.roleTypeIds[(self.documentIds[roleType.modelDocument],
                                                          roleType.roleURI)],
                                        tableCode,
                                        self.factDataPointId[(self.documentIds[fact.modelDocument],
                                                              elementChildSequence(fact))]))
            except __HOLE__:
                # print ("missing table data points role or data point")
                pass
        table = self.getTable('table_data_points', None,
                              ('report_id', 'object_id', 'table_code', 'datapoint_id'),
                              ('report_id', 'object_id', 'datapoint_id'),
                              tableDataPoints) | KeyError | dataset/ETHPy150Open Arelle/Arelle/arelle/plugin/xbrlDB/XbrlSemanticSqlDB.py/XbrlSqlDatabaseConnection.insertDataPoints |
def mkdir_p(path):
    # From https://stackoverflow.com/questions/600268/mkdir-p-functionality-in-python
    try:
        os.makedirs(path)
    except __HOLE__ as exc:  # Python >2.5
        if exc.errno == errno.EEXIST and os.path.isdir(path):
            pass
        else:
            raise | OSError | dataset/ETHPy150Open yosinski/deep-visualization-toolbox/misc.py/mkdir_p |
@wsgi.action("update")
def update(self, req, id, body):
    """Configure cloudpipe parameters for the project."""
    context = req.environ['nova.context']
    authorize(context)

    if id != "configure-project":
        msg = _("Unknown action %s") % id
        raise webob.exc.HTTPBadRequest(explanation=msg)

    project_id = context.project_id
    networks = objects.NetworkList.get_by_project(context, project_id)

    try:
        params = body['configure_project']
        vpn_ip = params['vpn_ip']
        vpn_port = params['vpn_port']
        for network in networks:
            network.vpn_public_address = vpn_ip
            network.vpn_public_port = vpn_port
            network.save()
    except (TypeError, __HOLE__, ValueError) as ex:
        msg = _("Invalid request body: %s") % ex
        raise webob.exc.HTTPBadRequest(explanation=msg)

    return webob.Response(status_int=202) | KeyError | dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/api/openstack/compute/legacy_v2/contrib/cloudpipe_update.py/CloudpipeUpdateController.update |
def stop(self):
    """
    Stops the server.

    If the server is not running, this method has no effect.
    """
    if self.started:
        try:
            self.httpd.shutdown()
            self.httpd.server_close()
            self.server_thread.join()
            self.server_thread = None
            self.logger.info("Stopped http server on %s:%s" % (self.host, self.port))
        except __HOLE__:
            pass
        self.started = False
    self.httpd = None | AttributeError | dataset/ETHPy150Open w3c/wptserve/wptserve/server.py/WebTestHttpd.stop |
def __init__(self, config):
    self.config = config
    self.filename = config.get('nagios.command_file')
    try:
        self.pipe = open(self.filename, 'a')
    except (IOError, __HOLE__), e:
        self.pipe = None | OSError | dataset/ETHPy150Open tehmaze/nagios-cli/nagios_cli/nagios.py/Command.__init__ |
def parse(self, filename, limit=None):
    if limit is not None:
        self.limit = limit

    retry = 10
    handle = None
    while handle is None:
        try:
            handle = open(filename, 'rb')
        except:
            retry -= 1
            if retry:
                print '%s is not available, retry %d' % (filename, retry)
                time.sleep(0.5)
            else:
                print '%s is not available' % (filename,)
                sys.exit(0)

    for line in handle:
        line = line.strip()
        if not line:
            continue
        elif self.section is None:
            if self.define:
                if line[:6] == 'define' and line[-1] == '{':
                    self.new_section(line[7:-1].strip())
            elif line[-1] == '{':
                self.new_section(line[:-1].strip())
        elif line == '}':
            yield self.end_section()
        elif line[0] == '#':
            continue
        else:
            if self.define:
                try:
                    space = line.index(' ')
                    self.section[line[:space]] = line[space + 1:]
                except ValueError:
                    pass
            elif '=' in line:
                try:
                    equal = line.index('=')
                    self.section[line[:equal]] = line[equal + 1:]
                except __HOLE__:
                    pass | ValueError | dataset/ETHPy150Open tehmaze/nagios-cli/nagios_cli/nagios.py/Parser.parse |
def _smooth(self, efold, y):
    try:
        y.size > self.lookback
    except __HOLE__:
        'Y must have at least self.lookback elements.'

    ysmooth = np.zeros(y.size)
    ysmooth[0] = y[0]
    peffective = 0.0  # trace of the smoothing matrix, the effective number of parameters
    # treat the first self.lookback data points separately, since the base-line is shorter
    for i in xrange(1, self.lookback):
        weights = self.weights(efold, lookback=i)
        ysmooth[i] = weights.dot(y[0:i])
        peffective += weights[-1]

    weights = self.weights(efold)
    for i in xrange(y.size - self.lookback - 1):
        idx = self.lookback + i
        # estimate current y as exponentially-weighted average of previous self.lookback y-values
        ysmooth[idx] = weights.dot(y[idx - self.lookback:idx])
        peffective += weights[-1]

    ysmooth[-1] = weights.dot(y[y.size - self.lookback - 1:-1])
    peffective += weights[-1]

    return ysmooth, peffective | ValueError | dataset/ETHPy150Open brandonckelly/bck_stats/bck_stats/gcv_smoother.py/GcvExpSmoother._smooth |
def order(subgraph):
    """ Return the number of unique nodes in a subgraph.

    :arg subgraph:
    :return:
    """
    try:
        return subgraph.__order__()
    except AttributeError:
        try:
            return len(set(subgraph.nodes()))
        except __HOLE__:
            raise TypeError("Object %r is not graphy") | AttributeError | dataset/ETHPy150Open nigelsmall/py2neo/py2neo/types.py/order |
def size(subgraph):
    """ Return the number of unique relationships in a subgraph.

    :arg subgraph:
    :return:
    """
    try:
        return subgraph.__size__()
    except AttributeError:
        try:
            return len(set(subgraph.relationships()))
        except __HOLE__:
            raise TypeError("Object %r is not graphy") | AttributeError | dataset/ETHPy150Open nigelsmall/py2neo/py2neo/types.py/size |
def walk(*walkables):
    """ Traverse over the arguments supplied, yielding the entities
    from each in turn.

    :arg walkables: sequence of walkable objects
    """
    if not walkables:
        return
    walkable = walkables[0]
    try:
        entities = walkable.__walk__()
    except AttributeError:
        raise TypeError("Object %r is not walkable" % walkable)
    for entity in entities:
        yield entity
    end_node = walkable.end_node()
    for walkable in walkables[1:]:
        try:
            if end_node == walkable.start_node():
                entities = walkable.__walk__()
                end_node = walkable.end_node()
            elif end_node == walkable.end_node():
                entities = reversed(list(walkable.__walk__()))
                end_node = walkable.start_node()
            else:
                raise ValueError("Cannot append walkable %r "
                                 "to node %r" % (walkable, end_node))
        except __HOLE__:
            raise TypeError("Object %r is not walkable" % walkable)
        for i, entity in enumerate(entities):
            if i > 0:
                yield entity | AttributeError | dataset/ETHPy150Open nigelsmall/py2neo/py2neo/types.py/walk |
def __eq__(self, other):
    try:
        return self.nodes() == other.nodes() and self.relationships() == other.relationships()
    except __HOLE__:
        return False | AttributeError | dataset/ETHPy150Open nigelsmall/py2neo/py2neo/types.py/Subgraph.__eq__ |
def __eq__(self, other):
    try:
        other_walk = tuple(walk(other))
    except __HOLE__:
        return False
    else:
        return tuple(walk(self)) == other_walk | TypeError | dataset/ETHPy150Open nigelsmall/py2neo/py2neo/types.py/Walkable.__eq__ |
def __setitem__(self, key, value):
    if value is None:
        try:
            dict.__delitem__(self, key)
        except __HOLE__:
            pass
    else:
        dict.__setitem__(self, key, coerce_property(value)) | KeyError | dataset/ETHPy150Open nigelsmall/py2neo/py2neo/types.py/PropertyDict.__setitem__ |
def remote(obj):
    """ Return the remote counterpart of a local object.

    :param obj: the local object
    :return: the corresponding remote entity
    """
    try:
        return obj.__remote__
    except __HOLE__:
        return None | AttributeError | dataset/ETHPy150Open nigelsmall/py2neo/py2neo/types.py/remote |
def __eq__(self, other):
    if other is None:
        return False
    try:
        other = cast_relationship(other)
    except __HOLE__:
        return False
    else:
        remote_self = remote(self)
        remote_other = remote(other)
        if remote_self and remote_other:
            return remote_self == remote_other
        else:
            return (self.nodes() == other.nodes() and size(other) == 1 and
                    self.type() == other.type() and dict(self) == dict(other)) | TypeError | dataset/ETHPy150Open nigelsmall/py2neo/py2neo/types.py/Relationship.__eq__ |
def __init__(self, *entities):
    entities = list(entities)
    for i, entity in enumerate(entities):
        if isinstance(entity, Entity):
            continue
        elif entity is None:
            entities[i] = Node()
        elif isinstance(entity, dict):
            entities[i] = Node(**entity)
    for i, entity in enumerate(entities):
        try:
            start_node = entities[i - 1].end_node()
            end_node = entities[i + 1].start_node()
        except (__HOLE__, AttributeError):
            pass
        else:
            if isinstance(entity, string):
                entities[i] = Relationship(start_node, entity, end_node)
            elif isinstance(entity, tuple) and len(entity) == 2:
                t, properties = entity
                entities[i] = Relationship(start_node, t, end_node, **properties)
    Walkable.__init__(self, walk(*entities)) | IndexError | dataset/ETHPy150Open nigelsmall/py2neo/py2neo/types.py/Path.__init__ |
def CollapseTree(self):
    try:
        self.CollapseAllChildren(self.root_id)
    except __HOLE__:
        self.Collapse(self.root_id)
    self.Expand(self.root_id)

########################################################################
###
###  EVENT TREE (ORDERED BY DATES AND IDS)
### | AttributeError | dataset/ETHPy150Open cmpilato/thotkeeper/lib/tk_main.py/TKTreeCtrl.CollapseTree |
def import_preferred_memcache_lib(self, servers):
    """Returns an initialized memcache client.  Used by the constructor."""
    try:
        import pylibmc
    except ImportError:
        pass
    else:
        return pylibmc.Client(servers)

    try:
        from google.appengine.api import memcache
    except __HOLE__:
        pass
    else:
        return memcache.Client()

    try:
        import memcache
    except ImportError:
        pass
    else:
        return memcache.Client(servers)


# backwards compatibility | ImportError | dataset/ETHPy150Open jojoin/cutout/cutout/cache/memcachedcache.py/MemcachedCache.import_preferred_memcache_lib |
def poll(self):
    try:
        command = [self.config['bin'], '-1']

        if str_to_bool(self.config['use_sudo']):
            command.insert(0, self.config['sudo_cmd'])

        output = subprocess.Popen(command,
                                  stdout=subprocess.PIPE).communicate()[0]
    except __HOLE__:
        output = ""

    return output | OSError | dataset/ETHPy150Open BrightcoveOS/Diamond/src/collectors/varnish/varnish.py/VarnishCollector.poll |
def parse_arg(s):
    try:
        return json.loads(s)
    except __HOLE__:
        return s | ValueError | dataset/ETHPy150Open aerospike/aerospike-client-python/examples/client/scan_apply.py/parse_arg |
def copy_test_to_media(module, name):
    """
    Copies a file from Mezzanine's test data path to MEDIA_ROOT.
    Used in tests and demo fixtures.
    """
    mezzanine_path = path_for_import(module)
    test_path = os.path.join(mezzanine_path, "static", "test", name)
    to_path = os.path.join(settings.MEDIA_ROOT, name)
    to_dir = os.path.dirname(to_path)
    if not os.path.exists(to_dir):
        os.makedirs(to_dir)
    if os.path.isdir(test_path):
        copy = copytree
    else:
        copy = copyfile
    try:
        copy(test_path, to_path)
    except __HOLE__:
        pass | OSError | dataset/ETHPy150Open stephenmcd/mezzanine/mezzanine/utils/tests.py/copy_test_to_media |
def run_version_check():
    logging.info("Performing version check.")
    logging.info("Current version: %s", current_version)

    data = json_dumps({
        'current_version': current_version
    })
    headers = {'content-type': 'application/json'}

    try:
        response = requests.post('https://version.redash.io/api/report?channel=stable',
                                 data=data, headers=headers, timeout=3.0)
        latest_version = response.json()['release']['version']
        _compare_and_update(latest_version)
    except requests.RequestException:
        logging.exception("Failed checking for new version.")
    except (__HOLE__, KeyError):
        logging.exception("Failed checking for new version (probably bad/non-JSON response).") | ValueError | dataset/ETHPy150Open getredash/redash/redash/version_check.py/run_version_check |
def getOsCredentialsFromEnvironment():
    credentials = {}
    try:
        credentials['VERSION'] = os.environ['OS_COMPUTE_API_VERSION']
        credentials['USERNAME'] = os.environ['OS_USERNAME']
        credentials['PASSWORD'] = os.environ['OS_PASSWORD']
        credentials['TENANT_NAME'] = os.environ['OS_TENANT_NAME']
        credentials['AUTH_URL'] = os.environ['OS_AUTH_URL']
    except __HOLE__ as e:
        print("ERROR: environment variable %s is not defined" % e, file=sys.stderr)
        sys.exit(-1)
    return credentials | KeyError | dataset/ETHPy150Open lukaspustina/dynamic-inventory-for-ansible-with-openstack/openstack_inventory.py/getOsCredentialsFromEnvironment |
def _GetClusterDescription(project, zone, cluster_name):
    """Gets the description for a Cloud Bigtable cluster.

    Args:
        project: str. Name of the project in which the cluster was created.
        zone: str. Zone of the project in which the cluster was created.
        cluster_name: str. Cluster ID of the desired Bigtable cluster.

    Returns:
        A dictionary containing a cluster description.

    Raises:
        KeyError: when the cluster was not found.
    """
    env = {'CLOUDSDK_CORE_DISABLE_PROMPTS': '1'}
    env.update(os.environ)
    cmd = [FLAGS.gcloud_path, 'alpha', 'bigtable', 'clusters', 'list', '--quiet',
           '--format', 'json', '--project', project]
    stdout, stderr, returncode = vm_util.IssueCommand(cmd, env=env)
    if returncode:
        raise IOError('Command "{0}" failed:\nSTDOUT:\n{1}\nSTDERR:\n{2}'.format(
            ' '.join(cmd), stdout, stderr))
    result = json.loads(stdout)
    clusters = {cluster['name']: cluster for cluster in result['clusters']}
    expected_cluster_name = 'projects/{0}/zones/{1}/clusters/{2}'.format(
        project, zone, cluster_name)
    try:
        return clusters[expected_cluster_name]
    except __HOLE__:
        raise KeyError('Cluster {0} not found in {1}'.format(
            expected_cluster_name, list(clusters))) | KeyError | dataset/ETHPy150Open GoogleCloudPlatform/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/cloud_bigtable_ycsb_benchmark.py/_GetClusterDescription |
def _GetDefaultProject():
    cmd = [FLAGS.gcloud_path, 'config', 'list', '--format', 'json']
    stdout, stderr, return_code = vm_util.IssueCommand(cmd)
    if return_code:
        raise subprocess.CalledProcessError(return_code, cmd, stdout)
    config = json.loads(stdout)
    try:
        return config['core']['project']
    except __HOLE__:
        raise KeyError('No default project found in {0}'.format(config)) | KeyError | dataset/ETHPy150Open GoogleCloudPlatform/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/cloud_bigtable_ycsb_benchmark.py/_GetDefaultProject |
def test_common_rules():
    try:
        import DNS
    except __HOLE__:
        pytest.skip("PyDNS not installed.")

    mock = DomainValidator()

    dataset = [
        ('[email protected]', ''),
        ('', 'It cannot be empty.'),
        ('*' * 256, 'It cannot be longer than 255 chars.'),
        ('[email protected]', 'It cannot start with a dot.'),
        ('[email protected].', 'It cannot end with a dot.'),
        ('[email protected]', 'It cannot contain consecutive dots.'),
    ]

    def closure(address, expect):
        assert mock._apply_common_rules(address, 255) == (address, expect)

    for address, expect in dataset:
        yield closure, address, expect | ImportError | dataset/ETHPy150Open marrow/mailer/test/test_validator.py/test_common_rules |
def test_common_rules_fixed():
    try:
        import DNS
    except __HOLE__:
        pytest.skip("PyDNS not installed.")

    mock = DomainValidator(fix=True)

    dataset = [
        ('[email protected]', ('[email protected]', '')),
        ('[email protected].', ('[email protected]', '')),
    ]

    def closure(address, expect):
        assert mock._apply_common_rules(address, 255) == expect

    for address, expect in dataset:
        yield closure, address, expect | ImportError | dataset/ETHPy150Open marrow/mailer/test/test_validator.py/test_common_rules_fixed |
def test_domain_validation_basic():
    try:
        import DNS
    except __HOLE__:
        pytest.skip("PyDNS not installed.")

    mock = DomainValidator()

    dataset = [
        ('example.com', ''),
        ('xn--ls8h.la', ''),  # IDN: (poop).la
        ('', 'Invalid domain: It cannot be empty.'),
        ('-bad.example.com', 'Invalid domain.'),
    ]

    def closure(domain, expect):
        assert mock.validate_domain(domain) == (domain, expect)

    for domain, expect in dataset:
        yield closure, domain, expect | ImportError | dataset/ETHPy150Open marrow/mailer/test/test_validator.py/test_domain_validation_basic |
def test_domain_lookup():
    try:
        import DNS
    except __HOLE__:
        pytest.skip("PyDNS not installed.")

    mock = DomainValidator()

    dataset = [
        ('gothcandy.com', 'a', '174.129.236.35'),
        ('a' * 64 + '.gothcandy.com', 'a', False),
        ('gothcandy.com', 'mx', [(10, 'mx1.emailsrvr.com'), (20, 'mx2.emailsrvr.com')]),
        ('nx.example.com', 'a', False),
        ('xn--ls8h.la', 'a', '38.103.165.13'),  # IDN: (poop).la
    ]

    def closure(domain, kind, expect):
        try:
            assert mock.lookup_domain(domain, kind, server=['8.8.8.8']) == expect
        except DNS.DNSError:
            pytest.skip("Skipped due to DNS error.")

    for domain, kind, expect in dataset:
        yield closure, domain, kind, expect | ImportError | dataset/ETHPy150Open marrow/mailer/test/test_validator.py/test_domain_lookup |
def test_domain_validation():
    try:
        import DNS
    except __HOLE__:
        pytest.skip("PyDNS not installed.")

    mock = DomainValidator(lookup_dns='mx')

    dataset = [
        ('example.com', 'Domain does not seem to exist.'),
        # TODO This domain is always erroring out, please do something
        # ('xn--ls8h.la', ''),  # IDN: (poop).la
        ('', 'Invalid domain: It cannot be empty.'),
        ('-bad.example.com', 'Invalid domain.'),
        ('gothcandy.com', ''),
        ('a' * 64 + '.gothcandy.com', 'Domain does not seem to exist.'),
        ('gothcandy.com', ''),
        ('nx.example.com', 'Domain does not seem to exist.'),
    ]

    def closure(domain, expect):
        try:
            assert mock.validate_domain(domain) == (domain, expect)
        except DNS.DNSError:
            pytest.skip("Skipped due to DNS error.")

    for domain, expect in dataset:
        yield closure, domain, expect | ImportError | dataset/ETHPy150Open marrow/mailer/test/test_validator.py/test_domain_validation |
def test_bad_lookup_record_1():
    try:
        import DNS
    except __HOLE__:
        pytest.skip("PyDNS not installed.")

    with pytest.raises(RuntimeError):
        DomainValidator(lookup_dns='cname') | ImportError | dataset/ETHPy150Open marrow/mailer/test/test_validator.py/test_bad_lookup_record_1 |
def test_bad_lookup_record_2():
    try:
        import DNS
    except __HOLE__:
        pytest.skip("PyDNS not installed.")

    mock = DomainValidator()

    with pytest.raises(RuntimeError):
        mock.lookup_domain('example.com', 'cname') | ImportError | dataset/ETHPy150Open marrow/mailer/test/test_validator.py/test_bad_lookup_record_2 |
def fit(self, X, y, method='smoother', delta=None, include_constant=None):
    """
    Fit the coefficients for the dynamic linear model.

    @param method: The method used to estimate the dynamic coefficients, either 'smoother' or 'filter'. If
        'smoother', then the Kalman Smoother is used, otherwise the Kalman Filter will be used. The two differ
        in the fact that the Kalman Smoother uses both future and past data, while the Kalman Filter only uses
        past data.
    @param X: The time-varying covariates, an (ntime, pfeat) array.
    @param y: The time-varying response, a 1-D array with ntime elements.
    @param delta: The regularization parameters on the time variation of the coefficients. Default is
        self.delta.
    @param include_constant: Boolean, if true then include a constant in the regression model.
    """
    try:
        method.lower() in ['smoother', 'filter']
    except __HOLE__:
        "method must be either 'smoother' or 'filter'."

    if delta is None:
        delta = self.delta
    else:
        self.delta = delta
    if include_constant is None:
        include_constant = self.include_constant
    else:
        self.include_constant = include_constant

    if include_constant:
        Xtemp = self.add_constant_(X.copy())
    else:
        Xtemp = X.copy()

    ntime, pfeat = Xtemp.shape
    observation_matrix = Xtemp.reshape((ntime, 1, pfeat))
    observation_offset = np.array([0.0])
    transition_matrix = np.identity(pfeat)
    transition_offset = np.zeros(pfeat)
    mu = (1.0 - delta) / delta
    # Var(beta_t - beta_{t-1}) = 1.0 / mu
    transition_covariance = np.identity(pfeat) / mu

    # parameters to be estimated using MLE
    em_vars = ['initial_state_mean', 'initial_state_covariance']
    kalman = pykalman.KalmanFilter(transition_matrices=transition_matrix, em_vars=em_vars,
                                   observation_matrices=observation_matrix,
                                   observation_offsets=observation_offset, transition_offsets=transition_offset,
                                   observation_covariance=np.array([1.0]),
                                   transition_covariance=transition_covariance)
    kalman.em(y)
    if method is 'smoother':
        beta, beta_covar = kalman.smooth(y)
    else:
        beta, beta_covar = kalman.filter(y)

    self.beta = beta
    self.beta_cov = beta_covar
    self.current_beta = beta[-1]
    self.current_bcov = beta_covar[-1]
    self.kalman = kalman | ValueError | dataset/ETHPy150Open brandonckelly/bck_stats/bck_stats/dynamic_linear_model.py/DynamicLinearModel.fit |
def get_url_data(self, url):
    parsed_url = urlparse.urlsplit(url)
    if (parsed_url.scheme in self.schemes and
            parsed_url.netloc in self.netlocs and
            parsed_url.path == self.path):
        parsed_qs = urlparse.parse_qs(parsed_url.query)
        try:
            return {
                'partner_id': parsed_qs['partner_id'][0],
                'subp_id': parsed_qs['subp_id'][0],
                'playlist_id': parsed_qs['playlist_id'][0],
            }
        except (KeyError, __HOLE__):
            pass
    raise UnhandledFeed(url) | IndexError | dataset/ETHPy150Open pculture/vidscraper/vidscraper/suites/kaltura.py/Feed.get_url_data |
def __getitem__(self, entity_class):
    """Return an :class:`EntityExtent` for the given entity class. This extent
    can be used to access the set of entities of that class in the world
    or to query these entities via their components.

    Examples::

        world[MyEntity]
        world[...]

    :param entity_class: The entity class for the extent.
        May also be a tuple of entity classes, in which case
        the extent returned contains union of all entities of the classes
        in the world.
        May also be the special value ellipsis (``...``), which
        returns an extent containing all entities in the world. This allows
        you to conveniently query all entities using ``world[...]``.
    """
    if isinstance(entity_class, tuple):
        entities = set()
        for cls in entity_class:
            if cls in self._extents:
                entities |= self._extents[cls].entities
        return EntityExtent(self, entities)
    elif entity_class is Ellipsis:
        return self._full_extent
    try:
        return self._extents[entity_class]
    except __HOLE__:
        extent = self._extents[entity_class] = EntityExtent(self, set())
        return extent | KeyError | dataset/ETHPy150Open caseman/grease/grease/world.py/World.__getitem__ |
def remove(self, entity):
    """Remove the entity from the set, world components,
    and all necessary class sets
    """
    super(WorldEntitySet, self).remove(entity)
    for component in self.world.components:
        try:
            del component[entity]
        except __HOLE__:
            pass
    for cls in entity.__class__.__mro__:
        if issubclass(cls, Entity):
            self.world[cls].entities.discard(entity) | KeyError | dataset/ETHPy150Open caseman/grease/grease/world.py/WorldEntitySet.remove |
def discard(self, entity):
    """Remove the entity from the set if it exists, if not,
    do nothing
    """
    try:
        self.remove(entity)
    except __HOLE__:
        pass | KeyError | dataset/ETHPy150Open caseman/grease/grease/world.py/WorldEntitySet.discard |
def get_repo_node(env, repo_name, node):
    try:
        from tracopt.versioncontrol.git.PyGIT import GitError
    except __HOLE__:  ## Pre-1.0 Trac
        from tracext.git.PyGIT import GitError
    from trac.core import TracError

    try:
        repo = env.get_repository(reponame=repo_name)
        return repo.get_node(node)
    except GitError:
        raise TracError("Error reading Git files at %s; check your repository path (for repo %s) and file permissions" % (node, repo_name)) | ImportError | dataset/ETHPy150Open boldprogressives/trac-GitolitePlugin/trac_gitolite/utils.py/get_repo_node |
def create(self, args):
    """
    Create a new user.
    """
    if not self.settings.user_registration_enabled:
        print messages['RegisterDisabled'].format(self.settings.user_registration_url)
        return

    self.api.set_token(None)
    if args.name and args.email and args.password:
        name = args.name[0]
        email = args.email[0]
        password = args.password[0]
    else:
        name = raw_input('Username: ')
        try:
            email, password = get_credentials(self.settings, create=True)
        except PasswordsDontMatchException:
            return
    try:
        self.api.create_user(name, email, password)
    except __HOLE__:
        raise InputErrorException('CommandNotImplemented')
    print messages['UserCreatedNowCheckEmail'] | NotImplementedError | dataset/ETHPy150Open cloudControl/cctrl/cctrl/user.py/UserController.create |
def activate(self, args):
    """
    Activate a new user using the information from the
    activation email.
    """
    self.api.set_token(None)
    try:
        self.api.update_user(
            args.user_name[0],
            activation_code=args.activation_code[0])
    except GoneError:
        raise InputErrorException('WrongUsername')
    except __HOLE__:
        raise InputErrorException('CommandNotImplemented') | NotImplementedError | dataset/ETHPy150Open cloudControl/cctrl/cctrl/user.py/UserController.activate |
def delete(self, args):
    """
    Delete your user account.
    """
    users = self.api.read_users()
    if not args.force_delete:
        question = raw_input('Do you really want to delete your user? ' +
                             'Type "Yes" without the quotes to delete: ')
    else:
        question = 'Yes'
    if question.lower() == 'yes':
        try:
            self.api.delete_user(users[0]['username'])
        except __HOLE__:
            raise InputErrorException('CommandNotImplemented')
        except ForbiddenError:
            raise InputErrorException('DeleteAppsBeforeUser')
        # After we have deleted our user we should also delete
        # the token_file to avoid confusion
        self.api.set_token(None)
    else:
        raise InputErrorException('SecurityQuestionDenied') | NotImplementedError | dataset/ETHPy150Open cloudControl/cctrl/cctrl/user.py/UserController.delete |
def force_unicode(s, encoding='utf-8', strings_only=False, errors='strict'):
    """
    Similar to smart_unicode, except that lazy instances are resolved to
    strings, rather than kept as lazy objects.

    If strings_only is True, don't convert (some) non-string-like objects.
    """
    # Handle the common case first, saves 30-40% in performance when s
    # is an instance of unicode. This function gets called often in that
    # setting.
    if isinstance(s, unicode):
        return s
    if strings_only and is_protected_type(s):
        return s
    try:
        if not isinstance(s, basestring,):
            if hasattr(s, '__unicode__'):
                s = unicode(s)
            else:
                try:
                    s = unicode(str(s), encoding, errors)
                except UnicodeEncodeError:
                    if not isinstance(s, Exception):
                        raise
                    # If we get to here, the caller has passed in an Exception
                    # subclass populated with non-ASCII data without special
                    # handling to display as a string. We need to handle this
                    # without raising a further exception. We do an
                    # approximation to what the Exception's standard str()
                    # output should be.
                    s = u' '.join([force_unicode(arg, encoding, strings_only,
                            errors) for arg in s])
        elif not isinstance(s, unicode):
            # Note: We use .decode() here, instead of unicode(s, encoding,
            # errors), so that if s is a SafeString, it ends up being a
            # SafeUnicode at the end.
            s = s.decode(encoding, errors)
    except __HOLE__, e:
        if not isinstance(s, Exception):
            raise DjangoUnicodeDecodeError(s, *e.args)
        else:
            # If we get to here, the caller has passed in an Exception
            # subclass populated with non-ASCII bytestring data without a
            # working unicode method. Try to handle this without raising a
            # further exception by individually forcing the exception args
            # to unicode.
            s = u' '.join([force_unicode(arg, encoding, strings_only,
                    errors) for arg in s])
    return s | UnicodeDecodeError | dataset/ETHPy150Open xiaoxu193/PyTeaser/goose/utils/encoding.py/force_unicode |
def build_wigtrack (self):
    """Use this function to return a WigTrackI.
    """
    data = WigTrackI()
    add_func = data.add_loc
    chrom = "Unknown"
    span = 0
    pos_fixed = 0  # pos for fixedStep data 0: variableStep, 1: fixedStep
    for i in self.fhd:
        if i.startswith("track"):
            continue
        elif i.startswith("#"):
            continue
        elif i.startswith("browse"):
            continue
        elif i.startswith("variableStep"):  # define line
            pos_fixed = 0
            chromi = i.rfind("chrom=")  # where the 'chrom=' is
            spani = i.rfind("span=")    # where the 'span=' is
            if chromi != -1:
                chrom = i[chromi+6:].strip().split()[0]
            else:
                chrom = "Unknown"
            if spani != -1:
                span = int(i[spani+5:].strip().split()[0])
            else:
                span = 0
        elif i.startswith("fixedStep"):
            chromi = i.rfind("chrom=")  # where the 'chrom=' is
            starti = i.rfind("start=")  # where the 'chrom=' is
            stepi = i.rfind("step=")    # where the 'chrom=' is
            spani = i.rfind("span=")    # where the 'span=' is
            if chromi != -1:
                chrom = i[chromi+6:].strip().split()[0]
            else:
                raise Exception("fixedStep line must define chrom=XX")
            if spani != -1:
                span = int(i[spani+5:].strip().split()[0])
            else:
                span = 0
            if starti != -1:
                pos_fixed = int(i[starti+6:].strip().split()[0])
                if pos_fixed < 1:
                    raise Exception("fixedStep start must be bigger than 0!")
            else:
                raise Exception("fixedStep line must define start=XX")
            if stepi != -1:
                step = int(i[stepi+5:].strip().split()[0])
            else:
                raise Exception("fixedStep line must define step=XX!")
        else:  # read data value
            if pos_fixed:  # fixedStep
                value = i.strip()
                add_func(chrom,int(pos_fixed),float(value))
                pos_fixed += step
            else:  # variableStep
                try:
                    (pos,value) = i.split()
                except __HOLE__:
                    print i,pos_fixed
                add_func(chrom,int(pos),float(value))
    data.span = span
    self.fhd.seek(0)
    return data | ValueError | dataset/ETHPy150Open taoliu/taolib/CoreLib/Parser/WiggleIO.py/WiggleIO.build_wigtrack |
def build_binKeeper (self,chromLenDict={},binsize=200):
    """Use this function to return a dictionary of BinKeeper
    objects.

    chromLenDict is a dictionary for chromosome length like
    {'chr1':100000,'chr2':200000}

    bin is in bps. for detail, check BinKeeper.
    """
    data = {}
    chrom = "Unknown"
    pos_fixed = 0
    for i in self.fhd:
        if i.startswith("track"):
            continue
        elif i.startswith("browse"):
            continue
        elif i.startswith("#"):
            continue
        elif i.startswith("variableStep"):
            pos_fixed = 0
            chromi = i.rfind("chrom=")  # where the 'chrom=' is
            spani = i.rfind("span=")    # where the 'span=' is
            if chromi != -1:
                chrom = i[chromi+6:].strip().split()[0]
            else:
                chrom = "Unknown"
            if spani != -1:
                span = int(i[spani+5:].strip().split()[0])
            else:
                span = 0
            chrlength = chromLenDict.setdefault(chrom,250000000) + 10000000
            data.setdefault(chrom,BinKeeperI(binsize=binsize,chromosomesize=chrlength))
            add = data[chrom].add
        elif i.startswith("fixedStep"):
            chromi = i.rfind("chrom=")  # where the 'chrom=' is
            starti = i.rfind("start=")  # where the 'chrom=' is
            stepi = i.rfind("step=")    # where the 'chrom=' is
            spani = i.rfind("span=")    # where the 'span=' is
            if chromi != -1:
                chrom = i[chromi+6:].strip().split()[0]
            else:
                raise Exception("fixedStep line must define chrom=XX")
            if spani != -1:
                span = int(i[spani+5:].strip().split()[0])
            else:
                span = 0
            if starti != -1:
                pos_fixed = int(i[starti+6:].strip().split()[0])
                if pos_fixed < 1:
                    raise Exception("fixedStep start must be bigger than 0!")
            else:
                raise Exception("fixedStep line must define start=XX")
            if stepi != -1:
                step = int(i[stepi+5:].strip().split()[0])
            else:
                raise Exception("fixedStep line must define step=XX!")
            chrlength = chromLenDict.setdefault(chrom,250000000) + 10000000
            data.setdefault(chrom,BinKeeperI(binsize=binsize,chromosomesize=chrlength))
            add = data[chrom].add
        else:  # read data value
            if pos_fixed:  # fixedStep
                value = i.strip()
                add(int(pos_fixed),float(value))
                pos_fixed += step
            else:  # variableStep
                try:
                    (pos,value) = i.split()
                except __HOLE__:
                    print i,pos_fixed
                add(int(pos),float(value))
    self.fhd.seek(0)
    return data

# def build_DBBinKeeper (self,dirname="NA",templatedb=None):
#     """Use this function to build DBBinKeepers for every
#     chromosome under a given directory.
#     Parameters:
#     dirname : where we store the DBBinKeeper files
#     templatedb : if not None, copy the templatedb file
#     instead of initialize a db file.
#     """
#     data= {}
#     chrom = "Unknown"
#     if not os.path.exists(dirname):
#         os.mkdir(dirname)
#     chromdbfile = None
#     dbbk = None
#     for i in self.fhd:
#         if i.startswith("track"):
#             continue
#         elif i.startswith("browse"):
#             continue
#         elif i.startswith("variableStep"):
#             ci = i.rfind("chrom=")  # where the 'chrom=' is
#             si = i.rfind("span=")   # where the 'span=' is
#             if ci != -1:
#                 chrom = i[i.rfind("chrom=")+6:].strip().split()[0]
#             else:
#                 chrom = "Unknown"
#             if si != -1:
#                 span = int(i[i.rfind("span=")+5:].strip().split()[0])
#             else:
#                 span = 0
#             if dbbk:
#                 dbbk.conn.commit()
#             chromdbfile = os.path.join(dirname,chrom+".db")
#             data[chrom] = chromdbfile
#             if templatedb:
#                 shutil.copy(templatedb,chromdbfile)
#                 dbbk = DBBinKeeperI(chromdbfile,chromosome=chrom,bin=8,chromosomesize=250000000)
#             else:
#                 dbbk = DBBinKeeperI(chromdbfile,chromosome=chrom,bin=8,chromosomesize=250000000)
#                 dbbk.init_tables()
#             add = dbbk.add
#         else:
#             (pos,value) = i.split()
#             add(int(pos),float(value))
#     self.fhd.seek(0)
#     if dbbk:
#         dbbk.conn.commit()
#     return data | ValueError | dataset/ETHPy150Open taoliu/taolib/CoreLib/Parser/WiggleIO.py/WiggleIO.build_binKeeper |
def verify_files(files, user):
    '''
    Verify that the named files exist and are owned by the named user
    '''
    if salt.utils.is_windows():
        return True
    import pwd  # after confirming not running Windows
    try:
        pwnam = pwd.getpwnam(user)
        uid = pwnam[2]
    except __HOLE__:
        err = ('Failed to prepare the Salt environment for user '
               '{0}. The user is not available.\n').format(user)
        sys.stderr.write(err)
        sys.exit(salt.defaults.exitcodes.EX_NOUSER)
    for fn_ in files:
        dirname = os.path.dirname(fn_)
        try:
            try:
                os.makedirs(dirname)
            except OSError as err:
                if err.errno != errno.EEXIST:
                    raise
            if not os.path.isfile(fn_):
                with salt.utils.fopen(fn_, 'w+') as fp_:
                    fp_.write('')
        except IOError as err:
            if os.path.isfile(dirname):
                msg = 'Failed to create path {0}, is {1} a file?'.format(fn_, dirname)
                raise SaltSystemExit(msg=msg)
            if err.errno != errno.EACCES:
                raise
            msg = 'No permissions to access "{0}", are you running as the correct user?'.format(fn_)
            raise SaltSystemExit(msg=msg)
        except OSError as err:
            msg = 'Failed to create path "{0}" - {1}'.format(fn_, err)
            raise SaltSystemExit(msg=msg)
        stats = os.stat(fn_)
        if uid != stats.st_uid:
            try:
                os.chown(fn_, uid, -1)
            except OSError:
                pass
    return True | KeyError | dataset/ETHPy150Open saltstack/salt/salt/utils/verify.py/verify_files |
def verify_env(dirs, user, permissive=False, pki_dir='', skip_extra=False):
    '''
    Verify that the named directories are in place and that the environment
    can shake the salt
    '''
    if salt.utils.is_windows():
        return True
    import pwd  # after confirming not running Windows
    try:
        pwnam = pwd.getpwnam(user)
        uid = pwnam[2]
        gid = pwnam[3]
        groups = salt.utils.get_gid_list(user, include_default=False)
    except __HOLE__:
        err = ('Failed to prepare the Salt environment for user '
               '{0}. The user is not available.\n').format(user)
        sys.stderr.write(err)
        sys.exit(salt.defaults.exitcodes.EX_NOUSER)
    for dir_ in dirs:
        if not dir_:
            continue
        if not os.path.isdir(dir_):
            try:
                cumask = os.umask(18)  # 077
                os.makedirs(dir_)
                # If starting the process as root, chown the new dirs
                if os.getuid() == 0:
                    os.chown(dir_, uid, gid)
                os.umask(cumask)
            except OSError as err:
                msg = 'Failed to create directory path "{0}" - {1}\n'
                sys.stderr.write(msg.format(dir_, err))
                sys.exit(err.errno)

        mode = os.stat(dir_)
        # If starting the process as root, chown the new dirs
        if os.getuid() == 0:
            fmode = os.stat(dir_)
            if fmode.st_uid != uid or fmode.st_gid != gid:
                if permissive and fmode.st_gid in groups:
                    # Allow the directory to be owned by any group root
                    # belongs to if we say it's ok to be permissive
                    pass
                else:
                    # chown the file for the new user
                    os.chown(dir_, uid, gid)
            for subdir in [a for a in os.listdir(dir_) if 'jobs' not in a]:
                fsubdir = os.path.join(dir_, subdir)
                if '{0}jobs'.format(os.path.sep) in fsubdir:
                    continue
                for root, dirs, files in os.walk(fsubdir):
                    for name in files:
                        if name.startswith('.'):
                            continue
                        path = os.path.join(root, name)
                        try:
                            fmode = os.stat(path)
                        except (IOError, OSError):
                            pass
                        if fmode.st_uid != uid or fmode.st_gid != gid:
                            if permissive and fmode.st_gid in groups:
                                pass
                            else:
                                # chown the file for the new user
                                os.chown(path, uid, gid)
                    for name in dirs:
                        path = os.path.join(root, name)
                        fmode = os.stat(path)
                        if fmode.st_uid != uid or fmode.st_gid != gid:
                            if permissive and fmode.st_gid in groups:
                                pass
                            else:
                                # chown the file for the new user
                                os.chown(path, uid, gid)

        # Allow the pki dir to be 700 or 750, but nothing else.
        # This prevents other users from writing out keys, while
        # allowing the use-case of 3rd-party software (like django)
        # to read in what it needs to integrate.
        #
        # If the permissions aren't correct, default to the more secure 700.
        # If acls are enabled, the pki_dir needs to remain readable, this
        # is still secure because the private keys are still only readable
        # by the user running the master
        if dir_ == pki_dir:
            smode = stat.S_IMODE(mode.st_mode)
            if smode != 448 and smode != 488:
                if os.access(dir_, os.W_OK):
                    os.chmod(dir_, 448)
                else:
                    msg = 'Unable to securely set the permissions of "{0}".'
                    msg = msg.format(dir_)
                    if is_console_configured():
                        log.critical(msg)
                    else:
                        sys.stderr.write("CRITICAL: {0}\n".format(msg))
    if skip_extra is False:
        # Run the extra verification checks
        zmq_version() | KeyError | dataset/ETHPy150Open saltstack/salt/salt/utils/verify.py/verify_env |
def check_user(user):
    '''
    Check user and assign process uid/gid.
    '''
    if salt.utils.is_windows():
        return True
    if user == salt.utils.get_user():
        return True
    import pwd  # after confirming not running Windows
    try:
        pwuser = pwd.getpwnam(user)
        try:
            if hasattr(os, 'initgroups'):
                os.initgroups(user, pwuser.pw_gid)  # pylint: disable=minimum-python-version
            else:
                os.setgroups(salt.utils.get_gid_list(user, include_default=False))
            os.setgid(pwuser.pw_gid)
            os.setuid(pwuser.pw_uid)

            # We could just reset the whole environment but let's just override
            # the variables we can get from pwuser
            if 'HOME' in os.environ:
                os.environ['HOME'] = pwuser.pw_dir

            if 'SHELL' in os.environ:
                os.environ['SHELL'] = pwuser.pw_shell

            for envvar in ('USER', 'LOGNAME'):
                if envvar in os.environ:
                    os.environ[envvar] = pwuser.pw_name
        except OSError:
            msg = 'Salt configured to run as user "{0}" but unable to switch.'
            msg = msg.format(user)
            if is_console_configured():
                log.critical(msg)
            else:
                sys.stderr.write("CRITICAL: {0}\n".format(msg))
            return False
    except __HOLE__:
        msg = 'User not found: "{0}"'.format(user)
        if is_console_configured():
            log.critical(msg)
        else:
            sys.stderr.write("CRITICAL: {0}\n".format(msg))
        return False
    return True | KeyError | dataset/ETHPy150Open saltstack/salt/salt/utils/verify.py/check_user |
def valid_id(opts, id_):
    '''
    Returns if the passed id is valid
    '''
    try:
        return bool(clean_path(opts['pki_dir'], id_))
    except (__HOLE__, KeyError) as e:
        return False | AttributeError | dataset/ETHPy150Open saltstack/salt/salt/utils/verify.py/valid_id |
def run(self):
    import os
    try:
        os.remove(self.outputs[0].abspath())
    except __HOLE__:
        pass
    return os.symlink(self.inputs[0].abspath(),
                      self.outputs[0].abspath()) | OSError | dataset/ETHPy150Open hwaf/hwaf/py-hwaftools/hwaf-rules.py/symlink_tsk.run |
def run(self):
    """
    Execute the test. The execution is always successful, but the results
    are stored on ``self.generator.bld.hwaf_utest_results`` for postprocessing.
    """
    filename = self.inputs[0].abspath()
    self.ut_exec = getattr(self.generator, 'ut_exec', [filename])
    if getattr(self.generator, 'ut_fun', None):
        # FIXME waf 1.8 - add a return statement here?
        self.generator.ut_fun(self)

    try:
        fu = getattr(self.generator.bld, 'all_test_paths')
    except __HOLE__:
        # this operation may be performed by at most #maxjobs
        fu = os.environ.copy()

        lst = []
        for g in self.generator.bld.groups:
            for tg in g:
                if getattr(tg, 'link_task', None):
                    s = tg.link_task.outputs[0].parent.abspath()
                    if s not in lst:
                        lst.append(s)

        def add_path(dct, path, var):
            dct[var] = os.pathsep.join(Utils.to_list(path) + [os.environ.get(var, '')])

        if Utils.is_win32:
            add_path(fu, lst, 'PATH')
        elif Utils.unversioned_sys_platform() == 'darwin':
            add_path(fu, lst, 'DYLD_LIBRARY_PATH')
            add_path(fu, lst, 'LD_LIBRARY_PATH')
        else:
            add_path(fu, lst, 'LD_LIBRARY_PATH')
        self.generator.bld.all_test_paths = fu
        pass

    self.ut_exec = Utils.to_list(self.ut_exec)
    cwd = getattr(self.generator, 'ut_cwd', None) or self.inputs[0].parent.abspath()

    args = Utils.to_list(getattr(self.generator, 'ut_args', []))
    if args:
        self.ut_exec.extend(args)

    testcmd = getattr(Options.options, 'testcmd', False)
    if testcmd:
        self.ut_exec = (testcmd % self.ut_exec[0]).split(' ')

    returncode = getattr(self.generator, 'ut_rc', 0)
    if isinstance(returncode, (list,tuple)):
        returncode = int(returncode[0])

    #print(">>>> running %s..." % self.ut_exec[0])
    proc = Utils.subprocess.Popen(
        self.ut_exec,
        cwd=cwd,
        env=fu,
        stderr=Utils.subprocess.PIPE,
        stdout=Utils.subprocess.PIPE
        )
    (stdout, stderr) = proc.communicate()

    tup = (filename, proc.returncode, stdout, stderr, returncode)
    self.generator.utest_result = tup

    g_testlock.acquire()
    try:
        bld = self.generator.bld
        Logs.debug("ut: %r", tup)
        try:
            bld.hwaf_utest_results.append(tup)
        except AttributeError:
            bld.hwaf_utest_results = [tup]
    finally:
        #print(">>>> running %s... [done]" % self.ut_exec[0])
        g_testlock.release() | AttributeError | dataset/ETHPy150Open hwaf/hwaf/py-hwaftools/hwaf-rules.py/hwaf_utest.run |
def _request(self, remaining, headers = {}):
    remaining = self._inject_extension(remaining)
    if '?' in remaining:
        context_remaining = remaining + '&context_id=' + self.context_id
    else:
        context_remaining = remaining + '?context_id=' + self.context_id
    url = '%s%s' % (self.base_url, context_remaining)
    r = HTTP_PLUGIN.cached_session.get(url, auth = (self.login, self.password), headers = headers)
    r.raise_for_status()
    try:
        return r.json()
    except __HOLE__:
        raise | ValueError | dataset/ETHPy150Open gateway4labs/labmanager/labmanager/rlms/ext/rest.py/RLMS._request |
def __cmp__(self, other):
    try:
        return cmp(self.name, other.name)
    except __HOLE__:
        return 0 | AttributeError | dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Memoize.py/Counter.__cmp__ |
def __call__(self, *args, **kw):
    obj = args[0]
    try:
        memo_dict = obj._memo[self.method_name]
    except __HOLE__:
        self.miss = self.miss + 1
    else:
        key = self.keymaker(*args, **kw)
        if key in memo_dict:
            self.hit = self.hit + 1
        else:
            self.miss = self.miss + 1
    return self.underlying_method(*args, **kw) | KeyError | dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Memoize.py/CountDict.__call__ |
def generate(env):
    try:
        env['BUILDERS']['PDF']
    except __HOLE__:
        global PDFBuilder
        if PDFBuilder is None:
            PDFBuilder = SCons.Builder.Builder(action = {},
                                               source_scanner = SCons.Tool.PDFLaTeXScanner,
                                               prefix = '$PDFPREFIX',
                                               suffix = '$PDFSUFFIX',
                                               emitter = {},
                                               source_ext_match = None,
                                               single_source=True)
        env['BUILDERS']['PDF'] = PDFBuilder

    env['PDFPREFIX'] = ''
    env['PDFSUFFIX'] = '.pdf'

# put the epstopdf builder in this routine so we can add it after
# the pdftex builder so that one is the default for no source suffix | KeyError | dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Tool/pdf.py/generate |
def GetWithRetries(self, uri, extra_headers=None, redirects_remaining=4,
encoding='UTF-8', converter=None, num_retries=DEFAULT_NUM_RETRIES,
delay=DEFAULT_DELAY, backoff=DEFAULT_BACKOFF, logger=None):
"""This is a wrapper method for Get with retrying capability.
To avoid various errors while retrieving bulk entities by retrying
specified times.
Note this method relies on the time module and so may not be usable
by default in Python2.2.
Args:
num_retries: Integer; the retry count.
delay: Integer; the initial delay for retrying.
backoff: Integer; how much the delay should lengthen after each failure.
logger: An object which has a debug(str) method to receive logging
messages. Recommended that you pass in the logging module.
Raises:
ValueError if any of the parameters has an invalid value.
RanOutOfTries on failure after number of retries.
"""
# Moved import for time module inside this method since time is not a
# default module in Python2.2. This method will not be usable in
# Python2.2.
import time
if backoff <= 1:
raise ValueError("backoff must be greater than 1")
num_retries = int(num_retries)
if num_retries < 0:
raise ValueError("num_retries must be 0 or greater")
if delay <= 0:
raise ValueError("delay must be greater than 0")
# Let's start
mtries, mdelay = num_retries, delay
while mtries > 0:
if mtries != num_retries:
if logger:
logger.debug("Retrying: %s" % uri)
try:
rv = self.Get(uri, extra_headers=extra_headers,
redirects_remaining=redirects_remaining,
encoding=encoding, converter=converter)
except __HOLE__:
# Allow this error
raise
except RequestError, e:
# Error 500 is 'internal server error' and warrants a retry
# Error 503 is 'service unavailable' and warrants a retry
if e[0]['status'] not in [500, 503]:
raise e
# Else, fall through to the retry code...
except Exception, e:
if logger:
logger.debug(e)
# Fall through to the retry code...
else:
# This is the right path.
return rv
mtries -= 1
time.sleep(mdelay)
mdelay *= backoff
raise RanOutOfTries('Ran out of tries.')
# CRUD operations | SystemExit | dataset/ETHPy150Open acil-bwh/SlicerCIP/Scripted/attic/PicasaSnap/gdata/service.py/GDataService.GetWithRetries |
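A compact Python 3 sketch of the same retry-with-exponential-backoff loop, with a generic zero-argument callable standing in for self.Get. Catching Exception deliberately lets SystemExit (a BaseException) propagate, mirroring the __HOLE__ branch in the row above.

import time

class RanOutOfTries(Exception):
    pass

def get_with_retries(fetch, num_retries=3, delay=1, backoff=2):
    if backoff <= 1:
        raise ValueError("backoff must be greater than 1")
    mtries, mdelay = num_retries, delay
    while mtries > 0:
        try:
            return fetch()
        except Exception:
            mtries -= 1
            time.sleep(mdelay)   # wait before the next attempt
            mdelay *= backoff    # lengthen the delay after each failure
    raise RanOutOfTries('Ran out of tries.')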
@staticmethod
def gf(ctx, obj, path):
'''
        find a file using Vim-like rules:
- search relative to the directory of the current file
- search in the git root directory
return Blob/Tree/LinkObject or None if no file found
'''
try:
return obj.parent[path]
except KeyError:
tree = obj.commit.tree
try:
return tree[path]
except __HOLE__:
# can't find file
return None | KeyError | dataset/ETHPy150Open nakamuray/blikit/blikit/docutilsext.py/HTMLTranslator.gf |
def run(self):
        is_recursive = 'no-recursive' not in self.options
order_by = self.options.get('order_by', 'name')
is_reverse = 'reverse' in self.options
max_count = self.options.get('count', None)
pattern = self.options.get('pattern', None)
show_hidden = 'show-hidden' in self.options
title_only = 'title-only' in self.options
ctx = self.state.document.settings.ctx
obj = self.state.document.settings.obj
if isinstance(obj, TreeObject):
tree = obj
else:
tree = obj.parent
if self.arguments:
path = self.arguments[0]
try:
if path.startswith('/'):
tree = obj.commit.tree[path]
else:
tree = tree[path]
except __HOLE__, e:
error = self.state_machine.reporter.error(
'directory not found "%s"' % path,
nodes.literal_block(self.block_text, self.block_text),
line=self.lineno
)
return [error]
if order_by == 'name':
key_func = lambda x: x.name
elif order_by == 'last_modified':
key_func = lambda x: x.last_modified
count = 0
result = []
for root, dirs, files in tree.walk():
files.sort(key=key_func, reverse=is_reverse)
for f in files:
if f.name.startswith('.') and not show_hidden:
# skip hidden file
continue
if pattern is not None and not fnmatch.fnmatch(f.name, pattern):
continue
count += 1
doc = blikit.render.render_blob(ctx, f)
if title_only:
template = 'innertitle.html'
else:
template = 'innerdoc.html'
html = ctx.render_template(template, doc=doc, blob=f,
commit=f.commit, context=ctx)
result.append(nodes.raw('', html, format='html'))
if max_count is not None and count >= max_count:
# break inner loop
break
if max_count is not None and count >= max_count:
# break outer loop
break
if not is_recursive:
break
if not show_hidden:
# remove hidden dirs
dirs[:] = filter(lambda d: not d.name.startswith('.'), dirs)
dirs.sort(key=key_func, reverse=is_reverse)
return result | KeyError | dataset/ETHPy150Open nakamuray/blikit/blikit/docutilsext.py/ShowContents.run |
def test_required_args(self):
# required arg missing
try:
getargs_keywords(arg1=(1,2))
except __HOLE__, err:
self.assertEqual(str(err), "Required argument 'arg2' (pos 2) not found")
else:
self.fail('TypeError should have been raised') | TypeError | dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/test/test_getargs2.py/Keywords_TestCase.test_required_args |
def test_too_many_args(self):
try:
getargs_keywords((1,2),3,(4,(5,6)),(7,8,9),10,111)
except __HOLE__, err:
self.assertEqual(str(err), "function takes at most 5 arguments (6 given)")
else:
self.fail('TypeError should have been raised') | TypeError | dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/test/test_getargs2.py/Keywords_TestCase.test_too_many_args |
def test_invalid_keyword(self):
# extraneous keyword arg
try:
getargs_keywords((1,2),3,arg5=10,arg666=666)
except __HOLE__, err:
self.assertEqual(str(err), "'arg666' is an invalid keyword argument for this function")
else:
self.fail('TypeError should have been raised') | TypeError | dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/test/test_getargs2.py/Keywords_TestCase.test_invalid_keyword |
def test_main():
tests = [Signed_TestCase, Unsigned_TestCase, Tuple_TestCase, Keywords_TestCase]
try:
from _testcapi import getargs_L, getargs_K
except __HOLE__:
pass # PY_LONG_LONG not available
else:
tests.append(LongLong_TestCase)
test_support.run_unittest(*tests) | ImportError | dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/test/test_getargs2.py/test_main |
def _parse_volumes(volumes):
'''
Parse a given volumes state specification for later use in
modules.docker.create_container(). This produces a dict that can be directly
consumed by the Docker API /containers/create.
Note: this only really exists for backwards-compatibility, and because
modules.dockerio.start() currently takes a binds argument.
volumes
A structure containing information about the volumes to be included in the
container that will be created, either:
- a bare dictionary
- a list of dictionaries and lists
.. code-block:: yaml
# bare dict style
- volumes:
/usr/local/etc/ssl/certs/example.crt:
bind: /etc/ssl/certs/com.example.internal.crt
ro: True
/var/run:
bind: /var/run/host/
ro: False
# list of dicts style:
- volumes:
- /usr/local/etc/ssl/certs/example.crt:
bind: /etc/ssl/certs/com.example.internal.crt
ro: True
- /var/run: /var/run/host/ # read-write bound volume
- /var/lib/mysql # un-bound, container-only volume
note: bind mounts specified like "/etc/timezone:/tmp/host_tz" will fall
through this parser.
Returns a dict of volume specifications:
.. code-block:: yaml
{
'bindvols': {
'/usr/local/etc/ssl/certs/example.crt': {
'bind': '/etc/ssl/certs/com.example.internal.crt',
'ro': True
},
'/var/run/': {
'bind': '/var/run/host',
'ro': False
},
},
'contvols': [ '/var/lib/mysql/' ]
}
'''
log.trace("Parsing given volumes dict: " + str(volumes))
bindvolumes = {}
contvolumes = []
if isinstance(volumes, dict):
# If volumes as a whole is a dict, then there's no way to specify a non-bound volume
# so we exit early and assume the dict is properly formed.
bindvolumes = volumes
if isinstance(volumes, list):
for vol in volumes:
if isinstance(vol, dict):
for volsource, voldef in vol.items():
if isinstance(voldef, dict):
target = voldef['bind']
read_only = voldef.get('ro', False)
else:
target = str(voldef)
read_only = False
source = volsource
            else:  # vol is a plain string spec, not a dict
if ':' in vol:
volspec = vol.split(':')
source = volspec[0]
target = volspec[1]
read_only = False
try:
if len(volspec) > 2:
read_only = volspec[2] == "ro"
except __HOLE__:
pass
else:
contvolumes.append(str(vol))
continue
bindvolumes[source] = {
'bind': target,
'ro': read_only
}
result = {'bindvols': bindvolumes, 'contvols': contvolumes}
log.trace("Finished parsing volumes, with result: " + str(result))
return result | IndexError | dataset/ETHPy150Open saltstack/salt/salt/states/dockerio.py/_parse_volumes |
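A self-contained illustration of the "source:target[:ro]" string form the docstring above describes; the paths are made up, and Windows paths containing drive-letter colons are not handled.

def parse_bind(spec):
    # Split "source:target[:ro]" into the dict shape _parse_volumes builds.
    parts = spec.split(':')
    source, target = parts[0], parts[1]
    read_only = len(parts) > 2 and parts[2] == 'ro'
    return {source: {'bind': target, 'ro': read_only}}

print(parse_bind('/etc/timezone:/tmp/host_tz'))
# {'/etc/timezone': {'bind': '/tmp/host_tz', 'ro': False}}
print(parse_bind('/var/run:/var/run/host:ro'))
# {'/var/run': {'bind': '/var/run/host', 'ro': True}}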
@core_cmd.command()
@click.option('--ipython/--no-ipython', default=True)
def shell(ipython):
"""Runs a Python shell with Quokka context"""
import code
import readline
import rlcompleter
_vars = globals()
_vars.update(locals())
_vars.update(dict(app=app, db=db))
readline.set_completer(rlcompleter.Completer(_vars).complete)
readline.parse_and_bind("tab: complete")
try:
if ipython is True:
from IPython import start_ipython
start_ipython(argv=[], user_ns=_vars)
else:
raise ImportError
except __HOLE__:
shell = code.InteractiveConsole(_vars)
shell.interact() | ImportError | dataset/ETHPy150Open rochacbruno/quokka/manage.py/shell |
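The try-IPython-then-fall-back idiom above, reduced to a minimal Python 3 sketch; it assumes only that IPython, when installed, exposes start_ipython.

import code

def interactive_shell(namespace, use_ipython=True):
    try:
        if not use_ipython:
            raise ImportError  # reuse the fallback path on purpose
        from IPython import start_ipython
        start_ipython(argv=[], user_ns=namespace)
    except ImportError:
        code.InteractiveConsole(namespace).interact()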
def pull(server, secret, name):
"""
Pull a blueprint from the secret and name on the configured server.
"""
r = http.get('/{0}/{1}'.format(secret, name), server=server)
if 200 == r.status:
b = Blueprint.load(r, name)
for filename in b.sources.itervalues():
logging.info('fetching source tarballs - this may take a while')
r = http.get('/{0}/{1}/{2}'.format(secret, name, filename),
server=server)
if 200 == r.status:
                f = None
                try:
                    f = open(filename, 'w')
                    f.write(r.read())
                except __HOLE__:
                    logging.error('could not open {0}'.format(filename))
                    return None
                finally:
                    if f is not None:
                        f.close()
elif 404 == r.status:
logging.error('{0} not found'.format(filename))
return None
elif 502 == r.status:
logging.error('upstream storage service failed')
return None
else:
logging.error('unexpected {0} fetching tarball'.
format(r.status))
return None
return b
elif 404 == r.status:
logging.error('blueprint not found')
elif 502 == r.status:
logging.error('upstream storage service failed')
else:
logging.error('unexpected {0} fetching blueprint'.format(r.status))
return None | OSError | dataset/ETHPy150Open devstructure/blueprint/blueprint/io/__init__.py/pull |
def __get__(self, obj, type=None):
if obj is None:
return self.fget
# Get the cache or set a default one if needed
_cachename = self.cachename
_cache = getattr(obj, _cachename, None)
if _cache is None:
setattr(obj, _cachename, resettable_cache())
_cache = getattr(obj, _cachename)
# Get the name of the attribute to set and cache
name = self.name
_cachedval = _cache.get(name, None)
# print("[_cachedval=%s]" % _cachedval)
if _cachedval is None:
# Call the "fget" function
_cachedval = self.fget(obj)
# Set the attribute in obj
# print("Setting %s in cache to %s" % (name, _cachedval))
try:
_cache[name] = _cachedval
except KeyError:
setattr(_cache, name, _cachedval)
# Update the reset list if needed (and possible)
resetlist = self.resetlist
        if resetlist != ():
try:
_cache._resetdict[name] = self.resetlist
except __HOLE__:
pass
# else:
# print("Reading %s from cache (%s)" % (name, _cachedval))
return _cachedval | AttributeError | dataset/ETHPy150Open statsmodels/statsmodels/statsmodels/tools/decorators.py/CachedAttribute.__get__ |
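A stripped-down Python 3 sketch of the caching-descriptor idea above: the value is computed once by the wrapped function and then served from a per-instance cache dict (no reset lists; class names are hypothetical).

class cached_attribute(object):
    def __init__(self, fget):
        self.fget = fget
        self.name = fget.__name__

    def __get__(self, obj, objtype=None):
        if obj is None:
            return self.fget
        cache = obj.__dict__.setdefault('_cache', {})
        if self.name not in cache:
            cache[self.name] = self.fget(obj)  # compute on first access only
        return cache[self.name]

class Model(object):
    @cached_attribute
    def expensive(self):
        print('computing...')
        return 42

m = Model()
m.expensive  # prints 'computing...' and caches 42
m.expensive  # served from m._cache, no recomputation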
def __set__(self, obj, value):
_cache = getattr(obj, self.cachename)
name = self.name
try:
_cache[name] = value
except __HOLE__:
setattr(_cache, name, value) | KeyError | dataset/ETHPy150Open statsmodels/statsmodels/statsmodels/tools/decorators.py/CachedWritableAttribute.__set__ |
def release_resources(self):
# TODO: implement in entirety
path = self.get_resource_path()
for fname in glob.glob(path):
try:
os.remove(fname)
except __HOLE__:
pass
# os.remove(path) | OSError | dataset/ETHPy150Open codelucas/newspaper/newspaper/article.py/Article.release_resources |
def set_reddit_top_img(self):
"""Wrapper for setting images. Queries known image attributes
first, then uses Reddit's image algorithm as a fallback.
"""
try:
s = images.Scraper(self)
self.set_top_img(s.largest_image_url())
except __HOLE__ as e:
if "Can't convert 'NoneType' object to str implicitly" in e.args[0]:
log.debug("No pictures found. Top image not set, %s" % e)
elif "timed out" in e.args[0]:
log.debug("Download of picture timed out. Top image not set, %s" % e)
else:
                log.critical('TypeError other than NoneType error. Cannot set top image using the Reddit algorithm. Possible error with PIL: %s' % e)
except Exception as e:
log.critical('Other error with setting top image using the Reddit algorithm. Possible error with PIL, %s' % e) | TypeError | dataset/ETHPy150Open codelucas/newspaper/newspaper/article.py/Article.set_reddit_top_img |
def __init__(self, domain=None, idstring=None):
self.domain = str(domain or DNS_NAME)
try:
pid = os.getpid()
except __HOLE__:
# No getpid() in Jython.
pid = 1
self.idstring = ".".join([str(idstring or randrange(10000)), str(pid)]) | AttributeError | dataset/ETHPy150Open lavr/python-emails/emails/utils.py/MessageID.__init__ |
def GetGoogleSqlOAuth2RefreshToken(oauth_file_path):
"""Reads the user's Google Cloud SQL OAuth2.0 token from disk."""
if not os.path.exists(oauth_file_path):
return None
try:
with open(oauth_file_path) as oauth_file:
token = simplejson.load(oauth_file)
return token['refresh_token']
except (__HOLE__, KeyError, simplejson.decoder.JSONDecodeError):
logging.exception(
'Could not read OAuth2.0 token from %s', oauth_file_path)
return None | IOError | dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/tools/dev_appserver.py/GetGoogleSqlOAuth2RefreshToken |
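A minimal Python 3 version of the read-a-JSON-token-defensively pattern above; the file layout {"refresh_token": "..."} is taken from the row, and ValueError covers malformed JSON because json.JSONDecodeError subclasses it.

import json
import logging
import os

def read_refresh_token(path):
    if not os.path.exists(path):
        return None
    try:
        with open(path) as token_file:
            return json.load(token_file)['refresh_token']
    except (IOError, KeyError, ValueError):
        logging.exception('Could not read OAuth2.0 token from %s', path)
        return None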
def LoadTargetModule(handler_path,
cgi_path,
import_hook,
module_dict=sys.modules):
"""Loads a target CGI script by importing it as a Python module.
If the module for the target CGI script has already been loaded before,
the new module will be loaded in its place using the same module object,
possibly overwriting existing module attributes.
Args:
handler_path: CGI path stored in the application configuration (as a path
like 'foo/bar/baz.py'). Should not have $PYTHON_LIB references.
cgi_path: Absolute path to the CGI script file on disk.
import_hook: Instance of HardenedModulesHook to use for module loading.
module_dict: Used for dependency injection.
Returns:
Tuple (module_fullname, script_module, module_code) where:
module_fullname: Fully qualified module name used to import the script.
script_module: The ModuleType object corresponding to the module_fullname.
If the module has not already been loaded, this will be an empty
shell of a module.
module_code: Code object (returned by compile built-in) corresponding
to the cgi_path to run. If the script_module was previously loaded
and has a main() function that can be reused, this will be None.
Raises:
CouldNotFindModuleError if the given handler_path is a file and doesn't have
the expected extension.
"""
CheckScriptExists(cgi_path, handler_path)
module_fullname = GetScriptModuleName(handler_path)
script_module = module_dict.get(module_fullname)
module_code = None
if script_module is not None and ModuleHasValidMainFunction(script_module):
logging.debug('Reusing main() function of module "%s"', module_fullname)
else:
if script_module is None:
script_module = imp.new_module(module_fullname)
script_module.__loader__ = import_hook
try:
module_code = import_hook.get_code(module_fullname)
full_path, search_path, submodule = (
import_hook.GetModuleInfo(module_fullname))
script_module.__file__ = full_path
if search_path is not None:
script_module.__path__ = search_path
except __HOLE__, e:
error = ('%s please see http://www.python.org/peps'
'/pep-0263.html for details (%s)' % (e, handler_path))
raise SyntaxError(error)
except:
exc_type, exc_value, exc_tb = sys.exc_info()
import_error_message = str(exc_type)
if exc_value:
import_error_message += ': ' + str(exc_value)
logging.exception('Encountered error loading module "%s": %s',
module_fullname, import_error_message)
missing_inits = FindMissingInitFiles(cgi_path, module_fullname)
if missing_inits:
logging.warning('Missing package initialization files: %s',
', '.join(missing_inits))
else:
logging.error('Parent package initialization files are present, '
'but must be broken')
independent_load_successful = True
if not os.path.isfile(cgi_path):
independent_load_successful = False
else:
try:
source_file = open(cgi_path)
try:
module_code = compile(source_file.read(), cgi_path, 'exec')
script_module.__file__ = cgi_path
finally:
source_file.close()
except OSError:
independent_load_successful = False
if not independent_load_successful:
raise exc_type, exc_value, exc_tb
module_dict[module_fullname] = script_module
return module_fullname, script_module, module_code | UnicodeDecodeError | dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/tools/dev_appserver.py/LoadTargetModule |
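The imp-based loading machinery above predates importlib; as a rough Python 3 sketch, loading a module object from an explicit file path now looks like this (the function name is hypothetical).

import importlib.util

def load_script_module(fullname, path):
    spec = importlib.util.spec_from_file_location(fullname, path)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)  # runs the module body
    return module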
def ExecuteOrImportScript(config, handler_path, cgi_path, import_hook):
"""Executes a CGI script by importing it as a new module.
This possibly reuses the module's main() function if it is defined and
takes no arguments.
Basic technique lifted from PEP 338 and Python2.5's runpy module. See:
http://www.python.org/dev/peps/pep-0338/
See the section entitled "Import Statements and the Main Module" to understand
why a module named '__main__' cannot do relative imports. To get around this,
the requested module's path could be added to sys.path on each request.
Args:
config: AppInfoExternal instance representing the parsed app.yaml file.
handler_path: CGI path stored in the application configuration (as a path
like 'foo/bar/baz.py'). Should not have $PYTHON_LIB references.
cgi_path: Absolute path to the CGI script file on disk.
import_hook: Instance of HardenedModulesHook to use for module loading.
Returns:
True if the response code had an error status (e.g., 404), or False if it
did not.
Raises:
Any kind of exception that could have been raised when loading the target
module, running a target script, or executing the application code itself.
"""
module_fullname, script_module, module_code = LoadTargetModule(
handler_path, cgi_path, import_hook)
script_module.__name__ = '__main__'
sys.modules['__main__'] = script_module
try:
import pdb
MonkeyPatchPdb(pdb)
if module_code:
exec module_code in script_module.__dict__
else:
script_module.main()
sys.stdout.flush()
sys.stdout.seek(0)
try:
headers = mimetools.Message(sys.stdout)
finally:
sys.stdout.seek(0, 2)
status_header = headers.get('status')
error_response = False
if status_header:
try:
status_code = int(status_header.split(' ', 1)[0])
error_response = status_code >= 400
except __HOLE__:
error_response = True
if not error_response:
try:
parent_package = import_hook.GetParentPackage(module_fullname)
except Exception:
parent_package = None
if parent_package is not None:
submodule = GetSubmoduleName(module_fullname)
setattr(parent_package, submodule, script_module)
return error_response
finally:
script_module.__name__ = module_fullname | ValueError | dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/tools/dev_appserver.py/ExecuteOrImportScript |
def ExecuteCGI(config,
root_path,
handler_path,
cgi_path,
env,
infile,
outfile,
module_dict,
exec_script=ExecuteOrImportScript,
exec_py27_handler=ExecutePy27Handler):
"""Executes Python file in this process as if it were a CGI.
Does not return an HTTP response line. CGIs should output headers followed by
the body content.
The modules in sys.modules should be the same before and after the CGI is
executed, with the specific exception of encodings-related modules, which
cannot be reloaded and thus must always stay in sys.modules.
Args:
config: AppInfoExternal instance representing the parsed app.yaml file.
root_path: Path to the root of the application.
handler_path: CGI path stored in the application configuration (as a path
like 'foo/bar/baz.py'). May contain $PYTHON_LIB references.
cgi_path: Absolute path to the CGI script file on disk.
env: Dictionary of environment variables to use for the execution.
infile: File-like object to read HTTP request input data from.
outfile: FIle-like object to write HTTP response data to.
module_dict: Dictionary in which application-loaded modules should be
preserved between requests. This removes the need to reload modules that
are reused between requests, significantly increasing load performance.
This dictionary must be separate from the sys.modules dictionary.
exec_script: Used for dependency injection.
exec_py27_handler: Used for dependency injection.
"""
if handler_path == '_go_app':
from google.appengine.ext.go import execute_go_cgi
return execute_go_cgi(root_path, handler_path, cgi_path,
env, infile, outfile)
old_module_dict = sys.modules.copy()
old_builtin = __builtin__.__dict__.copy()
old_argv = sys.argv
old_stdin = sys.stdin
old_stdout = sys.stdout
old_stderr = sys.stderr
old_env = os.environ.copy()
old_cwd = os.getcwd()
old_file_type = types.FileType
reset_modules = False
app_log_handler = None
try:
ConnectAndDisconnectChildModules(sys.modules, module_dict)
ClearAllButEncodingsModules(sys.modules)
sys.modules.update(module_dict)
sys.argv = [cgi_path]
sys.stdin = cStringIO.StringIO(infile.getvalue())
sys.stdout = outfile
sys.stderr = LoggingStream()
logservice._global_buffer = logservice.LogsBuffer()
app_log_handler = app_logging.AppLogsHandler()
logging.getLogger().addHandler(app_log_handler)
os.environ.clear()
os.environ.update(env)
cgi_dir = os.path.normpath(os.path.dirname(cgi_path))
root_path = os.path.normpath(os.path.abspath(root_path))
if (cgi_dir.startswith(root_path + os.sep) and
not (config and config.runtime == 'python27')):
os.chdir(cgi_dir)
else:
os.chdir(root_path)
dist.fix_paths(root_path, SDK_ROOT)
hook = HardenedModulesHook(config, sys.modules, root_path)
sys.meta_path = [finder for finder in sys.meta_path
if not isinstance(finder, HardenedModulesHook)]
sys.meta_path.insert(0, hook)
if hasattr(sys, 'path_importer_cache'):
sys.path_importer_cache.clear()
__builtin__.file = FakeFile
__builtin__.open = FakeFile
types.FileType = FakeFile
if not (config and config.runtime == 'python27'):
__builtin__.buffer = NotImplementedFakeClass
sys.modules['__builtin__'] = __builtin__
logging.debug('Executing CGI with env:\n%s', repr(env))
try:
if handler_path and config and config.runtime == 'python27':
reset_modules = exec_py27_handler(config, handler_path, cgi_path, hook)
else:
reset_modules = exec_script(config, handler_path, cgi_path, hook)
except __HOLE__, e:
logging.debug('CGI exited with status: %s', e)
except:
reset_modules = True
raise
finally:
sys.path_importer_cache.clear()
_ClearTemplateCache(sys.modules)
module_dict.update(sys.modules)
ConnectAndDisconnectChildModules(sys.modules, old_module_dict)
ClearAllButEncodingsModules(sys.modules)
sys.modules.update(old_module_dict)
__builtin__.__dict__.update(old_builtin)
sys.argv = old_argv
sys.stdin = old_stdin
sys.stdout = old_stdout
sys.stderr = old_stderr
logging.getLogger().removeHandler(app_log_handler)
os.environ.clear()
os.environ.update(old_env)
os.chdir(old_cwd)
types.FileType = old_file_type | SystemExit | dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/tools/dev_appserver.py/ExecuteCGI |
def ReadDataFile(data_path, openfile=file):
"""Reads a file on disk, returning a corresponding HTTP status and data.
Args:
data_path: Path to the file on disk to read.
openfile: Used for dependency injection.
Returns:
Tuple (status, data) where status is an HTTP response code, and data is
the data read; will be an empty string if an error occurred or the
file was empty.
"""
status = httplib.INTERNAL_SERVER_ERROR
data = ""
try:
data_file = openfile(data_path, 'rb')
try:
data = data_file.read()
finally:
data_file.close()
status = httplib.OK
except (__HOLE__, IOError), e:
logging.error('Error encountered reading file "%s":\n%s', data_path, e)
if e.errno in FILE_MISSING_EXCEPTIONS:
status = httplib.NOT_FOUND
else:
status = httplib.FORBIDDEN
return status, data | OSError | dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/tools/dev_appserver.py/ReadDataFile |
def ValidHeadersRewriter(response):
"""Remove invalid response headers.
Response headers must be printable ascii characters. This is enforced in
production by http_proto.cc IsValidHeader.
This rewriter will remove headers that contain non ascii characters.
"""
for (key, value) in response.headers.items():
try:
key.decode('ascii')
value.decode('ascii')
except __HOLE__:
del response.headers[key] | UnicodeDecodeError | dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/tools/dev_appserver.py/ValidHeadersRewriter |
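The same keep-only-ASCII check, as a tiny Python 3 sketch over a plain dict of header name to value (the real rewriter works on a mimetools message, hence the decode calls there).

def drop_non_ascii_headers(headers):
    for key, value in list(headers.items()):
        try:
            key.encode('ascii')
            value.encode('ascii')
        except UnicodeEncodeError:
            del headers[key]  # header is not pure ASCII

h = {'Content-Type': 'text/html', 'X-Bad': 'caf\u00e9'}
drop_non_ascii_headers(h)
print(h)  # {'Content-Type': 'text/html'}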
def ParseStatusRewriter(response):
"""Parse status header, if it exists.
Handles the server-side 'status' header, which instructs the server to change
the HTTP response code accordingly. Handles the 'location' header, which
issues an HTTP 302 redirect to the client. Also corrects the 'content-length'
header to reflect actual content length in case extra information has been
appended to the response body.
If the 'status' header supplied by the client is invalid, this method will
set the response to a 500 with an error message as content.
"""
location_value = response.headers.getheader('location')
status_value = response.headers.getheader('status')
if status_value:
response_status = status_value
del response.headers['status']
elif location_value:
response_status = '%d Redirecting' % httplib.FOUND
else:
return response
status_parts = response_status.split(' ', 1)
response.status_code, response.status_message = (status_parts + [''])[:2]
try:
response.status_code = int(response.status_code)
except __HOLE__:
response.status_code = 500
response.body = cStringIO.StringIO(
'Error: Invalid "status" header value returned.') | ValueError | dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/tools/dev_appserver.py/ParseStatusRewriter |
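The status-line split above, isolated: "404 Not Found" becomes (404, "Not Found"), and a malformed code degrades to a 500 exactly as in ParseStatusRewriter.

def parse_status(status_value):
    code, message = (status_value.split(' ', 1) + [''])[:2]
    try:
        return int(code), message
    except ValueError:
        return 500, 'Error: Invalid "status" header value returned.'

print(parse_status('302 Redirecting'))  # (302, 'Redirecting')
print(parse_status('banana'))           # (500, 'Error: Invalid "status" ...')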
def AreModuleFilesModified(self):
"""Determines if any monitored files have been modified.
Returns:
True if one or more files have been modified, False otherwise.
"""
for name, (mtime, fname) in self._modification_times.iteritems():
if name not in self._modules:
continue
module = self._modules[name]
try:
if mtime != os.path.getmtime(fname):
self._dirty = True
return True
except __HOLE__, e:
if e.errno in FILE_MISSING_EXCEPTIONS:
self._dirty = True
return True
raise e
return False | OSError | dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/tools/dev_appserver.py/ModuleManager.AreModuleFilesModified |
def UpdateModuleFileModificationTimes(self):
"""Records the current modification times of all monitored modules."""
if not self._dirty:
return
self._modification_times.clear()
for name, module in self._modules.items():
if not isinstance(module, types.ModuleType):
continue
module_file = self.GetModuleFile(module)
if not module_file:
continue
try:
self._modification_times[name] = (os.path.getmtime(module_file),
module_file)
except __HOLE__, e:
if e.errno not in FILE_MISSING_EXCEPTIONS:
raise e
self._dirty = False | OSError | dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/tools/dev_appserver.py/ModuleManager.UpdateModuleFileModificationTimes |
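The mtime-comparison idea behind the two methods above, reduced to a sketch: given a dict of recorded modification times, report the files that changed or vanished (a missing file counts as modified, matching the FILE_MISSING_EXCEPTIONS handling).

import os

def modified_files(mtimes):
    changed = []
    for path, recorded in mtimes.items():
        try:
            if os.path.getmtime(path) != recorded:
                changed.append(path)
        except OSError:
            changed.append(path)  # file disappeared since recording
    return changed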
def CreateRequestHandler(root_path,
login_url,
static_caching=True,
default_partition=None,
interactive_console=True,
secret_hash='xxx'):
"""Creates a new BaseHTTPRequestHandler sub-class.
This class will be used with the Python BaseHTTPServer module's HTTP server.
Python's built-in HTTP server does not support passing context information
along to instances of its request handlers. This function gets around that
by creating a sub-class of the handler in a closure that has access to
this context information.
Args:
root_path: Path to the root of the application running on the server.
login_url: Relative URL which should be used for handling user logins.
static_caching: True if browser caching of static files should be allowed.
default_partition: Default partition to use in the application id.
interactive_console: Whether to add the interactive console.
Returns:
Sub-class of BaseHTTPRequestHandler.
"""
application_module_dict = SetupSharedModules(sys.modules)
application_config_cache = AppConfigCache()
class DevAppServerRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
"""Dispatches URLs using patterns from a URLMatcher.
The URLMatcher is created by loading an application's configuration file.
Executes CGI scripts in the local process so the scripts can use mock
versions of APIs.
HTTP requests that correctly specify a user info cookie
(dev_appserver_login.COOKIE_NAME) will have the 'USER_EMAIL' environment
variable set accordingly. If the user is also an admin, the
'USER_IS_ADMIN' variable will exist and be set to '1'. If the user is not
logged in, 'USER_EMAIL' will be set to the empty string.
On each request, raises an InvalidAppConfigError exception if the
application configuration file in the directory specified by the root_path
argument is invalid.
"""
server_version = 'AppScaleServer/1.10'
module_dict = application_module_dict
module_manager = ModuleManager(application_module_dict)
config_cache = application_config_cache
rewriter_chain = CreateResponseRewritersChain()
def __init__(self, *args, **kwargs):
"""Initializer.
Args:
args: Positional arguments passed to the superclass constructor.
kwargs: Keyword arguments passed to the superclass constructor.
"""
self._log_record_writer = logservice_stub.RequestLogWriter()
BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kwargs)
def version_string(self):
"""Returns server's version string used for Server HTTP header."""
return self.server_version
def do_GET(self):
"""Handle GET requests."""
if self._HasNoBody('GET'):
self._HandleRequest()
def do_POST(self):
"""Handles POST requests."""
self._HandleRequest()
def do_PUT(self):
"""Handle PUT requests."""
self._HandleRequest()
def do_HEAD(self):
"""Handle HEAD requests."""
if self._HasNoBody('HEAD'):
self._HandleRequest()
def do_OPTIONS(self):
"""Handles OPTIONS requests."""
self._HandleRequest()
def do_DELETE(self):
"""Handle DELETE requests."""
self._HandleRequest()
def do_TRACE(self):
"""Handles TRACE requests."""
if self._HasNoBody('TRACE'):
self._HandleRequest()
def _HasNoBody(self, method):
"""Check for request body in HTTP methods where no body is permitted.
If a request body is present a 400 (Invalid request) response is sent.
Args:
method: The request method.
Returns:
True if no request body is present, False otherwise.
"""
content_length = int(self.headers.get('content-length', 0))
if content_length:
body = self.rfile.read(content_length)
logging.warning('Request body in %s is not permitted: %s', method, body)
self.send_response(httplib.BAD_REQUEST)
return False
return True
def _Dispatch(self, dispatcher, socket_infile, outfile, env_dict):
"""Copy request data from socket and dispatch.
Args:
dispatcher: Dispatcher to handle request (MatcherDispatcher).
socket_infile: Original request file stream.
outfile: Output file to write response to.
env_dict: Environment dictionary.
"""
request_descriptor, request_file_name = tempfile.mkstemp('.tmp',
'request.')
try:
request_file = open(request_file_name, 'wb')
try:
CopyStreamPart(self.rfile,
request_file,
int(self.headers.get('content-length', 0)))
finally:
request_file.close()
request_file = open(request_file_name, 'rb')
try:
app_server_request = AppServerRequest(self.path,
None,
self.headers,
request_file,
secret_hash)
dispatcher.Dispatch(app_server_request,
outfile,
base_env_dict=env_dict)
finally:
request_file.close()
finally:
try:
os.close(request_descriptor)
try:
os.remove(request_file_name)
except __HOLE__, err:
if getattr(err, 'winerror', 0) == os_compat.ERROR_SHARING_VIOLATION:
logging.warning('Failed removing %s', request_file_name)
else:
raise
except OSError, err:
if err.errno != errno.ENOENT:
raise
def _HandleRequest(self):
"""Handles any type of request and prints exceptions if they occur."""
host_name = self.headers.get('host') or self.server.server_name
server_name = host_name.split(':', 1)[0]
env_dict = {
'REQUEST_METHOD': self.command,
'REMOTE_ADDR': self.headers.get("X-Real-IP", self.client_address[0]),
'SERVER_SOFTWARE': self.server_version,
'SERVER_NAME': server_name,
'SERVER_PROTOCOL': self.protocol_version,
'SERVER_PORT': str(self.server.server_port),
}
full_url = GetFullURL(server_name, self.server.server_port, self.path)
if len(full_url) > MAX_URL_LENGTH:
msg = 'Requested URI too long: %s' % full_url
logging.error(msg)
self.send_response(httplib.REQUEST_URI_TOO_LONG, msg)
return
tbhandler = cgitb.Hook(file=self.wfile).handle
try:
config, explicit_matcher, from_cache = LoadAppConfig(
root_path, self.module_dict, cache=self.config_cache,
static_caching=static_caching, default_partition=default_partition)
if not from_cache:
self.module_manager.ResetModules()
implicit_matcher = CreateImplicitMatcher(config,
self.module_dict,
root_path,
login_url)
if self.path.startswith('/_ah/admin'):
if any((handler.url == '/_ah/datastore_admin.*'
for handler in config.handlers)):
self.headers['X-AppEngine-Datastore-Admin-Enabled'] = 'True'
self.headers['X-AppEngine-Interactive-Console-Enabled'] = str(
interactive_console)
if config.api_version != API_VERSION:
logging.error(
"API versions cannot be switched dynamically: %r != %r",
config.api_version, API_VERSION)
sys.exit(1)
(exclude, service_match) = ReservedPathFilter(
config.inbound_services).ExcludePath(self.path)
if exclude:
logging.warning(
'Request to %s excluded because %s is not enabled '
'in inbound_services in app.yaml' % (self.path, service_match))
self.send_response(httplib.NOT_FOUND)
return
if config.runtime == 'go':
from google.appengine.ext import go
go.APP_CONFIG = config
version = GetVersionObject()
env_dict['SDK_VERSION'] = version['release']
env_dict['CURRENT_VERSION_ID'] = config.version + ".1"
env_dict['APPLICATION_ID'] = config.application
env_dict['DEFAULT_VERSION_HOSTNAME'] = self.server.frontend_hostport
env_dict['APPENGINE_RUNTIME'] = config.runtime
if config.runtime == 'python27' and config.threadsafe:
env_dict['_AH_THREADSAFE'] = '1'
global _request_time
global _request_id
_request_time = time.time()
_request_id += 1
request_id_hash = _generate_request_id_hash()
env_dict['REQUEST_ID_HASH'] = request_id_hash
os.environ['REQUEST_ID_HASH'] = request_id_hash
cookies = ', '.join(self.headers.getheaders('cookie'))
email_addr, user_id, admin, valid_cookie = \
dev_appserver_login.GetUserInfo(cookies)
self._log_record_writer.write_request_info(
ip=env_dict['REMOTE_ADDR'],
app_id=env_dict['APPLICATION_ID'],
version_id=env_dict['CURRENT_VERSION_ID'],
nickname=email_addr.split('@')[0],
user_agent=self.headers.get('user-agent'),
host=host_name)
dispatcher = MatcherDispatcher(config, login_url, self.module_manager,
[implicit_matcher, explicit_matcher])
outfile = cStringIO.StringIO()
try:
self._Dispatch(dispatcher, self.rfile, outfile, env_dict)
finally:
self.module_manager.UpdateModuleFileModificationTimes()
outfile.flush()
outfile.seek(0)
response = RewriteResponse(outfile, self.rewriter_chain, self.headers,
env_dict)
runtime_response_size = _RemainingDataSize(response.body)
if self.command == 'HEAD' and runtime_response_size > 0:
logging.warning('Dropping unexpected body in response to HEAD '
'request')
response.body = cStringIO.StringIO('')
elif (not response.large_response and
runtime_response_size > MAX_RUNTIME_RESPONSE_SIZE):
logging.error('Response too large: %d, max is %d',
runtime_response_size, MAX_RUNTIME_RESPONSE_SIZE)
response.status_code = 500
response.status_message = 'Forbidden'
new_response = ('HTTP response was too large: %d. '
'The limit is: %d.'
% (runtime_response_size,
MAX_RUNTIME_RESPONSE_SIZE))
response.headers['Content-Length'] = str(len(new_response))
response.body = cStringIO.StringIO(new_response)
except yaml_errors.EventListenerError, e:
title = 'Fatal error when loading application configuration'
msg = '%s:\n%s' % (title, str(e))
logging.error(msg)
self.send_response(httplib.INTERNAL_SERVER_ERROR, title)
self.wfile.write('Content-Type: text/html\r\n\r\n')
self.wfile.write('<pre>%s</pre>' % cgi.escape(msg))
except KeyboardInterrupt, e:
logging.info('Server interrupted by user, terminating')
self.server.stop_serving_forever()
except CompileError, e:
msg = 'Compile error:\n' + e.text + '\n'
logging.error(msg)
self.send_response(httplib.INTERNAL_SERVER_ERROR, 'Compile error')
self.wfile.write('Content-Type: text/plain; charset=utf-8\r\n\r\n')
self.wfile.write(msg)
except ExecuteError, e:
logging.error(e.text)
self.send_response(httplib.INTERNAL_SERVER_ERROR, 'Execute error')
self.wfile.write('Content-Type: text/html; charset=utf-8\r\n\r\n')
self.wfile.write('<title>App failure</title>\n')
self.wfile.write(e.text + '\n<pre>\n')
for l in e.log:
self.wfile.write(cgi.escape(l))
self.wfile.write('</pre>\n')
except:
msg = 'Exception encountered handling request'
logging.exception(msg)
self.send_response(httplib.INTERNAL_SERVER_ERROR, msg)
tbhandler()
else:
try:
self.send_response(response.status_code, response.status_message)
self.wfile.write(response.header_data)
self.wfile.write('\r\n')
shutil.copyfileobj(response.body, self.wfile, COPY_BLOCK_SIZE)
except (IOError, OSError), e:
if e.errno not in [errno.EPIPE, os_compat.WSAECONNABORTED]:
raise e
except socket.error, e:
logging.error("Socket exception: %s" % str(e))
self.server.stop_serving_forever()
def log_error(self, format, *args):
"""Redirect error messages through the logging module."""
logging.error(format, *args)
def log_message(self, format, *args):
"""Redirect log messages through the logging module."""
if hasattr(self, 'path'):
logging.debug(format, *args)
else:
logging.info(format, *args)
def log_request(self, code='-', size='-'):
"""Indicate that this request has completed."""
BaseHTTPServer.BaseHTTPRequestHandler.log_request(self, code, size)
if code == '-':
code = 0
if size == '-':
size = 0
logservice.logs_buffer().flush()
self._log_record_writer.write(self.command, self.path, code, size,
self.request_version)
return DevAppServerRequestHandler | OSError | dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/tools/dev_appserver.py/CreateRequestHandler |
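The closure-based handler factory pattern used by CreateRequestHandler, in miniature: BaseHTTPRequestHandler offers no constructor hook for context, so configuration is captured lexically by a nested class (Python 3 http.server; the greeting payload is hypothetical).

import http.server

def create_handler(greeting):
    class Handler(http.server.BaseHTTPRequestHandler):
        def do_GET(self):
            body = greeting.encode('utf-8')  # captured from the closure
            self.send_response(200)
            self.send_header('Content-Length', str(len(body)))
            self.end_headers()
            self.wfile.write(body)
    return Handler

# http.server.HTTPServer(('localhost', 8080), create_handler('hi')).serve_forever()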
def ReadAppConfig(appinfo_path, parse_app_config=appinfo_includes.Parse):
"""Reads app.yaml file and returns its app id and list of URLMap instances.
Args:
appinfo_path: String containing the path to the app.yaml file.
parse_app_config: Used for dependency injection.
Returns:
AppInfoExternal instance.
Raises:
If the config file could not be read or the config does not contain any
URLMap instances, this function will raise an InvalidAppConfigError
exception.
"""
try:
appinfo_file = file(appinfo_path, 'r')
except __HOLE__, unused_e:
raise InvalidAppConfigError(
'Application configuration could not be read from "%s"' % appinfo_path)
try:
return parse_app_config(appinfo_file)
finally:
appinfo_file.close() | IOError | dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/tools/dev_appserver.py/ReadAppConfig |
def ReadCronConfig(croninfo_path, parse_cron_config=croninfo.LoadSingleCron):
"""Reads cron.yaml file and returns a list of CronEntry instances.
Args:
croninfo_path: String containing the path to the cron.yaml file.
parse_cron_config: Used for dependency injection.
Returns:
A CronInfoExternal object.
Raises:
If the config file is unreadable, empty or invalid, this function will
raise an InvalidAppConfigError or a MalformedCronConfiguration exception.
"""
try:
croninfo_file = file(croninfo_path, 'r')
except __HOLE__, e:
raise InvalidAppConfigError(
'Cron configuration could not be read from "%s": %s'
% (croninfo_path, e))
try:
return parse_cron_config(croninfo_file)
finally:
croninfo_file.close() | IOError | dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/tools/dev_appserver.py/ReadCronConfig |
def _RemoveFile(file_path):
if file_path and os.path.lexists(file_path):
logging.info('Attempting to remove file at %s', file_path)
try:
os.remove(file_path)
except __HOLE__, e:
logging.warning('Removing file failed: %s', e) | OSError | dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/tools/dev_appserver.py/_RemoveFile |
def SetupStubs(app_id, **config):
"""Sets up testing stubs of APIs.
Args:
app_id: Application ID being served.
config: keyword arguments.
Keywords:
root_path: Root path to the directory of the application which should
contain the app.yaml, index.yaml, and queue.yaml files.
login_url: Relative URL which should be used for handling user login/logout.
datastore_path: Path to the file to store Datastore file stub data in.
prospective_search_path: Path to the file to store Prospective Search stub
data in.
use_sqlite: Use the SQLite stub for the datastore.
high_replication: Use the high replication consistency model
history_path: DEPRECATED, No-op.
clear_datastore: If the datastore should be cleared on startup.
smtp_host: SMTP host used for sending test mail.
smtp_port: SMTP port.
smtp_user: SMTP user.
smtp_password: SMTP password.
mysql_host: MySQL host.
mysql_port: MySQL port.
mysql_user: MySQL user.
mysql_password: MySQL password.
mysql_socket: MySQL socket.
enable_sendmail: Whether to use sendmail as an alternative to SMTP.
show_mail_body: Whether to log the body of emails.
remove: Used for dependency injection.
disable_task_running: True if tasks should not automatically run after
they are enqueued.
task_retry_seconds: How long to wait after an auto-running task before it
is tried again.
trusted: True if this app can access data belonging to other apps. This
behavior is different from the real app server and should be left False
except for advanced uses of dev_appserver.
port: The port that this dev_appserver is bound to. Defaults to 8080
address: The host that this dev_appsever is running on. Defaults to
localhost.
search_index_path: Path to the file to store search indexes in.
    clear_search_index: If the search indexes should be cleared on startup.
"""
root_path = config.get('root_path', None)
login_url = config['login_url']
datastore_path = config['datastore_path']
clear_datastore = config['clear_datastore']
prospective_search_path = config.get('prospective_search_path', '')
clear_prospective_search = config.get('clear_prospective_search', False)
use_sqlite = config.get('use_sqlite', False)
high_replication = config.get('high_replication', False)
require_indexes = config.get('require_indexes', False)
mysql_host = config.get('mysql_host', None)
mysql_port = config.get('mysql_port', 3306)
mysql_user = config.get('mysql_user', None)
mysql_password = config.get('mysql_password', None)
mysql_socket = config.get('mysql_socket', None)
smtp_host = config.get('smtp_host', None)
smtp_port = config.get('smtp_port', 25)
smtp_user = config.get('smtp_user', '')
smtp_password = config.get('smtp_password', '')
enable_sendmail = config.get('enable_sendmail', False)
show_mail_body = config.get('show_mail_body', False)
remove = config.get('remove', os.remove)
disable_task_running = config.get('disable_task_running', False)
task_retry_seconds = config.get('task_retry_seconds', 30)
logs_path = config.get('logs_path', ':memory:')
trusted = config.get('trusted', False)
clear_search_index = config.get('clear_search_indexes', False)
search_index_path = config.get('search_indexes_path', None)
_use_atexit_for_datastore_stub = config.get('_use_atexit_for_datastore_stub',
False)
port_sqlite_data = config.get('port_sqlite_data', False)
# AppScale
# Set the port and server to the Nginx proxy.
serve_port = int(config.get('NGINX_PORT', 8080))
serve_address = config.get('NGINX_HOST', 'localhost')
xmpp_path = config['xmpp_path']
uaserver_path = config['uaserver_path']
login_server = config['login_server']
cookie_secret = config['COOKIE_SECRET']
os.environ['APPLICATION_ID'] = app_id
os.environ['REQUEST_ID_HASH'] = ''
if clear_prospective_search and prospective_search_path:
_RemoveFile(prospective_search_path)
if clear_datastore:
_RemoveFile(datastore_path)
if clear_search_index:
_RemoveFile(search_index_path)
apiproxy_stub_map.apiproxy = apiproxy_stub_map.APIProxyStubMap()
apiproxy_stub_map.apiproxy.RegisterStub(
'app_identity_service',
app_identity_stub.AppIdentityServiceStub())
apiproxy_stub_map.apiproxy.RegisterStub(
'capability_service',
capability_stub.CapabilityServiceStub())
datastore = datastore_distributed.DatastoreDistributed(
app_id, datastore_path, require_indexes=require_indexes,
trusted=trusted, root_path=root_path)
apiproxy_stub_map.apiproxy.ReplaceStub(
'datastore_v3', datastore)
apiproxy_stub_map.apiproxy.RegisterStub(
'mail',
mail_stub.MailServiceStub(smtp_host,
smtp_port,
smtp_user,
smtp_password,
enable_sendmail=enable_sendmail,
show_mail_body=show_mail_body))
apiproxy_stub_map.apiproxy.RegisterStub(
'memcache',
memcache_distributed.MemcacheService())
hash_secret = hashlib.sha1(app_id + '/'+ cookie_secret).hexdigest()
apiproxy_stub_map.apiproxy.RegisterStub(
'taskqueue',
taskqueue_distributed.TaskQueueServiceStub(app_id, serve_address, serve_port))
apiproxy_stub_map.apiproxy.RegisterStub(
'urlfetch',
urlfetch_stub.URLFetchServiceStub())
apiproxy_stub_map.apiproxy.RegisterStub(
'xmpp',
xmpp_service_real.XmppService(domain=xmpp_path, uaserver=uaserver_path))
from google.appengine import api
sys.modules['google.appengine.api.rdbms'] = rdbms_mysqldb
api.rdbms = rdbms_mysqldb
rdbms_mysqldb.SetConnectKwargs(host=mysql_host, port=mysql_port,
user=mysql_user, passwd=mysql_password,
unix_socket=mysql_socket)
fixed_login_url = '%s?%s=%%s' % (login_url,
dev_appserver_login.CONTINUE_PARAM)
fixed_logout_url = 'https://%s:%s/logout?%s=%%s' % (login_server,
DASHBOARD_HTTPS_PORT, dev_appserver_login.CONTINUE_PARAM)
apiproxy_stub_map.apiproxy.RegisterStub(
'user',
user_service_stub.UserServiceStub(login_url=fixed_login_url,
logout_url=fixed_logout_url))
apiproxy_stub_map.apiproxy.RegisterStub(
'channel',
channel_service_stub.ChannelServiceStub())
apiproxy_stub_map.apiproxy.RegisterStub(
'matcher',
prospective_search_stub.ProspectiveSearchStub(
prospective_search_path,
apiproxy_stub_map.apiproxy.GetStub('taskqueue')))
apiproxy_stub_map.apiproxy.RegisterStub(
'remote_socket',
_remote_socket_stub.RemoteSocketServiceStub())
apiproxy_stub_map.apiproxy.RegisterStub(
'search',
simple_search_stub.SearchServiceStub(index_file=search_index_path))
try:
from google.appengine.api.images import images_stub
host_prefix = 'http://%s:%d' % (serve_address, serve_port)
apiproxy_stub_map.apiproxy.RegisterStub(
'images',
images_stub.ImagesServiceStub(host_prefix=host_prefix))
except __HOLE__, e:
logging.warning('Could not initialize images API; you are likely missing '
'the Python "PIL" module. ImportError: %s', e)
from google.appengine.api.images import images_not_implemented_stub
apiproxy_stub_map.apiproxy.RegisterStub(
'images',
images_not_implemented_stub.ImagesNotImplementedServiceStub())
blob_storage = datastore_blob_storage.DatastoreBlobStorage(
app_id)
apiproxy_stub_map.apiproxy.RegisterStub(
'blobstore',
blobstore_stub.BlobstoreServiceStub(blob_storage))
apiproxy_stub_map.apiproxy.RegisterStub(
'file',
file_service_stub.FileServiceStub(blob_storage))
apiproxy_stub_map.apiproxy.RegisterStub(
'logservice',
logservice_stub.LogServiceStub(True))
system_service_stub = system_stub.SystemServiceStub()
apiproxy_stub_map.apiproxy.RegisterStub('system', system_service_stub) | ImportError | dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/tools/dev_appserver.py/SetupStubs |
def start(self):
self.instructions()
# start ruler drawing operation
p_canvas = self.fitsimage.get_canvas()
try:
obj = p_canvas.getObjectByTag(self.layertag)
except __HOLE__:
# Add ruler layer
p_canvas.add(self.canvas, tag=self.layertag)
self.resume() | KeyError | dataset/ETHPy150Open ejeschke/ginga/ginga/misc/plugins/Compose.py/Compose.start |
@staticmethod
def member_membership_paid(row):
"""
Whether the member has paid within 12 months of start_date
anniversary
@ToDo: Formula should come from the deployment_template
"""
T = current.T
#try:
# exempted = row["member_membership.fee_exemption"]
#except AttributeError:
# exempted = False
        #if exempted:
# return T("exempted")
try:
start_date = row["member_membership.start_date"]
except __HOLE__:
# not available
start_date = None
try:
paid_date = row["member_membership.membership_paid"]
except AttributeError:
# not available
paid_date = None
if start_date:
PAID = T("paid")
OVERDUE = T("overdue")
LAPSED = T("expired")
lapsed = datetime.timedelta(days=183) # 6 months
year = datetime.timedelta(days=365)
now = current.request.utcnow.date()
if not paid_date:
# Never renewed since Membership started
# => due within 1 year
due = start_date + year
if now < due:
return PAID
elif now > (due + lapsed):
return LAPSED
else:
return OVERDUE
now_month = now.month
start_month = start_date.month
if now_month > start_month:
due = datetime.date(now.year, start_month, start_date.day)
elif now_month == start_month:
now_day = now.day
start_day = start_date.day
if now_day >= start_day:
due = datetime.date(now.year, start_month, start_day)
else:
due = datetime.date((now.year - 1), start_month, start_day)
else:
# now_month < start_month
due = datetime.date((now.year - 1), start_month, start_date.day)
if paid_date >= due:
return PAID
elif (due - paid_date) > lapsed:
return LAPSED
else:
return OVERDUE
return current.messages["NONE"]
# --------------------------------------------------------------------- | AttributeError | dataset/ETHPy150Open sahana/eden/modules/s3db/member.py/S3MembersModel.member_membership_paid |
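A worked example of the anniversary arithmetic above (dates are made up; the year shift would need extra care for a Feb 29 start date): find the most recent membership anniversary on or before today, then classify the last payment against it.

import datetime

start = datetime.date(2020, 3, 15)
paid = datetime.date(2023, 2, 1)
now = datetime.date(2023, 6, 1)

due_year = now.year if (now.month, now.day) >= (start.month, start.day) else now.year - 1
due = datetime.date(due_year, start.month, start.day)
lapsed = datetime.timedelta(days=183)  # 6 months

if paid >= due:
    status = 'paid'
elif due - paid > lapsed:
    status = 'expired'
else:
    status = 'overdue'
print(due, status)  # 2023-03-15 overdue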
def force_unicode(s, encoding='utf-8', strings_only=False, errors='strict'):
"""
Similar to smart_unicode, except that lazy instances are resolved to
strings, rather than kept as lazy objects.
If strings_only is True, don't convert (some) non-string-like objects.
"""
# Handle the common case first, saves 30-40% in performance when s
# is an instance of unicode. This function gets called often in that
# setting.
if isinstance(s, unicode):
return s
if strings_only and is_protected_type(s):
return s
try:
if not isinstance(s, basestring,):
if hasattr(s, '__unicode__'):
s = unicode(s)
else:
try:
s = unicode(str(s), encoding, errors)
except UnicodeEncodeError:
if not isinstance(s, Exception):
raise
# If we get to here, the caller has passed in an Exception
# subclass populated with non-ASCII data without special
# handling to display as a string. We need to handle this
# without raising a further exception. We do an
# approximation to what the Exception's standard str()
# output should be.
s = u' '.join([force_unicode(arg, encoding, strings_only,
errors) for arg in s])
elif not isinstance(s, unicode):
# Note: We use .decode() here, instead of unicode(s, encoding,
# errors), so that if s is a SafeString, it ends up being a
# SafeUnicode at the end.
s = s.decode(encoding, errors)
except __HOLE__, e:
if not isinstance(s, Exception):
raise GitModelUnicodeDecodeError(s, *e.args)
else:
# If we get to here, the caller has passed in an Exception
# subclass populated with non-ASCII bytestring data without a
# working unicode method. Try to handle this without raising a
# further exception by individually forcing the exception args
# to unicode.
s = u' '.join([force_unicode(arg, encoding, strings_only,
errors) for arg in s])
return s | UnicodeDecodeError | dataset/ETHPy150Open bendavis78/python-gitmodel/gitmodel/utils/encoding.py/force_unicode |
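In Python 3 the same coercion collapses to a few lines, since str is already unicode; a minimal sketch without the lazy-object and broken-Exception handling above:

def force_text(s, encoding='utf-8', errors='strict'):
    if isinstance(s, str):
        return s
    if isinstance(s, bytes):
        return s.decode(encoding, errors)
    return str(s)

print(force_text(b'caf\xc3\xa9'))  # café
print(force_text(42))              # 42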
def _get_wmi_setting(wmi_class_name, setting, server):
'''
Get the value of the setting for the provided class.
'''
    ret = None
    with salt.utils.winapi.Com():
try:
connection = wmi.WMI(namespace=_WMI_NAMESPACE)
wmi_class = getattr(connection, wmi_class_name)
objs = wmi_class([setting], Name=server)[0]
ret = getattr(objs, setting)
except wmi.x_wmi as error:
_LOG.error('Encountered WMI error: %s', error.com_error)
except (AttributeError, __HOLE__) as error:
_LOG.error('Error getting %s: %s', wmi_class_name, error)
return ret | IndexError | dataset/ETHPy150Open saltstack/salt/salt/modules/win_smtp_server.py/_get_wmi_setting |
def _set_wmi_setting(wmi_class_name, setting, value, server):
'''
Set the value of the setting for the provided class.
'''
with salt.utils.winapi.Com():
try:
connection = wmi.WMI(namespace=_WMI_NAMESPACE)
wmi_class = getattr(connection, wmi_class_name)
objs = wmi_class(Name=server)[0]
        except wmi.x_wmi as error:
            _LOG.error('Encountered WMI error: %s', error.com_error)
            return False
        except (AttributeError, __HOLE__) as error:
            _LOG.error('Error getting %s: %s', wmi_class_name, error)
            return False
try:
setattr(objs, setting, value)
return True
except wmi.x_wmi as error:
_LOG.error('Encountered WMI error: %s', error.com_error)
except AttributeError as error:
_LOG.error('Error setting %s: %s', setting, error)
return False | IndexError | dataset/ETHPy150Open saltstack/salt/salt/modules/win_smtp_server.py/_set_wmi_setting |
def get_log_format_types():
'''
Get all available log format names and ids.
:return: A dictionary of the log format names and ids.
:rtype: dict
CLI Example:
.. code-block:: bash
salt '*' win_smtp_server.get_log_format_types
'''
ret = dict()
prefix = 'logging/'
with salt.utils.winapi.Com():
try:
connection = wmi.WMI(namespace=_WMI_NAMESPACE)
objs = connection.IISLogModuleSetting()
# Remove the prefix from the name.
for obj in objs:
name = str(obj.Name).replace(prefix, '', 1)
ret[name] = str(obj.LogModuleId)
except wmi.x_wmi as error:
_LOG.error('Encountered WMI error: %s', error.com_error)
except (AttributeError, __HOLE__) as error:
_LOG.error('Error getting IISLogModuleSetting: %s', error)
if not ret:
_LOG.error('Unable to get log format types.')
return ret | IndexError | dataset/ETHPy150Open saltstack/salt/salt/modules/win_smtp_server.py/get_log_format_types |
def get_servers():
'''
Get the SMTP virtual server names.
:return: A list of the SMTP virtual servers.
:rtype: list
CLI Example:
.. code-block:: bash
salt '*' win_smtp_server.get_servers
'''
ret = list()
with salt.utils.winapi.Com():
try:
connection = wmi.WMI(namespace=_WMI_NAMESPACE)
objs = connection.IIsSmtpServerSetting()
for obj in objs:
ret.append(str(obj.Name))
except wmi.x_wmi as error:
_LOG.error('Encountered WMI error: %s', error.com_error)
except (AttributeError, __HOLE__) as error:
_LOG.error('Error getting IIsSmtpServerSetting: %s', error)
_LOG.debug('Found SMTP servers: %s', ret)
return ret | IndexError | dataset/ETHPy150Open saltstack/salt/salt/modules/win_smtp_server.py/get_servers |
def get_server_setting(settings, server=_DEFAULT_SERVER):
'''
Get the value of the setting for the SMTP virtual server.
:param str settings: A list of the setting names.
:param str server: The SMTP server name.
:return: A dictionary of the provided settings and their values.
:rtype: dict
CLI Example:
.. code-block:: bash
salt '*' win_smtp_server.get_server_setting settings="['MaxRecipients']"
'''
ret = dict()
if not settings:
_LOG.warning('No settings provided.')
return ret
with salt.utils.winapi.Com():
try:
connection = wmi.WMI(namespace=_WMI_NAMESPACE)
objs = connection.IIsSmtpServerSetting(settings, Name=server)[0]
for setting in settings:
ret[setting] = str(getattr(objs, setting))
except wmi.x_wmi as error:
_LOG.error('Encountered WMI error: %s', error.com_error)
except (__HOLE__, IndexError) as error:
_LOG.error('Error getting IIsSmtpServerSetting: %s', error)
return ret | AttributeError | dataset/ETHPy150Open saltstack/salt/salt/modules/win_smtp_server.py/get_server_setting |
def set_server_setting(settings, server=_DEFAULT_SERVER):
'''
Set the value of the setting for the SMTP virtual server.
.. note::
The setting names are case-sensitive.
:param str settings: A dictionary of the setting names and their values.
:param str server: The SMTP server name.
:return: A boolean representing whether all changes succeeded.
:rtype: bool
CLI Example:
.. code-block:: bash
salt '*' win_smtp_server.set_server_setting settings="{'MaxRecipients': '500'}"
'''
if not settings:
_LOG.warning('No settings provided')
return False
# Some fields are formatted like '{data}'. Salt tries to convert these to dicts
# automatically on input, so convert them back to the proper format.
settings = _normalize_server_settings(**settings)
current_settings = get_server_setting(settings=settings.keys(), server=server)
if settings == current_settings:
_LOG.debug('Settings already contain the provided values.')
return True
# Note that we must fetch all properties of IIsSmtpServerSetting below, since
# filtering for specific properties and then attempting to set them will cause
# an error like: wmi.x_wmi Unexpected COM Error -2147352567
with salt.utils.winapi.Com():
try:
connection = wmi.WMI(namespace=_WMI_NAMESPACE)
objs = connection.IIsSmtpServerSetting(Name=server)[0]
        except wmi.x_wmi as error:
            _LOG.error('Encountered WMI error: %s', error.com_error)
            return False
        except (AttributeError, __HOLE__) as error:
            _LOG.error('Error getting IIsSmtpServerSetting: %s', error)
            return False
for setting in settings:
if str(settings[setting]) != str(current_settings[setting]):
try:
setattr(objs, setting, settings[setting])
except wmi.x_wmi as error:
_LOG.error('Encountered WMI error: %s', error.com_error)
except AttributeError as error:
_LOG.error('Error setting %s: %s', setting, error)
    # Get the settings post-change so that we can verify that all properties
# were modified successfully. Track the ones that weren't.
new_settings = get_server_setting(settings=settings.keys(), server=server)
failed_settings = dict()
for setting in settings:
if str(settings[setting]) != str(new_settings[setting]):
failed_settings[setting] = settings[setting]
if failed_settings:
_LOG.error('Failed to change settings: %s', failed_settings)
return False
_LOG.debug('Settings configured successfully: %s', settings.keys())
return True | IndexError | dataset/ETHPy150Open saltstack/salt/salt/modules/win_smtp_server.py/set_server_setting |
def __getattr__(self, item):
try:
return self.dict[item]
except __HOLE__:
raise AttributeError("Missing user-supplied argument '%s' (set with: --arg %s=VALUE)" % (item, item)) | KeyError | dataset/ETHPy150Open jlevy/ghizmo/ghizmo/main.py/UserArgs.__getattr__ |
def parse_tag_value(tag_list):
"""Parse a DKIM Tag=Value list.
Interprets the syntax specified by RFC4871 section 3.2.
Assumes that folding whitespace is already unfolded.
@param tag_list: A string containing a DKIM Tag=Value list.
"""
tags = {}
tag_specs = tag_list.strip().split(b';')
# Trailing semicolons are valid.
if not tag_specs[-1]:
tag_specs.pop()
for tag_spec in tag_specs:
try:
key, value = tag_spec.split(b'=', 1)
except __HOLE__:
raise InvalidTagSpec(tag_spec)
if key.strip() in tags:
raise DuplicateTag(key.strip())
tags[key.strip()] = value.strip()
return tags | ValueError | dataset/ETHPy150Open Flolagale/mailin/python/dkim/util.py/parse_tag_value |
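A simplified, self-contained stand-in for parse_tag_value (it drops the DuplicateTag and InvalidTagSpec errors the full version raises), showing the expected input and output:

def parse_tags(tag_list):
    tags = {}
    for spec in tag_list.strip().strip(b';').split(b';'):
        key, _, value = spec.partition(b'=')
        tags[key.strip()] = value.strip()
    return tags

print(parse_tags(b'v=1; a=rsa-sha256; d=example.com;'))
# {b'v': b'1', b'a': b'rsa-sha256', b'd': b'example.com'}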