repository_name (string, 5-67 chars) | func_path_in_repository (string, 4-234 chars) | func_name (string, 0-314 chars) | whole_func_string (string, 52-3.87M chars) | language (6 classes) | func_code_string (string, 39-1.84M chars) | func_code_tokens (sequence, 15-672k tokens) | func_documentation_string (string, 1-47.2k chars) | func_documentation_tokens (sequence, 1-3.92k tokens) | split_name (1 class) | func_code_url (string, 85-339 chars) |
---|---|---|---|---|---|---|---|---|---|---|
gem/oq-engine | openquake/hmtk/sources/complex_fault_source.py | mtkComplexFaultSource._get_minmax_edges | def _get_minmax_edges(self, edge):
'''
Updates the upper and lower depths based on the input edges
'''
if isinstance(edge, Line):
            # For an instance of the Line class, loop over its points
depth_vals = np.array([node.depth for node in edge.points])
else:
depth_vals = edge[:, 2]
temp_upper_depth = np.min(depth_vals)
if not self.upper_depth:
self.upper_depth = temp_upper_depth
else:
if temp_upper_depth < self.upper_depth:
self.upper_depth = temp_upper_depth
temp_lower_depth = np.max(depth_vals)
if not self.lower_depth:
self.lower_depth = temp_lower_depth
else:
if temp_lower_depth > self.lower_depth:
self.lower_depth = temp_lower_depth | python | def _get_minmax_edges(self, edge):
if isinstance(edge, Line):
depth_vals = np.array([node.depth for node in edge.points])
else:
depth_vals = edge[:, 2]
temp_upper_depth = np.min(depth_vals)
if not self.upper_depth:
self.upper_depth = temp_upper_depth
else:
if temp_upper_depth < self.upper_depth:
self.upper_depth = temp_upper_depth
temp_lower_depth = np.max(depth_vals)
if not self.lower_depth:
self.lower_depth = temp_lower_depth
else:
if temp_lower_depth > self.lower_depth:
            self.lower_depth = temp_lower_depth | [func_code_tokens omitted: token-by-token duplicate of the code above] | Updates the upper and lower depths based on the input edges | [func_documentation_tokens omitted: tokenized docstring
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/sources/complex_fault_source.py#L153-L175 |
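A minimal standalone sketch of the min/max bookkeeping in _get_minmax_edges (the MinMaxTracker class below is hypothetical, not part of oq-engine; it tests `is None` instead of truthiness, which also handles a legitimate upper depth of 0.0):

    import numpy as np

    class MinMaxTracker:
        """Keep the shallowest and deepest depth seen across fault edges."""
        def __init__(self):
            self.upper_depth = None
            self.lower_depth = None

        def update(self, depth_vals):
            dmin, dmax = np.min(depth_vals), np.max(depth_vals)
            if self.upper_depth is None or dmin < self.upper_depth:
                self.upper_depth = dmin
            if self.lower_depth is None or dmax > self.lower_depth:
                self.lower_depth = dmax

    tracker = MinMaxTracker()
    tracker.update(np.array([0.0, 5.0, 10.0]))  # first edge
    tracker.update(np.array([2.0, 12.0]))       # second edge
    print(tracker.upper_depth, tracker.lower_depth)  # 0.0 12.0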
gem/oq-engine | openquake/hmtk/sources/complex_fault_source.py | mtkComplexFaultSource.create_oqhazardlib_source | def create_oqhazardlib_source(self, tom, mesh_spacing, use_defaults=False):
"""
    Creates an instance of the source model as
    :class:`openquake.hazardlib.source.complex_fault.ComplexFaultSource`
"""
if not self.mfd:
raise ValueError("Cannot write to hazardlib without MFD")
return ComplexFaultSource(
self.id,
self.name,
self.trt,
self.mfd,
mesh_spacing,
conv.mag_scale_rel_to_hazardlib(self.mag_scale_rel, use_defaults),
conv.render_aspect_ratio(self.rupt_aspect_ratio, use_defaults),
tom,
self.fault_edges,
self.rake) | python | def create_oqhazardlib_source(self, tom, mesh_spacing, use_defaults=False):
if not self.mfd:
raise ValueError("Cannot write to hazardlib without MFD")
return ComplexFaultSource(
self.id,
self.name,
self.trt,
self.mfd,
mesh_spacing,
conv.mag_scale_rel_to_hazardlib(self.mag_scale_rel, use_defaults),
conv.render_aspect_ratio(self.rupt_aspect_ratio, use_defaults),
tom,
self.fault_edges,
        self.rake) | [func_code_tokens omitted: token-by-token duplicate of the code above] | Creates an instance of the source model as :class:`openquake.hazardlib.source.complex_fault.ComplexFaultSource` | [func_documentation_tokens omitted: tokenized docstring
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/sources/complex_fault_source.py#L225-L242 |
gem/oq-engine | openquake/hazardlib/gsim/kotha_2016.py | KothaEtAl2016.get_mean_and_stddevs | def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
"""
See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values.
"""
# extracting dictionary of coefficients specific to required
# intensity measure type.
C = self.COEFFS[imt]
mean = (self._get_magnitude_term(C, rup.mag) +
self._get_distance_term(C, dists.rjb, rup.mag) +
self._get_site_term(C, sites.vs30))
        # GMPE units are m/s^2 for SA/PGA and m/s for PGV (corrected in an
        # Erratum); convert to g and cm/s respectively
if imt.name in "SA PGA":
mean = np.log(np.exp(mean) / g)
else:
            # For PGV convert from m/s to cm/s
mean = np.log(np.exp(mean) * 100.)
# Get standard deviations
stddevs = self._get_stddevs(C, stddev_types, dists.rjb.shape)
return mean, stddevs | python | def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
C = self.COEFFS[imt]
mean = (self._get_magnitude_term(C, rup.mag) +
self._get_distance_term(C, dists.rjb, rup.mag) +
self._get_site_term(C, sites.vs30))
if imt.name in "SA PGA":
mean = np.log(np.exp(mean) / g)
else:
mean = np.log(np.exp(mean) * 100.)
stddevs = self._get_stddevs(C, stddev_types, dists.rjb.shape)
        return mean, stddevs | [func_code_tokens omitted: token-by-token duplicate of the code above] | See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values. | [func_documentation_tokens omitted: tokenized docstring
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/kotha_2016.py#L74-L99 |
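To make the unit handling concrete, a small sketch of the log-space conversion applied above (standard gravity hardcoded here for illustration; the real module imports g, presumably from scipy.constants):

    import numpy as np
    g = 9.80665                             # standard gravity, m/s^2
    ln_mean = np.array([0.5, 1.2])          # hypothetical ln-mean values
    print(np.log(np.exp(ln_mean) / g))      # SA/PGA: m/s^2 -> g
    print(np.log(np.exp(ln_mean) * 100.))   # PGV: m/s -> cm/s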
gem/oq-engine | openquake/hazardlib/gsim/kotha_2016.py | KothaEtAl2016._get_magnitude_term | def _get_magnitude_term(self, C, mag):
"""
Returns the magnitude scaling term - equation 3
"""
if mag >= self.CONSTS["Mh"]:
return C["e1"] + C["b3"] * (mag - self.CONSTS["Mh"])
else:
return C["e1"] + (C["b1"] * (mag - self.CONSTS["Mh"])) +\
(C["b2"] * (mag - self.CONSTS["Mh"]) ** 2.) | python | def _get_magnitude_term(self, C, mag):
if mag >= self.CONSTS["Mh"]:
return C["e1"] + C["b3"] * (mag - self.CONSTS["Mh"])
else:
return C["e1"] + (C["b1"] * (mag - self.CONSTS["Mh"])) +\
(C["b2"] * (mag - self.CONSTS["Mh"]) ** 2.) | [
"def",
"_get_magnitude_term",
"(",
"self",
",",
"C",
",",
"mag",
")",
":",
"if",
"mag",
">=",
"self",
".",
"CONSTS",
"[",
"\"Mh\"",
"]",
":",
"return",
"C",
"[",
"\"e1\"",
"]",
"+",
"C",
"[",
"\"b3\"",
"]",
"*",
"(",
"mag",
"-",
"self",
".",
"CONSTS",
"[",
"\"Mh\"",
"]",
")",
"else",
":",
"return",
"C",
"[",
"\"e1\"",
"]",
"+",
"(",
"C",
"[",
"\"b1\"",
"]",
"*",
"(",
"mag",
"-",
"self",
".",
"CONSTS",
"[",
"\"Mh\"",
"]",
")",
")",
"+",
"(",
"C",
"[",
"\"b2\"",
"]",
"*",
"(",
"mag",
"-",
"self",
".",
"CONSTS",
"[",
"\"Mh\"",
"]",
")",
"**",
"2.",
")"
] | Returns the magnitude scaling term - equation 3 | [
"Returns",
"the",
"magnitude",
"scaling",
"term",
"-",
"equation",
"3"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/kotha_2016.py#L101-L109 |
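The hinge at Mh can be exercised with a standalone version of equation 3 (coefficient values below are placeholders, not the published ones):

    def mag_term(mag, e1=1.0, b1=2.0, b2=-0.2, b3=1.2, mh=6.75):
        # Linear above the hinge magnitude, linear plus quadratic below it
        dm = mag - mh
        return e1 + b3 * dm if mag >= mh else e1 + b1 * dm + b2 * dm ** 2

    print(mag_term(5.0), mag_term(7.5))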
gem/oq-engine | openquake/hazardlib/gsim/kotha_2016.py | KothaEtAl2016._get_distance_term | def _get_distance_term(self, C, rjb, mag):
"""
Returns the general distance scaling term - equation 2
"""
c_3 = self._get_anelastic_coeff(C)
rval = np.sqrt(rjb ** 2. + C["h"] ** 2.)
return (C["c1"] + C["c2"] * (mag - self.CONSTS["Mref"])) *\
np.log(rval / self.CONSTS["Rref"]) +\
c_3 * (rval - self.CONSTS["Rref"]) | python | def _get_distance_term(self, C, rjb, mag):
c_3 = self._get_anelastic_coeff(C)
rval = np.sqrt(rjb ** 2. + C["h"] ** 2.)
return (C["c1"] + C["c2"] * (mag - self.CONSTS["Mref"])) *\
np.log(rval / self.CONSTS["Rref"]) +\
        c_3 * (rval - self.CONSTS["Rref"]) | [func_code_tokens omitted: token-by-token duplicate of the code above] | Returns the general distance scaling term - equation 2 | [func_documentation_tokens omitted: tokenized docstring
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/kotha_2016.py#L111-L119 |
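A self-contained version of equation 2 showing how the pseudo-depth h keeps the effective distance, and hence the geometric decay, finite at rjb = 0 (coefficients are placeholders):

    import numpy as np

    def dist_term(rjb, mag, c1=-1.2, c2=0.25, c3=-0.002, h=5.0,
                  mref=5.5, rref=1.0):
        rval = np.sqrt(rjb ** 2 + h ** 2)   # never drops below h
        return (c1 + c2 * (mag - mref)) * np.log(rval / rref) \
            + c3 * (rval - rref)

    print(dist_term(np.array([0., 10., 100.]), 6.0))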
gem/oq-engine | openquake/hazardlib/gsim/kotha_2016.py | KothaEtAl2016._get_site_term | def _get_site_term(self, C, vs30):
"""
Returns only a linear site amplification term
"""
dg1, dg2 = self._get_regional_site_term(C)
return (C["g1"] + dg1) + (C["g2"] + dg2) * np.log(vs30) | python | def _get_site_term(self, C, vs30):
dg1, dg2 = self._get_regional_site_term(C)
return (C["g1"] + dg1) + (C["g2"] + dg2) * np.log(vs30) | [
"def",
"_get_site_term",
"(",
"self",
",",
"C",
",",
"vs30",
")",
":",
"dg1",
",",
"dg2",
"=",
"self",
".",
"_get_regional_site_term",
"(",
"C",
")",
"return",
"(",
"C",
"[",
"\"g1\"",
"]",
"+",
"dg1",
")",
"+",
"(",
"C",
"[",
"\"g2\"",
"]",
"+",
"dg2",
")",
"*",
"np",
".",
"log",
"(",
"vs30",
")"
] | Returns only a linear site amplification term | [
"Returns",
"only",
"a",
"linear",
"site",
"amplification",
"term"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/kotha_2016.py#L128-L133 |
gem/oq-engine | openquake/hazardlib/gsim/tusa_langer_2016.py | TusaLanger2016RepiBA08SE._get_stddevs | def _get_stddevs(self, C, stddev_types, num_sites):
"""
Return standard deviations as defined in tables below
"""
assert all(stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
for stddev_type in stddev_types)
stddevs = [np.zeros(num_sites) + C['SigmaTot'] for _ in stddev_types]
return stddevs | python | def _get_stddevs(self, C, stddev_types, num_sites):
assert all(stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
for stddev_type in stddev_types)
stddevs = [np.zeros(num_sites) + C['SigmaTot'] for _ in stddev_types]
        return stddevs | [func_code_tokens omitted: token-by-token duplicate of the code above] | Return standard deviations as defined in tables below | [func_documentation_tokens omitted: tokenized docstring
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/tusa_langer_2016.py#L112-L119 |
gem/oq-engine | openquake/hazardlib/gsim/tusa_langer_2016.py | TusaLanger2016RepiBA08SE._get_site_type_dummy_variables | def _get_site_type_dummy_variables(self, sites):
"""
    Get site type dummy variables, which classify the sites into
different site classes based on the shear wave velocity in the
upper 30 m (Vs30) according to the EC8 (CEN 2003):
class A: Vs30 > 800 m/s
class B: Vs30 = 360 - 800 m/s
class C*: Vs30 = 180 - 360 m/s
class D: Vs30 < 180 m/s
*Not computed by this GMPE
"""
ssa = np.zeros(len(sites.vs30))
ssb = np.zeros(len(sites.vs30))
ssd = np.zeros(len(sites.vs30))
# Class D; Vs30 < 180 m/s.
idx = (sites.vs30 < 180.0)
ssd[idx] = 1.0
        # Class B; 360 m/s <= Vs30 < 800 m/s.
idx = (sites.vs30 >= 360.0) & (sites.vs30 < 800.0)
ssb[idx] = 1.0
        # Class A; Vs30 >= 800 m/s.
idx = (sites.vs30 >= 800.0)
ssa[idx] = 1.0
for value in sites.vs30:
if 180 <= value < 360:
raise Exception(
'GMPE does not consider site class C (Vs30 = 180-360 m/s)')
return ssa, ssb, ssd | python | def _get_site_type_dummy_variables(self, sites):
ssa = np.zeros(len(sites.vs30))
ssb = np.zeros(len(sites.vs30))
ssd = np.zeros(len(sites.vs30))
idx = (sites.vs30 < 180.0)
ssd[idx] = 1.0
idx = (sites.vs30 >= 360.0) & (sites.vs30 < 800.0)
ssb[idx] = 1.0
idx = (sites.vs30 >= 800.0)
ssa[idx] = 1.0
for value in sites.vs30:
if 180 <= value < 360:
raise Exception(
'GMPE does not consider site class C (Vs30 = 180-360 m/s)')
        return ssa, ssb, ssd | [func_code_tokens omitted: token-by-token duplicate of the code above] | (func_documentation_string: same EC8 site-class docstring as shown above) | [func_documentation_tokens omitted: tokenized docstring
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/tusa_langer_2016.py#L148-L177 |
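The loop-plus-index pattern above condenses into boolean masks; a sketch with the same class boundaries and the class-C rejection kept as an explicit test:

    import numpy as np
    vs30 = np.array([150., 400., 900.])
    if np.any((vs30 >= 180.) & (vs30 < 360.)):
        raise ValueError('site class C (Vs30 = 180-360 m/s) not supported')
    ssa = (vs30 >= 800.).astype(float)                    # class A
    ssb = ((vs30 >= 360.) & (vs30 < 800.)).astype(float)  # class B
    ssd = (vs30 < 180.).astype(float)                     # class D
    print(ssa, ssb, ssd)  # [0. 0. 1.] [0. 1. 0.] [1. 0. 0.]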
gem/oq-engine | openquake/hazardlib/gsim/tusa_langer_2016.py | TusaLanger2016RepiSP87SE._compute_distance | def _compute_distance(self, rup, dists, C):
"""
Compute the distance function, equation (5).
"""
rval = np.sqrt(dists.repi ** 2 + C['h'] ** 2)
return C['c1'] * np.log10(rval) | python | def _compute_distance(self, rup, dists, C):
rval = np.sqrt(dists.repi ** 2 + C['h'] ** 2)
        return C['c1'] * np.log10(rval) | [func_code_tokens omitted: token-by-token duplicate of the code above] | Compute the distance function, equation (5). | [func_documentation_tokens omitted: tokenized docstring
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/tusa_langer_2016.py#L256-L261 |
gem/oq-engine | openquake/hazardlib/gsim/tusa_langer_2016.py | TusaLanger2016Rhypo._compute_distance | def _compute_distance(self, rup, dists, C):
"""
    Compute the distance function, equation (9).
"""
mref = 3.6
rref = 1.0
rval = np.sqrt(dists.rhypo ** 2 + C['h'] ** 2)
return (C['c1'] + C['c2'] * (rup.mag - mref)) *\
np.log10(rval / rref) + C['c3'] * (rval - rref) | python | def _compute_distance(self, rup, dists, C):
mref = 3.6
rref = 1.0
rval = np.sqrt(dists.rhypo ** 2 + C['h'] ** 2)
return (C['c1'] + C['c2'] * (rup.mag - mref)) *\
        np.log10(rval / rref) + C['c3'] * (rval - rref) | [func_code_tokens omitted: token-by-token duplicate of the code above] | Compute the distance function, equation (9). | [func_documentation_tokens omitted: tokenized docstring
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/tusa_langer_2016.py#L353-L361 |
gem/oq-engine | openquake/hazardlib/gsim/dowrickrhoades_2005.py | DowrickRhoades2005Asc.get_mean_and_stddevs | def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
"""
See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values.
"""
assert all(stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
for stddev_type in stddev_types)
# extract dictionaries of coefficients specific to required
# intensity measure type
C = self.COEFFS[imt]
# Deltas for Tectonic Region Type and rake angles
delta_R, delta_S, delta_V, delta_I = self._get_deltas(rup.rake)
mean = self._compute_mean(C, rup.mag, dists.rrup, rup.hypo_depth,
delta_R, delta_S, delta_V, delta_I,
sites.vs30)
stddevs = self._get_stddevs(C, stddev_types, sites.vs30.size)
return mean, stddevs | python | def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
assert all(stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
for stddev_type in stddev_types)
C = self.COEFFS[imt]
delta_R, delta_S, delta_V, delta_I = self._get_deltas(rup.rake)
mean = self._compute_mean(C, rup.mag, dists.rrup, rup.hypo_depth,
delta_R, delta_S, delta_V, delta_I,
sites.vs30)
stddevs = self._get_stddevs(C, stddev_types, sites.vs30.size)
        return mean, stddevs | [func_code_tokens omitted: token-by-token duplicate of the code above] | See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values. | [func_documentation_tokens omitted: tokenized docstring
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/dowrickrhoades_2005.py#L74-L96 |
gem/oq-engine | openquake/hazardlib/gsim/dowrickrhoades_2005.py | DowrickRhoades2005Asc._compute_mean | def _compute_mean(self, C, mag, rrup, hypo_depth, delta_R, delta_S,
delta_V, delta_I, vs30):
"""
Compute MMI Intensity Value as per Equation in Table 5 and
    Table 7, page 198.
"""
# mean is calculated for all the 4 classes using the same equation.
# For DowrickRhoades2005SSlab, the coefficients which don't appear in
        # Model 3 equation are assigned to zero
mean = (C['A1'] + (C['A2'] + C['A2R'] * delta_R + C['A2V'] * delta_V) *
mag + (C['A3'] + C['A3S'] * delta_S + C['A3V'] * delta_V) *
np.log10(np.power((rrup**3 + C['d']**3), 1.0 / 3.0)) +
C['A4'] * hypo_depth + C['A5'] * delta_I)
# Get S site class term
S = self._get_site_class(vs30, mean)
# Add S amplification term to mean value
mean = mean + S
return mean | python | def _compute_mean(self, C, mag, rrup, hypo_depth, delta_R, delta_S,
delta_V, delta_I, vs30):
mean = (C['A1'] + (C['A2'] + C['A2R'] * delta_R + C['A2V'] * delta_V) *
mag + (C['A3'] + C['A3S'] * delta_S + C['A3V'] * delta_V) *
np.log10(np.power((rrup**3 + C['d']**3), 1.0 / 3.0)) +
C['A4'] * hypo_depth + C['A5'] * delta_I)
S = self._get_site_class(vs30, mean)
mean = mean + S
        return mean | [func_code_tokens omitted: token-by-token duplicate of the code above] | Compute MMI Intensity Value as per Equation in Table 5 and Table 7, page 198. | [func_documentation_tokens omitted: tokenized docstring
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/dowrickrhoades_2005.py#L98-L120 |
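The distance metric in the mean blends rupture distance with a saturation term d through a cube-root mean, which keeps the logarithm finite at rrup = 0; for example (d = 10 km is a placeholder, the real value comes from the coefficient table):

    import numpy as np
    rrup, d = np.array([0., 10., 100.]), 10.
    print(np.log10(np.power(rrup ** 3 + d ** 3, 1.0 / 3.0)))
    # [1.0, ~1.1, ~2.0]: finite at zero distance, ~log10(rrup) far away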
gem/oq-engine | openquake/hazardlib/gsim/dowrickrhoades_2005.py | DowrickRhoades2005Asc._get_stddevs | def _get_stddevs(self, C, stddev_types, num_sites):
"""
    Return total standard deviation as described in paragraph 5.2, page 200.
"""
# interevent stddev
sigma_inter = C['tau'] + np.zeros(num_sites)
# intraevent std
sigma_intra = C['sigma'] + np.zeros(num_sites)
std = []
for stddev_type in stddev_types:
if stddev_type == const.StdDev.TOTAL:
# equation in section 5.2 page 200
std += [np.sqrt(sigma_intra**2 + sigma_inter**2)]
elif stddev_type == const.StdDev.INTRA_EVENT:
std.append(sigma_intra)
elif stddev_type == const.StdDev.INTER_EVENT:
std.append(sigma_inter)
return std | python | def _get_stddevs(self, C, stddev_types, num_sites):
sigma_inter = C['tau'] + np.zeros(num_sites)
sigma_intra = C['sigma'] + np.zeros(num_sites)
std = []
for stddev_type in stddev_types:
if stddev_type == const.StdDev.TOTAL:
std += [np.sqrt(sigma_intra**2 + sigma_inter**2)]
elif stddev_type == const.StdDev.INTRA_EVENT:
std.append(sigma_intra)
elif stddev_type == const.StdDev.INTER_EVENT:
std.append(sigma_inter)
return std | [
"def",
"_get_stddevs",
"(",
"self",
",",
"C",
",",
"stddev_types",
",",
"num_sites",
")",
":",
"# interevent stddev",
"sigma_inter",
"=",
"C",
"[",
"'tau'",
"]",
"+",
"np",
".",
"zeros",
"(",
"num_sites",
")",
"# intraevent std",
"sigma_intra",
"=",
"C",
"[",
"'sigma'",
"]",
"+",
"np",
".",
"zeros",
"(",
"num_sites",
")",
"std",
"=",
"[",
"]",
"for",
"stddev_type",
"in",
"stddev_types",
":",
"if",
"stddev_type",
"==",
"const",
".",
"StdDev",
".",
"TOTAL",
":",
"# equation in section 5.2 page 200",
"std",
"+=",
"[",
"np",
".",
"sqrt",
"(",
"sigma_intra",
"**",
"2",
"+",
"sigma_inter",
"**",
"2",
")",
"]",
"elif",
"stddev_type",
"==",
"const",
".",
"StdDev",
".",
"INTRA_EVENT",
":",
"std",
".",
"append",
"(",
"sigma_intra",
")",
"elif",
"stddev_type",
"==",
"const",
".",
"StdDev",
".",
"INTER_EVENT",
":",
"std",
".",
"append",
"(",
"sigma_inter",
")",
"return",
"std"
] | Return total standard deviation as described in paragraph 5.2 pag 200. | [
"Return",
"total",
"standard",
"deviation",
"as",
"described",
"in",
"paragraph",
"5",
".",
"2",
"pag",
"200",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/dowrickrhoades_2005.py#L122-L143 |
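The total sigma combines the two components in quadrature, e.g.:

    import numpy as np
    tau, sigma = 0.25, 0.35                # placeholder inter-/intra-event values
    print(np.sqrt(tau ** 2 + sigma ** 2))  # ~0.43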
gem/oq-engine | openquake/hazardlib/gsim/dowrickrhoades_2005.py | DowrickRhoades2005Asc._get_site_class | def _get_site_class(self, vs30, mmi_mean):
"""
    Return site class flag for:
    Class E - Very Soft Soil       vs30 < 180
    Class D - Deep or Soft Soil    vs30 >= 180 and vs30 <= 360
    Class C - Shallow Soil         vs30 > 360 and vs30 <= 760
    Class B - Rock                 vs30 > 760 and vs30 <= 1500
    Class A - Strong Rock          vs30 > 1500
    The S site class term is equal to
    S = c1                     if MMI <= 7
    S = c1 - d * (MMI - 7.0)   if 7 < MMI < 9.5
    S = c2                     if MMI >= 9.5
    """
if vs30[0] < 180:
c1 = 1.0
c2 = -0.25
d = 0.5
elif vs30[0] >= 180 and vs30[0] <= 360:
c1 = 0.5
c2 = -0.125
d = 0.25
elif vs30[0] > 360 and vs30[0] <= 760:
c1 = 0.
c2 = 0.
d = 0.
elif vs30[0] > 760 and vs30[0] <= 1500:
c1 = -0.5
c2 = 0.125
d = -0.25
elif vs30[0] > 1500:
c1 = -1.0
c2 = 0.25
d = -0.5
S = np.zeros_like(vs30)
for i in range(vs30.size):
if mmi_mean[i] <= 7.0:
S[i] += c1
elif mmi_mean[i] > 7 and mmi_mean[i] < 9.5:
S[i] += c1 - d * (mmi_mean[i] - 7.0)
else:
S[i] += c2
return S | python | def _get_site_class(self, vs30, mmi_mean):
if vs30[0] < 180:
c1 = 1.0
c2 = -0.25
d = 0.5
elif vs30[0] >= 180 and vs30[0] <= 360:
c1 = 0.5
c2 = -0.125
d = 0.25
elif vs30[0] > 360 and vs30[0] <= 760:
c1 = 0.
c2 = 0.
d = 0.
elif vs30[0] > 760 and vs30[0] <= 1500:
c1 = -0.5
c2 = 0.125
d = -0.25
elif vs30[0] > 1500:
c1 = -1.0
c2 = 0.25
d = -0.5
S = np.zeros_like(vs30)
for i in range(vs30.size):
if mmi_mean[i] <= 7.0:
S[i] += c1
elif mmi_mean[i] > 7 and mmi_mean[i] < 9.5:
S[i] += c1 - d * (mmi_mean[i] - 7.0)
else:
S[i] += c2
        return S | [func_code_tokens omitted: token-by-token duplicate of the code above] | (func_documentation_string: same site-class/MMI docstring as shown above) | [func_documentation_tokens omitted: tokenized docstring
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/dowrickrhoades_2005.py#L145-L190 |
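The MMI-dependent site term is piecewise linear and, with the constant triplets above, continuous at both knots (c1 - d * 2.5 equals c2 in every branch); a vectorized sketch using the 180-360 m/s row:

    import numpy as np
    c1, c2, d = 0.5, -0.125, 0.25
    mmi = np.array([6.0, 8.0, 10.0])
    S = np.where(mmi <= 7.0, c1,
                 np.where(mmi < 9.5, c1 - d * (mmi - 7.0), c2))
    print(S)  # [ 0.5    0.25  -0.125]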
gem/oq-engine | openquake/hazardlib/gsim/dowrickrhoades_2005.py | DowrickRhoades2005Asc._get_deltas | def _get_deltas(self, rake):
"""
Return the value of deltas (delta_R, delta_S, delta_V, delta_I),
as defined in "Table 5: Model 1" pag 198
"""
# delta_R = 1 for reverse focal mechanism (45<rake<135)
# and for interface events, 0 for all other events
# delta_S = 1 for Strike-slip focal mechanisms (0<=rake<=45) or
# (135<=rake<=180) or (-45<=rake<=0), 0 for all other events
# delta_V = 1 for TVZ events, 0 for all other events
# delta_I = 1 for interface events, 0 for all other events
# All deltas = 0 for Model 3: Deep Region, pag 198
delta_R, delta_S = 0, 0
delta_V, delta_I = 0, 0
if rake > 45.0 and rake < 135.0:
delta_R = 1
if (rake >= 0.0 and rake <= 45.0) or \
(rake >= 135 and rake <= 180.0) or \
(rake >= -180.0 and rake <= -135.0) or \
(rake >= -45.0 and rake < 0.0):
delta_S = 1
return delta_R, delta_S, delta_V, delta_I | python | def _get_deltas(self, rake):
delta_R, delta_S = 0, 0
delta_V, delta_I = 0, 0
if rake > 45.0 and rake < 135.0:
delta_R = 1
if (rake >= 0.0 and rake <= 45.0) or \
(rake >= 135 and rake <= 180.0) or \
(rake >= -180.0 and rake <= -135.0) or \
(rake >= -45.0 and rake < 0.0):
delta_S = 1
        return delta_R, delta_S, delta_V, delta_I | [func_code_tokens omitted: token-by-token duplicate of the code above] | Return the value of deltas (delta_R, delta_S, delta_V, delta_I), as defined in "Table 5: Model 1", page 198 | [func_documentation_tokens omitted: tokenized docstring
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/dowrickrhoades_2005.py#L192-L218 |
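The rake windows can be checked directly; note that a pure normal mechanism (rake = -90) sets neither flag, matching the function:

    for rake in (90.0, 10.0, 170.0, -160.0, -90.0):
        reverse = 45.0 < rake < 135.0
        strike_slip = (0.0 <= rake <= 45.0 or 135.0 <= rake <= 180.0
                       or -180.0 <= rake <= -135.0 or -45.0 <= rake < 0.0)
        print(rake, int(reverse), int(strike_slip))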
gem/oq-engine | openquake/hazardlib/gsim/dowrickrhoades_2005.py | DowrickRhoades2005SSlab._get_deltas | def _get_deltas(self, rake):
"""
Return the value of deltas (delta_R, delta_S, delta_V, delta_I),
as defined in "Table 5: Model 1" pag 198
"""
# All deltas = 0 for DowrickRhoades2005SSlab Model 3: Deep Region,
# pag 198
delta_R, delta_S = 0, 0
delta_V, delta_I = 0, 0
return delta_R, delta_S, delta_V, delta_I | python | def _get_deltas(self, rake):
delta_R, delta_S = 0, 0
delta_V, delta_I = 0, 0
        return delta_R, delta_S, delta_V, delta_I | [func_code_tokens omitted: token-by-token duplicate of the code above] | Return the value of deltas (delta_R, delta_S, delta_V, delta_I), as defined in "Table 5: Model 1", page 198 | [func_documentation_tokens omitted: tokenized docstring
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/dowrickrhoades_2005.py#L273-L284 |
gem/oq-engine | openquake/commands/plot_assets.py | plot_assets | def plot_assets(calc_id=-1, site_model=False):
"""
Plot the sites and the assets
"""
# NB: matplotlib is imported inside since it is a costly import
import matplotlib.pyplot as p
from openquake.hmtk.plotting.patch import PolygonPatch
dstore = util.read(calc_id)
try:
region = dstore['oqparam'].region
except KeyError:
region = None
sitecol = dstore['sitecol']
try:
assetcol = dstore['assetcol'].value
except AttributeError:
assetcol = dstore['assetcol'].array
fig = p.figure()
ax = fig.add_subplot(111)
if region:
pp = PolygonPatch(shapely.wkt.loads(region), alpha=0.1)
ax.add_patch(pp)
ax.grid(True)
if site_model and 'site_model' in dstore:
sm = dstore['site_model']
sm_lons, sm_lats = sm['lon'], sm['lat']
if len(sm_lons) > 1 and cross_idl(*sm_lons):
sm_lons %= 360
p.scatter(sm_lons, sm_lats, marker='.', color='orange')
p.scatter(sitecol.complete.lons, sitecol.complete.lats, marker='.',
color='gray')
p.scatter(assetcol['lon'], assetcol['lat'], marker='.', color='green')
p.scatter(sitecol.lons, sitecol.lats, marker='+', color='black')
if 'discarded' in dstore:
disc = numpy.unique(dstore['discarded'].value[['lon', 'lat']])
p.scatter(disc['lon'], disc['lat'], marker='x', color='red')
p.show() | python | def plot_assets(calc_id=-1, site_model=False):
import matplotlib.pyplot as p
from openquake.hmtk.plotting.patch import PolygonPatch
dstore = util.read(calc_id)
try:
region = dstore['oqparam'].region
except KeyError:
region = None
sitecol = dstore['sitecol']
try:
assetcol = dstore['assetcol'].value
except AttributeError:
assetcol = dstore['assetcol'].array
fig = p.figure()
ax = fig.add_subplot(111)
if region:
pp = PolygonPatch(shapely.wkt.loads(region), alpha=0.1)
ax.add_patch(pp)
ax.grid(True)
if site_model and 'site_model' in dstore:
sm = dstore['site_model']
sm_lons, sm_lats = sm['lon'], sm['lat']
if len(sm_lons) > 1 and cross_idl(*sm_lons):
sm_lons %= 360
p.scatter(sm_lons, sm_lats, marker='.', color='orange')
p.scatter(sitecol.complete.lons, sitecol.complete.lats, marker='.',
color='gray')
p.scatter(assetcol['lon'], assetcol['lat'], marker='.', color='green')
p.scatter(sitecol.lons, sitecol.lats, marker='+', color='black')
if 'discarded' in dstore:
disc = numpy.unique(dstore['discarded'].value[['lon', 'lat']])
p.scatter(disc['lon'], disc['lat'], marker='x', color='red')
    p.show() | [func_code_tokens omitted: token-by-token duplicate of the code above] | Plot the sites and the assets | [func_documentation_tokens omitted: tokenized docstring
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/plot_assets.py#L26-L62 |
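The draw order matters: the complete site collection (gray dots) goes down first so the filtered sites (black crosses) stay visible on top. A stripped-down sketch with synthetic coordinates (not the oq-engine datastore API):

    import numpy as np
    import matplotlib.pyplot as plt
    rng = np.random.default_rng(0)
    lons, lats = rng.uniform(10, 12, 50), rng.uniform(40, 42, 50)
    plt.scatter(lons, lats, marker='.', color='gray')             # all sites
    plt.scatter(lons[:20], lats[:20], marker='+', color='black')  # kept sites
    plt.grid(True)
    plt.show()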
gem/oq-engine | openquake/hmtk/seismicity/smoothing/smoothed_seismicity.py | _get_adjustment | def _get_adjustment(mag, year, mmin, completeness_year, t_f, mag_inc=0.1):
'''
    If the magnitude is greater than the minimum in the completeness table
    and the year is greater than the corresponding completeness year then
    return the Weichert factor
    :param float mag:
        Magnitude of an earthquake
    :param float year:
        Year of earthquake
    :param float mmin:
        Minimum magnitude of the completeness table
    :param np.ndarray completeness_year:
        Vector of completeness years, one per magnitude bin
    :param float t_f:
        Weichert adjustment factor
    :param float mag_inc:
        Magnitude increment
    :returns:
        Weichert adjustment factor if the event is in the complete part of
        the catalogue (False otherwise)
'''
if len(completeness_year) == 1:
if (mag >= mmin) and (year >= completeness_year[0]):
# No adjustment needed - event weight == 1
return 1.0
else:
# Event should not be counted
return False
kval = int(((mag - mmin) / mag_inc)) + 1
if (kval >= 1) and (year >= completeness_year[kval - 1]):
return t_f
else:
return False | python | def _get_adjustment(mag, year, mmin, completeness_year, t_f, mag_inc=0.1):
if len(completeness_year) == 1:
if (mag >= mmin) and (year >= completeness_year[0]):
return 1.0
else:
return False
kval = int(((mag - mmin) / mag_inc)) + 1
if (kval >= 1) and (year >= completeness_year[kval - 1]):
return t_f
else:
        return False | [func_code_tokens omitted: token-by-token duplicate of the code above] | If the magnitude is greater than the minimum in the completeness table and the year is greater than the corresponding completeness year then return the Weichert factor | [func_documentation_tokens omitted: tokenized docstring
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/smoothing/smoothed_seismicity.py#L129-L167 |
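How the magnitude-bin index kval selects a completeness year (table values below are placeholders):

    import numpy as np
    mmin, mag_inc = 4.0, 0.1
    completeness_year = np.array([1990., 1960., 1930.])
    for mag, year in ((4.05, 1995), (4.05, 1980), (4.25, 1950)):
        kval = int((mag - mmin) / mag_inc) + 1
        print(mag, year, year >= completeness_year[kval - 1])
    # True, False, True: only events after their bin's year are counted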
gem/oq-engine | openquake/hmtk/seismicity/smoothing/smoothed_seismicity.py | get_catalogue_bounding_polygon | def get_catalogue_bounding_polygon(catalogue):
'''
Returns a polygon containing the bounding box of the catalogue
'''
upper_lon = np.max(catalogue.data['longitude'])
upper_lat = np.max(catalogue.data['latitude'])
lower_lon = np.min(catalogue.data['longitude'])
lower_lat = np.min(catalogue.data['latitude'])
return Polygon([Point(lower_lon, upper_lat), Point(upper_lon, upper_lat),
Point(upper_lon, lower_lat), Point(lower_lon, lower_lat)]) | python | def get_catalogue_bounding_polygon(catalogue):
upper_lon = np.max(catalogue.data['longitude'])
upper_lat = np.max(catalogue.data['latitude'])
lower_lon = np.min(catalogue.data['longitude'])
lower_lat = np.min(catalogue.data['latitude'])
return Polygon([Point(lower_lon, upper_lat), Point(upper_lon, upper_lat),
                    Point(upper_lon, lower_lat), Point(lower_lon, lower_lat)]) | [func_code_tokens omitted: token-by-token duplicate of the code above] | Returns a polygon containing the bounding box of the catalogue | [func_documentation_tokens omitted: tokenized docstring
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/smoothing/smoothed_seismicity.py#L170-L180 |
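The corner order used above is NW, NE, SE, SW; the same box with plain numpy:

    import numpy as np
    lons, lats = np.array([10.2, 11.7, 10.9]), np.array([43.1, 44.0, 43.5])
    corners = [(lons.min(), lats.max()), (lons.max(), lats.max()),
               (lons.max(), lats.min()), (lons.min(), lats.min())]
    print(corners)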
gem/oq-engine | openquake/hmtk/seismicity/smoothing/smoothed_seismicity.py | Grid.make_from_catalogue | def make_from_catalogue(cls, catalogue, spacing, dilate):
'''
Defines the grid on the basis of the catalogue
'''
new = cls()
cat_bbox = get_catalogue_bounding_polygon(catalogue)
if dilate > 0:
cat_bbox = cat_bbox.dilate(dilate)
# Define Grid spacing
new.update({'xmin': np.min(cat_bbox.lons),
'xmax': np.max(cat_bbox.lons),
'xspc': spacing,
'ymin': np.min(cat_bbox.lats),
'ymax': np.max(cat_bbox.lats),
'yspc': spacing,
'zmin': 0.,
'zmax': np.max(catalogue.data['depth']),
'zspc': np.max(catalogue.data['depth'])})
if new['zmin'] == new['zmax'] == new['zspc'] == 0:
new['zmax'] = new['zspc'] = 1
return new | python | def make_from_catalogue(cls, catalogue, spacing, dilate):
new = cls()
cat_bbox = get_catalogue_bounding_polygon(catalogue)
if dilate > 0:
cat_bbox = cat_bbox.dilate(dilate)
new.update({'xmin': np.min(cat_bbox.lons),
'xmax': np.max(cat_bbox.lons),
'xspc': spacing,
'ymin': np.min(cat_bbox.lats),
'ymax': np.max(cat_bbox.lats),
'yspc': spacing,
'zmin': 0.,
'zmax': np.max(catalogue.data['depth']),
'zspc': np.max(catalogue.data['depth'])})
if new['zmin'] == new['zmax'] == new['zspc'] == 0:
new['zmax'] = new['zspc'] = 1
        return new | [func_code_tokens omitted: token-by-token duplicate of the code above] | Defines the grid on the basis of the catalogue | [func_documentation_tokens omitted: tokenized docstring
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/smoothing/smoothed_seismicity.py#L81-L105 |
gem/oq-engine | openquake/hmtk/seismicity/smoothing/smoothed_seismicity.py | SmoothedSeismicity.run_analysis | def run_analysis(self, catalogue, config, completeness_table=None,
smoothing_kernel=None):
'''
Runs an analysis of smoothed seismicity in the manner
originally implemented by Frankel (1995)
:param catalogue:
Instance of the openquake.hmtk.seismicity.catalogue.Catalogue class
catalogue.data dictionary containing the following -
'year' - numpy.ndarray vector of years
'longitude' - numpy.ndarray vector of longitudes
'latitude' - numpy.ndarray vector of latitudes
'depth' - numpy.ndarray vector of depths
:param dict config:
Configuration settings of the algorithm:
* 'Length_Limit' - Maximum number of bandwidths for use in
smoothing (Float)
* 'BandWidth' - Bandwidth (km) of the Smoothing Kernel (Float)
* 'increment' - Output incremental (True) or cumulative a-value
(False)
:param np.ndarray completeness_table:
Completeness of the catalogue assuming evenly spaced magnitudes
from most recent bin to oldest bin [year, magnitude]
:param smoothing_kernel:
Smoothing kernel as instance of :class:
`openquake.hmtk.seismicity.smoothing.kernels.base.BaseSmoothingKernel`
:returns:
Full smoothed seismicity data as np.ndarray, of the form
[Longitude, Latitude, Depth, Observed, Smoothed]
'''
self.catalogue = catalogue
if smoothing_kernel:
self.kernel = smoothing_kernel
else:
self.kernel = IsotropicGaussian()
# If no grid limits are specified then take from catalogue
if isinstance(self.grid_limits, list):
self.grid_limits = Grid.make_from_list(self.grid_limits)
assert self.grid_limits['xmax'] >= self.grid_limits['xmin']
assert self.grid_limits['xspc'] > 0.0
assert self.grid_limits['ymax'] >= self.grid_limits['ymin']
assert self.grid_limits['yspc'] > 0.0
elif isinstance(self.grid_limits, float):
self.grid_limits = Grid.make_from_catalogue(
self.catalogue, self.grid_limits,
config['Length_Limit'] * config['BandWidth'])
completeness_table, mag_inc = utils.get_even_magnitude_completeness(
completeness_table,
self.catalogue)
end_year = self.catalogue.end_year
# Get Weichert factor
t_f, _ = utils.get_weichert_factor(self.beta,
completeness_table[:, 1],
completeness_table[:, 0],
end_year)
# Get the grid
self.create_3D_grid(self.catalogue, completeness_table, t_f, mag_inc)
if config['increment']:
# Get Hermann adjustment factors
fval, fival = utils.hermann_adjustment_factors(
self.bval,
completeness_table[0, 1], config['increment'])
self.data[:, -1] = fval * fival * self.data[:, -1]
# Apply smoothing
smoothed_data, sum_data, sum_smooth = self.kernel.smooth_data(
self.data, config, self.use_3d)
print('Smoothing Total Rate Comparison - '
'Observed: %.6g, Smoothed: %.6g' % (sum_data, sum_smooth))
self.data = np.column_stack([self.data, smoothed_data])
return self.data | python | def run_analysis(self, catalogue, config, completeness_table=None,
smoothing_kernel=None):
self.catalogue = catalogue
if smoothing_kernel:
self.kernel = smoothing_kernel
else:
self.kernel = IsotropicGaussian()
if isinstance(self.grid_limits, list):
self.grid_limits = Grid.make_from_list(self.grid_limits)
assert self.grid_limits['xmax'] >= self.grid_limits['xmin']
assert self.grid_limits['xspc'] > 0.0
assert self.grid_limits['ymax'] >= self.grid_limits['ymin']
assert self.grid_limits['yspc'] > 0.0
elif isinstance(self.grid_limits, float):
self.grid_limits = Grid.make_from_catalogue(
self.catalogue, self.grid_limits,
config['Length_Limit'] * config['BandWidth'])
completeness_table, mag_inc = utils.get_even_magnitude_completeness(
completeness_table,
self.catalogue)
end_year = self.catalogue.end_year
t_f, _ = utils.get_weichert_factor(self.beta,
completeness_table[:, 1],
completeness_table[:, 0],
end_year)
self.create_3D_grid(self.catalogue, completeness_table, t_f, mag_inc)
if config['increment']:
fval, fival = utils.hermann_adjustment_factors(
self.bval,
completeness_table[0, 1], config['increment'])
self.data[:, -1] = fval * fival * self.data[:, -1]
smoothed_data, sum_data, sum_smooth = self.kernel.smooth_data(
self.data, config, self.use_3d)
print('Smoothing Total Rate Comparison - '
'Observed: %.6g, Smoothed: %.6g' % (sum_data, sum_smooth))
self.data = np.column_stack([self.data, smoothed_data])
        return self.data | [func_code_tokens omitted: token-by-token duplicate of the code above] | (func_documentation_string: same Frankel (1995) smoothed-seismicity docstring as shown above) | [func_documentation_tokens omitted: tokenized docstring
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/smoothing/smoothed_seismicity.py#L242-L321 |
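A hedged usage sketch of the expected config shape; the keys come from the docstring, while the numeric values and the commented constructor call are illustrative assumptions, not checked against the current API:

    config = {'Length_Limit': 3., 'BandWidth': 50., 'increment': False}
    # smoother = SmoothedSeismicity([xmin, xmax, xspc, ymin, ymax, yspc,
    #                                zmin, zmax, zspc], bvalue=1.0)
    # data = smoother.run_analysis(catalogue, config, completeness_table)
    # data columns: [Longitude, Latitude, Depth, Observed, Smoothed]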
gem/oq-engine | openquake/hmtk/seismicity/smoothing/smoothed_seismicity.py | SmoothedSeismicity.create_2D_grid_simple | def create_2D_grid_simple(self, longitude, latitude, year, magnitude,
completeness_table, t_f=1., mag_inc=0.1):
'''
Generates the grid from the limits using an approach closer to that of
Frankel (1995)
:param numpy.ndarray longitude:
Vector of earthquake longitudes
:param numpy.ndarray latitude:
Vector of earthquake latitudes
:param numpy.ndarray year:
Vector of earthquake years
:param numpy.ndarray magnitude:
Vector of earthquake magnitudes
:param numpy.ndarray completeness_table:
Completeness table
        :param float t_f:
            Weichert adjustment factor
        :param float mag_inc:
            Magnitude increment
        :returns:
            Two-dimensional spatial grid of observed rates
'''
assert mag_inc > 0.
xlim = np.ceil(
(self.grid_limits['xmax'] - self.grid_limits['xmin']) /
self.grid_limits['xspc'])
ylim = np.ceil(
(self.grid_limits['ymax'] - self.grid_limits['ymin']) /
self.grid_limits['yspc'])
ncolx = int(xlim)
ncoly = int(ylim)
grid_count = np.zeros(ncolx * ncoly, dtype=float)
for iloc in range(0, len(longitude)):
dlon = (longitude[iloc] - self.grid_limits['xmin']) /\
self.grid_limits['xspc']
if (dlon < 0.) or (dlon > xlim):
# Earthquake outside longitude limits
continue
xcol = int(dlon)
if xcol == ncolx:
# If longitude is directly on upper grid line then retain
xcol = ncolx - 1
dlat = fabs(self.grid_limits['ymax'] - latitude[iloc]) /\
self.grid_limits['yspc']
if (dlat < 0.) or (dlat > ylim):
# Earthquake outside latitude limits
continue
ycol = int(dlat) # Correct for floating precision
if ycol == ncoly:
# If latitude is directly on upper grid line then retain
ycol = ncoly - 1
kmarker = (ycol * int(xlim)) + xcol
adjust = _get_adjustment(magnitude[iloc],
year[iloc],
completeness_table[0, 1],
completeness_table[:, 0],
t_f,
mag_inc)
if adjust:
grid_count[kmarker] = grid_count[kmarker] + adjust
return grid_count | python | def create_2D_grid_simple(self, longitude, latitude, year, magnitude,
completeness_table, t_f=1., mag_inc=0.1):
assert mag_inc > 0.
xlim = np.ceil(
(self.grid_limits['xmax'] - self.grid_limits['xmin']) /
self.grid_limits['xspc'])
ylim = np.ceil(
(self.grid_limits['ymax'] - self.grid_limits['ymin']) /
self.grid_limits['yspc'])
ncolx = int(xlim)
ncoly = int(ylim)
grid_count = np.zeros(ncolx * ncoly, dtype=float)
for iloc in range(0, len(longitude)):
dlon = (longitude[iloc] - self.grid_limits['xmin']) /\
self.grid_limits['xspc']
if (dlon < 0.) or (dlon > xlim):
continue
xcol = int(dlon)
if xcol == ncolx:
xcol = ncolx - 1
dlat = fabs(self.grid_limits['ymax'] - latitude[iloc]) /\
self.grid_limits['yspc']
if (dlat < 0.) or (dlat > ylim):
continue
ycol = int(dlat)
if ycol == ncoly:
ycol = ncoly - 1
kmarker = (ycol * int(xlim)) + xcol
adjust = _get_adjustment(magnitude[iloc],
year[iloc],
completeness_table[0, 1],
completeness_table[:, 0],
t_f,
mag_inc)
if adjust:
grid_count[kmarker] = grid_count[kmarker] + adjust
return grid_count | [
"def",
"create_2D_grid_simple",
"(",
"self",
",",
"longitude",
",",
"latitude",
",",
"year",
",",
"magnitude",
",",
"completeness_table",
",",
"t_f",
"=",
"1.",
",",
"mag_inc",
"=",
"0.1",
")",
":",
"assert",
"mag_inc",
">",
"0.",
"xlim",
"=",
"np",
".",
"ceil",
"(",
"(",
"self",
".",
"grid_limits",
"[",
"'xmax'",
"]",
"-",
"self",
".",
"grid_limits",
"[",
"'xmin'",
"]",
")",
"/",
"self",
".",
"grid_limits",
"[",
"'xspc'",
"]",
")",
"ylim",
"=",
"np",
".",
"ceil",
"(",
"(",
"self",
".",
"grid_limits",
"[",
"'ymax'",
"]",
"-",
"self",
".",
"grid_limits",
"[",
"'ymin'",
"]",
")",
"/",
"self",
".",
"grid_limits",
"[",
"'yspc'",
"]",
")",
"ncolx",
"=",
"int",
"(",
"xlim",
")",
"ncoly",
"=",
"int",
"(",
"ylim",
")",
"grid_count",
"=",
"np",
".",
"zeros",
"(",
"ncolx",
"*",
"ncoly",
",",
"dtype",
"=",
"float",
")",
"for",
"iloc",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"longitude",
")",
")",
":",
"dlon",
"=",
"(",
"longitude",
"[",
"iloc",
"]",
"-",
"self",
".",
"grid_limits",
"[",
"'xmin'",
"]",
")",
"/",
"self",
".",
"grid_limits",
"[",
"'xspc'",
"]",
"if",
"(",
"dlon",
"<",
"0.",
")",
"or",
"(",
"dlon",
">",
"xlim",
")",
":",
"# Earthquake outside longitude limits",
"continue",
"xcol",
"=",
"int",
"(",
"dlon",
")",
"if",
"xcol",
"==",
"ncolx",
":",
"# If longitude is directly on upper grid line then retain",
"xcol",
"=",
"ncolx",
"-",
"1",
"dlat",
"=",
"fabs",
"(",
"self",
".",
"grid_limits",
"[",
"'ymax'",
"]",
"-",
"latitude",
"[",
"iloc",
"]",
")",
"/",
"self",
".",
"grid_limits",
"[",
"'yspc'",
"]",
"if",
"(",
"dlat",
"<",
"0.",
")",
"or",
"(",
"dlat",
">",
"ylim",
")",
":",
"# Earthquake outside latitude limits",
"continue",
"ycol",
"=",
"int",
"(",
"dlat",
")",
"# Correct for floating precision",
"if",
"ycol",
"==",
"ncoly",
":",
"# If latitude is directly on upper grid line then retain",
"ycol",
"=",
"ncoly",
"-",
"1",
"kmarker",
"=",
"(",
"ycol",
"*",
"int",
"(",
"xlim",
")",
")",
"+",
"xcol",
"adjust",
"=",
"_get_adjustment",
"(",
"magnitude",
"[",
"iloc",
"]",
",",
"year",
"[",
"iloc",
"]",
",",
"completeness_table",
"[",
"0",
",",
"1",
"]",
",",
"completeness_table",
"[",
":",
",",
"0",
"]",
",",
"t_f",
",",
"mag_inc",
")",
"if",
"adjust",
":",
"grid_count",
"[",
"kmarker",
"]",
"=",
"grid_count",
"[",
"kmarker",
"]",
"+",
"adjust",
"return",
"grid_count"
] | Generates the grid from the limits using an approach closer to that of
Frankel (1995)
:param numpy.ndarray longitude:
Vector of earthquake longitudes
:param numpy.ndarray latitude:
Vector of earthquake latitudes
:param numpy.ndarray year:
Vector of earthquake years
:param numpy.ndarray magnitude:
Vector of earthquake magnitudes
:param numpy.ndarray completeness_table:
Completeness table
:param float t_f:
Weichert adjustment factor
:returns:
Two-dimensional spatial grid of observed rates | [
"Generates",
"the",
"grid",
"from",
"the",
"limits",
"using",
"an",
"approach",
"closer",
"to",
"that",
"of",
"Frankel",
"(",
"1995",
")"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/smoothing/smoothed_seismicity.py#L323-L390 |
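A sketch of calling the gridding helper above directly, with hypothetical values; the nine-value `grid_limits` constructor layout is an assumption carried over from the previous sketch:

import numpy as np
from openquake.hmtk.seismicity.smoothing.smoothed_seismicity import (
    SmoothedSeismicity)

smoother = SmoothedSeismicity([105., 110., 0.5, -5., 0., 0.5, 0., 30., 30.])
counts = smoother.create_2D_grid_simple(
    np.array([106.2, 107.5]),          # longitudes
    np.array([-3.1, -2.4]),            # latitudes
    np.array([1995, 2001]),            # years
    np.array([4.5, 5.1]),              # magnitudes
    np.array([[1990., 4.0]]))          # completeness table [year, magnitude]
# counts is a flat array of length ncolx * ncoly; the fabs(ymax - lat)
# indexing means rows are scanned from the northern (ymax) edge southwards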
gem/oq-engine | openquake/hmtk/seismicity/smoothing/smoothed_seismicity.py | SmoothedSeismicity.create_3D_grid | def create_3D_grid(self, catalogue, completeness_table, t_f=1.0,
mag_inc=0.1):
'''
Counts the earthquakes observed in a three dimensional grid
:param catalogue:
Instance of the openquake.hmtk.seismicity.catalogue.Catalogue class
catalogue.data dictionary containing the following -
'year' - numpy.ndarray vector of years
'longitude' - numpy.ndarray vector of longitudes
'latitude' - numpy.ndarray vector of latitudes
'depth' - numpy.ndarray vector of depths
:param np.ndarray completeness_table:
Completeness of the catalogue assuming evenly spaced magnitudes
from most recent bin to oldest bin [year, magnitude]
:param float t_f:
Weichert adjustment factor
:param float mag_inc:
            Increment of the completeness magnitude (default 0.1)
        :returns:
            Three-dimensional spatial grid of observed rates (or two-dimensional
            if only one depth layer is considered)
'''
x_bins = np.arange(self.grid_limits['xmin'],
self.grid_limits['xmax'],
self.grid_limits['xspc'])
if x_bins[-1] < self.grid_limits['xmax']:
x_bins = np.hstack([x_bins, x_bins[-1] + self.grid_limits['xspc']])
y_bins = np.arange(self.grid_limits['ymin'],
self.grid_limits['ymax'],
self.grid_limits['yspc'])
if y_bins[-1] < self.grid_limits['ymax']:
y_bins = np.hstack([y_bins, y_bins[-1] + self.grid_limits['yspc']])
z_bins = np.arange(self.grid_limits['zmin'],
self.grid_limits['zmax'] + self.grid_limits['zspc'],
self.grid_limits['zspc'])
if z_bins[-1] < self.grid_limits['zmax']:
z_bins = np.hstack([z_bins, z_bins[-1] + self.grid_limits['zspc']])
# Define centre points of grid cells
gridx, gridy = np.meshgrid((x_bins[1:] + x_bins[:-1]) / 2.,
(y_bins[1:] + y_bins[:-1]) / 2.)
n_x, n_y = np.shape(gridx)
gridx = np.reshape(gridx, [n_x * n_y, 1])
gridy = np.reshape(np.flipud(gridy), [n_x * n_y, 1])
# Only one depth range
idx = np.logical_and(catalogue.data['depth'] >= z_bins[0],
catalogue.data['depth'] < z_bins[1])
mid_depth = (z_bins[0] + z_bins[1]) / 2.
data_grid = np.column_stack([
gridx,
gridy,
mid_depth * np.ones(n_x * n_y, dtype=float),
self.create_2D_grid_simple(catalogue.data['longitude'][idx],
catalogue.data['latitude'][idx],
catalogue.data['year'][idx],
catalogue.data['magnitude'][idx],
completeness_table,
t_f,
mag_inc)])
if len(z_bins) < 3:
# Only one depth range
self.data = data_grid
return
# Multiple depth layers - append to grid
for iloc in range(1, len(z_bins) - 1):
idx = np.logical_and(catalogue.data['depth'] >= z_bins[iloc],
catalogue.data['depth'] < z_bins[iloc + 1])
mid_depth = (z_bins[iloc] + z_bins[iloc + 1]) / 2.
temp_grid = np.column_stack([
gridx,
gridy,
mid_depth * np.ones(n_x * n_y, dtype=float),
self.create_2D_grid_simple(catalogue.data['longitude'][idx],
catalogue.data['latitude'][idx],
catalogue.data['year'][idx],
catalogue.data['magnitude'][idx],
completeness_table,
t_f,
mag_inc)])
data_grid = np.vstack([data_grid, temp_grid])
self.data = data_grid | python | def create_3D_grid(self, catalogue, completeness_table, t_f=1.0,
mag_inc=0.1):
x_bins = np.arange(self.grid_limits['xmin'],
self.grid_limits['xmax'],
self.grid_limits['xspc'])
if x_bins[-1] < self.grid_limits['xmax']:
x_bins = np.hstack([x_bins, x_bins[-1] + self.grid_limits['xspc']])
y_bins = np.arange(self.grid_limits['ymin'],
self.grid_limits['ymax'],
self.grid_limits['yspc'])
if y_bins[-1] < self.grid_limits['ymax']:
y_bins = np.hstack([y_bins, y_bins[-1] + self.grid_limits['yspc']])
z_bins = np.arange(self.grid_limits['zmin'],
self.grid_limits['zmax'] + self.grid_limits['zspc'],
self.grid_limits['zspc'])
if z_bins[-1] < self.grid_limits['zmax']:
z_bins = np.hstack([z_bins, z_bins[-1] + self.grid_limits['zspc']])
gridx, gridy = np.meshgrid((x_bins[1:] + x_bins[:-1]) / 2.,
(y_bins[1:] + y_bins[:-1]) / 2.)
n_x, n_y = np.shape(gridx)
gridx = np.reshape(gridx, [n_x * n_y, 1])
gridy = np.reshape(np.flipud(gridy), [n_x * n_y, 1])
idx = np.logical_and(catalogue.data['depth'] >= z_bins[0],
catalogue.data['depth'] < z_bins[1])
mid_depth = (z_bins[0] + z_bins[1]) / 2.
data_grid = np.column_stack([
gridx,
gridy,
mid_depth * np.ones(n_x * n_y, dtype=float),
self.create_2D_grid_simple(catalogue.data['longitude'][idx],
catalogue.data['latitude'][idx],
catalogue.data['year'][idx],
catalogue.data['magnitude'][idx],
completeness_table,
t_f,
mag_inc)])
if len(z_bins) < 3:
self.data = data_grid
return
for iloc in range(1, len(z_bins) - 1):
idx = np.logical_and(catalogue.data['depth'] >= z_bins[iloc],
catalogue.data['depth'] < z_bins[iloc + 1])
mid_depth = (z_bins[iloc] + z_bins[iloc + 1]) / 2.
temp_grid = np.column_stack([
gridx,
gridy,
mid_depth * np.ones(n_x * n_y, dtype=float),
self.create_2D_grid_simple(catalogue.data['longitude'][idx],
catalogue.data['latitude'][idx],
catalogue.data['year'][idx],
catalogue.data['magnitude'][idx],
completeness_table,
t_f,
mag_inc)])
data_grid = np.vstack([data_grid, temp_grid])
self.data = data_grid | [
"def",
"create_3D_grid",
"(",
"self",
",",
"catalogue",
",",
"completeness_table",
",",
"t_f",
"=",
"1.0",
",",
"mag_inc",
"=",
"0.1",
")",
":",
"x_bins",
"=",
"np",
".",
"arange",
"(",
"self",
".",
"grid_limits",
"[",
"'xmin'",
"]",
",",
"self",
".",
"grid_limits",
"[",
"'xmax'",
"]",
",",
"self",
".",
"grid_limits",
"[",
"'xspc'",
"]",
")",
"if",
"x_bins",
"[",
"-",
"1",
"]",
"<",
"self",
".",
"grid_limits",
"[",
"'xmax'",
"]",
":",
"x_bins",
"=",
"np",
".",
"hstack",
"(",
"[",
"x_bins",
",",
"x_bins",
"[",
"-",
"1",
"]",
"+",
"self",
".",
"grid_limits",
"[",
"'xspc'",
"]",
"]",
")",
"y_bins",
"=",
"np",
".",
"arange",
"(",
"self",
".",
"grid_limits",
"[",
"'ymin'",
"]",
",",
"self",
".",
"grid_limits",
"[",
"'ymax'",
"]",
",",
"self",
".",
"grid_limits",
"[",
"'yspc'",
"]",
")",
"if",
"y_bins",
"[",
"-",
"1",
"]",
"<",
"self",
".",
"grid_limits",
"[",
"'ymax'",
"]",
":",
"y_bins",
"=",
"np",
".",
"hstack",
"(",
"[",
"y_bins",
",",
"y_bins",
"[",
"-",
"1",
"]",
"+",
"self",
".",
"grid_limits",
"[",
"'yspc'",
"]",
"]",
")",
"z_bins",
"=",
"np",
".",
"arange",
"(",
"self",
".",
"grid_limits",
"[",
"'zmin'",
"]",
",",
"self",
".",
"grid_limits",
"[",
"'zmax'",
"]",
"+",
"self",
".",
"grid_limits",
"[",
"'zspc'",
"]",
",",
"self",
".",
"grid_limits",
"[",
"'zspc'",
"]",
")",
"if",
"z_bins",
"[",
"-",
"1",
"]",
"<",
"self",
".",
"grid_limits",
"[",
"'zmax'",
"]",
":",
"z_bins",
"=",
"np",
".",
"hstack",
"(",
"[",
"z_bins",
",",
"z_bins",
"[",
"-",
"1",
"]",
"+",
"self",
".",
"grid_limits",
"[",
"'zspc'",
"]",
"]",
")",
"# Define centre points of grid cells",
"gridx",
",",
"gridy",
"=",
"np",
".",
"meshgrid",
"(",
"(",
"x_bins",
"[",
"1",
":",
"]",
"+",
"x_bins",
"[",
":",
"-",
"1",
"]",
")",
"/",
"2.",
",",
"(",
"y_bins",
"[",
"1",
":",
"]",
"+",
"y_bins",
"[",
":",
"-",
"1",
"]",
")",
"/",
"2.",
")",
"n_x",
",",
"n_y",
"=",
"np",
".",
"shape",
"(",
"gridx",
")",
"gridx",
"=",
"np",
".",
"reshape",
"(",
"gridx",
",",
"[",
"n_x",
"*",
"n_y",
",",
"1",
"]",
")",
"gridy",
"=",
"np",
".",
"reshape",
"(",
"np",
".",
"flipud",
"(",
"gridy",
")",
",",
"[",
"n_x",
"*",
"n_y",
",",
"1",
"]",
")",
"# Only one depth range",
"idx",
"=",
"np",
".",
"logical_and",
"(",
"catalogue",
".",
"data",
"[",
"'depth'",
"]",
">=",
"z_bins",
"[",
"0",
"]",
",",
"catalogue",
".",
"data",
"[",
"'depth'",
"]",
"<",
"z_bins",
"[",
"1",
"]",
")",
"mid_depth",
"=",
"(",
"z_bins",
"[",
"0",
"]",
"+",
"z_bins",
"[",
"1",
"]",
")",
"/",
"2.",
"data_grid",
"=",
"np",
".",
"column_stack",
"(",
"[",
"gridx",
",",
"gridy",
",",
"mid_depth",
"*",
"np",
".",
"ones",
"(",
"n_x",
"*",
"n_y",
",",
"dtype",
"=",
"float",
")",
",",
"self",
".",
"create_2D_grid_simple",
"(",
"catalogue",
".",
"data",
"[",
"'longitude'",
"]",
"[",
"idx",
"]",
",",
"catalogue",
".",
"data",
"[",
"'latitude'",
"]",
"[",
"idx",
"]",
",",
"catalogue",
".",
"data",
"[",
"'year'",
"]",
"[",
"idx",
"]",
",",
"catalogue",
".",
"data",
"[",
"'magnitude'",
"]",
"[",
"idx",
"]",
",",
"completeness_table",
",",
"t_f",
",",
"mag_inc",
")",
"]",
")",
"if",
"len",
"(",
"z_bins",
")",
"<",
"3",
":",
"# Only one depth range",
"self",
".",
"data",
"=",
"data_grid",
"return",
"# Multiple depth layers - append to grid",
"for",
"iloc",
"in",
"range",
"(",
"1",
",",
"len",
"(",
"z_bins",
")",
"-",
"1",
")",
":",
"idx",
"=",
"np",
".",
"logical_and",
"(",
"catalogue",
".",
"data",
"[",
"'depth'",
"]",
">=",
"z_bins",
"[",
"iloc",
"]",
",",
"catalogue",
".",
"data",
"[",
"'depth'",
"]",
"<",
"z_bins",
"[",
"iloc",
"+",
"1",
"]",
")",
"mid_depth",
"=",
"(",
"z_bins",
"[",
"iloc",
"]",
"+",
"z_bins",
"[",
"iloc",
"+",
"1",
"]",
")",
"/",
"2.",
"temp_grid",
"=",
"np",
".",
"column_stack",
"(",
"[",
"gridx",
",",
"gridy",
",",
"mid_depth",
"*",
"np",
".",
"ones",
"(",
"n_x",
"*",
"n_y",
",",
"dtype",
"=",
"float",
")",
",",
"self",
".",
"create_2D_grid_simple",
"(",
"catalogue",
".",
"data",
"[",
"'longitude'",
"]",
"[",
"idx",
"]",
",",
"catalogue",
".",
"data",
"[",
"'latitude'",
"]",
"[",
"idx",
"]",
",",
"catalogue",
".",
"data",
"[",
"'year'",
"]",
"[",
"idx",
"]",
",",
"catalogue",
".",
"data",
"[",
"'magnitude'",
"]",
"[",
"idx",
"]",
",",
"completeness_table",
",",
"t_f",
",",
"mag_inc",
")",
"]",
")",
"data_grid",
"=",
"np",
".",
"vstack",
"(",
"[",
"data_grid",
",",
"temp_grid",
"]",
")",
"self",
".",
"data",
"=",
"data_grid"
] | Counts the earthquakes observed in a three dimensional grid
:param catalogue:
Instance of the openquake.hmtk.seismicity.catalogue.Catalogue class
catalogue.data dictionary containing the following -
'year' - numpy.ndarray vector of years
'longitude' - numpy.ndarray vector of longitudes
'latitude' - numpy.ndarray vector of latitudes
'depth' - numpy.ndarray vector of depths
:param np.ndarray completeness_table:
Completeness of the catalogue assuming evenly spaced magnitudes
from most recent bin to oldest bin [year, magnitude]
:param float t_f:
Weichert adjustment factor
:param float mag_inc:
            Increment of the completeness magnitude (default 0.1)
        :returns:
            Three-dimensional spatial grid of observed rates (or two-dimensional
            if only one depth layer is considered)
"Counts",
"the",
"earthquakes",
"observed",
"in",
"a",
"three",
"dimensional",
"grid"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/smoothing/smoothed_seismicity.py#L392-L489 |
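A sketch of the depth-layered gridding, continuing the `smoother` and `cat` objects built in the run_analysis sketch earlier; the z-keys of `grid_limits` are inferred from the code body:

import numpy as np
# Two 30 km depth layers between 0 and 60 km (assumed grid_limits keys)
smoother.grid_limits.update({'zmin': 0., 'zmax': 60., 'zspc': 30.})
smoother.create_3D_grid(cat, np.array([[1990., 4.0]]))
# smoother.data stacks one [lon, lat, mid_depth, count] block per layer,
# with mid-depths of 15 km and 45 km here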
gem/oq-engine | openquake/hmtk/seismicity/smoothing/smoothed_seismicity.py | SmoothedSeismicity.write_to_csv | def write_to_csv(self, filename):
'''
Exports to simple csv
:param str filename:
Path to file for export
'''
fid = open(filename, 'wt')
# Create header list
header_info = ['Longitude', 'Latitude', 'Depth', 'Observed Count',
'Smoothed Rate', 'b-value']
writer = csv.DictWriter(fid, fieldnames=header_info)
headers = dict((name0, name0) for name0 in header_info)
# Write to file
writer.writerow(headers)
for row in self.data:
# institute crude compression by omitting points with no seismicity
# and taking advantage of the %g format
if row[4] == 0:
continue
row_dict = {'Longitude': '%g' % row[0],
'Latitude': '%g' % row[1],
'Depth': '%g' % row[2],
'Observed Count': '%d' % row[3],
'Smoothed Rate': '%.6g' % row[4],
'b-value': '%g' % self.bval}
writer.writerow(row_dict)
fid.close() | python | def write_to_csv(self, filename):
fid = open(filename, 'wt')
header_info = ['Longitude', 'Latitude', 'Depth', 'Observed Count',
'Smoothed Rate', 'b-value']
writer = csv.DictWriter(fid, fieldnames=header_info)
headers = dict((name0, name0) for name0 in header_info)
writer.writerow(headers)
for row in self.data:
if row[4] == 0:
continue
row_dict = {'Longitude': '%g' % row[0],
'Latitude': '%g' % row[1],
'Depth': '%g' % row[2],
'Observed Count': '%d' % row[3],
'Smoothed Rate': '%.6g' % row[4],
'b-value': '%g' % self.bval}
writer.writerow(row_dict)
fid.close() | [
"def",
"write_to_csv",
"(",
"self",
",",
"filename",
")",
":",
"fid",
"=",
"open",
"(",
"filename",
",",
"'wt'",
")",
"# Create header list",
"header_info",
"=",
"[",
"'Longitude'",
",",
"'Latitude'",
",",
"'Depth'",
",",
"'Observed Count'",
",",
"'Smoothed Rate'",
",",
"'b-value'",
"]",
"writer",
"=",
"csv",
".",
"DictWriter",
"(",
"fid",
",",
"fieldnames",
"=",
"header_info",
")",
"headers",
"=",
"dict",
"(",
"(",
"name0",
",",
"name0",
")",
"for",
"name0",
"in",
"header_info",
")",
"# Write to file",
"writer",
".",
"writerow",
"(",
"headers",
")",
"for",
"row",
"in",
"self",
".",
"data",
":",
"# institute crude compression by omitting points with no seismicity",
"# and taking advantage of the %g format",
"if",
"row",
"[",
"4",
"]",
"==",
"0",
":",
"continue",
"row_dict",
"=",
"{",
"'Longitude'",
":",
"'%g'",
"%",
"row",
"[",
"0",
"]",
",",
"'Latitude'",
":",
"'%g'",
"%",
"row",
"[",
"1",
"]",
",",
"'Depth'",
":",
"'%g'",
"%",
"row",
"[",
"2",
"]",
",",
"'Observed Count'",
":",
"'%d'",
"%",
"row",
"[",
"3",
"]",
",",
"'Smoothed Rate'",
":",
"'%.6g'",
"%",
"row",
"[",
"4",
"]",
",",
"'b-value'",
":",
"'%g'",
"%",
"self",
".",
"bval",
"}",
"writer",
".",
"writerow",
"(",
"row_dict",
")",
"fid",
".",
"close",
"(",
")"
] | Exports to simple csv
:param str filename:
Path to file for export | [
"Exports",
"to",
"simple",
"csv"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/smoothing/smoothed_seismicity.py#L491-L518 |
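Exporting the result is then a one-liner; note from the code above that cells with a zero smoothed rate (column 4) are skipped, which keeps the file compact:

# assuming `smoother` has been run as in the earlier sketches, so that
# smoother.data and smoother.bval are populated
smoother.write_to_csv('/tmp/smoothed_rates.csv')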
gem/oq-engine | openquake/commonlib/hazard_writers.py | _validate_hazard_metadata | def _validate_hazard_metadata(md):
"""
Validate metadata `dict` of attributes, which are more or less the same for
hazard curves, hazard maps, and disaggregation histograms.
:param dict md:
`dict` which can contain the following keys:
* statistics
* gsimlt_path
* smlt_path
* imt
* sa_period
* sa_damping
:raises:
:exc:`ValueError` if the metadata is not valid.
"""
if (md.get('statistics') is not None and (
md.get('smlt_path') is not None or
md.get('gsimlt_path') is not None)):
raise ValueError('Cannot specify both `statistics` and logic tree '
'paths')
if md.get('statistics') is not None:
# make sure only valid statistics types are specified
if md.get('statistics') not in ('mean', 'max', 'quantile', 'std'):
            raise ValueError('`statistics` must be one of `mean`, `max`, '
                             '`quantile` or `std`')
else:
# must specify both logic tree paths
if md.get('smlt_path') is None or md.get('gsimlt_path') is None:
raise ValueError('Both logic tree paths are required for '
'non-statistical results')
if md.get('statistics') == 'quantile':
if md.get('quantile_value') is None:
            raise ValueError('quantile statistics results require a quantile'
' value to be specified')
if not md.get('statistics') == 'quantile':
if md.get('quantile_value') is not None:
raise ValueError('Quantile value must be specified with '
'quantile statistics')
if md.get('imt') == 'SA':
if md.get('sa_period') is None:
raise ValueError('`sa_period` is required for IMT == `SA`')
if md.get('sa_damping') is None:
raise ValueError('`sa_damping` is required for IMT == `SA`') | python | def _validate_hazard_metadata(md):
if (md.get('statistics') is not None and (
md.get('smlt_path') is not None or
md.get('gsimlt_path') is not None)):
raise ValueError('Cannot specify both `statistics` and logic tree '
'paths')
if md.get('statistics') is not None:
if md.get('statistics') not in ('mean', 'max', 'quantile', 'std'):
            raise ValueError('`statistics` must be one of `mean`, `max`, '
                             '`quantile` or `std`')
else:
if md.get('smlt_path') is None or md.get('gsimlt_path') is None:
raise ValueError('Both logic tree paths are required for '
'non-statistical results')
if md.get('statistics') == 'quantile':
if md.get('quantile_value') is None:
            raise ValueError('quantile statistics results require a quantile'
' value to be specified')
if not md.get('statistics') == 'quantile':
if md.get('quantile_value') is not None:
raise ValueError('Quantile value must be specified with '
'quantile statistics')
if md.get('imt') == 'SA':
if md.get('sa_period') is None:
raise ValueError('`sa_period` is required for IMT == `SA`')
if md.get('sa_damping') is None:
raise ValueError('`sa_damping` is required for IMT == `SA`') | [
"def",
"_validate_hazard_metadata",
"(",
"md",
")",
":",
"if",
"(",
"md",
".",
"get",
"(",
"'statistics'",
")",
"is",
"not",
"None",
"and",
"(",
"md",
".",
"get",
"(",
"'smlt_path'",
")",
"is",
"not",
"None",
"or",
"md",
".",
"get",
"(",
"'gsimlt_path'",
")",
"is",
"not",
"None",
")",
")",
":",
"raise",
"ValueError",
"(",
"'Cannot specify both `statistics` and logic tree '",
"'paths'",
")",
"if",
"md",
".",
"get",
"(",
"'statistics'",
")",
"is",
"not",
"None",
":",
"# make sure only valid statistics types are specified",
"if",
"md",
".",
"get",
"(",
"'statistics'",
")",
"not",
"in",
"(",
"'mean'",
",",
"'max'",
",",
"'quantile'",
",",
"'std'",
")",
":",
"raise",
"ValueError",
"(",
"'`statistics` must be either `mean`, `max`, or '",
"'`quantile`'",
")",
"else",
":",
"# must specify both logic tree paths",
"if",
"md",
".",
"get",
"(",
"'smlt_path'",
")",
"is",
"None",
"or",
"md",
".",
"get",
"(",
"'gsimlt_path'",
")",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"'Both logic tree paths are required for '",
"'non-statistical results'",
")",
"if",
"md",
".",
"get",
"(",
"'statistics'",
")",
"==",
"'quantile'",
":",
"if",
"md",
".",
"get",
"(",
"'quantile_value'",
")",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"'quantile stastics results require a quantile'",
"' value to be specified'",
")",
"if",
"not",
"md",
".",
"get",
"(",
"'statistics'",
")",
"==",
"'quantile'",
":",
"if",
"md",
".",
"get",
"(",
"'quantile_value'",
")",
"is",
"not",
"None",
":",
"raise",
"ValueError",
"(",
"'Quantile value must be specified with '",
"'quantile statistics'",
")",
"if",
"md",
".",
"get",
"(",
"'imt'",
")",
"==",
"'SA'",
":",
"if",
"md",
".",
"get",
"(",
"'sa_period'",
")",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"'`sa_period` is required for IMT == `SA`'",
")",
"if",
"md",
".",
"get",
"(",
"'sa_damping'",
")",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"'`sa_damping` is required for IMT == `SA`'",
")"
] | Validate metadata `dict` of attributes, which are more or less the same for
hazard curves, hazard maps, and disaggregation histograms.
:param dict md:
`dict` which can contain the following keys:
* statistics
* gsimlt_path
* smlt_path
* imt
* sa_period
* sa_damping
:raises:
:exc:`ValueError` if the metadata is not valid. | [
"Validate",
"metadata",
"dict",
"of",
"attributes",
"which",
"are",
"more",
"or",
"less",
"the",
"same",
"for",
"hazard",
"curves",
"hazard",
"maps",
"and",
"disaggregation",
"histograms",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/hazard_writers.py#L54-L103 |
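A quick illustration of the validation rules in the record above (`_validate_hazard_metadata` is a private helper, so this is for understanding rather than public use):

from openquake.commonlib.hazard_writers import _validate_hazard_metadata

# Valid: quantile statistics with a quantile value, no logic-tree paths
_validate_hazard_metadata({'statistics': 'quantile', 'quantile_value': 0.85})

# Invalid: statistics and a logic-tree path are mutually exclusive
try:
    _validate_hazard_metadata({'statistics': 'mean', 'smlt_path': 'b1'})
except ValueError as exc:
    print(exc)  # Cannot specify both `statistics` and logic tree paths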
gem/oq-engine | openquake/commonlib/hazard_writers.py | _set_metadata | def _set_metadata(element, metadata, attr_map, transform=str):
"""
Set metadata attributes on a given ``element``.
:param element:
:class:`xml.etree.ElementTree.Element` instance
:param metadata:
Dictionary of metadata items containing attribute data for ``element``.
:param attr_map:
Dictionary mapping of metadata key->attribute name.
:param transform:
A function accepting and returning a single value to be applied to each
attribute value. Defaults to `str`.
"""
for kw, attr in attr_map.items():
value = metadata.get(kw)
if value is not None:
element.set(attr, transform(value)) | python | def _set_metadata(element, metadata, attr_map, transform=str):
for kw, attr in attr_map.items():
value = metadata.get(kw)
if value is not None:
element.set(attr, transform(value)) | [
"def",
"_set_metadata",
"(",
"element",
",",
"metadata",
",",
"attr_map",
",",
"transform",
"=",
"str",
")",
":",
"for",
"kw",
",",
"attr",
"in",
"attr_map",
".",
"items",
"(",
")",
":",
"value",
"=",
"metadata",
".",
"get",
"(",
"kw",
")",
"if",
"value",
"is",
"not",
"None",
":",
"element",
".",
"set",
"(",
"attr",
",",
"transform",
"(",
"value",
")",
")"
] | Set metadata attributes on a given ``element``.
:param element:
:class:`xml.etree.ElementTree.Element` instance
:param metadata:
Dictionary of metadata items containing attribute data for ``element``.
:param attr_map:
Dictionary mapping of metadata key->attribute name.
:param transform:
A function accepting and returning a single value to be applied to each
attribute value. Defaults to `str`. | [
"Set",
"metadata",
"attributes",
"on",
"a",
"given",
"element",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/hazard_writers.py#L106-L123 |
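The helper above only copies keys that appear in the attribute map and are not None, stringifying values with the given transform. A minimal demonstration:

import xml.etree.ElementTree as et
from openquake.commonlib.hazard_writers import _set_metadata

elem = et.Element('hazardCurves')
_set_metadata(elem, {'imt': 'SA', 'sa_period': 1.0, 'statistics': None},
              {'imt': 'IMT', 'sa_period': 'saPeriod',
               'statistics': 'statistics'})
# None values are skipped; the rest are stringified by the default transform
print(et.tostring(elem))  # b'<hazardCurves IMT="SA" saPeriod="1.0" />'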
gem/oq-engine | openquake/commonlib/hazard_writers.py | gen_gmfs | def gen_gmfs(gmf_set):
"""
Generate GMF nodes from a gmf_set
:param gmf_set: a sequence of GMF objects with attributes
imt, sa_period, sa_damping, event_id and containing a list
of GMF nodes with attributes gmv and location. The nodes
are sorted by lon/lat.
"""
for gmf in gmf_set:
gmf_node = Node('gmf')
gmf_node['IMT'] = gmf.imt
if gmf.imt == 'SA':
gmf_node['saPeriod'] = str(gmf.sa_period)
gmf_node['saDamping'] = str(gmf.sa_damping)
gmf_node['ruptureId'] = gmf.event_id
sorted_nodes = sorted(gmf)
gmf_node.nodes = (
Node('node', dict(gmv=n.gmv, lon=n.location.x, lat=n.location.y))
for n in sorted_nodes)
yield gmf_node | python | def gen_gmfs(gmf_set):
for gmf in gmf_set:
gmf_node = Node('gmf')
gmf_node['IMT'] = gmf.imt
if gmf.imt == 'SA':
gmf_node['saPeriod'] = str(gmf.sa_period)
gmf_node['saDamping'] = str(gmf.sa_damping)
gmf_node['ruptureId'] = gmf.event_id
sorted_nodes = sorted(gmf)
gmf_node.nodes = (
Node('node', dict(gmv=n.gmv, lon=n.location.x, lat=n.location.y))
for n in sorted_nodes)
yield gmf_node | [
"def",
"gen_gmfs",
"(",
"gmf_set",
")",
":",
"for",
"gmf",
"in",
"gmf_set",
":",
"gmf_node",
"=",
"Node",
"(",
"'gmf'",
")",
"gmf_node",
"[",
"'IMT'",
"]",
"=",
"gmf",
".",
"imt",
"if",
"gmf",
".",
"imt",
"==",
"'SA'",
":",
"gmf_node",
"[",
"'saPeriod'",
"]",
"=",
"str",
"(",
"gmf",
".",
"sa_period",
")",
"gmf_node",
"[",
"'saDamping'",
"]",
"=",
"str",
"(",
"gmf",
".",
"sa_damping",
")",
"gmf_node",
"[",
"'ruptureId'",
"]",
"=",
"gmf",
".",
"event_id",
"sorted_nodes",
"=",
"sorted",
"(",
"gmf",
")",
"gmf_node",
".",
"nodes",
"=",
"(",
"Node",
"(",
"'node'",
",",
"dict",
"(",
"gmv",
"=",
"n",
".",
"gmv",
",",
"lon",
"=",
"n",
".",
"location",
".",
"x",
",",
"lat",
"=",
"n",
".",
"location",
".",
"y",
")",
")",
"for",
"n",
"in",
"sorted_nodes",
")",
"yield",
"gmf_node"
] | Generate GMF nodes from a gmf_set
:param gmf_set: a sequence of GMF objects with attributes
imt, sa_period, sa_damping, event_id and containing a list
of GMF nodes with attributes gmv and location. The nodes
are sorted by lon/lat. | [
"Generate",
"GMF",
"nodes",
"from",
"a",
"gmf_set",
":",
"param",
"gmf_set",
":",
"a",
"sequence",
"of",
"GMF",
"objects",
"with",
"attributes",
"imt",
"sa_period",
"sa_damping",
"event_id",
"and",
"containing",
"a",
"list",
"of",
"GMF",
"nodes",
"with",
"attributes",
"gmv",
"and",
"location",
".",
"The",
"nodes",
"are",
"sorted",
"by",
"lon",
"/",
"lat",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/hazard_writers.py#L218-L237 |
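A sketch of feeding `gen_gmfs` with minimal stand-in objects (hypothetical classes; the real calculators pass their own GMF types). Putting `location` first in the node tuple gives the lon/lat sort order the docstring mentions:

from collections import namedtuple
from openquake.commonlib.hazard_writers import gen_gmfs

Loc = namedtuple('Loc', 'x y')
GmfNode = namedtuple('GmfNode', 'location gmv')

class Gmf(list):
    """A list of nodes carrying the metadata attributes gen_gmfs reads."""
    def __init__(self, imt, event_id, nodes):
        super().__init__(nodes)
        self.imt, self.event_id = imt, event_id
        self.sa_period = self.sa_damping = None  # read only when imt == 'SA'

gmf = Gmf('PGA', 'rup-1', [GmfNode(Loc(10.0, 45.0), 0.12),
                           GmfNode(Loc(10.5, 45.2), 0.08)])
for node in gen_gmfs([gmf]):
    print(node)  # a Node tagged 'gmf' with one child per ground motion value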
gem/oq-engine | openquake/commonlib/hazard_writers.py | rupture_to_element | def rupture_to_element(rup, parent=None):
"""
Convert a rupture object into an Element object.
:param rup:
must have attributes .rupid, .events_by_ses and .seed
:param parent:
parent of the returned element, or None
"""
if parent is None:
parent = et.Element('root')
rup_elem = et.SubElement(parent, rup.typology)
elem = et.SubElement(rup_elem, 'stochasticEventSets')
n = 0
for ses in rup.events_by_ses:
eids = rup.events_by_ses[ses]['eid']
n += len(eids)
ses_elem = et.SubElement(elem, 'SES', id=ses)
ses_elem.text = ' '.join(str(eid) for eid in eids)
rup_elem.set('id', rup.rupid)
rup_elem.set('multiplicity', str(n))
sub_elems(rup_elem, rup, 'magnitude', 'strike', 'dip', 'rake')
h = rup.hypocenter
et.SubElement(rup_elem, 'hypocenter', dict(lon=h.x, lat=h.y, depth=h.z))
if rup.is_from_fault_source:
# rup is from a simple or complex fault source
# the rup geometry is represented by a mesh of 3D
# points
mesh_elem = et.SubElement(rup_elem, 'mesh')
# we assume the mesh components (lons, lats, depths)
# are of uniform shape
for i, row in enumerate(rup.lons):
for j, col in enumerate(row):
node_elem = et.SubElement(mesh_elem, 'node')
node_elem.set('row', str(i))
node_elem.set('col', str(j))
node_elem.set('lon', str(rup.lons[i][j]))
node_elem.set('lat', str(rup.lats[i][j]))
node_elem.set('depth', str(rup.depths[i][j]))
# if we never entered the loop above, it's possible
# that i and j will be undefined
mesh_elem.set('rows', str(i + 1))
mesh_elem.set('cols', str(j + 1))
elif rup.is_gridded_surface:
# the rup geometry is represented by a mesh of (1, N) points
mesh_elem = et.SubElement(rup_elem, 'mesh')
for j, _ in enumerate(rup.lons):
node_elem = et.SubElement(mesh_elem, 'node')
node_elem.set('row', '0')
node_elem.set('col', str(j))
node_elem.set('lon', str(rup.lons[j]))
node_elem.set('lat', str(rup.lats[j]))
node_elem.set('depth', str(rup.depths[j]))
else:
# rupture is from a multi surface fault source
if rup.is_multi_surface:
# the arrays lons, lats and depths contain 4*N elements,
# where N is the number of planar surfaces contained in the
            # multisurface; each planar surface is characterised by 4
# vertices top_left, top_right, bottom_left, bottom_right
assert len(rup.lons) % 4 == 0
assert len(rup.lons) == len(rup.lats) == len(rup.depths)
for offset in range(len(rup.lons) // 4):
# looping on the coordinates of the sub surfaces, one
# planar surface at the time
start = offset * 4
end = offset * 4 + 4
lons = rup.lons[start:end] # 4 lons of the current surface
lats = rup.lats[start:end] # 4 lats of the current surface
depths = rup.depths[start:end] # 4 depths
ps_elem = et.SubElement(
rup_elem, 'planarSurface')
top_left, top_right, bottom_left, bottom_right = \
zip(lons, lats, depths)
for el_name, corner in (
('topLeft', top_left),
('topRight', top_right),
('bottomLeft', bottom_left),
('bottomRight', bottom_right)):
corner_elem = et.SubElement(ps_elem, el_name)
corner_elem.set('lon', '%.7f' % corner[0])
corner_elem.set('lat', '%.7f' % corner[1])
corner_elem.set('depth', '%.7f' % corner[2])
else:
# rupture is from a point or area source
# the rupture geometry is represented by four 3D
# corner points
ps_elem = et.SubElement(rup_elem, 'planarSurface')
# create the corner point elements, in the order of:
# * top left
# * top right
# * bottom left
# * bottom right
for el_name, corner in (
('topLeft', rup.top_left_corner),
('topRight', rup.top_right_corner),
('bottomLeft', rup.bottom_left_corner),
('bottomRight', rup.bottom_right_corner)):
corner_elem = et.SubElement(ps_elem, el_name)
corner_elem.set('lon', '%.7f' % corner[0])
corner_elem.set('lat', '%.7f' % corner[1])
corner_elem.set('depth', '%.7f' % corner[2])
return parent | python | def rupture_to_element(rup, parent=None):
if parent is None:
parent = et.Element('root')
rup_elem = et.SubElement(parent, rup.typology)
elem = et.SubElement(rup_elem, 'stochasticEventSets')
n = 0
for ses in rup.events_by_ses:
eids = rup.events_by_ses[ses]['eid']
n += len(eids)
ses_elem = et.SubElement(elem, 'SES', id=ses)
ses_elem.text = ' '.join(str(eid) for eid in eids)
rup_elem.set('id', rup.rupid)
rup_elem.set('multiplicity', str(n))
sub_elems(rup_elem, rup, 'magnitude', 'strike', 'dip', 'rake')
h = rup.hypocenter
et.SubElement(rup_elem, 'hypocenter', dict(lon=h.x, lat=h.y, depth=h.z))
if rup.is_from_fault_source:
mesh_elem = et.SubElement(rup_elem, 'mesh')
for i, row in enumerate(rup.lons):
for j, col in enumerate(row):
node_elem = et.SubElement(mesh_elem, 'node')
node_elem.set('row', str(i))
node_elem.set('col', str(j))
node_elem.set('lon', str(rup.lons[i][j]))
node_elem.set('lat', str(rup.lats[i][j]))
node_elem.set('depth', str(rup.depths[i][j]))
mesh_elem.set('rows', str(i + 1))
mesh_elem.set('cols', str(j + 1))
elif rup.is_gridded_surface:
mesh_elem = et.SubElement(rup_elem, 'mesh')
for j, _ in enumerate(rup.lons):
node_elem = et.SubElement(mesh_elem, 'node')
node_elem.set('row', '0')
node_elem.set('col', str(j))
node_elem.set('lon', str(rup.lons[j]))
node_elem.set('lat', str(rup.lats[j]))
node_elem.set('depth', str(rup.depths[j]))
else:
if rup.is_multi_surface:
assert len(rup.lons) % 4 == 0
assert len(rup.lons) == len(rup.lats) == len(rup.depths)
for offset in range(len(rup.lons) // 4):
start = offset * 4
end = offset * 4 + 4
lons = rup.lons[start:end]
lats = rup.lats[start:end]
depths = rup.depths[start:end]
ps_elem = et.SubElement(
rup_elem, 'planarSurface')
top_left, top_right, bottom_left, bottom_right = \
zip(lons, lats, depths)
for el_name, corner in (
('topLeft', top_left),
('topRight', top_right),
('bottomLeft', bottom_left),
('bottomRight', bottom_right)):
corner_elem = et.SubElement(ps_elem, el_name)
corner_elem.set('lon', '%.7f' % corner[0])
corner_elem.set('lat', '%.7f' % corner[1])
corner_elem.set('depth', '%.7f' % corner[2])
else:
ps_elem = et.SubElement(rup_elem, 'planarSurface')
for el_name, corner in (
('topLeft', rup.top_left_corner),
('topRight', rup.top_right_corner),
('bottomLeft', rup.bottom_left_corner),
('bottomRight', rup.bottom_right_corner)):
corner_elem = et.SubElement(ps_elem, el_name)
corner_elem.set('lon', '%.7f' % corner[0])
corner_elem.set('lat', '%.7f' % corner[1])
corner_elem.set('depth', '%.7f' % corner[2])
return parent | [
"def",
"rupture_to_element",
"(",
"rup",
",",
"parent",
"=",
"None",
")",
":",
"if",
"parent",
"is",
"None",
":",
"parent",
"=",
"et",
".",
"Element",
"(",
"'root'",
")",
"rup_elem",
"=",
"et",
".",
"SubElement",
"(",
"parent",
",",
"rup",
".",
"typology",
")",
"elem",
"=",
"et",
".",
"SubElement",
"(",
"rup_elem",
",",
"'stochasticEventSets'",
")",
"n",
"=",
"0",
"for",
"ses",
"in",
"rup",
".",
"events_by_ses",
":",
"eids",
"=",
"rup",
".",
"events_by_ses",
"[",
"ses",
"]",
"[",
"'eid'",
"]",
"n",
"+=",
"len",
"(",
"eids",
")",
"ses_elem",
"=",
"et",
".",
"SubElement",
"(",
"elem",
",",
"'SES'",
",",
"id",
"=",
"ses",
")",
"ses_elem",
".",
"text",
"=",
"' '",
".",
"join",
"(",
"str",
"(",
"eid",
")",
"for",
"eid",
"in",
"eids",
")",
"rup_elem",
".",
"set",
"(",
"'id'",
",",
"rup",
".",
"rupid",
")",
"rup_elem",
".",
"set",
"(",
"'multiplicity'",
",",
"str",
"(",
"n",
")",
")",
"sub_elems",
"(",
"rup_elem",
",",
"rup",
",",
"'magnitude'",
",",
"'strike'",
",",
"'dip'",
",",
"'rake'",
")",
"h",
"=",
"rup",
".",
"hypocenter",
"et",
".",
"SubElement",
"(",
"rup_elem",
",",
"'hypocenter'",
",",
"dict",
"(",
"lon",
"=",
"h",
".",
"x",
",",
"lat",
"=",
"h",
".",
"y",
",",
"depth",
"=",
"h",
".",
"z",
")",
")",
"if",
"rup",
".",
"is_from_fault_source",
":",
"# rup is from a simple or complex fault source",
"# the rup geometry is represented by a mesh of 3D",
"# points",
"mesh_elem",
"=",
"et",
".",
"SubElement",
"(",
"rup_elem",
",",
"'mesh'",
")",
"# we assume the mesh components (lons, lats, depths)",
"# are of uniform shape",
"for",
"i",
",",
"row",
"in",
"enumerate",
"(",
"rup",
".",
"lons",
")",
":",
"for",
"j",
",",
"col",
"in",
"enumerate",
"(",
"row",
")",
":",
"node_elem",
"=",
"et",
".",
"SubElement",
"(",
"mesh_elem",
",",
"'node'",
")",
"node_elem",
".",
"set",
"(",
"'row'",
",",
"str",
"(",
"i",
")",
")",
"node_elem",
".",
"set",
"(",
"'col'",
",",
"str",
"(",
"j",
")",
")",
"node_elem",
".",
"set",
"(",
"'lon'",
",",
"str",
"(",
"rup",
".",
"lons",
"[",
"i",
"]",
"[",
"j",
"]",
")",
")",
"node_elem",
".",
"set",
"(",
"'lat'",
",",
"str",
"(",
"rup",
".",
"lats",
"[",
"i",
"]",
"[",
"j",
"]",
")",
")",
"node_elem",
".",
"set",
"(",
"'depth'",
",",
"str",
"(",
"rup",
".",
"depths",
"[",
"i",
"]",
"[",
"j",
"]",
")",
")",
"# if we never entered the loop above, it's possible",
"# that i and j will be undefined",
"mesh_elem",
".",
"set",
"(",
"'rows'",
",",
"str",
"(",
"i",
"+",
"1",
")",
")",
"mesh_elem",
".",
"set",
"(",
"'cols'",
",",
"str",
"(",
"j",
"+",
"1",
")",
")",
"elif",
"rup",
".",
"is_gridded_surface",
":",
"# the rup geometry is represented by a mesh of (1, N) points",
"mesh_elem",
"=",
"et",
".",
"SubElement",
"(",
"rup_elem",
",",
"'mesh'",
")",
"for",
"j",
",",
"_",
"in",
"enumerate",
"(",
"rup",
".",
"lons",
")",
":",
"node_elem",
"=",
"et",
".",
"SubElement",
"(",
"mesh_elem",
",",
"'node'",
")",
"node_elem",
".",
"set",
"(",
"'row'",
",",
"'0'",
")",
"node_elem",
".",
"set",
"(",
"'col'",
",",
"str",
"(",
"j",
")",
")",
"node_elem",
".",
"set",
"(",
"'lon'",
",",
"str",
"(",
"rup",
".",
"lons",
"[",
"j",
"]",
")",
")",
"node_elem",
".",
"set",
"(",
"'lat'",
",",
"str",
"(",
"rup",
".",
"lats",
"[",
"j",
"]",
")",
")",
"node_elem",
".",
"set",
"(",
"'depth'",
",",
"str",
"(",
"rup",
".",
"depths",
"[",
"j",
"]",
")",
")",
"else",
":",
"# rupture is from a multi surface fault source",
"if",
"rup",
".",
"is_multi_surface",
":",
"# the arrays lons, lats and depths contain 4*N elements,",
"# where N is the number of planar surfaces contained in the",
"# multisurface; each planar surface if characterised by 4",
"# vertices top_left, top_right, bottom_left, bottom_right",
"assert",
"len",
"(",
"rup",
".",
"lons",
")",
"%",
"4",
"==",
"0",
"assert",
"len",
"(",
"rup",
".",
"lons",
")",
"==",
"len",
"(",
"rup",
".",
"lats",
")",
"==",
"len",
"(",
"rup",
".",
"depths",
")",
"for",
"offset",
"in",
"range",
"(",
"len",
"(",
"rup",
".",
"lons",
")",
"//",
"4",
")",
":",
"# looping on the coordinates of the sub surfaces, one",
"# planar surface at the time",
"start",
"=",
"offset",
"*",
"4",
"end",
"=",
"offset",
"*",
"4",
"+",
"4",
"lons",
"=",
"rup",
".",
"lons",
"[",
"start",
":",
"end",
"]",
"# 4 lons of the current surface",
"lats",
"=",
"rup",
".",
"lats",
"[",
"start",
":",
"end",
"]",
"# 4 lats of the current surface",
"depths",
"=",
"rup",
".",
"depths",
"[",
"start",
":",
"end",
"]",
"# 4 depths",
"ps_elem",
"=",
"et",
".",
"SubElement",
"(",
"rup_elem",
",",
"'planarSurface'",
")",
"top_left",
",",
"top_right",
",",
"bottom_left",
",",
"bottom_right",
"=",
"zip",
"(",
"lons",
",",
"lats",
",",
"depths",
")",
"for",
"el_name",
",",
"corner",
"in",
"(",
"(",
"'topLeft'",
",",
"top_left",
")",
",",
"(",
"'topRight'",
",",
"top_right",
")",
",",
"(",
"'bottomLeft'",
",",
"bottom_left",
")",
",",
"(",
"'bottomRight'",
",",
"bottom_right",
")",
")",
":",
"corner_elem",
"=",
"et",
".",
"SubElement",
"(",
"ps_elem",
",",
"el_name",
")",
"corner_elem",
".",
"set",
"(",
"'lon'",
",",
"'%.7f'",
"%",
"corner",
"[",
"0",
"]",
")",
"corner_elem",
".",
"set",
"(",
"'lat'",
",",
"'%.7f'",
"%",
"corner",
"[",
"1",
"]",
")",
"corner_elem",
".",
"set",
"(",
"'depth'",
",",
"'%.7f'",
"%",
"corner",
"[",
"2",
"]",
")",
"else",
":",
"# rupture is from a point or area source",
"# the rupture geometry is represented by four 3D",
"# corner points",
"ps_elem",
"=",
"et",
".",
"SubElement",
"(",
"rup_elem",
",",
"'planarSurface'",
")",
"# create the corner point elements, in the order of:",
"# * top left",
"# * top right",
"# * bottom left",
"# * bottom right",
"for",
"el_name",
",",
"corner",
"in",
"(",
"(",
"'topLeft'",
",",
"rup",
".",
"top_left_corner",
")",
",",
"(",
"'topRight'",
",",
"rup",
".",
"top_right_corner",
")",
",",
"(",
"'bottomLeft'",
",",
"rup",
".",
"bottom_left_corner",
")",
",",
"(",
"'bottomRight'",
",",
"rup",
".",
"bottom_right_corner",
")",
")",
":",
"corner_elem",
"=",
"et",
".",
"SubElement",
"(",
"ps_elem",
",",
"el_name",
")",
"corner_elem",
".",
"set",
"(",
"'lon'",
",",
"'%.7f'",
"%",
"corner",
"[",
"0",
"]",
")",
"corner_elem",
".",
"set",
"(",
"'lat'",
",",
"'%.7f'",
"%",
"corner",
"[",
"1",
"]",
")",
"corner_elem",
".",
"set",
"(",
"'depth'",
",",
"'%.7f'",
"%",
"corner",
"[",
"2",
"]",
")",
"return",
"parent"
] | Convert a rupture object into an Element object.
:param rup:
must have attributes .rupid, .events_by_ses and .seed
:param parent:
parent of the returned element, or None | [
"Convert",
"a",
"rupture",
"object",
"into",
"an",
"Element",
"object",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/hazard_writers.py#L311-L422 |
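A sketch of building a point-source rupture object that satisfies the attribute contract above (all values hypothetical; the `typology` tag and the string-typed hypocenter coordinates are assumptions made so the tree serializes cleanly):

from types import SimpleNamespace
from openquake.commonlib.hazard_writers import rupture_to_element

rup = SimpleNamespace(
    typology='singlePlaneRupture', rupid='1', seed=42,
    events_by_ses={'1': {'eid': [11, 12]}},
    magnitude=6.5, strike=30.0, dip=45.0, rake=90.0,
    hypocenter=SimpleNamespace(x='10.0', y='45.0', z='8.0'),
    is_from_fault_source=False, is_gridded_surface=False,
    is_multi_surface=False,
    top_left_corner=(9.9, 45.1, 5.0), top_right_corner=(10.1, 45.1, 5.0),
    bottom_left_corner=(9.9, 44.9, 12.0),
    bottom_right_corner=(10.1, 44.9, 12.0))
root = rupture_to_element(rup)
# root wraps <singlePlaneRupture> with SES, hypocenter and planarSurface nodes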
gem/oq-engine | openquake/commonlib/hazard_writers.py | HazardCurveXMLWriter.serialize | def serialize(self, data):
"""
Write a sequence of hazard curves to the specified file.
:param data:
Iterable of hazard curve data. Each datum must be an object with
the following attributes:
            * poes: A list of probability of exceedance values (floats).
* location: An object representing the location of the curve; must
have `x` and `y` to represent lon and lat, respectively.
"""
with open(self.dest, 'wb') as fh:
root = et.Element('nrml')
self.add_hazard_curves(root, self.metadata, data)
nrml.write(list(root), fh) | python | def serialize(self, data):
with open(self.dest, 'wb') as fh:
root = et.Element('nrml')
self.add_hazard_curves(root, self.metadata, data)
nrml.write(list(root), fh) | [
"def",
"serialize",
"(",
"self",
",",
"data",
")",
":",
"with",
"open",
"(",
"self",
".",
"dest",
",",
"'wb'",
")",
"as",
"fh",
":",
"root",
"=",
"et",
".",
"Element",
"(",
"'nrml'",
")",
"self",
".",
"add_hazard_curves",
"(",
"root",
",",
"self",
".",
"metadata",
",",
"data",
")",
"nrml",
".",
"write",
"(",
"list",
"(",
"root",
")",
",",
"fh",
")"
] | Write a sequence of hazard curves to the specified file.
:param data:
Iterable of hazard curve data. Each datum must be an object with
the following attributes:
            * poes: A list of probability of exceedance values (floats).
* location: An object representing the location of the curve; must
have `x` and `y` to represent lon and lat, respectively. | [
"Write",
"a",
"sequence",
"of",
"hazard",
"curves",
"to",
"the",
"specified",
"file",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/hazard_writers.py#L177-L192 |
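A sketch of driving the curve writer; only `self.dest` and `self.metadata` are visible in this record, so the constructor is assumed to take the destination path plus metadata keywords:

from collections import namedtuple
from openquake.commonlib.hazard_writers import HazardCurveXMLWriter

Loc = namedtuple('Loc', 'x y')
HazardCurve = namedtuple('HazardCurve', 'location poes')

writer = HazardCurveXMLWriter(
    '/tmp/hcurves.xml', investigation_time=50.0, imt='PGA',
    imls=[0.01, 0.02, 0.04], smlt_path='b1', gsimlt_path='gA')
writer.serialize([HazardCurve(Loc(10.0, 45.0), [0.9, 0.5, 0.1]),
                  HazardCurve(Loc(10.5, 45.0), [0.8, 0.4, 0.05])])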
gem/oq-engine | openquake/commonlib/hazard_writers.py | HazardCurveXMLWriter.add_hazard_curves | def add_hazard_curves(self, root, metadata, data):
"""
Add hazard curves stored into `data` as child of the `root`
element with `metadata`. See the documentation of the method
`serialize` and the constructor for a description of `data`
and `metadata`, respectively.
"""
hazard_curves = et.SubElement(root, 'hazardCurves')
_set_metadata(hazard_curves, metadata, _ATTR_MAP)
imls_elem = et.SubElement(hazard_curves, 'IMLs')
imls_elem.text = ' '.join(map(scientificformat, metadata['imls']))
gml_ns = nrml.SERIALIZE_NS_MAP['gml']
for hc in data:
hc_elem = et.SubElement(hazard_curves, 'hazardCurve')
gml_point = et.SubElement(hc_elem, '{%s}Point' % gml_ns)
gml_pos = et.SubElement(gml_point, '{%s}pos' % gml_ns)
gml_pos.text = '%s %s' % (hc.location.x, hc.location.y)
poes_elem = et.SubElement(hc_elem, 'poEs')
poes_elem.text = ' '.join(map(scientificformat, hc.poes)) | python | def add_hazard_curves(self, root, metadata, data):
hazard_curves = et.SubElement(root, 'hazardCurves')
_set_metadata(hazard_curves, metadata, _ATTR_MAP)
imls_elem = et.SubElement(hazard_curves, 'IMLs')
imls_elem.text = ' '.join(map(scientificformat, metadata['imls']))
gml_ns = nrml.SERIALIZE_NS_MAP['gml']
for hc in data:
hc_elem = et.SubElement(hazard_curves, 'hazardCurve')
gml_point = et.SubElement(hc_elem, '{%s}Point' % gml_ns)
gml_pos = et.SubElement(gml_point, '{%s}pos' % gml_ns)
gml_pos.text = '%s %s' % (hc.location.x, hc.location.y)
poes_elem = et.SubElement(hc_elem, 'poEs')
poes_elem.text = ' '.join(map(scientificformat, hc.poes)) | [
"def",
"add_hazard_curves",
"(",
"self",
",",
"root",
",",
"metadata",
",",
"data",
")",
":",
"hazard_curves",
"=",
"et",
".",
"SubElement",
"(",
"root",
",",
"'hazardCurves'",
")",
"_set_metadata",
"(",
"hazard_curves",
",",
"metadata",
",",
"_ATTR_MAP",
")",
"imls_elem",
"=",
"et",
".",
"SubElement",
"(",
"hazard_curves",
",",
"'IMLs'",
")",
"imls_elem",
".",
"text",
"=",
"' '",
".",
"join",
"(",
"map",
"(",
"scientificformat",
",",
"metadata",
"[",
"'imls'",
"]",
")",
")",
"gml_ns",
"=",
"nrml",
".",
"SERIALIZE_NS_MAP",
"[",
"'gml'",
"]",
"for",
"hc",
"in",
"data",
":",
"hc_elem",
"=",
"et",
".",
"SubElement",
"(",
"hazard_curves",
",",
"'hazardCurve'",
")",
"gml_point",
"=",
"et",
".",
"SubElement",
"(",
"hc_elem",
",",
"'{%s}Point'",
"%",
"gml_ns",
")",
"gml_pos",
"=",
"et",
".",
"SubElement",
"(",
"gml_point",
",",
"'{%s}pos'",
"%",
"gml_ns",
")",
"gml_pos",
".",
"text",
"=",
"'%s %s'",
"%",
"(",
"hc",
".",
"location",
".",
"x",
",",
"hc",
".",
"location",
".",
"y",
")",
"poes_elem",
"=",
"et",
".",
"SubElement",
"(",
"hc_elem",
",",
"'poEs'",
")",
"poes_elem",
".",
"text",
"=",
"' '",
".",
"join",
"(",
"map",
"(",
"scientificformat",
",",
"hc",
".",
"poes",
")",
")"
] | Add hazard curves stored into `data` as child of the `root`
element with `metadata`. See the documentation of the method
`serialize` and the constructor for a description of `data`
and `metadata`, respectively. | [
"Add",
"hazard",
"curves",
"stored",
"into",
"data",
"as",
"child",
"of",
"the",
"root",
"element",
"with",
"metadata",
".",
"See",
"the",
"documentation",
"of",
"the",
"method",
"serialize",
"and",
"the",
"constructor",
"for",
"a",
"description",
"of",
"data",
"and",
"metadata",
"respectively",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/hazard_writers.py#L194-L215 |
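`add_hazard_curves` can also graft curves onto an existing tree, which is what `serialize` does internally. A sketch under the same constructor assumption as above:

import xml.etree.ElementTree as et
from collections import namedtuple
from openquake.commonlib.hazard_writers import HazardCurveXMLWriter

Loc = namedtuple('Loc', 'x y')
HazardCurve = namedtuple('HazardCurve', 'location poes')
metadata = dict(imt='PGA', imls=[0.01, 0.02],
                smlt_path='b1', gsimlt_path='gA')

writer = HazardCurveXMLWriter('/tmp/unused.xml', **metadata)
root = et.Element('nrml')
writer.add_hazard_curves(root, metadata,
                         [HazardCurve(Loc(10.0, 45.0), [0.9, 0.5])])
# root now holds <hazardCurves> with an <IMLs> child and one <hazardCurve>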
gem/oq-engine | openquake/commonlib/hazard_writers.py | EventBasedGMFXMLWriter.serialize | def serialize(self, data, fmt='%10.7E'):
"""
Serialize a collection of ground motion fields to XML.
:param data:
An iterable of "GMF set" objects.
Each "GMF set" object should:
* have an `investigation_time` attribute
            * have a `stochastic_event_set_id` attribute
* be iterable, yielding a sequence of "GMF" objects
Each "GMF" object should:
* have an `imt` attribute
* have an `sa_period` attribute (only if `imt` is 'SA')
* have an `sa_damping` attribute (only if `imt` is 'SA')
            * have an `event_id` attribute (to indicate which rupture
contributed to this gmf)
* be iterable, yielding a sequence of "GMF node" objects
Each "GMF node" object should have:
            * a `gmv` attribute (to indicate the ground motion value)
* `lon` and `lat` attributes (to indicate the geographical location
of the ground motion field)
"""
gmf_set_nodes = []
for gmf_set in data:
gmf_set_node = Node('gmfSet')
if gmf_set.investigation_time:
gmf_set_node['investigationTime'] = str(
gmf_set.investigation_time)
gmf_set_node['stochasticEventSetId'] = str(
gmf_set.stochastic_event_set_id)
gmf_set_node.nodes = gen_gmfs(gmf_set)
gmf_set_nodes.append(gmf_set_node)
gmf_container = Node('gmfCollection')
gmf_container[SM_TREE_PATH] = self.sm_lt_path
gmf_container[GSIM_TREE_PATH] = self.gsim_lt_path
gmf_container.nodes = gmf_set_nodes
with open(self.dest, 'wb') as dest:
nrml.write([gmf_container], dest, fmt) | python | def serialize(self, data, fmt='%10.7E'):
gmf_set_nodes = []
for gmf_set in data:
gmf_set_node = Node('gmfSet')
if gmf_set.investigation_time:
gmf_set_node['investigationTime'] = str(
gmf_set.investigation_time)
gmf_set_node['stochasticEventSetId'] = str(
gmf_set.stochastic_event_set_id)
gmf_set_node.nodes = gen_gmfs(gmf_set)
gmf_set_nodes.append(gmf_set_node)
gmf_container = Node('gmfCollection')
gmf_container[SM_TREE_PATH] = self.sm_lt_path
gmf_container[GSIM_TREE_PATH] = self.gsim_lt_path
gmf_container.nodes = gmf_set_nodes
with open(self.dest, 'wb') as dest:
nrml.write([gmf_container], dest, fmt) | [
"def",
"serialize",
"(",
"self",
",",
"data",
",",
"fmt",
"=",
"'%10.7E'",
")",
":",
"gmf_set_nodes",
"=",
"[",
"]",
"for",
"gmf_set",
"in",
"data",
":",
"gmf_set_node",
"=",
"Node",
"(",
"'gmfSet'",
")",
"if",
"gmf_set",
".",
"investigation_time",
":",
"gmf_set_node",
"[",
"'investigationTime'",
"]",
"=",
"str",
"(",
"gmf_set",
".",
"investigation_time",
")",
"gmf_set_node",
"[",
"'stochasticEventSetId'",
"]",
"=",
"str",
"(",
"gmf_set",
".",
"stochastic_event_set_id",
")",
"gmf_set_node",
".",
"nodes",
"=",
"gen_gmfs",
"(",
"gmf_set",
")",
"gmf_set_nodes",
".",
"append",
"(",
"gmf_set_node",
")",
"gmf_container",
"=",
"Node",
"(",
"'gmfCollection'",
")",
"gmf_container",
"[",
"SM_TREE_PATH",
"]",
"=",
"self",
".",
"sm_lt_path",
"gmf_container",
"[",
"GSIM_TREE_PATH",
"]",
"=",
"self",
".",
"gsim_lt_path",
"gmf_container",
".",
"nodes",
"=",
"gmf_set_nodes",
"with",
"open",
"(",
"self",
".",
"dest",
",",
"'wb'",
")",
"as",
"dest",
":",
"nrml",
".",
"write",
"(",
"[",
"gmf_container",
"]",
",",
"dest",
",",
"fmt",
")"
] | Serialize a collection of ground motion fields to XML.
:param data:
An iterable of "GMF set" objects.
Each "GMF set" object should:
* have an `investigation_time` attribute
            * have a `stochastic_event_set_id` attribute
* be iterable, yielding a sequence of "GMF" objects
Each "GMF" object should:
* have an `imt` attribute
* have an `sa_period` attribute (only if `imt` is 'SA')
* have an `sa_damping` attribute (only if `imt` is 'SA')
            * have an `event_id` attribute (to indicate which rupture
contributed to this gmf)
* be iterable, yielding a sequence of "GMF node" objects
Each "GMF node" object should have:
            * a `gmv` attribute (to indicate the ground motion value)
* `lon` and `lat` attributes (to indicate the geographical location
of the ground motion field) | [
"Serialize",
"a",
"collection",
"of",
"ground",
"motion",
"fields",
"to",
"XML",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/hazard_writers.py#L259-L303 |
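A sketch of the nested GMF-set / GMF / node structure the docstring describes (hypothetical stand-in classes; the writer's constructor arguments `dest`, `sm_lt_path` and `gsim_lt_path` are assumed from the attributes used in the code):

from collections import namedtuple
from openquake.commonlib.hazard_writers import EventBasedGMFXMLWriter

Loc = namedtuple('Loc', 'x y')
GmfNode = namedtuple('GmfNode', 'location gmv')

class Gmf(list):
    def __init__(self, imt, event_id, nodes):
        super().__init__(nodes)
        self.imt, self.event_id = imt, event_id  # sa_* only needed for SA

class GmfSet(list):
    def __init__(self, investigation_time, ses_id, gmfs):
        super().__init__(gmfs)
        self.investigation_time = investigation_time
        self.stochastic_event_set_id = ses_id

gmf_set = GmfSet(50.0, 1, [Gmf('PGA', 'rup-1',
                               [GmfNode(Loc(10.0, 45.0), 0.12)])])
writer = EventBasedGMFXMLWriter('/tmp/gmfs.xml', 'b1', 'gA')
writer.serialize([gmf_set])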
gem/oq-engine | openquake/commonlib/hazard_writers.py | SESXMLWriter.serialize | def serialize(self, data, investigation_time):
"""
Serialize a collection of stochastic event sets to XML.
:param data:
A dictionary src_group_id -> list of
:class:`openquake.commonlib.calc.Rupture` objects.
Each Rupture should have the following attributes:
* `rupid`
* `events_by_ses`
* `magnitude`
* `strike`
* `dip`
* `rake`
* `tectonic_region_type`
* `is_from_fault_source` (a `bool`)
* `is_multi_surface` (a `bool`)
* `lons`
* `lats`
* `depths`
If `is_from_fault_source` is `True`, the rupture originated from a
            simple or complex fault source. In this case, `lons`, `lats`, and
`depths` should all be 2D arrays (of uniform shape). These
coordinate triples represent nodes of the rupture mesh.
If `is_from_fault_source` is `False`, the rupture originated from a
point or area source. In this case, the rupture is represented by a
quadrilateral planar surface. This planar surface is defined by 3D
vertices. In this case, the rupture should have the following
attributes:
* `top_left_corner`
* `top_right_corner`
* `bottom_right_corner`
* `bottom_left_corner`
Each of these should be a triple of `lon`, `lat`, `depth`.
If `is_multi_surface` is `True`, the rupture originated from a
multi-surface source. In this case, `lons`, `lats`, and `depths`
should have uniform length. The length should be a multiple of 4,
where each segment of 4 represents the corner points of a planar
surface in the following order:
* top left
* top right
* bottom left
* bottom right
Each of these should be a triple of `lon`, `lat`, `depth`.
:param investigation_time:
Investigation time parameter specified in the job.ini
"""
with open(self.dest, 'wb') as fh:
root = et.Element('nrml')
ses_container = et.SubElement(root, 'ruptureCollection')
ses_container.set('investigationTime', str(investigation_time))
for grp_id in sorted(data):
attrs = dict(
id=grp_id,
tectonicRegion=data[grp_id][0].tectonic_region_type)
sg = et.SubElement(ses_container, 'ruptureGroup', attrs)
for rupture in data[grp_id]:
rupture_to_element(rupture, sg)
nrml.write(list(root), fh) | python | def serialize(self, data, investigation_time):
with open(self.dest, 'wb') as fh:
root = et.Element('nrml')
ses_container = et.SubElement(root, 'ruptureCollection')
ses_container.set('investigationTime', str(investigation_time))
for grp_id in sorted(data):
attrs = dict(
id=grp_id,
tectonicRegion=data[grp_id][0].tectonic_region_type)
sg = et.SubElement(ses_container, 'ruptureGroup', attrs)
for rupture in data[grp_id]:
rupture_to_element(rupture, sg)
nrml.write(list(root), fh) | [
"def",
"serialize",
"(",
"self",
",",
"data",
",",
"investigation_time",
")",
":",
"with",
"open",
"(",
"self",
".",
"dest",
",",
"'wb'",
")",
"as",
"fh",
":",
"root",
"=",
"et",
".",
"Element",
"(",
"'nrml'",
")",
"ses_container",
"=",
"et",
".",
"SubElement",
"(",
"root",
",",
"'ruptureCollection'",
")",
"ses_container",
".",
"set",
"(",
"'investigationTime'",
",",
"str",
"(",
"investigation_time",
")",
")",
"for",
"grp_id",
"in",
"sorted",
"(",
"data",
")",
":",
"attrs",
"=",
"dict",
"(",
"id",
"=",
"grp_id",
",",
"tectonicRegion",
"=",
"data",
"[",
"grp_id",
"]",
"[",
"0",
"]",
".",
"tectonic_region_type",
")",
"sg",
"=",
"et",
".",
"SubElement",
"(",
"ses_container",
",",
"'ruptureGroup'",
",",
"attrs",
")",
"for",
"rupture",
"in",
"data",
"[",
"grp_id",
"]",
":",
"rupture_to_element",
"(",
"rupture",
",",
"sg",
")",
"nrml",
".",
"write",
"(",
"list",
"(",
"root",
")",
",",
"fh",
")"
] | Serialize a collection of stochastic event sets to XML.
:param data:
A dictionary src_group_id -> list of
:class:`openquake.commonlib.calc.Rupture` objects.
Each Rupture should have the following attributes:
* `rupid`
* `events_by_ses`
* `magnitude`
* `strike`
* `dip`
* `rake`
* `tectonic_region_type`
* `is_from_fault_source` (a `bool`)
* `is_multi_surface` (a `bool`)
* `lons`
* `lats`
* `depths`
If `is_from_fault_source` is `True`, the rupture originated from a
            simple or complex fault source. In this case, `lons`, `lats`, and
`depths` should all be 2D arrays (of uniform shape). These
coordinate triples represent nodes of the rupture mesh.
If `is_from_fault_source` is `False`, the rupture originated from a
point or area source. In this case, the rupture is represented by a
quadrilateral planar surface. This planar surface is defined by 3D
vertices. In this case, the rupture should have the following
attributes:
* `top_left_corner`
* `top_right_corner`
* `bottom_right_corner`
* `bottom_left_corner`
Each of these should be a triple of `lon`, `lat`, `depth`.
If `is_multi_surface` is `True`, the rupture originated from a
multi-surface source. In this case, `lons`, `lats`, and `depths`
should have uniform length. The length should be a multiple of 4,
where each segment of 4 represents the corner points of a planar
surface in the following order:
* top left
* top right
* bottom left
* bottom right
Each of these should be a triple of `lon`, `lat`, `depth`.
:param investigation_time:
Investigation time parameter specified in the job.ini | [
"Serialize",
"a",
"collection",
"of",
"stochastic",
"event",
"sets",
"to",
"XML",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/hazard_writers.py#L440-L507 |
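A sketch of serializing one rupture group; the rupture object reuses the attribute contract listed above (all values hypothetical, and a single-argument constructor taking the destination path is assumed):

from types import SimpleNamespace
from openquake.commonlib.hazard_writers import SESXMLWriter

rup = SimpleNamespace(
    typology='singlePlaneRupture', rupid='1', seed=42,
    events_by_ses={'1': {'eid': [11, 12]}},
    magnitude=6.5, strike=30.0, dip=45.0, rake=90.0,
    tectonic_region_type='Active Shallow Crust',
    hypocenter=SimpleNamespace(x='10.0', y='45.0', z='8.0'),
    is_from_fault_source=False, is_gridded_surface=False,
    is_multi_surface=False,
    top_left_corner=(9.9, 45.1, 5.0), top_right_corner=(10.1, 45.1, 5.0),
    bottom_left_corner=(9.9, 44.9, 12.0),
    bottom_right_corner=(10.1, 44.9, 12.0))

SESXMLWriter('/tmp/ruptures.xml').serialize(
    {'0': [rup]}, investigation_time=50.0)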
gem/oq-engine | openquake/commonlib/hazard_writers.py | HazardMapXMLWriter.serialize | def serialize(self, data):
"""
Serialize hazard map data to XML.
See :meth:`HazardMapWriter.serialize` for details about the expected
input.
"""
with open(self.dest, 'wb') as fh:
root = et.Element('nrml')
hazard_map = et.SubElement(root, 'hazardMap')
_set_metadata(hazard_map, self.metadata, _ATTR_MAP)
for lon, lat, iml in data:
node = et.SubElement(hazard_map, 'node')
node.set('lon', str(lon))
node.set('lat', str(lat))
node.set('iml', str(iml))
nrml.write(list(root), fh) | python | def serialize(self, data):
with open(self.dest, 'wb') as fh:
root = et.Element('nrml')
hazard_map = et.SubElement(root, 'hazardMap')
_set_metadata(hazard_map, self.metadata, _ATTR_MAP)
for lon, lat, iml in data:
node = et.SubElement(hazard_map, 'node')
node.set('lon', str(lon))
node.set('lat', str(lat))
node.set('iml', str(iml))
nrml.write(list(root), fh) | [
"def",
"serialize",
"(",
"self",
",",
"data",
")",
":",
"with",
"open",
"(",
"self",
".",
"dest",
",",
"'wb'",
")",
"as",
"fh",
":",
"root",
"=",
"et",
".",
"Element",
"(",
"'nrml'",
")",
"hazard_map",
"=",
"et",
".",
"SubElement",
"(",
"root",
",",
"'hazardMap'",
")",
"_set_metadata",
"(",
"hazard_map",
",",
"self",
".",
"metadata",
",",
"_ATTR_MAP",
")",
"for",
"lon",
",",
"lat",
",",
"iml",
"in",
"data",
":",
"node",
"=",
"et",
".",
"SubElement",
"(",
"hazard_map",
",",
"'node'",
")",
"node",
".",
"set",
"(",
"'lon'",
",",
"str",
"(",
"lon",
")",
")",
"node",
".",
"set",
"(",
"'lat'",
",",
"str",
"(",
"lat",
")",
")",
"node",
".",
"set",
"(",
"'iml'",
",",
"str",
"(",
"iml",
")",
")",
"nrml",
".",
"write",
"(",
"list",
"(",
"root",
")",
",",
"fh",
")"
] | Serialize hazard map data to XML.
See :meth:`HazardMapWriter.serialize` for details about the expected
input. | [
"Serialize",
"hazard",
"map",
"data",
"to",
"XML",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/hazard_writers.py#L560-L578 |
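The element layout produced by this method can be reproduced with the standard library alone; the triples below are illustrative, and the real writer additionally sets metadata and writes through the engine's nrml helper:
import xml.etree.ElementTree as et

root = et.Element('nrml')
hazard_map = et.SubElement(root, 'hazardMap')
for lon, lat, iml in [(0.0, 0.0, 0.1), (0.5, 0.5, 0.2)]:
    node = et.SubElement(hazard_map, 'node')
    node.set('lon', str(lon))
    node.set('lat', str(lat))
    node.set('iml', str(iml))
print(et.tostring(root).decode())   # one <node .../> per (lon, lat, iml)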
gem/oq-engine | openquake/commonlib/hazard_writers.py | DisaggXMLWriter.serialize | def serialize(self, data):
"""
:param data:
A sequence of data where each datum has the following attributes:
* matrix: N-dimensional numpy array containing the disaggregation
histogram.
* dim_labels: A list of strings which label the dimensions of a
given histogram. For example, for a Magnitude-Distance-Epsilon
histogram, we would expect `dim_labels` to be
``['Mag', 'Dist', 'Eps']``.
* poe: The disaggregation Probability of Exceedance level for which
these results were produced.
* iml: Intensity measure level, interpolated from the source hazard
curve at the given ``poe``.
"""
with open(self.dest, 'wb') as fh, floatformat('%.6E'):
root = et.Element('nrml')
diss_matrices = et.SubElement(root, 'disaggMatrices')
_set_metadata(diss_matrices, self.metadata, _ATTR_MAP)
transform = lambda val: ', '.join(map(scientificformat, val))
_set_metadata(diss_matrices, self.metadata, self.BIN_EDGE_ATTR_MAP,
transform=transform)
for result in data:
diss_matrix = et.SubElement(diss_matrices, 'disaggMatrix')
# Check that we have bin edges defined for each dimension label
# (mag, dist, lon, lat, eps, TRT)
for label in result.dim_labels:
bin_edge_attr = self.DIM_LABEL_TO_BIN_EDGE_MAP.get(label)
assert self.metadata.get(bin_edge_attr) is not None, (
"Writer is missing '%s' metadata" % bin_edge_attr
)
result_type = ','.join(result.dim_labels)
diss_matrix.set('type', result_type)
dims = ','.join(str(x) for x in result.matrix.shape)
diss_matrix.set('dims', dims)
diss_matrix.set('poE', scientificformat(result.poe))
diss_matrix.set('iml', scientificformat(result.iml))
for idxs, value in numpy.ndenumerate(result.matrix):
prob = et.SubElement(diss_matrix, 'prob')
index = ','.join([str(x) for x in idxs])
prob.set('index', index)
prob.set('value', scientificformat(value))
nrml.write(list(root), fh) | python | def serialize(self, data):
with open(self.dest, 'wb') as fh, floatformat('%.6E'):
root = et.Element('nrml')
diss_matrices = et.SubElement(root, 'disaggMatrices')
_set_metadata(diss_matrices, self.metadata, _ATTR_MAP)
transform = lambda val: ', '.join(map(scientificformat, val))
_set_metadata(diss_matrices, self.metadata, self.BIN_EDGE_ATTR_MAP,
transform=transform)
for result in data:
diss_matrix = et.SubElement(diss_matrices, 'disaggMatrix')
for label in result.dim_labels:
bin_edge_attr = self.DIM_LABEL_TO_BIN_EDGE_MAP.get(label)
assert self.metadata.get(bin_edge_attr) is not None, (
"Writer is missing '%s' metadata" % bin_edge_attr
)
result_type = ','.join(result.dim_labels)
diss_matrix.set('type', result_type)
dims = ','.join(str(x) for x in result.matrix.shape)
diss_matrix.set('dims', dims)
diss_matrix.set('poE', scientificformat(result.poe))
diss_matrix.set('iml', scientificformat(result.iml))
for idxs, value in numpy.ndenumerate(result.matrix):
prob = et.SubElement(diss_matrix, 'prob')
index = ','.join([str(x) for x in idxs])
prob.set('index', index)
prob.set('value', scientificformat(value))
nrml.write(list(root), fh) | [
"def",
"serialize",
"(",
"self",
",",
"data",
")",
":",
"with",
"open",
"(",
"self",
".",
"dest",
",",
"'wb'",
")",
"as",
"fh",
",",
"floatformat",
"(",
"'%.6E'",
")",
":",
"root",
"=",
"et",
".",
"Element",
"(",
"'nrml'",
")",
"diss_matrices",
"=",
"et",
".",
"SubElement",
"(",
"root",
",",
"'disaggMatrices'",
")",
"_set_metadata",
"(",
"diss_matrices",
",",
"self",
".",
"metadata",
",",
"_ATTR_MAP",
")",
"transform",
"=",
"lambda",
"val",
":",
"', '",
".",
"join",
"(",
"map",
"(",
"scientificformat",
",",
"val",
")",
")",
"_set_metadata",
"(",
"diss_matrices",
",",
"self",
".",
"metadata",
",",
"self",
".",
"BIN_EDGE_ATTR_MAP",
",",
"transform",
"=",
"transform",
")",
"for",
"result",
"in",
"data",
":",
"diss_matrix",
"=",
"et",
".",
"SubElement",
"(",
"diss_matrices",
",",
"'disaggMatrix'",
")",
"# Check that we have bin edges defined for each dimension label",
"# (mag, dist, lon, lat, eps, TRT)",
"for",
"label",
"in",
"result",
".",
"dim_labels",
":",
"bin_edge_attr",
"=",
"self",
".",
"DIM_LABEL_TO_BIN_EDGE_MAP",
".",
"get",
"(",
"label",
")",
"assert",
"self",
".",
"metadata",
".",
"get",
"(",
"bin_edge_attr",
")",
"is",
"not",
"None",
",",
"(",
"\"Writer is missing '%s' metadata\"",
"%",
"bin_edge_attr",
")",
"result_type",
"=",
"','",
".",
"join",
"(",
"result",
".",
"dim_labels",
")",
"diss_matrix",
".",
"set",
"(",
"'type'",
",",
"result_type",
")",
"dims",
"=",
"','",
".",
"join",
"(",
"str",
"(",
"x",
")",
"for",
"x",
"in",
"result",
".",
"matrix",
".",
"shape",
")",
"diss_matrix",
".",
"set",
"(",
"'dims'",
",",
"dims",
")",
"diss_matrix",
".",
"set",
"(",
"'poE'",
",",
"scientificformat",
"(",
"result",
".",
"poe",
")",
")",
"diss_matrix",
".",
"set",
"(",
"'iml'",
",",
"scientificformat",
"(",
"result",
".",
"iml",
")",
")",
"for",
"idxs",
",",
"value",
"in",
"numpy",
".",
"ndenumerate",
"(",
"result",
".",
"matrix",
")",
":",
"prob",
"=",
"et",
".",
"SubElement",
"(",
"diss_matrix",
",",
"'prob'",
")",
"index",
"=",
"','",
".",
"join",
"(",
"[",
"str",
"(",
"x",
")",
"for",
"x",
"in",
"idxs",
"]",
")",
"prob",
".",
"set",
"(",
"'index'",
",",
"index",
")",
"prob",
".",
"set",
"(",
"'value'",
",",
"scientificformat",
"(",
"value",
")",
")",
"nrml",
".",
"write",
"(",
"list",
"(",
"root",
")",
",",
"fh",
")"
] | :param data:
A sequence of data where each datum has the following attributes:
* matrix: N-dimensional numpy array containing the disaggregation
histogram.
* dim_labels: A list of strings which label the dimensions of a
given histogram. For example, for a Magnitude-Distance-Epsilon
histogram, we would expect `dim_labels` to be
``['Mag', 'Dist', 'Eps']``.
* poe: The disaggregation Probability of Exceedance level for which
these results were produced.
* iml: Intensity measure level, interpolated from the source hazard
curve at the given ``poe``. | [
":",
"param",
"data",
":"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/hazard_writers.py#L639-L695 |
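The inner loop relies on numpy.ndenumerate to flatten an N-dimensional histogram into index/value pairs; a toy matrix shows the traversal order:
import numpy

matrix = numpy.array([[0.1, 0.2], [0.3, 0.4]])
for idxs, value in numpy.ndenumerate(matrix):
    print(','.join(str(x) for x in idxs), value)
# 0,0 0.1 then 0,1 0.2 then 1,0 0.3 then 1,1 0.4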
gem/oq-engine | openquake/commonlib/hazard_writers.py | UHSXMLWriter.serialize | def serialize(self, data):
"""
Write a sequence of uniform hazard spectra to the specified file.
:param data:
Iterable of UHS data. Each datum must be an object with the
following attributes:
* imls: A sequence of Intensity Measure Levels
* location: An object representing the location of the curve; must
have `x` and `y` to represent lon and lat, respectively.
"""
gml_ns = nrml.SERIALIZE_NS_MAP['gml']
with open(self.dest, 'wb') as fh:
root = et.Element('nrml')
uh_spectra = et.SubElement(root, 'uniformHazardSpectra')
_set_metadata(uh_spectra, self.metadata, _ATTR_MAP)
periods_elem = et.SubElement(uh_spectra, 'periods')
periods_elem.text = ' '.join([str(x)
for x in self.metadata['periods']])
for uhs in data:
uhs_elem = et.SubElement(uh_spectra, 'uhs')
gml_point = et.SubElement(uhs_elem, '{%s}Point' % gml_ns)
gml_pos = et.SubElement(gml_point, '{%s}pos' % gml_ns)
gml_pos.text = '%s %s' % (uhs.location.x, uhs.location.y)
imls_elem = et.SubElement(uhs_elem, 'IMLs')
imls_elem.text = ' '.join(['%10.7E' % x for x in uhs.imls])
nrml.write(list(root), fh) | python | def serialize(self, data):
gml_ns = nrml.SERIALIZE_NS_MAP['gml']
with open(self.dest, 'wb') as fh:
root = et.Element('nrml')
uh_spectra = et.SubElement(root, 'uniformHazardSpectra')
_set_metadata(uh_spectra, self.metadata, _ATTR_MAP)
periods_elem = et.SubElement(uh_spectra, 'periods')
periods_elem.text = ' '.join([str(x)
for x in self.metadata['periods']])
for uhs in data:
uhs_elem = et.SubElement(uh_spectra, 'uhs')
gml_point = et.SubElement(uhs_elem, '{%s}Point' % gml_ns)
gml_pos = et.SubElement(gml_point, '{%s}pos' % gml_ns)
gml_pos.text = '%s %s' % (uhs.location.x, uhs.location.y)
imls_elem = et.SubElement(uhs_elem, 'IMLs')
imls_elem.text = ' '.join(['%10.7E' % x for x in uhs.imls])
nrml.write(list(root), fh) | [
"def",
"serialize",
"(",
"self",
",",
"data",
")",
":",
"gml_ns",
"=",
"nrml",
".",
"SERIALIZE_NS_MAP",
"[",
"'gml'",
"]",
"with",
"open",
"(",
"self",
".",
"dest",
",",
"'wb'",
")",
"as",
"fh",
":",
"root",
"=",
"et",
".",
"Element",
"(",
"'nrml'",
")",
"uh_spectra",
"=",
"et",
".",
"SubElement",
"(",
"root",
",",
"'uniformHazardSpectra'",
")",
"_set_metadata",
"(",
"uh_spectra",
",",
"self",
".",
"metadata",
",",
"_ATTR_MAP",
")",
"periods_elem",
"=",
"et",
".",
"SubElement",
"(",
"uh_spectra",
",",
"'periods'",
")",
"periods_elem",
".",
"text",
"=",
"' '",
".",
"join",
"(",
"[",
"str",
"(",
"x",
")",
"for",
"x",
"in",
"self",
".",
"metadata",
"[",
"'periods'",
"]",
"]",
")",
"for",
"uhs",
"in",
"data",
":",
"uhs_elem",
"=",
"et",
".",
"SubElement",
"(",
"uh_spectra",
",",
"'uhs'",
")",
"gml_point",
"=",
"et",
".",
"SubElement",
"(",
"uhs_elem",
",",
"'{%s}Point'",
"%",
"gml_ns",
")",
"gml_pos",
"=",
"et",
".",
"SubElement",
"(",
"gml_point",
",",
"'{%s}pos'",
"%",
"gml_ns",
")",
"gml_pos",
".",
"text",
"=",
"'%s %s'",
"%",
"(",
"uhs",
".",
"location",
".",
"x",
",",
"uhs",
".",
"location",
".",
"y",
")",
"imls_elem",
"=",
"et",
".",
"SubElement",
"(",
"uhs_elem",
",",
"'IMLs'",
")",
"imls_elem",
".",
"text",
"=",
"' '",
".",
"join",
"(",
"[",
"'%10.7E'",
"%",
"x",
"for",
"x",
"in",
"uhs",
".",
"imls",
"]",
")",
"nrml",
".",
"write",
"(",
"list",
"(",
"root",
")",
",",
"fh",
")"
] | Write a sequence of uniform hazard spectra to the specified file.
:param data:
Iterable of UHS data. Each datum must be an object with the
following attributes:
* imls: A sequence of Intensity Measure Levels
* location: An object representing the location of the curve; must
have `x` and `y` to represent lon and lat, respectively. | [
"Write",
"a",
"sequence",
"of",
"uniform",
"hazard",
"spectra",
"to",
"the",
"specified",
"file",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/hazard_writers.py#L728-L761 |
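A hedged sketch of the per-site inputs this writer expects, using namedtuples as stand-ins for the engine's own objects with `imls` and a `location` exposing `x` (lon) and `y` (lat):
import collections

Location = collections.namedtuple('Location', 'x y')
UHS = collections.namedtuple('UHS', 'imls location')

data = [UHS(imls=[0.10, 0.25, 0.40], location=Location(x=10.0, y=45.0))]
print(' '.join('%10.7E' % x for x in data[0].imls))   # same IML formatting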
gem/oq-engine | openquake/hmtk/plotting/seismicity/max_magnitude/cumulative_moment.py | plot_cumulative_moment | def plot_cumulative_moment(year, mag, figure_size=(8, 6),
filename=None, filetype='png', dpi=300, ax=None):
'''Calculation of Mmax using a Cumulative Moment approach, adapted from
the cumulative strain energy method of Makropoulos & Burton (1983)
:param year: Year of Earthquake
:type year: numpy.ndarray
:param mag: Magnitude of Earthquake
:type mag: numpy.ndarray
:keyword iplot: Include cumulative moment plot
:type iplot: Boolean
:return mmax: Returns Maximum Magnitude
:rtype mmax: Float
'''
# Calculate seismic moment
m_o = 10. ** (9.05 + 1.5 * mag)
year_range = np.arange(np.min(year), np.max(year) + 1, 1)
nyr = np.int(np.shape(year_range)[0])
morate = np.zeros(nyr, dtype=float)
# Get moment release per year
for loc, tyr in enumerate(year_range):
idx = np.abs(year - tyr) < 1E-5
if np.sum(idx) > 0:
# Some moment release in that year
morate[loc] = np.sum(m_o[idx])
ave_morate = np.sum(morate) / float(nyr)
# Average moment rate vector
exp_morate = np.cumsum(ave_morate * np.ones(nyr))
if ax is None:
fig, ax = plt.subplots(figsize=figure_size)
else:
fig = ax.get_figure()
ax.step(year_range, np.cumsum(morate), 'b-', linewidth=2)
ax.plot(year_range, exp_morate, 'r-', linewidth=2)
# Get offsets
upper_morate = exp_morate + (np.max(np.cumsum(morate) - exp_morate))
lower_morate = exp_morate + (np.min(np.cumsum(morate) - exp_morate))
ax.plot(year_range, upper_morate, 'r--', linewidth=1)
ax.plot(year_range, lower_morate, 'r--', linewidth=1)
ax.axis([np.min(year), np.max(year), 0.0, np.sum(morate)])
_save_image(fig, filename, filetype, dpi) | python | def plot_cumulative_moment(year, mag, figure_size=(8, 6),
filename=None, filetype='png', dpi=300, ax=None):
m_o = 10. ** (9.05 + 1.5 * mag)
year_range = np.arange(np.min(year), np.max(year) + 1, 1)
nyr = np.int(np.shape(year_range)[0])
morate = np.zeros(nyr, dtype=float)
for loc, tyr in enumerate(year_range):
idx = np.abs(year - tyr) < 1E-5
if np.sum(idx) > 0:
morate[loc] = np.sum(m_o[idx])
ave_morate = np.sum(morate) / float(nyr)
exp_morate = np.cumsum(ave_morate * np.ones(nyr))
if ax is None:
fig, ax = plt.subplots(figsize=figure_size)
else:
fig = ax.get_figure()
ax.step(year_range, np.cumsum(morate), 'b-', linewidth=2)
ax.plot(year_range, exp_morate, 'r-', linewidth=2)
upper_morate = exp_morate + (np.max(np.cumsum(morate) - exp_morate))
lower_morate = exp_morate + (np.min(np.cumsum(morate) - exp_morate))
ax.plot(year_range, upper_morate, 'r--', linewidth=1)
ax.plot(year_range, lower_morate, 'r--', linewidth=1)
ax.axis([np.min(year), np.max(year), 0.0, np.sum(morate)])
_save_image(fig, filename, filetype, dpi) | [
"def",
"plot_cumulative_moment",
"(",
"year",
",",
"mag",
",",
"figure_size",
"=",
"(",
"8",
",",
"6",
")",
",",
"filename",
"=",
"None",
",",
"filetype",
"=",
"'png'",
",",
"dpi",
"=",
"300",
",",
"ax",
"=",
"None",
")",
":",
"# Calculate seismic moment",
"m_o",
"=",
"10.",
"**",
"(",
"9.05",
"+",
"1.5",
"*",
"mag",
")",
"year_range",
"=",
"np",
".",
"arange",
"(",
"np",
".",
"min",
"(",
"year",
")",
",",
"np",
".",
"max",
"(",
"year",
")",
"+",
"1",
",",
"1",
")",
"nyr",
"=",
"np",
".",
"int",
"(",
"np",
".",
"shape",
"(",
"year_range",
")",
"[",
"0",
"]",
")",
"morate",
"=",
"np",
".",
"zeros",
"(",
"nyr",
",",
"dtype",
"=",
"float",
")",
"# Get moment release per year",
"for",
"loc",
",",
"tyr",
"in",
"enumerate",
"(",
"year_range",
")",
":",
"idx",
"=",
"np",
".",
"abs",
"(",
"year",
"-",
"tyr",
")",
"<",
"1E-5",
"if",
"np",
".",
"sum",
"(",
"idx",
")",
">",
"0",
":",
"# Some moment release in that year",
"morate",
"[",
"loc",
"]",
"=",
"np",
".",
"sum",
"(",
"m_o",
"[",
"idx",
"]",
")",
"ave_morate",
"=",
"np",
".",
"sum",
"(",
"morate",
")",
"/",
"float",
"(",
"nyr",
")",
"# Average moment rate vector",
"exp_morate",
"=",
"np",
".",
"cumsum",
"(",
"ave_morate",
"*",
"np",
".",
"ones",
"(",
"nyr",
")",
")",
"if",
"ax",
"is",
"None",
":",
"fig",
",",
"ax",
"=",
"plt",
".",
"subplots",
"(",
"figsize",
"=",
"figure_size",
")",
"else",
":",
"fig",
"=",
"ax",
".",
"get_figure",
"(",
")",
"ax",
".",
"step",
"(",
"year_range",
",",
"np",
".",
"cumsum",
"(",
"morate",
")",
",",
"'b-'",
",",
"linewidth",
"=",
"2",
")",
"ax",
".",
"plot",
"(",
"year_range",
",",
"exp_morate",
",",
"'r-'",
",",
"linewidth",
"=",
"2",
")",
"# Get offsets",
"upper_morate",
"=",
"exp_morate",
"+",
"(",
"np",
".",
"max",
"(",
"np",
".",
"cumsum",
"(",
"morate",
")",
"-",
"exp_morate",
")",
")",
"lower_morate",
"=",
"exp_morate",
"+",
"(",
"np",
".",
"min",
"(",
"np",
".",
"cumsum",
"(",
"morate",
")",
"-",
"exp_morate",
")",
")",
"ax",
".",
"plot",
"(",
"year_range",
",",
"upper_morate",
",",
"'r--'",
",",
"linewidth",
"=",
"1",
")",
"ax",
".",
"plot",
"(",
"year_range",
",",
"lower_morate",
",",
"'r--'",
",",
"linewidth",
"=",
"1",
")",
"ax",
".",
"axis",
"(",
"[",
"np",
".",
"min",
"(",
"year",
")",
",",
"np",
".",
"max",
"(",
"year",
")",
",",
"0.0",
",",
"np",
".",
"sum",
"(",
"morate",
")",
"]",
")",
"_save_image",
"(",
"fig",
",",
"filename",
",",
"filetype",
",",
"dpi",
")"
] | Calculation of Mmax using a Cumulative Moment approach, adapted from
the cumulative strain energy method of Makropoulos & Burton (1983)
:param year: Year of Earthquake
:type year: numpy.ndarray
:param mag: Magnitude of Earthquake
:type mag: numpy.ndarray
:keyword iplot: Include cumulative moment plot
:type iplot: Boolean
:return mmax: Returns Maximum Magnitude
:rtype mmax: Float | [
"Calculation",
"of",
"Mmax",
"using",
"aCumulative",
"Moment",
"approach",
"adapted",
"from",
"the",
"cumulative",
"strain",
"energy",
"method",
"of",
"Makropoulos",
"&",
"Burton",
"(",
"1983",
")",
":",
"param",
"year",
":",
"Year",
"of",
"Earthquake",
":",
"type",
"year",
":",
"numpy",
".",
"ndarray",
":",
"param",
"mag",
":",
"Magnitude",
"of",
"Earthquake",
":",
"type",
"mag",
":",
"numpy",
".",
"ndarray",
":",
"keyword",
"iplot",
":",
"Include",
"cumulative",
"moment",
"plot",
":",
"type",
"iplot",
":",
"Boolean",
":",
"return",
"mmax",
":",
"Returns",
"Maximum",
"Magnitude",
":",
"rtype",
"mmax",
":",
"Float"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/seismicity/max_magnitude/cumulative_moment.py#L58-L100 |
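The moment conversion and annual binning above, applied to a toy catalogue; the constant 9.05 appears to be a Hanks and Kanamori style scaling giving moment in N m, stated here as an assumption:
import numpy as np

year = np.array([2000, 2000, 2001, 2003])
mag = np.array([5.0, 6.0, 5.5, 6.5])
m_o = 10. ** (9.05 + 1.5 * mag)              # seismic moment per event
annual = [m_o[year == y].sum() for y in range(2000, 2004)]
print(np.cumsum(annual))                     # cumulative moment release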
gem/oq-engine | openquake/hmtk/seismicity/max_magnitude/kijko_sellevol_bayes.py | check_config | def check_config(config, data):
'''Check config file inputs
:param dict config:
Configuration settings for the function
'''
essential_keys = ['input_mmin', 'b-value', 'sigma-b']
for key in essential_keys:
if not key in config.keys():
raise ValueError('For KijkoSellevolBayes the key %s needs to '
'be set in the configuration' % key)
if 'tolerance' not in config.keys() or not config['tolerance']:
config['tolerance'] = 1E-5
if not config.get('maximum_iterations', False):
config['maximum_iterations'] = 1000
if config['input_mmin'] < np.min(data['magnitude']):
config['input_mmin'] = np.min(data['magnitude'])
if fabs(config['sigma-b']) < 1E-15:
raise ValueError('Sigma-b must be greater than zero!')
return config | python | def check_config(config, data):
essential_keys = ['input_mmin', 'b-value', 'sigma-b']
for key in essential_keys:
if not key in config.keys():
raise ValueError('For KijkoSellevolBayes the key %s needs to '
'be set in the configuration' % key)
if 'tolerance' not in config.keys() or not config['tolerance']:
config['tolerance'] = 1E-5
if not config.get('maximum_iterations', False):
config['maximum_iterations'] = 1000
if config['input_mmin'] < np.min(data['magnitude']):
config['input_mmin'] = np.min(data['magnitude'])
if fabs(config['sigma-b']) < 1E-15:
raise ValueError('Sigma-b must be greater than zero!')
return config | [
"def",
"check_config",
"(",
"config",
",",
"data",
")",
":",
"essential_keys",
"=",
"[",
"'input_mmin'",
",",
"'b-value'",
",",
"'sigma-b'",
"]",
"for",
"key",
"in",
"essential_keys",
":",
"if",
"not",
"key",
"in",
"config",
".",
"keys",
"(",
")",
":",
"raise",
"ValueError",
"(",
"'For KijkoSellevolBayes the key %s needs to '",
"'be set in the configuation'",
"%",
"key",
")",
"if",
"'tolerance'",
"not",
"in",
"config",
".",
"keys",
"(",
")",
"or",
"not",
"config",
"[",
"'tolerance'",
"]",
":",
"config",
"[",
"'tolerance'",
"]",
"=",
"1E-5",
"if",
"not",
"config",
".",
"get",
"(",
"'maximum_iterations'",
",",
"False",
")",
":",
"config",
"[",
"'maximum_iterations'",
"]",
"=",
"1000",
"if",
"config",
"[",
"'input_mmin'",
"]",
"<",
"np",
".",
"min",
"(",
"data",
"[",
"'magnitude'",
"]",
")",
":",
"config",
"[",
"'input_mmin'",
"]",
"=",
"np",
".",
"min",
"(",
"data",
"[",
"'magnitude'",
"]",
")",
"if",
"fabs",
"(",
"config",
"[",
"'sigma-b'",
"]",
"<",
"1E-15",
")",
":",
"raise",
"ValueError",
"(",
"'Sigma-b must be greater than zero!'",
")",
"return",
"config"
] | Check config file inputs
:param dict config:
Configuration settings for the function | [
"Check",
"config",
"file",
"inputs"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/max_magnitude/kijko_sellevol_bayes.py#L60-L84 |
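A hedged usage sketch; the import path is inferred from the file location in the URL above and the input values are illustrative:
import numpy as np
from openquake.hmtk.seismicity.max_magnitude.kijko_sellevol_bayes import \
    check_config

config = {'input_mmin': 4.0, 'b-value': 1.0, 'sigma-b': 0.05}
data = {'magnitude': np.array([4.5, 5.0, 5.5])}
config = check_config(config, data)
print(config['tolerance'], config['maximum_iterations'])   # 1e-05 1000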
gem/oq-engine | openquake/hazardlib/gsim/toro_1997.py | ToroEtAl1997MblgNSHMP2008._compute_mean | def _compute_mean(self, C, mag, rjb):
"""
Compute ground motion mean value.
"""
# line 1686 in hazgridXnga2.f
ffc = self._compute_finite_fault_correction(mag)
d = np.sqrt(rjb ** 2 + (C['c7'] ** 2) * (ffc ** 2))
# lines 1663, 1694-1696 in hazgridXnga2.f
mean = (
C['c1'] + C['c2'] * (mag - 6.) +
C['c3'] * ((mag - 6.) ** 2) -
C['c4'] * np.log(d) - C['c6'] * d
)
factor = np.log(rjb / 100.)
idx = factor > 0
mean[idx] -= (C['c5'] - C['c4']) * factor[idx]
return mean | python | def _compute_mean(self, C, mag, rjb):
ffc = self._compute_finite_fault_correction(mag)
d = np.sqrt(rjb ** 2 + (C['c7'] ** 2) * (ffc ** 2))
mean = (
C['c1'] + C['c2'] * (mag - 6.) +
C['c3'] * ((mag - 6.) ** 2) -
C['c4'] * np.log(d) - C['c6'] * d
)
factor = np.log(rjb / 100.)
idx = factor > 0
mean[idx] -= (C['c5'] - C['c4']) * factor[idx]
return mean | [
"def",
"_compute_mean",
"(",
"self",
",",
"C",
",",
"mag",
",",
"rjb",
")",
":",
"# line 1686 in hazgridXnga2.f",
"ffc",
"=",
"self",
".",
"_compute_finite_fault_correction",
"(",
"mag",
")",
"d",
"=",
"np",
".",
"sqrt",
"(",
"rjb",
"**",
"2",
"+",
"(",
"C",
"[",
"'c7'",
"]",
"**",
"2",
")",
"*",
"(",
"ffc",
"**",
"2",
")",
")",
"# lines 1663, 1694-1696 in hazgridXnga2.f",
"mean",
"=",
"(",
"C",
"[",
"'c1'",
"]",
"+",
"C",
"[",
"'c2'",
"]",
"*",
"(",
"mag",
"-",
"6.",
")",
"+",
"C",
"[",
"'c3'",
"]",
"*",
"(",
"(",
"mag",
"-",
"6.",
")",
"**",
"2",
")",
"-",
"C",
"[",
"'c4'",
"]",
"*",
"np",
".",
"log",
"(",
"d",
")",
"-",
"C",
"[",
"'c6'",
"]",
"*",
"d",
")",
"factor",
"=",
"np",
".",
"log",
"(",
"rjb",
"/",
"100.",
")",
"idx",
"=",
"factor",
">",
"0",
"mean",
"[",
"idx",
"]",
"-=",
"(",
"C",
"[",
"'c5'",
"]",
"-",
"C",
"[",
"'c4'",
"]",
")",
"*",
"factor",
"[",
"idx",
"]",
"return",
"mean"
] | Compute ground motion mean value. | [
"Compute",
"ground",
"motion",
"mean",
"value",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/toro_1997.py#L110-L129 |
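The effective-distance term in isolation; the c7 value below is a placeholder, not the published coefficient, and the finite-fault correction mirrors the companion method that follows:
import numpy as np

c7, mw = 9.3, 5.8                        # both values illustrative
ffc = np.exp(-1.25 + 0.227 * mw)         # single-conversion analogue
rjb = np.array([10., 50., 200.])
d = np.sqrt(rjb ** 2 + (c7 ** 2) * (ffc ** 2))
print(d)                                 # d stays finite as rjb -> 0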
gem/oq-engine | openquake/hazardlib/gsim/toro_1997.py | ToroEtAl1997MblgNSHMP2008._compute_finite_fault_correction | def _compute_finite_fault_correction(self, mag):
"""
Compute finite fault correction term as geometric mean of correction
terms obtained from Mw values calculated with Johnston 1996 and
Atkinson and Boore 1987 conversion equations.
Implement equations as in lines 1653 - 1658 in hazgridXnga2.f
"""
mw_j96 = mblg_to_mw_johnston_96(mag)
mw_ab87 = mblg_to_mw_atkinson_boore_87(mag)
t1 = np.exp(-1.25 + 0.227 * mw_j96)
t2 = np.exp(-1.25 + 0.227 * mw_ab87)
return np.sqrt(t1 * t2) | python | def _compute_finite_fault_correction(self, mag):
mw_j96 = mblg_to_mw_johnston_96(mag)
mw_ab87 = mblg_to_mw_atkinson_boore_87(mag)
t1 = np.exp(-1.25 + 0.227 * mw_j96)
t2 = np.exp(-1.25 + 0.227 * mw_ab87)
return np.sqrt(t1 * t2) | [
"def",
"_compute_finite_fault_correction",
"(",
"self",
",",
"mag",
")",
":",
"mw_j96",
"=",
"mblg_to_mw_johnston_96",
"(",
"mag",
")",
"mw_ab87",
"=",
"mblg_to_mw_atkinson_boore_87",
"(",
"mag",
")",
"t1",
"=",
"np",
".",
"exp",
"(",
"-",
"1.25",
"+",
"0.227",
"*",
"mw_j96",
")",
"t2",
"=",
"np",
".",
"exp",
"(",
"-",
"1.25",
"+",
"0.227",
"*",
"mw_ab87",
")",
"return",
"np",
".",
"sqrt",
"(",
"t1",
"*",
"t2",
")"
] | Compute finite fault correction term as geometric mean of correction
terms obtained from Mw values calculated with Johnston 1996 and
Atkinson and Boore 1987 conversion equations.
Implement equations as in lines 1653 - 1658 in hazgridXnga2.f | [
"Compute",
"finite",
"fault",
"correction",
"term",
"as",
"geometric",
"mean",
"of",
"correction",
"terms",
"obtained",
"from",
"Mw",
"values",
"calculated",
"with",
"Johnston",
"1996",
"and",
"Atkinson",
"and",
"Boore",
"1987",
"conversion",
"equations",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/toro_1997.py#L131-L145 |
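Because both terms share the same exponential form, their geometric mean reduces to a single exponential of the averaged Mw; the converted magnitudes below are placeholders:
import numpy as np

mw_j96, mw_ab87 = 5.8, 5.6               # illustrative converted values
t1 = np.exp(-1.25 + 0.227 * mw_j96)
t2 = np.exp(-1.25 + 0.227 * mw_ab87)
assert np.isclose(np.sqrt(t1 * t2),
                  np.exp(-1.25 + 0.227 * (mw_j96 + mw_ab87) / 2.))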
gem/oq-engine | openquake/commands/upgrade_nrml.py | get_vulnerability_functions_04 | def get_vulnerability_functions_04(fname):
"""
Parse the vulnerability model in NRML 0.4 format.
:param fname:
path of the vulnerability file
:returns:
a dictionary imt, taxonomy -> vulnerability function + vset
"""
categories = dict(assetCategory=set(), lossCategory=set(),
vulnerabilitySetID=set())
imts = set()
taxonomies = set()
vf_dict = {} # imt, taxonomy -> vulnerability function
for vset in nrml.read(fname).vulnerabilityModel:
categories['assetCategory'].add(vset['assetCategory'])
categories['lossCategory'].add(vset['lossCategory'])
categories['vulnerabilitySetID'].add(vset['vulnerabilitySetID'])
IML = vset.IML
imt_str = IML['IMT']
imls = ~IML
imts.add(imt_str)
for vfun in vset.getnodes('discreteVulnerability'):
taxonomy = vfun['vulnerabilityFunctionID']
if taxonomy in taxonomies:
raise InvalidFile(
'Duplicated vulnerabilityFunctionID: %s: %s, line %d' %
(taxonomy, fname, vfun.lineno))
taxonomies.add(taxonomy)
with context(fname, vfun):
loss_ratios = ~vfun.lossRatio
coefficients = ~vfun.coefficientsVariation
if len(loss_ratios) != len(imls):
raise InvalidFile(
'There are %d loss ratios, but %d imls: %s, line %d' %
(len(loss_ratios), len(imls), fname,
vfun.lossRatio.lineno))
if len(coefficients) != len(imls):
raise InvalidFile(
'There are %d coefficients, but %d imls: %s, line %d' %
(len(coefficients), len(imls), fname,
vfun.coefficientsVariation.lineno))
with context(fname, vfun):
vf_dict[imt_str, taxonomy] = scientific.VulnerabilityFunction(
taxonomy, imt_str, imls, loss_ratios, coefficients,
vfun['probabilisticDistribution'])
categories['id'] = '_'.join(sorted(categories['vulnerabilitySetID']))
del categories['vulnerabilitySetID']
return vf_dict, categories | python | def get_vulnerability_functions_04(fname):
categories = dict(assetCategory=set(), lossCategory=set(),
vulnerabilitySetID=set())
imts = set()
taxonomies = set()
vf_dict = {}
for vset in nrml.read(fname).vulnerabilityModel:
categories['assetCategory'].add(vset['assetCategory'])
categories['lossCategory'].add(vset['lossCategory'])
categories['vulnerabilitySetID'].add(vset['vulnerabilitySetID'])
IML = vset.IML
imt_str = IML['IMT']
imls = ~IML
imts.add(imt_str)
for vfun in vset.getnodes('discreteVulnerability'):
taxonomy = vfun['vulnerabilityFunctionID']
if taxonomy in taxonomies:
raise InvalidFile(
'Duplicated vulnerabilityFunctionID: %s: %s, line %d' %
(taxonomy, fname, vfun.lineno))
taxonomies.add(taxonomy)
with context(fname, vfun):
loss_ratios = ~vfun.lossRatio
coefficients = ~vfun.coefficientsVariation
if len(loss_ratios) != len(imls):
raise InvalidFile(
'There are %d loss ratios, but %d imls: %s, line %d' %
(len(loss_ratios), len(imls), fname,
vfun.lossRatio.lineno))
if len(coefficients) != len(imls):
raise InvalidFile(
'There are %d coefficients, but %d imls: %s, line %d' %
(len(coefficients), len(imls), fname,
vfun.coefficientsVariation.lineno))
with context(fname, vfun):
vf_dict[imt_str, taxonomy] = scientific.VulnerabilityFunction(
taxonomy, imt_str, imls, loss_ratios, coefficients,
vfun['probabilisticDistribution'])
categories['id'] = '_'.join(sorted(categories['vulnerabilitySetID']))
del categories['vulnerabilitySetID']
return vf_dict, categories | [
"def",
"get_vulnerability_functions_04",
"(",
"fname",
")",
":",
"categories",
"=",
"dict",
"(",
"assetCategory",
"=",
"set",
"(",
")",
",",
"lossCategory",
"=",
"set",
"(",
")",
",",
"vulnerabilitySetID",
"=",
"set",
"(",
")",
")",
"imts",
"=",
"set",
"(",
")",
"taxonomies",
"=",
"set",
"(",
")",
"vf_dict",
"=",
"{",
"}",
"# imt, taxonomy -> vulnerability function",
"for",
"vset",
"in",
"nrml",
".",
"read",
"(",
"fname",
")",
".",
"vulnerabilityModel",
":",
"categories",
"[",
"'assetCategory'",
"]",
".",
"add",
"(",
"vset",
"[",
"'assetCategory'",
"]",
")",
"categories",
"[",
"'lossCategory'",
"]",
".",
"add",
"(",
"vset",
"[",
"'lossCategory'",
"]",
")",
"categories",
"[",
"'vulnerabilitySetID'",
"]",
".",
"add",
"(",
"vset",
"[",
"'vulnerabilitySetID'",
"]",
")",
"IML",
"=",
"vset",
".",
"IML",
"imt_str",
"=",
"IML",
"[",
"'IMT'",
"]",
"imls",
"=",
"~",
"IML",
"imts",
".",
"add",
"(",
"imt_str",
")",
"for",
"vfun",
"in",
"vset",
".",
"getnodes",
"(",
"'discreteVulnerability'",
")",
":",
"taxonomy",
"=",
"vfun",
"[",
"'vulnerabilityFunctionID'",
"]",
"if",
"taxonomy",
"in",
"taxonomies",
":",
"raise",
"InvalidFile",
"(",
"'Duplicated vulnerabilityFunctionID: %s: %s, line %d'",
"%",
"(",
"taxonomy",
",",
"fname",
",",
"vfun",
".",
"lineno",
")",
")",
"taxonomies",
".",
"add",
"(",
"taxonomy",
")",
"with",
"context",
"(",
"fname",
",",
"vfun",
")",
":",
"loss_ratios",
"=",
"~",
"vfun",
".",
"lossRatio",
"coefficients",
"=",
"~",
"vfun",
".",
"coefficientsVariation",
"if",
"len",
"(",
"loss_ratios",
")",
"!=",
"len",
"(",
"imls",
")",
":",
"raise",
"InvalidFile",
"(",
"'There are %d loss ratios, but %d imls: %s, line %d'",
"%",
"(",
"len",
"(",
"loss_ratios",
")",
",",
"len",
"(",
"imls",
")",
",",
"fname",
",",
"vfun",
".",
"lossRatio",
".",
"lineno",
")",
")",
"if",
"len",
"(",
"coefficients",
")",
"!=",
"len",
"(",
"imls",
")",
":",
"raise",
"InvalidFile",
"(",
"'There are %d coefficients, but %d imls: %s, line %d'",
"%",
"(",
"len",
"(",
"coefficients",
")",
",",
"len",
"(",
"imls",
")",
",",
"fname",
",",
"vfun",
".",
"coefficientsVariation",
".",
"lineno",
")",
")",
"with",
"context",
"(",
"fname",
",",
"vfun",
")",
":",
"vf_dict",
"[",
"imt_str",
",",
"taxonomy",
"]",
"=",
"scientific",
".",
"VulnerabilityFunction",
"(",
"taxonomy",
",",
"imt_str",
",",
"imls",
",",
"loss_ratios",
",",
"coefficients",
",",
"vfun",
"[",
"'probabilisticDistribution'",
"]",
")",
"categories",
"[",
"'id'",
"]",
"=",
"'_'",
".",
"join",
"(",
"sorted",
"(",
"categories",
"[",
"'vulnerabilitySetID'",
"]",
")",
")",
"del",
"categories",
"[",
"'vulnerabilitySetID'",
"]",
"return",
"vf_dict",
",",
"categories"
] | Parse the vulnerability model in NRML 0.4 format.
:param fname:
path of the vulnerability file
:returns:
a dictionary imt, taxonomy -> vulnerability function + vset | [
"Parse",
"the",
"vulnerability",
"model",
"in",
"NRML",
"0",
".",
"4",
"format",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/upgrade_nrml.py#L32-L80 |
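How the composite `id` is assembled once every vulnerability set has been read, shown on toy set IDs:
categories = {'vulnerabilitySetID': {'PGA-set', 'SA03-set'}}
categories['id'] = '_'.join(sorted(categories['vulnerabilitySetID']))
del categories['vulnerabilitySetID']
print(categories['id'])                  # PGA-set_SA03-set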
gem/oq-engine | openquake/commands/upgrade_nrml.py | upgrade_file | def upgrade_file(path, multipoint):
"""Upgrade to the latest NRML version"""
node0 = nrml.read(path, chatty=False)[0]
shutil.copy(path, path + '.bak') # make a backup of the original file
tag = striptag(node0.tag)
gml = True
if tag == 'vulnerabilityModel':
vf_dict, cat_dict = get_vulnerability_functions_04(path)
# below I am converting into a NRML 0.5 vulnerabilityModel
node0 = Node(
'vulnerabilityModel', cat_dict,
nodes=[obj_to_node(val) for val in vf_dict.values()])
gml = False
elif tag == 'fragilityModel':
node0 = read_nrml.convert_fragility_model_04(
nrml.read(path)[0], path)
gml = False
elif tag == 'sourceModel':
node0 = nrml.read(path)[0]
dic = groupby(node0.nodes, operator.itemgetter('tectonicRegion'))
node0.nodes = [Node('sourceGroup',
dict(tectonicRegion=trt, name="group %s" % i),
nodes=srcs)
for i, (trt, srcs) in enumerate(dic.items(), 1)]
if multipoint:
sourceconverter.update_source_model(node0, path + '.bak')
with open(path, 'wb') as f:
nrml.write([node0], f, gml=gml) | python | def upgrade_file(path, multipoint):
node0 = nrml.read(path, chatty=False)[0]
shutil.copy(path, path + '.bak')
tag = striptag(node0.tag)
gml = True
if tag == 'vulnerabilityModel':
vf_dict, cat_dict = get_vulnerability_functions_04(path)
node0 = Node(
'vulnerabilityModel', cat_dict,
nodes=[obj_to_node(val) for val in vf_dict.values()])
gml = False
elif tag == 'fragilityModel':
node0 = read_nrml.convert_fragility_model_04(
nrml.read(path)[0], path)
gml = False
elif tag == 'sourceModel':
node0 = nrml.read(path)[0]
dic = groupby(node0.nodes, operator.itemgetter('tectonicRegion'))
node0.nodes = [Node('sourceGroup',
dict(tectonicRegion=trt, name="group %s" % i),
nodes=srcs)
for i, (trt, srcs) in enumerate(dic.items(), 1)]
if multipoint:
sourceconverter.update_source_model(node0, path + '.bak')
with open(path, 'wb') as f:
nrml.write([node0], f, gml=gml) | [
"def",
"upgrade_file",
"(",
"path",
",",
"multipoint",
")",
":",
"node0",
"=",
"nrml",
".",
"read",
"(",
"path",
",",
"chatty",
"=",
"False",
")",
"[",
"0",
"]",
"shutil",
".",
"copy",
"(",
"path",
",",
"path",
"+",
"'.bak'",
")",
"# make a backup of the original file",
"tag",
"=",
"striptag",
"(",
"node0",
".",
"tag",
")",
"gml",
"=",
"True",
"if",
"tag",
"==",
"'vulnerabilityModel'",
":",
"vf_dict",
",",
"cat_dict",
"=",
"get_vulnerability_functions_04",
"(",
"path",
")",
"# below I am converting into a NRML 0.5 vulnerabilityModel",
"node0",
"=",
"Node",
"(",
"'vulnerabilityModel'",
",",
"cat_dict",
",",
"nodes",
"=",
"[",
"obj_to_node",
"(",
"val",
")",
"for",
"val",
"in",
"vf_dict",
".",
"values",
"(",
")",
"]",
")",
"gml",
"=",
"False",
"elif",
"tag",
"==",
"'fragilityModel'",
":",
"node0",
"=",
"read_nrml",
".",
"convert_fragility_model_04",
"(",
"nrml",
".",
"read",
"(",
"path",
")",
"[",
"0",
"]",
",",
"path",
")",
"gml",
"=",
"False",
"elif",
"tag",
"==",
"'sourceModel'",
":",
"node0",
"=",
"nrml",
".",
"read",
"(",
"path",
")",
"[",
"0",
"]",
"dic",
"=",
"groupby",
"(",
"node0",
".",
"nodes",
",",
"operator",
".",
"itemgetter",
"(",
"'tectonicRegion'",
")",
")",
"node0",
".",
"nodes",
"=",
"[",
"Node",
"(",
"'sourceGroup'",
",",
"dict",
"(",
"tectonicRegion",
"=",
"trt",
",",
"name",
"=",
"\"group %s\"",
"%",
"i",
")",
",",
"nodes",
"=",
"srcs",
")",
"for",
"i",
",",
"(",
"trt",
",",
"srcs",
")",
"in",
"enumerate",
"(",
"dic",
".",
"items",
"(",
")",
",",
"1",
")",
"]",
"if",
"multipoint",
":",
"sourceconverter",
".",
"update_source_model",
"(",
"node0",
",",
"path",
"+",
"'.bak'",
")",
"with",
"open",
"(",
"path",
",",
"'wb'",
")",
"as",
"f",
":",
"nrml",
".",
"write",
"(",
"[",
"node0",
"]",
",",
"f",
",",
"gml",
"=",
"gml",
")"
] | Upgrade to the latest NRML version | [
"Upgrade",
"to",
"the",
"latest",
"NRML",
"version"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/upgrade_nrml.py#L83-L110 |
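A standalone equivalent of the tectonic-region grouping; the engine's own `groupby` helper returns a dict directly, so plain dicts stand in for source nodes here:
import itertools
import operator

srcs = [{'tectonicRegion': 'ASC', 'id': 'a'},
        {'tectonicRegion': 'SIF', 'id': 'b'},
        {'tectonicRegion': 'ASC', 'id': 'c'}]
key = operator.itemgetter('tectonicRegion')
dic = {trt: list(grp)
       for trt, grp in itertools.groupby(sorted(srcs, key=key), key)}
print({trt: [s['id'] for s in ss] for trt, ss in dic.items()})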
gem/oq-engine | openquake/commands/upgrade_nrml.py | upgrade_nrml | def upgrade_nrml(directory, dry_run, multipoint):
"""
Upgrade all the NRML files contained in the given directory to the latest
NRML version. Works by walking all subdirectories.
WARNING: there is no downgrade!
"""
for cwd, dirs, files in os.walk(directory):
for f in files:
path = os.path.join(cwd, f)
if f.endswith('.xml'):
ip = iterparse(path, events=('start',))
next(ip) # read node zero
try:
fulltag = next(ip)[1].tag # tag of the first node
xmlns, tag = fulltag.split('}')
except Exception: # not a NRML file
xmlns, tag = '', ''
if xmlns[1:] == NRML05: # already upgraded
if 'sourceModel' in tag and multipoint:
print('upgrading to multiPointSources', path)
node0 = nrml.read(path)[0]
sourceconverter.update_source_model(node0, path)
with open(path, 'wb') as f:
nrml.write([node0], f, gml=True)
elif 'nrml/0.4' in xmlns and (
'vulnerability' in tag or 'fragility' in tag or
'sourceModel' in tag):
if not dry_run:
print('Upgrading', path)
try:
upgrade_file(path, multipoint)
except Exception as exc:
raise
print(exc)
else:
print('Not upgrading', path) | python | def upgrade_nrml(directory, dry_run, multipoint):
for cwd, dirs, files in os.walk(directory):
for f in files:
path = os.path.join(cwd, f)
if f.endswith('.xml'):
ip = iterparse(path, events=('start',))
next(ip)
try:
fulltag = next(ip)[1].tag
xmlns, tag = fulltag.split('}')
except Exception:
xmlns, tag = '', ''
if xmlns[1:] == NRML05:
if 'sourceModel' in tag and multipoint:
print('upgrading to multiPointSources', path)
node0 = nrml.read(path)[0]
sourceconverter.update_source_model(node0, path)
with open(path, 'wb') as f:
nrml.write([node0], f, gml=True)
elif 'nrml/0.4' in xmlns and (
'vulnerability' in tag or 'fragility' in tag or
'sourceModel' in tag):
if not dry_run:
print('Upgrading', path)
try:
upgrade_file(path, multipoint)
except Exception as exc:
raise
print(exc)
else:
print('Not upgrading', path) | [
"def",
"upgrade_nrml",
"(",
"directory",
",",
"dry_run",
",",
"multipoint",
")",
":",
"for",
"cwd",
",",
"dirs",
",",
"files",
"in",
"os",
".",
"walk",
"(",
"directory",
")",
":",
"for",
"f",
"in",
"files",
":",
"path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"cwd",
",",
"f",
")",
"if",
"f",
".",
"endswith",
"(",
"'.xml'",
")",
":",
"ip",
"=",
"iterparse",
"(",
"path",
",",
"events",
"=",
"(",
"'start'",
",",
")",
")",
"next",
"(",
"ip",
")",
"# read node zero",
"try",
":",
"fulltag",
"=",
"next",
"(",
"ip",
")",
"[",
"1",
"]",
".",
"tag",
"# tag of the first node",
"xmlns",
",",
"tag",
"=",
"fulltag",
".",
"split",
"(",
"'}'",
")",
"except",
"Exception",
":",
"# not a NRML file",
"xmlns",
",",
"tag",
"=",
"''",
",",
"''",
"if",
"xmlns",
"[",
"1",
":",
"]",
"==",
"NRML05",
":",
"# already upgraded",
"if",
"'sourceModel'",
"in",
"tag",
"and",
"multipoint",
":",
"print",
"(",
"'upgrading to multiPointSources'",
",",
"path",
")",
"node0",
"=",
"nrml",
".",
"read",
"(",
"path",
")",
"[",
"0",
"]",
"sourceconverter",
".",
"update_source_model",
"(",
"node0",
",",
"path",
")",
"with",
"open",
"(",
"path",
",",
"'wb'",
")",
"as",
"f",
":",
"nrml",
".",
"write",
"(",
"[",
"node0",
"]",
",",
"f",
",",
"gml",
"=",
"True",
")",
"elif",
"'nrml/0.4'",
"in",
"xmlns",
"and",
"(",
"'vulnerability'",
"in",
"tag",
"or",
"'fragility'",
"in",
"tag",
"or",
"'sourceModel'",
"in",
"tag",
")",
":",
"if",
"not",
"dry_run",
":",
"print",
"(",
"'Upgrading'",
",",
"path",
")",
"try",
":",
"upgrade_file",
"(",
"path",
",",
"multipoint",
")",
"except",
"Exception",
"as",
"exc",
":",
"raise",
"print",
"(",
"exc",
")",
"else",
":",
"print",
"(",
"'Not upgrading'",
",",
"path",
")"
] | Upgrade all the NRML files contained in the given directory to the latest
NRML version. Works by walking all subdirectories.
WARNING: there is no downgrade! | [
"Upgrade",
"all",
"the",
"NRML",
"files",
"contained",
"in",
"the",
"given",
"directory",
"to",
"the",
"latest",
"NRML",
"version",
".",
"Works",
"by",
"walking",
"all",
"subdirectories",
".",
"WARNING",
":",
"there",
"is",
"no",
"downgrade!"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/upgrade_nrml.py#L117-L152 |
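The namespace sniffing on the first child element, reproduced on an in-memory document:
import io
from xml.etree.ElementTree import iterparse

buf = io.BytesIO(b'<nrml xmlns="http://openquake.org/xmlns/nrml/0.4">'
                 b'<sourceModel/></nrml>')
ip = iterparse(buf, events=('start',))
next(ip)                                 # node zero, the nrml root
fulltag = next(ip)[1].tag                # tag of the first child
xmlns, tag = fulltag.split('}')
print(xmlns[1:], tag)   # http://openquake.org/xmlns/nrml/0.4 sourceModel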
gem/oq-engine | openquake/hazardlib/gsim/faccioli_2010.py | FaccioliEtAl2010._compute_term_3 | def _compute_term_3(self, C, rrup, mag):
"""
This computes the third term in equation 2, page 2.
"""
return (C['a3'] *
np.log10(rrup + C['a4'] * np.power(10, C['a5'] * mag))) | python | def _compute_term_3(self, C, rrup, mag):
return (C['a3'] *
np.log10(rrup + C['a4'] * np.power(10, C['a5'] * mag))) | [
"def",
"_compute_term_3",
"(",
"self",
",",
"C",
",",
"rrup",
",",
"mag",
")",
":",
"return",
"(",
"C",
"[",
"'a3'",
"]",
"*",
"np",
".",
"log10",
"(",
"rrup",
"+",
"C",
"[",
"'a4'",
"]",
"*",
"np",
".",
"power",
"(",
"10",
",",
"C",
"[",
"'a5'",
"]",
"*",
"mag",
")",
")",
")"
] | This computes the third term in equation 2, page 2. | [
"This",
"computes",
"the",
"third",
"term",
"in",
"equation",
"2",
"page",
"2",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/faccioli_2010.py#L85-L90 |
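The magnitude-dependent pseudo-depth term evaluated with placeholder coefficients, not the published Faccioli et al. (2010) values:
import numpy as np

a3, a4, a5 = -1.6, 1.0, 0.5              # illustrative only
rrup, mag = np.array([5., 20., 100.]), 6.0
print(a3 * np.log10(rrup + a4 * np.power(10, a5 * mag)))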
gem/oq-engine | openquake/hmtk/sources/source_conversion_utils.py | mag_scale_rel_to_hazardlib | def mag_scale_rel_to_hazardlib(mag_scale_rel, use_default=False):
"""
Returns the magnitude scaling relation in a format readable by
openquake.hazardlib
"""
if isinstance(mag_scale_rel, BaseMSR):
return mag_scale_rel
elif isinstance(mag_scale_rel, str):
if not mag_scale_rel in SCALE_RELS.keys():
raise ValueError('Magnitude scaling relation %s not supported!'
% mag_scale_rel)
else:
return SCALE_RELS[mag_scale_rel]()
else:
if use_default:
# Returns the default Wells and Coppersmith (1994) relation
return WC1994()
else:
raise ValueError('Magnitude Scaling Relation Not Defined!') | python | def mag_scale_rel_to_hazardlib(mag_scale_rel, use_default=False):
if isinstance(mag_scale_rel, BaseMSR):
return mag_scale_rel
elif isinstance(mag_scale_rel, str):
if not mag_scale_rel in SCALE_RELS.keys():
raise ValueError('Magnitude scaling relation %s not supported!'
% mag_scale_rel)
else:
return SCALE_RELS[mag_scale_rel]()
else:
if use_default:
return WC1994()
else:
raise ValueError('Magnitude Scaling Relation Not Defined!') | [
"def",
"mag_scale_rel_to_hazardlib",
"(",
"mag_scale_rel",
",",
"use_default",
"=",
"False",
")",
":",
"if",
"isinstance",
"(",
"mag_scale_rel",
",",
"BaseMSR",
")",
":",
"return",
"mag_scale_rel",
"elif",
"isinstance",
"(",
"mag_scale_rel",
",",
"str",
")",
":",
"if",
"not",
"mag_scale_rel",
"in",
"SCALE_RELS",
".",
"keys",
"(",
")",
":",
"raise",
"ValueError",
"(",
"'Magnitude scaling relation %s not supported!'",
"%",
"mag_scale_rel",
")",
"else",
":",
"return",
"SCALE_RELS",
"[",
"mag_scale_rel",
"]",
"(",
")",
"else",
":",
"if",
"use_default",
":",
"# Returns the Wells and Coppersmith string",
"return",
"WC1994",
"(",
")",
"else",
":",
"raise",
"ValueError",
"(",
"'Magnitude Scaling Relation Not Defined!'",
")"
] | Returns the magnitude scaling relation in a format readable by
openquake.hazardlib | [
"Returns",
"the",
"magnitude",
"scaling",
"relation",
"in",
"a",
"format",
"readable",
"by",
"openquake",
".",
"hazardlib"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/sources/source_conversion_utils.py#L79-L97 |
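A hedged usage sketch; it assumes 'WC1994' is among the keys of SCALE_RELS and that the module is importable from the path in the URL above:
from openquake.hmtk.sources.source_conversion_utils import \
    mag_scale_rel_to_hazardlib

msr = mag_scale_rel_to_hazardlib('WC1994')     # by name, assumed key
default = mag_scale_rel_to_hazardlib(None, use_default=True)   # WC1994()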
gem/oq-engine | openquake/hmtk/sources/source_conversion_utils.py | npd_to_pmf | def npd_to_pmf(nodal_plane_dist, use_default=False):
"""
Returns the nodal plane distribution as an instance of the PMF class
"""
if isinstance(nodal_plane_dist, PMF):
# Already in PMF format - return
return nodal_plane_dist
else:
if use_default:
return PMF([(1.0, NodalPlane(0.0, 90.0, 0.0))])
else:
raise ValueError('Nodal Plane distribution not defined') | python | def npd_to_pmf(nodal_plane_dist, use_default=False):
if isinstance(nodal_plane_dist, PMF):
return nodal_plane_dist
else:
if use_default:
return PMF([(1.0, NodalPlane(0.0, 90.0, 0.0))])
else:
raise ValueError('Nodal Plane distribution not defined') | [
"def",
"npd_to_pmf",
"(",
"nodal_plane_dist",
",",
"use_default",
"=",
"False",
")",
":",
"if",
"isinstance",
"(",
"nodal_plane_dist",
",",
"PMF",
")",
":",
"# Aready in PMF format - return",
"return",
"nodal_plane_dist",
"else",
":",
"if",
"use_default",
":",
"return",
"PMF",
"(",
"[",
"(",
"1.0",
",",
"NodalPlane",
"(",
"0.0",
",",
"90.0",
",",
"0.0",
")",
")",
"]",
")",
"else",
":",
"raise",
"ValueError",
"(",
"'Nodal Plane distribution not defined'",
")"
] | Returns the nodal plane distribution as an instance of the PMF class | [
"Returns",
"the",
"nodal",
"plane",
"distribution",
"as",
"an",
"instance",
"of",
"the",
"PMF",
"class"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/sources/source_conversion_utils.py#L100-L111 |
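The default object returned above, built explicitly; the hazardlib import locations are assumed to be the usual ones:
from openquake.hazardlib.pmf import PMF
from openquake.hazardlib.geo.nodalplane import NodalPlane

npd = PMF([(1.0, NodalPlane(strike=0.0, dip=90.0, rake=0.0))])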
gem/oq-engine | openquake/hmtk/sources/source_conversion_utils.py | hdd_to_pmf | def hdd_to_pmf(hypo_depth_dist, use_default=False):
"""
Returns the hypocentral depth distribution as an instance of the :class:
openquake.hazardlib.pmf.
"""
if isinstance(hypo_depth_dist, PMF):
# Is already instance of PMF
return hypo_depth_dist
else:
if use_default:
# Default value of 10 km accepted
return PMF([(1.0, 10.0)])
else:
# Out of options - raise error!
raise ValueError('Hypocentral depth distribution not defined!') | python | def hdd_to_pmf(hypo_depth_dist, use_default=False):
if isinstance(hypo_depth_dist, PMF):
return hypo_depth_dist
else:
if use_default:
return PMF([(1.0, 10.0)])
else:
raise ValueError('Hypocentral depth distribution not defined!') | [
"def",
"hdd_to_pmf",
"(",
"hypo_depth_dist",
",",
"use_default",
"=",
"False",
")",
":",
"if",
"isinstance",
"(",
"hypo_depth_dist",
",",
"PMF",
")",
":",
"# Is already instance of PMF",
"return",
"hypo_depth_dist",
"else",
":",
"if",
"use_default",
":",
"# Default value of 10 km accepted",
"return",
"PMF",
"(",
"[",
"(",
"1.0",
",",
"10.0",
")",
"]",
")",
"else",
":",
"# Out of options - raise error!",
"raise",
"ValueError",
"(",
"'Hypocentral depth distribution not defined!'",
")"
] | Returns the hypocentral depth distribution as an instance of the :class:
openquake.hazardlib.pmf. | [
"Returns",
"the",
"hypocentral",
"depth",
"distribtuion",
"as",
"an",
"instance",
"of",
"the",
":",
"class",
":",
"openquake",
".",
"hazardlib",
".",
"pmf",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/sources/source_conversion_utils.py#L114-L128 |
gem/oq-engine | openquake/hmtk/sources/source_conversion_utils.py | simple_trace_to_wkt_linestring | def simple_trace_to_wkt_linestring(trace):
'''
Converts a simple fault trace to well-known text format
:param trace:
Fault trace as instance of :class: openquake.hazardlib.geo.line.Line
:returns:
Well-known text (WKT) Linestring representation of the trace
'''
trace_str = ""
for point in trace:
trace_str += ' %s %s,' % (point.longitude, point.latitude)
trace_str = trace_str.lstrip(' ')
return 'LINESTRING (' + trace_str.rstrip(',') + ')' | python | def simple_trace_to_wkt_linestring(trace):
trace_str = ""
for point in trace:
trace_str += ' %s %s,' % (point.longitude, point.latitude)
trace_str = trace_str.lstrip(' ')
return 'LINESTRING (' + trace_str.rstrip(',') + ')' | [
"def",
"simple_trace_to_wkt_linestring",
"(",
"trace",
")",
":",
"trace_str",
"=",
"\"\"",
"for",
"point",
"in",
"trace",
":",
"trace_str",
"+=",
"' %s %s,'",
"%",
"(",
"point",
".",
"longitude",
",",
"point",
".",
"latitude",
")",
"trace_str",
"=",
"trace_str",
".",
"lstrip",
"(",
"' '",
")",
"return",
"'LINESTRING ('",
"+",
"trace_str",
".",
"rstrip",
"(",
"','",
")",
"+",
"')'"
] | Converts a simple fault trace to well-known text format
:param trace:
Fault trace as instance of :class: openquake.hazardlib.geo.line.Line
:returns:
Well-known text (WKT) Linestring representation of the trace | [
"Coverts",
"a",
"simple",
"fault",
"trace",
"to",
"well",
"-",
"known",
"text",
"format"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/sources/source_conversion_utils.py#L131-L145 |
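A standalone demo with a minimal point type; the import path follows the URL above:
import collections
from openquake.hmtk.sources.source_conversion_utils import \
    simple_trace_to_wkt_linestring

Point = collections.namedtuple('Point', 'longitude latitude')
print(simple_trace_to_wkt_linestring([Point(10.0, 45.0), Point(10.5, 45.2)]))
# LINESTRING (10.0 45.0, 10.5 45.2)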
gem/oq-engine | openquake/hmtk/sources/source_conversion_utils.py | simple_edge_to_wkt_linestring | def simple_edge_to_wkt_linestring(edge):
'''
Converts a simple fault trace to well-known text format
:param trace:
Fault trace as instance of :class: openquake.hazardlib.geo.line.Line
:returns:
Well-known text (WKT) Linestring representation of the trace
'''
trace_str = ""
for point in edge:
trace_str += ' %s %s %s,' % (point.longitude, point.latitude,
point.depth)
trace_str = trace_str.lstrip(' ')
return 'LINESTRING (' + trace_str.rstrip(',') + ')' | python | def simple_edge_to_wkt_linestring(edge):
trace_str = ""
for point in edge:
trace_str += ' %s %s %s,' % (point.longitude, point.latitude,
point.depth)
trace_str = trace_str.lstrip(' ')
return 'LINESTRING (' + trace_str.rstrip(',') + ')' | [
"def",
"simple_edge_to_wkt_linestring",
"(",
"edge",
")",
":",
"trace_str",
"=",
"\"\"",
"for",
"point",
"in",
"edge",
":",
"trace_str",
"+=",
"' %s %s %s,'",
"%",
"(",
"point",
".",
"longitude",
",",
"point",
".",
"latitude",
",",
"point",
".",
"depth",
")",
"trace_str",
"=",
"trace_str",
".",
"lstrip",
"(",
"' '",
")",
"return",
"'LINESTRING ('",
"+",
"trace_str",
".",
"rstrip",
"(",
"','",
")",
"+",
"')'"
] | Converts a simple fault trace to well-known text format
:param trace:
Fault trace as instance of :class: openquake.hazardlib.geo.line.Line
:returns:
Well-known text (WKT) Linestring representation of the trace | [
"Coverts",
"a",
"simple",
"fault",
"trace",
"to",
"well",
"-",
"known",
"text",
"format"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/sources/source_conversion_utils.py#L148-L163 |
gem/oq-engine | openquake/commands/checksum.py | checksum | def checksum(thing):
"""
Get the checksum of a calculation from the calculation ID (if already
done) or from the job.ini/job.zip file (if not done yet). If `thing`
is a source model logic tree file, get the checksum of the model by
ignoring the job.ini, the gmpe logic tree file and possibly other files.
"""
try:
job_id = int(thing)
job_file = None
except ValueError:
job_id = None
job_file = thing
if not os.path.exists(job_file):
sys.exit('%s does not correspond to an existing file' % job_file)
if job_id:
dstore = util.read(job_id)
checksum = dstore['/'].attrs['checksum32']
elif job_file.endswith('.xml'): # assume it is a smlt file
inputs = {'source_model_logic_tree': job_file}
checksum = readinput.get_checksum32(mock.Mock(inputs=inputs))
else:
oq = readinput.get_oqparam(job_file)
checksum = readinput.get_checksum32(oq)
print(checksum) | python | def checksum(thing):
try:
job_id = int(thing)
job_file = None
except ValueError:
job_id = None
job_file = thing
if not os.path.exists(job_file):
sys.exit('%s does not correspond to an existing file' % job_file)
if job_id:
dstore = util.read(job_id)
checksum = dstore['/'].attrs['checksum32']
elif job_file.endswith('.xml'):
inputs = {'source_model_logic_tree': job_file}
checksum = readinput.get_checksum32(mock.Mock(inputs=inputs))
else:
oq = readinput.get_oqparam(job_file)
checksum = readinput.get_checksum32(oq)
print(checksum) | [
"def",
"checksum",
"(",
"thing",
")",
":",
"try",
":",
"job_id",
"=",
"int",
"(",
"thing",
")",
"job_file",
"=",
"None",
"except",
"ValueError",
":",
"job_id",
"=",
"None",
"job_file",
"=",
"thing",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"job_file",
")",
":",
"sys",
".",
"exit",
"(",
"'%s does not correspond to an existing file'",
"%",
"job_file",
")",
"if",
"job_id",
":",
"dstore",
"=",
"util",
".",
"read",
"(",
"job_id",
")",
"checksum",
"=",
"dstore",
"[",
"'/'",
"]",
".",
"attrs",
"[",
"'checksum32'",
"]",
"elif",
"job_file",
".",
"endswith",
"(",
"'.xml'",
")",
":",
"# assume it is a smlt file",
"inputs",
"=",
"{",
"'source_model_logic_tree'",
":",
"job_file",
"}",
"checksum",
"=",
"readinput",
".",
"get_checksum32",
"(",
"mock",
".",
"Mock",
"(",
"inputs",
"=",
"inputs",
")",
")",
"else",
":",
"oq",
"=",
"readinput",
".",
"get_oqparam",
"(",
"job_file",
")",
"checksum",
"=",
"readinput",
".",
"get_checksum32",
"(",
"oq",
")",
"print",
"(",
"checksum",
")"
] | Get the checksum of a calculation from the calculation ID (if already
done) or from the job.ini/job.zip file (if not done yet). If `thing`
is a source model logic tree file, get the checksum of the model by
ignoring the job.ini, the gmpe logic tree file and possibly other files. | [
"Get",
"the",
"checksum",
"of",
"a",
"calculation",
"from",
"the",
"calculation",
"ID",
"(",
"if",
"already",
"done",
")",
"or",
"from",
"the",
"job",
".",
"ini",
"/",
"job",
".",
"zip",
"file",
"(",
"if",
"not",
"done",
"yet",
")",
".",
"If",
"thing",
"is",
"a",
"source",
"model",
"logic",
"tree",
"file",
"get",
"the",
"checksum",
"of",
"the",
"model",
"by",
"ignoring",
"the",
"job",
".",
"ini",
"the",
"gmpe",
"logic",
"tree",
"file",
"and",
"possibly",
"other",
"files",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/checksum.py#L27-L51 |
gem/oq-engine | openquake/commands/engine.py | run_job | def run_job(job_ini, log_level='info', log_file=None, exports='',
username=getpass.getuser(), **kw):
"""
Run a job using the specified config file and other options.
:param str job_ini:
Path to calculation config (INI-style) files.
:param str log_level:
'debug', 'info', 'warn', 'error', or 'critical'
:param str log_file:
Path to log file.
:param exports:
A comma-separated string of export types requested by the user.
:param username:
Name of the user running the job
:param kw:
Extra parameters like hazard_calculation_id and calculation_mode
"""
job_id = logs.init('job', getattr(logging, log_level.upper()))
with logs.handle(job_id, log_level, log_file):
job_ini = os.path.abspath(job_ini)
oqparam = eng.job_from_file(job_ini, job_id, username, **kw)
kw['username'] = username
eng.run_calc(job_id, oqparam, exports, **kw)
for line in logs.dbcmd('list_outputs', job_id, False):
safeprint(line)
return job_id | python | def run_job(job_ini, log_level='info', log_file=None, exports='',
username=getpass.getuser(), **kw):
job_id = logs.init('job', getattr(logging, log_level.upper()))
with logs.handle(job_id, log_level, log_file):
job_ini = os.path.abspath(job_ini)
oqparam = eng.job_from_file(job_ini, job_id, username, **kw)
kw['username'] = username
eng.run_calc(job_id, oqparam, exports, **kw)
for line in logs.dbcmd('list_outputs', job_id, False):
safeprint(line)
return job_id | [
"def",
"run_job",
"(",
"job_ini",
",",
"log_level",
"=",
"'info'",
",",
"log_file",
"=",
"None",
",",
"exports",
"=",
"''",
",",
"username",
"=",
"getpass",
".",
"getuser",
"(",
")",
",",
"*",
"*",
"kw",
")",
":",
"job_id",
"=",
"logs",
".",
"init",
"(",
"'job'",
",",
"getattr",
"(",
"logging",
",",
"log_level",
".",
"upper",
"(",
")",
")",
")",
"with",
"logs",
".",
"handle",
"(",
"job_id",
",",
"log_level",
",",
"log_file",
")",
":",
"job_ini",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"job_ini",
")",
"oqparam",
"=",
"eng",
".",
"job_from_file",
"(",
"job_ini",
",",
"job_id",
",",
"username",
",",
"*",
"*",
"kw",
")",
"kw",
"[",
"'username'",
"]",
"=",
"username",
"eng",
".",
"run_calc",
"(",
"job_id",
",",
"oqparam",
",",
"exports",
",",
"*",
"*",
"kw",
")",
"for",
"line",
"in",
"logs",
".",
"dbcmd",
"(",
"'list_outputs'",
",",
"job_id",
",",
"False",
")",
":",
"safeprint",
"(",
"line",
")",
"return",
"job_id"
] | Run a job using the specified config file and other options.
:param str job_ini:
Path to calculation config (INI-style) files.
:param str log_level:
'debug', 'info', 'warn', 'error', or 'critical'
:param str log_file:
Path to log file.
:param exports:
A comma-separated string of export types requested by the user.
:param username:
Name of the user running the job
:param kw:
Extra parameters like hazard_calculation_id and calculation_mode | [
"Run",
"a",
"job",
"using",
"the",
"specified",
"config",
"file",
"and",
"other",
"options",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/engine.py#L45-L71 |
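A hedged usage sketch of this entry point; the import path follows the URL above and the job file path is illustrative:
from openquake.commands.engine import run_job

job_id = run_job('/path/to/job.ini', log_level='info', exports='csv')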
gem/oq-engine | openquake/commands/engine.py | run_tile | def run_tile(job_ini, sites_slice):
"""
Used in tiling calculations
"""
return run_job(job_ini, sites_slice=(sites_slice.start, sites_slice.stop)) | python | def run_tile(job_ini, sites_slice):
return run_job(job_ini, sites_slice=(sites_slice.start, sites_slice.stop)) | [
"def",
"run_tile",
"(",
"job_ini",
",",
"sites_slice",
")",
":",
"return",
"run_job",
"(",
"job_ini",
",",
"sites_slice",
"=",
"(",
"sites_slice",
".",
"start",
",",
"sites_slice",
".",
"stop",
")",
")"
] | Used in tiling calculations | [
"Used",
"in",
"tiling",
"calculations"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/engine.py#L74-L78 |
gem/oq-engine | openquake/commands/engine.py | del_calculation | def del_calculation(job_id, confirmed=False):
"""
Delete a calculation and all associated outputs.
"""
if logs.dbcmd('get_job', job_id) is None:
print('There is no job %d' % job_id)
return
if confirmed or confirm(
'Are you sure you want to (abort and) delete this calculation and '
'all associated outputs?\nThis action cannot be undone. (y/n): '):
try:
abort(job_id)
resp = logs.dbcmd('del_calc', job_id, getpass.getuser())
except RuntimeError as err:
safeprint(err)
else:
if 'success' in resp:
print('Removed %d' % job_id)
else:
print(resp['error']) | python | def del_calculation(job_id, confirmed=False):
if logs.dbcmd('get_job', job_id) is None:
print('There is no job %d' % job_id)
return
if confirmed or confirm(
'Are you sure you want to (abort and) delete this calculation and '
'all associated outputs?\nThis action cannot be undone. (y/n): '):
try:
abort(job_id)
resp = logs.dbcmd('del_calc', job_id, getpass.getuser())
except RuntimeError as err:
safeprint(err)
else:
if 'success' in resp:
print('Removed %d' % job_id)
else:
print(resp['error']) | [
"def",
"del_calculation",
"(",
"job_id",
",",
"confirmed",
"=",
"False",
")",
":",
"if",
"logs",
".",
"dbcmd",
"(",
"'get_job'",
",",
"job_id",
")",
"is",
"None",
":",
"print",
"(",
"'There is no job %d'",
"%",
"job_id",
")",
"return",
"if",
"confirmed",
"or",
"confirm",
"(",
"'Are you sure you want to (abort and) delete this calculation and '",
"'all associated outputs?\\nThis action cannot be undone. (y/n): '",
")",
":",
"try",
":",
"abort",
"(",
"job_id",
")",
"resp",
"=",
"logs",
".",
"dbcmd",
"(",
"'del_calc'",
",",
"job_id",
",",
"getpass",
".",
"getuser",
"(",
")",
")",
"except",
"RuntimeError",
"as",
"err",
":",
"safeprint",
"(",
"err",
")",
"else",
":",
"if",
"'success'",
"in",
"resp",
":",
"print",
"(",
"'Removed %d'",
"%",
"job_id",
")",
"else",
":",
"print",
"(",
"resp",
"[",
"'error'",
"]",
")"
] | Delete a calculation and all associated outputs. | [
"Delete",
"a",
"calculation",
"and",
"all",
"associated",
"outputs",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/engine.py#L81-L101 |
gem/oq-engine | openquake/commands/engine.py | smart_run | def smart_run(job_ini, oqparam, log_level, log_file, exports, reuse_hazard):
"""
Run calculations by storing their hazard checksum and reusing previous
calculations if requested.
"""
haz_checksum = readinput.get_checksum32(oqparam, hazard=True)
# retrieve an old calculation with the right checksum, if any
job = logs.dbcmd('get_job_from_checksum', haz_checksum)
reuse = reuse_hazard and job and os.path.exists(job.ds_calc_dir + '.hdf5')
# recompute the hazard and store the checksum
ebr = (oqparam.calculation_mode == 'event_based_risk' and
'gmfs' not in oqparam.inputs)
if ebr:
kw = dict(calculation_mode='event_based')
if (oqparam.sites or 'sites' in oqparam.inputs or
'site_model' in oqparam.inputs):
# remove exposure from the hazard
kw['exposure_file'] = ''
else:
kw = {}
if not reuse:
hc_id = run_job(job_ini, log_level, log_file, exports, **kw)
if job is None:
logs.dbcmd('add_checksum', hc_id, haz_checksum)
elif not reuse_hazard or not os.path.exists(job.ds_calc_dir + '.hdf5'):
logs.dbcmd('update_job_checksum', hc_id, haz_checksum)
if ebr:
run_job(job_ini, log_level, log_file,
exports, hazard_calculation_id=hc_id)
else:
hc_id = job.id
logging.info('Reusing job #%d', job.id)
run_job(job_ini, log_level, log_file,
exports, hazard_calculation_id=hc_id) | python | def smart_run(job_ini, oqparam, log_level, log_file, exports, reuse_hazard):
haz_checksum = readinput.get_checksum32(oqparam, hazard=True)
job = logs.dbcmd('get_job_from_checksum', haz_checksum)
reuse = reuse_hazard and job and os.path.exists(job.ds_calc_dir + '.hdf5')
ebr = (oqparam.calculation_mode == 'event_based_risk' and
'gmfs' not in oqparam.inputs)
if ebr:
kw = dict(calculation_mode='event_based')
if (oqparam.sites or 'sites' in oqparam.inputs or
'site_model' in oqparam.inputs):
kw['exposure_file'] = ''
else:
kw = {}
if not reuse:
hc_id = run_job(job_ini, log_level, log_file, exports, **kw)
if job is None:
logs.dbcmd('add_checksum', hc_id, haz_checksum)
elif not reuse_hazard or not os.path.exists(job.ds_calc_dir + '.hdf5'):
logs.dbcmd('update_job_checksum', hc_id, haz_checksum)
if ebr:
run_job(job_ini, log_level, log_file,
exports, hazard_calculation_id=hc_id)
else:
hc_id = job.id
logging.info('Reusing job #%d', job.id)
run_job(job_ini, log_level, log_file,
exports, hazard_calculation_id=hc_id) | [
"def",
"smart_run",
"(",
"job_ini",
",",
"oqparam",
",",
"log_level",
",",
"log_file",
",",
"exports",
",",
"reuse_hazard",
")",
":",
"haz_checksum",
"=",
"readinput",
".",
"get_checksum32",
"(",
"oqparam",
",",
"hazard",
"=",
"True",
")",
"# retrieve an old calculation with the right checksum, if any",
"job",
"=",
"logs",
".",
"dbcmd",
"(",
"'get_job_from_checksum'",
",",
"haz_checksum",
")",
"reuse",
"=",
"reuse_hazard",
"and",
"job",
"and",
"os",
".",
"path",
".",
"exists",
"(",
"job",
".",
"ds_calc_dir",
"+",
"'.hdf5'",
")",
"# recompute the hazard and store the checksum",
"ebr",
"=",
"(",
"oqparam",
".",
"calculation_mode",
"==",
"'event_based_risk'",
"and",
"'gmfs'",
"not",
"in",
"oqparam",
".",
"inputs",
")",
"if",
"ebr",
":",
"kw",
"=",
"dict",
"(",
"calculation_mode",
"=",
"'event_based'",
")",
"if",
"(",
"oqparam",
".",
"sites",
"or",
"'sites'",
"in",
"oqparam",
".",
"inputs",
"or",
"'site_model'",
"in",
"oqparam",
".",
"inputs",
")",
":",
"# remove exposure from the hazard",
"kw",
"[",
"'exposure_file'",
"]",
"=",
"''",
"else",
":",
"kw",
"=",
"{",
"}",
"if",
"not",
"reuse",
":",
"hc_id",
"=",
"run_job",
"(",
"job_ini",
",",
"log_level",
",",
"log_file",
",",
"exports",
",",
"*",
"*",
"kw",
")",
"if",
"job",
"is",
"None",
":",
"logs",
".",
"dbcmd",
"(",
"'add_checksum'",
",",
"hc_id",
",",
"haz_checksum",
")",
"elif",
"not",
"reuse_hazard",
"or",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"job",
".",
"ds_calc_dir",
"+",
"'.hdf5'",
")",
":",
"logs",
".",
"dbcmd",
"(",
"'update_job_checksum'",
",",
"hc_id",
",",
"haz_checksum",
")",
"if",
"ebr",
":",
"run_job",
"(",
"job_ini",
",",
"log_level",
",",
"log_file",
",",
"exports",
",",
"hazard_calculation_id",
"=",
"hc_id",
")",
"else",
":",
"hc_id",
"=",
"job",
".",
"id",
"logging",
".",
"info",
"(",
"'Reusing job #%d'",
",",
"job",
".",
"id",
")",
"run_job",
"(",
"job_ini",
",",
"log_level",
",",
"log_file",
",",
"exports",
",",
"hazard_calculation_id",
"=",
"hc_id",
")"
] | Run calculations by storing their hazard checksum and reusing previous
calculations if requested. | [
"Run",
"calculations",
"by",
"storing",
"their",
"hazard",
"checksum",
"and",
"reusing",
"previous",
"calculations",
"if",
"requested",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/engine.py#L104-L137 |
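A minimal sketch of the checksum-reuse pattern that smart_run implements, stripped of the engine's database layer. The cache dict and the run callable are hypothetical stand-ins for logs.dbcmd and run_job, not engine APIs.

# Hypothetical stand-alone sketch: reuse a previous result when the input
# checksum is unchanged, recompute and remember it otherwise.
_cache = {}  # checksum -> calculation id (stands in for the engine db)

def get_or_run(checksum, run):
    if checksum in _cache:
        return _cache[checksum]  # reuse, as smart_run does for hazard
    calc_id = run()
    _cache[checksum] = calc_id
    return calc_id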
gem/oq-engine | openquake/commands/engine.py | engine | def engine(log_file, no_distribute, yes, config_file, make_html_report,
upgrade_db, db_version, what_if_I_upgrade, run,
list_hazard_calculations, list_risk_calculations,
delete_calculation, delete_uncompleted_calculations,
hazard_calculation_id, list_outputs, show_log,
export_output, export_outputs, exports='',
log_level='info', reuse_hazard=False):
"""
Run a calculation using the traditional command line API
"""
if not run:
# configure a basic logging
logs.init()
if config_file:
config.read(os.path.abspath(os.path.expanduser(config_file)),
soft_mem_limit=int, hard_mem_limit=int, port=int,
multi_user=valid.boolean, multi_node=valid.boolean)
if no_distribute:
os.environ['OQ_DISTRIBUTE'] = 'no'
# check if the datadir exists
datadir = datastore.get_datadir()
if not os.path.exists(datadir):
os.makedirs(datadir)
dbserver.ensure_on()
# check if we are talking to the right server
err = dbserver.check_foreign()
if err:
sys.exit(err)
if upgrade_db:
msg = logs.dbcmd('what_if_I_upgrade', 'read_scripts')
if msg.startswith('Your database is already updated'):
pass
elif yes or confirm('Proceed? (y/n) '):
logs.dbcmd('upgrade_db')
sys.exit(0)
if db_version:
safeprint(logs.dbcmd('db_version'))
sys.exit(0)
if what_if_I_upgrade:
safeprint(logs.dbcmd('what_if_I_upgrade', 'extract_upgrade_scripts'))
sys.exit(0)
# check if the db is outdated
outdated = logs.dbcmd('check_outdated')
if outdated:
sys.exit(outdated)
# hazard or hazard+risk
if hazard_calculation_id == -1:
# get the latest calculation of the current user
hc_id = get_job_id(hazard_calculation_id, getpass.getuser())
elif hazard_calculation_id:
# make it possible to use calculations made by another user
hc_id = get_job_id(hazard_calculation_id)
else:
hc_id = None
if run:
log_file = os.path.expanduser(log_file) \
if log_file is not None else None
job_inis = [os.path.expanduser(f) for f in run]
if len(job_inis) == 1 and not hc_id:
# init logs before calling get_oqparam
logs.init('nojob', getattr(logging, log_level.upper()))
# not using logs.handle that logs on the db
oq = readinput.get_oqparam(job_inis[0])
smart_run(job_inis[0], oq, log_level, log_file,
exports, reuse_hazard)
return
for i, job_ini in enumerate(job_inis):
open(job_ini, 'rb').read() # IOError if the file does not exist
job_id = run_job(job_ini, log_level, log_file,
exports, hazard_calculation_id=hc_id)
if not hc_id: # use the first calculation as base for the others
hc_id = job_id
# hazard
elif list_hazard_calculations:
for line in logs.dbcmd(
'list_calculations', 'hazard', getpass.getuser()):
safeprint(line)
elif delete_calculation is not None:
del_calculation(delete_calculation, yes)
# risk
elif list_risk_calculations:
for line in logs.dbcmd('list_calculations', 'risk', getpass.getuser()):
safeprint(line)
# export
elif make_html_report:
safeprint('Written %s' % make_report(make_html_report))
sys.exit(0)
elif list_outputs is not None:
hc_id = get_job_id(list_outputs)
for line in logs.dbcmd('list_outputs', hc_id):
safeprint(line)
elif show_log is not None:
hc_id = get_job_id(show_log)
for line in logs.dbcmd('get_log', hc_id):
safeprint(line)
elif export_output is not None:
output_id, target_dir = export_output
dskey, calc_id, datadir = logs.dbcmd('get_output', int(output_id))
for line in core.export_output(
dskey, calc_id, datadir, os.path.expanduser(target_dir),
exports or 'csv,xml'):
safeprint(line)
elif export_outputs is not None:
job_id, target_dir = export_outputs
hc_id = get_job_id(job_id)
for line in core.export_outputs(
hc_id, os.path.expanduser(target_dir), exports or 'csv,xml'):
safeprint(line)
elif delete_uncompleted_calculations:
logs.dbcmd('delete_uncompleted_calculations', getpass.getuser())
else:
engine.parentparser.prog = 'oq engine'
engine.parentparser.print_usage() | python | def engine(log_file, no_distribute, yes, config_file, make_html_report,
upgrade_db, db_version, what_if_I_upgrade, run,
list_hazard_calculations, list_risk_calculations,
delete_calculation, delete_uncompleted_calculations,
hazard_calculation_id, list_outputs, show_log,
export_output, export_outputs, exports='',
log_level='info', reuse_hazard=False):
if not run:
logs.init()
if config_file:
config.read(os.path.abspath(os.path.expanduser(config_file)),
soft_mem_limit=int, hard_mem_limit=int, port=int,
multi_user=valid.boolean, multi_node=valid.boolean)
if no_distribute:
os.environ['OQ_DISTRIBUTE'] = 'no'
datadir = datastore.get_datadir()
if not os.path.exists(datadir):
os.makedirs(datadir)
dbserver.ensure_on()
err = dbserver.check_foreign()
if err:
sys.exit(err)
if upgrade_db:
msg = logs.dbcmd('what_if_I_upgrade', 'read_scripts')
if msg.startswith('Your database is already updated'):
pass
elif yes or confirm('Proceed? (y/n) '):
logs.dbcmd('upgrade_db')
sys.exit(0)
if db_version:
safeprint(logs.dbcmd('db_version'))
sys.exit(0)
if what_if_I_upgrade:
safeprint(logs.dbcmd('what_if_I_upgrade', 'extract_upgrade_scripts'))
sys.exit(0)
outdated = logs.dbcmd('check_outdated')
if outdated:
sys.exit(outdated)
if hazard_calculation_id == -1:
hc_id = get_job_id(hazard_calculation_id, getpass.getuser())
elif hazard_calculation_id:
hc_id = get_job_id(hazard_calculation_id)
else:
hc_id = None
if run:
log_file = os.path.expanduser(log_file) \
if log_file is not None else None
job_inis = [os.path.expanduser(f) for f in run]
if len(job_inis) == 1 and not hc_id:
logs.init('nojob', getattr(logging, log_level.upper()))
oq = readinput.get_oqparam(job_inis[0])
smart_run(job_inis[0], oq, log_level, log_file,
exports, reuse_hazard)
return
for i, job_ini in enumerate(job_inis):
open(job_ini, 'rb').read()
job_id = run_job(job_ini, log_level, log_file,
exports, hazard_calculation_id=hc_id)
if not hc_id:
hc_id = job_id
elif list_hazard_calculations:
for line in logs.dbcmd(
'list_calculations', 'hazard', getpass.getuser()):
safeprint(line)
elif delete_calculation is not None:
del_calculation(delete_calculation, yes)
elif list_risk_calculations:
for line in logs.dbcmd('list_calculations', 'risk', getpass.getuser()):
safeprint(line)
elif make_html_report:
safeprint('Written %s' % make_report(make_html_report))
sys.exit(0)
elif list_outputs is not None:
hc_id = get_job_id(list_outputs)
for line in logs.dbcmd('list_outputs', hc_id):
safeprint(line)
elif show_log is not None:
hc_id = get_job_id(show_log)
for line in logs.dbcmd('get_log', hc_id):
safeprint(line)
elif export_output is not None:
output_id, target_dir = export_output
dskey, calc_id, datadir = logs.dbcmd('get_output', int(output_id))
for line in core.export_output(
dskey, calc_id, datadir, os.path.expanduser(target_dir),
exports or 'csv,xml'):
safeprint(line)
elif export_outputs is not None:
job_id, target_dir = export_outputs
hc_id = get_job_id(job_id)
for line in core.export_outputs(
hc_id, os.path.expanduser(target_dir), exports or 'csv,xml'):
safeprint(line)
elif delete_uncompleted_calculations:
logs.dbcmd('delete_uncompleted_calculations', getpass.getuser())
else:
engine.parentparser.prog = 'oq engine'
engine.parentparser.print_usage() | [
"def",
"engine",
"(",
"log_file",
",",
"no_distribute",
",",
"yes",
",",
"config_file",
",",
"make_html_report",
",",
"upgrade_db",
",",
"db_version",
",",
"what_if_I_upgrade",
",",
"run",
",",
"list_hazard_calculations",
",",
"list_risk_calculations",
",",
"delete_calculation",
",",
"delete_uncompleted_calculations",
",",
"hazard_calculation_id",
",",
"list_outputs",
",",
"show_log",
",",
"export_output",
",",
"export_outputs",
",",
"exports",
"=",
"''",
",",
"log_level",
"=",
"'info'",
",",
"reuse_hazard",
"=",
"False",
")",
":",
"if",
"not",
"run",
":",
"# configure a basic logging",
"logs",
".",
"init",
"(",
")",
"if",
"config_file",
":",
"config",
".",
"read",
"(",
"os",
".",
"path",
".",
"abspath",
"(",
"os",
".",
"path",
".",
"expanduser",
"(",
"config_file",
")",
")",
",",
"soft_mem_limit",
"=",
"int",
",",
"hard_mem_limit",
"=",
"int",
",",
"port",
"=",
"int",
",",
"multi_user",
"=",
"valid",
".",
"boolean",
",",
"multi_node",
"=",
"valid",
".",
"boolean",
")",
"if",
"no_distribute",
":",
"os",
".",
"environ",
"[",
"'OQ_DISTRIBUTE'",
"]",
"=",
"'no'",
"# check if the datadir exists",
"datadir",
"=",
"datastore",
".",
"get_datadir",
"(",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"datadir",
")",
":",
"os",
".",
"makedirs",
"(",
"datadir",
")",
"dbserver",
".",
"ensure_on",
"(",
")",
"# check if we are talking to the right server",
"err",
"=",
"dbserver",
".",
"check_foreign",
"(",
")",
"if",
"err",
":",
"sys",
".",
"exit",
"(",
"err",
")",
"if",
"upgrade_db",
":",
"msg",
"=",
"logs",
".",
"dbcmd",
"(",
"'what_if_I_upgrade'",
",",
"'read_scripts'",
")",
"if",
"msg",
".",
"startswith",
"(",
"'Your database is already updated'",
")",
":",
"pass",
"elif",
"yes",
"or",
"confirm",
"(",
"'Proceed? (y/n) '",
")",
":",
"logs",
".",
"dbcmd",
"(",
"'upgrade_db'",
")",
"sys",
".",
"exit",
"(",
"0",
")",
"if",
"db_version",
":",
"safeprint",
"(",
"logs",
".",
"dbcmd",
"(",
"'db_version'",
")",
")",
"sys",
".",
"exit",
"(",
"0",
")",
"if",
"what_if_I_upgrade",
":",
"safeprint",
"(",
"logs",
".",
"dbcmd",
"(",
"'what_if_I_upgrade'",
",",
"'extract_upgrade_scripts'",
")",
")",
"sys",
".",
"exit",
"(",
"0",
")",
"# check if the db is outdated",
"outdated",
"=",
"logs",
".",
"dbcmd",
"(",
"'check_outdated'",
")",
"if",
"outdated",
":",
"sys",
".",
"exit",
"(",
"outdated",
")",
"# hazard or hazard+risk",
"if",
"hazard_calculation_id",
"==",
"-",
"1",
":",
"# get the latest calculation of the current user",
"hc_id",
"=",
"get_job_id",
"(",
"hazard_calculation_id",
",",
"getpass",
".",
"getuser",
"(",
")",
")",
"elif",
"hazard_calculation_id",
":",
"# make it possible to use calculations made by another user",
"hc_id",
"=",
"get_job_id",
"(",
"hazard_calculation_id",
")",
"else",
":",
"hc_id",
"=",
"None",
"if",
"run",
":",
"log_file",
"=",
"os",
".",
"path",
".",
"expanduser",
"(",
"log_file",
")",
"if",
"log_file",
"is",
"not",
"None",
"else",
"None",
"job_inis",
"=",
"[",
"os",
".",
"path",
".",
"expanduser",
"(",
"f",
")",
"for",
"f",
"in",
"run",
"]",
"if",
"len",
"(",
"job_inis",
")",
"==",
"1",
"and",
"not",
"hc_id",
":",
"# init logs before calling get_oqparam",
"logs",
".",
"init",
"(",
"'nojob'",
",",
"getattr",
"(",
"logging",
",",
"log_level",
".",
"upper",
"(",
")",
")",
")",
"# not using logs.handle that logs on the db",
"oq",
"=",
"readinput",
".",
"get_oqparam",
"(",
"job_inis",
"[",
"0",
"]",
")",
"smart_run",
"(",
"job_inis",
"[",
"0",
"]",
",",
"oq",
",",
"log_level",
",",
"log_file",
",",
"exports",
",",
"reuse_hazard",
")",
"return",
"for",
"i",
",",
"job_ini",
"in",
"enumerate",
"(",
"job_inis",
")",
":",
"open",
"(",
"job_ini",
",",
"'rb'",
")",
".",
"read",
"(",
")",
"# IOError if the file does not exist",
"job_id",
"=",
"run_job",
"(",
"job_ini",
",",
"log_level",
",",
"log_file",
",",
"exports",
",",
"hazard_calculation_id",
"=",
"hc_id",
")",
"if",
"not",
"hc_id",
":",
"# use the first calculation as base for the others",
"hc_id",
"=",
"job_id",
"# hazard",
"elif",
"list_hazard_calculations",
":",
"for",
"line",
"in",
"logs",
".",
"dbcmd",
"(",
"'list_calculations'",
",",
"'hazard'",
",",
"getpass",
".",
"getuser",
"(",
")",
")",
":",
"safeprint",
"(",
"line",
")",
"elif",
"delete_calculation",
"is",
"not",
"None",
":",
"del_calculation",
"(",
"delete_calculation",
",",
"yes",
")",
"# risk",
"elif",
"list_risk_calculations",
":",
"for",
"line",
"in",
"logs",
".",
"dbcmd",
"(",
"'list_calculations'",
",",
"'risk'",
",",
"getpass",
".",
"getuser",
"(",
")",
")",
":",
"safeprint",
"(",
"line",
")",
"# export",
"elif",
"make_html_report",
":",
"safeprint",
"(",
"'Written %s'",
"%",
"make_report",
"(",
"make_html_report",
")",
")",
"sys",
".",
"exit",
"(",
"0",
")",
"elif",
"list_outputs",
"is",
"not",
"None",
":",
"hc_id",
"=",
"get_job_id",
"(",
"list_outputs",
")",
"for",
"line",
"in",
"logs",
".",
"dbcmd",
"(",
"'list_outputs'",
",",
"hc_id",
")",
":",
"safeprint",
"(",
"line",
")",
"elif",
"show_log",
"is",
"not",
"None",
":",
"hc_id",
"=",
"get_job_id",
"(",
"show_log",
")",
"for",
"line",
"in",
"logs",
".",
"dbcmd",
"(",
"'get_log'",
",",
"hc_id",
")",
":",
"safeprint",
"(",
"line",
")",
"elif",
"export_output",
"is",
"not",
"None",
":",
"output_id",
",",
"target_dir",
"=",
"export_output",
"dskey",
",",
"calc_id",
",",
"datadir",
"=",
"logs",
".",
"dbcmd",
"(",
"'get_output'",
",",
"int",
"(",
"output_id",
")",
")",
"for",
"line",
"in",
"core",
".",
"export_output",
"(",
"dskey",
",",
"calc_id",
",",
"datadir",
",",
"os",
".",
"path",
".",
"expanduser",
"(",
"target_dir",
")",
",",
"exports",
"or",
"'csv,xml'",
")",
":",
"safeprint",
"(",
"line",
")",
"elif",
"export_outputs",
"is",
"not",
"None",
":",
"job_id",
",",
"target_dir",
"=",
"export_outputs",
"hc_id",
"=",
"get_job_id",
"(",
"job_id",
")",
"for",
"line",
"in",
"core",
".",
"export_outputs",
"(",
"hc_id",
",",
"os",
".",
"path",
".",
"expanduser",
"(",
"target_dir",
")",
",",
"exports",
"or",
"'csv,xml'",
")",
":",
"safeprint",
"(",
"line",
")",
"elif",
"delete_uncompleted_calculations",
":",
"logs",
".",
"dbcmd",
"(",
"'delete_uncompleted_calculations'",
",",
"getpass",
".",
"getuser",
"(",
")",
")",
"else",
":",
"engine",
".",
"parentparser",
".",
"prog",
"=",
"'oq engine'",
"engine",
".",
"parentparser",
".",
"print_usage",
"(",
")"
] | Run a calculation using the traditional command line API | [
"Run",
"a",
"calculation",
"using",
"the",
"traditional",
"command",
"line",
"API"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/engine.py#L141-L268 |
gem/oq-engine | openquake/hazardlib/gsim/campbell_bozorgnia_2008.py | CampbellBozorgnia2008.get_mean_and_stddevs | def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
"""
See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values.
"""
# extract dictionaries of coefficients specific to required
# intensity measure type and for PGA
C = self.COEFFS[imt]
C_PGA = self.COEFFS[PGA()]
# compute median pga on rock (vs30=1100), needed for site response
# term calculation
# For spectral accelerations at periods between 0.0 and 0.25 s, Sa (T)
# cannot be less than PGA on soil, therefore if the IMT is in this
# period range it is necessary to calculate PGA on soil
if imt.name == 'SA' and imt.period > 0.0 and imt.period < 0.25:
get_pga_site = True
else:
get_pga_site = False
pga1100, pga_site = self._compute_imt1100(C_PGA,
sites,
rup,
dists,
get_pga_site)
# Get the median ground motion
mean = (self._compute_magnitude_term(C, rup.mag) +
self._compute_distance_term(C, rup, dists) +
self._compute_style_of_faulting_term(C, rup) +
self._compute_hanging_wall_term(C, rup, dists) +
self._compute_shallow_site_response(C, sites, pga1100) +
self._compute_basin_response_term(C, sites.z2pt5))
# If it is necessary to ensure that Sa(T) >= PGA (see previous comment)
if get_pga_site:
idx = mean < np.log(pga_site)
mean[idx] = np.log(pga_site[idx])
stddevs = self._get_stddevs(C,
sites,
pga1100,
C_PGA['s_lny'],
stddev_types)
return mean, stddevs | python | def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
C = self.COEFFS[imt]
C_PGA = self.COEFFS[PGA()]
if imt.name == 'SA' and imt.period > 0.0 and imt.period < 0.25:
get_pga_site = True
else:
get_pga_site = False
pga1100, pga_site = self._compute_imt1100(C_PGA,
sites,
rup,
dists,
get_pga_site)
mean = (self._compute_magnitude_term(C, rup.mag) +
self._compute_distance_term(C, rup, dists) +
self._compute_style_of_faulting_term(C, rup) +
self._compute_hanging_wall_term(C, rup, dists) +
self._compute_shallow_site_response(C, sites, pga1100) +
self._compute_basin_response_term(C, sites.z2pt5))
if get_pga_site:
idx = mean < np.log(pga_site)
mean[idx] = np.log(pga_site[idx])
stddevs = self._get_stddevs(C,
sites,
pga1100,
C_PGA['s_lny'],
stddev_types)
return mean, stddevs | [
"def",
"get_mean_and_stddevs",
"(",
"self",
",",
"sites",
",",
"rup",
",",
"dists",
",",
"imt",
",",
"stddev_types",
")",
":",
"# extract dictionaries of coefficients specific to required",
"# intensity measure type and for PGA",
"C",
"=",
"self",
".",
"COEFFS",
"[",
"imt",
"]",
"C_PGA",
"=",
"self",
".",
"COEFFS",
"[",
"PGA",
"(",
")",
"]",
"# compute median pga on rock (vs30=1100), needed for site response",
"# term calculation",
"# For spectral accelerations at periods between 0.0 and 0.25 s, Sa (T)",
"# cannot be less than PGA on soil, therefore if the IMT is in this",
"# period range it is necessary to calculate PGA on soil",
"if",
"imt",
".",
"name",
"==",
"'SA'",
"and",
"imt",
".",
"period",
">",
"0.0",
"and",
"imt",
".",
"period",
"<",
"0.25",
":",
"get_pga_site",
"=",
"True",
"else",
":",
"get_pga_site",
"=",
"False",
"pga1100",
",",
"pga_site",
"=",
"self",
".",
"_compute_imt1100",
"(",
"C_PGA",
",",
"sites",
",",
"rup",
",",
"dists",
",",
"get_pga_site",
")",
"# Get the median ground motion",
"mean",
"=",
"(",
"self",
".",
"_compute_magnitude_term",
"(",
"C",
",",
"rup",
".",
"mag",
")",
"+",
"self",
".",
"_compute_distance_term",
"(",
"C",
",",
"rup",
",",
"dists",
")",
"+",
"self",
".",
"_compute_style_of_faulting_term",
"(",
"C",
",",
"rup",
")",
"+",
"self",
".",
"_compute_hanging_wall_term",
"(",
"C",
",",
"rup",
",",
"dists",
")",
"+",
"self",
".",
"_compute_shallow_site_response",
"(",
"C",
",",
"sites",
",",
"pga1100",
")",
"+",
"self",
".",
"_compute_basin_response_term",
"(",
"C",
",",
"sites",
".",
"z2pt5",
")",
")",
"# If it is necessary to ensure that Sa(T) >= PGA (see previous comment)",
"if",
"get_pga_site",
":",
"idx",
"=",
"mean",
"<",
"np",
".",
"log",
"(",
"pga_site",
")",
"mean",
"[",
"idx",
"]",
"=",
"np",
".",
"log",
"(",
"pga_site",
"[",
"idx",
"]",
")",
"stddevs",
"=",
"self",
".",
"_get_stddevs",
"(",
"C",
",",
"sites",
",",
"pga1100",
",",
"C_PGA",
"[",
"'s_lny'",
"]",
",",
"stddev_types",
")",
"return",
"mean",
",",
"stddevs"
] | See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values. | [
"See",
":",
"meth",
":",
"superclass",
"method",
"<",
".",
"base",
".",
"GroundShakingIntensityModel",
".",
"get_mean_and_stddevs",
">",
"for",
"spec",
"of",
"input",
"and",
"result",
"values",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2008.py#L83-L127 |
gem/oq-engine | openquake/hazardlib/gsim/campbell_bozorgnia_2008.py | CampbellBozorgnia2008._compute_imt1100 | def _compute_imt1100(self, C, sites, rup, dists, get_pga_site=False):
"""
Computes the PGA on reference (Vs30 = 1100 m/s) rock.
"""
# Calculates simple site response term assuming all sites 1100 m/s
fsite = (C['c10'] + (C['k2'] * C['n'])) * log(1100. / C['k1'])
# Calculates the PGA on rock
pga1100 = np.exp(self._compute_magnitude_term(C, rup.mag) +
self._compute_distance_term(C, rup, dists) +
self._compute_style_of_faulting_term(C, rup) +
self._compute_hanging_wall_term(C, rup, dists) +
self._compute_basin_response_term(C, sites.z2pt5) +
fsite)
# If PGA at the site is needed then remove factor for rock and
# re-calculate on correct site condition
if get_pga_site:
pga_site = np.exp(np.log(pga1100) - fsite)
fsite = self._compute_shallow_site_response(C, sites, pga1100)
pga_site = np.exp(np.log(pga_site) + fsite)
else:
pga_site = None
return pga1100, pga_site | python | def _compute_imt1100(self, C, sites, rup, dists, get_pga_site=False):
fsite = (C['c10'] + (C['k2'] * C['n'])) * log(1100. / C['k1'])
pga1100 = np.exp(self._compute_magnitude_term(C, rup.mag) +
self._compute_distance_term(C, rup, dists) +
self._compute_style_of_faulting_term(C, rup) +
self._compute_hanging_wall_term(C, rup, dists) +
self._compute_basin_response_term(C, sites.z2pt5) +
fsite)
if get_pga_site:
pga_site = np.exp(np.log(pga1100) - fsite)
fsite = self._compute_shallow_site_response(C, sites, pga1100)
pga_site = np.exp(np.log(pga_site) + fsite)
else:
pga_site = None
return pga1100, pga_site | [
"def",
"_compute_imt1100",
"(",
"self",
",",
"C",
",",
"sites",
",",
"rup",
",",
"dists",
",",
"get_pga_site",
"=",
"False",
")",
":",
"# Calculates simple site response term assuming all sites 1100 m/s",
"fsite",
"=",
"(",
"C",
"[",
"'c10'",
"]",
"+",
"(",
"C",
"[",
"'k2'",
"]",
"*",
"C",
"[",
"'n'",
"]",
")",
")",
"*",
"log",
"(",
"1100.",
"/",
"C",
"[",
"'k1'",
"]",
")",
"# Calculates the PGA on rock",
"pga1100",
"=",
"np",
".",
"exp",
"(",
"self",
".",
"_compute_magnitude_term",
"(",
"C",
",",
"rup",
".",
"mag",
")",
"+",
"self",
".",
"_compute_distance_term",
"(",
"C",
",",
"rup",
",",
"dists",
")",
"+",
"self",
".",
"_compute_style_of_faulting_term",
"(",
"C",
",",
"rup",
")",
"+",
"self",
".",
"_compute_hanging_wall_term",
"(",
"C",
",",
"rup",
",",
"dists",
")",
"+",
"self",
".",
"_compute_basin_response_term",
"(",
"C",
",",
"sites",
".",
"z2pt5",
")",
"+",
"fsite",
")",
"# If PGA at the site is needed then remove factor for rock and",
"# re-calculate on correct site condition",
"if",
"get_pga_site",
":",
"pga_site",
"=",
"np",
".",
"exp",
"(",
"np",
".",
"log",
"(",
"pga1100",
")",
"-",
"fsite",
")",
"fsite",
"=",
"self",
".",
"_compute_shallow_site_response",
"(",
"C",
",",
"sites",
",",
"pga1100",
")",
"pga_site",
"=",
"np",
".",
"exp",
"(",
"np",
".",
"log",
"(",
"pga_site",
")",
"+",
"fsite",
")",
"else",
":",
"pga_site",
"=",
"None",
"return",
"pga1100",
",",
"pga_site"
] | Computes the PGA on reference (Vs30 = 1100 m/s) rock. | [
"Computes",
"the",
"PGA",
"on",
"reference",
"(",
"Vs30",
"=",
"1100",
"m",
"/",
"s",
")",
"rock",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2008.py#L129-L150 |
gem/oq-engine | openquake/hazardlib/gsim/campbell_bozorgnia_2008.py | CampbellBozorgnia2008._compute_magnitude_term | def _compute_magnitude_term(self, C, mag):
"""
Returns the magnitude scaling factor (equation (2), page 144)
"""
fmag = C['c0'] + C['c1'] * mag
if mag <= 5.5:
return fmag
elif mag > 6.5:
return fmag + (C['c2'] * (mag - 5.5)) + (C['c3'] * (mag - 6.5))
else:
return fmag + (C['c2'] * (mag - 5.5)) | python | def _compute_magnitude_term(self, C, mag):
fmag = C['c0'] + C['c1'] * mag
if mag <= 5.5:
return fmag
elif mag > 6.5:
return fmag + (C['c2'] * (mag - 5.5)) + (C['c3'] * (mag - 6.5))
else:
return fmag + (C['c2'] * (mag - 5.5)) | [
"def",
"_compute_magnitude_term",
"(",
"self",
",",
"C",
",",
"mag",
")",
":",
"fmag",
"=",
"C",
"[",
"'c0'",
"]",
"+",
"C",
"[",
"'c1'",
"]",
"*",
"mag",
"if",
"mag",
"<=",
"5.5",
":",
"return",
"fmag",
"elif",
"mag",
">",
"6.5",
":",
"return",
"fmag",
"+",
"(",
"C",
"[",
"'c2'",
"]",
"*",
"(",
"mag",
"-",
"5.5",
")",
")",
"+",
"(",
"C",
"[",
"'c3'",
"]",
"*",
"(",
"mag",
"-",
"6.5",
")",
")",
"else",
":",
"return",
"fmag",
"+",
"(",
"C",
"[",
"'c2'",
"]",
"*",
"(",
"mag",
"-",
"5.5",
")",
")"
] | Returns the magnitude scaling factor (equation (2), page 144) | [
"Returns",
"the",
"magnitude",
"scaling",
"factor",
"(",
"equation",
"(",
"2",
")",
"page",
"144",
")"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2008.py#L152-L162 |
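The piecewise scaling above can be read as a cumulative two-hinge model; a standalone sketch follows, with illustrative coefficient values that are assumptions, not taken from the published coefficient table.

# Sketch of equation (2): hinges at M5.5 and M6.5; coefficients are placeholders.
def magnitude_term(mag, c0=-1.715, c1=0.5, c2=-0.53, c3=-0.262):
    fmag = c0 + c1 * mag
    if mag > 5.5:
        fmag += c2 * (mag - 5.5)
    if mag > 6.5:
        fmag += c3 * (mag - 6.5)
    return fmag

# e.g. magnitude_term(6.0) applies only the first hinge at M5.5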
gem/oq-engine | openquake/hazardlib/gsim/campbell_bozorgnia_2008.py | CampbellBozorgnia2008._compute_distance_term | def _compute_distance_term(self, C, rup, dists):
"""
Returns the distance scaling factor (equation (3), page 145)
"""
return (C['c4'] + C['c5'] * rup.mag) * \
np.log(np.sqrt(dists.rrup ** 2. + C['c6'] ** 2.)) | python | def _compute_distance_term(self, C, rup, dists):
return (C['c4'] + C['c5'] * rup.mag) * \
np.log(np.sqrt(dists.rrup ** 2. + C['c6'] ** 2.)) | [
"def",
"_compute_distance_term",
"(",
"self",
",",
"C",
",",
"rup",
",",
"dists",
")",
":",
"return",
"(",
"C",
"[",
"'c4'",
"]",
"+",
"C",
"[",
"'c5'",
"]",
"*",
"rup",
".",
"mag",
")",
"*",
"np",
".",
"log",
"(",
"np",
".",
"sqrt",
"(",
"dists",
".",
"rrup",
"**",
"2.",
"+",
"C",
"[",
"'c6'",
"]",
"**",
"2.",
")",
")"
] | Returns the distance scaling factor (equation (3), page 145) | [
"Returns",
"the",
"distance",
"scaling",
"factor",
"(",
"equation",
"(",
"3",
")",
"page",
"145",
")"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2008.py#L164-L169 |
gem/oq-engine | openquake/hazardlib/gsim/campbell_bozorgnia_2008.py | CampbellBozorgnia2008._compute_style_of_faulting_term | def _compute_style_of_faulting_term(self, C, rup):
"""
Returns the style of faulting factor, depending on the mechanism (rake)
and top of rupture depth (equations (4) and (5), pages 145 - 146)
"""
frv, fnm = self._get_fault_type_dummy_variables(rup.rake)
if frv > 0.:
# Top of rupture depth term only applies to reverse faults
if rup.ztor < 1.:
ffltz = rup.ztor
else:
ffltz = 1.
else:
ffltz = 0.
return (C['c7'] * frv * ffltz) + (C['c8'] * fnm) | python | def _compute_style_of_faulting_term(self, C, rup):
frv, fnm = self._get_fault_type_dummy_variables(rup.rake)
if frv > 0.:
if rup.ztor < 1.:
ffltz = rup.ztor
else:
ffltz = 1.
else:
ffltz = 0.
return (C['c7'] * frv * ffltz) + (C['c8'] * fnm) | [
"def",
"_compute_style_of_faulting_term",
"(",
"self",
",",
"C",
",",
"rup",
")",
":",
"frv",
",",
"fnm",
"=",
"self",
".",
"_get_fault_type_dummy_variables",
"(",
"rup",
".",
"rake",
")",
"if",
"frv",
">",
"0.",
":",
"# Top of rupture depth term only applies to reverse faults",
"if",
"rup",
".",
"ztor",
"<",
"1.",
":",
"ffltz",
"=",
"rup",
".",
"ztor",
"else",
":",
"ffltz",
"=",
"1.",
"else",
":",
"ffltz",
"=",
"0.",
"return",
"(",
"C",
"[",
"'c7'",
"]",
"*",
"frv",
"*",
"ffltz",
")",
"+",
"(",
"C",
"[",
"'c8'",
"]",
"*",
"fnm",
")"
] | Returns the style of faulting factor, depending on the mechanism (rake)
and top of rupture depth (equations (4) and (5), pages 145 - 146) | [
"Returns",
"the",
"style",
"of",
"faulting",
"factor",
"depending",
"on",
"the",
"mechanism",
"(",
"rake",
")",
"and",
"top",
"of",
"rupture",
"depth",
"(",
"equations",
"(",
"4",
")",
"and",
"(",
"5",
")",
"pages",
"145",
"-",
"146",
")"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2008.py#L171-L186 |
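A standalone sketch of the style-of-faulting term (equations 4-5). The rake-to-dummy mapping below is an assumption about the usual convention (reverse for rake in 30..150, normal for rake in -150..-30), since _get_fault_type_dummy_variables is not shown in this extract; the coefficient values are also placeholders.

# Sketch: reverse-fault depth taper and normal-fault penalty (equations 4-5).
def faulting_term(rake, ztor, c7=0.280, c8=-0.120):
    frv = 1.0 if 30. < rake < 150. else 0.0     # assumed reverse convention
    fnm = 1.0 if -150. < rake < -30. else 0.0   # assumed normal convention
    ffltz = min(ztor, 1.0) if frv else 0.0      # taper applies to reverse only
    return c7 * frv * ffltz + c8 * fnm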
gem/oq-engine | openquake/hazardlib/gsim/campbell_bozorgnia_2008.py | CampbellBozorgnia2008._compute_hanging_wall_term | def _compute_hanging_wall_term(self, C, rup, dists):
"""
Returns the hanging wall scaling term, the product of the scaling
coefficient and four separate scaling terms for distance, magnitude,
rupture depth and dip (equations 6 - 10, page 146). Individual
scaling terms defined in separate functions
"""
return (C['c9'] *
self._get_hanging_wall_distance_term(dists, rup.ztor) *
self._get_hanging_wall_magnitude_term(rup.mag) *
self._get_hanging_wall_depth_term(rup.ztor) *
self._get_hanging_wall_dip_term(rup.dip)) | python | def _compute_hanging_wall_term(self, C, rup, dists):
return (C['c9'] *
self._get_hanging_wall_distance_term(dists, rup.ztor) *
self._get_hanging_wall_magnitude_term(rup.mag) *
self._get_hanging_wall_depth_term(rup.ztor) *
self._get_hanging_wall_dip_term(rup.dip)) | [
"def",
"_compute_hanging_wall_term",
"(",
"self",
",",
"C",
",",
"rup",
",",
"dists",
")",
":",
"return",
"(",
"C",
"[",
"'c9'",
"]",
"*",
"self",
".",
"_get_hanging_wall_distance_term",
"(",
"dists",
",",
"rup",
".",
"ztor",
")",
"*",
"self",
".",
"_get_hanging_wall_magnitude_term",
"(",
"rup",
".",
"mag",
")",
"*",
"self",
".",
"_get_hanging_wall_depth_term",
"(",
"rup",
".",
"ztor",
")",
"*",
"self",
".",
"_get_hanging_wall_dip_term",
"(",
"rup",
".",
"dip",
")",
")"
] | Returns the hanging wall scaling term, the product of the scaling
coefficient and four separate scaling terms for distance, magnitude,
rupture depth and dip (equations 6 - 10, page 146). Individual
scaling terms defined in separate functions | [
"Returns",
"the",
"hanging",
"wall",
"scaling",
"term",
"the",
"product",
"of",
"the",
"scaling",
"coefficient",
"and",
"four",
"separate",
"scaling",
"terms",
"for",
"distance",
"magnitude",
"rupture",
"depth",
"and",
"dip",
"(",
"equations",
"6",
"-",
"10",
"page",
"146",
")",
".",
"Individual",
"scaling",
"terms",
"defined",
"in",
"separate",
"functions"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2008.py#L205-L216 |
gem/oq-engine | openquake/hazardlib/gsim/campbell_bozorgnia_2008.py | CampbellBozorgnia2008._get_hanging_wall_distance_term | def _get_hanging_wall_distance_term(self, dists, ztor):
"""
Returns the hanging wall distance scaling term (equation 7, page 146)
"""
fhngr = np.ones_like(dists.rjb, dtype=float)
idx = dists.rjb > 0.
if ztor < 1.:
temp_rjb = np.sqrt(dists.rjb[idx] ** 2. + 1.)
r_max = np.max(np.column_stack([dists.rrup[idx], temp_rjb]),
axis=1)
fhngr[idx] = (r_max - dists.rjb[idx]) / r_max
else:
fhngr[idx] = (dists.rrup[idx] - dists.rjb[idx]) / dists.rrup[idx]
return fhngr | python | def _get_hanging_wall_distance_term(self, dists, ztor):
fhngr = np.ones_like(dists.rjb, dtype=float)
idx = dists.rjb > 0.
if ztor < 1.:
temp_rjb = np.sqrt(dists.rjb[idx] ** 2. + 1.)
r_max = np.max(np.column_stack([dists.rrup[idx], temp_rjb]),
axis=1)
fhngr[idx] = (r_max - dists.rjb[idx]) / r_max
else:
fhngr[idx] = (dists.rrup[idx] - dists.rjb[idx]) / dists.rrup[idx]
return fhngr | [
"def",
"_get_hanging_wall_distance_term",
"(",
"self",
",",
"dists",
",",
"ztor",
")",
":",
"fhngr",
"=",
"np",
".",
"ones_like",
"(",
"dists",
".",
"rjb",
",",
"dtype",
"=",
"float",
")",
"idx",
"=",
"dists",
".",
"rjb",
">",
"0.",
"if",
"ztor",
"<",
"1.",
":",
"temp_rjb",
"=",
"np",
".",
"sqrt",
"(",
"dists",
".",
"rjb",
"[",
"idx",
"]",
"**",
"2.",
"+",
"1.",
")",
"r_max",
"=",
"np",
".",
"max",
"(",
"np",
".",
"column_stack",
"(",
"[",
"dists",
".",
"rrup",
"[",
"idx",
"]",
",",
"temp_rjb",
"]",
")",
",",
"axis",
"=",
"1",
")",
"fhngr",
"[",
"idx",
"]",
"=",
"(",
"r_max",
"-",
"dists",
".",
"rjb",
"[",
"idx",
"]",
")",
"/",
"r_max",
"else",
":",
"fhngr",
"[",
"idx",
"]",
"=",
"(",
"dists",
".",
"rrup",
"[",
"idx",
"]",
"-",
"dists",
".",
"rjb",
"[",
"idx",
"]",
")",
"/",
"dists",
".",
"rrup",
"[",
"idx",
"]",
"return",
"fhngr"
] | Returns the hanging wall distance scaling term (equation 7, page 146) | [
"Returns",
"the",
"hanging",
"wall",
"distance",
"scaling",
"term",
"(",
"equation",
"7",
"page",
"146",
")"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2008.py#L218-L231 |
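A toy evaluation of the hanging-wall distance taper above, for the shallow-rupture branch only (ztor < 1). The SimpleNamespace stands in for the dists object and the rjb/rrup values are illustrative; np.maximum replaces the column_stack/max idiom with the same result.

# Sketch: hanging-wall distance taper (equation 7), shallow-rupture branch.
import numpy as np
from types import SimpleNamespace

dists = SimpleNamespace(rjb=np.array([0., 2., 10.]),
                        rrup=np.array([3., 4., 11.]))
fhngr = np.ones_like(dists.rjb)
idx = dists.rjb > 0.
temp_rjb = np.sqrt(dists.rjb[idx] ** 2. + 1.)
r_max = np.maximum(dists.rrup[idx], temp_rjb)
fhngr[idx] = (r_max - dists.rjb[idx]) / r_max
# fhngr is 1 on the rupture trace (rjb == 0) and decays towards 0 off-fault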
gem/oq-engine | openquake/hazardlib/gsim/campbell_bozorgnia_2008.py | CampbellBozorgnia2008._compute_shallow_site_response | def _compute_shallow_site_response(self, C, sites, pga1100):
"""
Returns the shallow site response term (equation 11, page 146)
"""
stiff_factor = C['c10'] + (C['k2'] * C['n'])
# Initially default all sites to intermediate rock value
fsite = stiff_factor * np.log(sites.vs30 / C['k1'])
# Check for soft soil sites
idx = sites.vs30 < C['k1']
if np.any(idx):
pga_scale = np.log(pga1100[idx] +
(C['c'] * ((sites.vs30[idx] / C['k1']) **
C['n']))) - np.log(pga1100[idx] + C['c'])
fsite[idx] = C['c10'] * np.log(sites.vs30[idx] / C['k1']) + \
(C['k2'] * pga_scale)
# Any very hard rock sites are rendered to the constant amplification
# factor
idx = sites.vs30 >= 1100.
if np.any(idx):
fsite[idx] = stiff_factor * log(1100. / C['k1'])
return fsite | python | def _compute_shallow_site_response(self, C, sites, pga1100):
stiff_factor = C['c10'] + (C['k2'] * C['n'])
fsite = stiff_factor * np.log(sites.vs30 / C['k1'])
idx = sites.vs30 < C['k1']
if np.any(idx):
pga_scale = np.log(pga1100[idx] +
(C['c'] * ((sites.vs30[idx] / C['k1']) **
C['n']))) - np.log(pga1100[idx] + C['c'])
fsite[idx] = C['c10'] * np.log(sites.vs30[idx] / C['k1']) + \
(C['k2'] * pga_scale)
idx = sites.vs30 >= 1100.
if np.any(idx):
fsite[idx] = stiff_factor * log(1100. / C['k1'])
return fsite | [
"def",
"_compute_shallow_site_response",
"(",
"self",
",",
"C",
",",
"sites",
",",
"pga1100",
")",
":",
"stiff_factor",
"=",
"C",
"[",
"'c10'",
"]",
"+",
"(",
"C",
"[",
"'k2'",
"]",
"*",
"C",
"[",
"'n'",
"]",
")",
"# Initially default all sites to intermediate rock value",
"fsite",
"=",
"stiff_factor",
"*",
"np",
".",
"log",
"(",
"sites",
".",
"vs30",
"/",
"C",
"[",
"'k1'",
"]",
")",
"# Check for soft soil sites",
"idx",
"=",
"sites",
".",
"vs30",
"<",
"C",
"[",
"'k1'",
"]",
"if",
"np",
".",
"any",
"(",
"idx",
")",
":",
"pga_scale",
"=",
"np",
".",
"log",
"(",
"pga1100",
"[",
"idx",
"]",
"+",
"(",
"C",
"[",
"'c'",
"]",
"*",
"(",
"(",
"sites",
".",
"vs30",
"[",
"idx",
"]",
"/",
"C",
"[",
"'k1'",
"]",
")",
"**",
"C",
"[",
"'n'",
"]",
")",
")",
")",
"-",
"np",
".",
"log",
"(",
"pga1100",
"[",
"idx",
"]",
"+",
"C",
"[",
"'c'",
"]",
")",
"fsite",
"[",
"idx",
"]",
"=",
"C",
"[",
"'c10'",
"]",
"*",
"np",
".",
"log",
"(",
"sites",
".",
"vs30",
"[",
"idx",
"]",
"/",
"C",
"[",
"'k1'",
"]",
")",
"+",
"(",
"C",
"[",
"'k2'",
"]",
"*",
"pga_scale",
")",
"# Any very hard rock sites are rendered to the constant amplification",
"# factor",
"idx",
"=",
"sites",
".",
"vs30",
">=",
"1100.",
"if",
"np",
".",
"any",
"(",
"idx",
")",
":",
"fsite",
"[",
"idx",
"]",
"=",
"stiff_factor",
"*",
"log",
"(",
"1100.",
"/",
"C",
"[",
"'k1'",
"]",
")",
"return",
"fsite"
] | Returns the shallow site response term (equation 11, page 146) | [
"Returns",
"the",
"shallow",
"site",
"response",
"term",
"(",
"equation",
"11",
"page",
"146",
")"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2008.py#L262-L283 |
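A scalar sketch of equation 11 covering the three vs30 regimes handled above; on soft sites the amplification is nonlinear in the rock PGA. Coefficient values are placeholders for a single IMT, not copied from the published table.

# Sketch: shallow site response (equation 11) for one site, scalar form.
import numpy as np

def shallow_site_term(vs30, pga1100, c10=1.058, k1=865., k2=-1.186,
                      c=1.88, n=1.18):
    if vs30 >= 1100.:                      # very hard rock: constant factor
        return (c10 + k2 * n) * np.log(1100. / k1)
    if vs30 >= k1:                         # stiff site: linear in log(vs30)
        return (c10 + k2 * n) * np.log(vs30 / k1)
    # soft site: amplification depends on the rock PGA (nonlinear branch)
    return (c10 * np.log(vs30 / k1) +
            k2 * (np.log(pga1100 + c * (vs30 / k1) ** n) -
                  np.log(pga1100 + c)))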
gem/oq-engine | openquake/hazardlib/gsim/campbell_bozorgnia_2008.py | CampbellBozorgnia2008._compute_basin_response_term | def _compute_basin_response_term(self, C, z2pt5):
"""
Returns the basin response term (equation 12, page 146)
"""
fsed = np.zeros_like(z2pt5, dtype=float)
idx = z2pt5 < 1.0
if np.any(idx):
fsed[idx] = C['c11'] * (z2pt5[idx] - 1.0)
idx = z2pt5 > 3.0
if np.any(idx):
fsed[idx] = (C['c12'] * C['k3'] * exp(-0.75)) *\
(1.0 - np.exp(-0.25 * (z2pt5[idx] - 3.0)))
return fsed | python | def _compute_basin_response_term(self, C, z2pt5):
fsed = np.zeros_like(z2pt5, dtype=float)
idx = z2pt5 < 1.0
if np.any(idx):
fsed[idx] = C['c11'] * (z2pt5[idx] - 1.0)
idx = z2pt5 > 3.0
if np.any(idx):
fsed[idx] = (C['c12'] * C['k3'] * exp(-0.75)) *\
(1.0 - np.exp(-0.25 * (z2pt5[idx] - 3.0)))
return fsed | [
"def",
"_compute_basin_response_term",
"(",
"self",
",",
"C",
",",
"z2pt5",
")",
":",
"fsed",
"=",
"np",
".",
"zeros_like",
"(",
"z2pt5",
",",
"dtype",
"=",
"float",
")",
"idx",
"=",
"z2pt5",
"<",
"1.0",
"if",
"np",
".",
"any",
"(",
"idx",
")",
":",
"fsed",
"[",
"idx",
"]",
"=",
"C",
"[",
"'c11'",
"]",
"*",
"(",
"z2pt5",
"[",
"idx",
"]",
"-",
"1.0",
")",
"idx",
"=",
"z2pt5",
">",
"3.0",
"if",
"np",
".",
"any",
"(",
"idx",
")",
":",
"fsed",
"[",
"idx",
"]",
"=",
"(",
"C",
"[",
"'c12'",
"]",
"*",
"C",
"[",
"'k3'",
"]",
"*",
"exp",
"(",
"-",
"0.75",
")",
")",
"*",
"(",
"1.0",
"-",
"np",
".",
"exp",
"(",
"-",
"0.25",
"*",
"(",
"z2pt5",
"[",
"idx",
"]",
"-",
"3.0",
")",
")",
")",
"return",
"fsed"
] | Returns the basin response term (equation 12, page 146) | [
"Returns",
"the",
"basin",
"response",
"term",
"(",
"equation",
"12",
"page",
"146",
")"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2008.py#L285-L298 |
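The basin term is piecewise in sediment depth: linear below 1 km, zero between 1 and 3 km, and a saturating exponential beyond 3 km. A standalone sketch with placeholder coefficients:

# Sketch: basin (sediment depth) term of equation 12; coefficients are
# placeholders, not from the published table.
import numpy as np

def basin_term(z2pt5, c11=0.040, c12=0.610, k3=1.839):
    fsed = np.zeros_like(z2pt5, dtype=float)
    shallow = z2pt5 < 1.0
    fsed[shallow] = c11 * (z2pt5[shallow] - 1.0)
    deep = z2pt5 > 3.0
    fsed[deep] = (c12 * k3 * np.exp(-0.75)) * (
        1.0 - np.exp(-0.25 * (z2pt5[deep] - 3.0)))
    return fsed  # zero for basins between 1 and 3 km deep

print(basin_term(np.array([0.5, 2.0, 5.0])))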
gem/oq-engine | openquake/hazardlib/gsim/campbell_bozorgnia_2008.py | CampbellBozorgnia2008._get_stddevs | def _get_stddevs(self, C, sites, pga1100, sigma_pga, stddev_types):
"""
Returns the standard deviations as described in the "ALEATORY
UNCERTAINTY MODEL" section of the paper. Equations 13 to 19, pages 147
to 151
"""
std_intra = self._compute_intra_event_std(C,
sites.vs30,
pga1100,
sigma_pga)
std_inter = C['t_lny'] * np.ones_like(sites.vs30)
stddevs = []
for stddev_type in stddev_types:
assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
if stddev_type == const.StdDev.TOTAL:
stddevs.append(self._get_total_sigma(C, std_intra, std_inter))
elif stddev_type == const.StdDev.INTRA_EVENT:
stddevs.append(std_intra)
elif stddev_type == const.StdDev.INTER_EVENT:
stddevs.append(std_inter)
return stddevs | python | def _get_stddevs(self, C, sites, pga1100, sigma_pga, stddev_types):
std_intra = self._compute_intra_event_std(C,
sites.vs30,
pga1100,
sigma_pga)
std_inter = C['t_lny'] * np.ones_like(sites.vs30)
stddevs = []
for stddev_type in stddev_types:
assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
if stddev_type == const.StdDev.TOTAL:
stddevs.append(self._get_total_sigma(C, std_intra, std_inter))
elif stddev_type == const.StdDev.INTRA_EVENT:
stddevs.append(std_intra)
elif stddev_type == const.StdDev.INTER_EVENT:
stddevs.append(std_inter)
return stddevs | [
"def",
"_get_stddevs",
"(",
"self",
",",
"C",
",",
"sites",
",",
"pga1100",
",",
"sigma_pga",
",",
"stddev_types",
")",
":",
"std_intra",
"=",
"self",
".",
"_compute_intra_event_std",
"(",
"C",
",",
"sites",
".",
"vs30",
",",
"pga1100",
",",
"sigma_pga",
")",
"std_inter",
"=",
"C",
"[",
"'t_lny'",
"]",
"*",
"np",
".",
"ones_like",
"(",
"sites",
".",
"vs30",
")",
"stddevs",
"=",
"[",
"]",
"for",
"stddev_type",
"in",
"stddev_types",
":",
"assert",
"stddev_type",
"in",
"self",
".",
"DEFINED_FOR_STANDARD_DEVIATION_TYPES",
"if",
"stddev_type",
"==",
"const",
".",
"StdDev",
".",
"TOTAL",
":",
"stddevs",
".",
"append",
"(",
"self",
".",
"_get_total_sigma",
"(",
"C",
",",
"std_intra",
",",
"std_inter",
")",
")",
"elif",
"stddev_type",
"==",
"const",
".",
"StdDev",
".",
"INTRA_EVENT",
":",
"stddevs",
".",
"append",
"(",
"std_intra",
")",
"elif",
"stddev_type",
"==",
"const",
".",
"StdDev",
".",
"INTER_EVENT",
":",
"stddevs",
".",
"append",
"(",
"std_inter",
")",
"return",
"stddevs"
] | Returns the standard deviations as described in the "ALEATORY
UNCERTAINTY MODEL" section of the paper. Equations 13 to 19, pages 147
to 151 | [
"Returns",
"the",
"standard",
"deviations",
"as",
"described",
"in",
"the",
"ALEATORY",
"UNCERTAINTY",
"MODEL",
"section",
"of",
"the",
"paper",
".",
"Equations",
"13",
"to",
"19",
"pages",
"147",
"to",
"151"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2008.py#L300-L321 |
gem/oq-engine | openquake/hazardlib/gsim/campbell_bozorgnia_2008.py | CampbellBozorgnia2008._compute_intra_event_std | def _compute_intra_event_std(self, C, vs30, pga1100, sigma_pga):
"""
Returns the intra-event standard deviation at the site, as defined in
equation 15, page 147
"""
# Get intra-event standard deviation at the base of the site profile
sig_lnyb = np.sqrt(C['s_lny'] ** 2. - C['s_lnAF'] ** 2.)
sig_lnab = np.sqrt(sigma_pga ** 2. - C['s_lnAF'] ** 2.)
# Get linearised relationship between f_site and ln PGA
alpha = self._compute_intra_event_alpha(C, vs30, pga1100)
return np.sqrt(
(sig_lnyb ** 2.) +
(C['s_lnAF'] ** 2.) +
((alpha ** 2.) * (sig_lnab ** 2.)) +
(2.0 * alpha * C['rho'] * sig_lnyb * sig_lnab)) | python | def _compute_intra_event_std(self, C, vs30, pga1100, sigma_pga):
sig_lnyb = np.sqrt(C['s_lny'] ** 2. - C['s_lnAF'] ** 2.)
sig_lnab = np.sqrt(sigma_pga ** 2. - C['s_lnAF'] ** 2.)
alpha = self._compute_intra_event_alpha(C, vs30, pga1100)
return np.sqrt(
(sig_lnyb ** 2.) +
(C['s_lnAF'] ** 2.) +
((alpha ** 2.) * (sig_lnab ** 2.)) +
(2.0 * alpha * C['rho'] * sig_lnyb * sig_lnab)) | [
"def",
"_compute_intra_event_std",
"(",
"self",
",",
"C",
",",
"vs30",
",",
"pga1100",
",",
"sigma_pga",
")",
":",
"# Get intra-event standard deviation at the base of the site profile",
"sig_lnyb",
"=",
"np",
".",
"sqrt",
"(",
"C",
"[",
"'s_lny'",
"]",
"**",
"2.",
"-",
"C",
"[",
"'s_lnAF'",
"]",
"**",
"2.",
")",
"sig_lnab",
"=",
"np",
".",
"sqrt",
"(",
"sigma_pga",
"**",
"2.",
"-",
"C",
"[",
"'s_lnAF'",
"]",
"**",
"2.",
")",
"# Get linearised relationship between f_site and ln PGA",
"alpha",
"=",
"self",
".",
"_compute_intra_event_alpha",
"(",
"C",
",",
"vs30",
",",
"pga1100",
")",
"return",
"np",
".",
"sqrt",
"(",
"(",
"sig_lnyb",
"**",
"2.",
")",
"+",
"(",
"C",
"[",
"'s_lnAF'",
"]",
"**",
"2.",
")",
"+",
"(",
"(",
"alpha",
"**",
"2.",
")",
"*",
"(",
"sig_lnab",
"**",
"2.",
")",
")",
"+",
"(",
"2.0",
"*",
"alpha",
"*",
"C",
"[",
"'rho'",
"]",
"*",
"sig_lnyb",
"*",
"sig_lnab",
")",
")"
] | Returns the intra-event standard deviation at the site, as defined in
equation 15, page 147 | [
"Returns",
"the",
"intra",
"-",
"event",
"standard",
"deviation",
"at",
"the",
"site",
"as",
"defined",
"in",
"equation",
"15",
"page",
"147"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2008.py#L323-L338 |
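Equation 15 combines the base-of-profile variances of the IMT and of PGA through the site-response slope alpha and the correlation rho. A scalar sketch with hypothetical values (alpha is negative on soft sites, so the cross term reduces sigma):

# Sketch: intra-event sigma combination (equation 15), scalar placeholders.
import numpy as np

s_lny, s_lnAF, sigma_pga, rho, alpha = 0.478, 0.300, 0.480, 1.000, -0.2
sig_lnyb = np.sqrt(s_lny ** 2 - s_lnAF ** 2)      # base-of-profile sigma, IMT
sig_lnab = np.sqrt(sigma_pga ** 2 - s_lnAF ** 2)  # base-of-profile sigma, PGA
std_intra = np.sqrt(sig_lnyb ** 2 + s_lnAF ** 2 +
                    alpha ** 2 * sig_lnab ** 2 +
                    2.0 * alpha * rho * sig_lnyb * sig_lnab)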
gem/oq-engine | openquake/hazardlib/gsim/campbell_bozorgnia_2008.py | CampbellBozorgnia2008._compute_intra_event_alpha | def _compute_intra_event_alpha(self, C, vs30, pga1100):
"""
Returns the linearised functional relationship between fsite and
pga1100, determined from the partial derivative defined on equation 17
on page 148
"""
alpha = np.zeros_like(vs30, dtype=float)
idx = vs30 < C['k1']
if np.any(idx):
temp1 = (pga1100[idx] +
C['c'] * (vs30[idx] / C['k1']) ** C['n']) ** -1.
temp1 = temp1 - ((pga1100[idx] + C['c']) ** -1.)
alpha[idx] = C['k2'] * pga1100[idx] * temp1
return alpha | python | def _compute_intra_event_alpha(self, C, vs30, pga1100):
alpha = np.zeros_like(vs30, dtype=float)
idx = vs30 < C['k1']
if np.any(idx):
temp1 = (pga1100[idx] +
C['c'] * (vs30[idx] / C['k1']) ** C['n']) ** -1.
temp1 = temp1 - ((pga1100[idx] + C['c']) ** -1.)
alpha[idx] = C['k2'] * pga1100[idx] * temp1
return alpha | [
"def",
"_compute_intra_event_alpha",
"(",
"self",
",",
"C",
",",
"vs30",
",",
"pga1100",
")",
":",
"alpha",
"=",
"np",
".",
"zeros_like",
"(",
"vs30",
",",
"dtype",
"=",
"float",
")",
"idx",
"=",
"vs30",
"<",
"C",
"[",
"'k1'",
"]",
"if",
"np",
".",
"any",
"(",
"idx",
")",
":",
"temp1",
"=",
"(",
"pga1100",
"[",
"idx",
"]",
"+",
"C",
"[",
"'c'",
"]",
"*",
"(",
"vs30",
"[",
"idx",
"]",
"/",
"C",
"[",
"'k1'",
"]",
")",
"**",
"C",
"[",
"'n'",
"]",
")",
"**",
"-",
"1.",
"temp1",
"=",
"temp1",
"-",
"(",
"(",
"pga1100",
"[",
"idx",
"]",
"+",
"C",
"[",
"'c'",
"]",
")",
"**",
"-",
"1.",
")",
"alpha",
"[",
"idx",
"]",
"=",
"C",
"[",
"'k2'",
"]",
"*",
"pga1100",
"[",
"idx",
"]",
"*",
"temp1",
"return",
"alpha"
] | Returns the linearised functional relationship between fsite and
pga1100, determined from the partial derivative defined on equation 17
on page 148 | [
"Returns",
"the",
"linearised",
"functional",
"relationship",
"between",
"fsite",
"and",
"pga1100",
"determined",
"from",
"the",
"partial",
"derivative",
"defined",
"on",
"equation",
"17",
"on",
"page",
"148"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2008.py#L340-L354 |
gem/oq-engine | openquake/hazardlib/gsim/campbell_bozorgnia_2008.py | CampbellBozorgnia2008Arbitrary._get_total_sigma | def _get_total_sigma(self, C, std_intra, std_inter):
"""
Returns the total sigma term for the arbitrary horizontal component of
ground motion defined by equation 18, page 150
"""
return np.sqrt(std_intra ** 2. + std_inter ** 2. + C['c_lny'] ** 2.) | python | def _get_total_sigma(self, C, std_intra, std_inter):
return np.sqrt(std_intra ** 2. + std_inter ** 2. + C['c_lny'] ** 2.) | [
"def",
"_get_total_sigma",
"(",
"self",
",",
"C",
",",
"std_intra",
",",
"std_inter",
")",
":",
"return",
"np",
".",
"sqrt",
"(",
"std_intra",
"**",
"2.",
"+",
"std_inter",
"**",
"2.",
"+",
"C",
"[",
"'c_lny'",
"]",
"**",
"2.",
")"
] | Returns the total sigma term for the arbitrary horizontal component of
ground motion defined by equation 18, page 150 | [
"Returns",
"the",
"total",
"sigma",
"term",
"for",
"the",
"arbitrary",
"horizontal",
"component",
"of",
"ground",
"motion",
"defined",
"by",
"equation",
"18",
"page",
"150"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2008.py#L407-L412 |
gem/oq-engine | openquake/calculators/ucerf_event_based.py | generate_event_set | def generate_event_set(ucerf, background_sids, src_filter, ses_idx, seed):
"""
Generates the event set corresponding to a particular branch
"""
serial = seed + ses_idx * TWO16
# get rates from file
with h5py.File(ucerf.source_file, 'r') as hdf5:
occurrences = ucerf.tom.sample_number_of_occurrences(ucerf.rate, seed)
indices, = numpy.where(occurrences)
logging.debug(
'Considering "%s", %d ruptures', ucerf.source_id, len(indices))
# get ruptures from the indices
ruptures = []
rupture_occ = []
for iloc, n_occ in zip(indices, occurrences[indices]):
ucerf_rup = ucerf.get_ucerf_rupture(iloc, src_filter)
if ucerf_rup:
ucerf_rup.serial = serial
serial += 1
ruptures.append(ucerf_rup)
rupture_occ.append(n_occ)
# sample background sources
background_ruptures, background_n_occ = sample_background_model(
hdf5, ucerf.idx_set["grid_key"], ucerf.tom, seed,
background_sids, ucerf.min_mag, ucerf.npd, ucerf.hdd, ucerf.usd,
ucerf.lsd, ucerf.msr, ucerf.aspect, ucerf.tectonic_region_type)
for i, brup in enumerate(background_ruptures):
brup.serial = serial
serial += 1
ruptures.append(brup)
rupture_occ.extend(background_n_occ)
assert len(ruptures) < TWO16, len(ruptures) # < 2^16 ruptures per SES
return ruptures, rupture_occ | python | def generate_event_set(ucerf, background_sids, src_filter, ses_idx, seed):
serial = seed + ses_idx * TWO16
with h5py.File(ucerf.source_file, 'r') as hdf5:
occurrences = ucerf.tom.sample_number_of_occurrences(ucerf.rate, seed)
indices, = numpy.where(occurrences)
logging.debug(
'Considering "%s", %d ruptures', ucerf.source_id, len(indices))
ruptures = []
rupture_occ = []
for iloc, n_occ in zip(indices, occurrences[indices]):
ucerf_rup = ucerf.get_ucerf_rupture(iloc, src_filter)
if ucerf_rup:
ucerf_rup.serial = serial
serial += 1
ruptures.append(ucerf_rup)
rupture_occ.append(n_occ)
background_ruptures, background_n_occ = sample_background_model(
hdf5, ucerf.idx_set["grid_key"], ucerf.tom, seed,
background_sids, ucerf.min_mag, ucerf.npd, ucerf.hdd, ucerf.usd,
ucerf.lsd, ucerf.msr, ucerf.aspect, ucerf.tectonic_region_type)
for i, brup in enumerate(background_ruptures):
brup.serial = serial
serial += 1
ruptures.append(brup)
rupture_occ.extend(background_n_occ)
assert len(ruptures) < TWO16, len(ruptures)
return ruptures, rupture_occ | [
"def",
"generate_event_set",
"(",
"ucerf",
",",
"background_sids",
",",
"src_filter",
",",
"ses_idx",
",",
"seed",
")",
":",
"serial",
"=",
"seed",
"+",
"ses_idx",
"*",
"TWO16",
"# get rates from file",
"with",
"h5py",
".",
"File",
"(",
"ucerf",
".",
"source_file",
",",
"'r'",
")",
"as",
"hdf5",
":",
"occurrences",
"=",
"ucerf",
".",
"tom",
".",
"sample_number_of_occurrences",
"(",
"ucerf",
".",
"rate",
",",
"seed",
")",
"indices",
",",
"=",
"numpy",
".",
"where",
"(",
"occurrences",
")",
"logging",
".",
"debug",
"(",
"'Considering \"%s\", %d ruptures'",
",",
"ucerf",
".",
"source_id",
",",
"len",
"(",
"indices",
")",
")",
"# get ruptures from the indices",
"ruptures",
"=",
"[",
"]",
"rupture_occ",
"=",
"[",
"]",
"for",
"iloc",
",",
"n_occ",
"in",
"zip",
"(",
"indices",
",",
"occurrences",
"[",
"indices",
"]",
")",
":",
"ucerf_rup",
"=",
"ucerf",
".",
"get_ucerf_rupture",
"(",
"iloc",
",",
"src_filter",
")",
"if",
"ucerf_rup",
":",
"ucerf_rup",
".",
"serial",
"=",
"serial",
"serial",
"+=",
"1",
"ruptures",
".",
"append",
"(",
"ucerf_rup",
")",
"rupture_occ",
".",
"append",
"(",
"n_occ",
")",
"# sample background sources",
"background_ruptures",
",",
"background_n_occ",
"=",
"sample_background_model",
"(",
"hdf5",
",",
"ucerf",
".",
"idx_set",
"[",
"\"grid_key\"",
"]",
",",
"ucerf",
".",
"tom",
",",
"seed",
",",
"background_sids",
",",
"ucerf",
".",
"min_mag",
",",
"ucerf",
".",
"npd",
",",
"ucerf",
".",
"hdd",
",",
"ucerf",
".",
"usd",
",",
"ucerf",
".",
"lsd",
",",
"ucerf",
".",
"msr",
",",
"ucerf",
".",
"aspect",
",",
"ucerf",
".",
"tectonic_region_type",
")",
"for",
"i",
",",
"brup",
"in",
"enumerate",
"(",
"background_ruptures",
")",
":",
"brup",
".",
"serial",
"=",
"serial",
"serial",
"+=",
"1",
"ruptures",
".",
"append",
"(",
"brup",
")",
"rupture_occ",
".",
"extend",
"(",
"background_n_occ",
")",
"assert",
"len",
"(",
"ruptures",
")",
"<",
"TWO16",
",",
"len",
"(",
"ruptures",
")",
"# < 2^16 ruptures per SES",
"return",
"ruptures",
",",
"rupture_occ"
] | Generates the event set corresponding to a particular branch | [
"Generates",
"the",
"event",
"set",
"corresponding",
"to",
"a",
"particular",
"branch"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/ucerf_event_based.py#L41-L76 |
gem/oq-engine | openquake/calculators/ucerf_event_based.py | sample_background_model | def sample_background_model(
hdf5, branch_key, tom, seed, filter_idx, min_mag, npd, hdd,
upper_seismogenic_depth, lower_seismogenic_depth, msr=WC1994(),
aspect=1.5, trt=DEFAULT_TRT):
"""
Generates a rupture set from a sample of the background model
:param branch_key:
Key to indicate the branch for selecting the background model
:param tom:
Temporal occurrence model as instance of :class:
openquake.hazardlib.tom.TOM
:param seed:
Random seed to use in the call to tom.sample_number_of_occurrences
:param filter_idx:
Sites for consideration (can be None!)
:param float min_mag:
Minimum magnitude for consideration of background sources
:param npd:
Nodal plane distribution as instance of :class:
openquake.hazardlib.pmf.PMF
:param hdd:
Hypocentral depth distribution as instance of :class:
openquake.hazardlib.pmf.PMF
:param float aspect:
Aspect ratio
:param float upper_seismogenic_depth:
Upper seismogenic depth (km)
:param float lower_seismogenic_depth:
Lower seismogenic depth (km)
:param msr:
Magnitude scaling relation
:param float integration_distance:
Maximum distance from rupture to site for consideration
"""
bg_magnitudes = hdf5["/".join(["Grid", branch_key, "Magnitude"])].value
# Select magnitudes above the minimum magnitudes
mag_idx = bg_magnitudes >= min_mag
mags = bg_magnitudes[mag_idx]
rates = hdf5["/".join(["Grid", branch_key, "RateArray"])][filter_idx, :]
rates = rates[:, mag_idx]
valid_locs = hdf5["Grid/Locations"][filter_idx, :]
# Sample remaining rates
sampler = tom.sample_number_of_occurrences(rates, seed)
background_ruptures = []
background_n_occ = []
for i, mag in enumerate(mags):
rate_idx = numpy.where(sampler[:, i])[0]
rate_cnt = sampler[rate_idx, i]
occurrence = rates[rate_idx, i]
locations = valid_locs[rate_idx, :]
ruptures = generate_background_ruptures(
tom, locations, occurrence,
mag, npd, hdd, upper_seismogenic_depth,
lower_seismogenic_depth, msr, aspect, trt)
background_ruptures.extend(ruptures)
background_n_occ.extend(rate_cnt.tolist())
return background_ruptures, background_n_occ | python | def sample_background_model(
hdf5, branch_key, tom, seed, filter_idx, min_mag, npd, hdd,
upper_seismogenic_depth, lower_seismogenic_depth, msr=WC1994(),
aspect=1.5, trt=DEFAULT_TRT):
bg_magnitudes = hdf5["/".join(["Grid", branch_key, "Magnitude"])].value
mag_idx = bg_magnitudes >= min_mag
mags = bg_magnitudes[mag_idx]
rates = hdf5["/".join(["Grid", branch_key, "RateArray"])][filter_idx, :]
rates = rates[:, mag_idx]
valid_locs = hdf5["Grid/Locations"][filter_idx, :]
sampler = tom.sample_number_of_occurrences(rates, seed)
background_ruptures = []
background_n_occ = []
for i, mag in enumerate(mags):
rate_idx = numpy.where(sampler[:, i])[0]
rate_cnt = sampler[rate_idx, i]
occurrence = rates[rate_idx, i]
locations = valid_locs[rate_idx, :]
ruptures = generate_background_ruptures(
tom, locations, occurrence,
mag, npd, hdd, upper_seismogenic_depth,
lower_seismogenic_depth, msr, aspect, trt)
background_ruptures.extend(ruptures)
background_n_occ.extend(rate_cnt.tolist())
return background_ruptures, background_n_occ | [
"def",
"sample_background_model",
"(",
"hdf5",
",",
"branch_key",
",",
"tom",
",",
"seed",
",",
"filter_idx",
",",
"min_mag",
",",
"npd",
",",
"hdd",
",",
"upper_seismogenic_depth",
",",
"lower_seismogenic_depth",
",",
"msr",
"=",
"WC1994",
"(",
")",
",",
"aspect",
"=",
"1.5",
",",
"trt",
"=",
"DEFAULT_TRT",
")",
":",
"bg_magnitudes",
"=",
"hdf5",
"[",
"\"/\"",
".",
"join",
"(",
"[",
"\"Grid\"",
",",
"branch_key",
",",
"\"Magnitude\"",
"]",
")",
"]",
".",
"value",
"# Select magnitudes above the minimum magnitudes",
"mag_idx",
"=",
"bg_magnitudes",
">=",
"min_mag",
"mags",
"=",
"bg_magnitudes",
"[",
"mag_idx",
"]",
"rates",
"=",
"hdf5",
"[",
"\"/\"",
".",
"join",
"(",
"[",
"\"Grid\"",
",",
"branch_key",
",",
"\"RateArray\"",
"]",
")",
"]",
"[",
"filter_idx",
",",
":",
"]",
"rates",
"=",
"rates",
"[",
":",
",",
"mag_idx",
"]",
"valid_locs",
"=",
"hdf5",
"[",
"\"Grid/Locations\"",
"]",
"[",
"filter_idx",
",",
":",
"]",
"# Sample remaining rates",
"sampler",
"=",
"tom",
".",
"sample_number_of_occurrences",
"(",
"rates",
",",
"seed",
")",
"background_ruptures",
"=",
"[",
"]",
"background_n_occ",
"=",
"[",
"]",
"for",
"i",
",",
"mag",
"in",
"enumerate",
"(",
"mags",
")",
":",
"rate_idx",
"=",
"numpy",
".",
"where",
"(",
"sampler",
"[",
":",
",",
"i",
"]",
")",
"[",
"0",
"]",
"rate_cnt",
"=",
"sampler",
"[",
"rate_idx",
",",
"i",
"]",
"occurrence",
"=",
"rates",
"[",
"rate_idx",
",",
"i",
"]",
"locations",
"=",
"valid_locs",
"[",
"rate_idx",
",",
":",
"]",
"ruptures",
"=",
"generate_background_ruptures",
"(",
"tom",
",",
"locations",
",",
"occurrence",
",",
"mag",
",",
"npd",
",",
"hdd",
",",
"upper_seismogenic_depth",
",",
"lower_seismogenic_depth",
",",
"msr",
",",
"aspect",
",",
"trt",
")",
"background_ruptures",
".",
"extend",
"(",
"ruptures",
")",
"background_n_occ",
".",
"extend",
"(",
"rate_cnt",
".",
"tolist",
"(",
")",
")",
"return",
"background_ruptures",
",",
"background_n_occ"
] | Generates a rupture set from a sample of the background model
:param branch_key:
Key to indicate the branch for selecting the background model
:param tom:
Temporal occurrence model as instance of :class:
openquake.hazardlib.tom.TOM
:param seed:
Random seed to use in the call to tom.sample_number_of_occurrences
:param filter_idx:
Sites for consideration (can be None!)
:param float min_mag:
Minimim magnitude for consideration of background sources
:param npd:
Nodal plane distribution as instance of :class:
openquake.hazardlib.pmf.PMF
:param hdd:
Hypocentral depth distribution as instance of :class:
openquake.hazardlib.pmf.PMF
:param float aspect:
Aspect ratio
:param float upper_seismogenic_depth:
Upper seismogenic depth (km)
:param float lower_seismogenic_depth:
Lower seismogenic depth (km)
:param msr:
Magnitude scaling relation | [
"Generates",
"a",
"rupture",
"set",
"from",
"a",
"sample",
"of",
"the",
"background",
"model"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/ucerf_event_based.py#L79-L136 |
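A minimal sketch of the occurrence-sampling step above, assuming the temporal occurrence model is Poissonian (as with openquake's PoissonTOM, where tom.sample_number_of_occurrences draws Poisson counts from rate * time_span); the rate grid and time span below are illustrative values, not data from the model.

# Sketch: Poisson occurrence sampling over a (site, magnitude-bin) rate grid.
# Assumes a Poissonian temporal occurrence model; all numbers are made up.
import numpy

rates = numpy.array([[1e-2, 1e-3],    # annual rates: one row per grid cell,
                     [5e-3, 2e-4]])   # one column per magnitude bin
time_span = 50.0                      # investigation time in years
rng = numpy.random.default_rng(42)
sampler = rng.poisson(rates * time_span)        # counts, same shape as rates
for i in range(rates.shape[1]):                 # loop over magnitude bins
    rate_idx = numpy.flatnonzero(sampler[:, i])     # cells with >= 1 event
    print(i, rate_idx, sampler[rate_idx, i])        # mirrors the loop above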
gem/oq-engine | openquake/calculators/ucerf_event_based.py | build_ruptures | def build_ruptures(sources, src_filter, param, monitor):
"""
:param sources: a list with a single UCERF source
:param param: extra parameters
:param monitor: a Monitor instance
:returns: a dictionary with eff_ruptures, rup_array and calc_times
"""
[src] = sources
res = AccumDict()
res.calc_times = []
sampl_mon = monitor('sampling ruptures', measuremem=True)
res.trt = DEFAULT_TRT
background_sids = src.get_background_sids(src_filter)
sitecol = src_filter.sitecol
samples = getattr(src, 'samples', 1)
n_occ = AccumDict(accum=0)
t0 = time.time()
with sampl_mon:
for sam_idx in range(samples):
for ses_idx, ses_seed in param['ses_seeds']:
seed = sam_idx * TWO16 + ses_seed
rups, occs = generate_event_set(
src, background_sids, src_filter, ses_idx, seed)
for rup, occ in zip(rups, occs):
n_occ[rup] += occ
tot_occ = sum(n_occ.values())
dic = {'eff_ruptures': {src.src_group_id: src.num_ruptures}}
eb_ruptures = [EBRupture(rup, src.id, src.src_group_id, n, samples)
for rup, n in n_occ.items()]
dic['rup_array'] = stochastic.get_rup_array(eb_ruptures, src_filter)
dt = time.time() - t0
dic['calc_times'] = {src.id: numpy.array([tot_occ, len(sitecol), dt], F32)}
return dic | python | def build_ruptures(sources, src_filter, param, monitor):
[src] = sources
res = AccumDict()
res.calc_times = []
sampl_mon = monitor('sampling ruptures', measuremem=True)
res.trt = DEFAULT_TRT
background_sids = src.get_background_sids(src_filter)
sitecol = src_filter.sitecol
samples = getattr(src, 'samples', 1)
n_occ = AccumDict(accum=0)
t0 = time.time()
with sampl_mon:
for sam_idx in range(samples):
for ses_idx, ses_seed in param['ses_seeds']:
seed = sam_idx * TWO16 + ses_seed
rups, occs = generate_event_set(
src, background_sids, src_filter, ses_idx, seed)
for rup, occ in zip(rups, occs):
n_occ[rup] += occ
tot_occ = sum(n_occ.values())
dic = {'eff_ruptures': {src.src_group_id: src.num_ruptures}}
eb_ruptures = [EBRupture(rup, src.id, src.src_group_id, n, samples)
for rup, n in n_occ.items()]
dic['rup_array'] = stochastic.get_rup_array(eb_ruptures, src_filter)
dt = time.time() - t0
dic['calc_times'] = {src.id: numpy.array([tot_occ, len(sitecol), dt], F32)}
return dic | [
"def",
"build_ruptures",
"(",
"sources",
",",
"src_filter",
",",
"param",
",",
"monitor",
")",
":",
"[",
"src",
"]",
"=",
"sources",
"res",
"=",
"AccumDict",
"(",
")",
"res",
".",
"calc_times",
"=",
"[",
"]",
"sampl_mon",
"=",
"monitor",
"(",
"'sampling ruptures'",
",",
"measuremem",
"=",
"True",
")",
"res",
".",
"trt",
"=",
"DEFAULT_TRT",
"background_sids",
"=",
"src",
".",
"get_background_sids",
"(",
"src_filter",
")",
"sitecol",
"=",
"src_filter",
".",
"sitecol",
"samples",
"=",
"getattr",
"(",
"src",
",",
"'samples'",
",",
"1",
")",
"n_occ",
"=",
"AccumDict",
"(",
"accum",
"=",
"0",
")",
"t0",
"=",
"time",
".",
"time",
"(",
")",
"with",
"sampl_mon",
":",
"for",
"sam_idx",
"in",
"range",
"(",
"samples",
")",
":",
"for",
"ses_idx",
",",
"ses_seed",
"in",
"param",
"[",
"'ses_seeds'",
"]",
":",
"seed",
"=",
"sam_idx",
"*",
"TWO16",
"+",
"ses_seed",
"rups",
",",
"occs",
"=",
"generate_event_set",
"(",
"src",
",",
"background_sids",
",",
"src_filter",
",",
"ses_idx",
",",
"seed",
")",
"for",
"rup",
",",
"occ",
"in",
"zip",
"(",
"rups",
",",
"occs",
")",
":",
"n_occ",
"[",
"rup",
"]",
"+=",
"occ",
"tot_occ",
"=",
"sum",
"(",
"n_occ",
".",
"values",
"(",
")",
")",
"dic",
"=",
"{",
"'eff_ruptures'",
":",
"{",
"src",
".",
"src_group_id",
":",
"src",
".",
"num_ruptures",
"}",
"}",
"eb_ruptures",
"=",
"[",
"EBRupture",
"(",
"rup",
",",
"src",
".",
"id",
",",
"src",
".",
"src_group_id",
",",
"n",
",",
"samples",
")",
"for",
"rup",
",",
"n",
"in",
"n_occ",
".",
"items",
"(",
")",
"]",
"dic",
"[",
"'rup_array'",
"]",
"=",
"stochastic",
".",
"get_rup_array",
"(",
"eb_ruptures",
",",
"src_filter",
")",
"dt",
"=",
"time",
".",
"time",
"(",
")",
"-",
"t0",
"dic",
"[",
"'calc_times'",
"]",
"=",
"{",
"src",
".",
"id",
":",
"numpy",
".",
"array",
"(",
"[",
"tot_occ",
",",
"len",
"(",
"sitecol",
")",
",",
"dt",
"]",
",",
"F32",
")",
"}",
"return",
"dic"
] | :param sources: a list with a single UCERF source
:param param: extra parameters
:param monitor: a Monitor instance
:returns: a dictionary with eff_ruptures, rup_array and calc_times | [
":",
"param",
"sources",
":",
"a",
"list",
"with",
"a",
"single",
"UCERF",
"source",
":",
"param",
"param",
":",
"extra",
"parameters",
":",
"param",
"monitor",
":",
"a",
"Monitor",
"instance",
":",
"returns",
":",
"an",
"AccumDict",
"grp_id",
"-",
">",
"EBRuptures"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/ucerf_event_based.py#L141-L173 |
gem/oq-engine | openquake/hazardlib/scalerel/wc1994.py | WC1994.get_median_area | def get_median_area(self, mag, rake):
"""
The values are a function of both magnitude and rake.
Setting the rake to ``None`` causes their "All" rupture-types
to be applied.
"""
assert rake is None or -180 <= rake <= 180
if rake is None:
# their "All" case
return 10.0 ** (-3.49 + 0.91 * mag)
elif (-45 <= rake <= 45) or (rake >= 135) or (rake <= -135):
# strike slip
return 10.0 ** (-3.42 + 0.90 * mag)
elif rake > 0:
# thrust/reverse
return 10.0 ** (-3.99 + 0.98 * mag)
else:
# normal
return 10.0 ** (-2.87 + 0.82 * mag) | python | def get_median_area(self, mag, rake):
assert rake is None or -180 <= rake <= 180
if rake is None:
return 10.0 ** (-3.49 + 0.91 * mag)
elif (-45 <= rake <= 45) or (rake >= 135) or (rake <= -135):
return 10.0 ** (-3.42 + 0.90 * mag)
elif rake > 0:
return 10.0 ** (-3.99 + 0.98 * mag)
else:
return 10.0 ** (-2.87 + 0.82 * mag) | [
"def",
"get_median_area",
"(",
"self",
",",
"mag",
",",
"rake",
")",
":",
"assert",
"rake",
"is",
"None",
"or",
"-",
"180",
"<=",
"rake",
"<=",
"180",
"if",
"rake",
"is",
"None",
":",
"# their \"All\" case",
"return",
"10.0",
"**",
"(",
"-",
"3.49",
"+",
"0.91",
"*",
"mag",
")",
"elif",
"(",
"-",
"45",
"<=",
"rake",
"<=",
"45",
")",
"or",
"(",
"rake",
">=",
"135",
")",
"or",
"(",
"rake",
"<=",
"-",
"135",
")",
":",
"# strike slip",
"return",
"10.0",
"**",
"(",
"-",
"3.42",
"+",
"0.90",
"*",
"mag",
")",
"elif",
"rake",
">",
"0",
":",
"# thrust/reverse",
"return",
"10.0",
"**",
"(",
"-",
"3.99",
"+",
"0.98",
"*",
"mag",
")",
"else",
":",
"# normal",
"return",
"10.0",
"**",
"(",
"-",
"2.87",
"+",
"0.82",
"*",
"mag",
")"
] | The values are a function of both magnitude and rake.
Setting the rake to ``None`` causes their "All" rupture-types
to be applied. | [
"The",
"values",
"are",
"a",
"function",
"of",
"both",
"magnitude",
"and",
"rake",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/scalerel/wc1994.py#L33-L52 |
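Since the regression coefficients are written out in the branches above, the median area can be verified by hand; a minimal check (pure arithmetic, no openquake import needed):

# Worked check of the WC1994 median-area branches above, for Mw 6.5.
mag = 6.5
area_ss = 10.0 ** (-3.42 + 0.90 * mag)   # strike-slip branch, ~269.2 km^2
area_all = 10.0 ** (-3.49 + 0.91 * mag)  # rake=None ("All") branch, ~266.1 km^2
print(round(area_ss, 1), round(area_all, 1))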
gem/oq-engine | openquake/hazardlib/scalerel/wc1994.py | WC1994.get_std_dev_area | def get_std_dev_area(self, mag, rake):
"""
Standard deviation for WC1994. Magnitude is ignored.
"""
assert rake is None or -180 <= rake <= 180
if rake is None:
# their "All" case
return 0.24
elif (-45 <= rake <= 45) or (rake >= 135) or (rake <= -135):
# strike slip
return 0.22
elif rake > 0:
# thrust/reverse
return 0.26
else:
# normal
return 0.22 | python | def get_std_dev_area(self, mag, rake):
assert rake is None or -180 <= rake <= 180
if rake is None:
return 0.24
elif (-45 <= rake <= 45) or (rake >= 135) or (rake <= -135):
return 0.22
elif rake > 0:
return 0.26
else:
return 0.22 | [
"def",
"get_std_dev_area",
"(",
"self",
",",
"mag",
",",
"rake",
")",
":",
"assert",
"rake",
"is",
"None",
"or",
"-",
"180",
"<=",
"rake",
"<=",
"180",
"if",
"rake",
"is",
"None",
":",
"# their \"All\" case",
"return",
"0.24",
"elif",
"(",
"-",
"45",
"<=",
"rake",
"<=",
"45",
")",
"or",
"(",
"rake",
">=",
"135",
")",
"or",
"(",
"rake",
"<=",
"-",
"135",
")",
":",
"# strike slip",
"return",
"0.22",
"elif",
"rake",
">",
"0",
":",
"# thrust/reverse",
"return",
"0.26",
"else",
":",
"# normal",
"return",
"0.22"
] | Standard deviation for WC1994. Magnitude is ignored. | [
"Standard",
"deviation",
"for",
"WC1994",
".",
"Magnitude",
"is",
"ignored",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/scalerel/wc1994.py#L54-L70 |
gem/oq-engine | openquake/hazardlib/scalerel/wc1994.py | WC1994.get_std_dev_mag | def get_std_dev_mag(self, rake):
"""
Standard deviation on the magnitude for the WC1994 area relation.
"""
assert rake is None or -180 <= rake <= 180
if rake is None:
# their "All" case
return 0.24
elif (-45 <= rake <= 45) or (rake >= 135) or (rake <= -135):
# strike slip
return 0.23
elif rake > 0:
# thrust/reverse
return 0.25
else:
# normal
return 0.25 | python | def get_std_dev_mag(self, rake):
assert rake is None or -180 <= rake <= 180
if rake is None:
return 0.24
elif (-45 <= rake <= 45) or (rake >= 135) or (rake <= -135):
return 0.23
elif rake > 0:
return 0.25
else:
return 0.25 | [
"def",
"get_std_dev_mag",
"(",
"self",
",",
"rake",
")",
":",
"assert",
"rake",
"is",
"None",
"or",
"-",
"180",
"<=",
"rake",
"<=",
"180",
"if",
"rake",
"is",
"None",
":",
"# their \"All\" case",
"return",
"0.24",
"elif",
"(",
"-",
"45",
"<=",
"rake",
"<=",
"45",
")",
"or",
"(",
"rake",
">=",
"135",
")",
"or",
"(",
"rake",
"<=",
"-",
"135",
")",
":",
"# strike slip",
"return",
"0.23",
"elif",
"rake",
">",
"0",
":",
"# thrust/reverse",
"return",
"0.25",
"else",
":",
"# normal",
"return",
"0.25"
] | Standard deviation on the magnitude for the WC1994 area relation. | [
"Standard",
"deviation",
"on",
"the",
"magnitude",
"for",
"the",
"WC1994",
"area",
"relation",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/scalerel/wc1994.py#L72-L88 |
gem/oq-engine | openquake/hazardlib/scalerel/wc1994.py | WC1994.get_median_mag | def get_median_mag(self, area, rake):
"""
Return magnitude (Mw) given the area and rake.
Setting the rake to ``None`` causes their "All" rupture-types
to be applied.
:param area:
Area in square km.
:param rake:
Rake angle (the direction of slip on the fault plane) in degrees,
from -180 to 180.
"""
assert rake is None or -180 <= rake <= 180
if rake is None:
# their "All" case
return 4.07 + 0.98 * log10(area)
elif (-45 <= rake <= 45) or (rake > 135) or (rake < -135):
# strike slip
return 3.98 + 1.02 * log10(area)
elif rake > 0:
# thrust/reverse
return 4.33 + 0.90 * log10(area)
else:
# normal
return 3.93 + 1.02 * log10(area) | python | def get_median_mag(self, area, rake):
assert rake is None or -180 <= rake <= 180
if rake is None:
return 4.07 + 0.98 * log10(area)
elif (-45 <= rake <= 45) or (rake > 135) or (rake < -135):
return 3.98 + 1.02 * log10(area)
elif rake > 0:
return 4.33 + 0.90 * log10(area)
else:
return 3.93 + 1.02 * log10(area) | [
"def",
"get_median_mag",
"(",
"self",
",",
"area",
",",
"rake",
")",
":",
"assert",
"rake",
"is",
"None",
"or",
"-",
"180",
"<=",
"rake",
"<=",
"180",
"if",
"rake",
"is",
"None",
":",
"# their \"All\" case",
"return",
"4.07",
"+",
"0.98",
"*",
"log10",
"(",
"area",
")",
"elif",
"(",
"-",
"45",
"<=",
"rake",
"<=",
"45",
")",
"or",
"(",
"rake",
">",
"135",
")",
"or",
"(",
"rake",
"<",
"-",
"135",
")",
":",
"# strike slip",
"return",
"3.98",
"+",
"1.02",
"*",
"log10",
"(",
"area",
")",
"elif",
"rake",
">",
"0",
":",
"# thrust/reverse",
"return",
"4.33",
"+",
"0.90",
"*",
"log10",
"(",
"area",
")",
"else",
":",
"# normal",
"return",
"3.93",
"+",
"1.02",
"*",
"log10",
"(",
"area",
")"
] | Return magnitude (Mw) given the area and rake.
Setting the rake to ``None`` causes their "All" rupture-types
to be applied.
:param area:
Area in square km.
:param rake:
Rake angle (the direction of slip on the fault plane) in degrees,
from -180 to 180. | [
"Return",
"magnitude",
"(",
"Mw",
")",
"given",
"the",
"area",
"and",
"rake",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/scalerel/wc1994.py#L90-L115 |
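The area and magnitude regressions above were fit independently, so a magnitude -> area -> magnitude round trip lands close to, but not exactly on, the starting value; a quick check:

# Round-trip check of the strike-slip magnitude<->area regressions above.
from math import log10

mag = 6.5
area = 10.0 ** (-3.42 + 0.90 * mag)     # median area from magnitude
mag_back = 3.98 + 1.02 * log10(area)    # median magnitude from that area
print(round(area, 1), round(mag_back, 2))  # ~269.2 km^2, ~6.46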
gem/oq-engine | openquake/hazardlib/gsim/mgmpe/generic_gmpe_avgsa.py | GenericGmpeAvgSA.set_parameters | def set_parameters(self):
"""
Combines the parameters of the GMPE provided at the construction
level with the ones assigned to the average GMPE.
"""
for key in dir(self):
if key.startswith('REQUIRES_'):
setattr(self, key, getattr(self.gmpe, key))
if key.startswith('DEFINED_'):
if not key.endswith('FOR_INTENSITY_MEASURE_TYPES'):
setattr(self, key, getattr(self.gmpe, key)) | python | def set_parameters(self):
for key in dir(self):
if key.startswith('REQUIRES_'):
setattr(self, key, getattr(self.gmpe, key))
if key.startswith('DEFINED_'):
if not key.endswith('FOR_INTENSITY_MEASURE_TYPES'):
setattr(self, key, getattr(self.gmpe, key)) | [
"def",
"set_parameters",
"(",
"self",
")",
":",
"for",
"key",
"in",
"dir",
"(",
"self",
")",
":",
"if",
"key",
".",
"startswith",
"(",
"'REQUIRES_'",
")",
":",
"setattr",
"(",
"self",
",",
"key",
",",
"getattr",
"(",
"self",
".",
"gmpe",
",",
"key",
")",
")",
"if",
"key",
".",
"startswith",
"(",
"'DEFINED_'",
")",
":",
"if",
"not",
"key",
".",
"endswith",
"(",
"'FOR_INTENSITY_MEASURE_TYPES'",
")",
":",
"setattr",
"(",
"self",
",",
"key",
",",
"getattr",
"(",
"self",
".",
"gmpe",
",",
"key",
")",
")"
] | Combines the parameters of the GMPE provided at the construction
level with the ones assigned to the average GMPE. | [
"Combines",
"the",
"parameters",
"of",
"the",
"GMPE",
"provided",
"at",
"the",
"construction",
"level",
"with",
"the",
"ones",
"assigned",
"to",
"the",
"average",
"GMPE",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/mgmpe/generic_gmpe_avgsa.py#L87-L97 |
gem/oq-engine | openquake/hazardlib/gsim/mgmpe/generic_gmpe_avgsa.py | GenericGmpeAvgSA.get_mean_and_stddevs | def get_mean_and_stddevs(self, sites, rup, dists, imt, stds_types):
"""
See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values.
"""
mean_list = []
stddvs_list = []
# Loop over averaging periods
for period in self.avg_periods:
imt_local = SA(float(period))
# compute mean and standard deviation
mean, stddvs = self.gmpe.get_mean_and_stddevs(sites, rup, dists,
imt_local,
stds_types)
mean_list.append(mean)
stddvs_list.append(stddvs[0]) # Support only for total!
mean_avgsa = 0.
stddvs_avgsa = 0.
for i1 in range(self.tnum):
mean_avgsa += mean_list[i1]
for i2 in range(self.tnum):
rho = self.corr_func.get_correlation(self.avg_periods[i1],
self.avg_periods[i2])
stddvs_avgsa += rho * stddvs_list[i1] * stddvs_list[i2]
mean_avgsa /= self.tnum
stddvs_avgsa = np.sqrt(stddvs_avgsa)/self.tnum
return mean_avgsa, [stddvs_avgsa] | python | def get_mean_and_stddevs(self, sites, rup, dists, imt, stds_types):
mean_list = []
stddvs_list = []
for period in self.avg_periods:
imt_local = SA(float(period))
mean, stddvs = self.gmpe.get_mean_and_stddevs(sites, rup, dists,
imt_local,
stds_types)
mean_list.append(mean)
stddvs_list.append(stddvs[0])
mean_avgsa = 0.
stddvs_avgsa = 0.
for i1 in range(self.tnum):
mean_avgsa += mean_list[i1]
for i2 in range(self.tnum):
rho = self.corr_func.get_correlation(self.avg_periods[i1],
self.avg_periods[i2])
stddvs_avgsa += rho * stddvs_list[i1] * stddvs_list[i2]
mean_avgsa /= self.tnum
stddvs_avgsa = np.sqrt(stddvs_avgsa)/self.tnum
return mean_avgsa, [stddvs_avgsa] | [
"def",
"get_mean_and_stddevs",
"(",
"self",
",",
"sites",
",",
"rup",
",",
"dists",
",",
"imt",
",",
"stds_types",
")",
":",
"mean_list",
"=",
"[",
"]",
"stddvs_list",
"=",
"[",
"]",
"# Loop over averaging periods",
"for",
"period",
"in",
"self",
".",
"avg_periods",
":",
"imt_local",
"=",
"SA",
"(",
"float",
"(",
"period",
")",
")",
"# compute mean and standard deviation",
"mean",
",",
"stddvs",
"=",
"self",
".",
"gmpe",
".",
"get_mean_and_stddevs",
"(",
"sites",
",",
"rup",
",",
"dists",
",",
"imt_local",
",",
"stds_types",
")",
"mean_list",
".",
"append",
"(",
"mean",
")",
"stddvs_list",
".",
"append",
"(",
"stddvs",
"[",
"0",
"]",
")",
"# Support only for total!",
"mean_avgsa",
"=",
"0.",
"stddvs_avgsa",
"=",
"0.",
"for",
"i1",
"in",
"range",
"(",
"self",
".",
"tnum",
")",
":",
"mean_avgsa",
"+=",
"mean_list",
"[",
"i1",
"]",
"for",
"i2",
"in",
"range",
"(",
"self",
".",
"tnum",
")",
":",
"rho",
"=",
"self",
".",
"corr_func",
".",
"get_correlation",
"(",
"self",
".",
"avg_periods",
"[",
"i1",
"]",
",",
"self",
".",
"avg_periods",
"[",
"i2",
"]",
")",
"stddvs_avgsa",
"+=",
"rho",
"*",
"stddvs_list",
"[",
"i1",
"]",
"*",
"stddvs_list",
"[",
"i2",
"]",
"mean_avgsa",
"/=",
"self",
".",
"tnum",
"stddvs_avgsa",
"=",
"np",
".",
"sqrt",
"(",
"stddvs_avgsa",
")",
"/",
"self",
".",
"tnum",
"return",
"mean_avgsa",
",",
"[",
"stddvs_avgsa",
"]"
] | See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values. | [
"See",
":",
"meth",
":",
"superclass",
"method",
"<",
".",
"base",
".",
"GroundShakingIntensityModel",
".",
"get_mean_and_stddevs",
">",
"for",
"spec",
"of",
"input",
"and",
"result",
"values",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/mgmpe/generic_gmpe_avgsa.py#L99-L132 |
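The double loop above implements mean = (1/n) * sum(mu_i) and sigma = (1/n) * sqrt(sum_ij rho_ij * sigma_i * sigma_j), which is a quadratic form; a standalone sketch with made-up means, stddevs and correlations (the product sig @ rho @ sig reproduces the double sum):

# Standalone sketch of the AvgSA aggregation above; all values illustrative.
import numpy as np

mu = np.array([-1.2, -1.5, -1.9])    # log-means at three periods
sig = np.array([0.60, 0.65, 0.70])   # total stddevs at the same periods
rho = np.array([[1.0, 0.8, 0.6],
                [0.8, 1.0, 0.8],
                [0.6, 0.8, 1.0]])    # inter-period correlation matrix
n = len(mu)
mean_avgsa = mu.sum() / n
std_avgsa = np.sqrt(sig @ rho @ sig) / n   # quadratic form == double loop
print(round(mean_avgsa, 3), round(std_avgsa, 3))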
gem/oq-engine | openquake/hazardlib/gsim/mgmpe/generic_gmpe_avgsa.py | BakerJayaramCorrelationModel.get_correlation | def get_correlation(self, t1, t2):
"""
Computes the correlation coefficient for the specified periods.
:param float t1:
First period of interest.
:param float t2:
Second period of interest.
:return float rho:
The predicted correlation coefficient.
"""
t_min = min(t1, t2)
t_max = max(t1, t2)
c1 = 1.0
c1 -= np.cos(np.pi / 2.0 - np.log(t_max / max(t_min, 0.109)) * 0.366)
if t_max < 0.2:
c2 = 0.105 * (1.0 - 1.0 / (1.0 + np.exp(100.0 * t_max - 5.0)))
c2 = 1.0 - c2 * (t_max - t_min) / (t_max - 0.0099)
else:
c2 = 0
if t_max < 0.109:
c3 = c2
else:
c3 = c1
c4 = c1
c4 += 0.5 * (np.sqrt(c3) - c3) * (1.0 + np.cos(np.pi * t_min / 0.109))
if t_max <= 0.109:
rho = c2
elif t_min > 0.109:
rho = c1
elif t_max < 0.2:
rho = min(c2, c4)
else:
rho = c4
return rho | python | def get_correlation(self, t1, t2):
t_min = min(t1, t2)
t_max = max(t1, t2)
c1 = 1.0
c1 -= np.cos(np.pi / 2.0 - np.log(t_max / max(t_min, 0.109)) * 0.366)
if t_max < 0.2:
c2 = 0.105 * (1.0 - 1.0 / (1.0 + np.exp(100.0 * t_max - 5.0)))
c2 = 1.0 - c2 * (t_max - t_min) / (t_max - 0.0099)
else:
c2 = 0
if t_max < 0.109:
c3 = c2
else:
c3 = c1
c4 = c1
c4 += 0.5 * (np.sqrt(c3) - c3) * (1.0 + np.cos(np.pi * t_min / 0.109))
if t_max <= 0.109:
rho = c2
elif t_min > 0.109:
rho = c1
elif t_max < 0.2:
rho = min(c2, c4)
else:
rho = c4
return rho | [
"def",
"get_correlation",
"(",
"self",
",",
"t1",
",",
"t2",
")",
":",
"t_min",
"=",
"min",
"(",
"t1",
",",
"t2",
")",
"t_max",
"=",
"max",
"(",
"t1",
",",
"t2",
")",
"c1",
"=",
"1.0",
"c1",
"-=",
"np",
".",
"cos",
"(",
"np",
".",
"pi",
"/",
"2.0",
"-",
"np",
".",
"log",
"(",
"t_max",
"/",
"max",
"(",
"t_min",
",",
"0.109",
")",
")",
"*",
"0.366",
")",
"if",
"t_max",
"<",
"0.2",
":",
"c2",
"=",
"0.105",
"*",
"(",
"1.0",
"-",
"1.0",
"/",
"(",
"1.0",
"+",
"np",
".",
"exp",
"(",
"100.0",
"*",
"t_max",
"-",
"5.0",
")",
")",
")",
"c2",
"=",
"1.0",
"-",
"c2",
"*",
"(",
"t_max",
"-",
"t_min",
")",
"/",
"(",
"t_max",
"-",
"0.0099",
")",
"else",
":",
"c2",
"=",
"0",
"if",
"t_max",
"<",
"0.109",
":",
"c3",
"=",
"c2",
"else",
":",
"c3",
"=",
"c1",
"c4",
"=",
"c1",
"c4",
"+=",
"0.5",
"*",
"(",
"np",
".",
"sqrt",
"(",
"c3",
")",
"-",
"c3",
")",
"*",
"(",
"1.0",
"+",
"np",
".",
"cos",
"(",
"np",
".",
"pi",
"*",
"t_min",
"/",
"0.109",
")",
")",
"if",
"t_max",
"<=",
"0.109",
":",
"rho",
"=",
"c2",
"elif",
"t_min",
">",
"0.109",
":",
"rho",
"=",
"c1",
"elif",
"t_max",
"<",
"0.2",
":",
"rho",
"=",
"min",
"(",
"c2",
",",
"c4",
")",
"else",
":",
"rho",
"=",
"c4",
"return",
"rho"
] | Computes the correlation coefficient for the specified periods.
:param float t1:
First period of interest.
:param float t2:
Second period of interest.
:return float rho:
The predicted correlation coefficient. | [
"Computes",
"the",
"correlation",
"coefficient",
"for",
"the",
"specified",
"periods",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/mgmpe/generic_gmpe_avgsa.py#L165-L208 |
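The piecewise rule above is self-contained, so it can be transcribed for a quick numeric check without installing openquake; this is a sketch of the same coefficients, not the library API:

# Self-contained transcription of the Baker & Jayaram (2008) piecewise
# correlation rule implemented above, for a quick numeric check.
import numpy as np

def bj08_rho(t1, t2):
    t_min, t_max = min(t1, t2), max(t1, t2)
    c1 = 1.0 - np.cos(np.pi / 2.0 - np.log(t_max / max(t_min, 0.109)) * 0.366)
    if t_max < 0.2:
        c2 = 0.105 * (1.0 - 1.0 / (1.0 + np.exp(100.0 * t_max - 5.0)))
        c2 = 1.0 - c2 * (t_max - t_min) / (t_max - 0.0099)
    else:
        c2 = 0.0
    c3 = c2 if t_max < 0.109 else c1
    c4 = c1 + 0.5 * (np.sqrt(c3) - c3) * (1.0 + np.cos(np.pi * t_min / 0.109))
    if t_max <= 0.109:
        return c2
    if t_min > 0.109:
        return c1
    if t_max < 0.2:
        return min(c2, c4)
    return c4

print(bj08_rho(0.5, 0.5))   # 1.0 by construction for equal periods
print(bj08_rho(0.2, 1.0))   # decreases as the periods separate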
gem/oq-engine | openquake/hazardlib/gsim/mgmpe/generic_gmpe_avgsa.py | AkkarCorrelationModel.get_correlation | def get_correlation(self, t1, t2):
"""
Computes the correlation coefficient for the specified periods.
:param float t1:
First period of interest.
:param float t2:
Second period of interest.
:return float:
The predicted correlation coefficient.
"""
if t1 not in act.periods:
raise ValueError('t1 not a valid period')
if t2 not in act.periods:
raise ValueError('t2 not a valid period')
return act.coeff_table[act.periods.index(t1)][act.periods.index(t2)] | python | def get_correlation(self, t1, t2):
if t1 not in act.periods:
raise ValueError('t1 not a valid period')
if t2 not in act.periods:
raise ValueError('t2 not a valid period')
return act.coeff_table[act.periods.index(t1)][act.periods.index(t2)] | [
"def",
"get_correlation",
"(",
"self",
",",
"t1",
",",
"t2",
")",
":",
"if",
"t1",
"not",
"in",
"act",
".",
"periods",
":",
"raise",
"ValueError",
"(",
"'t1 not a valid period'",
")",
"if",
"t2",
"not",
"in",
"act",
".",
"periods",
":",
"raise",
"ValueError",
"(",
"'t2 not a valid period'",
")",
"return",
"act",
".",
"coeff_table",
"[",
"act",
".",
"periods",
".",
"index",
"(",
"t1",
")",
"]",
"[",
"act",
".",
"periods",
".",
"index",
"(",
"t2",
")",
"]"
] | Computes the correlation coefficient for the specified periods.
:param float t1:
First period of interest.
:param float t2:
Second period of interest.
:return float:
The predicted correlation coefficient. | [
"Computes",
"the",
"correlation",
"coefficient",
"for",
"the",
"specified",
"periods",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/mgmpe/generic_gmpe_avgsa.py#L220-L240 |
gem/oq-engine | openquake/hazardlib/geo/mesh.py | surface_to_array | def surface_to_array(surface):
"""
:param surface: a Surface object
:returns: a 3D array of shape (3, N, M)
"""
if hasattr(surface, 'surfaces'): # multiplanar surfaces
n = len(surface.surfaces)
arr = numpy.zeros((3, n, 4), F32)
for i, surf in enumerate(surface.surfaces):
arr[:, i] = surf.mesh.array
return arr
mesh = surface.mesh
if len(mesh.lons.shape) == 1: # 1D mesh
shp = (3, 1) + mesh.lons.shape
else: # 2D mesh
shp = (3,) + mesh.lons.shape
return mesh.array.reshape(shp) | python | def surface_to_array(surface):
if hasattr(surface, 'surfaces'):
n = len(surface.surfaces)
arr = numpy.zeros((3, n, 4), F32)
for i, surf in enumerate(surface.surfaces):
arr[:, i] = surf.mesh.array
return arr
mesh = surface.mesh
if len(mesh.lons.shape) == 1:
shp = (3, 1) + mesh.lons.shape
else:
shp = (3,) + mesh.lons.shape
return mesh.array.reshape(shp) | [
"def",
"surface_to_array",
"(",
"surface",
")",
":",
"if",
"hasattr",
"(",
"surface",
",",
"'surfaces'",
")",
":",
"# multiplanar surfaces",
"n",
"=",
"len",
"(",
"surface",
".",
"surfaces",
")",
"arr",
"=",
"numpy",
".",
"zeros",
"(",
"(",
"3",
",",
"n",
",",
"4",
")",
",",
"F32",
")",
"for",
"i",
",",
"surf",
"in",
"enumerate",
"(",
"surface",
".",
"surfaces",
")",
":",
"arr",
"[",
":",
",",
"i",
"]",
"=",
"surf",
".",
"mesh",
".",
"array",
"return",
"arr",
"mesh",
"=",
"surface",
".",
"mesh",
"if",
"len",
"(",
"mesh",
".",
"lons",
".",
"shape",
")",
"==",
"1",
":",
"# 1D mesh",
"shp",
"=",
"(",
"3",
",",
"1",
")",
"+",
"mesh",
".",
"lons",
".",
"shape",
"else",
":",
"# 2D mesh",
"shp",
"=",
"(",
"3",
",",
")",
"+",
"mesh",
".",
"lons",
".",
"shape",
"return",
"mesh",
".",
"array",
".",
"reshape",
"(",
"shp",
")"
] | :param surface: a Surface object
:returns: a 3D array of shape (3, N, M) | [
":",
"param",
"surface",
":",
"a",
"Surface",
"object",
":",
"returns",
":",
"a",
"3D",
"array",
"of",
"shape",
"(",
"3",
"N",
"M",
")"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L46-L62 |
gem/oq-engine | openquake/hazardlib/geo/mesh.py | Mesh.from_coords | def from_coords(cls, coords, sort=True):
"""
Create a mesh object from a list of 3D coordinates (by sorting them)
:param coords: list of coordinates
:param sort: flag (default True)
:returns: a :class:`Mesh` instance
"""
coords = list(coords)
if sort:
coords.sort()
if len(coords[0]) == 2: # 2D coordinates
lons, lats = zip(*coords)
depths = None
else: # 3D coordinates
lons, lats, depths = zip(*coords)
depths = numpy.array(depths)
return cls(numpy.array(lons), numpy.array(lats), depths) | python | def from_coords(cls, coords, sort=True):
coords = list(coords)
if sort:
coords.sort()
if len(coords[0]) == 2:
lons, lats = zip(*coords)
depths = None
else:
lons, lats, depths = zip(*coords)
depths = numpy.array(depths)
return cls(numpy.array(lons), numpy.array(lats), depths) | [
"def",
"from_coords",
"(",
"cls",
",",
"coords",
",",
"sort",
"=",
"True",
")",
":",
"coords",
"=",
"list",
"(",
"coords",
")",
"if",
"sort",
":",
"coords",
".",
"sort",
"(",
")",
"if",
"len",
"(",
"coords",
"[",
"0",
"]",
")",
"==",
"2",
":",
"# 2D coordinates",
"lons",
",",
"lats",
"=",
"zip",
"(",
"*",
"coords",
")",
"depths",
"=",
"None",
"else",
":",
"# 3D coordinates",
"lons",
",",
"lats",
",",
"depths",
"=",
"zip",
"(",
"*",
"coords",
")",
"depths",
"=",
"numpy",
".",
"array",
"(",
"depths",
")",
"return",
"cls",
"(",
"numpy",
".",
"array",
"(",
"lons",
")",
",",
"numpy",
".",
"array",
"(",
"lats",
")",
",",
"depths",
")"
] | Create a mesh object from a list of 3D coordinates (by sorting them)
:param coords: list of coordinates
:param sort: flag (default True)
:returns: a :class:`Mesh` instance | [
"Create",
"a",
"mesh",
"object",
"from",
"a",
"list",
"of",
"3D",
"coordinates",
"(",
"by",
"sorting",
"them",
")"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L114-L131 |
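A sketch of the sort-and-unzip step performed above (numpy only; the real classmethod wraps the resulting arrays in a Mesh instance):

# Sketch of Mesh.from_coords' sorting and unzipping; coordinates made up.
import numpy

coords = [(10.1, 45.0, 2.0), (10.0, 45.2, 0.0), (10.3, 44.9, 5.0)]
coords.sort()                      # sort=True branch: lexicographic order
lons, lats, depths = zip(*coords)  # 3-tuples; 2-tuples would set depths=None
print(numpy.array(lons), numpy.array(lats), numpy.array(depths))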
gem/oq-engine | openquake/hazardlib/geo/mesh.py | Mesh.from_points_list | def from_points_list(cls, points):
"""
Create a mesh object from a collection of points.
:param points:
List of :class:`~openquake.hazardlib.geo.point.Point` objects.
:returns:
An instance of :class:`Mesh` with one-dimensional arrays
of coordinates from ``points``.
"""
lons = numpy.zeros(len(points), dtype=float)
lats = lons.copy()
depths = lons.copy()
for i in range(len(points)):
lons[i] = points[i].longitude
lats[i] = points[i].latitude
depths[i] = points[i].depth
if not depths.any():
# all points have zero depth, no need to waste memory
depths = None
return cls(lons, lats, depths) | python | def from_points_list(cls, points):
lons = numpy.zeros(len(points), dtype=float)
lats = lons.copy()
depths = lons.copy()
for i in range(len(points)):
lons[i] = points[i].longitude
lats[i] = points[i].latitude
depths[i] = points[i].depth
if not depths.any():
depths = None
return cls(lons, lats, depths) | [
"def",
"from_points_list",
"(",
"cls",
",",
"points",
")",
":",
"lons",
"=",
"numpy",
".",
"zeros",
"(",
"len",
"(",
"points",
")",
",",
"dtype",
"=",
"float",
")",
"lats",
"=",
"lons",
".",
"copy",
"(",
")",
"depths",
"=",
"lons",
".",
"copy",
"(",
")",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"points",
")",
")",
":",
"lons",
"[",
"i",
"]",
"=",
"points",
"[",
"i",
"]",
".",
"longitude",
"lats",
"[",
"i",
"]",
"=",
"points",
"[",
"i",
"]",
".",
"latitude",
"depths",
"[",
"i",
"]",
"=",
"points",
"[",
"i",
"]",
".",
"depth",
"if",
"not",
"depths",
".",
"any",
"(",
")",
":",
"# all points have zero depth, no need to waste memory",
"depths",
"=",
"None",
"return",
"cls",
"(",
"lons",
",",
"lats",
",",
"depths",
")"
] | Create a mesh object from a collection of points.
:param points:
List of :class:`~openquake.hazardlib.geo.point.Point` objects.
:returns:
An instance of :class:`Mesh` with one-dimensional arrays
of coordinates from ``points``. | [
"Create",
"a",
"mesh",
"object",
"from",
"a",
"collection",
"of",
"points",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L134-L154 |
gem/oq-engine | openquake/hazardlib/geo/mesh.py | Mesh.xyz | def xyz(self):
"""
:returns: an array of shape (N, 3) with the cartesian coordinates
"""
return geo_utils.spherical_to_cartesian(
self.lons.flat, self.lats.flat, self.depths.flat) | python | def xyz(self):
return geo_utils.spherical_to_cartesian(
self.lons.flat, self.lats.flat, self.depths.flat) | [
"def",
"xyz",
"(",
"self",
")",
":",
"return",
"geo_utils",
".",
"spherical_to_cartesian",
"(",
"self",
".",
"lons",
".",
"flat",
",",
"self",
".",
"lats",
".",
"flat",
",",
"self",
".",
"depths",
".",
"flat",
")"
] | :returns: an array of shape (N, 3) with the cartesian coordinates | [
":",
"returns",
":",
"an",
"array",
"of",
"shape",
"(",
"N",
"3",
")",
"with",
"the",
"cartesian",
"coordinates"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L167-L172 |
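A minimal spherical-to-cartesian sketch in the convention geo_utils.spherical_to_cartesian presumably uses (a sphere of radius 6371 km with depth subtracted); the radius constant here is an assumption, not taken from the library:

# Sketch of a spherical-to-cartesian conversion; Earth radius is assumed.
import numpy

EARTH_RADIUS = 6371.0  # km, assumed to match openquake's constant

def to_xyz(lons, lats, depths):
    phi = numpy.radians(lats)                  # latitude
    theta = numpy.radians(lons)                # longitude
    rr = EARTH_RADIUS - numpy.asarray(depths)  # radius shrinks with depth
    return numpy.column_stack([rr * numpy.cos(phi) * numpy.cos(theta),
                               rr * numpy.cos(phi) * numpy.sin(theta),
                               rr * numpy.sin(phi)])

print(to_xyz([0.0, 90.0], [0.0, 0.0], [0.0, 10.0]))  # (N, 3) array in km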
gem/oq-engine | openquake/hazardlib/geo/mesh.py | Mesh.get_min_distance | def get_min_distance(self, mesh):
"""
Compute and return the minimum distance from the mesh to each point
in another mesh.
:returns:
numpy array of distances in km, one value per point in ``mesh``
The method makes no assumptions on the arrangement of the points
in either mesh; it calculates the distance from each point of
this mesh to each point of the target mesh and returns the lowest
value found for each.
"""
return cdist(self.xyz, mesh.xyz).min(axis=0) | python | def get_min_distance(self, mesh):
return cdist(self.xyz, mesh.xyz).min(axis=0) | [
"def",
"get_min_distance",
"(",
"self",
",",
"mesh",
")",
":",
"return",
"cdist",
"(",
"self",
".",
"xyz",
",",
"mesh",
".",
"xyz",
")",
".",
"min",
"(",
"axis",
"=",
"0",
")"
] | Compute and return the minimum distance from the mesh to each point
in another mesh.
:returns:
numpy array of distances in km, one value per point in ``mesh``
The method makes no assumptions on the arrangement of the points
in either mesh; it calculates the distance from each point of
this mesh to each point of the target mesh and returns the lowest
value found for each. | [
"Compute",
"and",
"return",
"the",
"minimum",
"distance",
"from",
"the",
"mesh",
"to",
"each",
"point",
"in",
"another",
"mesh",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L236-L249 |
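The one-liner above is a full pairwise distance matrix reduced along this mesh's axis; a self-contained sketch with toy cartesian points:

# Sketch of the cdist + min(axis=0) reduction above; points are made up.
import numpy
from scipy.spatial.distance import cdist

xyz_a = numpy.array([[0.0, 0.0, 0.0], [1.0, 0.0, 0.0]])  # "this" mesh
xyz_b = numpy.array([[0.0, 2.0, 0.0], [3.0, 0.0, 0.0]])  # target mesh
print(cdist(xyz_a, xyz_b).min(axis=0))  # [2. 2.]: one value per target point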
gem/oq-engine | openquake/hazardlib/geo/mesh.py | Mesh.get_closest_points | def get_closest_points(self, mesh):
"""
Find closest point of this mesh for each point in the other mesh
:returns:
:class:`Mesh` object of the same shape as `mesh` with closest
points from this one at respective indices.
"""
min_idx = cdist(self.xyz, mesh.xyz).argmin(axis=0) # lose shape
if hasattr(mesh, 'shape'):
min_idx = min_idx.reshape(mesh.shape)
lons = self.lons.take(min_idx)
lats = self.lats.take(min_idx)
deps = self.depths.take(min_idx)
return Mesh(lons, lats, deps) | python | def get_closest_points(self, mesh):
min_idx = cdist(self.xyz, mesh.xyz).argmin(axis=0)
if hasattr(mesh, 'shape'):
min_idx = min_idx.reshape(mesh.shape)
lons = self.lons.take(min_idx)
lats = self.lats.take(min_idx)
deps = self.depths.take(min_idx)
return Mesh(lons, lats, deps) | [
"def",
"get_closest_points",
"(",
"self",
",",
"mesh",
")",
":",
"min_idx",
"=",
"cdist",
"(",
"self",
".",
"xyz",
",",
"mesh",
".",
"xyz",
")",
".",
"argmin",
"(",
"axis",
"=",
"0",
")",
"# lose shape",
"if",
"hasattr",
"(",
"mesh",
",",
"'shape'",
")",
":",
"min_idx",
"=",
"min_idx",
".",
"reshape",
"(",
"mesh",
".",
"shape",
")",
"lons",
"=",
"self",
".",
"lons",
".",
"take",
"(",
"min_idx",
")",
"lats",
"=",
"self",
".",
"lats",
".",
"take",
"(",
"min_idx",
")",
"deps",
"=",
"self",
".",
"depths",
".",
"take",
"(",
"min_idx",
")",
"return",
"Mesh",
"(",
"lons",
",",
"lats",
",",
"deps",
")"
] | Find closest point of this mesh for each point in the other mesh
:returns:
:class:`Mesh` object of the same shape as `mesh` with closest
points from this one at respective indices. | [
"Find",
"closest",
"point",
"of",
"this",
"mesh",
"for",
"each",
"point",
"in",
"the",
"other",
"mesh"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L251-L265 |
gem/oq-engine | openquake/hazardlib/geo/mesh.py | Mesh.get_distance_matrix | def get_distance_matrix(self):
"""
Compute and return distances between each pair of points in the mesh.
This method requires that the coordinate arrays are one-dimensional.
NB: the depth of the points is ignored
.. warning::
Because of its quadratic space and time complexity this method
is safe to use for meshes of up to several thousand points. For
mesh of 10k points it needs ~800 Mb for just the resulting matrix
and four times that much for intermediate storage.
:returns:
Two-dimensional numpy array, square matrix of distances. The matrix
has zeros on main diagonal and positive distances in kilometers
on all other cells. That is, value in cell (3, 5) is the distance
between mesh's points 3 and 5 in km, and it is equal to value
in cell (5, 3).
Uses :func:`openquake.hazardlib.geo.geodetic.geodetic_distance`.
"""
assert self.lons.ndim == 1
distances = geodetic.geodetic_distance(
self.lons.reshape(self.lons.shape + (1, )),
self.lats.reshape(self.lats.shape + (1, )),
self.lons,
self.lats)
return numpy.matrix(distances, copy=False) | python | def get_distance_matrix(self):
assert self.lons.ndim == 1
distances = geodetic.geodetic_distance(
self.lons.reshape(self.lons.shape + (1, )),
self.lats.reshape(self.lats.shape + (1, )),
self.lons,
self.lats)
return numpy.matrix(distances, copy=False) | [
"def",
"get_distance_matrix",
"(",
"self",
")",
":",
"assert",
"self",
".",
"lons",
".",
"ndim",
"==",
"1",
"distances",
"=",
"geodetic",
".",
"geodetic_distance",
"(",
"self",
".",
"lons",
".",
"reshape",
"(",
"self",
".",
"lons",
".",
"shape",
"+",
"(",
"1",
",",
")",
")",
",",
"self",
".",
"lats",
".",
"reshape",
"(",
"self",
".",
"lats",
".",
"shape",
"+",
"(",
"1",
",",
")",
")",
",",
"self",
".",
"lons",
",",
"self",
".",
"lats",
")",
"return",
"numpy",
".",
"matrix",
"(",
"distances",
",",
"copy",
"=",
"False",
")"
] | Compute and return distances between each pair of points in the mesh.
This method requires that the coordinate arrays are one-dimensional.
NB: the depth of the points is ignored
.. warning::
Because of its quadratic space and time complexity this method
is safe to use for meshes of up to several thousand points. For
mesh of 10k points it needs ~800 Mb for just the resulting matrix
and four times that much for intermediate storage.
:returns:
Two-dimensional numpy array, square matrix of distances. The matrix
has zeros on main diagonal and positive distances in kilometers
on all other cells. That is, value in cell (3, 5) is the distance
between mesh's points 3 and 5 in km, and it is equal to value
in cell (5, 3).
Uses :func:`openquake.hazardlib.geo.geodetic.geodetic_distance`. | [
"Compute",
"and",
"return",
"distances",
"between",
"each",
"pairs",
"of",
"points",
"in",
"the",
"mesh",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L267-L295 |
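The reshape against the flat array above is plain numpy broadcasting; a toy sketch with a 1-D stand-in for geodetic_distance:

# Sketch of the (N,) -> (N, 1) broadcasting used above; toy "distance".
import numpy

lons = numpy.array([0.0, 1.0, 3.0])
pairwise = numpy.abs(lons.reshape(-1, 1) - lons)  # shape (3, 3), zero diagonal
print(pairwise)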
gem/oq-engine | openquake/hazardlib/geo/mesh.py | Mesh._get_proj_convex_hull | def _get_proj_convex_hull(self):
"""
Create a projection centered in the center of this mesh and define
a convex polygon in that projection, enveloping all the points
of the mesh.
:returns:
Tuple of two items: projection function and shapely 2d polygon.
Note that the result geometry can be line or point depending
on number of points in the mesh and their arrangement.
"""
# create a projection centered in the center of points collection
proj = geo_utils.OrthographicProjection(
*geo_utils.get_spherical_bounding_box(self.lons, self.lats))
# project all the points and create a shapely multipoint object.
# need to copy an array because otherwise shapely misinterprets it
coords = numpy.transpose(proj(self.lons.flat, self.lats.flat)).copy()
multipoint = shapely.geometry.MultiPoint(coords)
# create a 2d polygon from a convex hull around that multipoint
return proj, multipoint.convex_hull | python | def _get_proj_convex_hull(self):
proj = geo_utils.OrthographicProjection(
*geo_utils.get_spherical_bounding_box(self.lons, self.lats))
coords = numpy.transpose(proj(self.lons.flat, self.lats.flat)).copy()
multipoint = shapely.geometry.MultiPoint(coords)
return proj, multipoint.convex_hull | [
"def",
"_get_proj_convex_hull",
"(",
"self",
")",
":",
"# create a projection centered in the center of points collection",
"proj",
"=",
"geo_utils",
".",
"OrthographicProjection",
"(",
"*",
"geo_utils",
".",
"get_spherical_bounding_box",
"(",
"self",
".",
"lons",
",",
"self",
".",
"lats",
")",
")",
"# project all the points and create a shapely multipoint object.",
"# need to copy an array because otherwise shapely misinterprets it",
"coords",
"=",
"numpy",
".",
"transpose",
"(",
"proj",
"(",
"self",
".",
"lons",
".",
"flat",
",",
"self",
".",
"lats",
".",
"flat",
")",
")",
".",
"copy",
"(",
")",
"multipoint",
"=",
"shapely",
".",
"geometry",
".",
"MultiPoint",
"(",
"coords",
")",
"# create a 2d polygon from a convex hull around that multipoint",
"return",
"proj",
",",
"multipoint",
".",
"convex_hull"
] | Create a projection centered in the center of this mesh and define
a convex polygon in that projection, enveloping all the points
of the mesh.
:returns:
Tuple of two items: projection function and shapely 2d polygon.
Note that the result geometry can be line or point depending
on number of points in the mesh and their arrangement. | [
"Create",
"a",
"projection",
"centered",
"in",
"the",
"center",
"of",
"this",
"mesh",
"and",
"define",
"a",
"convex",
"polygon",
"in",
"that",
"projection",
"enveloping",
"all",
"the",
"points",
"of",
"the",
"mesh",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L297-L317 |
gem/oq-engine | openquake/hazardlib/geo/mesh.py | Mesh.get_joyner_boore_distance | def get_joyner_boore_distance(self, mesh):
"""
Compute and return Joyner-Boore distance to each point of ``mesh``.
Point's depth is ignored.
See
:meth:`openquake.hazardlib.geo.surface.base.BaseSurface.get_joyner_boore_distance`
for definition of this distance.
:returns:
numpy array of distances in km of the same shape as ``mesh``.
Distance value is considered to be zero if a point
lies inside the polygon enveloping the projection of the mesh
or on one of its edges.
"""
# we perform a hybrid calculation (geodetic mesh-to-mesh distance
# and distance on the projection plane for close points). first,
# we find the closest geodetic distance for each point of target
# mesh to this one. in general that distance is greater than
# the exact distance to enclosing polygon of this mesh and it
# depends on mesh spacing. but the difference can be neglected
# if calculated geodetic distance is over some threshold.
# get the highest slice from the 3D mesh
distances = geodetic.min_geodetic_distance(
(self.lons, self.lats), (mesh.lons, mesh.lats))
# here we find the points for which calculated mesh-to-mesh
# distance is below a threshold. this threshold is arbitrary:
# lower values increase the maximum possible error, higher
# values reduce the efficiency of that filtering. the maximum
# error is equal to the maximum difference between a distance
# from site to two adjacent points of the mesh and distance
# from site to the line connecting them. thus the error is
# a function of distance threshold and mesh spacing. the error
# is maximum when the site lies on a perpendicular to the line
# connecting points of the mesh and that passes the middle
# point between them. the error then can be calculated as
# ``err = trsh - d = trsh - \sqrt(trsh^2 - (ms/2)^2)``, where
# ``trsh`` and ``d`` are distance to mesh points (the one
# we found on the previous step) and distance to the line
# connecting them (the actual distance) and ``ms`` is mesh
# spacing. the threshold of 40 km gives maximum error of 314
# meters for meshes with spacing of 10 km and 5.36 km for
# meshes with spacing of 40 km. if mesh spacing is over
# ``(trsh / \sqrt(2)) * 2`` then points lying in the middle
# of mesh cells (that is inside the polygon) will be filtered
# out by the threshold and have positive distance instead of 0.
# so for threshold of 40 km mesh spacing should not be more
# than 56 km (typical values are 5 to 10 km).
idxs = (distances < 40).nonzero()[0] # indices on the first dimension
if not len(idxs):
# no point is close enough, return distances as they are
return distances
# for all the points that are closer than the threshold we need
# to recalculate the distance and set it to zero, if point falls
# inside the enclosing polygon of the mesh. for doing that we
# project both this mesh and the points of the second mesh--selected
# by distance threshold--to the same Cartesian space, define
# minimum shapely polygon enclosing the mesh and calculate point
# to polygon distance, which gives the most accurate value
# of distance in km (and that value is zero for points inside
# the polygon).
proj, polygon = self._get_proj_enclosing_polygon()
if not isinstance(polygon, shapely.geometry.Polygon):
# either line or point is our enclosing polygon. draw
# a square with side of 10 m around in order to have
# a proper polygon instead.
polygon = polygon.buffer(self.DIST_TOLERANCE, 1)
mesh_xx, mesh_yy = proj(mesh.lons[idxs], mesh.lats[idxs])
# replace geodetic distance values for points-closer-than-the-threshold
# by more accurate point-to-polygon distance values.
distances[idxs] = geo_utils.point_to_polygon_distance(
polygon, mesh_xx, mesh_yy)
return distances | python | def get_joyner_boore_distance(self, mesh):
distances = geodetic.min_geodetic_distance(
(self.lons, self.lats), (mesh.lons, mesh.lats))
idxs = (distances < 40).nonzero()[0]
if not len(idxs):
return distances
proj, polygon = self._get_proj_enclosing_polygon()
if not isinstance(polygon, shapely.geometry.Polygon):
polygon = polygon.buffer(self.DIST_TOLERANCE, 1)
mesh_xx, mesh_yy = proj(mesh.lons[idxs], mesh.lats[idxs])
distances[idxs] = geo_utils.point_to_polygon_distance(
polygon, mesh_xx, mesh_yy)
return distances | [
"def",
"get_joyner_boore_distance",
"(",
"self",
",",
"mesh",
")",
":",
"# we perform a hybrid calculation (geodetic mesh-to-mesh distance",
"# and distance on the projection plane for close points). first,",
"# we find the closest geodetic distance for each point of target",
"# mesh to this one. in general that distance is greater than",
"# the exact distance to enclosing polygon of this mesh and it",
"# depends on mesh spacing. but the difference can be neglected",
"# if calculated geodetic distance is over some threshold.",
"# get the highest slice from the 3D mesh",
"distances",
"=",
"geodetic",
".",
"min_geodetic_distance",
"(",
"(",
"self",
".",
"lons",
",",
"self",
".",
"lats",
")",
",",
"(",
"mesh",
".",
"lons",
",",
"mesh",
".",
"lats",
")",
")",
"# here we find the points for which calculated mesh-to-mesh",
"# distance is below a threshold. this threshold is arbitrary:",
"# lower values increase the maximum possible error, higher",
"# values reduce the efficiency of that filtering. the maximum",
"# error is equal to the maximum difference between a distance",
"# from site to two adjacent points of the mesh and distance",
"# from site to the line connecting them. thus the error is",
"# a function of distance threshold and mesh spacing. the error",
"# is maximum when the site lies on a perpendicular to the line",
"# connecting points of the mesh and that passes the middle",
"# point between them. the error then can be calculated as",
"# ``err = trsh - d = trsh - \\sqrt(trsh^2 - (ms/2)^2)``, where",
"# ``trsh`` and ``d`` are distance to mesh points (the one",
"# we found on the previous step) and distance to the line",
"# connecting them (the actual distance) and ``ms`` is mesh",
"# spacing. the threshold of 40 km gives maximum error of 314",
"# meters for meshes with spacing of 10 km and 5.36 km for",
"# meshes with spacing of 40 km. if mesh spacing is over",
"# ``(trsh / \\sqrt(2)) * 2`` then points lying in the middle",
"# of mesh cells (that is inside the polygon) will be filtered",
"# out by the threshold and have positive distance instead of 0.",
"# so for threshold of 40 km mesh spacing should not be more",
"# than 56 km (typical values are 5 to 10 km).",
"idxs",
"=",
"(",
"distances",
"<",
"40",
")",
".",
"nonzero",
"(",
")",
"[",
"0",
"]",
"# indices on the first dimension",
"if",
"not",
"len",
"(",
"idxs",
")",
":",
"# no point is close enough, return distances as they are",
"return",
"distances",
"# for all the points that are closer than the threshold we need",
"# to recalculate the distance and set it to zero, if point falls",
"# inside the enclosing polygon of the mesh. for doing that we",
"# project both this mesh and the points of the second mesh--selected",
"# by distance threshold--to the same Cartesian space, define",
"# minimum shapely polygon enclosing the mesh and calculate point",
"# to polygon distance, which gives the most accurate value",
"# of distance in km (and that value is zero for points inside",
"# the polygon).",
"proj",
",",
"polygon",
"=",
"self",
".",
"_get_proj_enclosing_polygon",
"(",
")",
"if",
"not",
"isinstance",
"(",
"polygon",
",",
"shapely",
".",
"geometry",
".",
"Polygon",
")",
":",
"# either line or point is our enclosing polygon. draw",
"# a square with side of 10 m around in order to have",
"# a proper polygon instead.",
"polygon",
"=",
"polygon",
".",
"buffer",
"(",
"self",
".",
"DIST_TOLERANCE",
",",
"1",
")",
"mesh_xx",
",",
"mesh_yy",
"=",
"proj",
"(",
"mesh",
".",
"lons",
"[",
"idxs",
"]",
",",
"mesh",
".",
"lats",
"[",
"idxs",
"]",
")",
"# replace geodetic distance values for points-closer-than-the-threshold",
"# by more accurate point-to-polygon distance values.",
"distances",
"[",
"idxs",
"]",
"=",
"geo_utils",
".",
"point_to_polygon_distance",
"(",
"polygon",
",",
"mesh_xx",
",",
"mesh_yy",
")",
"return",
"distances"
] | Compute and return Joyner-Boore distance to each point of ``mesh``.
Point's depth is ignored.
See
:meth:`openquake.hazardlib.geo.surface.base.BaseSurface.get_joyner_boore_distance`
for definition of this distance.
:returns:
numpy array of distances in km of the same shape as ``mesh``.
Distance value is considered to be zero if a point
lies inside the polygon enveloping the projection of the mesh
or on one of its edges. | [
"Compute",
"and",
"return",
"Joyner",
"-",
"Boore",
"distance",
"to",
"each",
"point",
"of",
"mesh",
".",
"Point",
"s",
"depth",
"is",
"ignored",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L319-L393 |
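The long comment above quotes err = trsh - sqrt(trsh^2 - (ms/2)^2); the 314 m and 5.36 km figures can be reproduced directly:

# Worked check of the error bound quoted in the comments above.
from math import sqrt

trsh = 40.0                                  # distance threshold, km
for ms in (10.0, 40.0):                      # mesh spacings, km
    err = trsh - sqrt(trsh ** 2 - (ms / 2.0) ** 2)
    print(ms, round(err * 1000), 'm')        # ~314 m and ~5359 m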
gem/oq-engine | openquake/hazardlib/geo/mesh.py | Mesh._get_proj_enclosing_polygon | def _get_proj_enclosing_polygon(self):
"""
See :meth:`Mesh._get_proj_enclosing_polygon`.
:class:`RectangularMesh` contains an information about relative
positions of points, so it allows to define the minimum polygon,
containing the projection of the mesh, which doesn't necessarily
have to be convex (in contrast to :class:`Mesh` implementation).
:returns:
Same structure as :meth:`Mesh._get_proj_convex_hull`.
"""
if self.lons.size < 4:
# the mesh doesn't contain even a single cell
return self._get_proj_convex_hull()
proj = geo_utils.OrthographicProjection(
*geo_utils.get_spherical_bounding_box(self.lons, self.lats))
if len(self.lons.shape) == 1: # 1D mesh
lons = self.lons.reshape(len(self.lons), 1)
lats = self.lats.reshape(len(self.lats), 1)
else: # 2D mesh
lons = self.lons.T
lats = self.lats.T
mesh2d = numpy.array(proj(lons, lats)).T
lines = iter(mesh2d)
# we iterate over horizontal stripes, keeping the "previous"
# line of points. we keep it reversed, such that together
# with the current line they define the sequence of points
# around the stripe.
prev_line = next(lines)[::-1]
polygons = []
for i, line in enumerate(lines):
coords = numpy.concatenate((prev_line, line, prev_line[0:1]))
# create the shapely polygon object from the stripe
# coordinates and simplify it (remove redundant points,
# if there are any lying on the straight line).
stripe = shapely.geometry.LineString(coords) \
.simplify(self.DIST_TOLERANCE) \
.buffer(self.DIST_TOLERANCE, 2)
polygons.append(shapely.geometry.Polygon(stripe.exterior))
prev_line = line[::-1]
try:
# create a final polygon as the union of all the stripe ones
polygon = shapely.ops.cascaded_union(polygons) \
.simplify(self.DIST_TOLERANCE)
except ValueError:
# NOTE(larsbutler): In some rare cases, we've observed ValueErrors
# ("No Shapely geometry can be created from null value") with very
# specific sets of polygons such that there are two unique
# and many duplicates of one.
# This bug is very difficult to reproduce consistently (except on
# specific platforms) so the work around here is to remove the
# duplicate polygons. In fact, we only observed this error on our
# CI/build machine. None of our dev environments or production
# machines has encountered this error, at least consistently. >:(
polygons = [shapely.wkt.loads(x) for x in
list(set(p.wkt for p in polygons))]
polygon = shapely.ops.cascaded_union(polygons) \
.simplify(self.DIST_TOLERANCE)
return proj, polygon | python | def _get_proj_enclosing_polygon(self):
if self.lons.size < 4:
return self._get_proj_convex_hull()
proj = geo_utils.OrthographicProjection(
*geo_utils.get_spherical_bounding_box(self.lons, self.lats))
if len(self.lons.shape) == 1:
lons = self.lons.reshape(len(self.lons), 1)
lats = self.lats.reshape(len(self.lats), 1)
else:
lons = self.lons.T
lats = self.lats.T
mesh2d = numpy.array(proj(lons, lats)).T
lines = iter(mesh2d)
prev_line = next(lines)[::-1]
polygons = []
for i, line in enumerate(lines):
coords = numpy.concatenate((prev_line, line, prev_line[0:1]))
stripe = shapely.geometry.LineString(coords) \
.simplify(self.DIST_TOLERANCE) \
.buffer(self.DIST_TOLERANCE, 2)
polygons.append(shapely.geometry.Polygon(stripe.exterior))
prev_line = line[::-1]
try:
polygon = shapely.ops.cascaded_union(polygons) \
.simplify(self.DIST_TOLERANCE)
except ValueError:
polygons = [shapely.wkt.loads(x) for x in
list(set(p.wkt for p in polygons))]
polygon = shapely.ops.cascaded_union(polygons) \
.simplify(self.DIST_TOLERANCE)
return proj, polygon | [
"def",
"_get_proj_enclosing_polygon",
"(",
"self",
")",
":",
"if",
"self",
".",
"lons",
".",
"size",
"<",
"4",
":",
"# the mesh doesn't contain even a single cell",
"return",
"self",
".",
"_get_proj_convex_hull",
"(",
")",
"proj",
"=",
"geo_utils",
".",
"OrthographicProjection",
"(",
"*",
"geo_utils",
".",
"get_spherical_bounding_box",
"(",
"self",
".",
"lons",
",",
"self",
".",
"lats",
")",
")",
"if",
"len",
"(",
"self",
".",
"lons",
".",
"shape",
")",
"==",
"1",
":",
"# 1D mesh",
"lons",
"=",
"self",
".",
"lons",
".",
"reshape",
"(",
"len",
"(",
"self",
".",
"lons",
")",
",",
"1",
")",
"lats",
"=",
"self",
".",
"lats",
".",
"reshape",
"(",
"len",
"(",
"self",
".",
"lats",
")",
",",
"1",
")",
"else",
":",
"# 2D mesh",
"lons",
"=",
"self",
".",
"lons",
".",
"T",
"lats",
"=",
"self",
".",
"lats",
".",
"T",
"mesh2d",
"=",
"numpy",
".",
"array",
"(",
"proj",
"(",
"lons",
",",
"lats",
")",
")",
".",
"T",
"lines",
"=",
"iter",
"(",
"mesh2d",
")",
"# we iterate over horizontal stripes, keeping the \"previous\"",
"# line of points. we keep it reversed, such that together",
"# with the current line they define the sequence of points",
"# around the stripe.",
"prev_line",
"=",
"next",
"(",
"lines",
")",
"[",
":",
":",
"-",
"1",
"]",
"polygons",
"=",
"[",
"]",
"for",
"i",
",",
"line",
"in",
"enumerate",
"(",
"lines",
")",
":",
"coords",
"=",
"numpy",
".",
"concatenate",
"(",
"(",
"prev_line",
",",
"line",
",",
"prev_line",
"[",
"0",
":",
"1",
"]",
")",
")",
"# create the shapely polygon object from the stripe",
"# coordinates and simplify it (remove redundant points,",
"# if there are any lying on the straight line).",
"stripe",
"=",
"shapely",
".",
"geometry",
".",
"LineString",
"(",
"coords",
")",
".",
"simplify",
"(",
"self",
".",
"DIST_TOLERANCE",
")",
".",
"buffer",
"(",
"self",
".",
"DIST_TOLERANCE",
",",
"2",
")",
"polygons",
".",
"append",
"(",
"shapely",
".",
"geometry",
".",
"Polygon",
"(",
"stripe",
".",
"exterior",
")",
")",
"prev_line",
"=",
"line",
"[",
":",
":",
"-",
"1",
"]",
"try",
":",
"# create a final polygon as the union of all the stripe ones",
"polygon",
"=",
"shapely",
".",
"ops",
".",
"cascaded_union",
"(",
"polygons",
")",
".",
"simplify",
"(",
"self",
".",
"DIST_TOLERANCE",
")",
"except",
"ValueError",
":",
"# NOTE(larsbutler): In some rare cases, we've observed ValueErrors",
"# (\"No Shapely geometry can be created from null value\") with very",
"# specific sets of polygons such that there are two unique",
"# and many duplicates of one.",
"# This bug is very difficult to reproduce consistently (except on",
"# specific platforms) so the work around here is to remove the",
"# duplicate polygons. In fact, we only observed this error on our",
"# CI/build machine. None of our dev environments or production",
"# machines has encountered this error, at least consistently. >:(",
"polygons",
"=",
"[",
"shapely",
".",
"wkt",
".",
"loads",
"(",
"x",
")",
"for",
"x",
"in",
"list",
"(",
"set",
"(",
"p",
".",
"wkt",
"for",
"p",
"in",
"polygons",
")",
")",
"]",
"polygon",
"=",
"shapely",
".",
"ops",
".",
"cascaded_union",
"(",
"polygons",
")",
".",
"simplify",
"(",
"self",
".",
"DIST_TOLERANCE",
")",
"return",
"proj",
",",
"polygon"
] | See :meth:`Mesh._get_proj_enclosing_polygon`.
:class:`RectangularMesh` contains an information about relative
positions of points, so it allows to define the minimum polygon,
containing the projection of the mesh, which doesn't necessarily
have to be convex (in contrast to :class:`Mesh` implementation).
:returns:
Same structure as :meth:`Mesh._get_proj_convex_hull`. | [
"See",
":",
"meth",
":",
"Mesh",
".",
"_get_proj_enclosing_polygon",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L395-L455 |
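A minimal usage sketch for the record above (not part of the dataset): the mesh coordinates are invented, and `_get_proj_enclosing_polygon` is private, so application code normally reaches it through the polygon utilities rather than calling it directly.

import numpy
from openquake.hazardlib.geo.mesh import RectangularMesh

# a 2x3 rectangular mesh on the earth's surface (degrees, invented values)
lons = numpy.array([[0.0, 0.1, 0.2],
                    [0.0, 0.1, 0.2]])
lats = numpy.array([[0.0, 0.0, 0.0],
                    [0.1, 0.1, 0.1]])
mesh = RectangularMesh(lons, lats, depths=None)
proj, polygon2d = mesh._get_proj_enclosing_polygon()
# polygon2d is a shapely polygon in the projected 2d space;
# proj converts between (lon, lat) pairs and that space
print(polygon2d.is_valid, polygon2d.area)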
gem/oq-engine | openquake/hazardlib/geo/mesh.py | Mesh.get_convex_hull | def get_convex_hull(self):
"""
Get a convex polygon object that contains projections of all the points
of the mesh.
:returns:
Instance of :class:`openquake.hazardlib.geo.polygon.Polygon` that
is a convex hull around all the points in this mesh. If the
original mesh had only one point, the resulting polygon has a
square shape with a side length of 10 meters. If there were only
two points, the resulting polygon is a stripe 10 meters wide.
"""
proj, polygon2d = self._get_proj_convex_hull()
# if mesh had only one point, the convex hull is a point. if there
# were two, it is a line string. we need to return a convex polygon
# object, so extend those area-less geometries by an arbitrarily
# small distance.
if isinstance(polygon2d, (shapely.geometry.LineString,
shapely.geometry.Point)):
polygon2d = polygon2d.buffer(self.DIST_TOLERANCE, 1)
# avoid circular imports
from openquake.hazardlib.geo.polygon import Polygon
return Polygon._from_2d(polygon2d, proj) | python | def get_convex_hull(self):
proj, polygon2d = self._get_proj_convex_hull()
if isinstance(polygon2d, (shapely.geometry.LineString,
shapely.geometry.Point)):
polygon2d = polygon2d.buffer(self.DIST_TOLERANCE, 1)
from openquake.hazardlib.geo.polygon import Polygon
return Polygon._from_2d(polygon2d, proj) | [
"def",
"get_convex_hull",
"(",
"self",
")",
":",
"proj",
",",
"polygon2d",
"=",
"self",
".",
"_get_proj_convex_hull",
"(",
")",
"# if mesh had only one point, the convex hull is a point. if there",
"# were two, it is a line string. we need to return a convex polygon",
"# object, so extend that area-less geometries by some arbitrarily",
"# small distance.",
"if",
"isinstance",
"(",
"polygon2d",
",",
"(",
"shapely",
".",
"geometry",
".",
"LineString",
",",
"shapely",
".",
"geometry",
".",
"Point",
")",
")",
":",
"polygon2d",
"=",
"polygon2d",
".",
"buffer",
"(",
"self",
".",
"DIST_TOLERANCE",
",",
"1",
")",
"# avoid circular imports",
"from",
"openquake",
".",
"hazardlib",
".",
"geo",
".",
"polygon",
"import",
"Polygon",
"return",
"Polygon",
".",
"_from_2d",
"(",
"polygon2d",
",",
"proj",
")"
] | Get a convex polygon object that contains projections of all the points
of the mesh.
:returns:
Instance of :class:`openquake.hazardlib.geo.polygon.Polygon` that
is a convex hull around all the points in this mesh. If the
original mesh had only one point, the resulting polygon has a
square shape with a side length of 10 meters. If there were only
two points, the resulting polygon is a stripe 10 meters wide. | [
"Get",
"a",
"convex",
"polygon",
"object",
"that",
"contains",
"projections",
"of",
"all",
"the",
"points",
"of",
"the",
"mesh",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L457-L480 |
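A short sketch of `Mesh.get_convex_hull` (invented coordinates; it assumes the returned hazardlib `Polygon` exposes its vertices as `lons`/`lats` arrays):

import numpy
from openquake.hazardlib.geo.mesh import Mesh

# a 1D mesh of three points; depths default to None
mesh = Mesh(numpy.array([0.0, 0.1, 0.2]),
            numpy.array([0.0, 0.1, 0.0]))
hull = mesh.get_convex_hull()
# even a degenerate (one- or two-point) mesh yields a real polygon,
# since area-less geometries are buffered by DIST_TOLERANCE
print(type(hull).__name__, len(hull.lons))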
gem/oq-engine | openquake/hazardlib/geo/mesh.py | RectangularMesh.from_points_list | def from_points_list(cls, points):
"""
Create a rectangular mesh object from a list of lists of points.
Lists in a list are supposed to have the same length.
:param points:
List of lists of :class:`~openquake.hazardlib.geo.point.Point`
objects.
"""
assert points is not None and len(points) > 0 and len(points[0]) > 0, \
'list of at least one non-empty list of points is required'
lons = numpy.zeros((len(points), len(points[0])), dtype=float)
lats = lons.copy()
depths = lons.copy()
num_cols = len(points[0])
for i, row in enumerate(points):
assert len(row) == num_cols, \
'lists of points are not of uniform length'
for j, point in enumerate(row):
lons[i, j] = point.longitude
lats[i, j] = point.latitude
depths[i, j] = point.depth
if not depths.any():
depths = None
return cls(lons, lats, depths) | python | def from_points_list(cls, points):
assert points is not None and len(points) > 0 and len(points[0]) > 0, \
'list of at least one non-empty list of points is required'
lons = numpy.zeros((len(points), len(points[0])), dtype=float)
lats = lons.copy()
depths = lons.copy()
num_cols = len(points[0])
for i, row in enumerate(points):
assert len(row) == num_cols, \
'lists of points are not of uniform length'
for j, point in enumerate(row):
lons[i, j] = point.longitude
lats[i, j] = point.latitude
depths[i, j] = point.depth
if not depths.any():
depths = None
return cls(lons, lats, depths) | [
"def",
"from_points_list",
"(",
"cls",
",",
"points",
")",
":",
"assert",
"points",
"is",
"not",
"None",
"and",
"len",
"(",
"points",
")",
">",
"0",
"and",
"len",
"(",
"points",
"[",
"0",
"]",
")",
">",
"0",
",",
"'list of at least one non-empty list of points is required'",
"lons",
"=",
"numpy",
".",
"zeros",
"(",
"(",
"len",
"(",
"points",
")",
",",
"len",
"(",
"points",
"[",
"0",
"]",
")",
")",
",",
"dtype",
"=",
"float",
")",
"lats",
"=",
"lons",
".",
"copy",
"(",
")",
"depths",
"=",
"lons",
".",
"copy",
"(",
")",
"num_cols",
"=",
"len",
"(",
"points",
"[",
"0",
"]",
")",
"for",
"i",
",",
"row",
"in",
"enumerate",
"(",
"points",
")",
":",
"assert",
"len",
"(",
"row",
")",
"==",
"num_cols",
",",
"'lists of points are not of uniform length'",
"for",
"j",
",",
"point",
"in",
"enumerate",
"(",
"row",
")",
":",
"lons",
"[",
"i",
",",
"j",
"]",
"=",
"point",
".",
"longitude",
"lats",
"[",
"i",
",",
"j",
"]",
"=",
"point",
".",
"latitude",
"depths",
"[",
"i",
",",
"j",
"]",
"=",
"point",
".",
"depth",
"if",
"not",
"depths",
".",
"any",
"(",
")",
":",
"depths",
"=",
"None",
"return",
"cls",
"(",
"lons",
",",
"lats",
",",
"depths",
")"
] | Create a rectangular mesh object from a list of lists of points.
Lists in a list are supposed to have the same length.
:param points:
List of lists of :class:`~openquake.hazardlib.geo.point.Point`
objects. | [
"Create",
"a",
"rectangular",
"mesh",
"object",
"from",
"a",
"list",
"of",
"lists",
"of",
"points",
".",
"Lists",
"in",
"a",
"list",
"are",
"supposed",
"to",
"have",
"the",
"same",
"length",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L497-L521 |
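`from_points_list` can be exercised as below (a sketch with made-up points); note the documented behaviour that a mesh whose depths are all zero stores `depths = None`:

from openquake.hazardlib.geo.point import Point
from openquake.hazardlib.geo.mesh import RectangularMesh

rows = [[Point(0.0, 0.0, 5.0), Point(0.1, 0.0, 5.0)],
        [Point(0.0, 0.1, 10.0), Point(0.1, 0.1, 10.0)]]
mesh = RectangularMesh.from_points_list(rows)
print(mesh.lons.shape)  # (2, 2): one row per inner list
print(mesh.depths)      # kept here, because not every depth is zero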
gem/oq-engine | openquake/hazardlib/geo/mesh.py | RectangularMesh.get_middle_point | def get_middle_point(self):
"""
Return the middle point of the mesh.
:returns:
An instance of :class:`~openquake.hazardlib.geo.point.Point`.
The middle point is taken from the middle row and middle column
of the mesh if both have an odd number of points; otherwise it is
the geometric mean point of the two or four central points.
"""
num_rows, num_cols = self.lons.shape
mid_row = num_rows // 2
depth = 0
if num_rows & 1 == 1:
# there is an odd number of rows
mid_col = num_cols // 2
if num_cols & 1 == 1:
# odd number of columns, we can easily take
# the middle point
depth = self.depths[mid_row, mid_col]
return Point(self.lons[mid_row, mid_col],
self.lats[mid_row, mid_col], depth)
else:
# even number of columns, need to take two middle
# points on the middle row
lon1, lon2 = self.lons[mid_row, mid_col - 1: mid_col + 1]
lat1, lat2 = self.lats[mid_row, mid_col - 1: mid_col + 1]
depth1 = self.depths[mid_row, mid_col - 1]
depth2 = self.depths[mid_row, mid_col]
else:
# there is an even number of rows. take the row just above
# and the one just below the middle and find middle point
# of each
submesh1 = self[mid_row - 1: mid_row]
submesh2 = self[mid_row: mid_row + 1]
p1, p2 = submesh1.get_middle_point(), submesh2.get_middle_point()
lon1, lat1, depth1 = p1.longitude, p1.latitude, p1.depth
lon2, lat2, depth2 = p2.longitude, p2.latitude, p2.depth
# we need to find the middle between two points
depth = (depth1 + depth2) / 2.0
lon, lat = geo_utils.get_middle_point(lon1, lat1, lon2, lat2)
return Point(lon, lat, depth) | python | def get_middle_point(self):
num_rows, num_cols = self.lons.shape
mid_row = num_rows // 2
depth = 0
if num_rows & 1 == 1:
mid_col = num_cols // 2
if num_cols & 1 == 1:
depth = self.depths[mid_row, mid_col]
return Point(self.lons[mid_row, mid_col],
self.lats[mid_row, mid_col], depth)
else:
lon1, lon2 = self.lons[mid_row, mid_col - 1: mid_col + 1]
lat1, lat2 = self.lats[mid_row, mid_col - 1: mid_col + 1]
depth1 = self.depths[mid_row, mid_col - 1]
depth2 = self.depths[mid_row, mid_col]
else:
submesh1 = self[mid_row - 1: mid_row]
submesh2 = self[mid_row: mid_row + 1]
p1, p2 = submesh1.get_middle_point(), submesh2.get_middle_point()
lon1, lat1, depth1 = p1.longitude, p1.latitude, p1.depth
lon2, lat2, depth2 = p2.longitude, p2.latitude, p2.depth
depth = (depth1 + depth2) / 2.0
lon, lat = geo_utils.get_middle_point(lon1, lat1, lon2, lat2)
return Point(lon, lat, depth) | [
"def",
"get_middle_point",
"(",
"self",
")",
":",
"num_rows",
",",
"num_cols",
"=",
"self",
".",
"lons",
".",
"shape",
"mid_row",
"=",
"num_rows",
"//",
"2",
"depth",
"=",
"0",
"if",
"num_rows",
"&",
"1",
"==",
"1",
":",
"# there are odd number of rows",
"mid_col",
"=",
"num_cols",
"//",
"2",
"if",
"num_cols",
"&",
"1",
"==",
"1",
":",
"# odd number of columns, we can easily take",
"# the middle point",
"depth",
"=",
"self",
".",
"depths",
"[",
"mid_row",
",",
"mid_col",
"]",
"return",
"Point",
"(",
"self",
".",
"lons",
"[",
"mid_row",
",",
"mid_col",
"]",
",",
"self",
".",
"lats",
"[",
"mid_row",
",",
"mid_col",
"]",
",",
"depth",
")",
"else",
":",
"# even number of columns, need to take two middle",
"# points on the middle row",
"lon1",
",",
"lon2",
"=",
"self",
".",
"lons",
"[",
"mid_row",
",",
"mid_col",
"-",
"1",
":",
"mid_col",
"+",
"1",
"]",
"lat1",
",",
"lat2",
"=",
"self",
".",
"lats",
"[",
"mid_row",
",",
"mid_col",
"-",
"1",
":",
"mid_col",
"+",
"1",
"]",
"depth1",
"=",
"self",
".",
"depths",
"[",
"mid_row",
",",
"mid_col",
"-",
"1",
"]",
"depth2",
"=",
"self",
".",
"depths",
"[",
"mid_row",
",",
"mid_col",
"]",
"else",
":",
"# there are even number of rows. take the row just above",
"# and the one just below the middle and find middle point",
"# of each",
"submesh1",
"=",
"self",
"[",
"mid_row",
"-",
"1",
":",
"mid_row",
"]",
"submesh2",
"=",
"self",
"[",
"mid_row",
":",
"mid_row",
"+",
"1",
"]",
"p1",
",",
"p2",
"=",
"submesh1",
".",
"get_middle_point",
"(",
")",
",",
"submesh2",
".",
"get_middle_point",
"(",
")",
"lon1",
",",
"lat1",
",",
"depth1",
"=",
"p1",
".",
"longitude",
",",
"p1",
".",
"latitude",
",",
"p1",
".",
"depth",
"lon2",
",",
"lat2",
",",
"depth2",
"=",
"p2",
".",
"longitude",
",",
"p2",
".",
"latitude",
",",
"p2",
".",
"depth",
"# we need to find the middle between two points",
"depth",
"=",
"(",
"depth1",
"+",
"depth2",
")",
"/",
"2.0",
"lon",
",",
"lat",
"=",
"geo_utils",
".",
"get_middle_point",
"(",
"lon1",
",",
"lat1",
",",
"lon2",
",",
"lat2",
")",
"return",
"Point",
"(",
"lon",
",",
"lat",
",",
"depth",
")"
] | Return the middle point of the mesh.
:returns:
An instance of :class:`~openquake.hazardlib.geo.point.Point`.
The middle point is taken from the middle row and middle column
of the mesh if both have an odd number of points; otherwise it is
the geometric mean point of the two or four central points. | [
"Return",
"the",
"middle",
"point",
"of",
"the",
"mesh",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L523-L566 |
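A sketch of the simplest `get_middle_point` case (invented 3x3 grid): with an odd number of rows and of columns the central vertex itself is returned.

from openquake.hazardlib.geo.point import Point
from openquake.hazardlib.geo.mesh import RectangularMesh

rows = [[Point(lon, lat, 2.0) for lon in (0.0, 0.1, 0.2)]
        for lat in (0.0, 0.1, 0.2)]
mesh = RectangularMesh.from_points_list(rows)
print(mesh.get_middle_point())  # the central vertex: lon=0.1, lat=0.1, depth=2.0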
gem/oq-engine | openquake/hazardlib/geo/mesh.py | RectangularMesh.get_mean_inclination_and_azimuth | def get_mean_inclination_and_azimuth(self):
"""
Calculate weighted average inclination and azimuth of the mesh surface.
:returns:
Tuple of two float numbers: inclination angle in a range [0, 90]
and azimuth in range [0, 360) (in decimal degrees).
The mesh is triangulated; the inclination and azimuth of each triangle
are computed, and their averages, weighted by each triangle's area,
are calculated. Azimuth is always defined in a way that the inclination
angle doesn't exceed 90 degrees.
"""
assert 1 not in self.lons.shape, (
"inclination and azimuth are only defined for mesh of more than "
"one row and more than one column of points")
assert ((self.depths[1:] - self.depths[:-1]) >= 0).all(), (
"get_mean_inclination_and_azimuth() requires next mesh row "
"to be not shallower than the previous one")
points, along_azimuth, updip, diag = self.triangulate()
# define planes that are perpendicular to each point's vector
# as normals to those planes
earth_surface_tangent_normal = geo_utils.normalized(points)
# calculating triangles' area and normals for top-left triangles
e1 = along_azimuth[:-1]
e2 = updip[:, :-1]
tl_area = geo_utils.triangle_area(e1, e2, diag)
tl_normal = geo_utils.normalized(numpy.cross(e1, e2))
# ... and bottom-right triangles
e1 = along_azimuth[1:]
e2 = updip[:, 1:]
br_area = geo_utils.triangle_area(e1, e2, diag)
br_normal = geo_utils.normalized(numpy.cross(e1, e2))
if (self.depths == 0).all():
# mesh is on earth surface, inclination is zero
inclination = 0
else:
# inclination calculation
# top-left triangles
en = earth_surface_tangent_normal[:-1, :-1]
# cosine of inclination of the triangle is scalar product
# of vector normal to triangle plane and (normalized) vector
# pointing to top left corner of a triangle from earth center
incl_cos = numpy.sum(en * tl_normal, axis=-1).clip(-1.0, 1.0)
# we calculate average angle using mean of circular quantities
# formula: define 2d vector for each triangle where length
# of the vector corresponds to triangle's weight (we use triangle
# area) and angle is equal to inclination angle. then we calculate
# the angle of vector sum of all those vectors and that angle
# is the weighted average.
xx = numpy.sum(tl_area * incl_cos)
# express sine via cosine using Pythagorean trigonometric identity,
# this is a bit faster than sin(arccos(incl_cos))
yy = numpy.sum(tl_area * sqrt(1 - incl_cos * incl_cos))
# bottom-right triangles
en = earth_surface_tangent_normal[1:, 1:]
# we need to clip scalar product values because in some cases
# they might exceed range where arccos is defined ([-1, 1])
# because of floating point imprecision
incl_cos = numpy.sum(en * br_normal, axis=-1).clip(-1.0, 1.0)
# weighted angle vectors are calculated independently for top-left
# and bottom-right triangles of each cell in a mesh. here we
# combine both and finally get the weighted mean angle
xx += numpy.sum(br_area * incl_cos)
yy += numpy.sum(br_area * sqrt(1 - incl_cos * incl_cos))
inclination = numpy.degrees(numpy.arctan2(yy, xx))
# azimuth calculation is done similar to one for inclination. we also
# do separate calculations for top-left and bottom-right triangles
# and also combine results using mean of circular quantities approach
# unit vector along z axis
z_unit = numpy.array([0.0, 0.0, 1.0])
# unit vectors pointing west from each point of the mesh, they define
# planes that contain meridian of respective point
norms_west = geo_utils.normalized(numpy.cross(points + z_unit, points))
# unit vectors parallel to planes defined by previous ones. they are
# directed from each point to a point lying on z axis on the same
# distance from earth center
norms_north = geo_utils.normalized(numpy.cross(points, norms_west))
# need to normalize triangles' azimuthal edges because we will project
# them on other normals and thus calculate an angle in between
along_azimuth = geo_utils.normalized(along_azimuth)
# process top-left triangles
# here we identify the sign of direction of the triangles' azimuthal
# edges: is the edge pointing west or east? to find that we project
# those edges onto vectors pointing west by calculating the scalar
# product and get the sign of the resulting value: if it is negative
# then the resulting azimuth should be negative as the top edge is pointing
# west.
sign = numpy.sign(numpy.sign(
numpy.sum(along_azimuth[:-1] * norms_west[:-1, :-1], axis=-1))
# we run numpy.sign(numpy.sign(...) + 0.1) to make resulting values
# be only either -1 or 1 with zero values (when edge is pointing
# strictly north or south) expressed as 1 (which means "don't
# change the sign")
+ 0.1)
# the length of projection of azimuthal edge on norms_north is cosine
# of edge's azimuth
az_cos = numpy.sum(along_azimuth[:-1] * norms_north[:-1, :-1], axis=-1)
# use the same approach for finding the weighted mean
# as for inclination (see above)
xx = numpy.sum(tl_area * az_cos)
# the only difference is that azimuth is defined in a range
# [0, 360), so we need to have two reference planes and change
# sign of projection on one normal to sign of projection to another one
yy = numpy.sum(tl_area * sqrt(1 - az_cos * az_cos) * sign)
# bottom-right triangles
sign = numpy.sign(numpy.sign(
numpy.sum(along_azimuth[1:] * norms_west[1:, 1:], axis=-1))
+ 0.1)
az_cos = numpy.sum(along_azimuth[1:] * norms_north[1:, 1:], axis=-1)
xx += numpy.sum(br_area * az_cos)
yy += numpy.sum(br_area * sqrt(1 - az_cos * az_cos) * sign)
azimuth = numpy.degrees(numpy.arctan2(yy, xx))
if azimuth < 0:
azimuth += 360
if inclination > 90:
# average inclination is over 90 degree, that means that we need
# to reverse azimuthal direction in order for inclination to be
# in range [0, 90]
inclination = 180 - inclination
azimuth = (azimuth + 180) % 360
return inclination, azimuth | python | def get_mean_inclination_and_azimuth(self):
assert 1 not in self.lons.shape, (
"inclination and azimuth are only defined for mesh of more than "
"one row and more than one column of points")
assert ((self.depths[1:] - self.depths[:-1]) >= 0).all(), (
"get_mean_inclination_and_azimuth() requires next mesh row "
"to be not shallower than the previous one")
points, along_azimuth, updip, diag = self.triangulate()
earth_surface_tangent_normal = geo_utils.normalized(points)
e1 = along_azimuth[:-1]
e2 = updip[:, :-1]
tl_area = geo_utils.triangle_area(e1, e2, diag)
tl_normal = geo_utils.normalized(numpy.cross(e1, e2))
e1 = along_azimuth[1:]
e2 = updip[:, 1:]
br_area = geo_utils.triangle_area(e1, e2, diag)
br_normal = geo_utils.normalized(numpy.cross(e1, e2))
if (self.depths == 0).all():
inclination = 0
else:
en = earth_surface_tangent_normal[:-1, :-1]
incl_cos = numpy.sum(en * tl_normal, axis=-1).clip(-1.0, 1.0)
xx = numpy.sum(tl_area * incl_cos)
yy = numpy.sum(tl_area * sqrt(1 - incl_cos * incl_cos))
en = earth_surface_tangent_normal[1:, 1:]
incl_cos = numpy.sum(en * br_normal, axis=-1).clip(-1.0, 1.0)
xx += numpy.sum(br_area * incl_cos)
yy += numpy.sum(br_area * sqrt(1 - incl_cos * incl_cos))
inclination = numpy.degrees(numpy.arctan2(yy, xx))
z_unit = numpy.array([0.0, 0.0, 1.0])
norms_west = geo_utils.normalized(numpy.cross(points + z_unit, points))
norms_north = geo_utils.normalized(numpy.cross(points, norms_west))
along_azimuth = geo_utils.normalized(along_azimuth)
sign = numpy.sign(numpy.sign(
numpy.sum(along_azimuth[:-1] * norms_west[:-1, :-1], axis=-1))
+ 0.1)
az_cos = numpy.sum(along_azimuth[:-1] * norms_north[:-1, :-1], axis=-1)
xx = numpy.sum(tl_area * az_cos)
yy = numpy.sum(tl_area * sqrt(1 - az_cos * az_cos) * sign)
sign = numpy.sign(numpy.sign(
numpy.sum(along_azimuth[1:] * norms_west[1:, 1:], axis=-1))
+ 0.1)
az_cos = numpy.sum(along_azimuth[1:] * norms_north[1:, 1:], axis=-1)
xx += numpy.sum(br_area * az_cos)
yy += numpy.sum(br_area * sqrt(1 - az_cos * az_cos) * sign)
azimuth = numpy.degrees(numpy.arctan2(yy, xx))
if azimuth < 0:
azimuth += 360
if inclination > 90:
inclination = 180 - inclination
azimuth = (azimuth + 180) % 360
return inclination, azimuth | [
"def",
"get_mean_inclination_and_azimuth",
"(",
"self",
")",
":",
"assert",
"1",
"not",
"in",
"self",
".",
"lons",
".",
"shape",
",",
"(",
"\"inclination and azimuth are only defined for mesh of more than \"",
"\"one row and more than one column of points\"",
")",
"assert",
"(",
"(",
"self",
".",
"depths",
"[",
"1",
":",
"]",
"-",
"self",
".",
"depths",
"[",
":",
"-",
"1",
"]",
")",
">=",
"0",
")",
".",
"all",
"(",
")",
",",
"(",
"\"get_mean_inclination_and_azimuth() requires next mesh row \"",
"\"to be not shallower than the previous one\"",
")",
"points",
",",
"along_azimuth",
",",
"updip",
",",
"diag",
"=",
"self",
".",
"triangulate",
"(",
")",
"# define planes that are perpendicular to each point's vector",
"# as normals to those planes",
"earth_surface_tangent_normal",
"=",
"geo_utils",
".",
"normalized",
"(",
"points",
")",
"# calculating triangles' area and normals for top-left triangles",
"e1",
"=",
"along_azimuth",
"[",
":",
"-",
"1",
"]",
"e2",
"=",
"updip",
"[",
":",
",",
":",
"-",
"1",
"]",
"tl_area",
"=",
"geo_utils",
".",
"triangle_area",
"(",
"e1",
",",
"e2",
",",
"diag",
")",
"tl_normal",
"=",
"geo_utils",
".",
"normalized",
"(",
"numpy",
".",
"cross",
"(",
"e1",
",",
"e2",
")",
")",
"# ... and bottom-right triangles",
"e1",
"=",
"along_azimuth",
"[",
"1",
":",
"]",
"e2",
"=",
"updip",
"[",
":",
",",
"1",
":",
"]",
"br_area",
"=",
"geo_utils",
".",
"triangle_area",
"(",
"e1",
",",
"e2",
",",
"diag",
")",
"br_normal",
"=",
"geo_utils",
".",
"normalized",
"(",
"numpy",
".",
"cross",
"(",
"e1",
",",
"e2",
")",
")",
"if",
"(",
"self",
".",
"depths",
"==",
"0",
")",
".",
"all",
"(",
")",
":",
"# mesh is on earth surface, inclination is zero",
"inclination",
"=",
"0",
"else",
":",
"# inclination calculation",
"# top-left triangles",
"en",
"=",
"earth_surface_tangent_normal",
"[",
":",
"-",
"1",
",",
":",
"-",
"1",
"]",
"# cosine of inclination of the triangle is scalar product",
"# of vector normal to triangle plane and (normalized) vector",
"# pointing to top left corner of a triangle from earth center",
"incl_cos",
"=",
"numpy",
".",
"sum",
"(",
"en",
"*",
"tl_normal",
",",
"axis",
"=",
"-",
"1",
")",
".",
"clip",
"(",
"-",
"1.0",
",",
"1.0",
")",
"# we calculate average angle using mean of circular quantities",
"# formula: define 2d vector for each triangle where length",
"# of the vector corresponds to triangle's weight (we use triangle",
"# area) and angle is equal to inclination angle. then we calculate",
"# the angle of vector sum of all those vectors and that angle",
"# is the weighted average.",
"xx",
"=",
"numpy",
".",
"sum",
"(",
"tl_area",
"*",
"incl_cos",
")",
"# express sine via cosine using Pythagorean trigonometric identity,",
"# this is a bit faster than sin(arccos(incl_cos))",
"yy",
"=",
"numpy",
".",
"sum",
"(",
"tl_area",
"*",
"sqrt",
"(",
"1",
"-",
"incl_cos",
"*",
"incl_cos",
")",
")",
"# bottom-right triangles",
"en",
"=",
"earth_surface_tangent_normal",
"[",
"1",
":",
",",
"1",
":",
"]",
"# we need to clip scalar product values because in some cases",
"# they might exceed range where arccos is defined ([-1, 1])",
"# because of floating point imprecision",
"incl_cos",
"=",
"numpy",
".",
"sum",
"(",
"en",
"*",
"br_normal",
",",
"axis",
"=",
"-",
"1",
")",
".",
"clip",
"(",
"-",
"1.0",
",",
"1.0",
")",
"# weighted angle vectors are calculated independently for top-left",
"# and bottom-right triangles of each cell in a mesh. here we",
"# combine both and finally get the weighted mean angle",
"xx",
"+=",
"numpy",
".",
"sum",
"(",
"br_area",
"*",
"incl_cos",
")",
"yy",
"+=",
"numpy",
".",
"sum",
"(",
"br_area",
"*",
"sqrt",
"(",
"1",
"-",
"incl_cos",
"*",
"incl_cos",
")",
")",
"inclination",
"=",
"numpy",
".",
"degrees",
"(",
"numpy",
".",
"arctan2",
"(",
"yy",
",",
"xx",
")",
")",
"# azimuth calculation is done similar to one for inclination. we also",
"# do separate calculations for top-left and bottom-right triangles",
"# and also combine results using mean of circular quantities approach",
"# unit vector along z axis",
"z_unit",
"=",
"numpy",
".",
"array",
"(",
"[",
"0.0",
",",
"0.0",
",",
"1.0",
"]",
")",
"# unit vectors pointing west from each point of the mesh, they define",
"# planes that contain meridian of respective point",
"norms_west",
"=",
"geo_utils",
".",
"normalized",
"(",
"numpy",
".",
"cross",
"(",
"points",
"+",
"z_unit",
",",
"points",
")",
")",
"# unit vectors parallel to planes defined by previous ones. they are",
"# directed from each point to a point lying on z axis on the same",
"# distance from earth center",
"norms_north",
"=",
"geo_utils",
".",
"normalized",
"(",
"numpy",
".",
"cross",
"(",
"points",
",",
"norms_west",
")",
")",
"# need to normalize triangles' azimuthal edges because we will project",
"# them on other normals and thus calculate an angle in between",
"along_azimuth",
"=",
"geo_utils",
".",
"normalized",
"(",
"along_azimuth",
")",
"# process top-left triangles",
"# here we identify the sign of direction of the triangles' azimuthal",
"# edges: is edge pointing west or east? for finding that we project",
"# those edges to vectors directing to west by calculating scalar",
"# product and get the sign of resulting value: if it is negative",
"# than the resulting azimuth should be negative as top edge is pointing",
"# west.",
"sign",
"=",
"numpy",
".",
"sign",
"(",
"numpy",
".",
"sign",
"(",
"numpy",
".",
"sum",
"(",
"along_azimuth",
"[",
":",
"-",
"1",
"]",
"*",
"norms_west",
"[",
":",
"-",
"1",
",",
":",
"-",
"1",
"]",
",",
"axis",
"=",
"-",
"1",
")",
")",
"# we run numpy.sign(numpy.sign(...) + 0.1) to make resulting values",
"# be only either -1 or 1 with zero values (when edge is pointing",
"# strictly north or south) expressed as 1 (which means \"don't",
"# change the sign\")",
"+",
"0.1",
")",
"# the length of projection of azimuthal edge on norms_north is cosine",
"# of edge's azimuth",
"az_cos",
"=",
"numpy",
".",
"sum",
"(",
"along_azimuth",
"[",
":",
"-",
"1",
"]",
"*",
"norms_north",
"[",
":",
"-",
"1",
",",
":",
"-",
"1",
"]",
",",
"axis",
"=",
"-",
"1",
")",
"# use the same approach for finding the weighted mean",
"# as for inclination (see above)",
"xx",
"=",
"numpy",
".",
"sum",
"(",
"tl_area",
"*",
"az_cos",
")",
"# the only difference is that azimuth is defined in a range",
"# [0, 360), so we need to have two reference planes and change",
"# sign of projection on one normal to sign of projection to another one",
"yy",
"=",
"numpy",
".",
"sum",
"(",
"tl_area",
"*",
"sqrt",
"(",
"1",
"-",
"az_cos",
"*",
"az_cos",
")",
"*",
"sign",
")",
"# bottom-right triangles",
"sign",
"=",
"numpy",
".",
"sign",
"(",
"numpy",
".",
"sign",
"(",
"numpy",
".",
"sum",
"(",
"along_azimuth",
"[",
"1",
":",
"]",
"*",
"norms_west",
"[",
"1",
":",
",",
"1",
":",
"]",
",",
"axis",
"=",
"-",
"1",
")",
")",
"+",
"0.1",
")",
"az_cos",
"=",
"numpy",
".",
"sum",
"(",
"along_azimuth",
"[",
"1",
":",
"]",
"*",
"norms_north",
"[",
"1",
":",
",",
"1",
":",
"]",
",",
"axis",
"=",
"-",
"1",
")",
"xx",
"+=",
"numpy",
".",
"sum",
"(",
"br_area",
"*",
"az_cos",
")",
"yy",
"+=",
"numpy",
".",
"sum",
"(",
"br_area",
"*",
"sqrt",
"(",
"1",
"-",
"az_cos",
"*",
"az_cos",
")",
"*",
"sign",
")",
"azimuth",
"=",
"numpy",
".",
"degrees",
"(",
"numpy",
".",
"arctan2",
"(",
"yy",
",",
"xx",
")",
")",
"if",
"azimuth",
"<",
"0",
":",
"azimuth",
"+=",
"360",
"if",
"inclination",
">",
"90",
":",
"# average inclination is over 90 degree, that means that we need",
"# to reverse azimuthal direction in order for inclination to be",
"# in range [0, 90]",
"inclination",
"=",
"180",
"-",
"inclination",
"azimuth",
"=",
"(",
"azimuth",
"+",
"180",
")",
"%",
"360",
"return",
"inclination",
",",
"azimuth"
] | Calculate weighted average inclination and azimuth of the mesh surface.
:returns:
Tuple of two float numbers: inclination angle in a range [0, 90]
and azimuth in range [0, 360) (in decimal degrees).
The mesh is triangulated; the inclination and azimuth of each triangle
are computed, and their averages, weighted by each triangle's area,
are calculated. Azimuth is always defined in a way that the inclination
angle doesn't exceed 90 degrees. | [
"Calculate",
"weighted",
"average",
"inclination",
"and",
"azimuth",
"of",
"the",
"mesh",
"surface",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L568-L702 |
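A sketch with a two-row mesh dipping south (made-up geometry; printed values are approximate). The top row lies at the surface and the bottom row 5 km deeper and roughly 5.6 km to the south, so the mean inclination comes out near 42 degrees and the azimuth near 90 (strike to the east, dip to the right of strike):

from openquake.hazardlib.geo.point import Point
from openquake.hazardlib.geo.mesh import RectangularMesh

top = [Point(0.0, 0.0, 0.0), Point(0.1, 0.0, 0.0)]
bottom = [Point(0.0, -0.05, 5.0), Point(0.1, -0.05, 5.0)]
mesh = RectangularMesh.from_points_list([top, bottom])
incl, azim = mesh.get_mean_inclination_and_azimuth()
print(round(incl), round(azim))  # roughly 42 and 90 for this geometry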
gem/oq-engine | openquake/hazardlib/geo/mesh.py | RectangularMesh.get_cell_dimensions | def get_cell_dimensions(self):
"""
Calculate centroid, width, length and area of each mesh cell.
:returns:
Tuple of four elements, each being a 2d numpy array.
Each array has both dimensions one less than the dimensions
of the mesh, since they represent cells, not vertices.
Arrays contain the following cell information:
#. centroids, 3d vectors in a Cartesian space,
#. length (size along row of points) in km,
#. width (size along column of points) in km,
#. area in square km.
"""
points, along_azimuth, updip, diag = self.triangulate()
top = along_azimuth[:-1]
left = updip[:, :-1]
tl_area = geo_utils.triangle_area(top, left, diag)
top_length = numpy.sqrt(numpy.sum(top * top, axis=-1))
left_length = numpy.sqrt(numpy.sum(left * left, axis=-1))
bottom = along_azimuth[1:]
right = updip[:, 1:]
br_area = geo_utils.triangle_area(bottom, right, diag)
bottom_length = numpy.sqrt(numpy.sum(bottom * bottom, axis=-1))
right_length = numpy.sqrt(numpy.sum(right * right, axis=-1))
cell_area = tl_area + br_area
tl_center = (points[:-1, :-1] + points[:-1, 1:] + points[1:, :-1]) / 3
br_center = (points[:-1, 1:] + points[1:, :-1] + points[1:, 1:]) / 3
cell_center = ((tl_center * tl_area.reshape(tl_area.shape + (1, ))
+ br_center * br_area.reshape(br_area.shape + (1, )))
/ cell_area.reshape(cell_area.shape + (1, )))
cell_length = ((top_length * tl_area + bottom_length * br_area)
/ cell_area)
cell_width = ((left_length * tl_area + right_length * br_area)
/ cell_area)
return cell_center, cell_length, cell_width, cell_area | python | def get_cell_dimensions(self):
points, along_azimuth, updip, diag = self.triangulate()
top = along_azimuth[:-1]
left = updip[:, :-1]
tl_area = geo_utils.triangle_area(top, left, diag)
top_length = numpy.sqrt(numpy.sum(top * top, axis=-1))
left_length = numpy.sqrt(numpy.sum(left * left, axis=-1))
bottom = along_azimuth[1:]
right = updip[:, 1:]
br_area = geo_utils.triangle_area(bottom, right, diag)
bottom_length = numpy.sqrt(numpy.sum(bottom * bottom, axis=-1))
right_length = numpy.sqrt(numpy.sum(right * right, axis=-1))
cell_area = tl_area + br_area
tl_center = (points[:-1, :-1] + points[:-1, 1:] + points[1:, :-1]) / 3
br_center = (points[:-1, 1:] + points[1:, :-1] + points[1:, 1:]) / 3
cell_center = ((tl_center * tl_area.reshape(tl_area.shape + (1, ))
+ br_center * br_area.reshape(br_area.shape + (1, )))
/ cell_area.reshape(cell_area.shape + (1, )))
cell_length = ((top_length * tl_area + bottom_length * br_area)
/ cell_area)
cell_width = ((left_length * tl_area + right_length * br_area)
/ cell_area)
return cell_center, cell_length, cell_width, cell_area | [
"def",
"get_cell_dimensions",
"(",
"self",
")",
":",
"points",
",",
"along_azimuth",
",",
"updip",
",",
"diag",
"=",
"self",
".",
"triangulate",
"(",
")",
"top",
"=",
"along_azimuth",
"[",
":",
"-",
"1",
"]",
"left",
"=",
"updip",
"[",
":",
",",
":",
"-",
"1",
"]",
"tl_area",
"=",
"geo_utils",
".",
"triangle_area",
"(",
"top",
",",
"left",
",",
"diag",
")",
"top_length",
"=",
"numpy",
".",
"sqrt",
"(",
"numpy",
".",
"sum",
"(",
"top",
"*",
"top",
",",
"axis",
"=",
"-",
"1",
")",
")",
"left_length",
"=",
"numpy",
".",
"sqrt",
"(",
"numpy",
".",
"sum",
"(",
"left",
"*",
"left",
",",
"axis",
"=",
"-",
"1",
")",
")",
"bottom",
"=",
"along_azimuth",
"[",
"1",
":",
"]",
"right",
"=",
"updip",
"[",
":",
",",
"1",
":",
"]",
"br_area",
"=",
"geo_utils",
".",
"triangle_area",
"(",
"bottom",
",",
"right",
",",
"diag",
")",
"bottom_length",
"=",
"numpy",
".",
"sqrt",
"(",
"numpy",
".",
"sum",
"(",
"bottom",
"*",
"bottom",
",",
"axis",
"=",
"-",
"1",
")",
")",
"right_length",
"=",
"numpy",
".",
"sqrt",
"(",
"numpy",
".",
"sum",
"(",
"right",
"*",
"right",
",",
"axis",
"=",
"-",
"1",
")",
")",
"cell_area",
"=",
"tl_area",
"+",
"br_area",
"tl_center",
"=",
"(",
"points",
"[",
":",
"-",
"1",
",",
":",
"-",
"1",
"]",
"+",
"points",
"[",
":",
"-",
"1",
",",
"1",
":",
"]",
"+",
"points",
"[",
"1",
":",
",",
":",
"-",
"1",
"]",
")",
"/",
"3",
"br_center",
"=",
"(",
"points",
"[",
":",
"-",
"1",
",",
"1",
":",
"]",
"+",
"points",
"[",
"1",
":",
",",
":",
"-",
"1",
"]",
"+",
"points",
"[",
"1",
":",
",",
"1",
":",
"]",
")",
"/",
"3",
"cell_center",
"=",
"(",
"(",
"tl_center",
"*",
"tl_area",
".",
"reshape",
"(",
"tl_area",
".",
"shape",
"+",
"(",
"1",
",",
")",
")",
"+",
"br_center",
"*",
"br_area",
".",
"reshape",
"(",
"br_area",
".",
"shape",
"+",
"(",
"1",
",",
")",
")",
")",
"/",
"cell_area",
".",
"reshape",
"(",
"cell_area",
".",
"shape",
"+",
"(",
"1",
",",
")",
")",
")",
"cell_length",
"=",
"(",
"(",
"top_length",
"*",
"tl_area",
"+",
"bottom_length",
"*",
"br_area",
")",
"/",
"cell_area",
")",
"cell_width",
"=",
"(",
"(",
"left_length",
"*",
"tl_area",
"+",
"right_length",
"*",
"br_area",
")",
"/",
"cell_area",
")",
"return",
"cell_center",
",",
"cell_length",
",",
"cell_width",
",",
"cell_area"
] | Calculate centroid, width, length and area of each mesh cell.
:returns:
Tuple of four elements, each being a 2d numpy array.
Each array has both dimensions one less than the dimensions
of the mesh, since they represent cells, not vertices.
Arrays contain the following cell information:
#. centroids, 3d vectors in a Cartesian space,
#. length (size along row of points) in km,
#. width (size along column of points) in km,
#. area in square km. | [
"Calculate",
"centroid",
"width",
"length",
"and",
"area",
"of",
"each",
"mesh",
"cell",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L704-L746 |
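A sketch of `get_cell_dimensions` on an invented vertical surface: an (n, m) mesh yields (n-1, m-1) arrays, one entry per cell, with the centroids carrying an extra 3-vector axis.

from openquake.hazardlib.geo.point import Point
from openquake.hazardlib.geo.mesh import RectangularMesh

top = [Point(lon, 0.0, 0.0) for lon in (0.0, 0.1, 0.2)]
bottom = [Point(lon, 0.0, 10.0) for lon in (0.0, 0.1, 0.2)]
mesh = RectangularMesh.from_points_list([top, bottom])
centroids, lengths, widths, areas = mesh.get_cell_dimensions()
print(centroids.shape)     # (1, 2, 3): one centroid vector per cell
print(float(areas.sum()))  # total area in km^2, ~2 cells * 11.1 * 10 here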
gem/oq-engine | openquake/hazardlib/geo/mesh.py | RectangularMesh.triangulate | def triangulate(self):
"""
Convert mesh points to vectors in Cartesian space.
:returns:
Tuple of four elements, each being 2d numpy array of 3d vectors
(the same structure and shape as the mesh itself). Those arrays
are:
#. points vectors,
#. vectors directed from each point (excluding the last column)
to the next one in the same row →,
#. vectors directed from each point (excluding the first row)
to the previous one in the same column ↑,
#. vectors pointing from the bottom left point of each mesh cell
to the top right one ↗.
So the last three arrays of vectors make it possible to construct
triangles covering the whole mesh.
"""
points = geo_utils.spherical_to_cartesian(self.lons, self.lats,
self.depths)
# triangulate the mesh by defining vectors of triangles edges:
# →
along_azimuth = points[:, 1:] - points[:, :-1]
# ↑
updip = points[:-1] - points[1:]
# ↗
diag = points[:-1, 1:] - points[1:, :-1]
return points, along_azimuth, updip, diag | python | def triangulate(self):
points = geo_utils.spherical_to_cartesian(self.lons, self.lats,
self.depths)
along_azimuth = points[:, 1:] - points[:, :-1]
updip = points[:-1] - points[1:]
diag = points[:-1, 1:] - points[1:, :-1]
return points, along_azimuth, updip, diag | [
"def",
"triangulate",
"(",
"self",
")",
":",
"points",
"=",
"geo_utils",
".",
"spherical_to_cartesian",
"(",
"self",
".",
"lons",
",",
"self",
".",
"lats",
",",
"self",
".",
"depths",
")",
"# triangulate the mesh by defining vectors of triangles edges:",
"# β",
"along_azimuth",
"=",
"points",
"[",
":",
",",
"1",
":",
"]",
"-",
"points",
"[",
":",
",",
":",
"-",
"1",
"]",
"# β",
"updip",
"=",
"points",
"[",
":",
"-",
"1",
"]",
"-",
"points",
"[",
"1",
":",
"]",
"# β",
"diag",
"=",
"points",
"[",
":",
"-",
"1",
",",
"1",
":",
"]",
"-",
"points",
"[",
"1",
":",
",",
":",
"-",
"1",
"]",
"return",
"points",
",",
"along_azimuth",
",",
"updip",
",",
"diag"
] | Convert mesh points to vectors in Cartesian space.
:returns:
Tuple of four elements, each being 2d numpy array of 3d vectors
(the same structure and shape as the mesh itself). Those arrays
are:
#. points vectors,
#. vectors directed from each point (excluding the last column)
to the next one in the same row →,
#. vectors directed from each point (excluding the first row)
to the previous one in the same column ↑,
#. vectors pointing from the bottom left point of each mesh cell
to the top right one ↗.
So the last three arrays of vectors make it possible to construct
triangles covering the whole mesh. | [
"Convert",
"mesh",
"points",
"to",
"vectors",
"in",
"Cartesian",
"space",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L748-L778 |
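The shapes returned by `triangulate` follow directly from the docstring; a sketch on an invented 3x4 mesh:

import numpy
from openquake.hazardlib.geo.mesh import RectangularMesh

lons, lats = numpy.meshgrid(numpy.arange(4) * 0.1, numpy.arange(3) * 0.1)
mesh = RectangularMesh(lons, lats, depths=None)
points, along_azimuth, updip, diag = mesh.triangulate()
print(points.shape)         # (3, 4, 3): one Cartesian vector per vertex
print(along_azimuth.shape)  # (3, 3, 3): → edges within each row
print(updip.shape)          # (2, 4, 3): ↑ edges within each column
print(diag.shape)           # (2, 3, 3): ↗ one diagonal per cell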
gem/oq-engine | openquake/hazardlib/geo/mesh.py | RectangularMesh.get_mean_width | def get_mean_width(self):
"""
Calculate and return (weighted) mean width (km) of a mesh surface.
The width of each mesh column is computed (summing up the cell widths
in that column), and the mean value (weighted by the mean cell
length in each column) is returned.
"""
assert 1 not in self.lons.shape, (
"mean width is only defined for mesh of more than "
"one row and more than one column of points")
_, cell_length, cell_width, cell_area = self.get_cell_dimensions()
# compute widths along each mesh column
widths = numpy.sum(cell_width, axis=0)
# compute (weighted) mean cell length along each mesh column
column_areas = numpy.sum(cell_area, axis=0)
mean_cell_lengths = numpy.sum(cell_length * cell_area, axis=0) / \
column_areas
# compute and return weighted mean
return numpy.sum(widths * mean_cell_lengths) / \
numpy.sum(mean_cell_lengths) | python | def get_mean_width(self):
assert 1 not in self.lons.shape, (
"mean width is only defined for mesh of more than "
"one row and more than one column of points")
_, cell_length, cell_width, cell_area = self.get_cell_dimensions()
widths = numpy.sum(cell_width, axis=0)
column_areas = numpy.sum(cell_area, axis=0)
mean_cell_lengths = numpy.sum(cell_length * cell_area, axis=0) / \
column_areas
return numpy.sum(widths * mean_cell_lengths) / \
numpy.sum(mean_cell_lengths) | [
"def",
"get_mean_width",
"(",
"self",
")",
":",
"assert",
"1",
"not",
"in",
"self",
".",
"lons",
".",
"shape",
",",
"(",
"\"mean width is only defined for mesh of more than \"",
"\"one row and more than one column of points\"",
")",
"_",
",",
"cell_length",
",",
"cell_width",
",",
"cell_area",
"=",
"self",
".",
"get_cell_dimensions",
"(",
")",
"# compute widths along each mesh column",
"widths",
"=",
"numpy",
".",
"sum",
"(",
"cell_width",
",",
"axis",
"=",
"0",
")",
"# compute (weighted) mean cell length along each mesh column",
"column_areas",
"=",
"numpy",
".",
"sum",
"(",
"cell_area",
",",
"axis",
"=",
"0",
")",
"mean_cell_lengths",
"=",
"numpy",
".",
"sum",
"(",
"cell_length",
"*",
"cell_area",
",",
"axis",
"=",
"0",
")",
"/",
"column_areas",
"# compute and return weighted mean",
"return",
"numpy",
".",
"sum",
"(",
"widths",
"*",
"mean_cell_lengths",
")",
"/",
"numpy",
".",
"sum",
"(",
"mean_cell_lengths",
")"
] | Calculate and return (weighted) mean width (km) of a mesh surface.
The width of each mesh column is computed (summing up the cell widths
in that column), and the mean value (weighted by the mean cell
length in each column) is returned. | [
"Calculate",
"and",
"return",
"(",
"weighted",
")",
"mean",
"width",
"(",
"km",
")",
"of",
"a",
"mesh",
"surface",
"."
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L780-L804 |
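For a perfectly vertical surface the mean width reduces to the depth extent, which makes a handy sanity check (sketch with made-up points):

from openquake.hazardlib.geo.point import Point
from openquake.hazardlib.geo.mesh import RectangularMesh

top = [Point(0.0, 0.0, 0.0), Point(0.1, 0.0, 0.0)]
bottom = [Point(0.0, 0.0, 10.0), Point(0.1, 0.0, 10.0)]
mesh = RectangularMesh.from_points_list([top, bottom])
print(mesh.get_mean_width())  # ~10.0 km: top edge at 0 km, bottom at 10 km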
gem/oq-engine | openquake/hmtk/seismicity/smoothing/kernels/isotropic_gaussian.py | IsotropicGaussian.smooth_data | def smooth_data(self, data, config, is_3d=False):
'''
Applies the smoothing kernel to the data
:param np.ndarray data:
Raw earthquake count in the form [Longitude, Latitude, Depth,
Count]
:param dict config:
Configuration parameters must contain:
* BandWidth: The bandwidth of the kernel (in km) (float)
* Length_Limit: Maximum number of standard deviations
:returns:
* smoothed_value: np.ndarray vector of smoothed values
* Total (summed) rate of the original values
* Total (summed) rate of the smoothed values
'''
max_dist = config['Length_Limit'] * config['BandWidth']
smoothed_value = np.zeros(len(data), dtype=float)
for iloc in range(0, len(data)):
dist_val = haversine(data[:, 0], data[:, 1],
data[iloc, 0], data[iloc, 1])
if is_3d:
dist_val = np.sqrt(dist_val.flatten() ** 2.0 +
(data[:, 2] - data[iloc, 2]) ** 2.0)
id0 = np.where(dist_val <= max_dist)[0]
w_val = (np.exp(-(dist_val[id0] ** 2.0) /
(config['BandWidth'] ** 2.))).flatten()
smoothed_value[iloc] = np.sum(w_val * data[id0, 3]) / np.sum(w_val)
return smoothed_value, np.sum(data[:, -1]), np.sum(smoothed_value) | python | def smooth_data(self, data, config, is_3d=False):
max_dist = config['Length_Limit'] * config['BandWidth']
smoothed_value = np.zeros(len(data), dtype=float)
for iloc in range(0, len(data)):
dist_val = haversine(data[:, 0], data[:, 1],
data[iloc, 0], data[iloc, 1])
if is_3d:
dist_val = np.sqrt(dist_val.flatten() ** 2.0 +
(data[:, 2] - data[iloc, 2]) ** 2.0)
id0 = np.where(dist_val <= max_dist)[0]
w_val = (np.exp(-(dist_val[id0] ** 2.0) /
(config['BandWidth'] ** 2.))).flatten()
smoothed_value[iloc] = np.sum(w_val * data[id0, 3]) / np.sum(w_val)
return smoothed_value, np.sum(data[:, -1]), np.sum(smoothed_value) | [
"def",
"smooth_data",
"(",
"self",
",",
"data",
",",
"config",
",",
"is_3d",
"=",
"False",
")",
":",
"max_dist",
"=",
"config",
"[",
"'Length_Limit'",
"]",
"*",
"config",
"[",
"'BandWidth'",
"]",
"smoothed_value",
"=",
"np",
".",
"zeros",
"(",
"len",
"(",
"data",
")",
",",
"dtype",
"=",
"float",
")",
"for",
"iloc",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"data",
")",
")",
":",
"dist_val",
"=",
"haversine",
"(",
"data",
"[",
":",
",",
"0",
"]",
",",
"data",
"[",
":",
",",
"1",
"]",
",",
"data",
"[",
"iloc",
",",
"0",
"]",
",",
"data",
"[",
"iloc",
",",
"1",
"]",
")",
"if",
"is_3d",
":",
"dist_val",
"=",
"np",
".",
"sqrt",
"(",
"dist_val",
".",
"flatten",
"(",
")",
"**",
"2.0",
"+",
"(",
"data",
"[",
":",
",",
"2",
"]",
"-",
"data",
"[",
"iloc",
",",
"2",
"]",
")",
"**",
"2.0",
")",
"id0",
"=",
"np",
".",
"where",
"(",
"dist_val",
"<=",
"max_dist",
")",
"[",
"0",
"]",
"w_val",
"=",
"(",
"np",
".",
"exp",
"(",
"-",
"(",
"dist_val",
"[",
"id0",
"]",
"**",
"2.0",
")",
"/",
"(",
"config",
"[",
"'BandWidth'",
"]",
"**",
"2.",
")",
")",
")",
".",
"flatten",
"(",
")",
"smoothed_value",
"[",
"iloc",
"]",
"=",
"np",
".",
"sum",
"(",
"w_val",
"*",
"data",
"[",
"id0",
",",
"3",
"]",
")",
"/",
"np",
".",
"sum",
"(",
"w_val",
")",
"return",
"smoothed_value",
",",
"np",
".",
"sum",
"(",
"data",
"[",
":",
",",
"-",
"1",
"]",
")",
",",
"np",
".",
"sum",
"(",
"smoothed_value",
")"
] | Applies the smoothing kernel to the data
:param np.ndarray data:
Raw earthquake count in the form [Longitude, Latitude, Depth,
Count]
:param dict config:
Configuration parameters must contain:
* BandWidth: The bandwidth of the kernel (in km) (float)
* Length_Limit: Maximum number of standard deviations
:returns:
* smoothed_value: np.ndarray vector of smoothed values
* Total (summed) rate of the original values
* Total (summed) rate of the smoothed values | [
"Applies",
"the",
"smoothing",
"kernel",
"to",
"the",
"data"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/smoothing/kernels/isotropic_gaussian.py#L69-L99 |
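A sketch of the kernel on a toy grid (the three cells and the config values are invented). The smoother is a weighted average, so it does not conserve the total rate by itself; it returns both summed rates precisely so the caller can rescale:

import numpy as np
from openquake.hmtk.seismicity.smoothing.kernels.isotropic_gaussian import (
    IsotropicGaussian)

# rows are [longitude, latitude, depth, observed count]
data = np.array([[0.0, 0.0, 0.0, 10.0],
                 [0.0, 0.5, 0.0, 0.0],
                 [0.0, 1.0, 0.0, 5.0]])
config = {'BandWidth': 50.0, 'Length_Limit': 3.0}  # sigma 50 km, cut at 3 sigma
smoothed, sum_in, sum_out = IsotropicGaussian().smooth_data(data, config)
print(smoothed)         # counts spread across neighbouring cells
print(sum_in, sum_out)  # totals, handy for renormalising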
gem/oq-engine | openquake/hazardlib/gsim/atkinson_macias_2009.py | AtkinsonMacias2009._get_magnitude_term | def _get_magnitude_term(self, C, mag):
"""
Returns the magnitude scaling term provided in Equation (5)
"""
dmag = mag - 8.0
return C["c0"] + C["c3"] * dmag + C["c4"] * (dmag ** 2.) | python | def _get_magnitude_term(self, C, mag):
dmag = mag - 8.0
return C["c0"] + C["c3"] * dmag + C["c4"] * (dmag ** 2.) | [
"def",
"_get_magnitude_term",
"(",
"self",
",",
"C",
",",
"mag",
")",
":",
"dmag",
"=",
"mag",
"-",
"8.0",
"return",
"C",
"[",
"\"c0\"",
"]",
"+",
"C",
"[",
"\"c3\"",
"]",
"*",
"dmag",
"+",
"C",
"[",
"\"c4\"",
"]",
"*",
"(",
"dmag",
"**",
"2.",
")"
] | Returns the magnitude scaling term provided in Equation (5) | [
"Returns",
"the",
"magnitude",
"scaling",
"term",
"provided",
"in",
"Equation",
"(",
"5",
")"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/atkinson_macias_2009.py#L85-L90 |
gem/oq-engine | openquake/hazardlib/gsim/atkinson_macias_2009.py | AtkinsonMacias2009._get_distance_term | def _get_distance_term(self, C, rrup, mag):
"""
Returns the distance scaling given in Equation (4), page 1569,
with distance adjusted by the magnitude-dependent depth scaling
factor given in Equation (6)
"""
r_adj = np.sqrt(rrup ** 2.0 + (mag ** 2.0 - 3.1 * mag - 14.55) ** 2.)
return C["c1"] * np.log10(r_adj) + C["c2"] * r_adj | python | def _get_distance_term(self, C, rrup, mag):
r_adj = np.sqrt(rrup ** 2.0 + (mag ** 2.0 - 3.1 * mag - 14.55) ** 2.)
return C["c1"] * np.log10(r_adj) + C["c2"] * r_adj | [
"def",
"_get_distance_term",
"(",
"self",
",",
"C",
",",
"rrup",
",",
"mag",
")",
":",
"r_adj",
"=",
"np",
".",
"sqrt",
"(",
"rrup",
"**",
"2.0",
"+",
"(",
"mag",
"**",
"2.0",
"-",
"3.1",
"*",
"mag",
"-",
"14.55",
")",
"**",
"2.",
")",
"return",
"C",
"[",
"\"c1\"",
"]",
"*",
"np",
".",
"log10",
"(",
"r_adj",
")",
"+",
"C",
"[",
"\"c2\"",
"]",
"*",
"r_adj"
] | Returns the distance scaling given in Equation (4), page 1569,
with distance adjusted by the magnitude-dependent depth scaling
factor given in Equation (6) | [
"Returns",
"the",
"distance",
"scaling",
"given",
"in",
"Equation",
"(",
"4",
")",
"page",
"1569",
"with",
"distance",
"adjusted",
"by",
"the",
"magnitude",
"-",
"dependent",
"depth",
"scaling",
"factor",
"given",
"in",
"Equation",
"(",
"6",
")"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/atkinson_macias_2009.py#L92-L99 |
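The two private terms above add in log10 space to build the Atkinson & Macias (2009) mean. A sketch, assuming the usual hazardlib `COEFFS`-table lookup by IMT (not shown in this dataset excerpt), with the site term and unit conversion omitted:

import numpy as np
from openquake.hazardlib.gsim.atkinson_macias_2009 import AtkinsonMacias2009
from openquake.hazardlib.imt import PGA

gsim = AtkinsonMacias2009()
C = gsim.COEFFS[PGA()]  # coefficient row; assumed CoeffsTable convention
mag, rrup = 8.5, np.array([50.0, 100.0, 200.0])
log10_mean = (gsim._get_magnitude_term(C, mag) +
              gsim._get_distance_term(C, rrup, mag))
print(log10_mean)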
gem/oq-engine | openquake/commands/purge.py | purge_one | def purge_one(calc_id, user):
"""
Remove one calculation ID from the database and remove its datastore
"""
filename = os.path.join(datadir, 'calc_%s.hdf5' % calc_id)
err = dbcmd('del_calc', calc_id, user)
if err:
print(err)
elif os.path.exists(filename): # not removed yet
os.remove(filename)
print('Removed %s' % filename) | python | def purge_one(calc_id, user):
filename = os.path.join(datadir, 'calc_%s.hdf5' % calc_id)
err = dbcmd('del_calc', calc_id, user)
if err:
print(err)
elif os.path.exists(filename):
os.remove(filename)
print('Removed %s' % filename) | [
"def",
"purge_one",
"(",
"calc_id",
",",
"user",
")",
":",
"filename",
"=",
"os",
".",
"path",
".",
"join",
"(",
"datadir",
",",
"'calc_%s.hdf5'",
"%",
"calc_id",
")",
"err",
"=",
"dbcmd",
"(",
"'del_calc'",
",",
"calc_id",
",",
"user",
")",
"if",
"err",
":",
"print",
"(",
"err",
")",
"elif",
"os",
".",
"path",
".",
"exists",
"(",
"filename",
")",
":",
"# not removed yet",
"os",
".",
"remove",
"(",
"filename",
")",
"print",
"(",
"'Removed %s'",
"%",
"filename",
")"
] | Remove one calculation ID from the database and remove its datastore | [
"Remove",
"one",
"calculation",
"ID",
"from",
"the",
"database",
"and",
"remove",
"its",
"datastore"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/purge.py#L28-L38 |
gem/oq-engine | openquake/commands/purge.py | purge_all | def purge_all(user=None, fast=False):
"""
Remove all calculations of the given user
"""
user = user or getpass.getuser()
if os.path.exists(datadir):
if fast:
shutil.rmtree(datadir)
print('Removed %s' % datadir)
else:
for fname in os.listdir(datadir):
mo = re.match('calc_(\d+)\.hdf5', fname)
if mo is not None:
calc_id = int(mo.group(1))
purge_one(calc_id, user) | python | def purge_all(user=None, fast=False):
user = user or getpass.getuser()
if os.path.exists(datadir):
if fast:
shutil.rmtree(datadir)
print('Removed %s' % datadir)
else:
for fname in os.listdir(datadir):
mo = re.match('calc_(\d+)\.hdf5', fname)
if mo is not None:
calc_id = int(mo.group(1))
purge_one(calc_id, user) | [
"def",
"purge_all",
"(",
"user",
"=",
"None",
",",
"fast",
"=",
"False",
")",
":",
"user",
"=",
"user",
"or",
"getpass",
".",
"getuser",
"(",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"datadir",
")",
":",
"if",
"fast",
":",
"shutil",
".",
"rmtree",
"(",
"datadir",
")",
"print",
"(",
"'Removed %s'",
"%",
"datadir",
")",
"else",
":",
"for",
"fname",
"in",
"os",
".",
"listdir",
"(",
"datadir",
")",
":",
"mo",
"=",
"re",
".",
"match",
"(",
"'calc_(\\d+)\\.hdf5'",
",",
"fname",
")",
"if",
"mo",
"is",
"not",
"None",
":",
"calc_id",
"=",
"int",
"(",
"mo",
".",
"group",
"(",
"1",
")",
")",
"purge_one",
"(",
"calc_id",
",",
"user",
")"
] | Remove all calculations of the given user | [
"Remove",
"all",
"calculations",
"of",
"the",
"given",
"user"
] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/purge.py#L42-L56 |
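Both helpers are destructive, so this closing sketch uses a made-up calculation id and keeps the bulk variant commented out; `purge_one` consults the database first, while `purge_all(fast=True)` simply removes the whole datadir in one rmtree:

import getpass
from openquake.commands.purge import purge_one, purge_all

purge_one(42, getpass.getuser())  # 42 is a hypothetical calculation id
# purge_all(fast=True)            # would wipe the entire datadir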