Merge pull request #10117 from gem/peril
Introduced perils in the CompositeRiskModel
micheles authored Nov 4, 2024
2 parents 9a9f9ad + 32fdf2a commit fe414aa
Showing 4 changed files with 59 additions and 50 deletions.
6 changes: 3 additions & 3 deletions openquake/commonlib/oqvalidation.py
@@ -1472,12 +1472,12 @@ def set_loss_types(self):
         else:
             self._parent = None
         # set all_cost_types
-        # rt has the form 'vulnerability/structural', 'fragility/...', ...
-        costtypes = set(rt.rsplit('/')[1] for rt in self.risk_files)
+        # rt has the form 'earthquake/vulnerability/structural', ...
+        costtypes = set(rt.split('/')[2] for rt in self.risk_files)
         if not costtypes and self.hazard_calculation_id:
             try:
                 self._risk_files = rfs = get_risk_files(self._parent.inputs)
-                costtypes = set(rt.rsplit('/')[1] for rt in rfs)
+                costtypes = set(rt.split('/')[2] for rt in rfs)
             except OSError:  # FileNotFound for wrong hazard_calculation_id
                 pass
         self.all_cost_types = sorted(costtypes)  # including occupants
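The risk-file keys gained a leading peril segment, so the cost type is now the third component of the key instead of the second. A minimal sketch of the parsing, with an illustrative key string (the real keys are produced by get_risk_files in riskmodels.py):

    rt = 'earthquake/vulnerability/structural'   # peril/kind/cost_type
    peril, kind, cost_type = rt.split('/')
    assert rt.split('/')[2] == cost_type == 'structural'
    # old format, for comparison: 'vulnerability/structural' -> rt.rsplit('/')[1]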
97 changes: 52 additions & 45 deletions openquake/risklib/riskmodels.py
Expand Up @@ -36,9 +36,8 @@
F32 = numpy.float32
F64 = numpy.float64


LTYPE_REGEX = '|'.join(lt for lt in scientific.LOSSTYPE
if '+' not in lt and '_ins' not in lt)
lts = numpy.concatenate([scientific.LOSSTYPE, scientific.PERILTYPE])
LTYPE_REGEX = '|'.join(lt for lt in lts if '+' not in lt and '_ins' not in lt)
RISK_TYPE_REGEX = re.compile(r'(%s)_([\w_]+)' % LTYPE_REGEX)
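With PERILTYPE folded into the alternation, RISK_TYPE_REGEX now matches peril-prefixed job.ini keys as well as the usual cost-type-prefixed ones. A small sketch of the intended matches, with a deliberately abridged list of types:

    import re
    lts = ['structural', 'contents', 'earthquake', 'liquefaction', 'landslide']  # abridged
    RISK_TYPE_REGEX = re.compile(r'(%s)_([\w_]+)' % '|'.join(lts))

    print(RISK_TYPE_REGEX.match('structural_vulnerability').groups())
    # ('structural', 'vulnerability')  -> group(1) is a cost type
    print(RISK_TYPE_REGEX.match('liquefaction_fragility').groups())
    # ('liquefaction', 'fragility')    -> group(1) is a peril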


@@ -56,22 +55,30 @@ def _assert_equal(d1, d2):
 def get_risk_files(inputs):
     """
     :param inputs: a dictionary key -> path name
-    :returns: a pair (file_type, {risk_type: path})
+    :returns: a dictionary "peril/kind/cost_type" -> fname
     """
     rfs = {}
     job_ini = inputs['job_ini']
     for key in sorted(inputs):
         if key == 'fragility':
             # backward compatibily for .ini files with key fragility_file
             # instead of structural_fragility_file
-            rfs['fragility/structural'] = inputs[
+            rfs['earthquake/fragility/structural'] = inputs[
                 'structural_fragility'] = inputs[key]
             del inputs['fragility']
         elif key.endswith(('_fragility', '_vulnerability', '_vulnerability_retrofitted')):
             match = RISK_TYPE_REGEX.match(key)
             if match:
-                rfs['%s/%s' % (match.group(2), match.group(1))] = inputs[key]
-            elif match is None:
+                kind = match.group(2)  # fragility or vulnerability
+                value = inputs[key]
+                if isinstance(value, dict):  # cost_type -> fname
+                    peril = match.group(1)
+                    for cost_type, fname in value.items():
+                        rfs[f'{peril}/{kind}/{cost_type}'] = fname
+                else:
+                    cost_type = match.group(1)
+                    rfs[f'earthquake/{kind}/{cost_type}'] = value
+            else:
+                raise ValueError('Invalid key in %s: %s_file' % (job_ini, key))
     return rfs
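A sketch of the new return value, under the assumption (visible in the isinstance check above) that a peril-prefixed key maps to a dictionary cost_type -> fname, while an ordinary cost-type key still maps to a single path; the file names below are made up:

    inputs = {
        'job_ini': 'job.ini',
        'structural_vulnerability': 'vuln_struct.xml',
        'liquefaction_fragility': {'structural': 'liq_frag_struct.xml'},
    }
    # get_risk_files(inputs) would then return something like:
    # {'earthquake/vulnerability/structural': 'vuln_struct.xml',
    #  'liquefaction/fragility/structural': 'liq_frag_struct.xml'}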

@@ -96,29 +103,30 @@ def build_vf_node(vf):
         {'id': vf.id, 'dist': vf.distribution_name}, nodes=nodes)
 
 
-def group_by_lt(funclist):
+def group_by_peril(funclist):
     """
     Converts a list of objects with attribute .loss_type into a dictionary
-    loss_type -> risk_function
+    peril -> loss_type -> risk_function
     """
-    d = AccumDict(accum=[])
+    ddic = AccumDict(accum=AccumDict(accum=[]))  # peril -> lt -> rf
     for rf in funclist:
-        d[rf.loss_type].append(rf)
-    for lt, lst in d.items():
-        if len(lst) == 1:
-            d[lt] = lst[0]
-        elif lst[1].kind == 'fragility':
-            # EventBasedDamageTestCase.test_case_11
-            cf, ffl = lst
-            ffl.cf = cf
-            d[lt] = ffl
-        elif lst[1].kind == 'vulnerability_retrofitted':
-            vf, retro = lst
-            vf.retro = retro
-            d[lt] = vf
-        else:
-            raise RuntimeError(lst)
-    return d
+        ddic[rf.peril][rf.loss_type].append(rf)
+    for peril, dic in ddic.items():
+        for lt, lst in dic.items():
+            if len(lst) == 1:
+                dic[lt] = lst[0]
+            elif lst[1].kind == 'fragility':
+                # EventBasedDamageTestCase.test_case_11
+                cf, ffl = lst
+                ffl.cf = cf
+                dic[lt] = ffl
+            elif lst[1].kind == 'vulnerability_retrofitted':
+                vf, retro = lst
+                vf.retro = retro
+                dic[lt] = vf
+            else:
+                raise RuntimeError(lst)
+    return ddic
 
 
 class RiskFuncList(list):
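A runnable sketch of the new two-level grouping, using types.SimpleNamespace objects as stand-ins for real risk functions and defaultdict in place of AccumDict; it mimics only the grouping step, not the pairing of retrofitted/consequence functions:

    from collections import defaultdict
    from types import SimpleNamespace

    funclist = [
        SimpleNamespace(peril='earthquake', loss_type='structural', kind='vulnerability'),
        SimpleNamespace(peril='liquefaction', loss_type='structural', kind='fragility'),
    ]
    ddic = defaultdict(lambda: defaultdict(list))  # peril -> loss_type -> [rf, ...]
    for rf in funclist:
        ddic[rf.peril][rf.loss_type].append(rf)
    # -> {'earthquake': {'structural': [...]}, 'liquefaction': {'structural': [...]}}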
@@ -132,7 +140,7 @@ def groupby_id(self):
         ddic = AccumDict(accum=[])
         for rf in self:
             ddic[rf.id].append(rf)
-        return {riskid: group_by_lt(rfs) for riskid, rfs in ddic.items()}
+        return {riskid: group_by_peril(rfs) for riskid, rfs in ddic.items()}
 
 
 def get_risk_functions(oqparam):
@@ -143,34 +151,30 @@ def get_risk_functions(oqparam):
         a list of risk functions
     """
     job_ini = oqparam.inputs['job_ini']
-    rmodels = AccumDict()
+    rmodels = AccumDict()  # (peril, loss_type, kind) -> rmodel
     for key, fname in get_risk_files(oqparam.inputs).items():
-        kind, loss_type = key.split('/')  # ex. vulnerability/structural
+        peril, kind, loss_type = key.split('/')  # ex. earthquake/vulnerability/structural
         rmodel = nrml.to_python(fname)
         if len(rmodel) == 0:
             raise InvalidFile(f'{job_ini}: {fname} is empty!')
-        rmodels[loss_type, kind] = rmodel
+        rmodels[peril, loss_type, kind] = rmodel
         if rmodel.lossCategory is None:  # NRML 0.4
             continue
         cost_type = str(rmodel.lossCategory)
         rmodel_kind = rmodel.__class__.__name__
         kind_ = kind.replace('_retrofitted', '')  # strip retrofitted
         if not rmodel_kind.lower().startswith(kind_):
             raise ValueError(
-                'Error in the file "%s_file=%s": is '
-                'of kind %s, expected %s' % (
-                    key, oqparam.inputs[key], rmodel_kind,
-                    kind.capitalize() + 'Model'))
+                f'Error in the file "{key}_file={fname}": is '
+                f'of kind {rmodel_kind}, expected {kind.capitalize() + "Model"}')
         if cost_type != loss_type:
             raise ValueError(
-                'Error in the file "%s_file=%s": lossCategory is of '
-                'type "%s", expected "%s"' %
-                (key, oqparam.inputs[key],
-                 rmodel.lossCategory, loss_type))
+                f'Error in the file "{key}_file={fname}": lossCategory is of '
+                f'type "{rmodel.lossCategory}", expected "{loss_type}"')
     cl_risk = oqparam.calculation_mode in ('classical', 'classical_risk')
     rlist = RiskFuncList()
     rlist.limit_states = []
-    for (loss_type, kind), rm in sorted(rmodels.items()):
+    for (peril, loss_type, kind), rm in sorted(rmodels.items()):
         if kind == 'fragility':
             for (imt, riskid), ffl in sorted(rm.items()):
                 if not rlist.limit_states:
@@ -179,6 +183,7 @@
                 # limit states; this may change in the future
                 assert rlist.limit_states == rm.limitStates, (
                     rlist.limit_states, rm.limitStates)
+                ffl.peril = peril
                 ffl.loss_type = loss_type
                 ffl.kind = kind
                 rlist.append(ffl)
@@ -187,6 +192,7 @@
             # to make sure they are strictly increasing
             for (imt, riskid), rf in sorted(rm.items()):
                 rf = rf.strictly_increasing() if cl_risk else rf
+                rf.peril = peril
                 rf.loss_type = loss_type
                 rf.kind = kind
                 rlist.append(rf)
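Each parsed risk function now carries the peril it was defined for, in addition to loss_type and kind. A minimal sketch of the tagging loop with stand-in objects (the key triple mirrors the rmodels dictionary built above; the values are hypothetical):

    from types import SimpleNamespace

    rmodels = {('earthquake', 'structural', 'vulnerability'):
               {('PGA', 'RC'): SimpleNamespace()}}
    rlist = []
    for (peril, loss_type, kind), rm in sorted(rmodels.items()):
        for (imt, riskid), rf in sorted(rm.items()):
            rf.peril, rf.loss_type, rf.kind = peril, loss_type, kind
            rlist.append(rf)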
@@ -234,7 +240,7 @@ class RiskModel(object):
     Base class. Can be used in the tests as a mock.
 
     :param taxonomy: a taxonomy string
-    :param risk_functions: a dict (loss_type, kind) -> risk_function
+    :param risk_functions: a dict peril -> (loss_type, kind) -> risk_function
     """
     time_event = None  # used in scenario_risk
     compositemodel = None  # set by get_crmodel
@@ -243,25 +249,25 @@ class RiskModel(object):
     def __init__(self, calcmode, taxonomy, risk_functions, **kw):
         self.calcmode = calcmode
         self.taxonomy = taxonomy
-        self.risk_functions = {'earthquake': risk_functions}
+        self.risk_functions = risk_functions
         vars(self).update(kw)  # updates risk_investigation_time too
         steps = kw.get('lrem_steps_per_interval')
         if calcmode in ('classical', 'classical_risk'):
             self.loss_ratios = {
                 lt: tuple(vf.mean_loss_ratios_with_steps(steps))
-                for lt, vf in risk_functions.items()}
+                for lt, vf in risk_functions['earthquake'].items()}
         if calcmode == 'classical_bcr':
             self.loss_ratios_orig = {}
             self.loss_ratios_retro = {}
-            for lt, vf in risk_functions.items():
+            for lt, vf in risk_functions['earthquake'].items():
                 self.loss_ratios_orig[lt] = tuple(
                     vf.mean_loss_ratios_with_steps(steps))
                 self.loss_ratios_retro[lt] = tuple(
                     vf.retro.mean_loss_ratios_with_steps(steps))
 
         # set imt_by_lt
         self.imt_by_lt = {}  # dictionary loss_type -> imt
-        for lt, rf in risk_functions.items():
+        for lt, rf in risk_functions['earthquake'].items():
             if rf.kind in ('vulnerability', 'fragility'):
                 self.imt_by_lt[lt] = rf.imt
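The constructor now receives the peril-keyed mapping produced by group_by_peril and reads the 'earthquake' sub-dictionary when building loss_ratios and imt_by_lt. A minimal illustration of that access pattern with a stand-in object rather than a real VulnerabilityFunction:

    from types import SimpleNamespace

    vf = SimpleNamespace(kind='vulnerability', imt='PGA')
    risk_functions = {'earthquake': {'structural': vf}}  # peril -> loss_type -> rf

    imt_by_lt = {lt: rf.imt
                 for lt, rf in risk_functions['earthquake'].items()
                 if rf.kind in ('vulnerability', 'fragility')}
    # {'structural': 'PGA'}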

@@ -468,13 +474,14 @@ def get_riskcomputer(dic, alias):
         if hasattr(rf, 'init'):
             rf.init()
         rf.loss_type = lt
+        rf.peril = 'earthquake'
         if getattr(rf, 'retro', False):
             rf.retro = hdf5.json_to_obj(json.dumps(rf.retro))
             rf.retro.init()
             rf.retro.loss_type = lt
         rfs[riskid].append(rf)
     rm = RiskModel(dic['calculation_mode'], 'taxonomy',
-                   group_by_lt(rfs[riskid]),
+                   group_by_peril(rfs[riskid]),
                    lrem_steps_per_interval=steps,
                    minimum_asset_loss=mal)
     rm.alias = alias
2 changes: 1 addition & 1 deletion openquake/risklib/scientific.py
@@ -45,7 +45,7 @@
                       'losses', 'collapsed',
                       'injured', 'fatalities', 'homeless', 'non_operational']
 
-PERILS = 'earthquake', 'liquefaction', 'landslide'
+PERILTYPE = numpy.array(['earthquake', 'liquefaction', 'landslide'])
 LOSSTYPE = numpy.array('''\
 business_interruption contents nonstructural structural
 occupants occupants_day occupants_night occupants_transit
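Turning the plain tuple into a numpy array lets callers concatenate it with LOSSTYPE, as riskmodels.py now does when building the key regex. A short illustration with an abridged LOSSTYPE:

    import numpy

    PERILTYPE = numpy.array(['earthquake', 'liquefaction', 'landslide'])
    LOSSTYPE = numpy.array(['structural', 'contents'])  # abridged for the example
    lts = numpy.concatenate([LOSSTYPE, PERILTYPE])
    # ['structural' 'contents' 'earthquake' 'liquefaction' 'landslide']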
4 changes: 3 additions & 1 deletion openquake/risklib/tests/riskmodels_test.py
@@ -205,6 +205,7 @@ def test1(self):
             'RC#structural':
             {"openquake.risklib.scientific.VulnerabilityFunction":
              {"id": "RC",
+              "peril": 'earthquake',
               "loss_type": "structural",
               "imt": "PGA",
               "imls": [0.1, 0.2, 0.3, 0.5, 0.7],
@@ -217,7 +218,8 @@
                 'gmv_0': [.23, .31]}
         rc = riskmodels.get_riskcomputer(dic, alias={'PGA': 'gmv_0'})
         print(toml.dumps(dic))
-        self.assertEqual(dic, rc.todict())
+        for k, v in rc.todict().items():
+            self.assertEqual(dic[k], v)
         out = rc.output(pandas.DataFrame(gmfs))
         print(out)


