
Commit

Merge pull request #10119 from gem/perils
Replaced consequences and taxonomy mapping by loss type with by peril
micheles authored Nov 5, 2024
2 parents 99c5bae + 74fd8ca commit b79cab6
Showing 71 changed files with 795 additions and 1,734 deletions.
2 changes: 2 additions & 0 deletions debian/changelog
@@ -1,4 +1,6 @@
[Michele Simionato]
* Extended consequences to perils
* Replaced taxonomy mapping by loss type with taxonomy mapping by peril
* Internal: changed the ordering in the composite risk model from
(loss_type, riskid) -> (riskid, loss_type)
* Added an exporter for trt_gsim
19 changes: 10 additions & 9 deletions openquake/calculators/event_based_damage.py
@@ -91,29 +91,30 @@ def _gen_dd3(asset_df, gmf_df, crmodel, dparam, mon):

# this part is ultra-slow, especially for discrete damage distributions
E = len(dparam.eids)
L = len(oq.loss_types)
P = len(crmodel.perils)
[lt] = oq.loss_types # assume single loss type
for taxo, adf in asset_df.groupby('taxonomy'):
with mon:
out = crmodel.get_output(adf, gmf_df)
outs = crmodel.get_output(adf, gmf_df) # dicts loss_type -> array
aids = adf.index.to_numpy()
A = len(aids)
assets = adf.to_records()
if oq.float_dmg_dist:
number = assets['value-number']
else:
number = assets['value-number'] = U32(assets['value-number'])
dd4 = numpy.zeros((L, A, E, dparam.Dc), F32)
dd4 = numpy.zeros((P, A, E, dparam.Dc), F32)
D = dparam.D
for lti, lt in enumerate(oq.loss_types):
for p, out in enumerate(outs):
fractions = out[lt]
if oq.float_dmg_dist:
for a in range(A):
dd4[lti, a, :, :D] = fractions[a] * number[a]
dd4[p, a, :, :D] = fractions[a] * number[a]
else:
# this is a performance disaster; for instance
# the Messina test in oq-risk-tests becomes 12x
# slower even if it has only 25_736 assets
dd4[lti, :, :, :D] = dparam.rng.discrete_dmg_dist(
dd4[p, :, :, :D] = dparam.rng.discrete_dmg_dist(
dparam.eids, fractions, number)

# secondary perils and consequences
@@ -122,12 +123,12 @@ def _gen_dd3(asset_df, gmf_df, crmodel, dparam, mon):
for d in range(1, D):
# doing the mean on the secondary simulations
if oq.float_dmg_dist:
dd4[lti, a, :, d] *= probs
dd4[p, a, :, d] *= probs
else:
dd4[lti, a, :, d] *= dprobs
dd4[p, a, :, d] *= dprobs

df = crmodel.tmap_df[crmodel.tmap_df.taxi == assets[0]['taxonomy']]
if L > 1:
if P > 1:
# compose damage distributions
dd3 = numpy.empty(dd4.shape[1:])
for a in range(A):
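The damage array is now indexed by peril instead of by loss type. A minimal, self-contained sketch of the layout assumed above (all shapes and values below are made up for illustration, not taken from the engine):

    import numpy
    F32 = numpy.float32
    P, A, E, D, Dc = 2, 3, 4, 5, 7   # perils, assets, events, damage states, damage+consequence columns
    dd4 = numpy.zeros((P, A, E, Dc), F32)
    fractions = numpy.random.random((P, A, E, D))  # stand-in for the per-peril outputs
    number = numpy.array([10., 20., 30.])          # stand-in for the 'value-number' field
    for p in range(P):
        for a in range(A):
            dd4[p, a, :, :D] = fractions[p, a] * number[a]
    # with P == 1 the peril axis is trivial; with P > 1 the damage
    # distributions are composed across perils, as in the code above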
2 changes: 1 addition & 1 deletion openquake/calculators/event_based_risk.py
@@ -242,7 +242,7 @@ def gen_outputs(df, crmodel, rng, monitor):
if len(gmf_df) == 0: # common enough
continue
with mon_risk:
out = crmodel.get_output(
[out] = crmodel.get_output(
adf, gmf_df, crmodel.oqparam._sec_losses, rng)
yield out

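The unpacking `[out] = crmodel.get_output(...)` suggests that get_output now returns one output per peril and that this calculator assumes a single peril here; a tiny hypothetical illustration of the idiom:

    outs = ['earthquake-output']  # stand-in for the list returned by get_output
    [out] = outs                  # raises ValueError if there is more than one peril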
2 changes: 1 addition & 1 deletion openquake/calculators/tests/event_based_risk_test.py
@@ -553,7 +553,7 @@ def test_case_7a(self):
self.assertEqual(nrups, 2) # two ruptures >= 80% of the losses

def test_case_8(self):
# nontrivial taxonomy mapping
# loss_type-dependent taxonomy mapping
out = self.run_calc(case_8.__file__, 'job.ini', exports='csv',
concurrent_tasks='0')
for fname in out['aggrisk', 'csv']:
2 changes: 1 addition & 1 deletion openquake/calculators/tests/scenario_damage_test.py
@@ -165,7 +165,7 @@ def test_case_7(self):
'risk_by_event', ['event_id', 'loss_id', 'agg_id'],
dict(agg_id=K))
self.assertEqual(len(df), 100)
self.assertEqual(len(df[df.dmg_1 > 0]), 12) # only 12/100 are nonzero
self.assertEqual(len(df[df.dmg_1 > 0]), 28) # only 28/100 are nonzero

def test_case_8(self):
# case with a shakemap
15 changes: 12 additions & 3 deletions openquake/commonlib/oqvalidation.py
@@ -967,6 +967,8 @@ class OqParam(valid.ParamSet):
'residents_vulnerability',
'area_vulnerability',
'number_vulnerability',
'earthquake_fragility',
'earthquake_vulnerability',
'liquefaction_fragility',
'liquefaction_vulnerability',
'landslide_fragility',
@@ -1480,7 +1482,8 @@ def set_loss_types(self):
costtypes = set(rt.split('/')[2] for rt in rfs)
except OSError: # FileNotFound for wrong hazard_calculation_id
pass
self.all_cost_types = sorted(costtypes) # including occupants
# all_cost_types includes occupants and excludes perils
self.all_cost_types = sorted(costtypes - set(scientific.PERILTYPE))
# fix minimum_asset_loss
self.minimum_asset_loss = {
ln: calc.filters.getdefault(self.minimum_asset_loss, ln)
@@ -1641,9 +1644,11 @@ def levels_per_imt(self):
def set_risk_imts(self, risklist):
"""
:param risklist:
a list of risk functions with attributes .id, .loss_type, .kind
a list of risk functions with attributes .id, .peril, .loss_type, .kind
:returns:
a list of ordered unique perils
Set the attribute risk_imtls.
Set the attribute .risk_imtls as a side effect
"""
risk_imtls = AccumDict(accum=[]) # imt -> imls
for i, rf in enumerate(risklist):
@@ -1664,6 +1669,7 @@ def set_risk_imts(self, risklist):
(imt, min(imls), max(imls)))
suggested[-1] += '}'
self.risk_imtls = {imt: [min(ls)] for imt, ls in risk_imtls.items()}

if self.uniform_hazard_spectra:
self.check_uniform_hazard_spectra()
if not self.hazard_imtls:
@@ -1683,6 +1689,9 @@ def set_risk_imts(self, risklist):
if imt in sec_imts:
self.raise_invalid('you forgot to set secondary_perils =')

risk_perils = sorted(set(rf.peril for rf in risklist))
return risk_perils

def get_primary_imtls(self):
"""
:returns: IMTs and levels which are not secondary
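As a rough illustration (with a stand-in class, not the real risk function type), set_risk_imts now returns the sorted unique perils of the risk functions while still setting .risk_imtls as a side effect:

    class FakeRF:  # hypothetical stand-in for a risk function record
        def __init__(self, peril):
            self.peril = peril

    risklist = [FakeRF('earthquake'), FakeRF('liquefaction'), FakeRF('earthquake')]
    risk_perils = sorted(set(rf.peril for rf in risklist))
    print(risk_perils)  # ['earthquake', 'liquefaction']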
57 changes: 25 additions & 32 deletions openquake/commonlib/readinput.py
@@ -45,7 +45,7 @@
from openquake.baselib import config, hdf5, parallel, InvalidFile
from openquake.baselib.performance import Monitor
from openquake.baselib.general import (
random_filter, countby, group_array, get_duplicates, gettemp, AccumDict)
random_filter, countby, get_duplicates, gettemp, AccumDict)
from openquake.baselib.python3compat import zip, decode
from openquake.baselib.node import Node
from openquake.hazardlib.const import StdDev
@@ -968,11 +968,13 @@ def get_imts(oqparam):
return list(map(imt.from_string, sorted(oqparam.imtls)))


def _cons_coeffs(records, loss_types, limit_states):
dtlist = [(lt, F32) for lt in loss_types]
def _cons_coeffs(df, perils, loss_dt, limit_states):
dtlist = [(peril, loss_dt) for peril in perils]
coeffs = numpy.zeros(len(limit_states), dtlist)
for rec in records:
coeffs[rec['loss_type']] = [rec[ds] for ds in limit_states]
for lt in loss_dt.names:
for peril in perils:
the_df = df[(df.peril == peril) & (df.loss_type == lt)]
coeffs[peril][lt] = the_df[limit_states].to_numpy()[0]
return coeffs
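A self-contained sketch of the structured array built by the new _cons_coeffs (perils, loss types and coefficients below are made up): one record per limit state, one field per peril, each field carrying a loss_dt sub-record per loss type.

    import numpy
    import pandas
    F32 = numpy.float32
    perils = ['earthquake', 'liquefaction']
    loss_dt = numpy.dtype([('structural', F32), ('nonstructural', F32)])
    limit_states = ['moderate', 'complete']
    df = pandas.DataFrame(dict(
        peril=['earthquake', 'earthquake', 'liquefaction', 'liquefaction'],
        loss_type=['structural', 'nonstructural'] * 2,
        moderate=[.3, .2, .1, .05],
        complete=[1., .9, .5, .4]))
    coeffs = numpy.zeros(len(limit_states), [(p, loss_dt) for p in perils])
    for lt in loss_dt.names:
        for peril in perils:
            the_df = df[(df.peril == peril) & (df.loss_type == lt)]
            coeffs[peril][lt] = the_df[limit_states].to_numpy()[0]
    # coeffs['earthquake']['structural'] -> array([0.3, 1.], dtype=float32)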


@@ -993,43 +995,35 @@ def get_crmodel(oqparam):
return crm

risklist = get_risk_functions(oqparam)
perils = numpy.array(sorted(set(rf.peril for rf in risklist)))
if not oqparam.limit_states and risklist.limit_states:
oqparam.limit_states = risklist.limit_states
elif 'damage' in oqparam.calculation_mode and risklist.limit_states:
assert oqparam.limit_states == risklist.limit_states
loss_types = oqparam.loss_dt().names
consdict = {}
if 'consequence' in oqparam.inputs:
if not risklist.limit_states:
raise InvalidFile('Missing fragility functions in %s' %
oqparam.inputs['job_ini'])
# build consdict of the form consequence_by_tagname -> tag -> array
loss_dt = oqparam.loss_dt()
for by, fnames in oqparam.inputs['consequence'].items():
if isinstance(fnames, str): # single file
fnames = [fnames]
dtypedict = {
by: str, 'consequence': str, 'loss_type': str, None: float}

# i.e. files collapsed.csv, fatalities.csv, ... with headers
# taxonomy,consequence,loss_type,slight,moderate,extensive
arrays = []
for fname in fnames:
arr = hdf5.read_csv(fname, dtypedict).array
for no, row in enumerate(arr, 2):
if row['loss_type'] not in loss_types:
msg = '%s: line=%d: there is not fragility function for %s'
logging.warning(msg, fname, no, row['loss_type'])
arrays.append(arr[numpy.isin(arr['loss_type'], loss_types)])

array = numpy.concatenate(arrays)
dic = group_array(array, 'consequence')
for consequence, group in dic.items():
# i.e. files collapsed.csv, fatalities.csv, ... with headers like
# taxonomy,consequence,slight,moderate,extensive
df = pandas.concat([pandas.read_csv(fname) for fname in fnames])
if 'loss_type' not in df.columns:
df['loss_type'] = 'structural'
if 'peril' not in df.columns:
df['peril'] = 'earthquake'
for consequence, group in df.groupby('consequence'):
if consequence not in scientific.KNOWN_CONSEQUENCES:
raise InvalidFile('Unknown consequence %s in %s' %
(consequence, fnames))
bytag = {
tag: _cons_coeffs(grp, loss_types, risklist.limit_states)
for tag, grp in group_array(group, by).items()}
tag: _cons_coeffs(grp, perils, loss_dt, risklist.limit_states)
for tag, grp in group.groupby(by)}
consdict['%s_by_%s' % (consequence, by)] = bytag
# for instance consdict['collapsed_by_taxonomy']['W_LFM-DUM_H3']
# is [(0.05,), (0.2 ,), (0.6 ,), (1. ,)] for damage state and structural
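A hypothetical example of the defaulting applied above when an old-style consequences file lacks the new columns: a missing loss_type column is filled with 'structural' and a missing peril column with 'earthquake'.

    import io
    import pandas
    csv = io.StringIO("taxonomy,consequence,slight,complete\ntax1,losses,0.04,0.64\n")
    df = pandas.read_csv(csv)
    if 'loss_type' not in df.columns:
        df['loss_type'] = 'structural'
    if 'peril' not in df.columns:
        df['peril'] = 'earthquake'
    print(df[['taxonomy', 'consequence', 'peril', 'loss_type']])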
@@ -1220,7 +1214,7 @@ def aristotle_tmap(oqparam, taxidx):
for taxo, risk_id, weight in zip(df.taxonomy, df.conversion, df.weight):
if taxo in taxidx:
acc['country'].append(key)
acc['loss_type'].append('*')
acc['peril'].append('earthquake')
acc['taxi'].append(taxidx[taxo])
acc['risk_id'].append(risk_id)
acc['weight'].append(weight)
@@ -1241,7 +1235,7 @@ def taxonomy_mapping(oqparam, taxidx):
df = pandas.DataFrame(dict(weight=numpy.ones(nt),
taxi=taxidx.values(),
risk_id=list(taxidx),
loss_type=['*']*nt,
peril=['*']*nt,
country=['?']*nt))
return df
fname = oqparam.inputs['taxonomy_mapping']
@@ -1255,17 +1249,16 @@ def _taxonomy_mapping(filename, taxidx):
raise e.__class__('%s while reading %s' % (e, filename))
if 'weight' not in tmap_df:
tmap_df['weight'] = 1.
if 'loss_type' not in tmap_df:
tmap_df['loss_type'] = '*'
if 'peril' not in tmap_df:
tmap_df['peril'] = '*'
if 'country' not in tmap_df:
tmap_df['country'] = '?'
if 'conversion' in tmap_df.columns:
# conversion was the old name in the header for engine <= 3.12
tmap_df = tmap_df.rename(columns={'conversion': 'risk_id'})
assert set(tmap_df) == {'country', 'loss_type', 'taxonomy', 'risk_id', 'weight'
}, set(tmap_df)
assert set(tmap_df) == {'country', 'peril', 'taxonomy', 'risk_id', 'weight'}, set(tmap_df)
taxos = set()
for (taxo, lt), df in tmap_df.groupby(['taxonomy', 'loss_type']):
for (taxo, per), df in tmap_df.groupby(['taxonomy', 'peril']):
taxos.add(taxo)
if abs(df.weight.sum() - 1.) > pmf.PRECISION:
raise InvalidFile('%s: the weights do not sum up to 1 for %s' %
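For the weight check above, a hypothetical taxonomy mapping grouped by (taxonomy, peril): the weights of each group must sum up to 1 (risk function IDs below are invented).

    import pandas
    tmap_df = pandas.DataFrame(dict(
        taxonomy=['taxo1', 'taxo1', 'taxo2'],
        risk_id=['risk_A', 'risk_B', 'risk_C'],  # invented risk function IDs
        peril=['earthquake', 'earthquake', '*'],
        weight=[.4, .6, 1.],
        country=['?'] * 3))
    for (taxo, per), df in tmap_df.groupby(['taxonomy', 'peril']):
        assert abs(df.weight.sum() - 1.) < 1E-7, (taxo, per)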
2 changes: 1 addition & 1 deletion openquake/commonlib/tests/logictree_test.py
@@ -2159,7 +2159,7 @@ def test_mixed_lines(self):
exp = pandas.DataFrame(
dict(risk_id='taxo1 taxo2 taxo2 taxo3 taxo1'.split(),
weight=[1., 1., .5, 1., .5],
loss_type=['*'] * 5,
peril=['*'] * 5,
country=['?'] * 5,
taxi=[1, 2, 4, 3, 4]))
pandas.testing.assert_frame_equal(got, exp)
@@ -1,4 +1,4 @@
taxonomy,consequence,loss_type,ds1,ds2,ds3,ds4
tax1,losses,structural,4.000000E-02,1.600000E-01,3.200000E-01,6.400000E-01
tax2,losses,structural,4.000000E-02,1.600000E-01,3.200000E-01,6.400000E-01
tax3,losses,structural,4.000000E-02,1.600000E-01,3.200000E-01,6.400000E-01
taxonomy,consequence,peril,ds1,ds2,ds3,ds4
tax1,losses,earthquake,4.000000E-02,1.600000E-01,3.200000E-01,6.400000E-01
tax2,losses,earthquake,4.000000E-02,1.600000E-01,3.200000E-01,6.400000E-01
tax3,losses,earthquake,4.000000E-02,1.600000E-01,3.200000E-01,6.400000E-01
@@ -1,4 +1,4 @@
taxonomy,consequence,loss_type,LS1,LS2
RC,losses,structural,4.000000E-02,1.600000E-01
RM,losses,structural,4.000000E-02,1.600000E-01
W,losses,structural,4.000000E-02,1.600000E-01
taxonomy,consequence,peril,LS1,LS2
RC,losses,earthquake,4.000000E-02,1.600000E-01
RM,losses,earthquake,4.000000E-02,1.600000E-01
W,losses,earthquake,4.000000E-02,1.600000E-01
@@ -1,6 +1,6 @@
taxonomy,consequence,loss_type,slight,moderate,extreme,complete
Concrete,losses,structural,0.04,0.31,0.6,1
Adobe,losses,structural,0.04,0.31,0.6,1
Stone-Masonry,losses,structural,0.04,0.31,0.6,1
Unreinforced-Brick-Masonry,losses,structural,0.04,0.31,0.6,1
Wood,losses,structural,0.04,0.31,0.6,1
taxonomy,consequence,peril,slight,moderate,extreme,complete
Concrete,losses,earthquake,0.04,0.31,0.6,1
Adobe,losses,earthquake,0.04,0.31,0.6,1
Stone-Masonry,losses,earthquake,0.04,0.31,0.6,1
Unreinforced-Brick-Masonry,losses,earthquake,0.04,0.31,0.6,1
Wood,losses,earthquake,0.04,0.31,0.6,1
@@ -1,11 +1,11 @@
taxonomy,consequence,loss_type,moderate,complete
Concrete,losses,structural,0.31,1
Adobe,losses,structural,0.31,1
Stone-Masonry,losses,structural,0.31,1
Unreinforced-Brick-Masonry,losses,structural,0.31,1
Wood,losses,structural,0.31,1
Concrete,fatalities,structural,0,1
Adobe,fatalities,structural,0,1
Stone-Masonry,fatalities,structural,0,1
Unreinforced-Brick-Masonry,fatalities,structural,0,1
Wood,fatalities,structural,0,1
taxonomy,consequence,peril,moderate,complete
Concrete,losses,earthquake,0.31,1
Adobe,losses,earthquake,0.31,1
Stone-Masonry,losses,earthquake,0.31,1
Unreinforced-Brick-Masonry,losses,earthquake,0.31,1
Wood,losses,earthquake,0.31,1
Concrete,fatalities,earthquake,0,1
Adobe,fatalities,earthquake,0,1
Stone-Masonry,fatalities,earthquake,0,1
Unreinforced-Brick-Masonry,fatalities,earthquake,0,1
Wood,fatalities,earthquake,0,1
20 changes: 10 additions & 10 deletions openquake/qa_tests_data/event_based_damage/case_16/consequences.csv
@@ -1,10 +1,10 @@
taxonomy,consequence,loss_type,ds1,ds2,ds3,ds4
tax1,losses,nonstructural,5.000000E-02,2.500000E-01,5.000000E-01,7.500000E-01
tax2,losses,nonstructural,5.000000E-02,2.500000E-01,5.000000E-01,7.500000E-01
tax3,losses,nonstructural,5.000000E-02,2.500000E-01,5.000000E-01,7.500000E-01
tax1,losses,structural,4.000000E-02,1.600000E-01,3.200000E-01,6.400000E-01
tax2,losses,structural,4.000000E-02,1.600000E-01,3.200000E-01,6.400000E-01
tax3,losses,structural,4.000000E-02,1.600000E-01,3.200000E-01,6.400000E-01
tax1,losses,contents,1.000000E-01,3.000000E-01,6.000000E-01,9.000000E-01
tax2,losses,contents,1.000000E-01,3.000000E-01,6.000000E-01,9.000000E-01
tax3,losses,contents,1.000000E-01,3.000000E-01,6.000000E-01,9.000000E-01
taxonomy,consequence,peril,ds1,ds2,ds3,ds4
tax1,losses,earthquake,5.000000E-02,2.500000E-01,5.000000E-01,7.500000E-01
tax2,losses,earthquake,5.000000E-02,2.500000E-01,5.000000E-01,7.500000E-01
tax3,losses,earthquake,5.000000E-02,2.500000E-01,5.000000E-01,7.500000E-01
tax1,losses,landslide,4.000000E-02,1.600000E-01,3.200000E-01,6.400000E-01
tax2,losses,landslide,4.000000E-02,1.600000E-01,3.200000E-01,6.400000E-01
tax3,losses,landslide,4.000000E-02,1.600000E-01,3.200000E-01,6.400000E-01
tax1,losses,liquefaction,1.000000E-01,3.000000E-01,6.000000E-01,9.000000E-01
tax2,losses,liquefaction,1.000000E-01,3.000000E-01,6.000000E-01,9.000000E-01
tax3,losses,liquefaction,1.000000E-01,3.000000E-01,6.000000E-01,9.000000E-01
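A short sketch of reading the per-peril consequences above (values copied from the case_16 file): the same consequence, here 'losses', can now carry different coefficients for earthquake, landslide and liquefaction.

    import io
    import pandas
    csv = io.StringIO(
        'taxonomy,consequence,peril,ds1,ds2,ds3,ds4\n'
        'tax1,losses,earthquake,0.05,0.25,0.50,0.75\n'
        'tax1,losses,landslide,0.04,0.16,0.32,0.64\n'
        'tax1,losses,liquefaction,0.10,0.30,0.60,0.90\n')
    df = pandas.read_csv(csv)
    for peril, grp in df.groupby('peril'):
        print(peril, grp[['ds1', 'ds2', 'ds3', 'ds4']].to_numpy()[0])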