Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 8 additions & 3 deletions src/dscim/preprocessing/input_damages.py
Original file line number Diff line number Diff line change
Expand Up @@ -208,7 +208,9 @@ def concatenate_labor_damages(
conversion_value = 1.273526
concat_ds = xr.combine_by_coords(list_damages_batch)
for v in [f"histclim_{variable}", f"delta_{variable}"]:
concat_ds[v] = (concat_ds[v] / ec_cls.econ_vars.pop) * -1 * conversion_value
concat_ds[v] = (
(concat_ds[v] / ec_cls.econ_vars.pop.load()) * -1 * conversion_value
)

# Save file
file_name = f"{variable}_{val_type}_{i}"
Expand Down Expand Up @@ -348,7 +350,7 @@ def process_batch(g):
ds = ds.drop(var)

# get in per capita 2019 PPP-adjusted USD damages
ds = (ds / pop) * -1 * 1.273526
ds = (ds / pop.load()) * -1 * 1.273526

# replace infinite values with missings
for var in ds.keys():
Expand Down Expand Up @@ -382,6 +384,7 @@ def process_batch(g):
.squeeze()
)
batches = xr.where(np.isinf(batches), np.nan, batches)
batches = batches.astype(np.float32)

batches.rename({"wc_reallocation": varname})[varname].to_dataset().to_zarr(
store=save_path, mode="a", consolidated=True
Expand Down Expand Up @@ -584,7 +587,9 @@ def concatenate_energy_damages(
conversion_value = 1.273526
concat_ds = xr.combine_by_coords(list_damages_batch)
for v in [f"histclim_{variable}", f"delta_{variable}"]:
concat_ds[v] = (concat_ds[v] / ec_cls.econ_vars.pop) * conversion_value
concat_ds[v] = (
concat_ds[v] / ec_cls.econ_vars.pop.load()
) * conversion_value

# Save file
file_name = f"{variable}_{i}"
Expand Down
16 changes: 8 additions & 8 deletions src/dscim/utils/rff.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@
def solve_optimization(ssp_df, rff_df):
"""Generate weights based on which to derive the weighted average of damage function coefficients
across six SSP-growth models for a single RFF-SP

This function applies an emulation scheme to calculate a set of weights, constrained to
sum to unity, that, when used to take a weighted average of global GDP across SSP-growth models
(3 SSPs X 2 IAMs), most closely recovers the global GDP in the RFF-SP simulation run that
Expand All @@ -29,7 +28,6 @@ def solve_optimization(ssp_df, rff_df):
in order to match the country-level GDPs designated by the given RFF-SP. Empirically, it solves
an optimization problem to minimize a weighted sum of country-level errors, taking country-level
RFF-SP GDPs as weights

Parameters
----------
ssp_df : pd.DataFrame
Expand Down Expand Up @@ -177,7 +175,6 @@ def process_ssp_sample(ssppath):
def process_rff_sample(i, rffpath, ssp_df, outdir, HEADER, **storage_options):
"""Clean raw socioeconomic projections from a single RFF-SP simulation run,
pass the cleaned dataset to the `solve_optimization` function, and save outputs

This produces a csv file of RFF emulator weights and country-level errors in 5-year
increments for a single RFF-SP
"""
Expand All @@ -188,7 +185,7 @@ def process_rff_sample(i, rffpath, ssp_df, outdir, HEADER, **storage_options):

# Fill missing data with mean across SSP scenarios of the same years
rff_df = pd.DataFrame()
for iso, group in rff_raw.groupby(["iso"]):
for iso, group in rff_raw.groupby("iso"):
minyear = min(group.year)
before_all = ssp_df[(ssp_df.year < minyear) & (ssp_df.iso == iso)][
["iso", "year", "value"]
Expand Down Expand Up @@ -320,6 +317,7 @@ def weight_df(
factors,
pulse_year,
fractional=False,
mask="unmasked",
):
"""Weight, fractionalize, and combine SSP damage functions,
then multiply by RFF GDP to return RFF damage functions.
Expand All @@ -328,7 +326,7 @@ def weight_df(
# get damage function as share of global GDP
df = (
xr.open_dataset(
f"{in_library}/{sector}/{pulse_year}/{recipe}_{disc}_eta{eta_rho[0]}_rho{eta_rho[1]}_{file}.nc4"
f"{in_library}/{sector}/{pulse_year}/{mask}/{recipe}_{disc}_eta{eta_rho[0]}_rho{eta_rho[1]}_{file}.nc4"
)
/ ssp_gdp
)
Expand All @@ -339,7 +337,7 @@ def weight_df(
# save fractional damage function
if fractional:
rff.sel(year=slice(2020, 2099)).to_netcdf(
f"{out_library}/{sector}/{pulse_year}/{recipe}_{disc}_eta{eta_rho[0]}_rho{eta_rho[1]}_fractional_{file}.nc4"
f"{out_library}/{sector}/{pulse_year}/{mask}/{recipe}_{disc}_eta{eta_rho[0]}_rho{eta_rho[1]}_fractional_{file}.nc4"
)

# recover damage function as dollars instead of fraction
Expand All @@ -350,9 +348,9 @@ def weight_df(

dfs = xr.combine_by_coords([rff, post_2100])

os.makedirs(f"{out_library}/{sector}/{pulse_year}/", exist_ok=True)
os.makedirs(f"{out_library}/{sector}/{pulse_year}/{mask}", exist_ok=True)
dfs.to_netcdf(
f"{out_library}/{sector}/{pulse_year}/{recipe}_{disc}_eta{eta_rho[0]}_rho{eta_rho[1]}_{file}.nc4"
f"{out_library}/{sector}/{pulse_year}/{mask}/{recipe}_{disc}_eta{eta_rho[0]}_rho{eta_rho[1]}_{file}.nc4"
)


Expand All @@ -368,6 +366,7 @@ def rff_damage_functions(
runid_path,
weights_path,
pulse_year,
mask,
):
"""Wrapper function for `weight_df()`."""

Expand Down Expand Up @@ -405,6 +404,7 @@ def rff_damage_functions(
weights=weights,
factors=factors,
pulse_year=pulse_year,
mask=mask,
)


Expand Down