From e7b971e2a5b82a7b5440c46527855fcf7930db08 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Mon, 7 Jul 2025 14:00:58 +0200 Subject: [PATCH 001/202] simplify indexing --- scripts/pypsa-de/modify_prenetwork.py | 40 ++++++++++++--------------- 1 file changed, 18 insertions(+), 22 deletions(-) diff --git a/scripts/pypsa-de/modify_prenetwork.py b/scripts/pypsa-de/modify_prenetwork.py index 9ead691e4..1b3d77efe 100644 --- a/scripts/pypsa-de/modify_prenetwork.py +++ b/scripts/pypsa-de/modify_prenetwork.py @@ -835,42 +835,38 @@ def aladin_mobility_demand(n): simulation_period_correction_factor = n.snapshot_weightings.objective.sum() / 8760 # oil demand - oil_demand = aladin_demand.Liquids * simulation_period_correction_factor - oil_index = n.loads[ - (n.loads.carrier == "land transport oil") & (n.loads.index.str[:2] == "DE") - ].index - oil_demand.index = [f"{i} land transport oil" for i in oil_demand.index] + oil_demand = pd.Series( + aladin_demand.Liquids * simulation_period_correction_factor, + index=aladin_demand.index + " land transport oil", + ) - profile = n.loads_t.p_set.loc[:, oil_index] + profile = n.loads_t.p_set.loc[:, oil_demand.index] profile /= profile.sum() - n.loads_t.p_set.loc[:, oil_index] = (oil_demand * profile).div( + n.loads_t.p_set.loc[:, oil_demand.index] = (oil_demand * profile).div( n.snapshot_weightings.objective, axis=0 ) # hydrogen demand - h2_demand = aladin_demand.Hydrogen * simulation_period_correction_factor - h2_index = n.loads[ - (n.loads.carrier == "land transport fuel cell") - & (n.loads.index.str[:2] == "DE") - ].index - h2_demand.index = [f"{i} land transport fuel cell" for i in h2_demand.index] + h2_demand = pd.Series( + aladin_demand.Hydrogen * simulation_period_correction_factor, + index=aladin_demand.index + " land transport fuel cell", + ) - profile = n.loads_t.p_set.loc[:, h2_index] + profile = n.loads_t.p_set.loc[:, h2_demand.index] profile /= profile.sum() - n.loads_t.p_set.loc[:, h2_index] = (h2_demand * profile).div( + n.loads_t.p_set.loc[:, h2_demand.index] = (h2_demand * profile).div( n.snapshot_weightings.objective, axis=0 ) # electricity demand - ev_demand = aladin_demand.Electricity * simulation_period_correction_factor - ev_index = n.loads[ - (n.loads.carrier == "land transport EV") & (n.loads.index.str[:2] == "DE") - ].index - ev_demand.index = [f"{i} land transport EV" for i in ev_demand.index] + ev_demand = pd.Series( + aladin_demand.Electricity * simulation_period_correction_factor, + index=aladin_demand.index + " land transport EV", + ) - profile = n.loads_t.p_set.loc[:, ev_index] + profile = n.loads_t.p_set.loc[:, ev_demand.index] profile /= profile.sum() - n.loads_t.p_set.loc[:, ev_index] = (ev_demand * profile).div( + n.loads_t.p_set.loc[:, ev_demand.index] = (ev_demand * profile).div( n.snapshot_weightings.objective, axis=0 ) From 76d967d12270875b54be9290ea08d4391e606b1c Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Mon, 7 Jul 2025 14:23:21 +0200 Subject: [PATCH 002/202] determine charger capacities like in prepare_sector_network; remove land_transport_electric_share --- Snakefile | 7 +++--- scripts/pypsa-de/modify_prenetwork.py | 36 ++++++++++++++------------- 2 files changed, 22 insertions(+), 21 deletions(-) diff --git a/Snakefile b/Snakefile index 59fc95e0e..bfa089ea7 100644 --- a/Snakefile +++ b/Snakefile @@ -488,9 +488,6 @@ rule modify_prenetwork: must_run=config_provider("must_run"), clustering=config_provider("clustering", "temporal", "resolution_sector"), H2_plants=config_provider("electricity", 
"H2_plants_DE"), - land_transport_electric_share=config_provider( - "sector", "land_transport_electric_share" - ), onshore_nep_force=config_provider("onshore_nep_force"), offshore_nep_force=config_provider("offshore_nep_force"), shipping_methanol_efficiency=config_provider( @@ -500,6 +497,9 @@ rule modify_prenetwork: shipping_methanol_share=config_provider("sector", "shipping_methanol_share"), mwh_meoh_per_tco2=config_provider("sector", "MWh_MeOH_per_tCO2"), scale_capacity=config_provider("scale_capacity"), + bev_charge_rate=config_provider("sector", "bev_charge_rate"), + bev_energy=config_provider("sector", "bev_energy"), + bev_dsm_availability=config_provider("sector", "bev_dsm_availability"), input: costs_modifications="ariadne-data/costs_{planning_horizons}-modifications.csv", network=resources( @@ -514,7 +514,6 @@ rule modify_prenetwork: aladin_demand=resources( "mobility_demand_aladin_{clusters}_{planning_horizons}.csv" ), - transport_data=resources("transport_data_s_{clusters}.csv"), biomass_potentials=resources( "biomass_potentials_s_{clusters}_{planning_horizons}.csv" ), diff --git a/scripts/pypsa-de/modify_prenetwork.py b/scripts/pypsa-de/modify_prenetwork.py index 1b3d77efe..422196171 100644 --- a/scripts/pypsa-de/modify_prenetwork.py +++ b/scripts/pypsa-de/modify_prenetwork.py @@ -871,37 +871,39 @@ def aladin_mobility_demand(n): ) # adjust BEV charger and V2G capacities - number_cars = pd.read_csv(snakemake.input.transport_data, index_col=0)[ - "number cars" - ].filter(like="DE") - - factor = ( - aladin_demand.number_of_cars - * 1e6 - / ( - number_cars - * snakemake.params.land_transport_electric_share[ - int(snakemake.wildcards.planning_horizons) - ] - ) - ) BEV_charger_i = n.links[ (n.links.carrier == "BEV charger") & (n.links.bus0.str.startswith("DE")) ].index - n.links.loc[BEV_charger_i].p_nom *= pd.Series(factor.values, index=BEV_charger_i) + + # Check that buses in network and aladin_data appear in same order + assert [ + idx.startswith(idx2) for (idx, idx2) in zip(BEV_charger_i, aladin_demand.index) + ] + + # Then directly use .values for assignment + p_nom = ( + aladin_demand.number_of_cars.values * snakemake.params.bev_charge_rate + ) # same logic like in prepare_sector_network + + n.links.loc[BEV_charger_i].p_nom = p_nom V2G_i = n.links[ (n.links.carrier == "V2G") & (n.links.bus0.str.startswith("DE")) ].index if not V2G_i.empty: - n.links.loc[V2G_i].p_nom *= pd.Series(factor.values, index=V2G_i) + n.links.loc[V2G_i].p_nom = p_nom * snakemake.params.bev_dsm_availability dsm_i = n.stores[ (n.stores.carrier == "EV battery") & (n.stores.bus.str.startswith("DE")) ].index + e_nom = ( + aladin_demand.number_of_cars.values + * snakemake.params.bev_energy + * snakemake.params.bev_dsm_availability + ) if not dsm_i.empty: - n.stores.loc[dsm_i].e_nom *= pd.Series(factor.values, index=dsm_i) + n.stores.loc[dsm_i].e_nom = e_nom def add_hydrogen_turbines(n): From bbc4d410565a865bc263b448a3a01618651786f5 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Mon, 7 Jul 2025 14:29:06 +0200 Subject: [PATCH 003/202] add PHEV to number of electric cars because they provide charging capacity as well --- scripts/pypsa-de/build_mobility_demand.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/scripts/pypsa-de/build_mobility_demand.py b/scripts/pypsa-de/build_mobility_demand.py index c7eacdd88..f804e4349 100644 --- a/scripts/pypsa-de/build_mobility_demand.py +++ b/scripts/pypsa-de/build_mobility_demand.py @@ -30,7 +30,7 @@ def get_transport_data(db, year): 
transport_demand["Hydrogen"] = 0.0 + 0.0 + 0.0 + 0.0 transport_demand["Liquids"] = 41.81 + 1369.34 + 11.18 + 637.23 transport_demand = transport_demand.div(3.6e-6) # convert PJ to MWh - transport_demand["number_of_cars"] = 0.658407 + transport_demand["number_of_cars"] = 0.658407 + 0.120261 # BEV + PHEV else: df = db[year].loc[snakemake.params.leitmodelle["transport"]] @@ -41,9 +41,10 @@ def get_transport_data(db, year): transport_demand.loc[fuel] += df.get((key, "TWh/yr"), 0.0) transport_demand = transport_demand.mul(1e6) # convert TWh to MWh - transport_demand["number_of_cars"] = df.loc[ - "Stock|Transportation|LDV|BEV", "million" - ] + transport_demand["number_of_cars"] = ( + df.loc["Stock|Transportation|LDV|BEV", "million"] + + df.loc["Stock|Transportation|LDV|PHEV", "million"] + ) return transport_demand From c311dc7887609690bb7e37c12c95dd2132021516 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Mon, 7 Jul 2025 15:49:42 +0200 Subject: [PATCH 004/202] syntax fixes --- scripts/pypsa-de/modify_prenetwork.py | 32 ++++++++++++++------------- 1 file changed, 17 insertions(+), 15 deletions(-) diff --git a/scripts/pypsa-de/modify_prenetwork.py b/scripts/pypsa-de/modify_prenetwork.py index 422196171..e4d63732c 100644 --- a/scripts/pypsa-de/modify_prenetwork.py +++ b/scripts/pypsa-de/modify_prenetwork.py @@ -835,20 +835,21 @@ def aladin_mobility_demand(n): simulation_period_correction_factor = n.snapshot_weightings.objective.sum() / 8760 # oil demand - oil_demand = pd.Series( - aladin_demand.Liquids * simulation_period_correction_factor, - index=aladin_demand.index + " land transport oil", - ) + if "land transport oil" in n.loads.carrier.unique(): # i.e. before 2050 + oil_demand = pd.Series( + aladin_demand.Liquids.values * simulation_period_correction_factor, + index=aladin_demand.index + " land transport oil", + ) - profile = n.loads_t.p_set.loc[:, oil_demand.index] - profile /= profile.sum() - n.loads_t.p_set.loc[:, oil_demand.index] = (oil_demand * profile).div( - n.snapshot_weightings.objective, axis=0 - ) + profile = n.loads_t.p_set.loc[:, oil_demand.index] + profile /= profile.sum() + n.loads_t.p_set.loc[:, oil_demand.index] = (oil_demand * profile).div( + n.snapshot_weightings.objective, axis=0 + ) # hydrogen demand h2_demand = pd.Series( - aladin_demand.Hydrogen * simulation_period_correction_factor, + aladin_demand.Hydrogen.values * simulation_period_correction_factor, index=aladin_demand.index + " land transport fuel cell", ) @@ -860,7 +861,7 @@ def aladin_mobility_demand(n): # electricity demand ev_demand = pd.Series( - aladin_demand.Electricity * simulation_period_correction_factor, + aladin_demand.Electricity.values * simulation_period_correction_factor, index=aladin_demand.index + " land transport EV", ) @@ -883,27 +884,28 @@ def aladin_mobility_demand(n): # Then directly use .values for assignment p_nom = ( - aladin_demand.number_of_cars.values * snakemake.params.bev_charge_rate + aladin_demand.number_of_cars.values * 1e6 * snakemake.params.bev_charge_rate ) # same logic like in prepare_sector_network - n.links.loc[BEV_charger_i].p_nom = p_nom + n.links.loc[BEV_charger_i, "p_nom"] = p_nom V2G_i = n.links[ (n.links.carrier == "V2G") & (n.links.bus0.str.startswith("DE")) ].index if not V2G_i.empty: - n.links.loc[V2G_i].p_nom = p_nom * snakemake.params.bev_dsm_availability + n.links.loc[V2G_i, "p_nom"] = p_nom * snakemake.params.bev_dsm_availability dsm_i = n.stores[ (n.stores.carrier == "EV battery") & (n.stores.bus.str.startswith("DE")) ].index e_nom = ( 
aladin_demand.number_of_cars.values + * 1e6 * snakemake.params.bev_energy * snakemake.params.bev_dsm_availability ) if not dsm_i.empty: - n.stores.loc[dsm_i].e_nom = e_nom + n.stores.loc[dsm_i, "e_nom"] = e_nom def add_hydrogen_turbines(n): From 1ef17042da20a6932898284989cf5eec93a3aadc Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Mon, 7 Jul 2025 15:49:48 +0200 Subject: [PATCH 005/202] rename branch --- config/config.de.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config/config.de.yaml b/config/config.de.yaml index 97aaa3327..d4650b048 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -4,7 +4,7 @@ # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#run run: - prefix: 20250514_dhsubnodes + prefix: 20250707_improve_transport_demand name: # - ExPol - KN2045_Mix From 1e0587ecaf24c20cf6514b657c738f9047e2053a Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Tue, 8 Jul 2025 11:30:40 +0200 Subject: [PATCH 006/202] add option to specify AGEB(+KBA) as source for transport demand in 2020 and 2025 --- Snakefile | 1 + config/config.de.yaml | 1 + scripts/pypsa-de/build_mobility_demand.py | 33 ++++++++++++++++++++--- 3 files changed, 32 insertions(+), 3 deletions(-) diff --git a/Snakefile b/Snakefile index bfa089ea7..ea9b59393 100644 --- a/Snakefile +++ b/Snakefile @@ -314,6 +314,7 @@ rule build_mobility_demand: reference_scenario=config_provider("iiasa_database", "reference_scenario"), planning_horizons=config_provider("scenario", "planning_horizons"), leitmodelle=config_provider("iiasa_database", "leitmodelle"), + ageb_for_transport=config_provider("iiasa_database", "ageb_for_transport"), input: ariadne="resources/ariadne_database.csv", clustered_pop_layout=resources("pop_layout_base_s_{clusters}.csv"), diff --git a/config/config.de.yaml b/config/config.de.yaml index d4650b048..741f3e2ee 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -43,6 +43,7 @@ iiasa_database: - KN2045_NFhoch reference_scenario: KN2045_Mix region: Deutschland + ageb_for_transport: true # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#foresight foresight: myopic diff --git a/scripts/pypsa-de/build_mobility_demand.py b/scripts/pypsa-de/build_mobility_demand.py index f804e4349..665cc21ac 100644 --- a/scripts/pypsa-de/build_mobility_demand.py +++ b/scripts/pypsa-de/build_mobility_demand.py @@ -7,7 +7,7 @@ logger = logging.getLogger(__name__) -def get_transport_data(db, year): +def get_transport_data(db, year, ageb_for_transport=False): """ Retrieve the German mobility demand from the transport_data model. @@ -19,7 +19,7 @@ def get_transport_data(db, year): transport_demand = pd.Series(0.0, index=fuels) - if snakemake.wildcards.planning_horizons == "2020": + if year == "2020": logger.info( "For 2020, using hard-coded transport data from the Ariadne2-internal database." 
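Note: the rescaling pattern in these hunks keeps the shape of the existing load time series and only changes its annual energy; dividing by the snapshot weightings converts that energy back into average power per snapshot. A self-contained sketch with made-up numbers:

import pandas as pd

weightings = pd.Series([3.0] * 8)                                # e.g. 3-hourly snapshots (assumed)
old_p_set = pd.Series([5, 7, 9, 11, 9, 7, 5, 3], dtype=float)    # MW, existing profile (assumed)
annual_energy_target = 600.0                                     # MWh over the modelled period (assumed)

profile = old_p_set / old_p_set.sum()                            # dimensionless shape, sums to 1
new_p_set = (annual_energy_target * profile) / weightings        # MW per snapshot

# the weighted energy of the rescaled series reproduces the target
assert abs((new_p_set * weightings).sum() - annual_energy_target) < 1e-6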
) @@ -32,6 +32,31 @@ def get_transport_data(db, year): transport_demand = transport_demand.div(3.6e-6) # convert PJ to MWh transport_demand["number_of_cars"] = 0.658407 + 0.120261 # BEV + PHEV + if ageb_for_transport: + # AGEB 2020, https://ag-energiebilanzen.de/daten-und-fakten/bilanzen-1990-bis-2030/?_jahresbereich-bilanz=2011-2020 + transport_demand["Electricity"] = 39129 + 2394 # Schiene + Straße + transport_demand["Hydrogen"] = 0 + transport_demand["Liquids"] = ( + 140718 + 1261942 + 10782 + 638820 + ) # Bio Strasse + Diesel Strasse + Diesel Schiene + Otto Strasse + transport_demand = transport_demand.div(3.6e-3) # convert TJ to MWH + # https://www.kba.de/DE/Statistik/Produktkatalog/produkte/Fahrzeuge/fz27_b_uebersicht.html + # FZ27_202101, table FZ 27.2, 1. January 2021: + transport_demand["number_of_cars"] = 0.358498 + 0.280149 + + elif year == "2025" and ageb_for_transport: + # AGEB2024 for train demand 25, linear extrapolation with AGEB2024 + AGEB2023 for EVs + transport_demand["Electricity"] = 39761 + 2 * 21270 - 16180 + transport_demand["Hydrogen"] = 0 + # AGEB2024 for Liquids demand 25 + transport_demand["Liquids"] = 116323 + 9650 + 1158250 + 702618 + transport_demand = transport_demand.div(3.6e-3) + # FZ27_202504, 202404, table FZ 27.8, + # linear extrapolation to 1. January 2026: "1. January 2025" + ("1. January 2025" - "1. January 2024") + # 2 * (1,810,815 + 968,734) - (1,555,265 + 922,876) = 3080957 + # rounded upwards + transport_demand["number_of_cars"] = 3.1 # million, BEV + PHEV + else: df = db[year].loc[snakemake.params.leitmodelle["transport"]] @@ -78,7 +103,9 @@ def get_transport_data(db, year): f"Retrieving German mobility demand from {snakemake.params.leitmodelle['transport']} transport model." ) # get transport_data data - transport_data = get_transport_data(db, snakemake.wildcards.planning_horizons) + transport_data = get_transport_data( + db, snakemake.wildcards.planning_horizons, snakemake.params.ageb_for_transport + ) # get German mobility weighting pop_layout = pd.read_csv(snakemake.input.clustered_pop_layout, index_col=0) From 0f6764e1de8459af496ee1adddc2faa1ab912526 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Tue, 8 Jul 2025 12:29:33 +0200 Subject: [PATCH 007/202] add warning --- scripts/pypsa-de/modify_prenetwork.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/scripts/pypsa-de/modify_prenetwork.py b/scripts/pypsa-de/modify_prenetwork.py index e4d63732c..da7036d3b 100644 --- a/scripts/pypsa-de/modify_prenetwork.py +++ b/scripts/pypsa-de/modify_prenetwork.py @@ -829,6 +829,9 @@ def aladin_mobility_demand(n): """ Change loads in Germany to use Aladin data for road demand. """ + logger.info( + "Overwriting land transport demand with Aladin data. In particular the `land_transport_electric_share` config setting will not be used." 
+ ) # get aladin data aladin_demand = pd.read_csv(snakemake.input.aladin_demand, index_col=0) From 952c72d46059faa8bb0e0c35d69378bfea7d4511 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Tue, 8 Jul 2025 12:32:16 +0200 Subject: [PATCH 008/202] set transport shares to dummy values --- config/config.de.yaml | 43 ++++++++++++++++++++++--------------------- 1 file changed, 22 insertions(+), 21 deletions(-) diff --git a/config/config.de.yaml b/config/config.de.yaml index 741f3e2ee..a4fc18066 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -334,30 +334,31 @@ sector: 2040: 0.29 2045: 0.36 2050: 0.43 + # The transport_shares are just dummy setting that get overwritten in build_mobility_demand land_transport_fuel_cell_share: - 2020: 0.05 - 2025: 0.05 - 2030: 0.05 - 2035: 0.05 - 2040: 0.05 - 2045: 0.05 - 2050: 0.05 + 2020: 0.01 + 2025: 0.01 + 2030: 0.01 + 2035: 0.01 + 2040: 0.01 + 2045: 0.01 + 2050: 0.01 land_transport_electric_share: - 2020: 0.05 - 2025: 0.15 - 2030: 0.3 - 2035: 0.45 - 2040: 0.7 - 2045: 0.85 - 2050: 0.95 + 2020: 0.04 + 2025: 0.04 + 2030: 0.04 + 2035: 0.04 + 2040: 0.04 + 2045: 0.04 + 2050: 0.04 land_transport_ice_share: - 2020: 0.9 - 2025: 0.8 - 2030: 0.65 - 2035: 0.5 - 2040: 0.25 - 2045: 0.1 - 2050: 0.0 + 2020: 0.95 + 2025: 0.95 + 2030: 0.95 + 2035: 0.95 + 2040: 0.95 + 2045: 0.95 + 2050: 0.95 # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#industry industry: From 243ba974a9768819b5f348b7c18f1e654f01ecf3 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Tue, 8 Jul 2025 16:15:10 +0200 Subject: [PATCH 009/202] add changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index c22e96d02..6773a5aad 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,5 @@ # Changelog +- Improved the transport demand data, added an option to source 2020 and 2025 data from AGEB instead of Aladin - Simplified scenarion definition and made `Mix` the default scenario - 0.3: workflow is all public now, no longer requires credentials to internal data - Allowing myopic optimization until 2050 From f9c13bb04a344f26187b74efccae20e345cf8880 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 11 Jul 2025 17:12:12 +0200 Subject: [PATCH 010/202] use mobility demand from uba projektionsbericht --- Snakefile | 6 +- config/config.de.yaml | 2 +- scripts/pypsa-de/build_mobility_demand.py | 123 +++++++++++++++++----- 3 files changed, 101 insertions(+), 30 deletions(-) diff --git a/Snakefile b/Snakefile index c4f8a9207..9def6be84 100644 --- a/Snakefile +++ b/Snakefile @@ -367,10 +367,14 @@ rule build_mobility_demand: reference_scenario=config_provider("iiasa_database", "reference_scenario"), planning_horizons=config_provider("scenario", "planning_horizons"), leitmodelle=config_provider("iiasa_database", "leitmodelle"), - ageb_for_transport=config_provider("iiasa_database", "ageb_for_transport"), + uba_for_mobility=config_provider("iiasa_database", "uba_for_mobility"), + shipping_oil_share=config_provider("sector", "shipping_oil_share"), + aviation_demand_factor=config_provider("sector", "aviation_demand_factor"), + energy_totals_year=config_provider("energy", "energy_totals_year"), input: ariadne="resources/ariadne_database.csv", clustered_pop_layout=resources("pop_layout_base_s_{clusters}.csv"), + energy_totals=resources("energy_totals.csv"), output: mobility_demand=resources( "mobility_demand_aladin_{clusters}_{planning_horizons}.csv" diff --git a/config/config.de.yaml b/config/config.de.yaml index 
a4fc18066..0ba5d2e24 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -43,7 +43,7 @@ iiasa_database: - KN2045_NFhoch reference_scenario: KN2045_Mix region: Deutschland - ageb_for_transport: true + uba_for_mobility: true # MWMS scenario from Projektionsbericht 2025 # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#foresight foresight: myopic diff --git a/scripts/pypsa-de/build_mobility_demand.py b/scripts/pypsa-de/build_mobility_demand.py index 665cc21ac..39f27b087 100644 --- a/scripts/pypsa-de/build_mobility_demand.py +++ b/scripts/pypsa-de/build_mobility_demand.py @@ -7,7 +7,12 @@ logger = logging.getLogger(__name__) -def get_transport_data(db, year, ageb_for_transport=False): +def get_transport_data( + db, + year, + non_land_liquids, + uba_for_mobility=False, +): """ Retrieve the German mobility demand from the transport_data model. @@ -24,40 +29,77 @@ def get_transport_data(db, year, ageb_for_transport=False): "For 2020, using hard-coded transport data from the Ariadne2-internal database." ) - transport_demand = pd.Series() - # if 2020 - transport_demand["Electricity"] = 0.0 + 17.0 + 35.82 + 0.0 - transport_demand["Hydrogen"] = 0.0 + 0.0 + 0.0 + 0.0 - transport_demand["Liquids"] = 41.81 + 1369.34 + 11.18 + 637.23 + transport_demand = pd.Series( + { + "Electricity": 0.0 + 17.0 + 35.82 + 0.0, + "Hydrogen": 0.0 + 0.0 + 0.0 + 0.0, + "Liquids": 41.81 + 1369.34 + 11.18 + 637.23, + } + ) + transport_demand = transport_demand.div(3.6e-6) # convert PJ to MWh transport_demand["number_of_cars"] = 0.658407 + 0.120261 # BEV + PHEV - if ageb_for_transport: + if uba_for_mobility: + logger.warning( + "For 2020, using historical AGEB and KBA data instead of UBA projections." + ) # AGEB 2020, https://ag-energiebilanzen.de/daten-und-fakten/bilanzen-1990-bis-2030/?_jahresbereich-bilanz=2011-2020 - transport_demand["Electricity"] = 39129 + 2394 # Schiene + Straße - transport_demand["Hydrogen"] = 0 - transport_demand["Liquids"] = ( - 140718 + 1261942 + 10782 + 638820 - ) # Bio Strasse + Diesel Strasse + Diesel Schiene + Otto Strasse - transport_demand = transport_demand.div(3.6e-3) # convert TJ to MWH + transport_demand = pd.Series( + { + "Electricity": 39129 + 2394, # Schiene + Straße + "Hydrogen": 0, + "Liquids": 140718 + + 1261942 + + 10782 + + 638820, # Bio Strasse + Diesel Strasse + Diesel Schiene + Otto Strasse + } + ) + transport_demand = transport_demand.div(3.6e-3) # convert PJ to MWH # https://www.kba.de/DE/Statistik/Produktkatalog/produkte/Fahrzeuge/fz27_b_uebersicht.html # FZ27_202101, table FZ 27.2, 1. January 2021: transport_demand["number_of_cars"] = 0.358498 + 0.280149 - elif year == "2025" and ageb_for_transport: - # AGEB2024 for train demand 25, linear extrapolation with AGEB2024 + AGEB2023 for EVs - transport_demand["Electricity"] = 39761 + 2 * 21270 - 16180 - transport_demand["Hydrogen"] = 0 - # AGEB2024 for Liquids demand 25 - transport_demand["Liquids"] = 116323 + 9650 + 1158250 + 702618 - transport_demand = transport_demand.div(3.6e-3) - # FZ27_202504, 202404, table FZ 27.8, - # linear extrapolation to 1. January 2026: "1. January 2025" + ("1. January 2025" - "1. 
January 2024") - # 2 * (1,810,815 + 968,734) - (1,555,265 + 922,876) = 3080957 - # rounded upwards - transport_demand["number_of_cars"] = 3.1 # million, BEV + PHEV + elif year == "2025" and uba_for_mobility: + # https://www.umweltbundesamt.de/sites/default/files/medien/11850/publikationen/projektionsbericht_2025.pdf, Abbildung 64 & 59, + transport_demand = pd.Series( + { + "Electricity": 21, + "Hydrogen": 0.0, + "Liquids": 524 + 51, + "number_of_cars": 2.7 + 1.2, # BEV + PHEV + } + ) + transport_demand["Liquids"] -= non_land_liquids[ + int(year) + ] # remove domestic navigation and aviation + elif year == "2030" and uba_for_mobility: + transport_demand = pd.Series( + { + "Electricity": 57, + "Hydrogen": 14, + "Liquids": 418 + 34 + 1, + "number_of_cars": 8.7 + 1.8, # BEV + PHEV + } + ) + transport_demand["Liquids"] -= non_land_liquids[int(year)] + elif year == "2035" and uba_for_mobility: + transport_demand = pd.Series( + { + "Electricity": 117, + "Hydrogen": 36, + "Liquids": 237 + 26 + 1, + "number_of_cars": 18.9 + 1.8, # BEV + PHEV + } + ) + transport_demand["Liquids"] -= non_land_liquids[int(year)] else: + if uba_for_mobility: + logger.error( + f"Year {year} is not supported for UBA mobility projections. Please use only 2020, 2025, 2030, 2035." + ) + df = db[year].loc[snakemake.params.leitmodelle["transport"]] for fuel in fuels: @@ -79,11 +121,11 @@ def get_transport_data(db, year, ageb_for_transport=False): snakemake = mock_snakemake( "build_mobility_demand", simpl="", - clusters=22, + clusters=27, opts="", ll="vopt", sector_opts="none", - planning_horizons="2020", + planning_horizons="2030", run="KN2045_Mix", ) configure_logging(snakemake) @@ -99,12 +141,37 @@ def get_transport_data(db, year, ageb_for_transport=False): :, ] + energy_totals = ( + pd.read_csv( + snakemake.input.energy_totals, + index_col=[0, 1], + ) + .xs( + snakemake.params.energy_totals_year, + level="year", + ) + .loc["DE"] + ) + + domestic_aviation = energy_totals.loc["total domestic aviation"] * pd.Series( + snakemake.params.aviation_demand_factor + ) + + domestic_navigation = energy_totals.loc["total domestic navigation"] * pd.Series( + snakemake.params.shipping_oil_share + ) + + non_land_liquids = domestic_aviation + domestic_navigation + logger.info( f"Retrieving German mobility demand from {snakemake.params.leitmodelle['transport']} transport model." 
) # get transport_data data transport_data = get_transport_data( - db, snakemake.wildcards.planning_horizons, snakemake.params.ageb_for_transport + db, + snakemake.wildcards.planning_horizons, + non_land_liquids, + uba_for_mobility=snakemake.params.uba_for_mobility, ) # get German mobility weighting From 0cb4015dbab57d77ae706aaff4e2a9d825bd821b Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Mon, 14 Jul 2025 14:24:38 +0200 Subject: [PATCH 011/202] have separate option for the 2020 data --- Snakefile | 1 + config/config.de.yaml | 3 ++- scripts/pypsa-de/build_mobility_demand.py | 11 +++++++---- 3 files changed, 10 insertions(+), 5 deletions(-) diff --git a/Snakefile b/Snakefile index 9def6be84..7db8b5988 100644 --- a/Snakefile +++ b/Snakefile @@ -367,6 +367,7 @@ rule build_mobility_demand: reference_scenario=config_provider("iiasa_database", "reference_scenario"), planning_horizons=config_provider("scenario", "planning_horizons"), leitmodelle=config_provider("iiasa_database", "leitmodelle"), + ageb_for_mobility=config_provider("iiasa_database", "ageb_for_mobility"), uba_for_mobility=config_provider("iiasa_database", "uba_for_mobility"), shipping_oil_share=config_provider("sector", "shipping_oil_share"), aviation_demand_factor=config_provider("sector", "aviation_demand_factor"), diff --git a/config/config.de.yaml b/config/config.de.yaml index 0ba5d2e24..4421ac683 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -43,7 +43,8 @@ iiasa_database: - KN2045_NFhoch reference_scenario: KN2045_Mix region: Deutschland - uba_for_mobility: true # MWMS scenario from Projektionsbericht 2025 + ageb_for_mobility: true # In 2020 use AGEB data for final energy demand and KBA for vehicles + uba_for_mobility: true # For 2025–2035 use MWMS scenario from UBA Projektionsbericht 2025 # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#foresight foresight: myopic diff --git a/scripts/pypsa-de/build_mobility_demand.py b/scripts/pypsa-de/build_mobility_demand.py index 39f27b087..6e5dfc7f6 100644 --- a/scripts/pypsa-de/build_mobility_demand.py +++ b/scripts/pypsa-de/build_mobility_demand.py @@ -11,6 +11,7 @@ def get_transport_data( db, year, non_land_liquids, + ageb_for_mobility=True, uba_for_mobility=False, ): """ @@ -40,10 +41,11 @@ def get_transport_data( transport_demand = transport_demand.div(3.6e-6) # convert PJ to MWh transport_demand["number_of_cars"] = 0.658407 + 0.120261 # BEV + PHEV - if uba_for_mobility: - logger.warning( - "For 2020, using historical AGEB and KBA data instead of UBA projections." - ) + if ageb_for_mobility or uba_for_mobility: + if uba_for_mobility: + logger.warning( + "For 2020, using historical AGEB and KBA data instead of UBA projections." 
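Note: subtracting domestic aviation and navigation avoids double counting, because the UBA liquids figure covers all domestic transport while only land transport is needed here. A sketch with invented totals and factors (in the workflow both totals come from energy_totals.csv, assumed here to be given in TWh/a, and the factors from the year-dependent config entries):

import pandas as pd

total_domestic_aviation = 15.0      # TWh/a (assumed)
total_domestic_navigation = 8.0     # TWh/a (assumed)

aviation_demand_factor = pd.Series({2025: 1.0, 2030: 0.95, 2035: 0.9})  # assumed factors
shipping_oil_share = pd.Series({2025: 1.0, 2030: 0.8, 2035: 0.6})       # assumed shares

non_land_liquids = (
    total_domestic_aviation * aviation_demand_factor
    + total_domestic_navigation * shipping_oil_share
)                                                     # TWh/a per planning horizon

uba_liquids_2030 = 418 + 34 + 1                       # TWh/a, UBA figure cited above
land_transport_liquids_2030 = (uba_liquids_2030 - non_land_liquids[2030]) * 1e6  # MWh/a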
+ ) # AGEB 2020, https://ag-energiebilanzen.de/daten-und-fakten/bilanzen-1990-bis-2030/?_jahresbereich-bilanz=2011-2020 transport_demand = pd.Series( { @@ -171,6 +173,7 @@ def get_transport_data( db, snakemake.wildcards.planning_horizons, non_land_liquids, + ageb_for_mobility=snakemake.params.ageb_for_mobility, uba_for_mobility=snakemake.params.uba_for_mobility, ) From 0ab4ed06b80a3a186b4690d5915e996a038a612b Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Mon, 14 Jul 2025 14:31:21 +0200 Subject: [PATCH 012/202] renaming --- Snakefile | 12 ++++----- config/config.de.yaml | 2 +- scripts/pypsa-de/build_mobility_demand.py | 2 +- scripts/pypsa-de/modify_prenetwork.py | 33 +++++++++++------------ 4 files changed, 24 insertions(+), 25 deletions(-) diff --git a/Snakefile b/Snakefile index 7db8b5988..be4a526d2 100644 --- a/Snakefile +++ b/Snakefile @@ -362,7 +362,7 @@ if config["enable"]["retrieve"] and config["enable"].get("retrieve_cost_data", T ruleorder: modify_cost_data > retrieve_cost_data -rule build_mobility_demand: +rule build_exogenous_mobility_demand: params: reference_scenario=config_provider("iiasa_database", "reference_scenario"), planning_horizons=config_provider("scenario", "planning_horizons"), @@ -378,14 +378,14 @@ rule build_mobility_demand: energy_totals=resources("energy_totals.csv"), output: mobility_demand=resources( - "mobility_demand_aladin_{clusters}_{planning_horizons}.csv" + "modified_mobility_demand_{clusters}_{planning_horizons}.csv" ), resources: mem_mb=1000, log: - logs("build_mobility_demand_{clusters}_{planning_horizons}.log"), + logs("build_exogenous_mobility_demand_{clusters}_{planning_horizons}.log"), script: - "scripts/pypsa-de/build_mobility_demand.py" + "scripts/pypsa-de/build_exogenous_mobility_demand.py" rule build_egon_data: @@ -570,8 +570,8 @@ rule modify_prenetwork: else [] ), costs=resources("costs_{planning_horizons}.csv"), - aladin_demand=resources( - "mobility_demand_aladin_{clusters}_{planning_horizons}.csv" + modified_mobility_demand=resources( + "modified_mobility_demand_{clusters}_{planning_horizons}.csv" ), biomass_potentials=resources( "biomass_potentials_s_{clusters}_{planning_horizons}.csv" diff --git a/config/config.de.yaml b/config/config.de.yaml index 4421ac683..c803394ea 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -335,7 +335,7 @@ sector: 2040: 0.29 2045: 0.36 2050: 0.43 - # The transport_shares are just dummy setting that get overwritten in build_mobility_demand + # The transport_shares are just dummy setting that get overwritten in build_exogenous_mobility_demand land_transport_fuel_cell_share: 2020: 0.01 2025: 0.01 diff --git a/scripts/pypsa-de/build_mobility_demand.py b/scripts/pypsa-de/build_mobility_demand.py index 6e5dfc7f6..46e9e77eb 100644 --- a/scripts/pypsa-de/build_mobility_demand.py +++ b/scripts/pypsa-de/build_mobility_demand.py @@ -121,7 +121,7 @@ def get_transport_data( if __name__ == "__main__": if "snakemake" not in globals(): snakemake = mock_snakemake( - "build_mobility_demand", + "build_exogenous_mobility_demand", simpl="", clusters=27, opts="", diff --git a/scripts/pypsa-de/modify_prenetwork.py b/scripts/pypsa-de/modify_prenetwork.py index da7036d3b..813fdee96 100644 --- a/scripts/pypsa-de/modify_prenetwork.py +++ b/scripts/pypsa-de/modify_prenetwork.py @@ -825,23 +825,22 @@ def must_run(n, params): n.links.loc[links_i, "p_min_pu"] = p_min_pu -def aladin_mobility_demand(n): +def modify_mobility_demand(n): """ - Change loads in Germany to use Aladin data for road demand. 
+ Change loads in Germany to use exogenous data for road demand. """ logger.info( - "Overwriting land transport demand with Aladin data. In particular the `land_transport_electric_share` config setting will not be used." + "Overwriting land transport demand. In particular the `land_transport_electric_share` config setting will not be used." ) - # get aladin data - aladin_demand = pd.read_csv(snakemake.input.aladin_demand, index_col=0) + new_demand = pd.read_csv(snakemake.input.modified_mobility_demand, index_col=0) simulation_period_correction_factor = n.snapshot_weightings.objective.sum() / 8760 # oil demand if "land transport oil" in n.loads.carrier.unique(): # i.e. before 2050 oil_demand = pd.Series( - aladin_demand.Liquids.values * simulation_period_correction_factor, - index=aladin_demand.index + " land transport oil", + new_demand.Liquids.values * simulation_period_correction_factor, + index=new_demand.index + " land transport oil", ) profile = n.loads_t.p_set.loc[:, oil_demand.index] @@ -852,8 +851,8 @@ def aladin_mobility_demand(n): # hydrogen demand h2_demand = pd.Series( - aladin_demand.Hydrogen.values * simulation_period_correction_factor, - index=aladin_demand.index + " land transport fuel cell", + new_demand.Hydrogen.values * simulation_period_correction_factor, + index=new_demand.index + " land transport fuel cell", ) profile = n.loads_t.p_set.loc[:, h2_demand.index] @@ -864,8 +863,8 @@ def aladin_mobility_demand(n): # electricity demand ev_demand = pd.Series( - aladin_demand.Electricity.values * simulation_period_correction_factor, - index=aladin_demand.index + " land transport EV", + new_demand.Electricity.values * simulation_period_correction_factor, + index=new_demand.index + " land transport EV", ) profile = n.loads_t.p_set.loc[:, ev_demand.index] @@ -880,14 +879,14 @@ def aladin_mobility_demand(n): (n.links.carrier == "BEV charger") & (n.links.bus0.str.startswith("DE")) ].index - # Check that buses in network and aladin_data appear in same order + # Check that buses in network and new_demand data appear in same order assert [ - idx.startswith(idx2) for (idx, idx2) in zip(BEV_charger_i, aladin_demand.index) + idx.startswith(idx2) for (idx, idx2) in zip(BEV_charger_i, new_demand.index) ] # Then directly use .values for assignment p_nom = ( - aladin_demand.number_of_cars.values * 1e6 * snakemake.params.bev_charge_rate + new_demand.number_of_cars.values * 1e6 * snakemake.params.bev_charge_rate ) # same logic like in prepare_sector_network n.links.loc[BEV_charger_i, "p_nom"] = p_nom @@ -902,7 +901,7 @@ def aladin_mobility_demand(n): (n.stores.carrier == "EV battery") & (n.stores.bus.str.startswith("DE")) ].index e_nom = ( - aladin_demand.number_of_cars.values + new_demand.number_of_cars.values * 1e6 * snakemake.params.bev_energy * snakemake.params.bev_dsm_availability @@ -1282,7 +1281,7 @@ def scale_capacity(n, scaling): ) configure_logging(snakemake) - logger.info("Adding Ariadne-specific functionality") + logger.info("Adding PyPSA-DE specific functionality") n = pypsa.Network(snakemake.input.network) nhours = n.snapshot_weightings.generators.sum() @@ -1295,7 +1294,7 @@ def scale_capacity(n, scaling): nyears, ) - aladin_mobility_demand(n) + modify_mobility_demand(n) new_boiler_ban(n) From 5b365445f2a748f222c0f7796138fc7172886201 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Mon, 14 Jul 2025 16:23:59 +0200 Subject: [PATCH 013/202] rename script and changelog --- CHANGELOG.md | 2 ++ ...demand.py => build_exogenous_mobility_demand.py} | 13 +++++++++---- 2 files changed, 
11 insertions(+), 4 deletions(-) rename scripts/pypsa-de/{build_mobility_demand.py => build_exogenous_mobility_demand.py} (93%) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6773a5aad..9f2f60faf 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,6 @@ # Changelog +- Added an option to source mobility demand from UBA MWMS (Projektionsbericht 2025) for the years 2025-2035 +- Renamed functions and script for exogenous mobility demand - Improved the transport demand data, added an option to source 2020 and 2025 data from AGEB instead of Aladin - Simplified scenarion definition and made `Mix` the default scenario - 0.3: workflow is all public now, no longer requires credentials to internal data diff --git a/scripts/pypsa-de/build_mobility_demand.py b/scripts/pypsa-de/build_exogenous_mobility_demand.py similarity index 93% rename from scripts/pypsa-de/build_mobility_demand.py rename to scripts/pypsa-de/build_exogenous_mobility_demand.py index 46e9e77eb..cbadef7e5 100644 --- a/scripts/pypsa-de/build_mobility_demand.py +++ b/scripts/pypsa-de/build_exogenous_mobility_demand.py @@ -69,32 +69,37 @@ def get_transport_data( "Electricity": 21, "Hydrogen": 0.0, "Liquids": 524 + 51, - "number_of_cars": 2.7 + 1.2, # BEV + PHEV } ) transport_demand["Liquids"] -= non_land_liquids[ int(year) - ] # remove domestic navigation and aviation + ] # remove domestic navigation and aviation from UBA data to avoid double counting + transport_demand = transport_demand.mul(1e6) # convert TWh to MWh + transport_demand["number_of_cars"] = 2.7 + 1.2 # BEV + PHEV + elif year == "2030" and uba_for_mobility: transport_demand = pd.Series( { "Electricity": 57, "Hydrogen": 14, "Liquids": 418 + 34 + 1, - "number_of_cars": 8.7 + 1.8, # BEV + PHEV } ) transport_demand["Liquids"] -= non_land_liquids[int(year)] + transport_demand = transport_demand.mul(1e6) + transport_demand["number_of_cars"] = 8.7 + 1.8 + elif year == "2035" and uba_for_mobility: transport_demand = pd.Series( { "Electricity": 117, "Hydrogen": 36, "Liquids": 237 + 26 + 1, - "number_of_cars": 18.9 + 1.8, # BEV + PHEV } ) transport_demand["Liquids"] -= non_land_liquids[int(year)] + transport_demand = transport_demand.mul(1e6) + transport_demand["number_of_cars"] = 18.9 + 1.8 else: if uba_for_mobility: From c85efb46853d97da4af1900850fff53254fa4494 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Mon, 14 Jul 2025 16:28:11 +0200 Subject: [PATCH 014/202] rename scripts --- Snakefile | 8 ++++---- ...y_industry_demand.py => modify_industry_production.py} | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) rename scripts/pypsa-de/{modify_industry_demand.py => modify_industry_production.py} (96%) diff --git a/Snakefile b/Snakefile index be4a526d2..6b55c5073 100644 --- a/Snakefile +++ b/Snakefile @@ -599,7 +599,7 @@ rule modify_prenetwork: "scripts/pypsa-de/modify_prenetwork.py" -ruleorder: modify_industry_demand > build_industrial_production_per_country_tomorrow +ruleorder: modify_industry_production > build_industrial_production_per_country_tomorrow rule modify_existing_heating: @@ -660,7 +660,7 @@ rule build_existing_chp_de: "scripts/pypsa-de/build_existing_chp_de.py" -rule modify_industry_demand: +rule modify_industry_production: params: reference_scenario=config_provider("iiasa_database", "reference_scenario"), input: @@ -675,9 +675,9 @@ rule modify_industry_demand: resources: mem_mb=1000, log: - logs("modify_industry_demand_{planning_horizons}.log"), + logs("modify_industry_production_{planning_horizons}.log"), script: - 
"scripts/pypsa-de/modify_industry_demand.py" + "scripts/pypsa-de/modify_industry_production.py" rule build_wasserstoff_kernnetz: diff --git a/scripts/pypsa-de/modify_industry_demand.py b/scripts/pypsa-de/modify_industry_production.py similarity index 96% rename from scripts/pypsa-de/modify_industry_demand.py rename to scripts/pypsa-de/modify_industry_production.py index 6494ce8c2..00771eb56 100644 --- a/scripts/pypsa-de/modify_industry_demand.py +++ b/scripts/pypsa-de/modify_industry_production.py @@ -23,7 +23,7 @@ if __name__ == "__main__": if "snakemake" not in globals(): snakemake = mock_snakemake( - "modify_industry_demand", + "modify_industry_production", simpl="", clusters=22, opts="", @@ -155,6 +155,6 @@ ], ) - existing_industry.to_csv( - snakemake.output.industrial_production_per_country_tomorrow - ) + # existing_industry.to_csv( + # snakemake.output.industrial_production_per_country_tomorrow + # ) From 81d51304743d32719b72dfa48e7763ccc0d6b67a Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Mon, 14 Jul 2025 18:15:30 +0200 Subject: [PATCH 015/202] first stab at industry demand modification --- Snakefile | 6 ++ scripts/pypsa-de/modify_prenetwork.py | 82 ++++++++++++++++++++++++++- 2 files changed, 87 insertions(+), 1 deletion(-) diff --git a/Snakefile b/Snakefile index 6b55c5073..f1f97c3ab 100644 --- a/Snakefile +++ b/Snakefile @@ -579,6 +579,12 @@ rule modify_prenetwork: industrial_demand=resources( "industrial_energy_demand_base_s_{clusters}_{planning_horizons}.csv" ), + industrial_production_per_country_tomorrow=resources( + "industrial_production_per_country_tomorrow_{planning_horizons}-modified.csv" + ), + industry_sector_ratios=resources( + "industry_sector_ratios_{planning_horizons}.csv" + ), pop_weighted_energy_totals=resources( "pop_weighted_energy_totals_s_{clusters}.csv" ), diff --git a/scripts/pypsa-de/modify_prenetwork.py b/scripts/pypsa-de/modify_prenetwork.py index 813fdee96..ac595fe58 100644 --- a/scripts/pypsa-de/modify_prenetwork.py +++ b/scripts/pypsa-de/modify_prenetwork.py @@ -1267,6 +1267,86 @@ def scale_capacity(n, scaling): ] +def modify_industry_demand(n, industry_production_file, sector_ratios_file): + sector_ratios = pd.read_csv( + snakemake.input.industry_sector_ratios, + header=[0, 1], + index_col=0, + ).rename_axis("carrier") + industry_production = pd.read_csv( + snakemake.input.industrial_production_per_country_tomorrow, + index_col="kton/a", + ).rename_axis("country") + + subcategories = ["HVC", "Methanol", "Chlorine", "Ammonia"] + carrier = ["hydrogen", "methane", "naphtha"] + + ip = industry_production.loc["DE", subcategories] # kt/a + sr = sector_ratios["DE"].loc[carrier, subcategories] # MWh/tMaterial + non_energy = sr.multiply(ip).sum(axis=1) * 1e3 + + # MWMS data 2025 + uba_data = pd.Series( + {"fossil": 324, "electricity": 211, "biomass": 30, "hydrogen": 0, "heat": 48} + ).mul(1e6) + # TODO make sure this works for periods of all lengths + # TODO what happens if load - non_energy < 0? + + uba_industry_without_non_energy = uba_data.sum() + _industry_loads = [ + "solid biomass for industry", + "gas for industry", + "H2 for industry", + "industry methanol", + "naphtha for industry", + "low-temperature heat for industry", + "industry electricity", + "coal for industry", + ] + industry_loads = n.loads.query( + f"carrier in {_industry_loads} and bus.str.startswith('DE')" + ) + pypsa_industry_without_non_energy = ( + industry_loads.p_set.sum() * 8760 - non_energy.sum() + ) # MWh/a + # TODO Should we scale the non-energy use with this factor? 
+ non_energy_scaling_factor = ( + uba_industry_without_non_energy / pypsa_industry_without_non_energy + ) + print( + f"Notused at the moment: Scaling factor for non-energy use: {non_energy_scaling_factor:.2f}" + ) + + # H2 + h2_loads = n.loads.query( + "carrier.str.contains('H2 for industry') and bus.str.startswith('DE')" + ) + total_h2 = h2_loads.p_set.values.sum() * 8760 + scaling_factor_h2 = (non_energy["hydrogen"] + uba_data["hydrogen"]) / total_h2 + + n.loads.loc[h2_loads.index, "p_set"] *= scaling_factor_h2 + + # electricity + electricity_loads = n.loads.query( + "carrier.str.contains('industry electricity') and bus.str.startswith('DE')" + ) + total_electricity = electricity_loads.p_set.values.sum() * 8760 + scaling_factor_electricity = uba_data["electricity"] / total_electricity + + n.loads.loc[electricity_loads.index, "p_set"] *= scaling_factor_electricity + + # fossil fuels + fossil_loads = n.loads.query( + "carrier.str.contains('gas for industry|coal for industry|naphtha for industry') and bus.str.startswith('DE')" + ) + total_fossil = fossil_loads.p_set.values.sum() * 8760 + scaling_factor_fossil = ( + non_energy["methane"] + non_energy["naphtha"] + uba_data["fossil"] + ) / total_fossil + n.loads.loc[fossil_loads.index, "p_set"] *= scaling_factor_fossil + # TODO this will have to be done separately for coal, gas and oil + + if __name__ == "__main__": if "snakemake" not in globals(): snakemake = mock_snakemake( @@ -1349,4 +1429,4 @@ def scale_capacity(n, scaling): sanitize_custom_columns(n) - n.export_to_netcdf(snakemake.output.network) + # n.export_to_netcdf(snakemake.output.network) From 921505dbf8fe9161ec54634175f5fb859b663f70 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 16 Jul 2025 11:55:23 +0200 Subject: [PATCH 016/202] modify industry demand should be working now --- Snakefile | 5 + ...rojektionsbericht2025_Abbildung31_MWMS.csv | 6 + config/config.de.yaml | 3 +- scripts/pypsa-de/modify_prenetwork.py | 144 ++++++++++++------ 4 files changed, 107 insertions(+), 51 deletions(-) create mode 100644 ariadne-data/UBA_Projektionsbericht2025_Abbildung31_MWMS.csv diff --git a/Snakefile b/Snakefile index f1f97c3ab..fff8b8e3f 100644 --- a/Snakefile +++ b/Snakefile @@ -559,6 +559,10 @@ rule modify_prenetwork: bev_charge_rate=config_provider("sector", "bev_charge_rate"), bev_energy=config_provider("sector", "bev_energy"), bev_dsm_availability=config_provider("sector", "bev_dsm_availability"), + uba_for_industry=config_provider("iiasa_database", "uba_for_industry"), + scale_industry_non_energy=config_provider( + "iiasa_database", "scale_industry_non_energy" + ), input: costs_modifications="ariadne-data/costs_{planning_horizons}-modifications.csv", network=resources( @@ -592,6 +596,7 @@ rule modify_prenetwork: regions_onshore=resources("regions_onshore_base_s_{clusters}.geojson"), regions_offshore=resources("regions_offshore_base_s_{clusters}.geojson"), offshore_connection_points="ariadne-data/offshore_connection_points.csv", + new_industrial_energy_demand="ariadne-data/UBA_Projektionsbericht2025_Abbildung31_MWMS.csv", output: network=resources( "networks/base_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}_final.nc" diff --git a/ariadne-data/UBA_Projektionsbericht2025_Abbildung31_MWMS.csv b/ariadne-data/UBA_Projektionsbericht2025_Abbildung31_MWMS.csv new file mode 100644 index 000000000..9f504877c --- /dev/null +++ b/ariadne-data/UBA_Projektionsbericht2025_Abbildung31_MWMS.csv @@ -0,0 +1,6 @@ +carrier,2025,2030,2035 +fossil,324,258,191 +industry electricity,211,234,249 
+solid biomass for industry,31,35,31 +H2 for industry,0,6,42 +low+AC0-temperature heat for industry,48,59,63 diff --git a/config/config.de.yaml b/config/config.de.yaml index c803394ea..d957e0581 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -45,7 +45,8 @@ iiasa_database: region: Deutschland ageb_for_mobility: true # In 2020 use AGEB data for final energy demand and KBA for vehicles uba_for_mobility: true # For 2025–2035 use MWMS scenario from UBA Projektionsbericht 2025 - + uba_for_industry: true # For 2025–2035 use MWMS scenario from UBA Projektionsbericht 2025 + scale_industry_non_energy: false # Scale non-energy industry demand directly proportional to energy demand # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#foresight foresight: myopic diff --git a/scripts/pypsa-de/modify_prenetwork.py b/scripts/pypsa-de/modify_prenetwork.py index ac595fe58..ae35f9261 100644 --- a/scripts/pypsa-de/modify_prenetwork.py +++ b/scripts/pypsa-de/modify_prenetwork.py @@ -1267,32 +1267,51 @@ def scale_capacity(n, scaling): ] -def modify_industry_demand(n, industry_production_file, sector_ratios_file): +def modify_industry_demand( + n, + year, + industry_energy_demand_file, + industry_production_file, + sector_ratios_file, + scale_non_energy=False, +): + logger.info("Modifying industry demand in Germany.") + + industry_production = pd.read_csv( + industry_production_file, + index_col="kton/a", + ).rename_axis("country") + sector_ratios = pd.read_csv( - snakemake.input.industry_sector_ratios, + sector_ratios_file, header=[0, 1], index_col=0, ).rename_axis("carrier") - industry_production = pd.read_csv( - snakemake.input.industrial_production_per_country_tomorrow, - index_col="kton/a", - ).rename_axis("country") + + new_demand = pd.read_csv( + industry_energy_demand_file, + index_col=0, + )[str(year)].mul(1e6) subcategories = ["HVC", "Methanol", "Chlorine", "Ammonia"] carrier = ["hydrogen", "methane", "naphtha"] ip = industry_production.loc["DE", subcategories] # kt/a sr = sector_ratios["DE"].loc[carrier, subcategories] # MWh/tMaterial - non_energy = sr.multiply(ip).sum(axis=1) * 1e3 - - # MWMS data 2025 - uba_data = pd.Series( - {"fossil": 324, "electricity": 211, "biomass": 30, "hydrogen": 0, "heat": 48} - ).mul(1e6) - # TODO make sure this works for periods of all lengths - # TODO what happens if load - non_energy < 0? + _non_energy = sr.multiply(ip).sum(axis=1) * 1e3 + + non_energy = pd.Series( + { + "industry electricity": 0.0, + "low-temperature heat for industry": 0.0, + "solid biomass for industry": 0.0, + "H2 for industry": _non_energy["hydrogen"], + "coal for industry": 0.0, + "gas for industry": _non_energy["methane"], + "naphtha for industry": _non_energy["naphtha"], + } + ) - uba_industry_without_non_energy = uba_data.sum() _industry_loads = [ "solid biomass for industry", "gas for industry", @@ -1306,45 +1325,60 @@ def modify_industry_demand(n, industry_production_file, sector_ratios_file): industry_loads = n.loads.query( f"carrier in {_industry_loads} and bus.str.startswith('DE')" ) - pypsa_industry_without_non_energy = ( - industry_loads.p_set.sum() * 8760 - non_energy.sum() - ) # MWh/a - # TODO Should we scale the non-energy use with this factor? 
- non_energy_scaling_factor = ( - uba_industry_without_non_energy / pypsa_industry_without_non_energy - ) - print( - f"Notused at the moment: Scaling factor for non-energy use: {non_energy_scaling_factor:.2f}" - ) - # H2 - h2_loads = n.loads.query( - "carrier.str.contains('H2 for industry') and bus.str.startswith('DE')" - ) - total_h2 = h2_loads.p_set.values.sum() * 8760 - scaling_factor_h2 = (non_energy["hydrogen"] + uba_data["hydrogen"]) / total_h2 + if scale_non_energy: + new_demand_without_non_energy = new_demand.sum() + pypsa_industry_without_non_energy = ( + industry_loads.p_set.sum() * 8760 - non_energy.sum() + ) + non_energy_scaling_factor = ( + new_demand_without_non_energy / pypsa_industry_without_non_energy + ) + logger.info( + f"Scaling non-energy use by {non_energy_scaling_factor:.2f} to match UBA data." + ) + non_energy_corrected = non_energy * non_energy_scaling_factor + else: + non_energy_corrected = non_energy - n.loads.loc[h2_loads.index, "p_set"] *= scaling_factor_h2 + for carrier in [ + "industry electricity", + "H2 for industry", + "solid biomass for industry", + "low-temperature heat for industry", + ]: + loads_i = n.loads.query( + f"carrier == '{carrier}' and bus.str.startswith('DE')" + ).index + logger.info( + f"Total load of {carrier} in DE before scaling: {n.loads.loc[loads_i, 'p_set'].sum() * 8760:.2f} MWh/a" + ) + total_load = industry_loads.p_set.loc[loads_i].sum() * 8760 + scaling_factor = ( + new_demand[carrier] + non_energy_corrected[carrier] + ) / total_load + n.loads.loc[loads_i, "p_set"] *= scaling_factor + logger.info( + f"Total load of {carrier} in DE after scaling: {n.loads.loc[loads_i, 'p_set'].sum() * 8760:.2f} MWh/a" + ) - # electricity - electricity_loads = n.loads.query( - "carrier.str.contains('industry electricity') and bus.str.startswith('DE')" + # Fossil fuels are aggregated in UBA MWMS but have to be scaled separately + fossil_loads = industry_loads.query("carrier.str.contains('gas|coal|naphtha')") + fossil_totals = ( + fossil_loads[["p_set", "carrier"]].groupby("carrier").p_set.sum() * 8760 ) - total_electricity = electricity_loads.p_set.values.sum() * 8760 - scaling_factor_electricity = uba_data["electricity"] / total_electricity - - n.loads.loc[electricity_loads.index, "p_set"] *= scaling_factor_electricity - - # fossil fuels - fossil_loads = n.loads.query( - "carrier.str.contains('gas for industry|coal for industry|naphtha for industry') and bus.str.startswith('DE')" + fossil_energy = fossil_totals - non_energy[fossil_totals.index] + fossil_energy_corrected = fossil_energy * new_demand["fossil"] / fossil_energy.sum() + fossil_totals_corrected = ( + fossil_energy_corrected + non_energy_corrected[fossil_totals.index] ) - total_fossil = fossil_loads.p_set.values.sum() * 8760 - scaling_factor_fossil = ( - non_energy["methane"] + non_energy["naphtha"] + uba_data["fossil"] - ) / total_fossil - n.loads.loc[fossil_loads.index, "p_set"] *= scaling_factor_fossil - # TODO this will have to be done separately for coal, gas and oil + for carrier in fossil_totals.index: + loads_i = fossil_loads.query( + f"carrier == '{carrier}' and bus.str.startswith('DE')" + ).index + n.loads.loc[loads_i, "p_set"] *= ( + fossil_totals_corrected[carrier] / fossil_totals[carrier] + ) if __name__ == "__main__": @@ -1429,4 +1463,14 @@ def modify_industry_demand(n, industry_production_file, sector_ratios_file): sanitize_custom_columns(n) - # n.export_to_netcdf(snakemake.output.network) + if snakemake.params.uba_for_industry: + modify_industry_demand( + n, + current_year, + 
snakemake.input.new_industrial_energy_demand, + snakemake.input.industrial_production_per_country_tomorrow, + snakemake.input.industry_sector_ratios, + scale_non_energy=snakemake.params.scale_industry_non_energy, + ) + + n.export_to_netcdf(snakemake.output.network) From eca7802dbb8581677095ae02e07b7ec82bb4e56e Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 16 Jul 2025 12:02:25 +0200 Subject: [PATCH 017/202] rename scenario, fix comment --- config/config.de.yaml | 2 +- scripts/pypsa-de/modify_industry_production.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/config/config.de.yaml b/config/config.de.yaml index d957e0581..13bd51694 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -4,7 +4,7 @@ # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#run run: - prefix: 20250707_improve_transport_demand + prefix: 20250716_improve_industry_demand name: # - ExPol - KN2045_Mix diff --git a/scripts/pypsa-de/modify_industry_production.py b/scripts/pypsa-de/modify_industry_production.py index 00771eb56..a473210d5 100644 --- a/scripts/pypsa-de/modify_industry_production.py +++ b/scripts/pypsa-de/modify_industry_production.py @@ -155,6 +155,6 @@ ], ) - # existing_industry.to_csv( - # snakemake.output.industrial_production_per_country_tomorrow - # ) + existing_industry.to_csv( + snakemake.output.industrial_production_per_country_tomorrow + ) From bb978a916763404d49166b08730700079a7b0a46 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 16 Jul 2025 13:31:33 +0200 Subject: [PATCH 018/202] add index name --- ariadne-data/UBA_Projektionsbericht2025_Abbildung31_MWMS.csv | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ariadne-data/UBA_Projektionsbericht2025_Abbildung31_MWMS.csv b/ariadne-data/UBA_Projektionsbericht2025_Abbildung31_MWMS.csv index 9f504877c..417fd22c4 100644 --- a/ariadne-data/UBA_Projektionsbericht2025_Abbildung31_MWMS.csv +++ b/ariadne-data/UBA_Projektionsbericht2025_Abbildung31_MWMS.csv @@ -3,4 +3,4 @@ fossil,324,258,191 industry electricity,211,234,249 solid biomass for industry,31,35,31 H2 for industry,0,6,42 -low+AC0-temperature heat for industry,48,59,63 +low-temperature heat for industry,48,59,63 From 1f36395c3cac031b491a418fb6388cbe30b59745 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 16 Jul 2025 13:31:47 +0200 Subject: [PATCH 019/202] add error if using uba data after 2040 --- scripts/pypsa-de/modify_prenetwork.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/scripts/pypsa-de/modify_prenetwork.py b/scripts/pypsa-de/modify_prenetwork.py index ae35f9261..5fced02ed 100644 --- a/scripts/pypsa-de/modify_prenetwork.py +++ b/scripts/pypsa-de/modify_prenetwork.py @@ -1463,7 +1463,11 @@ def modify_industry_demand( sanitize_custom_columns(n) - if snakemake.params.uba_for_industry: + if snakemake.params.uba_for_industry and current_year >= 2025: + if current_year >= 2040: + logger.error( + "The UBA for industry data is only available for 2025, 2030 and 2035. Please check your config." 
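Note: the scaling above keeps the non-energy (feedstock) part of each carrier fixed and rescales only the energetic part so that the fossil carriers jointly hit the aggregate UBA target; the feedstock itself comes from production (kt/a) times sector ratios (MWh/t) times 1e3. A compact stand-alone illustration with invented numbers:

import pandas as pd

# feedstock from production and sector ratios, e.g. 2000 kt/a at 3.5 MWh/t
feedstock_example = 2_000 * 3.5 * 1e3               # = 7.0e6 MWh/a (illustrative only)

current_totals = pd.Series(                          # MWh/a per carrier in the network (assumed)
    {"gas for industry": 120e6, "coal for industry": 30e6, "naphtha for industry": 150e6}
)
non_energy = pd.Series(                              # feedstock share, kept fixed (assumed)
    {"gas for industry": 20e6, "coal for industry": 0.0, "naphtha for industry": 90e6}
)
fossil_target = 180e6                                # aggregate energetic target (assumed)

energy_part = current_totals - non_energy
energy_scaled = energy_part * fossil_target / energy_part.sum()
new_totals = energy_scaled + non_energy

scaling_factors = new_totals / current_totals        # factors applied to each load's p_set
assert abs((new_totals - non_energy).sum() - fossil_target) < 1e-3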
+ ) modify_industry_demand( n, current_year, From adcbc17f1472adbb55911f8bb7d2aad7284abd61 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 16 Jul 2025 13:32:54 +0200 Subject: [PATCH 020/202] add to changelog --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9f2f60faf..f8369793c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,6 @@ # Changelog +- Added an option to source industry energy demand from UBA MWMS (Projektionsbericht 2025) for the years 2025-2035 +- renamed some scripts - Added an option to source mobility demand from UBA MWMS (Projektionsbericht 2025) for the years 2025-2035 - Renamed functions and script for exogenous mobility demand - Improved the transport demand data, added an option to source 2020 and 2025 data from AGEB instead of Aladin From c0bce54e66173872f8d1b58723e2d77e56dea8a1 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 16 Jul 2025 13:49:55 +0200 Subject: [PATCH 021/202] transporte_shares matter outside of Germany! --- config/config.de.yaml | 36 ++++++++++++++++++------------------ 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/config/config.de.yaml b/config/config.de.yaml index c803394ea..0f55725f7 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -335,31 +335,31 @@ sector: 2040: 0.29 2045: 0.36 2050: 0.43 - # The transport_shares are just dummy setting that get overwritten in build_exogenous_mobility_demand + # For Germany these settings get overwritten in build_mobility_demand land_transport_fuel_cell_share: 2020: 0.01 2025: 0.01 - 2030: 0.01 - 2035: 0.01 - 2040: 0.01 - 2045: 0.01 - 2050: 0.01 + 2030: 0.02 + 2035: 0.03 + 2040: 0.03 + 2045: 0.03 + 2050: 0.03 land_transport_electric_share: 2020: 0.04 - 2025: 0.04 - 2030: 0.04 - 2035: 0.04 - 2040: 0.04 - 2045: 0.04 - 2050: 0.04 + 2025: 0.10 + 2030: 0.3 + 2035: 0.45 + 2040: 0.72 + 2045: 0.87 + 2050: 0.97 land_transport_ice_share: 2020: 0.95 - 2025: 0.95 - 2030: 0.95 - 2035: 0.95 - 2040: 0.95 - 2045: 0.95 - 2050: 0.95 + 2025: 0.89 + 2030: 0.68 + 2035: 0.52 + 2040: 0.25 + 2045: 0.1 + 2050: 0.0 # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#industry industry: From 451f59b5ac22cb31ccc12f4779400fe7042292d3 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 16 Jul 2025 13:56:01 +0200 Subject: [PATCH 022/202] rename mobility_demand -> mobility_data --- Snakefile | 14 +++++++------- config/config.de.yaml | 2 +- .../pypsa-de/build_exogenous_mobility_demand.py | 6 +++--- scripts/pypsa-de/modify_prenetwork.py | 2 +- 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/Snakefile b/Snakefile index be4a526d2..691fe5222 100644 --- a/Snakefile +++ b/Snakefile @@ -362,7 +362,7 @@ if config["enable"]["retrieve"] and config["enable"].get("retrieve_cost_data", T ruleorder: modify_cost_data > retrieve_cost_data -rule build_exogenous_mobility_demand: +rule build_exogenous_mobility_data: params: reference_scenario=config_provider("iiasa_database", "reference_scenario"), planning_horizons=config_provider("scenario", "planning_horizons"), @@ -377,15 +377,15 @@ rule build_exogenous_mobility_demand: clustered_pop_layout=resources("pop_layout_base_s_{clusters}.csv"), energy_totals=resources("energy_totals.csv"), output: - mobility_demand=resources( - "modified_mobility_demand_{clusters}_{planning_horizons}.csv" + mobility_data=resources( + "modified_mobility_data_{clusters}_{planning_horizons}.csv" ), resources: mem_mb=1000, log: - 
logs("build_exogenous_mobility_demand_{clusters}_{planning_horizons}.log"), + logs("build_exogenous_mobility_data_{clusters}_{planning_horizons}.log"), script: - "scripts/pypsa-de/build_exogenous_mobility_demand.py" + "scripts/pypsa-de/build_exogenous_mobility_data.py" rule build_egon_data: @@ -570,8 +570,8 @@ rule modify_prenetwork: else [] ), costs=resources("costs_{planning_horizons}.csv"), - modified_mobility_demand=resources( - "modified_mobility_demand_{clusters}_{planning_horizons}.csv" + modified_mobility_data=resources( + "modified_mobility_data_{clusters}_{planning_horizons}.csv" ), biomass_potentials=resources( "biomass_potentials_s_{clusters}_{planning_horizons}.csv" diff --git a/config/config.de.yaml b/config/config.de.yaml index 0f55725f7..df93e81d3 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -335,7 +335,7 @@ sector: 2040: 0.29 2045: 0.36 2050: 0.43 - # For Germany these settings get overwritten in build_mobility_demand + # For Germany these settings get overwritten in build_exogenous_mobility_data land_transport_fuel_cell_share: 2020: 0.01 2025: 0.01 diff --git a/scripts/pypsa-de/build_exogenous_mobility_demand.py b/scripts/pypsa-de/build_exogenous_mobility_demand.py index cbadef7e5..38bba5a9a 100644 --- a/scripts/pypsa-de/build_exogenous_mobility_demand.py +++ b/scripts/pypsa-de/build_exogenous_mobility_demand.py @@ -126,7 +126,7 @@ def get_transport_data( if __name__ == "__main__": if "snakemake" not in globals(): snakemake = mock_snakemake( - "build_exogenous_mobility_demand", + "build_exogenous_mobility_data", simpl="", clusters=27, opts="", @@ -187,10 +187,10 @@ def get_transport_data( # only get German data pop_layout = pop_layout[pop_layout.ct == "DE"].fraction - mobility_demand = pd.DataFrame( + mobility_data = pd.DataFrame( pop_layout.values[:, None] * transport_data.values, index=pop_layout.index, columns=transport_data.index, ) - mobility_demand.to_csv(snakemake.output.mobility_demand) + mobility_data.to_csv(snakemake.output.mobility_data) diff --git a/scripts/pypsa-de/modify_prenetwork.py b/scripts/pypsa-de/modify_prenetwork.py index 813fdee96..1dc2fbfad 100644 --- a/scripts/pypsa-de/modify_prenetwork.py +++ b/scripts/pypsa-de/modify_prenetwork.py @@ -832,7 +832,7 @@ def modify_mobility_demand(n): logger.info( "Overwriting land transport demand. In particular the `land_transport_electric_share` config setting will not be used." ) - new_demand = pd.read_csv(snakemake.input.modified_mobility_demand, index_col=0) + new_demand = pd.read_csv(snakemake.input.modified_mobility_data, index_col=0) simulation_period_correction_factor = n.snapshot_weightings.objective.sum() / 8760 From 00ce2746b4a50b4cdf1c0ec335a1e8f6ccad58d8 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 16 Jul 2025 14:02:15 +0200 Subject: [PATCH 023/202] more renaming --- .../build_exogenous_mobility_demand.py | 58 +++++++++---------- scripts/pypsa-de/modify_prenetwork.py | 12 ++-- 2 files changed, 36 insertions(+), 34 deletions(-) diff --git a/scripts/pypsa-de/build_exogenous_mobility_demand.py b/scripts/pypsa-de/build_exogenous_mobility_demand.py index 38bba5a9a..b24a9d3e7 100644 --- a/scripts/pypsa-de/build_exogenous_mobility_demand.py +++ b/scripts/pypsa-de/build_exogenous_mobility_demand.py @@ -7,7 +7,7 @@ logger = logging.getLogger(__name__) -def get_transport_data( +def get_mobility_data( db, year, non_land_liquids, @@ -15,7 +15,7 @@ def get_transport_data( uba_for_mobility=False, ): """ - Retrieve the German mobility demand from the transport_data model. 
+ Retrieve the German mobility demand from the transport model. Sum over the subsectors Bus, LDV, Rail, and Truck for the fuels electricity, hydrogen, and synthetic fuels. @@ -23,14 +23,14 @@ def get_transport_data( subsectors = ["Bus", "LDV", "Rail", "Truck"] fuels = ["Electricity", "Hydrogen", "Liquids"] - transport_demand = pd.Series(0.0, index=fuels) + mobility_data = pd.Series(0.0, index=fuels) if year == "2020": logger.info( "For 2020, using hard-coded transport data from the Ariadne2-internal database." ) - transport_demand = pd.Series( + mobility_data = pd.Series( { "Electricity": 0.0 + 17.0 + 35.82 + 0.0, "Hydrogen": 0.0 + 0.0 + 0.0 + 0.0, @@ -38,8 +38,8 @@ def get_transport_data( } ) - transport_demand = transport_demand.div(3.6e-6) # convert PJ to MWh - transport_demand["number_of_cars"] = 0.658407 + 0.120261 # BEV + PHEV + mobility_data = mobility_data.div(3.6e-6) # convert PJ to MWh + mobility_data["million_evs"] = 0.658407 + 0.120261 # BEV + PHEV if ageb_for_mobility or uba_for_mobility: if uba_for_mobility: @@ -47,7 +47,7 @@ def get_transport_data( "For 2020, using historical AGEB and KBA data instead of UBA projections." ) # AGEB 2020, https://ag-energiebilanzen.de/daten-und-fakten/bilanzen-1990-bis-2030/?_jahresbereich-bilanz=2011-2020 - transport_demand = pd.Series( + mobility_data = pd.Series( { "Electricity": 39129 + 2394, # Schiene + Straße "Hydrogen": 0, @@ -57,49 +57,49 @@ def get_transport_data( + 638820, # Bio Strasse + Diesel Strasse + Diesel Schiene + Otto Strasse } ) - transport_demand = transport_demand.div(3.6e-3) # convert PJ to MWH + mobility_data = mobility_data.div(3.6e-3) # convert PJ to MWH # https://www.kba.de/DE/Statistik/Produktkatalog/produkte/Fahrzeuge/fz27_b_uebersicht.html # FZ27_202101, table FZ 27.2, 1. January 2021: - transport_demand["number_of_cars"] = 0.358498 + 0.280149 + mobility_data["million_evs"] = 0.358498 + 0.280149 elif year == "2025" and uba_for_mobility: # https://www.umweltbundesamt.de/sites/default/files/medien/11850/publikationen/projektionsbericht_2025.pdf, Abbildung 64 & 59, - transport_demand = pd.Series( + mobility_data = pd.Series( { "Electricity": 21, "Hydrogen": 0.0, "Liquids": 524 + 51, } ) - transport_demand["Liquids"] -= non_land_liquids[ + mobility_data["Liquids"] -= non_land_liquids[ int(year) ] # remove domestic navigation and aviation from UBA data to avoid double counting - transport_demand = transport_demand.mul(1e6) # convert TWh to MWh - transport_demand["number_of_cars"] = 2.7 + 1.2 # BEV + PHEV + mobility_data = mobility_data.mul(1e6) # convert TWh to MWh + mobility_data["million_evs"] = 2.7 + 1.2 # BEV + PHEV elif year == "2030" and uba_for_mobility: - transport_demand = pd.Series( + mobility_data = pd.Series( { "Electricity": 57, "Hydrogen": 14, "Liquids": 418 + 34 + 1, } ) - transport_demand["Liquids"] -= non_land_liquids[int(year)] - transport_demand = transport_demand.mul(1e6) - transport_demand["number_of_cars"] = 8.7 + 1.8 + mobility_data["Liquids"] -= non_land_liquids[int(year)] + mobility_data = mobility_data.mul(1e6) + mobility_data["million_evs"] = 8.7 + 1.8 elif year == "2035" and uba_for_mobility: - transport_demand = pd.Series( + mobility_data = pd.Series( { "Electricity": 117, "Hydrogen": 36, "Liquids": 237 + 26 + 1, } ) - transport_demand["Liquids"] -= non_land_liquids[int(year)] - transport_demand = transport_demand.mul(1e6) - transport_demand["number_of_cars"] = 18.9 + 1.8 + mobility_data["Liquids"] -= non_land_liquids[int(year)] + mobility_data = mobility_data.mul(1e6) + 
mobility_data["million_evs"] = 18.9 + 1.8 else: if uba_for_mobility: @@ -112,15 +112,15 @@ def get_transport_data( for fuel in fuels: for subsector in subsectors: key = f"Final Energy|Transportation|{subsector}|{fuel}" - transport_demand.loc[fuel] += df.get((key, "TWh/yr"), 0.0) + mobility_data.loc[fuel] += df.get((key, "TWh/yr"), 0.0) - transport_demand = transport_demand.mul(1e6) # convert TWh to MWh - transport_demand["number_of_cars"] = ( + mobility_data = mobility_data.mul(1e6) # convert TWh to MWh + mobility_data["million_evs"] = ( df.loc["Stock|Transportation|LDV|BEV", "million"] + df.loc["Stock|Transportation|LDV|PHEV", "million"] ) - return transport_demand + return mobility_data if __name__ == "__main__": @@ -173,8 +173,8 @@ def get_transport_data( logger.info( f"Retrieving German mobility demand from {snakemake.params.leitmodelle['transport']} transport model." ) - # get transport_data data - transport_data = get_transport_data( + # get mobility_data data + mobility_data = get_mobility_data( db, snakemake.wildcards.planning_horizons, non_land_liquids, @@ -188,9 +188,9 @@ def get_transport_data( pop_layout = pop_layout[pop_layout.ct == "DE"].fraction mobility_data = pd.DataFrame( - pop_layout.values[:, None] * transport_data.values, + pop_layout.values[:, None] * mobility_data.values, index=pop_layout.index, - columns=transport_data.index, + columns=mobility_data.index, ) mobility_data.to_csv(snakemake.output.mobility_data) diff --git a/scripts/pypsa-de/modify_prenetwork.py b/scripts/pypsa-de/modify_prenetwork.py index 1dc2fbfad..c9829b273 100644 --- a/scripts/pypsa-de/modify_prenetwork.py +++ b/scripts/pypsa-de/modify_prenetwork.py @@ -825,14 +825,16 @@ def must_run(n, params): n.links.loc[links_i, "p_min_pu"] = p_min_pu -def modify_mobility_demand(n): +def modify_mobility_demand(n, mobility_data_file): """ Change loads in Germany to use exogenous data for road demand. + + The mobility_data contains the """ logger.info( "Overwriting land transport demand. In particular the `land_transport_electric_share` config setting will not be used." 
) - new_demand = pd.read_csv(snakemake.input.modified_mobility_data, index_col=0) + new_demand = pd.read_csv(mobility_data_file, index_col=0) simulation_period_correction_factor = n.snapshot_weightings.objective.sum() / 8760 @@ -886,7 +888,7 @@ def modify_mobility_demand(n): # Then directly use .values for assignment p_nom = ( - new_demand.number_of_cars.values * 1e6 * snakemake.params.bev_charge_rate + new_demand.million_evs.values * 1e6 * snakemake.params.bev_charge_rate ) # same logic like in prepare_sector_network n.links.loc[BEV_charger_i, "p_nom"] = p_nom @@ -901,7 +903,7 @@ def modify_mobility_demand(n): (n.stores.carrier == "EV battery") & (n.stores.bus.str.startswith("DE")) ].index e_nom = ( - new_demand.number_of_cars.values + new_demand.million_evs.values * 1e6 * snakemake.params.bev_energy * snakemake.params.bev_dsm_availability @@ -1294,7 +1296,7 @@ def scale_capacity(n, scaling): nyears, ) - modify_mobility_demand(n) + modify_mobility_demand(n, snakemake.input.modified_mobility_data) new_boiler_ban(n) From ec5a9986e7f4ceddcedd38f7b9573773f5e01014 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Thu, 17 Jul 2025 10:38:22 +0200 Subject: [PATCH 024/202] rename and refactor --- Snakefile | 1 - config/config.de.yaml | 4 +- ...nd.py => build_exogenous_mobility_data.py} | 27 ++--- scripts/pypsa-de/modify_prenetwork.py | 112 ++++++++---------- 4 files changed, 59 insertions(+), 85 deletions(-) rename scripts/pypsa-de/{build_exogenous_mobility_demand.py => build_exogenous_mobility_data.py} (87%) diff --git a/Snakefile b/Snakefile index 691fe5222..fd64a6049 100644 --- a/Snakefile +++ b/Snakefile @@ -374,7 +374,6 @@ rule build_exogenous_mobility_data: energy_totals_year=config_provider("energy", "energy_totals_year"), input: ariadne="resources/ariadne_database.csv", - clustered_pop_layout=resources("pop_layout_base_s_{clusters}.csv"), energy_totals=resources("energy_totals.csv"), output: mobility_data=resources( diff --git a/config/config.de.yaml b/config/config.de.yaml index df93e81d3..6dd4bbb7d 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -346,7 +346,7 @@ sector: 2050: 0.03 land_transport_electric_share: 2020: 0.04 - 2025: 0.10 + 2025: 0.15 2030: 0.3 2035: 0.45 2040: 0.72 @@ -354,7 +354,7 @@ sector: 2050: 0.97 land_transport_ice_share: 2020: 0.95 - 2025: 0.89 + 2025: 0.84 2030: 0.68 2035: 0.52 2040: 0.25 diff --git a/scripts/pypsa-de/build_exogenous_mobility_demand.py b/scripts/pypsa-de/build_exogenous_mobility_data.py similarity index 87% rename from scripts/pypsa-de/build_exogenous_mobility_demand.py rename to scripts/pypsa-de/build_exogenous_mobility_data.py index b24a9d3e7..fc9ac2069 100644 --- a/scripts/pypsa-de/build_exogenous_mobility_demand.py +++ b/scripts/pypsa-de/build_exogenous_mobility_data.py @@ -39,7 +39,7 @@ def get_mobility_data( ) mobility_data = mobility_data.div(3.6e-6) # convert PJ to MWh - mobility_data["million_evs"] = 0.658407 + 0.120261 # BEV + PHEV + mobility_data["million_EVs"] = 0.658407 + 0.120261 # BEV + PHEV if ageb_for_mobility or uba_for_mobility: if uba_for_mobility: @@ -60,7 +60,7 @@ def get_mobility_data( mobility_data = mobility_data.div(3.6e-3) # convert PJ to MWH # https://www.kba.de/DE/Statistik/Produktkatalog/produkte/Fahrzeuge/fz27_b_uebersicht.html # FZ27_202101, table FZ 27.2, 1. 
January 2021: - mobility_data["million_evs"] = 0.358498 + 0.280149 + mobility_data["million_EVs"] = 0.358498 + 0.280149 elif year == "2025" and uba_for_mobility: # https://www.umweltbundesamt.de/sites/default/files/medien/11850/publikationen/projektionsbericht_2025.pdf, Abbildung 64 & 59, @@ -75,7 +75,7 @@ def get_mobility_data( int(year) ] # remove domestic navigation and aviation from UBA data to avoid double counting mobility_data = mobility_data.mul(1e6) # convert TWh to MWh - mobility_data["million_evs"] = 2.7 + 1.2 # BEV + PHEV + mobility_data["million_EVs"] = 2.7 + 1.2 # BEV + PHEV elif year == "2030" and uba_for_mobility: mobility_data = pd.Series( @@ -87,7 +87,7 @@ def get_mobility_data( ) mobility_data["Liquids"] -= non_land_liquids[int(year)] mobility_data = mobility_data.mul(1e6) - mobility_data["million_evs"] = 8.7 + 1.8 + mobility_data["million_EVs"] = 8.7 + 1.8 elif year == "2035" and uba_for_mobility: mobility_data = pd.Series( @@ -99,7 +99,7 @@ def get_mobility_data( ) mobility_data["Liquids"] -= non_land_liquids[int(year)] mobility_data = mobility_data.mul(1e6) - mobility_data["million_evs"] = 18.9 + 1.8 + mobility_data["million_EVs"] = 18.9 + 1.8 else: if uba_for_mobility: @@ -115,7 +115,7 @@ def get_mobility_data( mobility_data.loc[fuel] += df.get((key, "TWh/yr"), 0.0) mobility_data = mobility_data.mul(1e6) # convert TWh to MWh - mobility_data["million_evs"] = ( + mobility_data["million_EVs"] = ( df.loc["Stock|Transportation|LDV|BEV", "million"] + df.loc["Stock|Transportation|LDV|PHEV", "million"] ) @@ -132,7 +132,7 @@ def get_mobility_data( opts="", ll="vopt", sector_opts="none", - planning_horizons="2030", + planning_horizons="2020", run="KN2045_Mix", ) configure_logging(snakemake) @@ -182,15 +182,4 @@ def get_mobility_data( uba_for_mobility=snakemake.params.uba_for_mobility, ) - # get German mobility weighting - pop_layout = pd.read_csv(snakemake.input.clustered_pop_layout, index_col=0) - # only get German data - pop_layout = pop_layout[pop_layout.ct == "DE"].fraction - - mobility_data = pd.DataFrame( - pop_layout.values[:, None] * mobility_data.values, - index=pop_layout.index, - columns=mobility_data.index, - ) - - mobility_data.to_csv(snakemake.output.mobility_data) + mobility_data.to_csv(snakemake.output.mobility_data, header=False) diff --git a/scripts/pypsa-de/modify_prenetwork.py b/scripts/pypsa-de/modify_prenetwork.py index c9829b273..1d8de2346 100644 --- a/scripts/pypsa-de/modify_prenetwork.py +++ b/scripts/pypsa-de/modify_prenetwork.py @@ -829,87 +829,73 @@ def modify_mobility_demand(n, mobility_data_file): """ Change loads in Germany to use exogenous data for road demand. - The mobility_data contains the + The mobility_data contains the demand of Electricity, Hydrogen and Liquids in MWh/a, and the number of EVs in million. """ logger.info( "Overwriting land transport demand. In particular the `land_transport_electric_share` config setting will not be used." ) - new_demand = pd.read_csv(mobility_data_file, index_col=0) - simulation_period_correction_factor = n.snapshot_weightings.objective.sum() / 8760 + fraction_modelyear = n.snapshot_weightings.stores.sum() / 8760 - # oil demand - if "land transport oil" in n.loads.carrier.unique(): # i.e. 
before 2050 - oil_demand = pd.Series( - new_demand.Liquids.values * simulation_period_correction_factor, - index=new_demand.index + " land transport oil", - ) - - profile = n.loads_t.p_set.loc[:, oil_demand.index] - profile /= profile.sum() - n.loads_t.p_set.loc[:, oil_demand.index] = (oil_demand * profile).div( - n.snapshot_weightings.objective, axis=0 - ) + new_demand = pd.read_csv(mobility_data_file, header=None, index_col=0).iloc[:, 0] - # hydrogen demand - h2_demand = pd.Series( - new_demand.Hydrogen.values * simulation_period_correction_factor, - index=new_demand.index + " land transport fuel cell", - ) - - profile = n.loads_t.p_set.loc[:, h2_demand.index] - profile /= profile.sum() - n.loads_t.p_set.loc[:, h2_demand.index] = (h2_demand * profile).div( - n.snapshot_weightings.objective, axis=0 - ) + number_of_EVs = new_demand.pop("million_EVs") * 1e6 - # electricity demand - ev_demand = pd.Series( - new_demand.Electricity.values * simulation_period_correction_factor, - index=new_demand.index + " land transport EV", - ) + new_demand *= fraction_modelyear - profile = n.loads_t.p_set.loc[:, ev_demand.index] - profile /= profile.sum() - n.loads_t.p_set.loc[:, ev_demand.index] = (ev_demand * profile).div( - n.snapshot_weightings.objective, axis=0 - ) + carrier_fuel_map = { + "land transport EV": "Electricity", + "land transport fuel cell": "Hydrogen", + "land transport oil": "Liquids", + } + for carrier, fuel in carrier_fuel_map.items(): + loads_i = n.loads[ + (n.loads.carrier == carrier) & n.loads.index.str.startswith("DE") + ] + old_demand = ( + n.loads_t.p_set.loc[:, loads_i.index] + .sum(axis=1) + .mul(n.snapshot_weightings.stores) + .sum() + ) + scale_factor = new_demand[fuel] / old_demand + logger.info( + f"Scaling {carrier} loads in Germany by {scale_factor:.2f}.\nPrevious total demand: {old_demand:.2f} MWh/a, new total demand: {new_demand[fuel]:.2f} MWh/a." + ) + n.loads_t.p_set.loc[:, loads_i.index] *= scale_factor # adjust BEV charger and V2G capacities - BEV_charger_i = n.links[ + BEV_chargers = n.links[ (n.links.carrier == "BEV charger") & (n.links.bus0.str.startswith("DE")) - ].index - - # Check that buses in network and new_demand data appear in same order - assert [ - idx.startswith(idx2) for (idx, idx2) in zip(BEV_charger_i, new_demand.index) ] - # Then directly use .values for assignment - p_nom = ( - new_demand.million_evs.values * 1e6 * snakemake.params.bev_charge_rate - ) # same logic like in prepare_sector_network + scale_factor = ( + number_of_EVs * snakemake.params.bev_charge_rate / BEV_chargers.p_nom.sum() + ) + logger.info( + f"Scaling BEV charger capacities in Germany by {scale_factor:.2f} to match the new number of EVs.\nPrevious total capacity: {BEV_chargers.p_nom.sum():.2f} MW, new total capacity: {number_of_EVs * snakemake.params.bev_charge_rate:.2f} MW." 
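+        # scale_factor is dimensionless: the target charger capacity (number of EVs
+        # times bev_charge_rate, in MW) divided by the existing German p_nom total.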
+ ) + n.links.loc[BEV_chargers.index, "p_nom"] *= scale_factor - n.links.loc[BEV_charger_i, "p_nom"] = p_nom + V2G = n.links[(n.links.carrier == "V2G") & (n.links.bus0.str.startswith("DE"))] - V2G_i = n.links[ - (n.links.carrier == "V2G") & (n.links.bus0.str.startswith("DE")) - ].index - if not V2G_i.empty: - n.links.loc[V2G_i, "p_nom"] = p_nom * snakemake.params.bev_dsm_availability + if not V2G.empty: + n.links.loc[V2G.index, "p_nom"] *= ( + scale_factor * snakemake.params.bev_dsm_availability + ) - dsm_i = n.stores[ + dsm = n.stores[ (n.stores.carrier == "EV battery") & (n.stores.bus.str.startswith("DE")) - ].index - e_nom = ( - new_demand.million_evs.values - * 1e6 - * snakemake.params.bev_energy - * snakemake.params.bev_dsm_availability - ) - if not dsm_i.empty: - n.stores.loc[dsm_i, "e_nom"] = e_nom + ] + + if not dsm.empty: + scale_factor = ( + number_of_EVs + * snakemake.params.bev_energy + * snakemake.params.bev_dsm_availability + ) / dsm.e_nom.sum() + n.stores.loc[dsm.index, "e_nom"] *= scale_factor def add_hydrogen_turbines(n): @@ -1351,4 +1337,4 @@ def scale_capacity(n, scaling): sanitize_custom_columns(n) - n.export_to_netcdf(snakemake.output.network) + # n.export_to_netcdf(snakemake.output.network) From 40a83579d046a9bed533bc827c3c2cabbcae5e63 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Thu, 17 Jul 2025 11:06:55 +0200 Subject: [PATCH 025/202] small adjustments --- config/config.de.yaml | 6 +++--- scripts/pypsa-de/modify_prenetwork.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/config/config.de.yaml b/config/config.de.yaml index 0a5767465..a75d9a6a9 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -44,7 +44,7 @@ iiasa_database: reference_scenario: KN2045_Mix region: Deutschland ageb_for_mobility: true # In 2020 use AGEB data for final energy demand and KBA for vehicles - uba_for_mobility: true # For 2025–2035 use MWMS scenario from UBA Projektionsbericht 2025 + uba_for_mobility: false # For 2025–2035 use MWMS scenario from UBA Projektionsbericht 2025 # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#foresight foresight: myopic @@ -344,7 +344,7 @@ sector: 2045: 0.03 2050: 0.03 land_transport_electric_share: - 2020: 0.04 + 2020: 0.05 2025: 0.15 2030: 0.3 2035: 0.45 @@ -352,7 +352,7 @@ sector: 2045: 0.87 2050: 0.97 land_transport_ice_share: - 2020: 0.95 + 2020: 0.94 2025: 0.84 2030: 0.68 2035: 0.52 diff --git a/scripts/pypsa-de/modify_prenetwork.py b/scripts/pypsa-de/modify_prenetwork.py index 1d8de2346..102622061 100644 --- a/scripts/pypsa-de/modify_prenetwork.py +++ b/scripts/pypsa-de/modify_prenetwork.py @@ -1337,4 +1337,4 @@ def scale_capacity(n, scaling): sanitize_custom_columns(n) - # n.export_to_netcdf(snakemake.output.network) + n.export_to_netcdf(snakemake.output.network) From 4b78729e1018f02054cc740d7ecf7195843214b2 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 18 Jul 2025 17:44:17 +0200 Subject: [PATCH 026/202] improve export; disable uba_for _industry by default --- config/config.de.yaml | 2 +- scripts/pypsa-de/export_ariadne_variables.py | 66 +++++++++++++++----- 2 files changed, 53 insertions(+), 15 deletions(-) diff --git a/config/config.de.yaml b/config/config.de.yaml index 4c53cc483..18ea784f6 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -45,7 +45,7 @@ iiasa_database: region: Deutschland ageb_for_mobility: true # In 2020 use AGEB data for final energy demand and KBA for vehicles uba_for_mobility: false # For 2025–2035 use MWMS scenario from UBA 
Projektionsbericht 2025 - uba_for_industry: true # For 2025–2035 use MWMS scenario from UBA Projektionsbericht 2025 + uba_for_industry: false # For 2025–2035 use MWMS scenario from UBA Projektionsbericht 2025 scale_industry_non_energy: false # Scale non-energy industry demand directly proportional to energy demand # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#foresight foresight: myopic diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index 61401b5da..e0122b519 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -1817,9 +1817,17 @@ def get_secondary_energy(n, region, _industry_demand): axis=0, ).sum() mwh_coal_per_mwh_coke = 1.366 + coke_fraction = ( + industry_demand.get("coke") + * mwh_coal_per_mwh_coke + / ( + industry_demand.get("coke") * mwh_coal_per_mwh_coke + + industry_demand.get("coal") + ) + ) # Coke is added as a coal demand, so we need to convert back to units of coke for secondary energy var["Secondary Energy|Solids|Coal"] = var["Secondary Energy|Solids"] = ( - industry_demand.get("coke", 0) / mwh_coal_per_mwh_coke + sum_load(n, "coal for industry", region) * coke_fraction / mwh_coal_per_mwh_coke ) biomass_usage = ( @@ -1989,14 +1997,17 @@ def get_final_energy( # !: Pypsa-eur does not strictly distinguish between energy and # non-energy use - var["Final Energy|Industry|Electricity"] = industry_demand.get("electricity") - # or use: sum_load(n, "industry electricity", region) + var["Final Energy|Industry|Electricity"] = sum_load( + n, "industry electricity", region + ) # electricity is not used for non-energy purposes var["Final Energy|Industry excl Non-Energy Use|Electricity"] = var[ "Final Energy|Industry|Electricity" ] - var["Final Energy|Industry|Heat"] = industry_demand.get("low-temperature heat") + var["Final Energy|Industry|Heat"] = sum_load( + n, "low-temperature heat for industry", region + ) # heat is not used for non-energy purposes var["Final Energy|Industry excl Non-Energy Use|Heat"] = var[ "Final Energy|Industry|Heat" @@ -2008,7 +2019,7 @@ def get_final_energy( # var["Final Energy|Industry|Geothermal"] = \ # Not implemented - var["Final Energy|Industry|Gases"] = industry_demand.get("methane") + var["Final Energy|Industry|Gases"] = sum_load(n, "gas for industry", region) for gas_type in gas_fractions.index: var[f"Final Energy|Industry|Gases|{gas_type}"] = ( @@ -2030,7 +2041,7 @@ def get_final_energy( # var["Final Energy|Industry|Power2Heat"] = \ # Q: misleading description - var["Final Energy|Industry|Hydrogen"] = industry_demand.get("hydrogen") + var["Final Energy|Industry|Hydrogen"] = sum_load(n, "H2 for industry", region) # subtract non-energy used hydrogen from total hydrogen demand var["Final Energy|Industry excl Non-Energy Use|Hydrogen"] = ( var["Final Energy|Industry|Hydrogen"] @@ -2074,16 +2085,29 @@ def get_final_energy( # var["Final Energy|Industry|Other"] = \ - var["Final Energy|Industry|Solids|Biomass"] = industry_demand.get("solid biomass") + var["Final Energy|Industry|Solids|Biomass"] = sum_load( + n, "solid biomass for industry", region + ) var["Final Energy|Industry excl Non-Energy Use|Solids|Biomass"] = var[ "Final Energy|Industry|Solids|Biomass" ] mwh_coal_per_mwh_coke = 1.366 - # Coke is added as a coal demand, so we need to convert back to units of coke for final energy + coke_fraction = ( + industry_demand.get("coke") + * mwh_coal_per_mwh_coke + / ( + industry_demand.get("coke") * mwh_coal_per_mwh_coke + + 
industry_demand.get("coal") + ) + ) + # Contains coke demand, which is a coal product + # Here coke is considered a secondary energy source var["Final Energy|Industry|Solids|Coal"] = ( - industry_demand.get("coal") - + industry_demand.get("coke") / mwh_coal_per_mwh_coke + sum_load(n, "coal for industry", region) * (1 - coke_fraction) + + sum_load(n, "coal for industry", region) + * coke_fraction + / mwh_coal_per_mwh_coke ) var["Final Energy|Industry excl Non-Energy Use|Solids|Coal"] = var[ "Final Energy|Industry|Solids|Coal" @@ -2571,10 +2595,10 @@ def get_final_energy( return var * MWh2TWh -def get_emissions(n, region, _energy_totals, industry_demand): +def get_emissions(n, region, _energy_totals, _industry_demand): energy_totals = _energy_totals.loc[region[0:2]] - industry_DE = industry_demand.filter( + industry_demand = _industry_demand.filter( like=region, axis=0, ).sum() @@ -2880,8 +2904,22 @@ def get_emissions(n, region, _energy_totals, industry_demand): ) # considered 0 anyways mwh_coal_per_mwh_coke = 1.366 # from eurostat energy balance - # 0.3361 t/MWh, 1e-6 to convert to Mt - coking_emissions = industry_DE.coke * (mwh_coal_per_mwh_coke - 1) * 0.3361 * t2Mt + coke_fraction = ( + industry_demand.get("coke") + * mwh_coal_per_mwh_coke + / ( + industry_demand.get("coke") * mwh_coal_per_mwh_coke + + industry_demand.get("coal") + ) + ) + # 0.3361 t_CO2/MWh + coking_emissions = ( + sum_load(n, "coal for industry", region) + * coke_fraction + * (mwh_coal_per_mwh_coke - 1) + * 0.3361 + * t2Mt + ) var["Emissions|Gross Fossil CO2|Energy|Demand|Industry"] = ( co2_emissions.reindex( [ From 46a53f75793047ec06c7607b19245780db3b3d7a Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 18 Jul 2025 17:46:55 +0200 Subject: [PATCH 027/202] define low demand scenario with heat pump limits from UBA MWMS --- config/config.de.yaml | 9 +-- config/scenarios.manual.yaml | 14 ++++ scripts/pypsa-de/additional_functionality.py | 69 ++++++++++++++++++++ scripts/pypsa-de/export_ariadne_variables.py | 14 ++-- 4 files changed, 98 insertions(+), 8 deletions(-) diff --git a/config/config.de.yaml b/config/config.de.yaml index 18ea784f6..a5c49d09d 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -8,6 +8,7 @@ run: name: # - ExPol - KN2045_Mix + - LowDemand # - KN2045_Elek # - KN2045_H2 # - KN2045_NFniedrig @@ -62,13 +63,13 @@ scenario: sector_opts: - none planning_horizons: - - 2020 + #- 2020 - 2025 - 2030 - 2035 - - 2040 - - 2045 - - 2050 + # - 2040 + # - 2045 + # - 2050 existing_capacities: grouping_years_power: [1920, 1950, 1955, 1960, 1965, 1970, 1975, 1980, 1985, 1990, 1995, 2000, 2005, 2010, 2015, 2020] diff --git a/config/scenarios.manual.yaml b/config/scenarios.manual.yaml index eaa142a28..8390a5ab0 100644 --- a/config/scenarios.manual.yaml +++ b/config/scenarios.manual.yaml @@ -74,6 +74,20 @@ KN2045_Mix: # dient als Referenzszenario in der Familie der Ariadne-Szenarien +LowDemand: + iiasa_database: + reference_scenario: ExPol # e.g., CO2 emissions from REMIND will follow ExPol + uba_for_industry: true + uba_for_mobility: true + solving: + constraints: + decentral_heat_pump_budgets: + DE: # UBA MWMS, Projektionsbericht 2025, Abbildung 48 + 2025: 12.0 + 2030: 26.0 + 2035: 38.0 + + KN2045_Elek: # Fokus auf dem Hochlauf von Technologien zur direkten Elektrifizierung der Sektoren # Strom als präferierter Energieträger in der Endenergie, andere Energieträger ergänzend, wo nötig (Wasserstoff, synthetische Kraftstoffe) diff --git a/scripts/pypsa-de/additional_functionality.py 
b/scripts/pypsa-de/additional_functionality.py index 13e26736c..dc031b78e 100644 --- a/scripts/pypsa-de/additional_functionality.py +++ b/scripts/pypsa-de/additional_functionality.py @@ -535,6 +535,68 @@ def add_national_co2_budgets(n, snakemake, national_co2_budgets, investment_year ) +def add_decentral_heat_pump_budgets(n, decentral_heat_pump_budgets, investment_year): + carriers = [ + "rural air heat pumprural ground heat pumpurban decentral air heat pump" + ] + + heat_pumps = n.links.index[n.links.carrier.isin(carriers)] + + if heat_pumps.empty: + logger.warning( + "No heat pumps found in the network. Skipping decentral heat pump budgets." + ) + return + + if investment_year not in decentral_heat_pump_budgets["DE"].keys(): + logger.warning( + f"No decentral heat pump budget for {investment_year} found in the config file. Skipping." + ) + return + + logger.info("Adding decentral heat pump budgets") + + for ct in decentral_heat_pump_budgets: + if ct != "DE": + logger.error( + f"Heat pump budget for countries other than `DE` is not yet supported. Found country {ct}. Please check the config file." + ) + + limit = decentral_heat_pump_budgets[ct][investment_year] * 1e6 + + logger.info( + f"Limiting decentral heat pump electricity consumption in country {ct} to {decentral_heat_pump_budgets[ct][investment_year]:.1%} MWh.", + ) + + lhs = [] + + lhs.append( + ( + n.model["Link-p"].loc[:, heat_pumps] * n.snapshot_weightings.generators + ).sum() + ) + + cname = f"decentral_heat_pump_limit-{ct}" + n.model.add_constraints( + lhs <= limit, + name=f"GlobalConstraint-{cname}", + ) + if cname in n.global_constraints.index: + logger.warning( + f"Global constraint {cname} already exists. Dropping and adding it again." + ) + n.global_constraints.drop(cname, inplace=True) + + n.add( + "GlobalConstraint", + cname, + constant=limit, + sense="<=", + type="", + carrier_attribute="", + ) + + def force_boiler_profiles_existing_per_load(n): """ This scales the boiler dispatch to the load profile with a factor common to @@ -773,5 +835,12 @@ def additional_functionality(n, snapshots, snakemake): else: logger.warning("No national CO2 budget specified!") + if isinstance(constraints["decentral_heat_pump_budgets"], dict): + add_decentral_heat_pump_budgets( + n, + constraints["decentral_heat_pump_budgets"], + investment_year, + ) + if investment_year == 2020: adapt_nuclear_output(n) diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index e0122b519..5b75c593e 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -995,6 +995,10 @@ def _get_capacities(n, region, cap_func, cap_string="Capacity|"): like="solar thermal" ).sum() + var[cap_string + "Decentral Heat|Heat Pump"] = capacities_decentral_heat.filter( + like="heat pump" + ).sum() + capacities_h2 = ( cap_func( bus_carrier="H2", @@ -5195,7 +5199,7 @@ def get_data( opts="", ll="vopt", sector_opts="None", - run="KN2045_Mix", + run="LowDemand", ) configure_logging(snakemake) config = snakemake.config @@ -5320,12 +5324,14 @@ def get_data( ac_projects_invest = df.query( "Variable == 'Investment|Energy Supply|Electricity|Transmission|AC|NEP|Onshore'" )[planning_horizons].values.sum() - + active_years = [ + int(year) for year in modelyears if int(year) in [2025, 2030, 2035, 2040] + ] df.loc[ df.query( "Variable == 'Investment|Energy Supply|Electricity|Transmission|AC|Übernahme|Startnetz Delta'" ).index, - [2025, 2030, 2035, 2040], + active_years, ] += (ac_startnetz - 
ac_projects_invest) / 4 for suffix in ["|AC|NEP", "|AC", "", " and Distribution"]: @@ -5333,7 +5339,7 @@ def get_data( df.query( f"Variable == 'Investment|Energy Supply|Electricity|Transmission{suffix}'" ).index, - [2025, 2030, 2035, 2040], + active_years, ] += (ac_startnetz - ac_projects_invest) / 4 print("Assigning mean investments of year and year + 5 to year.") From 0d829144d79aa673ae4ecabfb143b08961f483f8 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 18 Jul 2025 17:47:06 +0200 Subject: [PATCH 028/202] simplify workflow temporariliy --- Snakefile | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Snakefile b/Snakefile index 2b8d9127d..9539b4fdb 100644 --- a/Snakefile +++ b/Snakefile @@ -855,10 +855,10 @@ rule plot_ariadne_variables: rule ariadne_all: input: expand(RESULTS + "graphs/costs.svg", run=config_provider("run", "name")), - expand( - RESULTS + "ariadne/capacity_detailed.png", - run=config_provider("run", "name"), - ), + # expand( + # RESULTS + "ariadne/capacity_detailed.png", + # run=config_provider("run", "name"), + # ), expand( RESULTS + "maps/base_s_{clusters}_{opts}_{sector_opts}-h2_network_incl_kernnetz_{planning_horizons}.pdf", From 91db2549f7fcc0adc774113998f095795eca392f Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 18 Jul 2025 17:47:41 +0200 Subject: [PATCH 029/202] two more grouping years --- config/config.de.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/config/config.de.yaml b/config/config.de.yaml index a5c49d09d..12476fcd2 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -72,8 +72,8 @@ scenario: # - 2050 existing_capacities: - grouping_years_power: [1920, 1950, 1955, 1960, 1965, 1970, 1975, 1980, 1985, 1990, 1995, 2000, 2005, 2010, 2015, 2020] - grouping_years_heat: [1980, 1985, 1990, 1995, 2000, 2005, 2010, 2015, 2019] # heat grouping years >= baseyear will be ignored + grouping_years_power: [1920, 1950, 1955, 1960, 1965, 1970, 1975, 1980, 1985, 1990, 1995, 2000, 2005, 2010, 2015, 2020, 2025] + grouping_years_heat: [1980, 1985, 1990, 1995, 2000, 2005, 2010, 2015, 2020, 2024] # heat grouping years >= baseyear will be ignored fill_value_gas_chp_lifetime: 40 # if no explicit lifetime is given use 40 years. The number was chosen s.t. the existing capacities in 2020 match with statistics. 
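The decentral heat pump budget added in PATCH 027 caps the yearly electricity drawn by decentral heat pumps rather than their installed capacity, so the optimiser may still build units that run only part of the year. A minimal standalone sketch of the same constraint logic, assuming it is called from extra_functionality once the linopy model exists, that the three PyPSA-Eur carrier names below are the relevant ones, that German heat pump links can be identified by their bus0 prefix, and that the budget is given in TWh as in scenarios.manual.yaml:

    import logging

    import pypsa

    logger = logging.getLogger(__name__)

    HEAT_PUMP_CARRIERS = [
        "rural air heat pump",
        "rural ground heat pump",
        "urban decentral air heat pump",
    ]


    def limit_decentral_heat_pumps(
        n: pypsa.Network, budget_twh: float, country: str = "DE"
    ) -> None:
        """Cap the yearly electricity consumption (MWh) of decentral heat pumps in one country."""
        heat_pumps = n.links.index[
            n.links.carrier.isin(HEAT_PUMP_CARRIERS)
            & n.links.bus0.str.startswith(country)
        ]
        if heat_pumps.empty:
            logger.warning("No decentral heat pumps found for %s, skipping budget.", country)
            return
        # electricity drawn from bus0, weighted up to a full year, in MWh
        lhs = (
            n.model["Link-p"].loc[:, heat_pumps] * n.snapshot_weightings.generators
        ).sum()
        n.model.add_constraints(
            lhs <= budget_twh * 1e6, name=f"decentral_heat_pump_limit-{country}"
        )
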
From be534e940f30d9e71766c8cfeda35cb2761e4b4f Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 18 Jul 2025 17:53:24 +0200 Subject: [PATCH 030/202] refine scenarios --- config/config.de.yaml | 4 ++-- config/scenarios.manual.yaml | 14 ++++++++++++++ 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/config/config.de.yaml b/config/config.de.yaml index 12476fcd2..e3a622f48 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -4,7 +4,7 @@ # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#run run: - prefix: 20250716_improve_industry_demand + prefix: 20250718_improve_heat_pump_demand name: # - ExPol - KN2045_Mix @@ -479,7 +479,7 @@ solving: offwind: DE: 2020: 7.8 - 2025: 11.3 + 2025: 12 2030: 29.3 # uba Projektionsbericht and NEP without delayed BalWin 3 2035: 50 # Planned projects until 2035 (offshore_connection_points.csv) -1.3 GW for potential delays 2040: 65 # Planned projects until 2040 -1.5 GW for potential retirments diff --git a/config/scenarios.manual.yaml b/config/scenarios.manual.yaml index 8390a5ab0..f9e1b131e 100644 --- a/config/scenarios.manual.yaml +++ b/config/scenarios.manual.yaml @@ -86,6 +86,20 @@ LowDemand: 2025: 12.0 2030: 26.0 2035: 38.0 + limits_capacity_min: + Generator: + onwind: + DE: + 2030: 0 + 2035: 0 + offwind: + DE: + 2030: 0 + 2035: 0 + Link: + H2 electrolysis: + DE: + 2030: 0 KN2045_Elek: From 7d6af4cc2b9fa278b3e1f92fc6ebb8d772a62223 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 18 Jul 2025 18:17:53 +0200 Subject: [PATCH 031/202] fix minor --- scripts/pypsa-de/additional_functionality.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/pypsa-de/additional_functionality.py b/scripts/pypsa-de/additional_functionality.py index dc031b78e..4f3ad44b6 100644 --- a/scripts/pypsa-de/additional_functionality.py +++ b/scripts/pypsa-de/additional_functionality.py @@ -835,7 +835,7 @@ def additional_functionality(n, snapshots, snakemake): else: logger.warning("No national CO2 budget specified!") - if isinstance(constraints["decentral_heat_pump_budgets"], dict): + if isinstance(constraints.get("decentral_heat_pump_budgets"), dict): add_decentral_heat_pump_budgets( n, constraints["decentral_heat_pump_budgets"], From 09a00ebf00fcf64b20a4d57e99944d70db25abd3 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 18 Jul 2025 18:26:51 +0200 Subject: [PATCH 032/202] very minor --- scripts/pypsa-de/export_ariadne_variables.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index 5b75c593e..12697264f 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -5274,7 +5274,7 @@ def get_data( if "debug" == "debug": # For debugging var = pd.Series() - idx = 6 + idx = 1 n = networks[idx] c = costs[idx] _industry_demand = industry_demands[idx] From 19e1b91c00b3fe2e9fc07ae0d4edcfcbb882434c Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Tue, 22 Jul 2025 17:30:36 +0200 Subject: [PATCH 033/202] define regret workflow --- Snakefile | 45 +++++++++++++ scripts/pypsa-de/solve_regret.py | 109 +++++++++++++++++++++++++++++++ 2 files changed, 154 insertions(+) create mode 100644 scripts/pypsa-de/solve_regret.py diff --git a/Snakefile b/Snakefile index 9539b4fdb..4b09d23f7 100644 --- a/Snakefile +++ b/Snakefile @@ -973,3 +973,48 @@ rule ariadne_report_only: RESULTS + "ariadne/report/elec_price_duration_curve.pdf", run=config_provider("run", 
"name"), ), + + +rule solve_regret: + params: + solving=config_provider("solving"), + foresight=config_provider("foresight"), + co2_sequestration_potential=config_provider( + "sector", "co2_sequestration_potential", default=200 + ), + custom_extra_functionality="data/custom_extra_functionality.py", + energy_year=config_provider("energy", "energy_totals_year"), + input: + decision=RESULTS.replace("{run}", "{decision}") + + "networks/base_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", + realization=RESULTS + + "networks/base_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", + output: + regret_network=RESULTS.replace("{run}", "{decision}") + + "regret_networks/realization_{run}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", + shadow: + shadow_config + log: + solver=RESULTS.replace("{run}", "{decision}") + + "logs/realization_{run}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}_solver.log", + memory=RESULTS.replace("{run}", "{decision}") + + "logs/realization_{run}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}_memory.log", + python=RESULTS.replace("{run}", "{decision}") + + "logs/realization_{run}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}_python.log", + threads: solver_threads + resources: + mem_mb=config_provider("solving", "mem_mb"), + runtime=config_provider("solving", "runtime", default="6h"), + script: + "scripts/pypsa-de/solve_regret.py" + + +rule regret_all: + input: + regret_networks=expand( + RESULTS.replace("{run}", "{decision}") + + "regret_networks/realization_{run}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", + run=config_provider("run", "name"), + decision=config_provider("run", "name"), + **config["scenario"], + ), diff --git a/scripts/pypsa-de/solve_regret.py b/scripts/pypsa-de/solve_regret.py new file mode 100644 index 000000000..11deccda2 --- /dev/null +++ b/scripts/pypsa-de/solve_regret.py @@ -0,0 +1,109 @@ +import logging + +import numpy as np +import pypsa + +from scripts._benchmark import memory_logger +from scripts._helpers import ( + configure_logging, + mock_snakemake, + set_scenario_config, + update_config_from_wildcards, +) +from scripts.solve_network import solve_network + +logger = logging.getLogger(__name__) + + +def fix_capacities(realization, decision): + realization.optimize.fix_optimal_capacities() # also sets p_nom = p_nom_opt + decision.optimize.fix_optimal_capacities() + + nominal_attrs = { + "generators": "p_nom", + "lines": "s_nom", + "links": "p_nom", + "stores": "e_nom", + } + + realization.links.index.intersection(decision.links.index) + + for name, attr in nominal_attrs.items(): + real = getattr(realization, name) + deci = getattr(decision, name) + + common = real.index.intersection(deci.index) + if not real.index.equals(deci.index): + logger.warning( + f"Indices of {name} in realization and decision networks do not match. " + "This may lead to unexpected results." 
+ ) + + real.loc[common, attr] = deci.loc[common, attr] + + return realization + + +if __name__ == "__main__": + if "snakemake" not in globals(): + snakemake = mock_snakemake( + "solve_regret", + clusters=27, + opts="", + sector_opts="none", + planning_horizons="2030", + realization="KN2045_Mix", + run="LowDemand", + ) + + configure_logging(snakemake) + set_scenario_config(snakemake) + update_config_from_wildcards(snakemake.config, snakemake.wildcards) + + solve_opts = snakemake.params.solving["options"] + + np.random.seed(solve_opts.get("seed", 123)) + + if snakemake.input.realization == snakemake.input.decision: + import os + import sys + + src = os.path.abspath(snakemake.input.realization) + dst = os.path.abspath(snakemake.output.regret_network) + os.symlink(src, dst) + sys.exit(0) + + logger.info("Loading realization and decision networks") + + import pathlib + + # Touch output file to ensure it exists + pathlib.Path(snakemake.output.regret_network).touch() + + realization = pypsa.Network(snakemake.input.realization) + decision = pypsa.Network(snakemake.input.decision) + + planning_horizons = snakemake.wildcards.get("planning_horizons", None) + logging_frequency = snakemake.config.get("solving", {}).get( + "mem_logging_frequency", 30 + ) + + n = fix_capacities(realization, decision) + + with memory_logger( + filename=getattr(snakemake.log, "memory", None), interval=logging_frequency + ) as mem: + solve_network( + n, + config=snakemake.config, + params=snakemake.params, + solving=snakemake.params.solving, + planning_horizons=planning_horizons, + rule_name=snakemake.rule, + log_fn=snakemake.log.solver, + snakemake=snakemake, + ) + logger.info(f"Maximum memory usage: {mem.mem_usage}") + + n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards))) + n.export_to_netcdf(snakemake.output.network) From a7d22a439b0d7e2336f8713c23bfaf49b34d0b5d Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Tue, 22 Jul 2025 17:32:04 +0200 Subject: [PATCH 034/202] define extra_functionality within solve_network to capture snakemake object --- scripts/solve_network.py | 164 ++++++++++++++++++++------------------- 1 file changed, 84 insertions(+), 80 deletions(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index fdc950487..c4121ea3c 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -1157,86 +1157,6 @@ def add_co2_atmosphere_constraint(n, snapshots): n.model.add_constraints(lhs <= rhs, name=f"GlobalConstraint-{name}") -def extra_functionality( - n: pypsa.Network, snapshots: pd.DatetimeIndex, planning_horizons: str | None = None -) -> None: - """ - Add custom constraints and functionality. - - Parameters - ---------- - n : pypsa.Network - The PyPSA network instance with config and params attributes - snapshots : pd.DatetimeIndex - Simulation timesteps - planning_horizons : str, optional - The current planning horizon year or None in perfect foresight - - Collects supplementary constraints which will be passed to - ``pypsa.optimization.optimize``. - - If you want to enforce additional custom constraints, this is a good - location to add them. The arguments ``opts`` and - ``snakemake.config`` are expected to be attached to the network. 
- """ - config = n.config - constraints = config["solving"].get("constraints", {}) - if constraints["BAU"] and n.generators.p_nom_extendable.any(): - add_BAU_constraints(n, config) - if constraints["SAFE"] and n.generators.p_nom_extendable.any(): - add_SAFE_constraints(n, config) - if constraints["CCL"] and n.generators.p_nom_extendable.any(): - add_CCL_constraints(n, config, planning_horizons) - - reserve = config["electricity"].get("operational_reserve", {}) - if reserve.get("activate"): - add_operational_reserve_margin(n, snapshots, config) - - if EQ_o := constraints["EQ"]: - add_EQ_constraints(n, EQ_o.replace("EQ", "")) - - if {"solar-hsat", "solar"}.issubset( - config["electricity"]["renewable_carriers"] - ) and {"solar-hsat", "solar"}.issubset( - config["electricity"]["extendable_carriers"]["Generator"] - ): - add_solar_potential_constraints(n, config) - - if n.config.get("sector", {}).get("tes", False): - if n.buses.index.str.contains( - r"urban central heat|urban decentral heat|rural heat", - case=False, - na=False, - ).any(): - add_TES_energy_to_power_ratio_constraints(n) - add_TES_charger_ratio_constraints(n) - - add_battery_constraints(n) - add_lossy_bidirectional_link_constraints(n) - add_pipe_retrofit_constraint(n) - if n._multi_invest: - add_carbon_constraint(n, snapshots) - add_carbon_budget_constraint(n, snapshots) - add_retrofit_gas_boiler_constraint(n, snapshots) - else: - add_co2_atmosphere_constraint(n, snapshots) - - if config["sector"]["enhanced_geothermal"]["enable"]: - add_flexible_egs_constraint(n) - - if config["sector"]["imports"]["enable"]: - add_import_limit_constraint(n, snapshots) - - if n.params.custom_extra_functionality: - source_path = pathlib.Path(n.params.custom_extra_functionality).resolve() - assert source_path.exists(), f"{source_path} does not exist" - sys.path.append(os.path.dirname(source_path)) - module_name = os.path.splitext(os.path.basename(source_path))[0] - module = importlib.import_module(module_name) - custom_extra_functionality = getattr(module, module_name) - custom_extra_functionality(n, snapshots, snakemake) # pylint: disable=E0601 - - def check_objective_value(n: pypsa.Network, solving: dict) -> None: """ Check if objective value matches expected value within tolerance. @@ -1270,6 +1190,7 @@ def solve_network( config: dict, params: dict, solving: dict, + snakemake: Any, rule_name: str | None = None, planning_horizons: str | None = None, **kwargs, @@ -1310,6 +1231,88 @@ def solve_network( ObjectiveValueError If objective value differs from expected value """ + + def extra_functionality( + n: pypsa.Network, + snapshots: pd.DatetimeIndex, + planning_horizons: str | None = None, + ) -> None: + """ + Add custom constraints and functionality. + + Parameters + ---------- + n : pypsa.Network + The PyPSA network instance with config and params attributes + snapshots : pd.DatetimeIndex + Simulation timesteps + planning_horizons : str, optional + The current planning horizon year or None in perfect foresight + + Collects supplementary constraints which will be passed to + ``pypsa.optimization.optimize``. + + If you want to enforce additional custom constraints, this is a good + location to add them. The arguments ``opts`` and + ``snakemake.config`` are expected to be attached to the network. 
+ """ + config = n.config + constraints = config["solving"].get("constraints", {}) + if constraints["BAU"] and n.generators.p_nom_extendable.any(): + add_BAU_constraints(n, config) + if constraints["SAFE"] and n.generators.p_nom_extendable.any(): + add_SAFE_constraints(n, config) + if constraints["CCL"] and n.generators.p_nom_extendable.any(): + add_CCL_constraints(n, config, planning_horizons) + + reserve = config["electricity"].get("operational_reserve", {}) + if reserve.get("activate"): + add_operational_reserve_margin(n, snapshots, config) + + if EQ_o := constraints["EQ"]: + add_EQ_constraints(n, EQ_o.replace("EQ", "")) + + if {"solar-hsat", "solar"}.issubset( + config["electricity"]["renewable_carriers"] + ) and {"solar-hsat", "solar"}.issubset( + config["electricity"]["extendable_carriers"]["Generator"] + ): + add_solar_potential_constraints(n, config) + + if n.config.get("sector", {}).get("tes", False): + if n.buses.index.str.contains( + r"urban central heat|urban decentral heat|rural heat", + case=False, + na=False, + ).any(): + add_TES_energy_to_power_ratio_constraints(n) + add_TES_charger_ratio_constraints(n) + + add_battery_constraints(n) + add_lossy_bidirectional_link_constraints(n) + add_pipe_retrofit_constraint(n) + if n._multi_invest: + add_carbon_constraint(n, snapshots) + add_carbon_budget_constraint(n, snapshots) + add_retrofit_gas_boiler_constraint(n, snapshots) + else: + add_co2_atmosphere_constraint(n, snapshots) + + if config["sector"]["enhanced_geothermal"]["enable"]: + add_flexible_egs_constraint(n) + + if config["sector"]["imports"]["enable"]: + add_import_limit_constraint(n, snapshots) + + if n.params.custom_extra_functionality: + source_path = pathlib.Path(n.params.custom_extra_functionality).resolve() + assert source_path.exists(), f"{source_path} does not exist" + sys.path.append(os.path.dirname(source_path)) + module_name = os.path.splitext(os.path.basename(source_path))[0] + module = importlib.import_module(module_name) + custom_extra_functionality = getattr(module, module_name) + custom_extra_functionality(n, snapshots, snakemake) # pylint: disable=E0601 + set_of_options = solving["solver"]["options"] cf_solving = solving["options"] @@ -1432,6 +1435,7 @@ def solve_network( planning_horizons=planning_horizons, rule_name=snakemake.rule, log_fn=snakemake.log.solver, + snakemake=snakemake, ) logger.info(f"Maximum memory usage: {mem.mem_usage}") From 4aa5867dfd47534eb5e5f430ed2d56146cf2a4e7 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Tue, 22 Jul 2025 17:32:33 +0200 Subject: [PATCH 035/202] more robustness for networks with fixed capacities --- scripts/pypsa-de/additional_functionality.py | 6 +++++- scripts/solve_network.py | 6 ++++-- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/scripts/pypsa-de/additional_functionality.py b/scripts/pypsa-de/additional_functionality.py index 4f3ad44b6..407909e90 100644 --- a/scripts/pypsa-de/additional_functionality.py +++ b/scripts/pypsa-de/additional_functionality.py @@ -45,7 +45,11 @@ def add_capacity_limits(n, investment_year, limits_capacity, sense="maximum"): logger.info( f"Existing {c.name} {carrier} capacity in {ct}: {existing_capacity} {units}" ) - + if extendable_index.empty: + logger.warning( + f"No extendable {c.name} {carrier} capacities found in {ct}. Skipping." 
+ ) + continue nom = n.model[c.name + "-" + attr + "_nom"].loc[extendable_index] lhs = nom.sum() diff --git a/scripts/solve_network.py b/scripts/solve_network.py index c4121ea3c..4e50aa2c5 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -869,9 +869,10 @@ def add_TES_energy_to_power_ratio_constraints(n: pypsa.Network) -> None: ] if indices_charger_p_nom_extendable.empty or indices_stores_e_nom_extendable.empty: - raise ValueError( + logger.warning( "No valid extendable charger links or stores found for TES energy to power constraints." ) + return energy_to_power_ratio_values = n.links.loc[ indices_charger_p_nom_extendable, "energy to power ratio" @@ -939,9 +940,10 @@ def add_TES_charger_ratio_constraints(n: pypsa.Network) -> None: indices_charger_p_nom_extendable.empty or indices_discharger_p_nom_extendable.empty ): - raise ValueError( + logger.warning( "No valid extendable TES discharger or charger links found for TES charger ratio constraints." ) + return for charger, discharger in zip( indices_charger_p_nom_extendable, indices_discharger_p_nom_extendable From bd09d35dcde468be3e4b88f5407eca56fc03040d Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 23 Jul 2025 11:13:04 +0200 Subject: [PATCH 036/202] rename --- ...scenario_comparison.py => plot_scenario_comparison.py} | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) rename scripts/pypsa-de/{plot_ariadne_scenario_comparison.py => plot_scenario_comparison.py} (85%) diff --git a/scripts/pypsa-de/plot_ariadne_scenario_comparison.py b/scripts/pypsa-de/plot_scenario_comparison.py similarity index 85% rename from scripts/pypsa-de/plot_ariadne_scenario_comparison.py rename to scripts/pypsa-de/plot_scenario_comparison.py index d2904d8fe..623096e3f 100644 --- a/scripts/pypsa-de/plot_ariadne_scenario_comparison.py +++ b/scripts/pypsa-de/plot_scenario_comparison.py @@ -17,7 +17,9 @@ def scenario_plot(df, var): ax = df.T.plot(xlabel="years", ylabel=str(unit), title=str(var)) prefix = snakemake.config["run"]["prefix"] var = var.replace("|", "-").replace("\\", "-").replace(" ", "-").replace("/", "-") - ax.figure.savefig(f"results/{prefix}/ariadne_comparison/{var}", bbox_inches="tight") + ax.figure.savefig( + f"results/{prefix}/scenario_comparison/{var}", bbox_inches="tight" + ) plt.close(ax.figure) @@ -44,8 +46,8 @@ def scenario_plot(df, var): df = pd.concat(dfs, axis=0) prefix = snakemake.config["run"]["prefix"] - if not os.path.exists(f"results/{prefix}/ariadne_comparison/"): - os.mkdir(f"results/{prefix}/ariadne_comparison/") + if not os.path.exists(f"results/{prefix}/scenario_comparison/"): + os.mkdir(f"results/{prefix}/scenario_comparison/") for var in df._get_label_or_level_values("Variable"): scenario_plot(df.xs(var, level="Variable"), var) From ded2fc07975725e43941e2e1ba4810bb0936c2ba Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 23 Jul 2025 11:13:36 +0200 Subject: [PATCH 037/202] add scenario comparison plots to regret workflow --- Snakefile | 92 +++++++++++++++++++++++++++++++++++++++++++++++-------- 1 file changed, 80 insertions(+), 12 deletions(-) diff --git a/Snakefile b/Snakefile index 4b09d23f7..7a5f4a6d1 100644 --- a/Snakefile +++ b/Snakefile @@ -751,7 +751,6 @@ rule download_ariadne_template: rule export_ariadne_variables: params: planning_horizons=config_provider("scenario", "planning_horizons"), - hours=config_provider("clustering", "temporal", "resolution_sector"), max_hours=config_provider("electricity", "max_hours"), costs=config_provider("costs"), 
config_industry=config_provider("industry"), @@ -871,7 +870,7 @@ rule ariadne_all: run=config_provider("run", "name"), ), script: - "scripts/pypsa-de/plot_ariadne_scenario_comparison.py" + "scripts/pypsa-de/plot_scenario_comparison.py" rule build_scenarios: @@ -990,17 +989,17 @@ rule solve_regret: realization=RESULTS + "networks/base_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", output: - regret_network=RESULTS.replace("{run}", "{decision}") - + "regret_networks/realization_{run}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", + regret_network=RESULTS + + "regret_networks/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", shadow: shadow_config log: - solver=RESULTS.replace("{run}", "{decision}") - + "logs/realization_{run}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}_solver.log", - memory=RESULTS.replace("{run}", "{decision}") - + "logs/realization_{run}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}_memory.log", - python=RESULTS.replace("{run}", "{decision}") - + "logs/realization_{run}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}_python.log", + solver=RESULTS + + "logs/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}_solver.log", + memory=RESULTS + + "logs/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}_memory.log", + python=RESULTS + + "logs/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}_python.log", threads: solver_threads resources: mem_mb=config_provider("solving", "mem_mb"), @@ -1009,12 +1008,81 @@ rule solve_regret: "scripts/pypsa-de/solve_regret.py" +rule export_regret_variables: + params: + planning_horizons=config_provider("scenario", "planning_horizons"), + max_hours=config_provider("electricity", "max_hours"), + costs=config_provider("costs"), + config_industry=config_provider("industry"), + energy_totals_year=config_provider("energy", "energy_totals_year"), + co2_price_add_on_fossils=config_provider("co2_price_add_on_fossils"), + co2_sequestration_cost=config_provider("sector", "co2_sequestration_cost"), + post_discretization=config_provider("solving", "options", "post_discretization"), + NEP_year=config_provider("costs", "NEP"), + NEP_transmission=config_provider("costs", "transmission"), + input: + template="data/template_ariadne_database.xlsx", + industry_demands=expand( + resources( + "industrial_energy_demand_base_s_{clusters}_{planning_horizons}.csv" + ), + **config["scenario"], + allow_missing=True, + ), + networks=expand( + RESULTS + + "regret_networks/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", + **config["scenario"], + allow_missing=True, + ), + costs=expand( + resources("costs_{planning_horizons}.csv"), + **config["scenario"], + allow_missing=True, + ), + industrial_production_per_country_tomorrow=expand( + resources( + "industrial_production_per_country_tomorrow_{planning_horizons}-modified.csv" + ), + **config["scenario"], + allow_missing=True, + ), + industry_sector_ratios=expand( + resources("industry_sector_ratios_{planning_horizons}.csv"), + **config["scenario"], + allow_missing=True, + ), + industrial_production=resources("industrial_production_per_country.csv"), + energy_totals=resources("energy_totals.csv"), + output: + exported_variables=RESULTS + "regret_variables/regret_variables_{decision}.xlsx", + exported_variables_full=RESULTS + + "regret_variables/regret_variables_{decision}_full.xlsx", + resources: + mem_mb=16000, + log: + RESULTS + 
"logs/export_regret_variables_{decision}.log", + script: + "scripts/pypsa-de/export_ariadne_variables.py" + + rule regret_all: input: regret_networks=expand( - RESULTS.replace("{run}", "{decision}") - + "regret_networks/realization_{run}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", + RESULTS + + "regret_networks/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", run=config_provider("run", "name"), decision=config_provider("run", "name"), **config["scenario"], ), + + +rule regret_all_variables: + input: + exported_variables=expand( + RESULTS + "regret_variables/regret_variables_{decision}_full.xlsx", + run=config_provider("run", "name"), + decision=config_provider("run", "name"), + ), + script: + "scripts/pypsa-de/plot_scenario_comparison.py" From 5179615096c55be37761e684acef2b9a988d394d Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 23 Jul 2025 11:13:49 +0200 Subject: [PATCH 038/202] allow for use in regret workflow --- scripts/pypsa-de/export_ariadne_variables.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index 12697264f..a1f7af66d 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -5294,6 +5294,9 @@ def get_data( yearly_dfs = [] for i, year in enumerate(planning_horizons): print(f"Getting data for year {year}...") + decision = "" + if snakemake.wildcards.get("decision"): + decision = "_decision_" + snakemake.wildcards.decision yearly_dfs.append( get_data( networks[i], @@ -5305,7 +5308,7 @@ def get_data( "DE", year=year, version=config["version"], - scenario=snakemake.wildcards.run, + scenario=snakemake.wildcards.run + decision, ) ) From b7781c1bd490ef77fefdeb941611b472ad397249 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 23 Jul 2025 11:14:19 +0200 Subject: [PATCH 039/202] allow some components to be extended to fix infeasibilities --- scripts/pypsa-de/solve_regret.py | 80 ++++++++++++++++++++++++++++---- 1 file changed, 71 insertions(+), 9 deletions(-) diff --git a/scripts/pypsa-de/solve_regret.py b/scripts/pypsa-de/solve_regret.py index 11deccda2..b0fe785c0 100644 --- a/scripts/pypsa-de/solve_regret.py +++ b/scripts/pypsa-de/solve_regret.py @@ -41,6 +41,68 @@ def fix_capacities(realization, decision): real.loc[common, attr] = deci.loc[common, attr] + if name == "links": + virtual_links = [ + "oil refining", + "gas compressing", + "BEV charger", + "land transport oil", + "land transport fuel cell", + "unsustainable bioliquids", + "solid biomass for industry", + "gas for industry", + "industry methanol", + "naphtha for industry", + "process emissions", + "coal for industry", + "H2 for industry", + "shipping methanol", + "shipping oil", + "kerosene for aviation", + "agriculture machinery oil", + "renewable oil", + "methanol", + "renewable gas", + ] + real.loc[real.carrier.isin(virtual_links), "p_nom_extendable"] = True + real.loc[real.carrier.isin(virtual_links), "p_nom_min"] = real.loc[ + real.carrier.isin(virtual_links), "p_nom" + ] + + real.loc[real.carrier == "SMR", "p_nom_extendable"] = True + real.loc[real.carrier == "SMR", "p_nom_min"] = real.loc[ + real.carrier == "SMR", "p_nom" + ] + + real.loc[real.carrier == "waste CHP", "p_nom_extendable"] = True + real.loc[real.carrier == "waste CHP", "p_nom_min"] = real.loc[ + real.carrier == "waste CHP", "p_nom" + ] + + real.loc[ + real.carrier == "electricity distribution grid", "p_nom_extendable" + 
] = True # either this or load shedding? + + real.loc[ + real.carrier == "electricity distribution grid", "p_nom_extendable" + ] = True # either this or load shedding? + real.loc[real.carrier == "electricity distribution grid", "p_nom_min"] = ( + real.loc[real.carrier == "electricity distribution grid", "p_nom"] + ) + + if name == "generators": + fuels_and_vents = [ + "lignite", + "coal", + "oil primary", + "uranium", + "gas primary", + "urban central heat vent", + "rural heat vent", + "urban decentral heat vent", + ] + real.loc[real.carrier.isin(fuels_and_vents), "p_nom_extendable"] = True + return realization @@ -52,7 +114,7 @@ def fix_capacities(realization, decision): opts="", sector_opts="none", planning_horizons="2030", - realization="KN2045_Mix", + decision="KN2045_Mix", run="LowDemand", ) @@ -64,14 +126,14 @@ def fix_capacities(realization, decision): np.random.seed(solve_opts.get("seed", 123)) - if snakemake.input.realization == snakemake.input.decision: - import os - import sys + # if snakemake.input.realization == snakemake.input.decision: + # import os + # import sys - src = os.path.abspath(snakemake.input.realization) - dst = os.path.abspath(snakemake.output.regret_network) - os.symlink(src, dst) - sys.exit(0) + # src = os.path.abspath(snakemake.input.realization) + # dst = os.path.abspath(snakemake.output.regret_network) + # os.symlink(src, dst) + # sys.exit(0) logger.info("Loading realization and decision networks") @@ -106,4 +168,4 @@ def fix_capacities(realization, decision): logger.info(f"Maximum memory usage: {mem.mem_usage}") n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards))) - n.export_to_netcdf(snakemake.output.network) + n.export_to_netcdf(snakemake.output.regret_network) From 1d8f2d9f336ac50f5e23a0c2d2be59972adcc2bb Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 23 Jul 2025 14:22:36 +0200 Subject: [PATCH 040/202] fix p_nom_min to avoid less fossil imports --- scripts/pypsa-de/solve_regret.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/scripts/pypsa-de/solve_regret.py b/scripts/pypsa-de/solve_regret.py index b0fe785c0..20ccfb707 100644 --- a/scripts/pypsa-de/solve_regret.py +++ b/scripts/pypsa-de/solve_regret.py @@ -91,17 +91,22 @@ def fix_capacities(realization, decision): ) if name == "generators": - fuels_and_vents = [ + fuels = [ "lignite", "coal", "oil primary", "uranium", "gas primary", + ] + vents = [ "urban central heat vent", "rural heat vent", "urban decentral heat vent", ] - real.loc[real.carrier.isin(fuels_and_vents), "p_nom_extendable"] = True + real.loc[real.carrier.isin(fuels + vents), "p_nom_extendable"] = True + real.loc[real.carrier.isin(fuels), "p_nom_min"] = real.loc[ + real.carrier.isin(fuels), "p_nom" + ] return realization From 0c89fc2cc0e18a1614af11a2c00dd90f2d0361a2 Mon Sep 17 00:00:00 2001 From: Toni <153275395+toniseibold@users.noreply.github.com> Date: Wed, 23 Jul 2025 14:23:18 +0200 Subject: [PATCH 041/202] Correct Total System Costs Calculation (#40) * include interconnector and links/line costs into total system costs * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * take care of deprecation warnings * include energy trade costs * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --------- Co-authored-by: pre-commit-ci[bot] 
<66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Micha --- scripts/pypsa-de/export_ariadne_variables.py | 186 ++++++++++++++++++- 1 file changed, 180 insertions(+), 6 deletions(-) diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index a1f7af66d..cb751f816 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -13,6 +13,7 @@ import pandas as pd import pypsa from numpy import isclose +from pypsa.statistics import get_transmission_carriers from scripts._helpers import configure_logging, mock_snakemake from scripts.add_electricity import calculate_annuity, load_costs @@ -4336,15 +4337,188 @@ def get_policy(n, investment_year): def get_economy(n, region): var = pd.Series() - s = n.statistics - grouper = ["country", "carrier"] - system_cost = s.capex(groupby=grouper).add(s.opex(groupby=grouper)) + def get_tsc(n, country): + n.statistics.set_parameters(drop_zero=False) + capex = n.statistics.capex( + groupby=pypsa.statistics.groupers["name", "carrier"], nice_names=False + ) - # Cost|Total Energy System Cost in billion EUR2020/yr - var["Cost|Total Energy System Cost"] = round( - system_cost.groupby("country").sum()[region] / 1e9, 4 + opex = n.statistics.opex( + groupby=pypsa.statistics.groupers["name", "carrier"], nice_names=False + ) + + # filter inter country transmission lines and links + inter_country_lines = n.lines.bus0.map(n.buses.country) != n.lines.bus1.map( + n.buses.country + ) + inter_country_links = n.links.bus0.map(n.buses.country) != n.links.bus1.map( + n.buses.country + ) + # + transmission_carriers = get_transmission_carriers(n).get_level_values("carrier") + transmission_lines = n.lines.carrier.isin(transmission_carriers) + transmission_links = n.links.carrier.isin(transmission_carriers) + # + country_transmission_lines = ( + (n.lines.bus0.str.contains(country)) & ~(n.lines.bus1.str.contains(country)) + ) | ( + ~(n.lines.bus0.str.contains(country)) & (n.lines.bus1.str.contains(country)) + ) + country_tranmission_links = ( + (n.links.bus0.str.contains(country)) & ~(n.links.bus1.str.contains(country)) + ) | ( + ~(n.links.bus0.str.contains(country)) & (n.links.bus1.str.contains(country)) + ) + # + inter_country_transmission_lines = ( + inter_country_lines & transmission_lines & country_transmission_lines + ) + inter_country_transmission_links = ( + inter_country_links & transmission_links & country_tranmission_links + ) + inter_country_transmission_lines_i = inter_country_transmission_lines[ + inter_country_transmission_lines + ].index + inter_country_transmission_links_i = inter_country_transmission_links[ + inter_country_transmission_links + ].index + inter_country_transmission_i = inter_country_transmission_lines_i.union( + inter_country_transmission_links_i + ) + + # + tsc = pd.concat([capex, opex], axis=1, keys=["capex", "opex"]) + tsc = tsc.reset_index().set_index("name") + tsc.loc[inter_country_transmission_i, ["capex", "opex"]] = ( + tsc.loc[inter_country_transmission_i, ["capex", "opex"]] / 2 + ) + tsc.rename( + index={ + index: index + " " + country for index in inter_country_transmission_i + }, + inplace=True, + ) + # rename inter region links and lines + to_rename_links = n.links[ + (n.links.bus0.str.contains(region)) + & (n.links.bus1.str.contains(region)) + & ~(n.links.index.str.contains(region)) + ].index + to_rename_lines = n.lines[ + (n.lines.bus0.str.contains(region)) + & (n.lines.bus1.str.contains(region)) + & 
~(n.lines.index.str.contains(region)) + ].index + tsc.rename( + index={index: index + " " + region for index in to_rename_links}, + inplace=True, + ) + tsc.rename( + index={index: index + " " + region for index in to_rename_lines}, + inplace=True, + ) + + tsc = ( + tsc.filter(like=country, axis=0) + .drop("component", axis=1) + .groupby("carrier") + .sum() + ) + + return tsc + + def get_link_opex(n, carriers, region, sw): + # get flow of electricity/hydrogen... + # multiply it with the marginal costs + supplying = n.links[ + (n.links.carrier.isin(carriers)) + & (n.links.bus0.str.startswith(region)) + & (~n.links.bus1.str.startswith(region)) + ].index + + receiving = n.links[ + (n.links.carrier.isin(carriers)) + & (~n.links.bus0.str.startswith(region)) + & (n.links.bus1.str.startswith(region)) + ].index + + trade_out = 0 + for index in supplying: + # price of energy in trade country + marg_price = n.buses_t.marginal_price[n.links.loc[index].bus0] + trade = n.links_t.p1[index].mul(sw) + trade_out += marg_price.mul(trade).sum() + + trade_in = 0 + for index in receiving: + # price of energy in Germany + marg_price = n.buses_t.marginal_price[n.links.loc[index].bus0] + trade = n.links_t.p1[index].mul(sw) + trade_in += marg_price.mul(trade).sum() + return abs(trade_in) - abs(trade_out) + # > 0: costs for Germany + # < 0: profit for Germany + + def get_line_opex(n, region, sw): + supplying = n.lines[ + (n.lines.carrier.isin(["AC"])) + & (n.lines.bus0.str.startswith(region)) + & (~n.lines.bus1.str.startswith(region)) + ].index + receiving = n.lines[ + (n.lines.carrier.isin(["AC"])) + & (~n.lines.bus0.str.startswith(region)) + & (n.lines.bus1.str.startswith(region)) + ].index + + # i have to clip the trade + net_out = 0 + for index in supplying: + trade = n.lines_t.p1[index].mul(sw) + trade_out = trade.clip(lower=0) # positive + trade_in = trade.clip(upper=0) # negative + marg_price_DE = n.buses_t.marginal_price[n.lines.loc[index].bus0] + marg_price_EU = n.buses_t.marginal_price[n.lines.loc[index].bus1] + net_out += ( + trade_out.mul(marg_price_DE).sum() + trade_in.mul(marg_price_EU).sum() + ) + # net_out > 0: Germany is exporting more electricity + # net_out < 0: Germany is importing more electricity + + net_in = 0 + for index in receiving: + trade = n.lines_t.p1[index].mul(sw) + trade_in = trade.clip(lower=0) # positive + trade_out = trade.clip(upper=0) # negative + trade_out = trade_out.clip(upper=0) + marg_price_EU = n.buses_t.marginal_price[n.lines.loc[index].bus0] + marg_price_DE = n.buses_t.marginal_price[n.lines.loc[index].bus1] + net_in += ( + trade_in.mul(marg_price_EU).sum() + trade_out.mul(marg_price_DE).sum() + ) + # net_in > 0: Germany is importing more electricity + # net_in < 0: Germany is exporting more electricity + + return -net_out + net_in + + trade_carriers = [ + "DC", + "H2 pipeline", + "H2 pipeline (Kernnetz)", + "H2 pipeline retrofittedrenewable oil", + "renewable gas", + "methanol", + ] + + sw = n.snapshot_weightings.generators + tsc = get_tsc(n, region).sum().sum() + trade_costs = get_link_opex(n, trade_carriers, region, sw) + get_line_opex( + n, region, sw ) + # Cost|Total Energy System Cost in billion EUR2020/yr + var["Cost|Total Energy System Cost"] = round((tsc + trade_costs) / 1e9, 4) + return var From d4a8282849f876feb4088151dd4d6d28a1b171db Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 23 Jul 2025 18:44:42 +0200 Subject: [PATCH 042/202] more care with which assets are free, allow methanolisation buildout to account for minor numerical issues --- 
scripts/pypsa-de/solve_regret.py | 27 +++++++++++++++------------ 1 file changed, 15 insertions(+), 12 deletions(-) diff --git a/scripts/pypsa-de/solve_regret.py b/scripts/pypsa-de/solve_regret.py index 20ccfb707..96f769034 100644 --- a/scripts/pypsa-de/solve_regret.py +++ b/scripts/pypsa-de/solve_regret.py @@ -32,7 +32,9 @@ def fix_capacities(realization, decision): real = getattr(realization, name) deci = getattr(decision, name) - common = real.index.intersection(deci.index) + common = real.index.intersection(deci.index).difference( + real.query("carrier == 'BEV charger'").index + ) if not real.index.equals(deci.index): logger.warning( f"Indices of {name} in realization and decision networks do not match. " @@ -43,12 +45,12 @@ def fix_capacities(realization, decision): if name == "links": virtual_links = [ - "oil refining", - "gas compressing", - "BEV charger", + # "oil refining", + # "gas compressing", + # "BEV charger", "land transport oil", "land transport fuel cell", - "unsustainable bioliquids", + # "unsustainable bioliquids", "solid biomass for industry", "gas for industry", "industry methanol", @@ -60,9 +62,9 @@ def fix_capacities(realization, decision): "shipping oil", "kerosene for aviation", "agriculture machinery oil", - "renewable oil", - "methanol", - "renewable gas", + # "renewable oil", + # "methanol", + # "renewable gas", ] real.loc[real.carrier.isin(virtual_links), "p_nom_extendable"] = True real.loc[real.carrier.isin(virtual_links), "p_nom_min"] = real.loc[ @@ -79,10 +81,6 @@ def fix_capacities(realization, decision): real.carrier == "waste CHP", "p_nom" ] - real.loc[ - real.carrier == "electricity distribution grid", "p_nom_extendable" - ] = True # either this or load shedding? - real.loc[ real.carrier == "electricity distribution grid", "p_nom_extendable" ] = True # either this or load shedding? 
@@ -90,6 +88,11 @@ def fix_capacities(realization, decision): real.loc[real.carrier == "electricity distribution grid", "p_nom"] ) + real.loc[real.carrier == "methanolisation", "p_nom_extendable"] = True + real.loc[real.carrier == "methanolisation", "p_nom_min"] = real.loc[ + real.carrier == "methanolisation", "p_nom" + ] + if name == "generators": fuels = [ "lignite", From 1c4762b4e792d3349031857f9278d5bc00172bde Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Thu, 24 Jul 2025 15:19:46 +0200 Subject: [PATCH 043/202] reduce resolution for faster plotting --- scripts/pypsa-de/plot_scenario_comparison.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/pypsa-de/plot_scenario_comparison.py b/scripts/pypsa-de/plot_scenario_comparison.py index 623096e3f..0278168c5 100644 --- a/scripts/pypsa-de/plot_scenario_comparison.py +++ b/scripts/pypsa-de/plot_scenario_comparison.py @@ -18,7 +18,7 @@ def scenario_plot(df, var): prefix = snakemake.config["run"]["prefix"] var = var.replace("|", "-").replace("\\", "-").replace(" ", "-").replace("/", "-") ax.figure.savefig( - f"results/{prefix}/scenario_comparison/{var}", bbox_inches="tight" + f"results/{prefix}/scenario_comparison/{var}", bbox_inches="tight", dpi=100 ) plt.close(ax.figure) From b9005a80b7cb0d6871f4c0995907d503f60617df Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Thu, 24 Jul 2025 15:20:15 +0200 Subject: [PATCH 044/202] free co2 atmosphere store and restructure code --- scripts/pypsa-de/solve_regret.py | 43 ++++++++++---------------------- 1 file changed, 13 insertions(+), 30 deletions(-) diff --git a/scripts/pypsa-de/solve_regret.py b/scripts/pypsa-de/solve_regret.py index 96f769034..32b58d060 100644 --- a/scripts/pypsa-de/solve_regret.py +++ b/scripts/pypsa-de/solve_regret.py @@ -45,12 +45,8 @@ def fix_capacities(realization, decision): if name == "links": virtual_links = [ - # "oil refining", - # "gas compressing", - # "BEV charger", "land transport oil", "land transport fuel cell", - # "unsustainable bioliquids", "solid biomass for industry", "gas for industry", "industry methanol", @@ -62,35 +58,19 @@ def fix_capacities(realization, decision): "shipping oil", "kerosene for aviation", "agriculture machinery oil", - # "renewable oil", - # "methanol", - # "renewable gas", ] - real.loc[real.carrier.isin(virtual_links), "p_nom_extendable"] = True - real.loc[real.carrier.isin(virtual_links), "p_nom_min"] = real.loc[ - real.carrier.isin(virtual_links), "p_nom" + essential_links = [ + "SMR", + "waste CHP", ] - - real.loc[real.carrier == "SMR", "p_nom_extendable"] = True - real.loc[real.carrier == "SMR", "p_nom_min"] = real.loc[ - real.carrier == "SMR", "p_nom" + bottleneck_links = [ + "electricity distribution grid", + "methanolisation", ] - - real.loc[real.carrier == "waste CHP", "p_nom_extendable"] = True - real.loc[real.carrier == "waste CHP", "p_nom_min"] = real.loc[ - real.carrier == "waste CHP", "p_nom" - ] - - real.loc[ - real.carrier == "electricity distribution grid", "p_nom_extendable" - ] = True # either this or load shedding? 
- real.loc[real.carrier == "electricity distribution grid", "p_nom_min"] = ( - real.loc[real.carrier == "electricity distribution grid", "p_nom"] - ) - - real.loc[real.carrier == "methanolisation", "p_nom_extendable"] = True - real.loc[real.carrier == "methanolisation", "p_nom_min"] = real.loc[ - real.carrier == "methanolisation", "p_nom" + links_to_free = virtual_links + essential_links + bottleneck_links + real.loc[real.carrier.isin(links_to_free), "p_nom_extendable"] = True + real.loc[real.carrier.isin(links_to_free), "p_nom_min"] = real.loc[ + real.carrier.isin(links_to_free), "p_nom" ] if name == "generators": @@ -111,6 +91,9 @@ def fix_capacities(realization, decision): real.carrier.isin(fuels), "p_nom" ] + if name == "stores": + real.loc[real.carrier == "co2", "e_nom_extendable"] = True + return realization From 11714732d6f21f06d68f7e1d7153db218eab9dc5 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Thu, 24 Jul 2025 18:41:18 +0200 Subject: [PATCH 045/202] rewrite fix capacities --- scripts/pypsa-de/solve_regret.py | 99 +++++++++++++++++++++++++------- 1 file changed, 78 insertions(+), 21 deletions(-) diff --git a/scripts/pypsa-de/solve_regret.py b/scripts/pypsa-de/solve_regret.py index 32b58d060..fd442b25c 100644 --- a/scripts/pypsa-de/solve_regret.py +++ b/scripts/pypsa-de/solve_regret.py @@ -16,8 +16,7 @@ def fix_capacities(realization, decision): - realization.optimize.fix_optimal_capacities() # also sets p_nom = p_nom_opt - decision.optimize.fix_optimal_capacities() + n = realization.copy() nominal_attrs = { "generators": "p_nom", @@ -26,22 +25,47 @@ def fix_capacities(realization, decision): "stores": "e_nom", } - realization.links.index.intersection(decision.links.index) - for name, attr in nominal_attrs.items(): - real = getattr(realization, name) + new = getattr(n, name) deci = getattr(decision, name) - common = real.index.intersection(deci.index).difference( - real.query("carrier == 'BEV charger'").index - ) - if not real.index.equals(deci.index): - logger.warning( + if not new.index.equals(deci.index): + logger.error( f"Indices of {name} in realization and decision networks do not match. " "This may lead to unexpected results." ) + # raise ValueError("Indices of realization and decision networks do not match.") + + common_i = new.index.intersection(deci.index).difference( + new.query("carrier == 'BEV charger'").index + ) # Exclude BEV chargers from modification because the p_nom are taken from UBA + + extendable_i = new.query(f"{attr}_extendable").index - real.loc[common, attr] = deci.loc[common, attr] + if not deci.query(f"{attr}_opt > {attr}_max").empty: + logger.error( + f"Decision network have {name} with {attr}_opt > {attr}_max. " + "This may lead to unexpected results." + f"These assets are: {deci.query(f'{attr}_opt > {attr}_max').index.tolist()}" + ) + _idx = deci.query(f"{attr}_opt > {attr}_max").index + deci.loc[_idx, attr + "_max"] = deci.loc[_idx, attr + "_opt"] = np.minimum( + deci.loc[_idx, attr + "_max"], deci.loc[_idx, attr + "_opt"] + ) + + if not deci.query(f"{attr}_min > {attr}_opt").empty: + logger.error( + f"Decision network have {name} with {attr}_min > {attr}_opt. " + "This may lead to unexpected results." 
+ f"These assets are: {deci.query(f'{attr}_min > {attr}_opt').index.tolist()}" + ) + new.loc[common_i, attr] = deci.loc[common_i, attr] + new.loc[common_i, attr + "_opt"] = deci.loc[common_i, attr + "_opt"] + new.loc[common_i, attr + "_min"] = deci.loc[common_i, attr + "_min"] + new.loc[common_i, attr + "_max"] = deci.loc[common_i, attr + "_max"] + + # Ideally nothing should be extendable + new.loc[extendable_i, attr + "_extendable"] = False if name == "links": virtual_links = [ @@ -58,20 +82,29 @@ def fix_capacities(realization, decision): "shipping oil", "kerosene for aviation", "agriculture machinery oil", + # "urban central water pits charger", # This should have no effect on results since e_nom of the water pits store remains fixed and the energy to power ratio constraints force them to be the same, however it may avoid infeasibilitiesl ] + # TODO double check if these are all needed essential_links = [ "SMR", "waste CHP", + "biogas to gas", ] + # TODO double check if these are all needed bottleneck_links = [ "electricity distribution grid", "methanolisation", + "rural gas boiler", + "urban decentral gas boiler", ] links_to_free = virtual_links + essential_links + bottleneck_links - real.loc[real.carrier.isin(links_to_free), "p_nom_extendable"] = True - real.loc[real.carrier.isin(links_to_free), "p_nom_min"] = real.loc[ - real.carrier.isin(links_to_free), "p_nom" - ] + _idx = new.carrier.isin(links_to_free).index.intersection(extendable_i) + new.loc[_idx, "p_nom_extendable"] = True + # For essential links and bottleneck links allow more, but not less + _idx = new.carrier.isin( + essential_links + bottleneck_links + ).index.intersection(extendable_i) + new.loc[_idx, "p_nom_min"] = new.loc[_idx, "p_nom_opt"] if name == "generators": fuels = [ @@ -86,15 +119,39 @@ def fix_capacities(realization, decision): "rural heat vent", "urban decentral heat vent", ] - real.loc[real.carrier.isin(fuels + vents), "p_nom_extendable"] = True - real.loc[real.carrier.isin(fuels), "p_nom_min"] = real.loc[ - real.carrier.isin(fuels), "p_nom" - ] + _idx = new.carrier.isin(fuels + vents).index.intersection(extendable_i) + new.loc[_idx, "p_nom_extendable"] = True + # For fuels allow more, but not less + _idx = new.carrier.isin(fuels).index.intersection(extendable_i) + new.loc[_idx, "p_nom_min"] = new.loc[_idx, "p_nom_opt"] if name == "stores": - real.loc[real.carrier == "co2", "e_nom_extendable"] = True + # there is only one co2 atmosphere store which is always extendable, hence no intersection with extendable_i needed + new.loc[new.carrier == "co2", "e_nom_extendable"] = True + + # Just making sure that the defaults are set correctly + assert (new.loc[new.carrier == "co2", "e_nom_min"] == 0).all() + assert (new.loc[new.carrier == "co2", "e_nom_max"] == np.inf).all() + assert (new.loc[new.carrier == "co2", "e_nom"] == 0).all() + + # TODO double check if these are all needed + co2_stores = ["co2 stored", "co2 sequestered"] + _idx = new.carrier.isin(co2_stores).index.intersection(extendable_i) + new.loc[_idx, "e_nom_extendable"] = True + # Allow less, but not more + new.loc[_idx, "e_nom_max"] = new.loc[_idx, "e_nom_opt"] + + _idx = new.carrier.isin(["urban central water pits"]).index.intersection( + extendable_i + ) + new.loc[_idx, "e_nom_extendable"] = ( + True # This should have no effect on results as long as p_nom of the water pits charger remains fixed and the energy to power ratio constraints force them to be the same, however it may avoid infeasibilities + ) + new.loc[_idx, "e_nom_min"] = 0 + 
new.loc[_idx, "e_nom_max"] = np.inf + new.loc[_idx, "e_nom"] = 0 - return realization + return n if __name__ == "__main__": From a996f1697e1b6d365d47054febcaf39ed55469b0 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 25 Jul 2025 13:47:34 +0200 Subject: [PATCH 046/202] cleaner comparison plots --- Snakefile | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/Snakefile b/Snakefile index 7a5f4a6d1..cece74fe3 100644 --- a/Snakefile +++ b/Snakefile @@ -852,6 +852,8 @@ rule plot_ariadne_variables: rule ariadne_all: + params: + output_dir="results/" + config["run"]["prefix"] + "/scenario_comparison/", input: expand(RESULTS + "graphs/costs.svg", run=config_provider("run", "name")), # expand( @@ -1078,6 +1080,8 @@ rule regret_all: rule regret_all_variables: + params: + output_dir="results/" + config["run"]["prefix"] + "/regret_comparison/", input: exported_variables=expand( RESULTS + "regret_variables/regret_variables_{decision}_full.xlsx", From fb2408d3155ef126ff0547a82f16526054ccc185 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 25 Jul 2025 13:47:52 +0200 Subject: [PATCH 047/202] rename scenario and limit transmission expansion --- config/config.de.yaml | 7 +++---- config/scenarios.manual.yaml | 6 +++++- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/config/config.de.yaml b/config/config.de.yaml index e3a622f48..68278691b 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -4,10 +4,11 @@ # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#run run: - prefix: 20250718_improve_heat_pump_demand + prefix: 20250725_regrets name: # - ExPol - - KN2045_Mix + # - KN2045_Mix + - AriadneDemand - LowDemand # - KN2045_Elek # - KN2045_H2 @@ -54,8 +55,6 @@ foresight: myopic # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#scenario # Wildcard docs in https://pypsa-eur.readthedocs.io/en/latest/wildcards.html scenario: - ll: - - vopt clusters: - 27 #current options: 27, 49 opts: diff --git a/config/scenarios.manual.yaml b/config/scenarios.manual.yaml index f9e1b131e..aa4b72e58 100644 --- a/config/scenarios.manual.yaml +++ b/config/scenarios.manual.yaml @@ -73,6 +73,9 @@ KN2045_Mix: # Importe erneuerbar erzeugter Energien auf mittlerem Niveau # dient als Referenzszenario in der Familie der Ariadne-Szenarien +AriadneDemand: + electricity: + transmission_limit: v1 LowDemand: iiasa_database: @@ -100,7 +103,8 @@ LowDemand: H2 electrolysis: DE: 2030: 0 - + electricity: + transmission_limit: v1 KN2045_Elek: # Fokus auf dem Hochlauf von Technologien zur direkten Elektrifizierung der Sektoren From 34da1248b752a345787b31138fb3cc3f4ab40690 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 25 Jul 2025 13:48:07 +0200 Subject: [PATCH 048/202] catch output from linopy --- scripts/solve_network.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 4e50aa2c5..28eb1b5c1 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -26,7 +26,9 @@ based on the rule :mod:`solve_network`. """ +import contextlib import importlib +import io import logging import os import pathlib @@ -1378,9 +1380,12 @@ def extra_functionality( raise RuntimeError("Solving status 'warning'. 
Discarding solution.") if "infeasible" in condition: - labels = n.model.compute_infeasibilities() - logger.info(f"Labels:\n{labels}") - n.model.print_infeasibilities() + # labels = n.model.compute_infeasibilities() + # logger.info(f"Labels:\n{labels}") + buf = io.StringIO() + with contextlib.redirect_stdout(buf): + n.model.print_infeasibilities() + logger.info(buf.getvalue()) raise RuntimeError("Solving status 'infeasible'. Infeasibilities computed.") if status == "warning": From 3a03f059195f3fb00a39a0b9d655387ddff40bb2 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 25 Jul 2025 13:49:20 +0200 Subject: [PATCH 049/202] use output dir --- scripts/pypsa-de/plot_scenario_comparison.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/scripts/pypsa-de/plot_scenario_comparison.py b/scripts/pypsa-de/plot_scenario_comparison.py index 0278168c5..7e9f0a79d 100644 --- a/scripts/pypsa-de/plot_scenario_comparison.py +++ b/scripts/pypsa-de/plot_scenario_comparison.py @@ -15,11 +15,8 @@ def scenario_plot(df, var): unit = "billion EUR2020/yr" df = df.droplevel("Unit") ax = df.T.plot(xlabel="years", ylabel=str(unit), title=str(var)) - prefix = snakemake.config["run"]["prefix"] var = var.replace("|", "-").replace("\\", "-").replace(" ", "-").replace("/", "-") - ax.figure.savefig( - f"results/{prefix}/scenario_comparison/{var}", bbox_inches="tight", dpi=100 - ) + ax.figure.savefig(snakemake.params.output_dir + var, bbox_inches="tight", dpi=100) plt.close(ax.figure) From 35176993f1e99db8fab565a7d7c02cf81fc9d0c1 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 25 Jul 2025 13:55:13 +0200 Subject: [PATCH 050/202] fighting with ifeasibilities -> currently a problem with co2 limit --- scripts/pypsa-de/solve_regret.py | 88 +++++++++++++++++--------------- 1 file changed, 47 insertions(+), 41 deletions(-) diff --git a/scripts/pypsa-de/solve_regret.py b/scripts/pypsa-de/solve_regret.py index fd442b25c..98686ad62 100644 --- a/scripts/pypsa-de/solve_regret.py +++ b/scripts/pypsa-de/solve_regret.py @@ -29,11 +29,18 @@ def fix_capacities(realization, decision): new = getattr(n, name) deci = getattr(decision, name) - if not new.index.equals(deci.index): + if not new.index.symmetric_difference(deci.index).empty: logger.error( f"Indices of {name} in realization and decision networks do not match. " "This may lead to unexpected results." ) + assert ( + new.query(f"{attr}_extendable") + .index.symmetric_difference(deci.query(f"{attr}_extendable").index) + .empty + ), ( + f"Indices of {name} with {attr}_extendable in realization and decision networks do not match." + ) # raise ValueError("Indices of realization and decision networks do not match.") common_i = new.index.intersection(deci.index).difference( @@ -43,22 +50,25 @@ def fix_capacities(realization, decision): extendable_i = new.query(f"{attr}_extendable").index if not deci.query(f"{attr}_opt > {attr}_max").empty: - logger.error( + logger.warning( f"Decision network have {name} with {attr}_opt > {attr}_max. " - "This may lead to unexpected results." f"These assets are: {deci.query(f'{attr}_opt > {attr}_max').index.tolist()}" ) _idx = deci.query(f"{attr}_opt > {attr}_max").index - deci.loc[_idx, attr + "_max"] = deci.loc[_idx, attr + "_opt"] = np.minimum( - deci.loc[_idx, attr + "_max"], deci.loc[_idx, attr + "_opt"] + deci.loc[_idx, attr + "_opt"] = deci.loc[_idx, attr + "_max"] + logger.warning( + f"Setting {name} with {attr}_opt > {attr}_max to {attr}_max." 
) if not deci.query(f"{attr}_min > {attr}_opt").empty: - logger.error( + ValueError( f"Decision network have {name} with {attr}_min > {attr}_opt. " "This may lead to unexpected results." f"These assets are: {deci.query(f'{attr}_min > {attr}_opt').index.tolist()}" ) + _idx = deci.query(f"{attr}_min > {attr}_opt").index + deci.loc[_idx, attr + "_opt"] = deci.loc[_idx, attr + "_min"] + new.loc[common_i, attr] = deci.loc[common_i, attr] new.loc[common_i, attr + "_opt"] = deci.loc[common_i, attr + "_opt"] new.loc[common_i, attr + "_min"] = deci.loc[common_i, attr + "_min"] @@ -82,7 +92,6 @@ def fix_capacities(realization, decision): "shipping oil", "kerosene for aviation", "agriculture machinery oil", - # "urban central water pits charger", # This should have no effect on results since e_nom of the water pits store remains fixed and the energy to power ratio constraints force them to be the same, however it may avoid infeasibilitiesl ] # TODO double check if these are all needed essential_links = [ @@ -96,14 +105,22 @@ def fix_capacities(realization, decision): "methanolisation", "rural gas boiler", "urban decentral gas boiler", + "urban central gas CHP", + # For 2035 + "rural biomass boiler", + "urban decentral biomass boiler", + "DAC", ] links_to_free = virtual_links + essential_links + bottleneck_links - _idx = new.carrier.isin(links_to_free).index.intersection(extendable_i) + _idx = new.loc[new.carrier.isin(links_to_free)].index.intersection( + extendable_i + ) new.loc[_idx, "p_nom_extendable"] = True # For essential links and bottleneck links allow more, but not less - _idx = new.carrier.isin( - essential_links + bottleneck_links - ).index.intersection(extendable_i) + links_to_limit = essential_links + bottleneck_links + _idx = new.loc[new.carrier.isin(links_to_limit)].index.intersection( + extendable_i + ) new.loc[_idx, "p_nom_min"] = new.loc[_idx, "p_nom_opt"] if name == "generators": @@ -119,37 +136,32 @@ def fix_capacities(realization, decision): "rural heat vent", "urban decentral heat vent", ] - _idx = new.carrier.isin(fuels + vents).index.intersection(extendable_i) + _idx = new.loc[new.carrier.isin(fuels + vents)].index.intersection( + extendable_i + ) new.loc[_idx, "p_nom_extendable"] = True # For fuels allow more, but not less - _idx = new.carrier.isin(fuels).index.intersection(extendable_i) + _idx = new.loc[new.carrier.isin(fuels)].index.intersection(extendable_i) new.loc[_idx, "p_nom_min"] = new.loc[_idx, "p_nom_opt"] if name == "stores": # there is only one co2 atmosphere store which is always extendable, hence no intersection with extendable_i needed - new.loc[new.carrier == "co2", "e_nom_extendable"] = True + _idx = new.query("carrier == 'co2'").index + new.loc[_idx, "e_nom_extendable"] = True # Just making sure that the defaults are set correctly - assert (new.loc[new.carrier == "co2", "e_nom_min"] == 0).all() - assert (new.loc[new.carrier == "co2", "e_nom_max"] == np.inf).all() - assert (new.loc[new.carrier == "co2", "e_nom"] == 0).all() + assert (new.loc[_idx, "e_nom_min"] == 0).all() + assert (new.loc[_idx, "e_nom_max"] == np.inf).all() + assert (new.loc[_idx, "e_nom"] == 0).all() # TODO double check if these are all needed co2_stores = ["co2 stored", "co2 sequestered"] - _idx = new.carrier.isin(co2_stores).index.intersection(extendable_i) - new.loc[_idx, "e_nom_extendable"] = True - # Allow less, but not more - new.loc[_idx, "e_nom_max"] = new.loc[_idx, "e_nom_opt"] - - _idx = new.carrier.isin(["urban central water pits"]).index.intersection( + _idx = 
new.loc[new.carrier.isin(co2_stores)].index.intersection( extendable_i ) - new.loc[_idx, "e_nom_extendable"] = ( - True # This should have no effect on results as long as p_nom of the water pits charger remains fixed and the energy to power ratio constraints force them to be the same, however it may avoid infeasibilities - ) - new.loc[_idx, "e_nom_min"] = 0 - new.loc[_idx, "e_nom_max"] = np.inf - new.loc[_idx, "e_nom"] = 0 + new.loc[_idx, "e_nom_extendable"] = True + # TODO this should probably be active - Allow less, but not more + # new.loc[_idx, "e_nom_max"] = new.loc[_idx, "e_nom_opt"] return n @@ -162,8 +174,8 @@ def fix_capacities(realization, decision): opts="", sector_opts="none", planning_horizons="2030", - decision="KN2045_Mix", - run="LowDemand", + decision="LowDemand", + run="AriadneDemand", ) configure_logging(snakemake) @@ -174,15 +186,6 @@ def fix_capacities(realization, decision): np.random.seed(solve_opts.get("seed", 123)) - # if snakemake.input.realization == snakemake.input.decision: - # import os - # import sys - - # src = os.path.abspath(snakemake.input.realization) - # dst = os.path.abspath(snakemake.output.regret_network) - # os.symlink(src, dst) - # sys.exit(0) - logger.info("Loading realization and decision networks") import pathlib @@ -199,7 +202,10 @@ def fix_capacities(realization, decision): ) n = fix_capacities(realization, decision) - + # TODO remove this attempt at a hotfix + n.links.loc[n.links.carrier == "methanolisation", "p_min_pu"] = 0.0 + # n.links.loc[n.links.carrier.str.contains('vent'), 'p_max_pu'] = 1.0 + n_pre = n.copy() with memory_logger( filename=getattr(snakemake.log, "memory", None), interval=logging_frequency ) as mem: From 6e718cf6a2e146b033d6d3cabfea53ce8f149e55 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Thu, 31 Jul 2025 13:26:27 +0200 Subject: [PATCH 051/202] exclude DC projects from adjustements --- scripts/pypsa-de/solve_regret.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/scripts/pypsa-de/solve_regret.py b/scripts/pypsa-de/solve_regret.py index 98686ad62..885c212ad 100644 --- a/scripts/pypsa-de/solve_regret.py +++ b/scripts/pypsa-de/solve_regret.py @@ -54,10 +54,19 @@ def fix_capacities(realization, decision): f"Decision network have {name} with {attr}_opt > {attr}_max. " f"These assets are: {deci.query(f'{attr}_opt > {attr}_max').index.tolist()}" ) - _idx = deci.query(f"{attr}_opt > {attr}_max").index + _idx = deci.query( + f"{attr}_opt > {attr}_max and not (carrier == 'DC' and build_year > 0)" + ).index deci.loc[_idx, attr + "_opt"] = deci.loc[_idx, attr + "_max"] logger.warning( - f"Setting {name} with {attr}_opt > {attr}_max to {attr}_max." + f"Setting {attr}_opt > {attr}_max to {attr}_max for indices: {_idx.tolist()}." + ) + _idx = deci.query( + f"{attr}_opt > {attr}_max and (carrier == 'DC' and build_year > 0)" + ).index + deci.loc[_idx, attr + "_max"] = deci.loc[_idx, attr + "_opt"] + logger.warning( + f"Setting {attr}_max = {attr}_opt for indices: {_idx.tolist()}." 
) if not deci.query(f"{attr}_min > {attr}_opt").empty: From 9992e7e74ad656620b954bf80bbf6cd9039ea279 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 1 Aug 2025 09:11:18 +0200 Subject: [PATCH 052/202] make plotting script more flexible --- Snakefile | 2 -- scripts/pypsa-de/plot_scenario_comparison.py | 18 +++++++++++++----- 2 files changed, 13 insertions(+), 7 deletions(-) diff --git a/Snakefile b/Snakefile index cece74fe3..887aca66a 100644 --- a/Snakefile +++ b/Snakefile @@ -1080,8 +1080,6 @@ rule regret_all: rule regret_all_variables: - params: - output_dir="results/" + config["run"]["prefix"] + "/regret_comparison/", input: exported_variables=expand( RESULTS + "regret_variables/regret_variables_{decision}_full.xlsx", diff --git a/scripts/pypsa-de/plot_scenario_comparison.py b/scripts/pypsa-de/plot_scenario_comparison.py index 7e9f0a79d..a0bab12ba 100644 --- a/scripts/pypsa-de/plot_scenario_comparison.py +++ b/scripts/pypsa-de/plot_scenario_comparison.py @@ -9,14 +9,14 @@ from scripts._helpers import mock_snakemake -def scenario_plot(df, var): +def scenario_plot(df, output_dir, var): unit = df._get_label_or_level_values("Unit")[0] if var.startswith("Investment"): unit = "billion EUR2020/yr" df = df.droplevel("Unit") ax = df.T.plot(xlabel="years", ylabel=str(unit), title=str(var)) var = var.replace("|", "-").replace("\\", "-").replace(" ", "-").replace("/", "-") - ax.figure.savefig(snakemake.params.output_dir + var, bbox_inches="tight", dpi=100) + ax.figure.savefig(f"{output_dir}/{var}.png", bbox_inches="tight", dpi=100) plt.close(ax.figure) @@ -43,8 +43,16 @@ def scenario_plot(df, var): df = pd.concat(dfs, axis=0) prefix = snakemake.config["run"]["prefix"] - if not os.path.exists(f"results/{prefix}/scenario_comparison/"): - os.mkdir(f"results/{prefix}/scenario_comparison/") + root_dir = snakemake.input[0][: snakemake.input[0].find(prefix)] + comparison_dir = ( + "regret_comparison/" + if "regret_variables" in snakemake.input[0] + else "scenario_comparison/" + ) + output_dir = root_dir + prefix + "/" + comparison_dir + + if not os.path.exists(output_dir): + os.makedirs(output_dir) for var in df._get_label_or_level_values("Variable"): - scenario_plot(df.xs(var, level="Variable"), var) + scenario_plot(df.xs(var, level="Variable"), output_dir, var) From 8ee4a914bc8cb7cd6bf00145760c16ab8226308b Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 1 Aug 2025 16:12:34 +0200 Subject: [PATCH 053/202] fix heat pump limit --- scripts/pypsa-de/additional_functionality.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/scripts/pypsa-de/additional_functionality.py b/scripts/pypsa-de/additional_functionality.py index 407909e90..472f07eaa 100644 --- a/scripts/pypsa-de/additional_functionality.py +++ b/scripts/pypsa-de/additional_functionality.py @@ -541,7 +541,9 @@ def add_national_co2_budgets(n, snakemake, national_co2_budgets, investment_year def add_decentral_heat_pump_budgets(n, decentral_heat_pump_budgets, investment_year): carriers = [ - "rural air heat pumprural ground heat pumpurban decentral air heat pump" + "rural air heat pump", + "rural ground heat pump", + "urban decentral air heat pump", ] heat_pumps = n.links.index[n.links.carrier.isin(carriers)] @@ -580,6 +582,8 @@ def add_decentral_heat_pump_budgets(n, decentral_heat_pump_budgets, investment_y ).sum() ) + lhs = sum(lhs) + cname = f"decentral_heat_pump_limit-{ct}" n.model.add_constraints( lhs <= limit, From 5e15e01a63267b4c6ee87825f83bfdbf0ad5c2a9 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: 
Fri, 1 Aug 2025 16:12:47 +0200 Subject: [PATCH 054/202] allow line expansion again --- config/scenarios.manual.yaml | 4 ---- 1 file changed, 4 deletions(-) diff --git a/config/scenarios.manual.yaml b/config/scenarios.manual.yaml index aa4b72e58..f1facbb46 100644 --- a/config/scenarios.manual.yaml +++ b/config/scenarios.manual.yaml @@ -74,8 +74,6 @@ KN2045_Mix: # dient als Referenzszenario in der Familie der Ariadne-Szenarien AriadneDemand: - electricity: - transmission_limit: v1 LowDemand: iiasa_database: @@ -103,8 +101,6 @@ LowDemand: H2 electrolysis: DE: 2030: 0 - electricity: - transmission_limit: v1 KN2045_Elek: # Fokus auf dem Hochlauf von Technologien zur direkten Elektrifizierung der Sektoren From ef8afdbee1b77a97529072b5b0dd483d426310dc Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 1 Aug 2025 16:16:47 +0200 Subject: [PATCH 055/202] add constraints from additional_functionality and prepare_network, do not overwrite p_nom of extendable assets --- Snakefile | 4 +- scripts/pypsa-de/solve_regret.py | 153 +++++++++++++++++++------------ 2 files changed, 97 insertions(+), 60 deletions(-) diff --git a/Snakefile b/Snakefile index 887aca66a..22e64357a 100644 --- a/Snakefile +++ b/Snakefile @@ -983,13 +983,15 @@ rule solve_regret: co2_sequestration_potential=config_provider( "sector", "co2_sequestration_potential", default=200 ), - custom_extra_functionality="data/custom_extra_functionality.py", + custom_extra_functionality=input_custom_extra_functionality, energy_year=config_provider("energy", "energy_totals_year"), input: decision=RESULTS.replace("{run}", "{decision}") + "networks/base_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", realization=RESULTS + "networks/base_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", + co2_totals_name=resources("co2_totals.csv"), + energy_totals=resources("energy_totals.csv"), output: regret_network=RESULTS + "regret_networks/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", diff --git a/scripts/pypsa-de/solve_regret.py b/scripts/pypsa-de/solve_regret.py index 885c212ad..cddc507db 100644 --- a/scripts/pypsa-de/solve_regret.py +++ b/scripts/pypsa-de/solve_regret.py @@ -10,11 +10,27 @@ set_scenario_config, update_config_from_wildcards, ) -from scripts.solve_network import solve_network +from scripts.solve_network import prepare_network, solve_network logger = logging.getLogger(__name__) +def check_matching_components(new, deci, name, attr): + if not new.index.symmetric_difference(deci.index).empty: + logger.error( + f"Indices of {name} in realization and decision networks do not match. " + "This may lead to unexpected results." + f"Offending indices are: {new.index.symmetric_difference(deci.index).tolist()}" + ) + assert ( + new.query(f"{attr}_extendable") + .index.symmetric_difference(deci.query(f"{attr}_extendable").index) + .empty + ), ( + f"Indices of {name} with {attr}_extendable in realization and decision networks do not match." + ) + + def fix_capacities(realization, decision): n = realization.copy() @@ -29,55 +45,28 @@ def fix_capacities(realization, decision): new = getattr(n, name) deci = getattr(decision, name) - if not new.index.symmetric_difference(deci.index).empty: - logger.error( - f"Indices of {name} in realization and decision networks do not match. " - "This may lead to unexpected results." 
- ) - assert ( - new.query(f"{attr}_extendable") - .index.symmetric_difference(deci.query(f"{attr}_extendable").index) - .empty - ), ( - f"Indices of {name} with {attr}_extendable in realization and decision networks do not match." - ) - # raise ValueError("Indices of realization and decision networks do not match.") - - common_i = new.index.intersection(deci.index).difference( - new.query("carrier == 'BEV charger'").index - ) # Exclude BEV chargers from modification because the p_nom are taken from UBA - - extendable_i = new.query(f"{attr}_extendable").index - - if not deci.query(f"{attr}_opt > {attr}_max").empty: + greater = deci.query(f"{attr}_opt > {attr}_max") + if not greater.empty: logger.warning( f"Decision network have {name} with {attr}_opt > {attr}_max. " - f"These assets are: {deci.query(f'{attr}_opt > {attr}_max').index.tolist()}" - ) - _idx = deci.query( - f"{attr}_opt > {attr}_max and not (carrier == 'DC' and build_year > 0)" - ).index - deci.loc[_idx, attr + "_opt"] = deci.loc[_idx, attr + "_max"] - logger.warning( - f"Setting {attr}_opt > {attr}_max to {attr}_max for indices: {_idx.tolist()}." + f"These assets are: {greater.index.tolist()}" ) - _idx = deci.query( - f"{attr}_opt > {attr}_max and (carrier == 'DC' and build_year > 0)" - ).index - deci.loc[_idx, attr + "_max"] = deci.loc[_idx, attr + "_opt"] - logger.warning( - f"Setting {attr}_max = {attr}_opt for indices: {_idx.tolist()}." - ) - - if not deci.query(f"{attr}_min > {attr}_opt").empty: - ValueError( + smaller = deci.query(f"{attr}_min > {attr}_opt") + if not smaller.empty: + logger.error( f"Decision network have {name} with {attr}_min > {attr}_opt. " "This may lead to unexpected results." - f"These assets are: {deci.query(f'{attr}_min > {attr}_opt').index.tolist()}" + f"These assets are: {smaller.index.tolist()}" ) - _idx = deci.query(f"{attr}_min > {attr}_opt").index - deci.loc[_idx, attr + "_opt"] = deci.loc[_idx, attr + "_min"] + check_matching_components(new, deci, name, attr) + + common_i = new.query("carrier != 'BEV charger'").index.intersection( + deci.query("carrier != 'BEV charger'").index + ) # Exclude BEV chargers from modification because the p_nom are taken from UBA + extendable_i = new.query(f"{attr}_extendable").index + + # Copy all assets (maybe this should only be done for current planning horizon) new.loc[common_i, attr] = deci.loc[common_i, attr] new.loc[common_i, attr + "_opt"] = deci.loc[common_i, attr + "_opt"] new.loc[common_i, attr + "_min"] = deci.loc[common_i, attr + "_min"] @@ -85,6 +74,8 @@ def fix_capacities(realization, decision): # Ideally nothing should be extendable new.loc[extendable_i, attr + "_extendable"] = False + # In this case p_nom = p_nom_opt + new.loc[extendable_i, attr] = new.loc[extendable_i, attr + "_opt"] if name == "links": virtual_links = [ @@ -106,7 +97,7 @@ def fix_capacities(realization, decision): essential_links = [ "SMR", "waste CHP", - "biogas to gas", + # "biogas to gas", ] # TODO double check if these are all needed bottleneck_links = [ @@ -125,6 +116,7 @@ def fix_capacities(realization, decision): extendable_i ) new.loc[_idx, "p_nom_extendable"] = True + # For essential links and bottleneck links allow more, but not less links_to_limit = essential_links + bottleneck_links _idx = new.loc[new.carrier.isin(links_to_limit)].index.intersection( @@ -132,6 +124,20 @@ def fix_capacities(realization, decision): ) new.loc[_idx, "p_nom_min"] = new.loc[_idx, "p_nom_opt"] + # For DC fix everything to p_nom_opt + _idx = new.loc[new.carrier == 
"DC"].index.intersection(extendable_i) + new.loc[_idx, "p_nom_min"] = new.loc[_idx, "p_nom_opt"] + new.loc[_idx, "p_nom_max"] = new.loc[_idx, "p_nom_opt"] + new.loc[_idx, "p_nom_extendable"] = True + + if name == "lines": + # For lines fix everything to s_nom_opt + _idx = new.index.intersection(extendable_i) + + new.loc[_idx, "s_nom_min"] = new.loc[_idx, "s_nom_opt"] + new.loc[_idx, "s_nom_max"] = new.loc[_idx, "s_nom_opt"] + new.loc[_idx, "s_nom_extendable"] = True + if name == "generators": fuels = [ "lignite", @@ -149,29 +155,32 @@ def fix_capacities(realization, decision): extendable_i ) new.loc[_idx, "p_nom_extendable"] = True - # For fuels allow more, but not less - _idx = new.loc[new.carrier.isin(fuels)].index.intersection(extendable_i) - new.loc[_idx, "p_nom_min"] = new.loc[_idx, "p_nom_opt"] if name == "stores": - # there is only one co2 atmosphere store which is always extendable, hence no intersection with extendable_i needed + # there is only one co2 atmosphere store which should always be extendable, hence no intersection with extendable_i needed _idx = new.query("carrier == 'co2'").index new.loc[_idx, "e_nom_extendable"] = True - # Just making sure that the defaults are set correctly - assert (new.loc[_idx, "e_nom_min"] == 0).all() - assert (new.loc[_idx, "e_nom_max"] == np.inf).all() - assert (new.loc[_idx, "e_nom"] == 0).all() - # TODO double check if these are all needed co2_stores = ["co2 stored", "co2 sequestered"] _idx = new.loc[new.carrier.isin(co2_stores)].index.intersection( extendable_i ) new.loc[_idx, "e_nom_extendable"] = True - # TODO this should probably be active - Allow less, but not more - # new.loc[_idx, "e_nom_max"] = new.loc[_idx, "e_nom_opt"] + # Allow less, but not more + if ( + min( + decision.global_constraints.mu.get("co2_sequestration_limit"), + realization.global_constraints.mu.get("co2_sequestration_limit"), + ) + < 1 + ): + new.loc[_idx, "e_nom_max"] = new.loc[_idx, "e_nom_opt"] + + # Above several assets were switch to extendable again, for these the p_nom value is restored, i.e. 
set to the value from the decision network + _idx = new.query(f"{attr}_extendable").index.intersection(extendable_i) + new.loc[_idx, attr] = deci.loc[_idx, attr] return n @@ -184,7 +193,7 @@ def fix_capacities(realization, decision): sector_opts="none", planning_horizons="2030", decision="LowDemand", - run="AriadneDemand", + run="LowDemand", ) configure_logging(snakemake) @@ -211,10 +220,36 @@ def fix_capacities(realization, decision): ) n = fix_capacities(realization, decision) - # TODO remove this attempt at a hotfix - n.links.loc[n.links.carrier == "methanolisation", "p_min_pu"] = 0.0 - # n.links.loc[n.links.carrier.str.contains('vent'), 'p_max_pu'] = 1.0 + n_pre = n.copy() + + logger.info("Adding CO2 removal service outside DE.") + n.add("Carrier", "CO2 removal service") + + # pypsa calculates with CO2-tonnes-equivalent not single units of CO2 -> marginal cost in €/tCO2 + + n.add( + "Generator", + "CO2 removal service", + bus="co2 atmosphere", + carrier="CO2 removal service", + p_nom=1e6, + marginal_cost=350, + p_nom_extendable=False, + p_min_pu=0, + p_max_pu=1.0, + sign=-1, + ) + + prepare_network( + n, + solve_opts=snakemake.params.solving["options"], + foresight=snakemake.params.foresight, + planning_horizons=planning_horizons, + co2_sequestration_potential=snakemake.params["co2_sequestration_potential"], + limit_max_growth=snakemake.params.get("sector", {}).get("limit_max_growth"), + ) + with memory_logger( filename=getattr(snakemake.log, "memory", None), interval=logging_frequency ) as mem: From d3c29beb769561c6d3d57a9ab7ff076745862035 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Thu, 7 Aug 2025 10:15:20 +0200 Subject: [PATCH 056/202] fix heat pump constraint --- scripts/pypsa-de/additional_functionality.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/scripts/pypsa-de/additional_functionality.py b/scripts/pypsa-de/additional_functionality.py index 472f07eaa..28ee5f733 100644 --- a/scripts/pypsa-de/additional_functionality.py +++ b/scripts/pypsa-de/additional_functionality.py @@ -544,6 +544,8 @@ def add_decentral_heat_pump_budgets(n, decentral_heat_pump_budgets, investment_y "rural air heat pump", "rural ground heat pump", "urban decentral air heat pump", + "rural resistive heater", + "urban decentral resistive heater", ] heat_pumps = n.links.index[n.links.carrier.isin(carriers)] @@ -573,6 +575,7 @@ def add_decentral_heat_pump_budgets(n, decentral_heat_pump_budgets, investment_y logger.info( f"Limiting decentral heat pump electricity consumption in country {ct} to {decentral_heat_pump_budgets[ct][investment_year]:.1%} MWh.", ) + heat_pumps = heat_pumps[heat_pumps.str.startswith(ct)] lhs = [] @@ -585,16 +588,16 @@ def add_decentral_heat_pump_budgets(n, decentral_heat_pump_budgets, investment_y lhs = sum(lhs) cname = f"decentral_heat_pump_limit-{ct}" - n.model.add_constraints( - lhs <= limit, - name=f"GlobalConstraint-{cname}", - ) if cname in n.global_constraints.index: logger.warning( f"Global constraint {cname} already exists. Dropping and adding it again." 
) n.global_constraints.drop(cname, inplace=True) + n.model.add_constraints( + lhs <= limit, + name=f"GlobalConstraint-{cname}", + ) n.add( "GlobalConstraint", cname, From 40dedb7229a2aed5e798513eada9f88c89126549 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Thu, 7 Aug 2025 10:16:37 +0200 Subject: [PATCH 057/202] don't write some constraints in regret runs --- Snakefile | 1 + scripts/pypsa-de/additional_functionality.py | 18 ++++++++-- scripts/solve_network.py | 35 +++++++++++++++----- 3 files changed, 43 insertions(+), 11 deletions(-) diff --git a/Snakefile b/Snakefile index 22e64357a..265b8338e 100644 --- a/Snakefile +++ b/Snakefile @@ -985,6 +985,7 @@ rule solve_regret: ), custom_extra_functionality=input_custom_extra_functionality, energy_year=config_provider("energy", "energy_totals_year"), + #regret_run=True, input: decision=RESULTS.replace("{run}", "{decision}") + "networks/base_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", diff --git a/scripts/pypsa-de/additional_functionality.py b/scripts/pypsa-de/additional_functionality.py index 28ee5f733..abe642750 100644 --- a/scripts/pypsa-de/additional_functionality.py +++ b/scripts/pypsa-de/additional_functionality.py @@ -9,7 +9,9 @@ logger = logging.getLogger(__name__) -def add_capacity_limits(n, investment_year, limits_capacity, sense="maximum"): +def add_capacity_limits( + n, investment_year, limits_capacity, snakemake, sense="maximum" +): for c in n.iterate_components(limits_capacity): logger.info(f"Adding {sense} constraints for {c.list_name}") @@ -56,6 +58,16 @@ def add_capacity_limits(n, investment_year, limits_capacity, sense="maximum"): cname = f"capacity_{sense}-{ct}-{c.name}-{carrier.replace(' ', '-')}" + if snakemake.params.get("regret_run"): + logger.info( + f"Skipping capacity limit adjustment for {c.name} {carrier} with planning horizons {investment_year}, because of regret run." + ) + if cname in n.global_constraints.index: + logger.warning( + f"Global constraint {cname} already exists. Dropping it." + ) + n.global_constraints.drop(cname, inplace=True) + continue if cname in n.global_constraints.index: logger.warning( f"Global constraint {cname} already exists. Dropping and adding it again." @@ -809,11 +821,11 @@ def additional_functionality(n, snapshots, snakemake): constraints = snakemake.params.solving["constraints"] add_capacity_limits( - n, investment_year, constraints["limits_capacity_min"], "minimum" + n, investment_year, constraints["limits_capacity_min"], snakemake, "minimum" ) add_capacity_limits( - n, investment_year, constraints["limits_capacity_max"], "maximum" + n, investment_year, constraints["limits_capacity_max"], snakemake, "maximum" ) add_power_limits(n, investment_year, constraints["limits_power_max"]) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 28eb1b5c1..75cd7cb44 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -134,7 +134,9 @@ def check_p_min_p_max(p_nom_max): n.buses.loc[bus, name] = df_carrier.p_nom_max.values -def add_land_use_constraint(n: pypsa.Network, planning_horizons: str) -> None: +def add_land_use_constraint( + n: pypsa.Network, planning_horizons: str, regret_run=False +) -> None: """ Add land use constraints for renewable energy potential. 
@@ -161,6 +163,11 @@ def add_land_use_constraint(n: pypsa.Network, planning_horizons: str) -> None: "offwind-dc", "offwind-float", ]: + if regret_run: + logger.info( + f"Skipping land use constraint adjustment for {carrier} with planning horizons {planning_horizons}, because of regret run." + ) + continue ext_i = (n.generators.carrier == carrier) & ~n.generators.p_nom_extendable grouper = n.generators.loc[ext_i].index.str.replace( f" {carrier}.*$", "", regex=True @@ -185,7 +192,9 @@ def add_land_use_constraint(n: pypsa.Network, planning_horizons: str) -> None: n.generators["p_nom_max"] = n.generators["p_nom_max"].clip(lower=0) -def add_solar_potential_constraints(n: pypsa.Network, config: dict) -> None: +def add_solar_potential_constraints( + n: pypsa.Network, config: dict, regret_run=False +) -> None: """ Add constraint to make sure the sum capacity of all solar technologies (fixed, tracking, ets. ) is below the region potential. @@ -203,6 +212,12 @@ def add_solar_potential_constraints(n: pypsa.Network, config: dict) -> None: rename = {} if PYPSA_V1 else {"Generator-ext": "Generator"} solar_carriers = ["solar", "solar-hsat"] + + if regret_run: + logger.info( + "Skipping solar potential constraint adjustment, because of regret run." + ) + return solar = n.generators[ n.generators.carrier.isin(solar_carriers) & n.generators.p_nom_extendable ].index @@ -424,6 +439,7 @@ def prepare_network( planning_horizons: str | None, co2_sequestration_potential: dict[str, float], limit_max_growth: dict[str, Any] | None = None, + regret_run: bool = False, ) -> None: """ Prepare network with various constraints and modifications. @@ -513,7 +529,7 @@ def prepare_network( n.snapshot_weightings[:] = 8760.0 / nhours if foresight == "myopic": - add_land_use_constraint(n, planning_horizons) + add_land_use_constraint(n, planning_horizons, regret_run) if foresight == "perfect": add_land_use_constraint_perfect(n) @@ -1281,7 +1297,9 @@ def extra_functionality( ) and {"solar-hsat", "solar"}.issubset( config["electricity"]["extendable_carriers"]["Generator"] ): - add_solar_potential_constraints(n, config) + add_solar_potential_constraints( + n, config, snakemake.params.get("regret_run", False) + ) if n.config.get("sector", {}).get("tes", False): if n.buses.index.str.contains( @@ -1401,11 +1419,12 @@ def extra_functionality( from scripts._helpers import mock_snakemake snakemake = mock_snakemake( - "solve_sector_network", + "solve_sector_network_myopic", + simpl="", + clusters=27, opts="", - clusters="5", - configfiles="config/test/config.overnight.yaml", - sector_opts="", + sector_opts="none", + run="LowDemand", planning_horizons="2030", ) configure_logging(snakemake) From f8f3a779322362de4f1cdd2abfe0b8d6bcb7c4c1 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Thu, 7 Aug 2025 10:17:47 +0200 Subject: [PATCH 058/202] turn off line losses, line expansion and noisy costs for better reproducibility --- config/scenarios.manual.yaml | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/config/scenarios.manual.yaml b/config/scenarios.manual.yaml index f1facbb46..85d437a14 100644 --- a/config/scenarios.manual.yaml +++ b/config/scenarios.manual.yaml @@ -74,13 +74,24 @@ KN2045_Mix: # dient als Referenzszenario in der Familie der Ariadne-Szenarien AriadneDemand: + solving: + options: + noisy_costs: false + electricity: + transmission_losses: 0 + transmission_limit: v1 LowDemand: iiasa_database: reference_scenario: ExPol # e.g., CO2 emissions from REMIND will follow ExPol uba_for_industry: true uba_for_mobility: true + 
electricity: + transmission_limit: v1 solving: + options: + transmission_losses: 0 + noisy_costs: false constraints: decentral_heat_pump_budgets: DE: # UBA MWMS, Projektionsbericht 2025, Abbildung 48 From 9fef0c6e7610512cfc6e200ebab66fd8f2ea1032 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Thu, 7 Aug 2025 10:18:02 +0200 Subject: [PATCH 059/202] big refactor --- scripts/pypsa-de/solve_regret.py | 241 ++++++++++++++++++++++--------- 1 file changed, 173 insertions(+), 68 deletions(-) diff --git a/scripts/pypsa-de/solve_regret.py b/scripts/pypsa-de/solve_regret.py index cddc507db..c55a0dbf5 100644 --- a/scripts/pypsa-de/solve_regret.py +++ b/scripts/pypsa-de/solve_regret.py @@ -31,6 +31,43 @@ def check_matching_components(new, deci, name, attr): ) +def fix_capacities_strict(realization, decision): + n = realization.copy() + + nominal_attrs = { + "generators": "p_nom", + "lines": "s_nom", + "links": "p_nom", + "stores": "e_nom", + } + + for name, attr in nominal_attrs.items(): + new = getattr(n, name) + deci = getattr(decision, name) + + check_matching_components(new, deci, name, attr) + + common_i = new.query("carrier != 'BEV charger'").index.intersection( + deci.query("carrier != 'BEV charger'").index + ) # Exclude BEV chargers from modification because the p_nom are taken from UBA + extendable_i = new.query(f"{attr}_extendable").index + + # Copy all assets (maybe this should only be done for current planning horizon) + new.loc[common_i, attr] = deci.loc[common_i, attr] + new.loc[common_i, attr + "_opt"] = deci.loc[common_i, attr + "_opt"] + new.loc[common_i, attr + "_min"] = deci.loc[common_i, attr + "_min"] + new.loc[common_i, attr + "_max"] = deci.loc[common_i, attr + "_max"] + + # Fix everything... + new.loc[extendable_i, attr + "_extendable"] = False + new.loc[extendable_i, attr] = new.loc[extendable_i, attr + "_opt"] + if name == "stores": + # there is only one co2 atmosphere store which should always be extendable, hence no intersection with extendable_i needed + _idx = new.query("carrier == 'co2'").index + new.loc[_idx, "e_nom_extendable"] = True + return n + + def fix_capacities(realization, decision): n = realization.copy() @@ -47,7 +84,7 @@ def fix_capacities(realization, decision): greater = deci.query(f"{attr}_opt > {attr}_max") if not greater.empty: - logger.warning( + logger.error( f"Decision network have {name} with {attr}_opt > {attr}_max. " f"These assets are: {greater.index.tolist()}" ) @@ -72,9 +109,8 @@ def fix_capacities(realization, decision): new.loc[common_i, attr + "_min"] = deci.loc[common_i, attr + "_min"] new.loc[common_i, attr + "_max"] = deci.loc[common_i, attr + "_max"] - # Ideally nothing should be extendable + # Fix everything... 
new.loc[extendable_i, attr + "_extendable"] = False - # In this case p_nom = p_nom_opt new.loc[extendable_i, attr] = new.loc[extendable_i, attr + "_opt"] if name == "links": @@ -92,51 +128,57 @@ def fix_capacities(realization, decision): "shipping oil", "kerosene for aviation", "agriculture machinery oil", + # "co2 sequestered", ] - # TODO double check if these are all needed - essential_links = [ - "SMR", - "waste CHP", - # "biogas to gas", - ] - # TODO double check if these are all needed + + _idx = new.loc[new.carrier.isin(virtual_links)].index.intersection( + extendable_i + ) + # Virtual links are free + new.loc[_idx, "p_nom_extendable"] = True + bottleneck_links = [ - "electricity distribution grid", - "methanolisation", + "SMR", "rural gas boiler", "urban decentral gas boiler", - "urban central gas CHP", # For 2035 "rural biomass boiler", "urban decentral biomass boiler", - "DAC", ] - links_to_free = virtual_links + essential_links + bottleneck_links - _idx = new.loc[new.carrier.isin(links_to_free)].index.intersection( + # Bottleneck links can be extended, but not reduced + _idx = new.loc[new.carrier.isin(bottleneck_links)].index.intersection( extendable_i ) new.loc[_idx, "p_nom_extendable"] = True + new.loc[_idx, "p_nom_min"] = deci.loc[_idx, "p_nom_opt"] - # For essential links and bottleneck links allow more, but not less - links_to_limit = essential_links + bottleneck_links - _idx = new.loc[new.carrier.isin(links_to_limit)].index.intersection( - extendable_i - ) - new.loc[_idx, "p_nom_min"] = new.loc[_idx, "p_nom_opt"] + # No limits for waste burning outside DE + _idx = new.query( + "carrier == 'HVC to air' and not index.str.startswith('DE')" + ).index.intersection(extendable_i) + new.loc[_idx, "p_nom_extendable"] = True + new.loc[_idx, "p_nom_min"] = deci.loc[_idx, "p_nom_opt"] + # Tight limits inside DE + _idx = new.query( + "carrier == 'waste CHP' and index.str.startswith('DE')" + ).index.intersection(extendable_i) + new.loc[_idx, "p_nom_extendable"] = True + new.loc[_idx, "p_nom_min"] = deci.loc[_idx, "p_nom_opt"] + # new.loc[_idx, "p_nom_max"] = deci.loc[_idx, "p_nom_opt"] + 0.1 - # For DC fix everything to p_nom_opt - _idx = new.loc[new.carrier == "DC"].index.intersection(extendable_i) - new.loc[_idx, "p_nom_min"] = new.loc[_idx, "p_nom_opt"] - new.loc[_idx, "p_nom_max"] = new.loc[_idx, "p_nom_opt"] + _idx = new.query( + "carrier == 'electricity distribution grid'" + ).index.intersection(extendable_i) new.loc[_idx, "p_nom_extendable"] = True + new.loc[_idx, "p_nom_min"] = deci.loc[_idx, "p_nom_opt"] + # new.loc[_idx, "p_nom_max"] = deci.loc[_idx, "p_nom_opt"] + 0.1 - if name == "lines": - # For lines fix everything to s_nom_opt - _idx = new.index.intersection(extendable_i) + # if name == "lines": + # # For lines allow only a minimal extension, to avoid powerflow constraint issues + # new.loc[extendable_i, "s_nom_extendable"] = True - new.loc[_idx, "s_nom_min"] = new.loc[_idx, "s_nom_opt"] - new.loc[_idx, "s_nom_max"] = new.loc[_idx, "s_nom_opt"] - new.loc[_idx, "s_nom_extendable"] = True + # new.loc[extendable_i, "s_nom_min"] = deci.loc[extendable_i, "s_nom_opt"] + # new.loc[extendable_i, "s_nom_max"] = deci.loc[extendable_i, "s_nom_opt"] + 10 if name == "generators": fuels = [ @@ -161,24 +203,19 @@ def fix_capacities(realization, decision): _idx = new.query("carrier == 'co2'").index new.loc[_idx, "e_nom_extendable"] = True - # TODO double check if these are all needed - co2_stores = ["co2 stored", "co2 sequestered"] - _idx = 
new.loc[new.carrier.isin(co2_stores)].index.intersection( - extendable_i - ) - new.loc[_idx, "e_nom_extendable"] = True + # For co2 sequestered no limits are needed since the global constraints is active + # _idx = new.query("carrier == 'co2 sequestered'").index.intersection( + # extendable_i + # ) + # new.loc[_idx, "e_nom_extendable"] = True + # new.loc[_idx, "e_nom_max"] = deci.loc[_idx, "e_nom_opt"] + 1 - # Allow less, but not more - if ( - min( - decision.global_constraints.mu.get("co2_sequestration_limit"), - realization.global_constraints.mu.get("co2_sequestration_limit"), - ) - < 1 - ): - new.loc[_idx, "e_nom_max"] = new.loc[_idx, "e_nom_opt"] - - # Above several assets were switch to extendable again, for these the p_nom value is restored, i.e. set to the value from the decision network + # _idx = new.query("carrier == 'co2 stored'").index.intersection( + # extendable_i + # ) + # new.loc[_idx, "e_nom_extendable"] = True + + # Above several assets are switched to extendable again, for these the p_nom value is restored, i.e. set to the value from the decision network _idx = new.query(f"{attr}_extendable").index.intersection(extendable_i) new.loc[_idx, attr] = deci.loc[_idx, attr] return n @@ -191,7 +228,7 @@ def fix_capacities(realization, decision): clusters=27, opts="", sector_opts="none", - planning_horizons="2030", + planning_horizons="2035", decision="LowDemand", run="LowDemand", ) @@ -219,27 +256,68 @@ def fix_capacities(realization, decision): "mem_logging_frequency", 30 ) - n = fix_capacities(realization, decision) + n = fix_capacities_strict(realization, decision) n_pre = n.copy() - logger.info("Adding CO2 removal service outside DE.") - n.add("Carrier", "CO2 removal service") - - # pypsa calculates with CO2-tonnes-equivalent not single units of CO2 -> marginal cost in €/tCO2 - - n.add( - "Generator", - "CO2 removal service", - bus="co2 atmosphere", - carrier="CO2 removal service", - p_nom=1e6, - marginal_cost=350, - p_nom_extendable=False, - p_min_pu=0, - p_max_pu=1.0, - sign=-1, - ) + # logger.info("Adding CO2 removal service outside DE.") + # n.add("Carrier", "CO2 removal service") + + # n.add( + # "Generator", + # "CO2 removal service", + # bus="co2 atmosphere", + # carrier="CO2 removal service", + # p_nom=1e6, + # marginal_cost=1000, # marginal cost in €/tCO2 + # p_nom_extendable=False, + # p_min_pu=0, + # p_max_pu=1.0, + # sign=-1, + # ) + + snakemake.params.solving["options"]["noisy_costs"] = False + + # TODO remove this again + snakemake.params.solving["options"]["load_shedding"] = 100 + + results = {} + crazy = {} + for addon in np.logspace(0, 3, 10): + m = n.copy() + m.global_constraints.loc[["CO2Limit"], "constant"] += addon + + prepare_network( + m, + solve_opts=snakemake.params.solving["options"], + foresight=snakemake.params.foresight, + planning_horizons=planning_horizons, + co2_sequestration_potential=snakemake.params["co2_sequestration_potential"], + limit_max_growth=snakemake.params.get("sector", {}).get("limit_max_growth"), + regret_run=snakemake.params.get("regret_run", False), + ) + with memory_logger( + filename=getattr(snakemake.log, "memory", None), interval=logging_frequency + ) as mem: + solve_network( + m, + config=snakemake.config, + params=snakemake.params, + solving=snakemake.params.solving, + planning_horizons=planning_horizons, + rule_name=snakemake.rule, + log_fn=snakemake.log.solver, + snakemake=snakemake, + ) + logger.info(f"Maximum memory usage: {mem.mem_usage}") + results[addon] = m.global_constraints.loc["CO2Limit", "mu"] + crazy[addon] 
= ( + decision.global_constraints.loc["CO2Limit", "constant"] + - m.global_constraints.loc["CO2Limit", "constant"] + ) + + n.global_constraints.loc[["CO2Limit"], "constant"] += 1000 + # n.global_constraints.loc[["co2_sequestration_limit"], "constant"] -= 100 prepare_network( n, @@ -248,6 +326,7 @@ def fix_capacities(realization, decision): planning_horizons=planning_horizons, co2_sequestration_potential=snakemake.params["co2_sequestration_potential"], limit_max_growth=snakemake.params.get("sector", {}).get("limit_max_growth"), + regret_run=snakemake.params.get("regret_run", False), ) with memory_logger( @@ -267,3 +346,29 @@ def fix_capacities(realization, decision): n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards))) n.export_to_netcdf(snakemake.output.regret_network) + + # logger.info((n.lines.s_nom_opt - decision.lines.s_nom_opt).sort_values()) + + logger.info( + ( + n.links.query("carrier == 'electricity distribution grid'").p_nom_opt + - decision.links.query( + "carrier == 'electricity distribution grid'" + ).p_nom_opt + ).sort_values() + ) + + logger.info( + ( + decision.links.query( + "carrier == 'waste CHP' and index.str.startswith('DE')" + ).p_nom_opt + - n.links.query( + "carrier == 'waste CHP' and index.str.startswith('DE')" + ).p_nom_opt + ).sort_values() + ) + + logger.info( + (decision.global_constraints.mu - n.global_constraints.mu).round().sort_values() + ) From a603769650b94ee84087a0ffcd77b2a7de6e6160 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 13 Aug 2025 09:43:51 +0200 Subject: [PATCH 060/202] swap sign of some load shedding generators --- scripts/solve_network.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 75cd7cb44..8f608a09a 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -493,6 +493,14 @@ def prepare_network( p_nom=1e9, # kW ) + n.generators.loc[ + (n.generators.carrier == "load") + & ( + n.generators.index.str.contains("non-sequestered HVC|process emissions") + ), + "sign", + ] *= -1 + if solve_opts.get("curtailment_mode"): n.add("Carrier", "curtailment", color="#fedfed", nice_name="Curtailment") n.generators_t.p_min_pu = n.generators_t.p_max_pu From fb5a980e05b6816620a20e02d5e4c0b65bae873a Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 13 Aug 2025 09:44:53 +0200 Subject: [PATCH 061/202] add functionality for setting a co2 price wvia a co2 generator --- scripts/pypsa-de/additional_functionality.py | 21 ++++++++++++++ scripts/pypsa-de/export_ariadne_variables.py | 29 ++++++++++++++------ 2 files changed, 41 insertions(+), 9 deletions(-) diff --git a/scripts/pypsa-de/additional_functionality.py b/scripts/pypsa-de/additional_functionality.py index abe642750..5457c32a5 100644 --- a/scripts/pypsa-de/additional_functionality.py +++ b/scripts/pypsa-de/additional_functionality.py @@ -814,6 +814,21 @@ def adapt_nuclear_output(n): ) +def add_empty_co2_atmosphere_store_constraint(n): + """ + Ensures that the CO2 atmosphere store at the last snapshot is empty. + """ + logger.info( + "Adding constraint for empty CO2 atmosphere store at the last snapshot." 
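# [Editor's note, illustration only] These hunks are one half of the price-based CO2
# mechanism: the constraint below empties the atmosphere store at the final snapshot,
# while the export script reads the carbon price from the marginal_cost of a
# "co2 atmosphere" generator whenever no CO2Limit dual exists. A condensed sketch of
# how solve_regret.py wires that generator in (helper name invented; the CO2Limit
# dual mu of the reference run is passed through unchanged and negated on export):
def apply_co2_price_from(n, reference):
    n.add(
        "Generator",
        "co2 atmosphere",
        bus="co2 atmosphere",
        carrier="co2",
        p_nom_extendable=True,
        p_min_pu=-1,  # the generator may only take CO2 off the atmosphere bus
        p_max_pu=0,
        marginal_cost=reference.global_constraints.loc["CO2Limit", "mu"],
    )
    if "CO2Limit" in n.global_constraints.index:
        n.global_constraints.drop("CO2Limit", inplace=True)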
+ ) + cname = "empty_co2_atmosphere_store" + + last_snapshot = n.snapshots.values[-1] + lhs = n.model["Store-e"].loc[last_snapshot, "co2 atmosphere"] + + n.model.add_constraints(lhs == 0, name=cname) + + def additional_functionality(n, snapshots, snakemake): logger.info("Adding Ariadne-specific functionality") @@ -867,3 +882,9 @@ def additional_functionality(n, snapshots, snakemake): if investment_year == 2020: adapt_nuclear_output(n) + + if "co2 atmosphere" in n.generators.index: + logger.warning( + "CO2 atmosphere generator found. Adding constraint for empty CO2 atmosphere store at the last snapshot." + ) + add_empty_co2_atmosphere_store_constraint(n) diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index cb751f816..0e5c03517 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -3381,8 +3381,12 @@ def get_prices(n, region): except KeyError: co2_limit_de = 0 + try: + co2_limit_eu = n.global_constraints.loc["co2_limit-EU", "mu"] + except KeyError: + co2_limit_eu = n.generators.loc["co2 atmosphere", "marginal_cost"] # co2 additions - co2_price = -n.global_constraints.loc["CO2Limit", "mu"] - co2_limit_de + co2_price = -co2_limit_eu - co2_limit_de # specific emissions in tons CO2/MWh according to n.links[n.links.carrier =="your_carrier].efficiency2.unique().item() specific_emissions = { "oil": 0.2571, @@ -4317,12 +4321,14 @@ def get_policy(n, investment_year): co2_limit_de = n.global_constraints.loc["co2_limit-DE", "mu"] except KeyError: co2_limit_de = 0 - var["Price|Carbon"] = ( - -n.global_constraints.loc["CO2Limit", "mu"] - co2_limit_de + co2_price_add_on - ) + try: + co2_limit_eu = n.global_constraints.loc["co2_limit-EU", "mu"] + except KeyError: + co2_limit_eu = n.generators.loc["co2 atmosphere", "marginal_cost"] + var["Price|Carbon"] = -co2_limit_eu - co2_limit_de + co2_price_add_on var["Price|Carbon|EU-wide Regulation All Sectors"] = ( - -n.global_constraints.loc["CO2Limit", "mu"] + co2_price_add_on + -co2_limit_eu + co2_price_add_on ) # Price|Carbon|EU-wide Regulation Non-ETS @@ -5073,7 +5079,11 @@ def hack_DC_projects(n, p_nom_start, p_nom_planned, model_year, snakemake, costs ) # Future projects should not have any capacity - assert isclose(n.links.loc[future_projects, "p_nom_opt"], 0).all() + try: + assert isclose(n.links.loc[future_projects, "p_nom_opt"], 0).all() + except AssertionError: + logger.warning("Future projects have non-zero p_nom_opt. 
Overwriting with 0.") + n.links.loc[future_projects, "p_nom_opt"] = 0 # Setting p_nom to 0 such that n.statistics does not compute negative expanded capex or capacity additions # Setting p_nom_min to 0 for the grid_expansion calculation @@ -5367,13 +5377,14 @@ def get_data( if __name__ == "__main__": if "snakemake" not in globals(): snakemake = mock_snakemake( - "export_ariadne_variables", + "export_regret_variables", simpl="", clusters=27, opts="", ll="vopt", sector_opts="None", - run="LowDemand", + run="AriadneDemand", + decision="LowDemand", ) configure_logging(snakemake) config = snakemake.config @@ -5448,7 +5459,7 @@ def get_data( if "debug" == "debug": # For debugging var = pd.Series() - idx = 1 + idx = 0 n = networks[idx] c = costs[idx] _industry_demand = industry_demands[idx] From 74a547185fc8d039abdc6474f4e71ae07f9a1894 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 13 Aug 2025 09:45:08 +0200 Subject: [PATCH 062/202] large refactor --- scripts/pypsa-de/solve_regret.py | 320 ++++++++++++------------------- 1 file changed, 118 insertions(+), 202 deletions(-) diff --git a/scripts/pypsa-de/solve_regret.py b/scripts/pypsa-de/solve_regret.py index c55a0dbf5..0b3bd2990 100644 --- a/scripts/pypsa-de/solve_regret.py +++ b/scripts/pypsa-de/solve_regret.py @@ -1,4 +1,5 @@ import logging +import pathlib import numpy as np import pypsa @@ -31,44 +32,105 @@ def check_matching_components(new, deci, name, attr): ) -def fix_capacities_strict(realization, decision): - n = realization.copy() - - nominal_attrs = { - "generators": "p_nom", - "lines": "s_nom", - "links": "p_nom", - "stores": "e_nom", - } - - for name, attr in nominal_attrs.items(): - new = getattr(n, name) - deci = getattr(decision, name) - - check_matching_components(new, deci, name, attr) - - common_i = new.query("carrier != 'BEV charger'").index.intersection( - deci.query("carrier != 'BEV charger'").index - ) # Exclude BEV chargers from modification because the p_nom are taken from UBA - extendable_i = new.query(f"{attr}_extendable").index +def _unfix_bottlenecks(new, deci, name, extendable_i): + if name == "links": + virtual_links = [ + "land transport oil", + "land transport fuel cell", + "solid biomass for industry", + "gas for industry", + "industry methanol", + "naphtha for industry", + "process emissions", + "coal for industry", + "H2 for industry", + "shipping methanol", + "shipping oil", + "kerosene for aviation", + "agriculture machinery oil", + # "co2 sequestered", + ] + + _idx = new.loc[new.carrier.isin(virtual_links)].index.intersection(extendable_i) + # Virtual links are free + new.loc[_idx, "p_nom_extendable"] = True + + bottleneck_links = [ + "SMR", + "rural gas boiler", + "urban decentral gas boiler", + # For 2035 + # "rural biomass boiler", + # "urban decentral biomass boiler", + ] + # Bottleneck links can be extended, but not reduced + _idx = new.loc[new.carrier.isin(bottleneck_links)].index.intersection( + extendable_i + ) + new.loc[_idx, "p_nom_extendable"] = True + new.loc[_idx, "p_nom_min"] = deci.loc[_idx, "p_nom_opt"] + + # No limits for waste burning outside DE + _idx = new.query( + "carrier == 'HVC to air' and not index.str.startswith('DE')" + ).index.intersection(extendable_i) + new.loc[_idx, "p_nom_extendable"] = True + new.loc[_idx, "p_nom_min"] = deci.loc[_idx, "p_nom_opt"] + # Tight limits inside DE + _idx = new.query( + "carrier == 'waste CHP' and index.str.startswith('DE')" + ).index.intersection(extendable_i) + new.loc[_idx, "p_nom_extendable"] = True + new.loc[_idx, "p_nom_min"] = 
deci.loc[_idx, "p_nom_opt"] + # new.loc[_idx, "p_nom_max"] = deci.loc[_idx, "p_nom_opt"] + 0.1 + + _idx = new.query( + "carrier == 'electricity distribution grid'" + ).index.intersection(extendable_i) + new.loc[_idx, "p_nom_extendable"] = True + new.loc[_idx, "p_nom_min"] = deci.loc[_idx, "p_nom_opt"] + # new.loc[_idx, "p_nom_max"] = deci.loc[_idx, "p_nom_opt"] + 0.1 + + # if name == "lines": + # # For lines allow only a minimal extension, to avoid powerflow constraint issues + # new.loc[extendable_i, "s_nom_extendable"] = True + + # new.loc[extendable_i, "s_nom_min"] = deci.loc[extendable_i, "s_nom_opt"] + # new.loc[extendable_i, "s_nom_max"] = deci.loc[extendable_i, "s_nom_opt"] + 10 + + if name == "generators": + fuels = [ + "lignite", + "coal", + "oil primary", + "uranium", + "gas primary", + ] + vents = [ + "urban central heat vent", + "rural heat vent", + "urban decentral heat vent", + ] + _idx = new.loc[new.carrier.isin(fuels + vents)].index.intersection(extendable_i) + new.loc[_idx, "p_nom_extendable"] = True + + # if name == "stores": + # For co2 sequestered no limits are needed since the global constraints is active + # _idx = new.query("carrier == 'co2 sequestered'").index.intersection( + # extendable_i + # ) + # new.loc[_idx, "e_nom_extendable"] = True + # new.loc[_idx, "e_nom_max"] = deci.loc[_idx, "e_nom_opt"] + 1 - # Copy all assets (maybe this should only be done for current planning horizon) - new.loc[common_i, attr] = deci.loc[common_i, attr] - new.loc[common_i, attr + "_opt"] = deci.loc[common_i, attr + "_opt"] - new.loc[common_i, attr + "_min"] = deci.loc[common_i, attr + "_min"] - new.loc[common_i, attr + "_max"] = deci.loc[common_i, attr + "_max"] + # _idx = new.query("carrier == 'co2 stored'").index.intersection( + # extendable_i + # ) + # new.loc[_idx, "e_nom_extendable"] = True - # Fix everything... 
- new.loc[extendable_i, attr + "_extendable"] = False - new.loc[extendable_i, attr] = new.loc[extendable_i, attr + "_opt"] - if name == "stores": - # there is only one co2 atmosphere store which should always be extendable, hence no intersection with extendable_i needed - _idx = new.query("carrier == 'co2'").index - new.loc[_idx, "e_nom_extendable"] = True - return n + return -def fix_capacities(realization, decision): +def fix_capacities(realization, decision, scope="", strict=False): n = realization.copy() nominal_attrs = { @@ -101,7 +163,9 @@ def fix_capacities(realization, decision): common_i = new.query("carrier != 'BEV charger'").index.intersection( deci.query("carrier != 'BEV charger'").index ) # Exclude BEV chargers from modification because the p_nom are taken from UBA - extendable_i = new.query(f"{attr}_extendable").index + extendable_i = new.query( + f"{attr}_extendable and index.str.startswith('{scope}')" + ).index # Copy all assets (maybe this should only be done for current planning horizon) new.loc[common_i, attr] = deci.loc[common_i, attr] @@ -113,111 +177,18 @@ def fix_capacities(realization, decision): new.loc[extendable_i, attr + "_extendable"] = False new.loc[extendable_i, attr] = new.loc[extendable_i, attr + "_opt"] - if name == "links": - virtual_links = [ - "land transport oil", - "land transport fuel cell", - "solid biomass for industry", - "gas for industry", - "industry methanol", - "naphtha for industry", - "process emissions", - "coal for industry", - "H2 for industry", - "shipping methanol", - "shipping oil", - "kerosene for aviation", - "agriculture machinery oil", - # "co2 sequestered", - ] - - _idx = new.loc[new.carrier.isin(virtual_links)].index.intersection( - extendable_i - ) - # Virtual links are free - new.loc[_idx, "p_nom_extendable"] = True - - bottleneck_links = [ - "SMR", - "rural gas boiler", - "urban decentral gas boiler", - # For 2035 - "rural biomass boiler", - "urban decentral biomass boiler", - ] - # Bottleneck links can be extended, but not reduced - _idx = new.loc[new.carrier.isin(bottleneck_links)].index.intersection( - extendable_i - ) - new.loc[_idx, "p_nom_extendable"] = True - new.loc[_idx, "p_nom_min"] = deci.loc[_idx, "p_nom_opt"] - - # No limits for waste burning outside DE - _idx = new.query( - "carrier == 'HVC to air' and not index.str.startswith('DE')" - ).index.intersection(extendable_i) - new.loc[_idx, "p_nom_extendable"] = True - new.loc[_idx, "p_nom_min"] = deci.loc[_idx, "p_nom_opt"] - # Tight limits inside DE - _idx = new.query( - "carrier == 'waste CHP' and index.str.startswith('DE')" - ).index.intersection(extendable_i) - new.loc[_idx, "p_nom_extendable"] = True - new.loc[_idx, "p_nom_min"] = deci.loc[_idx, "p_nom_opt"] - # new.loc[_idx, "p_nom_max"] = deci.loc[_idx, "p_nom_opt"] + 0.1 - - _idx = new.query( - "carrier == 'electricity distribution grid'" - ).index.intersection(extendable_i) - new.loc[_idx, "p_nom_extendable"] = True - new.loc[_idx, "p_nom_min"] = deci.loc[_idx, "p_nom_opt"] - # new.loc[_idx, "p_nom_max"] = deci.loc[_idx, "p_nom_opt"] + 0.1 - - # if name == "lines": - # # For lines allow only a minimal extension, to avoid powerflow constraint issues - # new.loc[extendable_i, "s_nom_extendable"] = True - - # new.loc[extendable_i, "s_nom_min"] = deci.loc[extendable_i, "s_nom_opt"] - # new.loc[extendable_i, "s_nom_max"] = deci.loc[extendable_i, "s_nom_opt"] + 10 - - if name == "generators": - fuels = [ - "lignite", - "coal", - "oil primary", - "uranium", - "gas primary", - ] - vents = [ - "urban central heat 
vent", - "rural heat vent", - "urban decentral heat vent", - ] - _idx = new.loc[new.carrier.isin(fuels + vents)].index.intersection( - extendable_i - ) - new.loc[_idx, "p_nom_extendable"] = True + if not strict: + _unfix_bottlenecks(new, deci, name, extendable_i) if name == "stores": # there is only one co2 atmosphere store which should always be extendable, hence no intersection with extendable_i needed _idx = new.query("carrier == 'co2'").index new.loc[_idx, "e_nom_extendable"] = True - # For co2 sequestered no limits are needed since the global constraints is active - # _idx = new.query("carrier == 'co2 sequestered'").index.intersection( - # extendable_i - # ) - # new.loc[_idx, "e_nom_extendable"] = True - # new.loc[_idx, "e_nom_max"] = deci.loc[_idx, "e_nom_opt"] + 1 - - # _idx = new.query("carrier == 'co2 stored'").index.intersection( - # extendable_i - # ) - # new.loc[_idx, "e_nom_extendable"] = True - # Above several assets are switched to extendable again, for these the p_nom value is restored, i.e. set to the value from the decision network _idx = new.query(f"{attr}_extendable").index.intersection(extendable_i) new.loc[_idx, attr] = deci.loc[_idx, attr] + return n @@ -230,7 +201,7 @@ def fix_capacities(realization, decision): sector_opts="none", planning_horizons="2035", decision="LowDemand", - run="LowDemand", + run="AriadneDemand", ) configure_logging(snakemake) @@ -243,8 +214,6 @@ def fix_capacities(realization, decision): logger.info("Loading realization and decision networks") - import pathlib - # Touch output file to ensure it exists pathlib.Path(snakemake.output.regret_network).touch() @@ -256,68 +225,14 @@ def fix_capacities(realization, decision): "mem_logging_frequency", 30 ) - n = fix_capacities_strict(realization, decision) + n = fix_capacities(realization, decision, scope="DE", strict=False) n_pre = n.copy() - # logger.info("Adding CO2 removal service outside DE.") - # n.add("Carrier", "CO2 removal service") - - # n.add( - # "Generator", - # "CO2 removal service", - # bus="co2 atmosphere", - # carrier="CO2 removal service", - # p_nom=1e6, - # marginal_cost=1000, # marginal cost in €/tCO2 - # p_nom_extendable=False, - # p_min_pu=0, - # p_max_pu=1.0, - # sign=-1, - # ) - snakemake.params.solving["options"]["noisy_costs"] = False # TODO remove this again - snakemake.params.solving["options"]["load_shedding"] = 100 - - results = {} - crazy = {} - for addon in np.logspace(0, 3, 10): - m = n.copy() - m.global_constraints.loc[["CO2Limit"], "constant"] += addon - - prepare_network( - m, - solve_opts=snakemake.params.solving["options"], - foresight=snakemake.params.foresight, - planning_horizons=planning_horizons, - co2_sequestration_potential=snakemake.params["co2_sequestration_potential"], - limit_max_growth=snakemake.params.get("sector", {}).get("limit_max_growth"), - regret_run=snakemake.params.get("regret_run", False), - ) - with memory_logger( - filename=getattr(snakemake.log, "memory", None), interval=logging_frequency - ) as mem: - solve_network( - m, - config=snakemake.config, - params=snakemake.params, - solving=snakemake.params.solving, - planning_horizons=planning_horizons, - rule_name=snakemake.rule, - log_fn=snakemake.log.solver, - snakemake=snakemake, - ) - logger.info(f"Maximum memory usage: {mem.mem_usage}") - results[addon] = m.global_constraints.loc["CO2Limit", "mu"] - crazy[addon] = ( - decision.global_constraints.loc["CO2Limit", "constant"] - - m.global_constraints.loc["CO2Limit", "constant"] - ) - - n.global_constraints.loc[["CO2Limit"], "constant"] += 
1000 - # n.global_constraints.loc[["co2_sequestration_limit"], "constant"] -= 100 + # snakemake.params.solving["options"]["load_shedding"] = 100 prepare_network( n, @@ -326,9 +241,21 @@ def fix_capacities(realization, decision): planning_horizons=planning_horizons, co2_sequestration_potential=snakemake.params["co2_sequestration_potential"], limit_max_growth=snakemake.params.get("sector", {}).get("limit_max_growth"), - regret_run=snakemake.params.get("regret_run", False), + regret_run=True, # snakemake.params.get("regret_run", False), ) + # n.add( + # "Generator", + # "co2 atmosphere", + # bus="co2 atmosphere", + # p_min_pu=-1, + # p_max_pu=0, + # p_nom_extendable=True, + # marginal_cost=realization.global_constraints.loc["CO2Limit", "mu"], + # ) + + # n.global_constraints.drop("CO2Limit", inplace=True) + with memory_logger( filename=getattr(snakemake.log, "memory", None), interval=logging_frequency ) as mem: @@ -358,17 +285,6 @@ def fix_capacities(realization, decision): ).sort_values() ) - logger.info( - ( - decision.links.query( - "carrier == 'waste CHP' and index.str.startswith('DE')" - ).p_nom_opt - - n.links.query( - "carrier == 'waste CHP' and index.str.startswith('DE')" - ).p_nom_opt - ).sort_values() - ) - logger.info( (decision.global_constraints.mu - n.global_constraints.mu).round().sort_values() ) From b7aa4fb860a2e4893b232f75464604d82bcafecf Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 13 Aug 2025 11:59:31 +0200 Subject: [PATCH 063/202] allow transmission expansion in EU, in DE fix to NEP expansion until 2030 --- config/scenarios.manual.yaml | 10 +++--- scripts/pypsa-de/export_ariadne_variables.py | 8 +++-- scripts/pypsa-de/modify_prenetwork.py | 32 +++++++++++++++++++- 3 files changed, 42 insertions(+), 8 deletions(-) diff --git a/config/scenarios.manual.yaml b/config/scenarios.manual.yaml index 85d437a14..8ab1eea0a 100644 --- a/config/scenarios.manual.yaml +++ b/config/scenarios.manual.yaml @@ -76,18 +76,18 @@ KN2045_Mix: AriadneDemand: solving: options: + transmission_losses: 0 noisy_costs: false - electricity: - transmission_losses: 0 - transmission_limit: v1 + # electricity: + # transmission_limit: v1 LowDemand: iiasa_database: reference_scenario: ExPol # e.g., CO2 emissions from REMIND will follow ExPol uba_for_industry: true uba_for_mobility: true - electricity: - transmission_limit: v1 + # electricity: + # transmission_limit: v1 solving: options: transmission_losses: 0 diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index 0e5c03517..8f8fab98a 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -4362,8 +4362,12 @@ def get_tsc(n, country): ) # transmission_carriers = get_transmission_carriers(n).get_level_values("carrier") - transmission_lines = n.lines.carrier.isin(transmission_carriers) - transmission_links = n.links.carrier.isin(transmission_carriers) + transmission_lines = ( + n.lines.carrier.isin(transmission_carriers) & n.lines.active + ) + transmission_links = ( + n.links.carrier.isin(transmission_carriers) & n.links.active + ) # country_transmission_lines = ( (n.lines.bus0.str.contains(country)) & ~(n.lines.bus1.str.contains(country)) diff --git a/scripts/pypsa-de/modify_prenetwork.py b/scripts/pypsa-de/modify_prenetwork.py index f5efceb6e..1f15f9002 100644 --- a/scripts/pypsa-de/modify_prenetwork.py +++ b/scripts/pypsa-de/modify_prenetwork.py @@ -1192,10 +1192,35 @@ def drop_duplicate_transmission_projects(n): f"Dropping transmission 
projects with build year <= {year}. They are likely already in the OSM base network." ) # Maybe one 2024 line is missing in the OSM base network - to_drop = n.lines.query("0 < build_year <= @year").index + to_drop = n.lines.query(f"0 < build_year <= {year}").index n.remove("Line", to_drop) + to_deactivate = n.links.query( + f"carrier == 'DC' and (0 < build_year <= {year})" + ).index + n.links.loc[to_deactivate, "active"] = False + + +def deactivate_late_transmission_projects(n): + year = snakemake.params.onshore_nep_force["cutout_year"] + + to_deactivate = n.links.query(f"carrier == 'DC' and build_year > {year}").index + n.links.loc[to_deactivate, "active"] = False + + to_deactivate = n.lines.query(f"build_year > {year}").index + n.lines.loc[to_deactivate, "active"] = False + + +def fix_transmission_DE(n): + to_fix = n.lines.query("bus0.str.contains('DE') or bus1.str.contains('DE')").index + n.lines.loc[to_fix, "s_nom_extendable"] = False + + to_fix = n.links.query( + "(bus0.str.contains('DE') or bus1.str.contains('DE')) and carrier=='DC'" + ).index + n.links.loc[to_fix, "p_nom_extendable"] = False + def scale_capacity(n, scaling): """ @@ -1465,4 +1490,9 @@ def modify_industry_demand( scale_non_energy=snakemake.params.scale_industry_non_energy, ) + # For regret runs + deactivate_late_transmission_projects(n) + + fix_transmission_DE(n) + n.export_to_netcdf(snakemake.output.network) From 4fb516ba286c4f8c98c515f769097f2e21f0a8b8 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 13 Aug 2025 12:00:21 +0200 Subject: [PATCH 064/202] correctly access the co2 constraint --- scripts/pypsa-de/export_ariadne_variables.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index 8f8fab98a..5c1eaec8c 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -3382,7 +3382,7 @@ def get_prices(n, region): co2_limit_de = 0 try: - co2_limit_eu = n.global_constraints.loc["co2_limit-EU", "mu"] + co2_limit_eu = n.global_constraints.loc["CO2Limit", "mu"] except KeyError: co2_limit_eu = n.generators.loc["co2 atmosphere", "marginal_cost"] # co2 additions @@ -4322,7 +4322,7 @@ def get_policy(n, investment_year): except KeyError: co2_limit_de = 0 try: - co2_limit_eu = n.global_constraints.loc["co2_limit-EU", "mu"] + co2_limit_eu = n.global_constraints.loc["CO2Limit", "mu"] except KeyError: co2_limit_eu = n.generators.loc["co2 atmosphere", "marginal_cost"] var["Price|Carbon"] = -co2_limit_eu - co2_limit_de + co2_price_add_on From ed5d813efec420906eef05b153ecd22eb3c069c0 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 13 Aug 2025 13:08:44 +0200 Subject: [PATCH 065/202] unfix co2 sequestered because of capital_cost usage of that store --- scripts/pypsa-de/solve_regret.py | 19 +++++-------------- 1 file changed, 5 insertions(+), 14 deletions(-) diff --git a/scripts/pypsa-de/solve_regret.py b/scripts/pypsa-de/solve_regret.py index 0b3bd2990..1d9bf6d48 100644 --- a/scripts/pypsa-de/solve_regret.py +++ b/scripts/pypsa-de/solve_regret.py @@ -48,7 +48,7 @@ def _unfix_bottlenecks(new, deci, name, extendable_i): "shipping oil", "kerosene for aviation", "agriculture machinery oil", - # "co2 sequestered", + "co2 sequestered", ] _idx = new.loc[new.carrier.isin(virtual_links)].index.intersection(extendable_i) @@ -114,19 +114,6 @@ def _unfix_bottlenecks(new, deci, name, extendable_i): _idx = new.loc[new.carrier.isin(fuels + 
vents)].index.intersection(extendable_i) new.loc[_idx, "p_nom_extendable"] = True - # if name == "stores": - # For co2 sequestered no limits are needed since the global constraints is active - # _idx = new.query("carrier == 'co2 sequestered'").index.intersection( - # extendable_i - # ) - # new.loc[_idx, "e_nom_extendable"] = True - # new.loc[_idx, "e_nom_max"] = deci.loc[_idx, "e_nom_opt"] + 1 - - # _idx = new.query("carrier == 'co2 stored'").index.intersection( - # extendable_i - # ) - # new.loc[_idx, "e_nom_extendable"] = True - return @@ -185,6 +172,10 @@ def fix_capacities(realization, decision, scope="", strict=False): _idx = new.query("carrier == 'co2'").index new.loc[_idx, "e_nom_extendable"] = True + _idx = new.query("carrier == 'co2 sequestered'").index.intersection( + extendable_i + ) + new.loc[_idx, "e_nom_extendable"] = True # Above several assets are switched to extendable again, for these the p_nom value is restored, i.e. set to the value from the decision network _idx = new.query(f"{attr}_extendable").index.intersection(extendable_i) new.loc[_idx, attr] = deci.loc[_idx, attr] From edb5a4f3e18c8b5afae8aa025d612ffb57fd7773 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 15 Aug 2025 11:38:26 +0200 Subject: [PATCH 066/202] major refactor --- scripts/pypsa-de/solve_regret.py | 194 +++++++++++++------------------ 1 file changed, 83 insertions(+), 111 deletions(-) diff --git a/scripts/pypsa-de/solve_regret.py b/scripts/pypsa-de/solve_regret.py index 1d9bf6d48..3da6cfb2c 100644 --- a/scripts/pypsa-de/solve_regret.py +++ b/scripts/pypsa-de/solve_regret.py @@ -16,24 +16,9 @@ logger = logging.getLogger(__name__) -def check_matching_components(new, deci, name, attr): - if not new.index.symmetric_difference(deci.index).empty: - logger.error( - f"Indices of {name} in realization and decision networks do not match. " - "This may lead to unexpected results." - f"Offending indices are: {new.index.symmetric_difference(deci.index).tolist()}" - ) - assert ( - new.query(f"{attr}_extendable") - .index.symmetric_difference(deci.query(f"{attr}_extendable").index) - .empty - ), ( - f"Indices of {name} with {attr}_extendable in realization and decision networks do not match." 
- ) - - def _unfix_bottlenecks(new, deci, name, extendable_i): if name == "links": + # Links that have 0-cost and are extendable virtual_links = [ "land transport oil", "land transport fuel cell", @@ -52,51 +37,32 @@ def _unfix_bottlenecks(new, deci, name, extendable_i): ] _idx = new.loc[new.carrier.isin(virtual_links)].index.intersection(extendable_i) - # Virtual links are free new.loc[_idx, "p_nom_extendable"] = True + # Bottleneck links can be extended, but not reduced to fix infeasibilities due to numerical inconsistencies bottleneck_links = [ + "electricity distribution grid", + "waste CHP", "SMR", + # Boilers create bottlenecks AND should be extendable for fixed_profile_scaling constraints to be applied correctly "rural gas boiler", "urban decentral gas boiler", - # For 2035 - # "rural biomass boiler", - # "urban decentral biomass boiler", + # Biomass for 2035 when gas is banned + "rural biomass boiler", + "urban decentral biomass boiler", ] - # Bottleneck links can be extended, but not reduced _idx = new.loc[new.carrier.isin(bottleneck_links)].index.intersection( extendable_i ) new.loc[_idx, "p_nom_extendable"] = True new.loc[_idx, "p_nom_min"] = deci.loc[_idx, "p_nom_opt"] - # No limits for waste burning outside DE + # Waste outside DE can also be burned directly _idx = new.query( "carrier == 'HVC to air' and not index.str.startswith('DE')" ).index.intersection(extendable_i) new.loc[_idx, "p_nom_extendable"] = True new.loc[_idx, "p_nom_min"] = deci.loc[_idx, "p_nom_opt"] - # Tight limits inside DE - _idx = new.query( - "carrier == 'waste CHP' and index.str.startswith('DE')" - ).index.intersection(extendable_i) - new.loc[_idx, "p_nom_extendable"] = True - new.loc[_idx, "p_nom_min"] = deci.loc[_idx, "p_nom_opt"] - # new.loc[_idx, "p_nom_max"] = deci.loc[_idx, "p_nom_opt"] + 0.1 - - _idx = new.query( - "carrier == 'electricity distribution grid'" - ).index.intersection(extendable_i) - new.loc[_idx, "p_nom_extendable"] = True - new.loc[_idx, "p_nom_min"] = deci.loc[_idx, "p_nom_opt"] - # new.loc[_idx, "p_nom_max"] = deci.loc[_idx, "p_nom_opt"] + 0.1 - - # if name == "lines": - # # For lines allow only a minimal extension, to avoid powerflow constraint issues - # new.loc[extendable_i, "s_nom_extendable"] = True - - # new.loc[extendable_i, "s_nom_min"] = deci.loc[extendable_i, "s_nom_opt"] - # new.loc[extendable_i, "s_nom_max"] = deci.loc[extendable_i, "s_nom_opt"] + 10 if name == "generators": fuels = [ @@ -117,8 +83,19 @@ def _unfix_bottlenecks(new, deci, name, extendable_i): return -def fix_capacities(realization, decision, scope="", strict=False): - n = realization.copy() +def fix_capacities(realization, decision, scope="DE", strict=False): + logger.info(f"Fixing all capacities for scope: {scope}") + if scope == "EU": + scope = "" + if not strict: + logger.info("Freeing virtual links, bottlenecks and fossil generators.") + + # Copy all existing assets from the decision network + n = decision.copy() + + # The constraints and loads are taken from the realization network + n.global_constraints = realization.global_constraints.copy() + n.loads = realization.loads.copy() nominal_attrs = { "generators": "p_nom", @@ -130,54 +107,44 @@ def fix_capacities(realization, decision, scope="", strict=False): for name, attr in nominal_attrs.items(): new = getattr(n, name) deci = getattr(decision, name) + real = getattr(realization, name) - greater = deci.query(f"{attr}_opt > {attr}_max") - if not greater.empty: - logger.error( - f"Decision network have {name} with {attr}_opt > {attr}_max. 
" - f"These assets are: {greater.index.tolist()}" - ) - smaller = deci.query(f"{attr}_min > {attr}_opt") - if not smaller.empty: - logger.error( - f"Decision network have {name} with {attr}_min > {attr}_opt. " - "This may lead to unexpected results." - f"These assets are: {smaller.index.tolist()}" - ) - - check_matching_components(new, deci, name, attr) + # Scenario specific assets are taken from the realization network + _idx = real.query("carrier in ['BEV charger', 'V2G', 'EV battery']").index + new.loc[_idx, attr] = real.loc[_idx, attr] - common_i = new.query("carrier != 'BEV charger'").index.intersection( - deci.query("carrier != 'BEV charger'").index - ) # Exclude BEV chargers from modification because the p_nom are taken from UBA + # Start with fixing everything... extendable_i = new.query( f"{attr}_extendable and index.str.startswith('{scope}')" ).index - - # Copy all assets (maybe this should only be done for current planning horizon) - new.loc[common_i, attr] = deci.loc[common_i, attr] - new.loc[common_i, attr + "_opt"] = deci.loc[common_i, attr + "_opt"] - new.loc[common_i, attr + "_min"] = deci.loc[common_i, attr + "_min"] - new.loc[common_i, attr + "_max"] = deci.loc[common_i, attr + "_max"] - - # Fix everything... new.loc[extendable_i, attr + "_extendable"] = False new.loc[extendable_i, attr] = new.loc[extendable_i, attr + "_opt"] + # Some links should be extendable to avoid infeasibilities or allow burning more fossil fuels if not strict: _unfix_bottlenecks(new, deci, name, extendable_i) + # The CO2 constraints on atmosphere and sequestration need extendable stores to work correctly if name == "stores": + logger.info("Freeing co2 atmosphere and sequestered stores.") # there is only one co2 atmosphere store which should always be extendable, hence no intersection with extendable_i needed _idx = new.query("carrier == 'co2'").index new.loc[_idx, "e_nom_extendable"] = True - + # co2 sequestered stores from previous planning horizons should not be extendable _idx = new.query("carrier == 'co2 sequestered'").index.intersection( extendable_i ) new.loc[_idx, "e_nom_extendable"] = True - # Above several assets are switched to extendable again, for these the p_nom value is restored, i.e. set to the value from the decision network - _idx = new.query(f"{attr}_extendable").index.intersection(extendable_i) + + # Above several assets are switched to extendable again, for these the p_nom value is restored to the value from the decision network + + _idx = new.query(f"{attr}_extendable") + + if not _idx.difference(extendable_i).empty: + raise ValueError( + "Assets that are not extendable in the decision network have been set to extendable. This should not happen. Aborting." 
+ ) + new.loc[_idx, attr] = deci.loc[_idx, attr] return n @@ -191,7 +158,7 @@ def fix_capacities(realization, decision, scope="", strict=False): opts="", sector_opts="none", planning_horizons="2035", - decision="LowDemand", + decision="AriadneDemand", run="AriadneDemand", ) @@ -199,12 +166,6 @@ def fix_capacities(realization, decision, scope="", strict=False): set_scenario_config(snakemake) update_config_from_wildcards(snakemake.config, snakemake.wildcards) - solve_opts = snakemake.params.solving["options"] - - np.random.seed(solve_opts.get("seed", 123)) - - logger.info("Loading realization and decision networks") - # Touch output file to ensure it exists pathlib.Path(snakemake.output.regret_network).touch() @@ -215,15 +176,30 @@ def fix_capacities(realization, decision, scope="", strict=False): logging_frequency = snakemake.config.get("solving", {}).get( "mem_logging_frequency", 30 ) + solve_opts = snakemake.params.solving["options"] + assert solve_opts["noisy_costs"] == False, ( + "Noisy costs should not be used in regret runs." + ) + np.random.seed(solve_opts.get("seed", 123)) - n = fix_capacities(realization, decision, scope="DE", strict=False) - - n_pre = n.copy() + n = fix_capacities( + realization, decision, scope=snakemake.params.scope_to_fix, strict=False + ) - snakemake.params.solving["options"]["noisy_costs"] = False + if solve_opts["post_discretization"].get("enable") and not solve_opts.get( + "skip_iterations" + ): + # Undo the last lines of optimize transmission expansion iteratively + n.lines.s_nom_extendable = False + n.lines.s_nom = n.lines.s_nom_opt - # TODO remove this again - # snakemake.params.solving["options"]["load_shedding"] = 100 + discretized_links = n.links.query( + f"carrier in {list(solve_opts['post_discretization'].get('link_unit_size').keys())}" + ).index + n.links.loc[discretized_links, "p_nom_extendable"] = False + n.links.loc[discretized_links, "p_nom"] = n.links.loc[ + discretized_links, "p_nom_opt" + ] prepare_network( n, @@ -232,20 +208,24 @@ def fix_capacities(realization, decision, scope="", strict=False): planning_horizons=planning_horizons, co2_sequestration_potential=snakemake.params["co2_sequestration_potential"], limit_max_growth=snakemake.params.get("sector", {}).get("limit_max_growth"), - regret_run=True, # snakemake.params.get("regret_run", False), + regret_run=True, ) - # n.add( - # "Generator", - # "co2 atmosphere", - # bus="co2 atmosphere", - # p_min_pu=-1, - # p_max_pu=0, - # p_nom_extendable=True, - # marginal_cost=realization.global_constraints.loc["CO2Limit", "mu"], - # ) - - # n.global_constraints.drop("CO2Limit", inplace=True) + if snakemake.params.scope_to_fix == "EU": + logger.info( + f"Fixing Scope EU chosen. 
Setting the CO2 price to the price from the realization network to avoid infeasibilities: {realization.global_constraints.loc['CO2Limit', 'mu']} €/t_CO2" + ) + n.add( + "Generator", + "co2 atmosphere", + bus="co2 atmosphere", + p_min_pu=-1, + p_max_pu=0, + p_nom_extendable=True, + carrier="co2", + marginal_cost=realization.global_constraints.loc["CO2Limit", "mu"], + ) + n.global_constraints.drop("CO2Limit", inplace=True) with memory_logger( filename=getattr(snakemake.log, "memory", None), interval=logging_frequency @@ -265,17 +245,9 @@ def fix_capacities(realization, decision, scope="", strict=False): n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards))) n.export_to_netcdf(snakemake.output.regret_network) - # logger.info((n.lines.s_nom_opt - decision.lines.s_nom_opt).sort_values()) - - logger.info( - ( - n.links.query("carrier == 'electricity distribution grid'").p_nom_opt - - decision.links.query( - "carrier == 'electricity distribution grid'" - ).p_nom_opt - ).sort_values() - ) - logger.info( - (decision.global_constraints.mu - n.global_constraints.mu).round().sort_values() + "Difference in global constraints (decision - regret_network): %s", + (decision.global_constraints.mu - n.global_constraints.mu) + .round(2) + .sort_values(), ) From 2e2068b9d53fdd4e69f35898416317caf30ade6a Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 15 Aug 2025 11:46:01 +0200 Subject: [PATCH 067/202] allow to fix either DE or CO2Price&EU --- Snakefile | 2 +- config/config.de.yaml | 6 ++++-- scripts/pypsa-de/solve_regret.py | 25 ++++++++++++++++++++----- 3 files changed, 25 insertions(+), 8 deletions(-) diff --git a/Snakefile b/Snakefile index 265b8338e..42c5e87fe 100644 --- a/Snakefile +++ b/Snakefile @@ -985,7 +985,7 @@ rule solve_regret: ), custom_extra_functionality=input_custom_extra_functionality, energy_year=config_provider("energy", "energy_totals_year"), - #regret_run=True, + scope_to_fix=config_provider("iiasa_database", "regret_run", "scope_to_fix"), input: decision=RESULTS.replace("{run}", "{decision}") + "networks/base_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", diff --git a/config/config.de.yaml b/config/config.de.yaml index 68278691b..2d2c591df 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -45,6 +45,8 @@ iiasa_database: - KN2045_NFhoch reference_scenario: KN2045_Mix region: Deutschland + regret_run: + scope_to_fix: DE # Supported values are DE and EU ageb_for_mobility: true # In 2020 use AGEB data for final energy demand and KBA for vehicles uba_for_mobility: false # For 2025–2035 use MWMS scenario from UBA Projektionsbericht 2025 uba_for_industry: false # For 2025–2035 use MWMS scenario from UBA Projektionsbericht 2025 @@ -432,11 +434,11 @@ solving: options: assign_all_duals: true load_shedding: false - skip_iterations: true # settings for post-discretization: false + skip_iterations: false # settings for post-discretization: false min_iterations: 1 # settings for post-discretization: 1 max_iterations: 1 # settings for post-discretization: 1 post_discretization: - enable: false + enable: true line_unit_size: 1698 line_threshold: 0.3 link_unit_size: diff --git a/scripts/pypsa-de/solve_regret.py b/scripts/pypsa-de/solve_regret.py index 3da6cfb2c..af3ff4d78 100644 --- a/scripts/pypsa-de/solve_regret.py +++ b/scripts/pypsa-de/solve_regret.py @@ -213,7 +213,10 @@ def fix_capacities(realization, decision, scope="DE", strict=False): if snakemake.params.scope_to_fix == "EU": logger.info( - f"Fixing Scope EU chosen. 
Setting the CO2 price to the price from the realization network to avoid infeasibilities: {realization.global_constraints.loc['CO2Limit', 'mu']} €/t_CO2" + f"Fixing Scope 'EU' chosen. Setting the CO2 price to the price from the realization network to avoid infeasibilities: {realization.global_constraints.loc['CO2Limit', 'mu']} €/t_CO2" + ) + logger.warning( + "Please make sure that the long-term run with unchanged demand is consistent with the short-term run." ) n.add( "Generator", @@ -243,11 +246,23 @@ def fix_capacities(realization, decision, scope="DE", strict=False): logger.info(f"Maximum memory usage: {mem.mem_usage}") n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards))) - n.export_to_netcdf(snakemake.output.regret_network) - logger.info( - "Difference in global constraints (decision - regret_network): %s", + constraint_diff = ( (decision.global_constraints.mu - n.global_constraints.mu) .round(2) - .sort_values(), + .sort_values() + ) + + logger.info( + "Difference in global constraints (decision - regret_network): %s", + constraint_diff, ) + + if snakemake.input.realization == snakemake.input.decision: + if abs(constraint_diff["CO2Limit"]) > 1: + logger.error( + "Difference in CO2 price between long-term and short-term model is too high: %s", + constraint_diff["CO2Limit"], + ) + + n.export_to_netcdf(snakemake.output.regret_network) From f54450f90b9d7cab9cbbf536e30422227bc60a19 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 15 Aug 2025 11:46:25 +0200 Subject: [PATCH 068/202] minor --- scripts/pypsa-de/additional_functionality.py | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/pypsa-de/additional_functionality.py b/scripts/pypsa-de/additional_functionality.py index 5457c32a5..73af0b1cd 100644 --- a/scripts/pypsa-de/additional_functionality.py +++ b/scripts/pypsa-de/additional_functionality.py @@ -68,6 +68,7 @@ def add_capacity_limits( ) n.global_constraints.drop(cname, inplace=True) continue + if cname in n.global_constraints.index: logger.warning( f"Global constraint {cname} already exists. Dropping and adding it again." From db23995c50a9632d13b31532d1ba4d17d02b43a5 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 15 Aug 2025 11:47:49 +0200 Subject: [PATCH 069/202] add congestion rent and fix typo in trade carriers --- scripts/pypsa-de/export_ariadne_variables.py | 31 ++++++++++++++------ 1 file changed, 22 insertions(+), 9 deletions(-) diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index 5c1eaec8c..0b0993426 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -4437,7 +4437,7 @@ def get_tsc(n, country): return tsc - def get_link_opex(n, carriers, region, sw): + def get_link_opex(n, carriers, region, sw, add_congestion_rent=False): # get flow of electricity/hydrogen... 
# multiply it with the marginal costs supplying = n.links[ @@ -4456,6 +4456,10 @@ def get_link_opex(n, carriers, region, sw): for index in supplying: # price of energy in trade country marg_price = n.buses_t.marginal_price[n.links.loc[index].bus0] + if add_congestion_rent: + marg_price = ( + marg_price + n.buses_t.marginal_price[n.links.loc[index].bus1] + ) / 2 trade = n.links_t.p1[index].mul(sw) trade_out += marg_price.mul(trade).sum() @@ -4463,13 +4467,17 @@ def get_link_opex(n, carriers, region, sw): for index in receiving: # price of energy in Germany marg_price = n.buses_t.marginal_price[n.links.loc[index].bus0] + if add_congestion_rent: + marg_price = ( + marg_price + n.buses_t.marginal_price[n.links.loc[index].bus1] + ) / 2 trade = n.links_t.p1[index].mul(sw) trade_in += marg_price.mul(trade).sum() return abs(trade_in) - abs(trade_out) # > 0: costs for Germany # < 0: profit for Germany - def get_line_opex(n, region, sw): + def get_line_opex(n, region, sw, add_congestion_rent=False): supplying = n.lines[ (n.lines.carrier.isin(["AC"])) & (n.lines.bus0.str.startswith(region)) @@ -4489,9 +4497,11 @@ def get_line_opex(n, region, sw): trade_in = trade.clip(upper=0) # negative marg_price_DE = n.buses_t.marginal_price[n.lines.loc[index].bus0] marg_price_EU = n.buses_t.marginal_price[n.lines.loc[index].bus1] - net_out += ( - trade_out.mul(marg_price_DE).sum() + trade_in.mul(marg_price_EU).sum() - ) + price_out = marg_price_DE + price_in = marg_price_EU + if add_congestion_rent: + price_out = price_in = (marg_price_DE + marg_price_EU) / 2 + net_out += trade_out.mul(price_out).sum() + trade_in.mul(price_in).sum() # net_out > 0: Germany is exporting more electricity # net_out < 0: Germany is importing more electricity @@ -4503,9 +4513,11 @@ def get_line_opex(n, region, sw): trade_out = trade_out.clip(upper=0) marg_price_EU = n.buses_t.marginal_price[n.lines.loc[index].bus0] marg_price_DE = n.buses_t.marginal_price[n.lines.loc[index].bus1] - net_in += ( - trade_in.mul(marg_price_EU).sum() + trade_out.mul(marg_price_DE).sum() - ) + price_out = marg_price_DE + price_in = marg_price_EU + if add_congestion_rent: + price_out = price_in = (marg_price_DE + marg_price_EU) / 2 + net_in += trade_in.mul(price_in).sum() + trade_out.mul(price_out).sum() # net_in > 0: Germany is importing more electricity # net_in < 0: Germany is exporting more electricity @@ -4515,7 +4527,8 @@ def get_line_opex(n, region, sw): "DC", "H2 pipeline", "H2 pipeline (Kernnetz)", - "H2 pipeline retrofittedrenewable oil", + "H2 pipeline retrofitted", + "renewable oil", "renewable gas", "methanol", ] From 5668e22da07fcb5103fc9f119a3e783a4f024dbc Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 15 Aug 2025 13:45:41 +0000 Subject: [PATCH 070/202] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/pypsa-de/export_ariadne_variables.py | 1 - 1 file changed, 1 deletion(-) diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index a996f47f5..985b6ec03 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -4344,7 +4344,6 @@ def get_economy(n, region): var = pd.Series() def get_tsc(n, country): - pypsa.options.params.statistics.drop_zero = False capex = n.statistics.capex( groupby=pypsa.statistics.groupers["name", "carrier"], nice_names=False From 5cffc7a1317904d3d512f43ecc32df9ebf4cbd1f Mon Sep 17 
00:00:00 2001 From: Michael Lindner Date: Fri, 15 Aug 2025 15:58:00 +0200 Subject: [PATCH 071/202] distinguish components of link opex --- scripts/pypsa-de/export_ariadne_variables.py | 26 ++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index 0b0993426..8f3c3c7bc 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -4538,6 +4538,32 @@ def get_line_opex(n, region, sw, add_congestion_rent=False): trade_costs = get_link_opex(n, trade_carriers, region, sw) + get_line_opex( n, region, sw ) + var["Cost|Total Energy System Cost|Trade"] = trade_costs / 1e9 + var["Cost|Total Energy System Cost|Trade|Electricity"] = ( + get_line_opex(n, region, sw) / 1e9 + get_link_opex(n, ["DC"], region, sw) / 1e9 + ) + var["Cost|Total Energy System Cost|Trade|Efuels"] = ( + get_link_opex(n, ["renewable oil", "renewable gas", "methanol"], region, sw) + / 1e9 + ) + var["Cost|Total Energy System Cost|Trade|Hydrogen"] = ( + get_link_opex( + n, + ["H2 pipeline", "H2 pipeline (Kernnetz)", "H2 pipeline retrofitted"], + region, + sw, + ) + / 1e9 + ) + if not isclose( + var["Cost|Total Energy System Cost|Trade"], + var["Cost|Total Energy System Cost|Trade|Electricity"] + + var["Cost|Total Energy System Cost|Trade|Efuels"] + + var["Cost|Total Energy System Cost|Trade|Hydrogen"], + ): + logger.error( + "Total Energy System Cost|Trade does not equal the sum of its components. This should be fixed!" + ) # Cost|Total Energy System Cost in billion EUR2020/yr var["Cost|Total Energy System Cost"] = round((tsc + trade_costs) / 1e9, 4) From 9e288e76337c489114485c298e41e1ebcc2df887 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 15 Aug 2025 17:29:32 +0200 Subject: [PATCH 072/202] improve code structure --- scripts/solve_network.py | 179 ++++++++++++++++++++------------------- 1 file changed, 93 insertions(+), 86 deletions(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 43428e5a2..91a345588 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -45,6 +45,7 @@ import yaml from pypsa.descriptors import get_activity_mask from pypsa.descriptors import get_switchable_as_dense as get_as_dense +from snakemake.script import Snakemake from scripts._benchmark import memory_logger from scripts._helpers import ( @@ -1191,6 +1192,93 @@ def add_co2_atmosphere_constraint(n, snapshots): n.model.add_constraints(lhs <= rhs, name=f"GlobalConstraint-{name}") +def extra_functionality( + n: pypsa.Network, + snapshots: pd.DatetimeIndex, + snakemake: Snakemake, + planning_horizons: str | None = None, +) -> None: + """ + Add custom constraints and functionality. + + Parameters + ---------- + n : pypsa.Network + The PyPSA network instance with config and params attributes + snapshots : pd.DatetimeIndex + Simulation timesteps + snakemake : snakemake.script.Snakemake + Snakemake instance for accessing workflow parameters + planning_horizons : str, optional + The current planning horizon year or None in perfect foresight + + Collects supplementary constraints which will be passed to + ``pypsa.optimization.optimize``. + + If you want to enforce additional custom constraints, this is a good + location to add them. The arguments ``opts`` and + ``snakemake.config`` are expected to be attached to the network. 
+ """ + config = n.config + constraints = config["solving"].get("constraints", {}) + if constraints["BAU"] and n.generators.p_nom_extendable.any(): + add_BAU_constraints(n, config) + if constraints["SAFE"] and n.generators.p_nom_extendable.any(): + add_SAFE_constraints(n, config) + if constraints["CCL"] and n.generators.p_nom_extendable.any(): + add_CCL_constraints(n, config, planning_horizons) + + reserve = config["electricity"].get("operational_reserve", {}) + if reserve.get("activate"): + add_operational_reserve_margin(n, snapshots, config) + + if EQ_o := constraints["EQ"]: + add_EQ_constraints(n, EQ_o.replace("EQ", "")) + + if {"solar-hsat", "solar"}.issubset( + config["electricity"]["renewable_carriers"] + ) and {"solar-hsat", "solar"}.issubset( + config["electricity"]["extendable_carriers"]["Generator"] + ): + add_solar_potential_constraints( + n, config, snakemake.params.get("regret_run", False) + ) + + if n.config.get("sector", {}).get("tes", False): + if n.buses.index.str.contains( + r"urban central heat|urban decentral heat|rural heat", + case=False, + na=False, + ).any(): + add_TES_energy_to_power_ratio_constraints(n) + add_TES_charger_ratio_constraints(n) + + add_battery_constraints(n) + add_lossy_bidirectional_link_constraints(n) + add_pipe_retrofit_constraint(n) + if n._multi_invest: + add_carbon_constraint(n, snapshots) + add_carbon_budget_constraint(n, snapshots) + add_retrofit_gas_boiler_constraint(n, snapshots) + else: + add_co2_atmosphere_constraint(n, snapshots) + + if config["sector"]["enhanced_geothermal"]["enable"]: + add_flexible_egs_constraint(n) + + if config["sector"]["imports"]["enable"]: + add_import_limit_constraint(n, snapshots) + + if n.params.custom_extra_functionality: + source_path = pathlib.Path(n.params.custom_extra_functionality).resolve() + assert source_path.exists(), f"{source_path} does not exist" + sys.path.append(os.path.dirname(source_path)) + module_name = os.path.splitext(os.path.basename(source_path))[0] + module = importlib.import_module(module_name) + custom_extra_functionality = getattr(module, module_name) + custom_extra_functionality(n, snapshots, snakemake) # pylint: disable=E0601 + + def check_objective_value(n: pypsa.Network, solving: dict) -> None: """ Check if objective value matches expected value within tolerance. @@ -1224,7 +1312,7 @@ def solve_network( config: dict, params: dict, solving: dict, - snakemake: Any, + snakemake: Snakemake, rule_name: str | None = None, planning_horizons: str | None = None, **kwargs, @@ -1242,6 +1330,8 @@ def solve_network( Dictionary of solving parameters solving : Dict Dictionary of solving options and configuration + snakemake : snakemake.script.Snakemake + Snakemake instance for accessing workflow parameters rule_name : str, optional Name of the snakemake rule being executed planning_horizons : str, optional @@ -1266,89 +1356,6 @@ def solve_network( If objective value differs from expected value """ - def extra_functionality( - n: pypsa.Network, - snapshots: pd.DatetimeIndex, - planning_horizons: str | None = None, - ) -> None: - """ - Add custom constraints and functionality. - - Parameters - ---------- - n : pypsa.Network - The PyPSA network instance with config and params attributes - snapshots : pd.DatetimeIndex - Simulation timesteps - planning_horizons : str, optional - The current planning horizon year or None in perfect foresight - - Collects supplementary constraints which will be passed to - ``pypsa.optimization.optimize``. 
- - If you want to enforce additional custom constraints, this is a good - location to add them. The arguments ``opts`` and - ``snakemake.config`` are expected to be attached to the network. - """ - config = n.config - constraints = config["solving"].get("constraints", {}) - if constraints["BAU"] and n.generators.p_nom_extendable.any(): - add_BAU_constraints(n, config) - if constraints["SAFE"] and n.generators.p_nom_extendable.any(): - add_SAFE_constraints(n, config) - if constraints["CCL"] and n.generators.p_nom_extendable.any(): - add_CCL_constraints(n, config, planning_horizons) - - reserve = config["electricity"].get("operational_reserve", {}) - if reserve.get("activate"): - add_operational_reserve_margin(n, snapshots, config) - - if EQ_o := constraints["EQ"]: - add_EQ_constraints(n, EQ_o.replace("EQ", "")) - - if {"solar-hsat", "solar"}.issubset( - config["electricity"]["renewable_carriers"] - ) and {"solar-hsat", "solar"}.issubset( - config["electricity"]["extendable_carriers"]["Generator"] - ): - add_solar_potential_constraints( - n, config, snakemake.params.get("regret_run", False) - ) - - if n.config.get("sector", {}).get("tes", False): - if n.buses.index.str.contains( - r"urban central heat|urban decentral heat|rural heat", - case=False, - na=False, - ).any(): - add_TES_energy_to_power_ratio_constraints(n) - add_TES_charger_ratio_constraints(n) - - add_battery_constraints(n) - add_lossy_bidirectional_link_constraints(n) - add_pipe_retrofit_constraint(n) - if n._multi_invest: - add_carbon_constraint(n, snapshots) - add_carbon_budget_constraint(n, snapshots) - add_retrofit_gas_boiler_constraint(n, snapshots) - else: - add_co2_atmosphere_constraint(n, snapshots) - - if config["sector"]["enhanced_geothermal"]["enable"]: - add_flexible_egs_constraint(n) - - if config["sector"]["imports"]["enable"]: - add_import_limit_constraint(n, snapshots) - - if n.params.custom_extra_functionality: - source_path = pathlib.Path(n.params.custom_extra_functionality).resolve() - assert source_path.exists(), f"{source_path} does not exist" - sys.path.append(os.path.dirname(source_path)) - module_name = os.path.splitext(os.path.basename(source_path))[0] - module = importlib.import_module(module_name) - custom_extra_functionality = getattr(module, module_name) - custom_extra_functionality(n, snapshots, snakemake) # pylint: disable=E0601 - set_of_options = solving["solver"]["options"] cf_solving = solving["options"] @@ -1358,7 +1365,7 @@ def extra_functionality( ) kwargs["solver_name"] = solving["solver"]["name"] kwargs["extra_functionality"] = partial( - extra_functionality, planning_horizons=planning_horizons + extra_functionality, snakemake=snakemake, planning_horizons=planning_horizons ) kwargs["transmission_losses"] = cf_solving.get("transmission_losses", False) kwargs["linearized_unit_commitment"] = cf_solving.get( @@ -1472,10 +1479,10 @@ def extra_functionality( config=snakemake.config, params=snakemake.params, solving=snakemake.params.solving, + snakemake=snakemake, planning_horizons=planning_horizons, rule_name=snakemake.rule, log_fn=snakemake.log.solver, - snakemake=snakemake, ) logger.info(f"Maximum memory usage: {mem.mem_usage}") From eda4c9994d0879013ae2842b09fa1debab637e07 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 15 Aug 2025 17:29:44 +0200 Subject: [PATCH 073/202] fix minor error --- scripts/pypsa-de/solve_regret.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/scripts/pypsa-de/solve_regret.py b/scripts/pypsa-de/solve_regret.py index 
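# --- editor's note (not part of the patch): a minimal sketch of the functools.partial
# --- pattern that patch 072 uses to move extra_functionality to module level while
# --- still handing it the Snakemake object. FakeNetwork and the literal arguments are
# --- placeholders; the real solver entry point only ever passes (n, snapshots).
from functools import partial


def extra_functionality(n, snapshots, snakemake=None, planning_horizons=None):
    # module-level callback: everything beyond (n, snapshots) is bound via partial
    print(type(n).__name__, len(snapshots), planning_horizons)


def optimize(n, snapshots, extra_functionality):
    # stand-in for the solver call, which invokes the callback with two arguments
    extra_functionality(n, snapshots)


class FakeNetwork:
    pass


callback = partial(extra_functionality, snakemake=object(), planning_horizons="2030")
optimize(FakeNetwork(), snapshots=[1, 2, 3], extra_functionality=callback)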
af3ff4d78..aa26048ea 100644 --- a/scripts/pypsa-de/solve_regret.py +++ b/scripts/pypsa-de/solve_regret.py @@ -138,12 +138,7 @@ def fix_capacities(realization, decision, scope="DE", strict=False): # Above several assets are switched to extendable again, for these the p_nom value is restored to the value from the decision network - _idx = new.query(f"{attr}_extendable") - - if not _idx.difference(extendable_i).empty: - raise ValueError( - "Assets that are not extendable in the decision network have been set to extendable. This should not happen. Aborting." - ) + _idx = new.query(f"{attr}_extendable").index new.loc[_idx, attr] = deci.loc[_idx, attr] From 7ce649caeef5695b72309dde5c6ad57fe15672c0 Mon Sep 17 00:00:00 2001 From: JulianGeis Date: Tue, 26 Aug 2025 18:13:40 +0200 Subject: [PATCH 074/202] first plot ideas --- Snakefile | 50 +++ config/config.de.yaml | 2 +- scripts/pypsa-de/regret_plots.py | 227 ++++++++++++ scripts/pypsa-de/regret_plots_lt.py | 512 ++++++++++++++++++++++++++++ 4 files changed, 790 insertions(+), 1 deletion(-) create mode 100644 scripts/pypsa-de/regret_plots.py create mode 100644 scripts/pypsa-de/regret_plots_lt.py diff --git a/Snakefile b/Snakefile index 8fcb6bd4f..152916218 100644 --- a/Snakefile +++ b/Snakefile @@ -42,6 +42,10 @@ cutout_dir = config["atlite"]["cutout_directory"] CDIR = Path(cutout_dir).joinpath("" if run["shared_cutouts"] else RDIR) RESULTS = "results/" + RDIR +run_prefix = config["run"]["prefix"] +regret_scenarios = ["AriadneDemand", "LowDemand"] +horizons = [2025, 2030, 2035] + localrules: purge, @@ -1077,6 +1081,9 @@ rule regret_all: decision=config_provider("run", "name"), **config["scenario"], ), + elec_capa_comp_de_2025 = f"results/{run_prefix}/regret_plots/Ariadne_vs_LowDemand_LT/elec_capa_comp_de_2025.png", + elec_price_comp_de = f"results/{run_prefix}/regret_plots/Ariadne_vs_LowDemand/elec_price_comp_de.png", + rule regret_all_variables: @@ -1088,3 +1095,46 @@ rule regret_all_variables: ), script: "scripts/pypsa-de/plot_scenario_comparison.py" + + +rule regret_plots_lt: + params: + scenarios=get_scenarios(run), + planning_horizons=config_provider("scenario", "planning_horizons"), + plotting=config_provider("plotting"), + input: + networks = expand( + "results/{run}/{scenario}/networks/base_s_27__none_{year}.nc", + run=run_prefix, + scenario=regret_scenarios, + year=horizons + ), + regret_variables=expand( + "results/{run}/{scenario}/regret_variables/regret_variables_{scenario}_full.xlsx", + run=run_prefix, + scenario=regret_scenarios, + ), + output: + elec_capa_comp_de_2025 = f"results/{run_prefix}/regret_plots/Ariadne_vs_LowDemand_LT/elec_capa_comp_de_2025.png", + dir=directory(f"results/{run_prefix}/regret_plots/Ariadne_vs_LowDemand_LT"), + script: + "scripts/pypsa-de/regret_plots_lt.py" + +rule regret_plots: + params: + scenarios=get_scenarios(run), + planning_horizons=config_provider("scenario", "planning_horizons"), + plotting=config_provider("plotting"), + input: + regret_networks=expand( + "results/{run}/{scenario}/regret_networks/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", + run=run_prefix, + scenario=regret_scenarios, + decision=config_provider("run", "name"), + **config["scenario"], + ), + output: + elec_price_comp_de = f"results/{run_prefix}/regret_plots/Ariadne_vs_LowDemand/elec_price_comp_de.png", + dir=directory(f"results/{run_prefix}/regret_plots/Ariadne_vs_LowDemand"), + script: + "scripts/pypsa-de/regret_plots.py" diff --git a/config/config.de.yaml b/config/config.de.yaml index 
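# --- editor's note (not part of the patch): what the expand() calls in the new
# --- regret_plots_lt rule enumerate, rebuilt here with itertools.product purely for
# --- illustration; snakemake's expand() yields the same cartesian product of the
# --- wildcard values defined at the top of the Snakefile.
from itertools import product

pattern = "results/{run}/{scenario}/networks/base_s_27__none_{year}.nc"
run_prefix = ["20250826_regrets"]
regret_scenarios = ["AriadneDemand", "LowDemand"]
horizons = [2025, 2030, 2035]

paths = [
    pattern.format(run=r, scenario=s, year=y)
    for r, s, y in product(run_prefix, regret_scenarios, horizons)
]
assert len(paths) == 6  # 1 run * 2 scenarios * 3 horizons
print(paths[0])  # results/20250826_regrets/AriadneDemand/networks/base_s_27__none_2025.nc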
2c077e5e3..9d9358dd9 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -4,7 +4,7 @@ # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#run run: - prefix: 20250725_regrets + prefix: 20250826_regrets name: # - ExPol # - KN2045_Mix diff --git a/scripts/pypsa-de/regret_plots.py b/scripts/pypsa-de/regret_plots.py new file mode 100644 index 000000000..87e40d845 --- /dev/null +++ b/scripts/pypsa-de/regret_plots.py @@ -0,0 +1,227 @@ +import logging +import os +import sys + +sys.path.append(os.path.abspath(os.path.dirname(__file__))) +sys.path.append( + os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) +) + +import matplotlib.pyplot as plt +import numpy as np +import pandas as pd +import pypsa +import os, re, collections +import itertools + +from _helpers import configure_logging, mock_snakemake + +groups = { + "gas": ["gas CHP", "OCGT", "CCGT", "gas"], + "heat vent": ["heat vent"], + "water tanks": ["water tank", "water pit"], + "heat pump" : ["heat pump"], + "resistive heater" : ["resistive heater"], + "biomass": ["biomass"], + "lignite": ["lignite"], + "coal": ["coal"], + "oil": ["oil"], + "waste": ["waste"], + "solar": ["solar"], + "offwind": ["offwind"], +} + +def aggregate_by_keywords(opex_comp_agg, groups): + """ + Aggregate rows in opex_comp_agg according to keyword groups. + + Parameters + ---------- + opex_comp_agg : pd.DataFrame + DataFrame with row index as technology names. + groups : dict + Keys = new aggregated name, + Values = list of substrings to match in the index. + + Returns + ------- + pd.DataFrame + """ + df_out = opex_comp_agg.copy() + for new_name, keywords in groups.items(): + mask = df_out.index.to_series().str.contains("|".join(keywords)) + if mask.any(): + summed = df_out.loc[mask].sum() + df_out = df_out.drop(df_out.index[mask]) + df_out.loc[new_name] = summed + return df_out + + + +if __name__ == "__main__": + if "snakemake" not in globals(): + import os + import sys + + from _helpers import mock_snakemake + + snakemake = mock_snakemake( + "regret_plots", + ) + + configure_logging(snakemake) + config = snakemake.config + planning_horizons = snakemake.params.planning_horizons + scenarios = ["AriadneDemand", "LowDemand"] + tech_colors = snakemake.params.plotting["tech_colors"] + + # Nested dict: networks[year][scenario][decision] = Network + networks = collections.defaultdict( + lambda: collections.defaultdict(dict) + ) + + for fn in snakemake.input.regret_networks: + parts = fn.split(os.sep) + + # scenario is the folder name 2 levels up + scenario = parts[-3] + if scenario not in scenarios: + raise ValueError(f"Unexpected scenario '{scenario}' in {fn}. 
Allowed: {scenarios}") + + # extract year (4 digits before .nc) + m = re.search(r"_(\d{4})\.nc$", fn) + if not m: + raise ValueError(f"Could not parse year from {fn}") + year = int(m.group(1)) + + # extract decision_* (string until the 2nd underscore in filename) + filename = parts[-1] + decision = "_".join(filename.split("_")[:2]) + if not decision.startswith("decision"): + raise ValueError(f"Unexpected decision string in {filename}") + + # load and store + # print(f"Loading {fn} ...") + # print(f" scenario: {scenario}, year: {year}, decision: {decision}") + networks[year][scenario][decision] = pypsa.Network(fn) + + # ensure output directory exist + for dir in snakemake.output[-1]: + if not os.path.exists(dir): + os.makedirs(dir) + + # Plot electricity price duration curves + + fig, ax = plt.subplots(figsize=(10, 15), nrows=3, ncols=1) + ax = ax.flatten() + + years = [2025, 2030, 2035] + scenarios = ["AriadneDemand", "LowDemand"] + decisions = ["decision_AriadneDemand", "decision_LowDemand"] + + for i, year in enumerate(years): + for scenario, decision in itertools.product(scenarios, decisions): + + n = networks[year][scenario][decision] + lmps = n.buses_t.marginal_price.loc[:, + (n.buses.carrier == "AC") & (n.buses.index.str.startswith("DE"))] + lmps_sorted = pd.DataFrame(lmps.values.flatten(), columns=["lmp"]).sort_values(by="lmp", ascending=False) + lmps_sorted["percentage"] = np.arange(len(lmps_sorted)) / len(lmps_sorted) * 100 + + ax[i].plot( + lmps_sorted["percentage"], + lmps_sorted["lmp"], + label=f"{scenario}_{decision} (avg: {lmps_sorted['lmp'].mean():.2f})" + ) + + ax[i].legend() + ax[i].set_xlabel("Percentage of time") + ax[i].set_ylabel("€/MWh") + ax[i].set_title(f"Price duration curves {year}") + + plt.tight_layout() + plt.savefig(snakemake.output.elec_price_comp_de, bbox_inches="tight") + plt.close() + + + # Print CO2 prices + + # for i, year in enumerate(years): + # for scenario, decision in itertools.product(scenarios, decisions): + + # n = networks[year][scenario][decision] + + # print(f"CO2 price for {year}, {scenario}, {decision}: {n.global_constraints.loc["CO2Limit", "mu"] + n.global_constraints.loc["co2_limit-DE", "mu"]}") + + + # Plot OPEX + + kwargs = { + "groupby": ["bus", "carrier"], + "at_port": True, + "nice_names": False, + } + + fig, axes = plt.subplots(nrows=3, ncols=1, figsize=(12, 18)) + axes = axes.flatten() + + for i, year in enumerate(years): + opex_comp = pd.DataFrame(columns=["_".join(tup) for tup in itertools.product(scenarios, decisions)]) + + # Collect OPEX for all scenario-decision combinations + for scenario, decision in itertools.product(scenarios, decisions): + n = networks[year][scenario][decision] + + opex = ( + n.statistics.opex(**kwargs) + .filter(like="DE") + .groupby("carrier").sum() + .multiply(1e-9) # to billion € + ) + opex_comp[f"{scenario}_{decision}"] = opex + + # Aggregate cost components with less than 0.1 (100 Mio €) as "Other" + opex_comp_agg = aggregate_by_keywords(opex_comp, groups) + small_rows = opex_comp_agg.abs().max(axis=1) < 0.1 + other_row = opex_comp_agg[small_rows].sum(axis=0) + opex_comp_agg = opex_comp_agg.loc[~small_rows] + opex_comp_agg.loc['Other'] = other_row + + # Prepare labels with line breaks + labels = [col.replace('_', '\n') for col in opex_comp_agg.columns] + + # Plot stacked bar + ax = axes[i] + bottom = np.zeros(len(opex_comp_agg.columns)) + + for tech in opex_comp_agg.index: + values = opex_comp_agg.loc[tech].values + ax.bar(labels, values, bottom=bottom, color=tech_colors.get(tech, '#333333'), 
label=tech) + + # Add numbers in the middle, except for 'Other' + if tech != 'Other': + for j, val in enumerate(values): + if val > 0: # only if positive + ax.text( + j, + bottom[j] + val/2, # middle of the segment + f'{val:.2f}', + ha='center', va='center', fontsize=8, color='white' + ) + + bottom += values + + # Add total sum labels on top of bars + totals = opex_comp_agg.sum(axis=0) + for j, total in enumerate(totals): + ax.text(j, total + total*0.02, f'{total:.2f}', ha='center', va='bottom', fontsize=10) + + # Adjust y-limit + ax.set_ylim(0, max(totals)*1.08) + ax.set_ylabel('OPEX [billion €]') + ax.set_title(f'Stacked OPEX composition by technology, {year}') + + # Legend outside + axes[-1].legend(loc='upper left', bbox_to_anchor=(1,1)) + plt.savefig(snakemake.output[-1] + f"/opex_comp_de.png", bbox_inches="tight") + plt.close() diff --git a/scripts/pypsa-de/regret_plots_lt.py b/scripts/pypsa-de/regret_plots_lt.py new file mode 100644 index 000000000..98bf6152a --- /dev/null +++ b/scripts/pypsa-de/regret_plots_lt.py @@ -0,0 +1,512 @@ +import logging +import os +import sys + +sys.path.append(os.path.abspath(os.path.dirname(__file__))) # Adds 'scripts/' to path +sys.path.append( + os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) +) # Adds repo root + +import matplotlib.pyplot as plt +import numpy as np +import pandas as pd +import pypsa +from collections import defaultdict +import re +import matplotlib.patches as mpatches +from matplotlib.patches import Rectangle +from matplotlib.lines import Line2D +from _helpers import configure_logging, mock_snakemake + +groups = { + "gas (+CHP)": ["gas CHP", "OCGT", "CCGT"], + "heat pump" : ["heat pump"], + "resistive heater" : ["resistive heater"], + "biomass (+ CHP)": ["biomass"], + "coal (+ CHP)": ["coal"], + "oil (+ CHP)": ["oil"], + "waste CHP": ["waste"], + "solar": ["solar"], + "offwind": ["offwind"], +} + +def aggregate_by_keywords(df, groups): + """ + Aggregate rows in df according to keyword groups. + + Parameters + ---------- + df : pd.DataFrame + DataFrame with row index as technology names. + groups : dict + Keys = new aggregated name, + Values = list of substrings to match in the index. + + Returns + ------- + pd.DataFrame + """ + df_out = df.copy() + for new_name, keywords in groups.items(): + mask = df_out.index.to_series().str.contains("|".join(keywords)) + if mask.any(): + summed = df_out.loc[mask].sum() + df_out = df_out.drop(df_out.index[mask]) + df_out.loc[new_name] = summed + return df_out + + +def plot_capacity_comparison( + df, + scenarios=("AriadneDemand", "LowDemand"), + tech_colors=None, + plot_diff=False, + title="Electricity capacities", + ylabel="GW", + save_path=None, + figsize=(12, 6), + hatch_second="//", # hatch for second scenario in non-diff plot + show_dummy_legend=True, # legend with empty + hatched boxes +): + """ + Plot electricity capacities by carrier for multiple scenarios, or their difference. 
+ + Parameters + ---------- + df : pd.DataFrame # index: carriers, columns: scenario names + scenarios : tuple(str, str) # (base, compare) + tech_colors : dict # carrier -> color + plot_diff : bool # if True, plot (compare - base) as signed bars + """ + if tech_colors is None: + tech_colors = {} + + base_name, cmp_name = scenarios + df = df.copy() + + # --- Establish supply/demand membership AND ORDER from base scenario only --- + base = df[base_name] + supply_order = base[base >= 0].sort_values(ascending=False).index + demand_order = base[base < 0].abs().sort_values(ascending=False).index # sort by |capacity| + + # Build plotting table + if plot_diff: + # signed difference (compare - base) + diff = (df[cmp_name] - df[base_name]).rename("Diff").to_frame() + df_plot_supply = diff.loc[supply_order.intersection(diff.index)] + df_plot_demand = diff.loc[demand_order.intersection(diff.index)] + plot_columns = ["Diff"] + else: + # normal comparison: keep both scenarios + df_plot_supply = df[[base_name, cmp_name]].loc[supply_order.intersection(df.index)] + df_plot_demand = df[[base_name, cmp_name]].loc[demand_order.intersection(df.index)] + # For the demand side, show magnitudes (upward bars). Clip to avoid negative heights. + df_plot_demand = df_plot_demand.apply(lambda s: (-s).clip(lower=0)) + plot_columns = [base_name, cmp_name] + + # Positions + n_supply = len(df_plot_supply) + n_demand = len(df_plot_demand) + pos_supply = np.arange(n_supply) + pos_demand = np.arange(n_demand) + n_supply + 1 # +1 gap + bar_width = 0.35 + + # Plot + fig, ax = plt.subplots(figsize=figsize) + + # Supply bars + for j, col in enumerate(plot_columns): + if n_supply: + vals = df_plot_supply[col].values + ax.bar( + pos_supply + (j - 0.5) * bar_width if len(plot_columns) > 1 else pos_supply, + vals, + width=bar_width if len(plot_columns) > 1 else 0.8, + color=[tech_colors.get(t, "#1f77b4") for t in df_plot_supply.index], + hatch=("" if (not plot_diff and col == base_name) else (hatch_second if not plot_diff else "")), + alpha=0.9, + linewidth=0.0, + ) + + # Demand bars + for j, col in enumerate(plot_columns): + if n_demand: + vals = df_plot_demand[col].values if not plot_diff else (df_plot_demand[col].values) + # In diff mode we plot signed values; in normal mode they are already positive magnitudes + ax.bar( + pos_demand + (j - 0.5) * bar_width if len(plot_columns) > 1 else pos_demand, + vals, + width=bar_width if len(plot_columns) > 1 else 0.8, + color=[tech_colors.get(t, "#1f77b4") for t in df_plot_demand.index], + hatch=("" if (not plot_diff and col == base_name) else (hatch_second if not plot_diff else "")), + alpha=0.9, + linewidth=0.0, + ) + + # Divider line between supply and demand + if n_supply: + ax.axvline(x=n_supply - 0.5, color="black", linestyle="--", linewidth=1.0) + + # Baseline for signed diffs + if plot_diff: + ax.axhline(0, color="black", linewidth=1.0) + + # X labels + tech_labels = list(df_plot_supply.index) + list(df_plot_demand.index) + all_positions = np.concatenate([pos_supply, pos_demand]) + ax.set_xticks(all_positions) + ax.set_xticklabels(tech_labels, rotation=45, ha="right") + + ax.set_ylabel(ylabel) + ax.set_title(title) + ax.grid(True, axis="y", alpha=0.3) + + # Legend + if plot_diff: + handles = [ + mpatches.Patch( + facecolor="white", + edgecolor="black", + label=f"{cmp_name} – {base_name}", + ) + ] + ax.legend(handles=handles, title="Difference") + elif show_dummy_legend: + handles = [ + mpatches.Patch(facecolor="white", edgecolor="black", hatch="", label=base_name), + 
mpatches.Patch(facecolor="white", edgecolor="black", hatch=hatch_second, label=cmp_name), + ] + ax.legend(handles=handles, title="Scenario") + + plt.tight_layout() + if save_path: + plt.savefig(save_path, bbox_inches="tight") + plt.close() + + + +if __name__ == "__main__": + if "snakemake" not in globals(): + import os + import sys + + from _helpers import mock_snakemake + + snakemake = mock_snakemake( + "regret_plots_lt", + ) + + configure_logging(snakemake) + config = snakemake.config + planning_horizons = snakemake.params.planning_horizons + scenarios = ["AriadneDemand", "LowDemand"] + tech_colors = snakemake.params.plotting["tech_colors"] + + # Load networks + networks = defaultdict(dict) + + for fn in snakemake.input.networks: + scenario = fn.split(os.sep)[-3] + year = int(re.search(r"_(\d{4})\.nc$", fn).group(1)) + networks[scenario][year] = pypsa.Network(fn) + + # Load variables + vars_dict = {} + + for fn in snakemake.input.regret_variables: + df = ( + pd.read_excel( + fn, + index_col=list(range(5)), + # index_col=["Model", "Scenario", "Region", "Variable", "Unit"], + sheet_name="data", + ) + .groupby(["Variable", "Unit"], dropna=False) + .sum() + ).round(5) + + if "AriadneDemand" in fn: + vars_dict["AriadneDemand"] = df + elif "LowDemand" in fn: + vars_dict["LowDemand"] = df + + # ensure output directory exist + for dir in snakemake.output[-1]: + if not os.path.exists(dir): + os.makedirs(dir) + + + # Capacity plot DE + + tech_colors["gas (+CHP)"] = tech_colors["OCGT"] + tech_colors["biomass (+ CHP)"] = tech_colors["biomass"] + tech_colors["coal (+ CHP)"] = tech_colors["coal"] + tech_colors["oil (+ CHP)"] = tech_colors["oil"] + tech_colors["heat pump"] = tech_colors["heat pump"] + tech_colors["waste CHP"] = tech_colors["waste"] + + kwargs = { + "groupby": ["bus", "carrier"], + "at_port": True, + "nice_names": False, + } + + capa_comp = pd.DataFrame(columns=scenarios) + + for year in planning_horizons: + for scenario in scenarios: + n = networks[scenario][year] + + capacities = ( + n.statistics.optimal_capacity( + bus_carrier=["AC", "low voltage"], + **kwargs, + ) + .filter(like="DE") + .groupby("carrier").sum() + .drop( + ["AC", "DC", "electricity distribution grid"], + errors="ignore", + ) + .multiply(1e-3) # MW → GW + ) + capa_comp[scenario] = capacities + + capa_comp_agg = aggregate_by_keywords(capa_comp, groups) + # drop capa with less than 100 MW in both scenarios + capa_comp_agg = capa_comp_agg[ + (capa_comp_agg.abs() >= 0.1).any(axis=1) + ] + + plot_capacity_comparison( + df=capa_comp_agg, + scenarios=["AriadneDemand", "LowDemand"], + tech_colors=tech_colors, + plot_diff=False, + title=f"Electricity capacities in DE: {year}", + save_path=snakemake.output.elec_capa_comp_de_2025 if year == 2025 else snakemake.output[-1] + f"/elec_capa_comp_de_{year}.png", + ) + + plot_capacity_comparison( + df=capa_comp_agg, + scenarios=["AriadneDemand", "LowDemand"], + tech_colors=tech_colors, + plot_diff=True, + title=f"Difference of electricity capacities in DE: {year}", + save_path=snakemake.output[-1] + f"/elec_capa_diff_de_{year}.png", + ) + + # Capacity plot outside DE + + for year in planning_horizons: + for scenario in scenarios: + n = networks[scenario][year] + + capacities = ( + n.statistics.optimal_capacity( + bus_carrier=["AC", "low voltage"], + **kwargs, + ) + .loc[lambda df: ~df.index.get_level_values("bus").str.contains("DE", regex=False)] + .groupby("carrier").sum() + .drop(["AC", "DC", "electricity distribution grid"], errors="ignore") + .multiply(1e-3) # MW → GW + ) + 
capa_comp[scenario] = capacities + + capa_comp_agg = aggregate_by_keywords(capa_comp, groups) + # drop capa with less than 100 MW in both scenarios + capa_comp_agg = capa_comp_agg[ + (capa_comp_agg.abs() >= 0.1).any(axis=1) + ] + + plot_capacity_comparison( + df=capa_comp_agg, + scenarios=["AriadneDemand", "LowDemand"], + tech_colors=tech_colors, + plot_diff=False, + title=f"Electricity capacities in EU (outside DE): {year}", + save_path=snakemake.output[-1] + f"/elec_capa_comp_eu_{year}.png", + ) + + plot_capacity_comparison( + df=capa_comp_agg, + scenarios=["AriadneDemand", "LowDemand"], + tech_colors=tech_colors, + plot_diff=True, + title=f"Difference of electricity capacities in EU (outside DE): {year}", + save_path=snakemake.output[-1] + f"/elec_capa_diff_eu_{year}.png", + ) + + # Electricity demand as bar plot + + demand_comp = pd.DataFrame(columns=scenarios) + + for year in planning_horizons: + for scenario in scenarios: + n = networks[scenario][year] + + electricity_withdrawal = ( + n.statistics.withdrawal(bus_carrier=["low voltage", "AC"], **kwargs) + .filter(like="DE") + .groupby(["carrier"]) + .sum() + .multiply(1e-6) # MWh → TWh + ) + + demand_comp[scenario] = electricity_withdrawal + + demand_comp_agg = aggregate_by_keywords(demand_comp, groups) + # drop capa with less than 100 MW in both scenarios + demand_comp_agg = demand_comp_agg[ + (demand_comp_agg.abs() >= 0.1).any(axis=1) + ] + + plot_capacity_comparison( + df=demand_comp_agg, + scenarios=["AriadneDemand", "LowDemand"], + tech_colors=tech_colors, + plot_diff=False, + title=f"Electricity demand in DE: {year}", + ylabel="TWh", + save_path=snakemake.output[-1] + f"/elec_demand_comp_de_{year}.png", + ) + + plot_capacity_comparison( + df=demand_comp_agg, + scenarios=["AriadneDemand", "LowDemand"], + tech_colors=tech_colors, + plot_diff=True, + title=f"Difference of electricity demand in DE: {year}", + ylabel="TWh", + save_path=snakemake.output[-1] + f"/elec_demand_diff_de_{year}.png", + ) + + # ToDo + + # Electricity demand temporal (+ import) + # Split electricity demand of distribution grid + + + # System cost CAPEX + capex_data = {} + + for scenario in scenarios: + # Extract CAPEX data + df = vars_dict[scenario] + capex = df[df.index.get_level_values("Variable").str.startswith("System Cost|CAPEX")] + capex_top = capex[capex.index.get_level_values("Variable").str.count(r"\|") == 2] + + # Reset index and prepare data + capex_reset = capex_top.reset_index() + capex_reset['Category'] = capex_reset['Variable'].str.split('|').str[2] + + # Get year columns on first iteration + if scenario == scenarios[0]: + year_cols = [col for col in capex_reset.columns if str(col) in ['2025', '2030', '2035']] + + # Store processed data + capex_data[scenario] = capex_reset.set_index('Category')[year_cols].fillna(0) + + # Extract for easier access + capex_low_plot = capex_data["LowDemand"] + capex_ariadne_plot = capex_data["AriadneDemand"] + + # Set up the plot + fig, ax = plt.subplots(figsize=(12, 8)) + + # Define years and categories + years = [str(col) for col in year_cols] # Convert to strings for labels + categories = capex_low_plot.index.tolist() + + # Define colors for each category using tech_colors + category_color_map = { + 'Electricity': "gold", + 'Gases': tech_colors["gas"], + 'Heat': tech_colors["heat"], + 'Hydrogen': tech_colors["H2"], + 'Liquids': tech_colors["oil"], + 'Methanol': tech_colors["methanol"] + } + + # Create colors list based on categories + colors = [category_color_map.get(cat, '#808080') for cat in categories] + + # 
Set up bar positions + x = np.arange(len(years)) + width = 0.35 + + # Create stacked bars + bottom_low = np.zeros(len(years)) + bottom_ariadne = np.zeros(len(years)) + + for i, category in enumerate(categories): + # Get values for each year + low_values = [capex_low_plot.loc[category, col] for col in year_cols] + ariadne_values = [capex_ariadne_plot.loc[category, col] for col in year_cols] + + # Plot bars - LowDemand with hatching, AriadneDemand without + ax.bar(x - width/2, low_values, width, bottom=bottom_low, + label=category, color=colors[i], alpha=0.8, hatch='///') + ax.bar(x + width/2, ariadne_values, width, bottom=bottom_ariadne, + color=colors[i], alpha=0.8) + + # Add category values inside bars (only if value > threshold for readability) + for j in range(len(years)): + # LowDemand category values + if low_values[j] > 0.5: # Only show if value is significant + ax.text(j - width/2, bottom_low[j] + low_values[j]/2, + f'{low_values[j]:.1f}', ha='center', va='center', + fontsize=8, fontweight='bold', color='white') + + # AriadneDemand category values + if ariadne_values[j] > 0.5: # Only show if value is significant + ax.text(j + width/2, bottom_ariadne[j] + ariadne_values[j]/2, + f'{ariadne_values[j]:.1f}', ha='center', va='center', + fontsize=8, fontweight='bold', color='white') + + # Update bottom for stacking + bottom_low += low_values + bottom_ariadne += ariadne_values + + # Customize the plot + ax.set_xlabel('Year', fontsize=12) + ax.set_ylabel('billion €', fontsize=12) + ax.set_title('System Cost CAPEX Comparison: LowDemand vs AriadneDemand', fontsize=14, fontweight='bold') + ax.set_xticks(x) + ax.set_xticklabels(years) + + # Category legend elements + category_elements = [Rectangle((0,0),1,1, facecolor=colors[i], alpha=0.8, label=cat) + for i, cat in enumerate(categories)] + + # Scenario legend elements + scenario_elements = [ + Rectangle((0,0),1,1, facecolor='gray', alpha=0.8, hatch='///', label='LowDemand'), + Rectangle((0,0),1,1, facecolor='gray', alpha=0.8, label='AriadneDemand') + ] + + # Create separate legends + category_legend = ax.legend(handles=category_elements, loc='upper left', bbox_to_anchor=(1.05, 1), title='Categories') + scenario_legend = ax.legend(handles=scenario_elements, loc='upper left', bbox_to_anchor=(1.05, 0.6), title='Scenarios') + + # Add both legends to the plot + ax.add_artist(category_legend) + + # Add total CAPEX values on top of bars + for i, year in enumerate(years): + ax.text(i - width/2, bottom_low[i] + max(max(bottom_low), max(bottom_ariadne)) * 0.02, + f'{bottom_low[i]:.1f}', ha='center', fontweight='bold', fontsize=9) + ax.text(i + width/2, bottom_ariadne[i] + max(max(bottom_low), max(bottom_ariadne)) * 0.02, + f'{bottom_ariadne[i]:.1f}', ha='center', fontweight='bold', fontsize=9) + + # Add grid + ax.grid(True, alpha=0.3, axis='y') + ax.set_axisbelow(True) + + # Adjust y-axis limits to accommodate top labels + y_max = max(max(bottom_low), max(bottom_ariadne)) + ax.set_ylim(0, y_max * 1.1) + plt.savefig(snakemake.output[-1] + f"/capex_de.png", bbox_inches="tight") + plt.close() + From 1490f472fbe8a15f377e6c598db4b2a98f2cac1f Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 26 Aug 2025 16:13:59 +0000 Subject: [PATCH 075/202] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- Snakefile | 18 +- scripts/pypsa-de/regret_plots.py | 116 ++++++----- scripts/pypsa-de/regret_plots_lt.py | 296 +++++++++++++++++++--------- 3 files 
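# --- editor's note (not part of the patch): tiny usage example for the
# --- aggregate_by_keywords helper added in regret_plots_lt.py above. Carrier names
# --- and numbers are invented; only the substring-grouping behaviour is illustrated.
import pandas as pd

df = pd.DataFrame(
    {"AriadneDemand": [10.0, 5.0, 2.0, 1.0], "LowDemand": [8.0, 4.0, 3.0, 1.0]},
    index=["urban central gas CHP", "OCGT", "solar rooftop", "nuclear"],
)
groups = {"gas (+CHP)": ["gas CHP", "OCGT", "CCGT"], "solar": ["solar"]}

out = df.copy()
for new_name, keywords in groups.items():
    mask = out.index.to_series().str.contains("|".join(keywords))
    if mask.any():
        summed = out.loc[mask].sum()
        out = out.drop(out.index[mask])
        out.loc[new_name] = summed

# "urban central gas CHP" and "OCGT" collapse into "gas (+CHP)" (15, 12),
# "solar rooftop" becomes "solar" (2, 3) and "nuclear" is left untouched.
print(out)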
changed, 276 insertions(+), 154 deletions(-) diff --git a/Snakefile b/Snakefile index 152916218..cf71be359 100644 --- a/Snakefile +++ b/Snakefile @@ -43,8 +43,8 @@ CDIR = Path(cutout_dir).joinpath("" if run["shared_cutouts"] else RDIR) RESULTS = "results/" + RDIR run_prefix = config["run"]["prefix"] -regret_scenarios = ["AriadneDemand", "LowDemand"] -horizons = [2025, 2030, 2035] +regret_scenarios = ["AriadneDemand", "LowDemand"] +horizons = [2025, 2030, 2035] localrules: @@ -1081,9 +1081,8 @@ rule regret_all: decision=config_provider("run", "name"), **config["scenario"], ), - elec_capa_comp_de_2025 = f"results/{run_prefix}/regret_plots/Ariadne_vs_LowDemand_LT/elec_capa_comp_de_2025.png", - elec_price_comp_de = f"results/{run_prefix}/regret_plots/Ariadne_vs_LowDemand/elec_price_comp_de.png", - + elec_capa_comp_de_2025=f"results/{run_prefix}/regret_plots/Ariadne_vs_LowDemand_LT/elec_capa_comp_de_2025.png", + elec_price_comp_de=f"results/{run_prefix}/regret_plots/Ariadne_vs_LowDemand/elec_price_comp_de.png", rule regret_all_variables: @@ -1103,11 +1102,11 @@ rule regret_plots_lt: planning_horizons=config_provider("scenario", "planning_horizons"), plotting=config_provider("plotting"), input: - networks = expand( + networks=expand( "results/{run}/{scenario}/networks/base_s_27__none_{year}.nc", run=run_prefix, scenario=regret_scenarios, - year=horizons + year=horizons, ), regret_variables=expand( "results/{run}/{scenario}/regret_variables/regret_variables_{scenario}_full.xlsx", @@ -1115,11 +1114,12 @@ rule regret_plots_lt: scenario=regret_scenarios, ), output: - elec_capa_comp_de_2025 = f"results/{run_prefix}/regret_plots/Ariadne_vs_LowDemand_LT/elec_capa_comp_de_2025.png", + elec_capa_comp_de_2025=f"results/{run_prefix}/regret_plots/Ariadne_vs_LowDemand_LT/elec_capa_comp_de_2025.png", dir=directory(f"results/{run_prefix}/regret_plots/Ariadne_vs_LowDemand_LT"), script: "scripts/pypsa-de/regret_plots_lt.py" + rule regret_plots: params: scenarios=get_scenarios(run), @@ -1134,7 +1134,7 @@ rule regret_plots: **config["scenario"], ), output: - elec_price_comp_de = f"results/{run_prefix}/regret_plots/Ariadne_vs_LowDemand/elec_price_comp_de.png", + elec_price_comp_de=f"results/{run_prefix}/regret_plots/Ariadne_vs_LowDemand/elec_price_comp_de.png", dir=directory(f"results/{run_prefix}/regret_plots/Ariadne_vs_LowDemand"), script: "scripts/pypsa-de/regret_plots.py" diff --git a/scripts/pypsa-de/regret_plots.py b/scripts/pypsa-de/regret_plots.py index 87e40d845..562b3939a 100644 --- a/scripts/pypsa-de/regret_plots.py +++ b/scripts/pypsa-de/regret_plots.py @@ -1,27 +1,26 @@ -import logging import os import sys -sys.path.append(os.path.abspath(os.path.dirname(__file__))) -sys.path.append( - os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) -) +sys.path.append(os.path.abspath(os.path.dirname(__file__))) +sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))) + +import collections +import itertools +import os +import re import matplotlib.pyplot as plt import numpy as np import pandas as pd import pypsa -import os, re, collections -import itertools - from _helpers import configure_logging, mock_snakemake groups = { "gas": ["gas CHP", "OCGT", "CCGT", "gas"], "heat vent": ["heat vent"], "water tanks": ["water tank", "water pit"], - "heat pump" : ["heat pump"], - "resistive heater" : ["resistive heater"], + "heat pump": ["heat pump"], + "resistive heater": ["resistive heater"], "biomass": ["biomass"], "lignite": ["lignite"], "coal": ["coal"], @@ -31,18 +30,19 @@ "offwind": 
["offwind"], } + def aggregate_by_keywords(opex_comp_agg, groups): """ Aggregate rows in opex_comp_agg according to keyword groups. - + Parameters ---------- opex_comp_agg : pd.DataFrame DataFrame with row index as technology names. groups : dict - Keys = new aggregated name, + Keys = new aggregated name, Values = list of substrings to match in the index. - + Returns ------- pd.DataFrame @@ -57,7 +57,6 @@ def aggregate_by_keywords(opex_comp_agg, groups): return df_out - if __name__ == "__main__": if "snakemake" not in globals(): import os @@ -72,13 +71,11 @@ def aggregate_by_keywords(opex_comp_agg, groups): configure_logging(snakemake) config = snakemake.config planning_horizons = snakemake.params.planning_horizons - scenarios = ["AriadneDemand", "LowDemand"] + scenarios = ["AriadneDemand", "LowDemand"] tech_colors = snakemake.params.plotting["tech_colors"] # Nested dict: networks[year][scenario][decision] = Network - networks = collections.defaultdict( - lambda: collections.defaultdict(dict) - ) + networks = collections.defaultdict(lambda: collections.defaultdict(dict)) for fn in snakemake.input.regret_networks: parts = fn.split(os.sep) @@ -86,7 +83,9 @@ def aggregate_by_keywords(opex_comp_agg, groups): # scenario is the folder name 2 levels up scenario = parts[-3] if scenario not in scenarios: - raise ValueError(f"Unexpected scenario '{scenario}' in {fn}. Allowed: {scenarios}") + raise ValueError( + f"Unexpected scenario '{scenario}' in {fn}. Allowed: {scenarios}" + ) # extract year (4 digits before .nc) m = re.search(r"_(\d{4})\.nc$", fn) @@ -121,17 +120,21 @@ def aggregate_by_keywords(opex_comp_agg, groups): for i, year in enumerate(years): for scenario, decision in itertools.product(scenarios, decisions): - - n = networks[year][scenario][decision] - lmps = n.buses_t.marginal_price.loc[:, - (n.buses.carrier == "AC") & (n.buses.index.str.startswith("DE"))] - lmps_sorted = pd.DataFrame(lmps.values.flatten(), columns=["lmp"]).sort_values(by="lmp", ascending=False) - lmps_sorted["percentage"] = np.arange(len(lmps_sorted)) / len(lmps_sorted) * 100 + n = networks[year][scenario][decision] + lmps = n.buses_t.marginal_price.loc[ + :, (n.buses.carrier == "AC") & (n.buses.index.str.startswith("DE")) + ] + lmps_sorted = pd.DataFrame( + lmps.values.flatten(), columns=["lmp"] + ).sort_values(by="lmp", ascending=False) + lmps_sorted["percentage"] = ( + np.arange(len(lmps_sorted)) / len(lmps_sorted) * 100 + ) ax[i].plot( - lmps_sorted["percentage"], - lmps_sorted["lmp"], - label=f"{scenario}_{decision} (avg: {lmps_sorted['lmp'].mean():.2f})" + lmps_sorted["percentage"], + lmps_sorted["lmp"], + label=f"{scenario}_{decision} (avg: {lmps_sorted['lmp'].mean():.2f})", ) ax[i].legend() @@ -143,17 +146,15 @@ def aggregate_by_keywords(opex_comp_agg, groups): plt.savefig(snakemake.output.elec_price_comp_de, bbox_inches="tight") plt.close() - # Print CO2 prices - + # for i, year in enumerate(years): # for scenario, decision in itertools.product(scenarios, decisions): - # n = networks[year][scenario][decision] + # n = networks[year][scenario][decision] # print(f"CO2 price for {year}, {scenario}, {decision}: {n.global_constraints.loc["CO2Limit", "mu"] + n.global_constraints.loc["co2_limit-DE", "mu"]}") - # Plot OPEX kwargs = { @@ -166,7 +167,9 @@ def aggregate_by_keywords(opex_comp_agg, groups): axes = axes.flatten() for i, year in enumerate(years): - opex_comp = pd.DataFrame(columns=["_".join(tup) for tup in itertools.product(scenarios, decisions)]) + opex_comp = pd.DataFrame( + columns=["_".join(tup) for tup 
in itertools.product(scenarios, decisions)] + ) # Collect OPEX for all scenario-decision combinations for scenario, decision in itertools.product(scenarios, decisions): @@ -175,7 +178,8 @@ def aggregate_by_keywords(opex_comp_agg, groups): opex = ( n.statistics.opex(**kwargs) .filter(like="DE") - .groupby("carrier").sum() + .groupby("carrier") + .sum() .multiply(1e-9) # to billion € ) opex_comp[f"{scenario}_{decision}"] = opex @@ -185,10 +189,10 @@ def aggregate_by_keywords(opex_comp_agg, groups): small_rows = opex_comp_agg.abs().max(axis=1) < 0.1 other_row = opex_comp_agg[small_rows].sum(axis=0) opex_comp_agg = opex_comp_agg.loc[~small_rows] - opex_comp_agg.loc['Other'] = other_row + opex_comp_agg.loc["Other"] = other_row # Prepare labels with line breaks - labels = [col.replace('_', '\n') for col in opex_comp_agg.columns] + labels = [col.replace("_", "\n") for col in opex_comp_agg.columns] # Plot stacked bar ax = axes[i] @@ -196,17 +200,26 @@ def aggregate_by_keywords(opex_comp_agg, groups): for tech in opex_comp_agg.index: values = opex_comp_agg.loc[tech].values - ax.bar(labels, values, bottom=bottom, color=tech_colors.get(tech, '#333333'), label=tech) + ax.bar( + labels, + values, + bottom=bottom, + color=tech_colors.get(tech, "#333333"), + label=tech, + ) # Add numbers in the middle, except for 'Other' - if tech != 'Other': + if tech != "Other": for j, val in enumerate(values): if val > 0: # only if positive ax.text( - j, - bottom[j] + val/2, # middle of the segment - f'{val:.2f}', - ha='center', va='center', fontsize=8, color='white' + j, + bottom[j] + val / 2, # middle of the segment + f"{val:.2f}", + ha="center", + va="center", + fontsize=8, + color="white", ) bottom += values @@ -214,14 +227,21 @@ def aggregate_by_keywords(opex_comp_agg, groups): # Add total sum labels on top of bars totals = opex_comp_agg.sum(axis=0) for j, total in enumerate(totals): - ax.text(j, total + total*0.02, f'{total:.2f}', ha='center', va='bottom', fontsize=10) + ax.text( + j, + total + total * 0.02, + f"{total:.2f}", + ha="center", + va="bottom", + fontsize=10, + ) # Adjust y-limit - ax.set_ylim(0, max(totals)*1.08) - ax.set_ylabel('OPEX [billion €]') - ax.set_title(f'Stacked OPEX composition by technology, {year}') + ax.set_ylim(0, max(totals) * 1.08) + ax.set_ylabel("OPEX [billion €]") + ax.set_title(f"Stacked OPEX composition by technology, {year}") # Legend outside - axes[-1].legend(loc='upper left', bbox_to_anchor=(1,1)) - plt.savefig(snakemake.output[-1] + f"/opex_comp_de.png", bbox_inches="tight") + axes[-1].legend(loc="upper left", bbox_to_anchor=(1, 1)) + plt.savefig(snakemake.output[-1] + "/opex_comp_de.png", bbox_inches="tight") plt.close() diff --git a/scripts/pypsa-de/regret_plots_lt.py b/scripts/pypsa-de/regret_plots_lt.py index 98bf6152a..4a550d938 100644 --- a/scripts/pypsa-de/regret_plots_lt.py +++ b/scripts/pypsa-de/regret_plots_lt.py @@ -1,4 +1,3 @@ -import logging import os import sys @@ -7,21 +6,21 @@ os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) ) # Adds repo root +import re +from collections import defaultdict + +import matplotlib.patches as mpatches import matplotlib.pyplot as plt import numpy as np import pandas as pd import pypsa -from collections import defaultdict -import re -import matplotlib.patches as mpatches -from matplotlib.patches import Rectangle -from matplotlib.lines import Line2D from _helpers import configure_logging, mock_snakemake +from matplotlib.patches import Rectangle groups = { "gas (+CHP)": ["gas CHP", "OCGT", "CCGT"], - "heat pump" : 
["heat pump"], - "resistive heater" : ["resistive heater"], + "heat pump": ["heat pump"], + "resistive heater": ["resistive heater"], "biomass (+ CHP)": ["biomass"], "coal (+ CHP)": ["coal"], "oil (+ CHP)": ["oil"], @@ -30,18 +29,19 @@ "offwind": ["offwind"], } + def aggregate_by_keywords(df, groups): """ Aggregate rows in df according to keyword groups. - + Parameters ---------- df : pd.DataFrame DataFrame with row index as technology names. groups : dict - Keys = new aggregated name, + Keys = new aggregated name, Values = list of substrings to match in the index. - + Returns ------- pd.DataFrame @@ -65,8 +65,8 @@ def plot_capacity_comparison( ylabel="GW", save_path=None, figsize=(12, 6), - hatch_second="//", # hatch for second scenario in non-diff plot - show_dummy_legend=True, # legend with empty + hatched boxes + hatch_second="//", # hatch for second scenario in non-diff plot + show_dummy_legend=True, # legend with empty + hatched boxes ): """ Plot electricity capacities by carrier for multiple scenarios, or their difference. @@ -87,7 +87,9 @@ def plot_capacity_comparison( # --- Establish supply/demand membership AND ORDER from base scenario only --- base = df[base_name] supply_order = base[base >= 0].sort_values(ascending=False).index - demand_order = base[base < 0].abs().sort_values(ascending=False).index # sort by |capacity| + demand_order = ( + base[base < 0].abs().sort_values(ascending=False).index + ) # sort by |capacity| # Build plotting table if plot_diff: @@ -98,8 +100,12 @@ def plot_capacity_comparison( plot_columns = ["Diff"] else: # normal comparison: keep both scenarios - df_plot_supply = df[[base_name, cmp_name]].loc[supply_order.intersection(df.index)] - df_plot_demand = df[[base_name, cmp_name]].loc[demand_order.intersection(df.index)] + df_plot_supply = df[[base_name, cmp_name]].loc[ + supply_order.intersection(df.index) + ] + df_plot_demand = df[[base_name, cmp_name]].loc[ + demand_order.intersection(df.index) + ] # For the demand side, show magnitudes (upward bars). Clip to avoid negative heights. 
df_plot_demand = df_plot_demand.apply(lambda s: (-s).clip(lower=0)) plot_columns = [base_name, cmp_name] @@ -119,11 +125,17 @@ def plot_capacity_comparison( if n_supply: vals = df_plot_supply[col].values ax.bar( - pos_supply + (j - 0.5) * bar_width if len(plot_columns) > 1 else pos_supply, + pos_supply + (j - 0.5) * bar_width + if len(plot_columns) > 1 + else pos_supply, vals, width=bar_width if len(plot_columns) > 1 else 0.8, color=[tech_colors.get(t, "#1f77b4") for t in df_plot_supply.index], - hatch=("" if (not plot_diff and col == base_name) else (hatch_second if not plot_diff else "")), + hatch=( + "" + if (not plot_diff and col == base_name) + else (hatch_second if not plot_diff else "") + ), alpha=0.9, linewidth=0.0, ) @@ -131,14 +143,24 @@ def plot_capacity_comparison( # Demand bars for j, col in enumerate(plot_columns): if n_demand: - vals = df_plot_demand[col].values if not plot_diff else (df_plot_demand[col].values) + vals = ( + df_plot_demand[col].values + if not plot_diff + else (df_plot_demand[col].values) + ) # In diff mode we plot signed values; in normal mode they are already positive magnitudes ax.bar( - pos_demand + (j - 0.5) * bar_width if len(plot_columns) > 1 else pos_demand, + pos_demand + (j - 0.5) * bar_width + if len(plot_columns) > 1 + else pos_demand, vals, width=bar_width if len(plot_columns) > 1 else 0.8, color=[tech_colors.get(t, "#1f77b4") for t in df_plot_demand.index], - hatch=("" if (not plot_diff and col == base_name) else (hatch_second if not plot_diff else "")), + hatch=( + "" + if (not plot_diff and col == base_name) + else (hatch_second if not plot_diff else "") + ), alpha=0.9, linewidth=0.0, ) @@ -173,8 +195,12 @@ def plot_capacity_comparison( ax.legend(handles=handles, title="Difference") elif show_dummy_legend: handles = [ - mpatches.Patch(facecolor="white", edgecolor="black", hatch="", label=base_name), - mpatches.Patch(facecolor="white", edgecolor="black", hatch=hatch_second, label=cmp_name), + mpatches.Patch( + facecolor="white", edgecolor="black", hatch="", label=base_name + ), + mpatches.Patch( + facecolor="white", edgecolor="black", hatch=hatch_second, label=cmp_name + ), ] ax.legend(handles=handles, title="Scenario") @@ -184,7 +210,6 @@ def plot_capacity_comparison( plt.close() - if __name__ == "__main__": if "snakemake" not in globals(): import os @@ -199,7 +224,7 @@ def plot_capacity_comparison( configure_logging(snakemake) config = snakemake.config planning_horizons = snakemake.params.planning_horizons - scenarios = ["AriadneDemand", "LowDemand"] + scenarios = ["AriadneDemand", "LowDemand"] tech_colors = snakemake.params.plotting["tech_colors"] # Load networks @@ -229,13 +254,12 @@ def plot_capacity_comparison( vars_dict["AriadneDemand"] = df elif "LowDemand" in fn: vars_dict["LowDemand"] = df - + # ensure output directory exist for dir in snakemake.output[-1]: if not os.path.exists(dir): os.makedirs(dir) - # Capacity plot DE tech_colors["gas (+CHP)"] = tech_colors["OCGT"] @@ -258,25 +282,24 @@ def plot_capacity_comparison( n = networks[scenario][year] capacities = ( - n.statistics.optimal_capacity( - bus_carrier=["AC", "low voltage"], - **kwargs, - ) - .filter(like="DE") - .groupby("carrier").sum() - .drop( - ["AC", "DC", "electricity distribution grid"], - errors="ignore", - ) - .multiply(1e-3) # MW → GW + n.statistics.optimal_capacity( + bus_carrier=["AC", "low voltage"], + **kwargs, + ) + .filter(like="DE") + .groupby("carrier") + .sum() + .drop( + ["AC", "DC", "electricity distribution grid"], + errors="ignore", + ) + 
.multiply(1e-3) # MW → GW ) capa_comp[scenario] = capacities capa_comp_agg = aggregate_by_keywords(capa_comp, groups) # drop capa with less than 100 MW in both scenarios - capa_comp_agg = capa_comp_agg[ - (capa_comp_agg.abs() >= 0.1).any(axis=1) - ] + capa_comp_agg = capa_comp_agg[(capa_comp_agg.abs() >= 0.1).any(axis=1)] plot_capacity_comparison( df=capa_comp_agg, @@ -284,7 +307,9 @@ def plot_capacity_comparison( tech_colors=tech_colors, plot_diff=False, title=f"Electricity capacities in DE: {year}", - save_path=snakemake.output.elec_capa_comp_de_2025 if year == 2025 else snakemake.output[-1] + f"/elec_capa_comp_de_{year}.png", + save_path=snakemake.output.elec_capa_comp_de_2025 + if year == 2025 + else snakemake.output[-1] + f"/elec_capa_comp_de_{year}.png", ) plot_capacity_comparison( @@ -307,8 +332,13 @@ def plot_capacity_comparison( bus_carrier=["AC", "low voltage"], **kwargs, ) - .loc[lambda df: ~df.index.get_level_values("bus").str.contains("DE", regex=False)] - .groupby("carrier").sum() + .loc[ + lambda df: ~df.index.get_level_values("bus").str.contains( + "DE", regex=False + ) + ] + .groupby("carrier") + .sum() .drop(["AC", "DC", "electricity distribution grid"], errors="ignore") .multiply(1e-3) # MW → GW ) @@ -316,9 +346,7 @@ def plot_capacity_comparison( capa_comp_agg = aggregate_by_keywords(capa_comp, groups) # drop capa with less than 100 MW in both scenarios - capa_comp_agg = capa_comp_agg[ - (capa_comp_agg.abs() >= 0.1).any(axis=1) - ] + capa_comp_agg = capa_comp_agg[(capa_comp_agg.abs() >= 0.1).any(axis=1)] plot_capacity_comparison( df=capa_comp_agg, @@ -358,9 +386,7 @@ def plot_capacity_comparison( demand_comp_agg = aggregate_by_keywords(demand_comp, groups) # drop capa with less than 100 MW in both scenarios - demand_comp_agg = demand_comp_agg[ - (demand_comp_agg.abs() >= 0.1).any(axis=1) - ] + demand_comp_agg = demand_comp_agg[(demand_comp_agg.abs() >= 0.1).any(axis=1)] plot_capacity_comparison( df=demand_comp_agg, @@ -387,26 +413,37 @@ def plot_capacity_comparison( # Electricity demand temporal (+ import) # Split electricity demand of distribution grid - # System cost CAPEX capex_data = {} for scenario in scenarios: # Extract CAPEX data df = vars_dict[scenario] - capex = df[df.index.get_level_values("Variable").str.startswith("System Cost|CAPEX")] - capex_top = capex[capex.index.get_level_values("Variable").str.count(r"\|") == 2] - + capex = df[ + df.index.get_level_values("Variable").str.startswith( + "System Cost|CAPEX" + ) + ] + capex_top = capex[ + capex.index.get_level_values("Variable").str.count(r"\|") == 2 + ] + # Reset index and prepare data capex_reset = capex_top.reset_index() - capex_reset['Category'] = capex_reset['Variable'].str.split('|').str[2] - + capex_reset["Category"] = capex_reset["Variable"].str.split("|").str[2] + # Get year columns on first iteration if scenario == scenarios[0]: - year_cols = [col for col in capex_reset.columns if str(col) in ['2025', '2030', '2035']] - + year_cols = [ + col + for col in capex_reset.columns + if str(col) in ["2025", "2030", "2035"] + ] + # Store processed data - capex_data[scenario] = capex_reset.set_index('Category')[year_cols].fillna(0) + capex_data[scenario] = capex_reset.set_index("Category")[year_cols].fillna( + 0 + ) # Extract for easier access capex_low_plot = capex_data["LowDemand"] @@ -421,16 +458,16 @@ def plot_capacity_comparison( # Define colors for each category using tech_colors category_color_map = { - 'Electricity': "gold", - 'Gases': tech_colors["gas"], - 'Heat': tech_colors["heat"], - 
'Hydrogen': tech_colors["H2"], - 'Liquids': tech_colors["oil"], - 'Methanol': tech_colors["methanol"] + "Electricity": "gold", + "Gases": tech_colors["gas"], + "Heat": tech_colors["heat"], + "Hydrogen": tech_colors["H2"], + "Liquids": tech_colors["oil"], + "Methanol": tech_colors["methanol"], } # Create colors list based on categories - colors = [category_color_map.get(cat, '#808080') for cat in categories] + colors = [category_color_map.get(cat, "#808080") for cat in categories] # Set up bar positions x = np.arange(len(years)) @@ -443,70 +480,135 @@ def plot_capacity_comparison( for i, category in enumerate(categories): # Get values for each year low_values = [capex_low_plot.loc[category, col] for col in year_cols] - ariadne_values = [capex_ariadne_plot.loc[category, col] for col in year_cols] - + ariadne_values = [ + capex_ariadne_plot.loc[category, col] for col in year_cols + ] + # Plot bars - LowDemand with hatching, AriadneDemand without - ax.bar(x - width/2, low_values, width, bottom=bottom_low, - label=category, color=colors[i], alpha=0.8, hatch='///') - ax.bar(x + width/2, ariadne_values, width, bottom=bottom_ariadne, - color=colors[i], alpha=0.8) - + ax.bar( + x - width / 2, + low_values, + width, + bottom=bottom_low, + label=category, + color=colors[i], + alpha=0.8, + hatch="///", + ) + ax.bar( + x + width / 2, + ariadne_values, + width, + bottom=bottom_ariadne, + color=colors[i], + alpha=0.8, + ) + # Add category values inside bars (only if value > threshold for readability) for j in range(len(years)): # LowDemand category values if low_values[j] > 0.5: # Only show if value is significant - ax.text(j - width/2, bottom_low[j] + low_values[j]/2, - f'{low_values[j]:.1f}', ha='center', va='center', - fontsize=8, fontweight='bold', color='white') - + ax.text( + j - width / 2, + bottom_low[j] + low_values[j] / 2, + f"{low_values[j]:.1f}", + ha="center", + va="center", + fontsize=8, + fontweight="bold", + color="white", + ) + # AriadneDemand category values if ariadne_values[j] > 0.5: # Only show if value is significant - ax.text(j + width/2, bottom_ariadne[j] + ariadne_values[j]/2, - f'{ariadne_values[j]:.1f}', ha='center', va='center', - fontsize=8, fontweight='bold', color='white') - + ax.text( + j + width / 2, + bottom_ariadne[j] + ariadne_values[j] / 2, + f"{ariadne_values[j]:.1f}", + ha="center", + va="center", + fontsize=8, + fontweight="bold", + color="white", + ) + # Update bottom for stacking bottom_low += low_values bottom_ariadne += ariadne_values # Customize the plot - ax.set_xlabel('Year', fontsize=12) - ax.set_ylabel('billion €', fontsize=12) - ax.set_title('System Cost CAPEX Comparison: LowDemand vs AriadneDemand', fontsize=14, fontweight='bold') + ax.set_xlabel("Year", fontsize=12) + ax.set_ylabel("billion €", fontsize=12) + ax.set_title( + "System Cost CAPEX Comparison: LowDemand vs AriadneDemand", + fontsize=14, + fontweight="bold", + ) ax.set_xticks(x) ax.set_xticklabels(years) # Category legend elements - category_elements = [Rectangle((0,0),1,1, facecolor=colors[i], alpha=0.8, label=cat) - for i, cat in enumerate(categories)] + category_elements = [ + Rectangle((0, 0), 1, 1, facecolor=colors[i], alpha=0.8, label=cat) + for i, cat in enumerate(categories) + ] # Scenario legend elements scenario_elements = [ - Rectangle((0,0),1,1, facecolor='gray', alpha=0.8, hatch='///', label='LowDemand'), - Rectangle((0,0),1,1, facecolor='gray', alpha=0.8, label='AriadneDemand') + Rectangle( + (0, 0), + 1, + 1, + facecolor="gray", + alpha=0.8, + hatch="///", + label="LowDemand", 
+ ), + Rectangle((0, 0), 1, 1, facecolor="gray", alpha=0.8, label="AriadneDemand"), ] # Create separate legends - category_legend = ax.legend(handles=category_elements, loc='upper left', bbox_to_anchor=(1.05, 1), title='Categories') - scenario_legend = ax.legend(handles=scenario_elements, loc='upper left', bbox_to_anchor=(1.05, 0.6), title='Scenarios') + category_legend = ax.legend( + handles=category_elements, + loc="upper left", + bbox_to_anchor=(1.05, 1), + title="Categories", + ) + scenario_legend = ax.legend( + handles=scenario_elements, + loc="upper left", + bbox_to_anchor=(1.05, 0.6), + title="Scenarios", + ) # Add both legends to the plot ax.add_artist(category_legend) # Add total CAPEX values on top of bars for i, year in enumerate(years): - ax.text(i - width/2, bottom_low[i] + max(max(bottom_low), max(bottom_ariadne)) * 0.02, - f'{bottom_low[i]:.1f}', ha='center', fontweight='bold', fontsize=9) - ax.text(i + width/2, bottom_ariadne[i] + max(max(bottom_low), max(bottom_ariadne)) * 0.02, - f'{bottom_ariadne[i]:.1f}', ha='center', fontweight='bold', fontsize=9) + ax.text( + i - width / 2, + bottom_low[i] + max(max(bottom_low), max(bottom_ariadne)) * 0.02, + f"{bottom_low[i]:.1f}", + ha="center", + fontweight="bold", + fontsize=9, + ) + ax.text( + i + width / 2, + bottom_ariadne[i] + max(max(bottom_low), max(bottom_ariadne)) * 0.02, + f"{bottom_ariadne[i]:.1f}", + ha="center", + fontweight="bold", + fontsize=9, + ) # Add grid - ax.grid(True, alpha=0.3, axis='y') + ax.grid(True, alpha=0.3, axis="y") ax.set_axisbelow(True) # Adjust y-axis limits to accommodate top labels y_max = max(max(bottom_low), max(bottom_ariadne)) ax.set_ylim(0, y_max * 1.1) - plt.savefig(snakemake.output[-1] + f"/capex_de.png", bbox_inches="tight") + plt.savefig(snakemake.output[-1] + "/capex_de.png", bbox_inches="tight") plt.close() - From cfa2743854b39ca8b7a85717c3d23b5b4a30f15b Mon Sep 17 00:00:00 2001 From: JulianGeis Date: Wed, 27 Aug 2025 11:44:48 +0200 Subject: [PATCH 076/202] fix bug on directory creation --- scripts/pypsa-de/regret_plots.py | 5 ++--- scripts/pypsa-de/regret_plots_lt.py | 5 ++--- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/scripts/pypsa-de/regret_plots.py b/scripts/pypsa-de/regret_plots.py index 562b3939a..a5724f70a 100644 --- a/scripts/pypsa-de/regret_plots.py +++ b/scripts/pypsa-de/regret_plots.py @@ -105,9 +105,8 @@ def aggregate_by_keywords(opex_comp_agg, groups): networks[year][scenario][decision] = pypsa.Network(fn) # ensure output directory exist - for dir in snakemake.output[-1]: - if not os.path.exists(dir): - os.makedirs(dir) + if not os.path.exists(snakemake.output[-1]): + os.makedirs(snakemake.output[-1]) # Plot electricity price duration curves diff --git a/scripts/pypsa-de/regret_plots_lt.py b/scripts/pypsa-de/regret_plots_lt.py index 4a550d938..f8920ebae 100644 --- a/scripts/pypsa-de/regret_plots_lt.py +++ b/scripts/pypsa-de/regret_plots_lt.py @@ -256,9 +256,8 @@ def plot_capacity_comparison( vars_dict["LowDemand"] = df # ensure output directory exist - for dir in snakemake.output[-1]: - if not os.path.exists(dir): - os.makedirs(dir) + if not os.path.exists(snakemake.output[-1]): + os.makedirs(snakemake.output[-1]) # Capacity plot DE From d795386237f5af656a5bd7d55771efe580946d15 Mon Sep 17 00:00:00 2001 From: JulianGeis Date: Thu, 28 Aug 2025 13:30:03 +0200 Subject: [PATCH 077/202] increase plotting memory for 3H runs --- Snakefile | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/Snakefile b/Snakefile index cf71be359..37f90a999 100644 --- a/Snakefile 
+++ b/Snakefile
@@ -1116,6 +1116,8 @@ rule regret_plots_lt:
     output:
         elec_capa_comp_de_2025=f"results/{run_prefix}/regret_plots/Ariadne_vs_LowDemand_LT/elec_capa_comp_de_2025.png",
         dir=directory(f"results/{run_prefix}/regret_plots/Ariadne_vs_LowDemand_LT"),
+    resources:
+        mem_mb=32000,
     script:
         "scripts/pypsa-de/regret_plots_lt.py"

@@ -1136,5 +1138,7 @@ rule regret_plots:
     output:
         elec_price_comp_de=f"results/{run_prefix}/regret_plots/Ariadne_vs_LowDemand/elec_price_comp_de.png",
         dir=directory(f"results/{run_prefix}/regret_plots/Ariadne_vs_LowDemand"),
+    resources:
+        mem_mb=32000,
     script:
         "scripts/pypsa-de/regret_plots.py"

From 898d6082fede7122e890f2710a57012fefaf3e5b Mon Sep 17 00:00:00 2001
From: JulianGeis
Date: Thu, 28 Aug 2025 13:54:04 +0200
Subject: [PATCH 078/202] change diff order in plot

---
 scripts/pypsa-de/regret_plots_lt.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/scripts/pypsa-de/regret_plots_lt.py b/scripts/pypsa-de/regret_plots_lt.py
index f8920ebae..a77928800 100644
--- a/scripts/pypsa-de/regret_plots_lt.py
+++ b/scripts/pypsa-de/regret_plots_lt.py
@@ -93,8 +93,8 @@ def plot_capacity_comparison(

     # Build plotting table
     if plot_diff:
-        # signed difference (compare - base)
-        diff = (df[cmp_name] - df[base_name]).rename("Diff").to_frame()
+        # signed difference (base - compare)
+        diff = (df[base_name].abs() - df[cmp_name].abs()).rename("Diff").to_frame()
         df_plot_supply = diff.loc[supply_order.intersection(diff.index)]
         df_plot_demand = diff.loc[demand_order.intersection(diff.index)]
         plot_columns = ["Diff"]
@@ -189,7 +189,7 @@ def plot_capacity_comparison(
             mpatches.Patch(
                 facecolor="white",
                 edgecolor="black",
-                label=f"{cmp_name} – {base_name}",
+                label=f"{base_name} - {cmp_name}",
             )
         ]
         ax.legend(handles=handles, title="Difference")

From fd203ed05e39f7114bf11634e883e5b167cf42ea Mon Sep 17 00:00:00 2001
From: Michael Lindner
Date: Fri, 29 Aug 2025 18:13:28 +0200
Subject: [PATCH 079/202] increase marginal cost to avoid simultaneous import and export

---
 scripts/pypsa-de/modify_prenetwork.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/scripts/pypsa-de/modify_prenetwork.py b/scripts/pypsa-de/modify_prenetwork.py
index 3143c28b9..ffacab32c 100644
--- a/scripts/pypsa-de/modify_prenetwork.py
+++ b/scripts/pypsa-de/modify_prenetwork.py
@@ -397,7 +397,7 @@ def unravel_carbonaceous_fuels(n):
         carrier="renewable oil",
         p_nom=1e6,
         p_min_pu=0,
-        marginal_cost=0.01,
+        marginal_cost=1,
     )

     if snakemake.params.efuel_export_ban:
@@ -479,7 +479,7 @@ def unravel_carbonaceous_fuels(n):
         carrier="methanol",
         p_nom=1e6,
         p_min_pu=0,
-        marginal_cost=0.01,
+        marginal_cost=1,
     )

     if snakemake.params.efuel_export_ban:
@@ -702,7 +702,7 @@ def unravel_gasbus(n, costs):
         carrier="renewable gas",
         p_nom=1e6,
         p_min_pu=0,
-        marginal_cost=0.01,
+        marginal_cost=1,
     )

     if snakemake.params.efuel_export_ban:

From 3d8eee38cfd0c47d1d34146dd1f59bc601e66535 Mon Sep 17 00:00:00 2001
From: Michael Lindner
Date: Fri, 29 Aug 2025 18:14:41 +0200
Subject: [PATCH 080/202] more info on constraint mus

---
 scripts/pypsa-de/export_ariadne_variables.py | 51 ++++++++++++++++++--
 1 file changed, 48 insertions(+), 3 deletions(-)

diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py
index b5c9770c2..c5f1c2043 100644
--- a/scripts/pypsa-de/export_ariadne_variables.py
+++ b/scripts/pypsa-de/export_ariadne_variables.py
@@ -4331,11 +4331,56 @@ def get_policy(n, investment_year):
         -co2_limit_eu + co2_price_add_on
     )

-    # Price|Carbon|EU-wide Regulation Non-ETS
-    var["Price|Carbon|National Climate Target"] = -co2_limit_de
-    # Price|Carbon|National Climate Target Non-ETS
+    try:
+        var["Price|Policy|Electricity Import Independence"] = n.global_constraints.loc[
+            "Electricity_import_limit-DE", "mu"
+        ]
+    except KeyError:
+        var["Price|Policy|Electricity Import Independence"] = 0
+
+    try:
+        var["Price|Policy|Hydrogen Import Independence"] = n.global_constraints.loc[
+            "H2_import_limit-DE", "mu"
+        ]
+    except KeyError:
+        var["Price|Policy|Hydrogen Import Independence"] = 0
+
+    try:
+        var["Price|Policy|Methanol Import Independence"] = n.global_constraints.loc[
+            "methanol_import_limit-DE", "mu"
+        ]
+    except KeyError:
+        var["Price|Policy|Methanol Import Independence"] = 0
+
+    try:
+        var["Price|Policy|Hydrogen Export Ban"] = n.global_constraints.loc[
+            "H2_export_ban-DE", "mu"
+        ]
+    except KeyError:
+        var["Price|Policy|Hydrogen Export Ban"] = 0
+
+    try:
+        var["Price|Policy|Renewable Oil Import Independence"] = (
+            n.global_constraints.loc["renewable_oil_import_limit-DE", "mu"]
+        )
+    except KeyError:
+        var["Price|Policy|Renewable Oil Import Independence"] = 0
+
+    try:
+        var["Price|Policy|Renewable Gas Import Independence"] = (
+            n.global_constraints.loc["renewable_gas_import_limit-DE", "mu"]
+        )
+    except KeyError:
+        var["Price|Policy|Renewable Gas Import Independence"] = 0
+
+    try:
+        var["Price|Policy|Hydrogen Derivate Import Independence"] = (
+            n.global_constraints.loc["H2_derivate_import_limit-DE", "mu"]
+        )
+    except KeyError:
+        var["Price|Policy|Hydrogen Derivate Import Independence"] = 0

     return var


From 6182a99891d3cd0a86058b97cd47246adb7c6267 Mon Sep 17 00:00:00 2001
From: Michael Lindner
Date: Mon, 1 Sep 2025 13:27:38 +0200
Subject: [PATCH 081/202] fix double counting of methanol

---
 scripts/pypsa-de/export_ariadne_variables.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py
index c5f1c2043..6fd3fb883 100644
--- a/scripts/pypsa-de/export_ariadne_variables.py
+++ b/scripts/pypsa-de/export_ariadne_variables.py
@@ -4755,7 +4755,7 @@ def get_export_import_links(n, region, carriers):
     )

     exports_oil_renew, imports_oil_renew = get_export_import_links(
-        n, region, ["renewable oil", "methanol"]
+        n, region, ["renewable oil"]
     )

     var["Trade|Secondary Energy|Liquids|Biomass|Volume"] = (

From d214f6d0efe23116a235bcb09ac0548bc8715a33 Mon Sep 17 00:00:00 2001
From: Michael Lindner
Date: Mon, 1 Sep 2025 14:03:41 +0200
Subject: [PATCH 082/202] strengthen import limit for h2 derivatives

---
 scripts/pypsa-de/additional_functionality.py | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/scripts/pypsa-de/additional_functionality.py b/scripts/pypsa-de/additional_functionality.py
index 73af0b1cd..4d6415bb7 100644
--- a/scripts/pypsa-de/additional_functionality.py
+++ b/scripts/pypsa-de/additional_functionality.py
@@ -738,10 +738,16 @@ def add_h2_derivate_limit(n, investment_year, limits_volume_max):
         ].index

         carrier_idx_dict = {
+            # Every carrier should respect the limit individually
            "renewable_oil": 0,
            "methanol": 1,
            "renewable_gas": 2,
-            "H2_derivate": [0, 1, 2],
+            # Exports of one carrier should not compensate for imports of another carrier
+            "H2_derivate_oil_meoh": [0, 1],
+            "H2_derivate_oil_gas": [0, 2],
+            "H2_derivate_meoh_gas": [1, 2],
+            # The sum of all carriers should respect the limit
+            "H2_derivate_oil_meoh_gas": [0, 1, 2],
         }
         for carrier, idx in carrier_idx_dict.items():
             cname = f"{carrier}_import_limit-{ct}"

From
d1e27200af2acced7a7accbe463d9713ec42d9dc Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Mon, 1 Sep 2025 15:33:18 +0200 Subject: [PATCH 083/202] allow a small amount of derivatives to be imported (mostly MeOH) --- config/config.de.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/config/config.de.yaml b/config/config.de.yaml index 9d9358dd9..76d3edc67 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -610,8 +610,8 @@ solving: # boundary condition lower? DE: 2020: 0 - 2025: 0 - 2030: 10 + 2025: 6 + 2030: 20 2035: 105 2040: 200 2045: 300 From 9e4068501689d72fecf40bc616cdc9fd5c0b0cee Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Mon, 1 Sep 2025 15:34:56 +0200 Subject: [PATCH 084/202] fix capacities in all of EU --- config/config.de.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config/config.de.yaml b/config/config.de.yaml index 76d3edc67..9df8960fd 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -46,7 +46,7 @@ iiasa_database: reference_scenario: KN2045_Mix region: Deutschland regret_run: - scope_to_fix: DE # Supported values are DE and EU + scope_to_fix: EU # Supported values are DE and EU ageb_for_mobility: true # In 2020 use AGEB data for final energy demand and KBA for vehicles uba_for_mobility: false # For 2025–2035 use MWMS scenario from UBA Projektionsbericht 2025 uba_for_industry: false # For 2025–2035 use MWMS scenario from UBA Projektionsbericht 2025 From 7b52ba8b73e3d9d888b86fa41b5a2cf8f8f76c70 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Mon, 1 Sep 2025 15:35:14 +0200 Subject: [PATCH 085/202] allow more elec trade --- config/config.de.yaml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/config/config.de.yaml b/config/config.de.yaml index 9df8960fd..4f1a83b61 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -590,12 +590,12 @@ solving: electricity_import: DE: 2020: -20 - 2025: 0 - 2030: 0 - 2035: 40 - 2040: 80 + 2025: 25 + 2030: 50 + 2035: 75 + 2040: 100 2045: 125 - 2050: 125 + 2050: 150 electrolysis: # boundary condition lower? DE: From bcb4ec86bdad7cf7b2fbed6f853594be44ab386e Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Mon, 1 Sep 2025 15:37:01 +0200 Subject: [PATCH 086/202] streamline regret_runs; use only one co2 price in ST model --- scripts/pypsa-de/additional_functionality.py | 71 +++++++++----------- scripts/pypsa-de/solve_regret.py | 54 ++++++++++++++- 2 files changed, 83 insertions(+), 42 deletions(-) diff --git a/scripts/pypsa-de/additional_functionality.py b/scripts/pypsa-de/additional_functionality.py index 4d6415bb7..b0e416383 100644 --- a/scripts/pypsa-de/additional_functionality.py +++ b/scripts/pypsa-de/additional_functionality.py @@ -58,17 +58,6 @@ def add_capacity_limits( cname = f"capacity_{sense}-{ct}-{c.name}-{carrier.replace(' ', '-')}" - if snakemake.params.get("regret_run"): - logger.info( - f"Skipping capacity limit adjustment for {c.name} {carrier} with planning horizons {investment_year}, because of regret run." - ) - if cname in n.global_constraints.index: - logger.warning( - f"Global constraint {cname} already exists. Dropping it." - ) - n.global_constraints.drop(cname, inplace=True) - continue - if cname in n.global_constraints.index: logger.warning( f"Global constraint {cname} already exists. Dropping and adding it again." 
@@ -842,44 +831,46 @@ def additional_functionality(n, snapshots, snakemake): investment_year = int(snakemake.wildcards.planning_horizons[-4:]) constraints = snakemake.params.solving["constraints"] - add_capacity_limits( - n, investment_year, constraints["limits_capacity_min"], snakemake, "minimum" - ) - - add_capacity_limits( - n, investment_year, constraints["limits_capacity_max"], snakemake, "maximum" - ) + if not snakemake.config.get("regret_run"): + add_capacity_limits( + n, investment_year, constraints["limits_capacity_min"], snakemake, "minimum" + ) - add_power_limits(n, investment_year, constraints["limits_power_max"]) + add_capacity_limits( + n, investment_year, constraints["limits_capacity_max"], snakemake, "maximum" + ) - if snakemake.wildcards.clusters != "1": - h2_import_limits(n, investment_year, constraints["limits_volume_max"]) + add_power_limits(n, investment_year, constraints["limits_power_max"]) - electricity_import_limits(n, investment_year, constraints["limits_volume_max"]) + if snakemake.wildcards.clusters != "1": + h2_import_limits(n, investment_year, constraints["limits_volume_max"]) - if investment_year >= 2025: - h2_production_limits( - n, - investment_year, - constraints["limits_volume_min"], - constraints["limits_volume_max"], - ) + electricity_import_limits( + n, investment_year, constraints["limits_volume_max"] + ) - add_h2_derivate_limit(n, investment_year, constraints["limits_volume_max"]) + if investment_year >= 2025: + h2_production_limits( + n, + investment_year, + constraints["limits_volume_min"], + constraints["limits_volume_max"], + ) + add_h2_derivate_limit(n, investment_year, constraints["limits_volume_max"]) + + if isinstance(constraints["co2_budget_national"], dict): + add_national_co2_budgets( + n, + snakemake, + constraints["co2_budget_national"], + investment_year, + ) + else: + logger.warning("No national CO2 budget specified!") # force_boiler_profiles_existing_per_load(n) force_boiler_profiles_existing_per_boiler(n) - if isinstance(constraints["co2_budget_national"], dict): - add_national_co2_budgets( - n, - snakemake, - constraints["co2_budget_national"], - investment_year, - ) - else: - logger.warning("No national CO2 budget specified!") - if isinstance(constraints.get("decentral_heat_pump_budgets"), dict): add_decentral_heat_pump_budgets( n, diff --git a/scripts/pypsa-de/solve_regret.py b/scripts/pypsa-de/solve_regret.py index aa26048ea..30fcced05 100644 --- a/scripts/pypsa-de/solve_regret.py +++ b/scripts/pypsa-de/solve_regret.py @@ -177,10 +177,56 @@ def fix_capacities(realization, decision, scope="DE", strict=False): ) np.random.seed(solve_opts.get("seed", 123)) + strict = False + n = fix_capacities( - realization, decision, scope=snakemake.params.scope_to_fix, strict=False + realization, decision, scope=snakemake.params.scope_to_fix, strict=strict + ) + if strict: + snakemake.params.solving["options"]["load_shedding"] = True + + n.add("Carrier", "H2 vent", color="#dd2e23", nice_name="H2 vent") + + n.add( + "Generator", + n.buses.query("carrier=='H2'").index, + " H2 vent", + bus=n.buses.query("carrier=='H2'").index, + carrier="H2 vent", + sign=-1e-3, # Adjust sign to measure p and p_nom in kW instead of MW + marginal_cost=100, # Eur/kWh + p_nom=1e9, # kW ) + snakemake.config["regret_run"] = True + + if snakemake.config["regret_run"]: + constraint_names = [ # TODO assert really everything gets dropped + "H2_production_limit_upper-DE", + "H2_production_limit_lower-DE", + "capacity_minimum-DE-Generator-solar", + 
"capacity_maximum-DE-Generator-onwind", + "capacity_maximum-DE-Generator-offwind", + "capacity_maximum-DE-Generator-solar", + "capacity_maximum-DE-Store-co2-sequestered", + "capacity_maximum-DE-Link-HVC-to-air", + "H2_import_limit-DE", + "H2_export_ban-DE", + "Electricity_import_limit-DE", + "renewable_oil_import_limit-DE", + "methanol_import_limit-DE", + "renewable_gas_import_limit-DE", + "H2_derivate_import_limit-DE", + "H2_derivate_meoh_gas_import_limit-DE", + "H2_derivate_oil_meoh_import_limit-DE", + "H2_derivate_oil_meoh_gas_import_limit-DE", + "H2_derivate_oil_gas_import_limit-DE", + ] + logger.info("Regret run detected. Dropping the following constraints:") + logger.info(constraint_names) + + n.global_constraints.drop(constraint_names, errors="ignore", inplace=True) + if solve_opts["post_discretization"].get("enable") and not solve_opts.get( "skip_iterations" ): @@ -221,9 +267,13 @@ def fix_capacities(realization, decision, scope="DE", strict=False): p_max_pu=0, p_nom_extendable=True, carrier="co2", - marginal_cost=realization.global_constraints.loc["CO2Limit", "mu"], + marginal_cost=( + realization.global_constraints.loc["CO2Limit", "mu"] + + realization.global_constraints.loc["co2_limit-DE", "mu"] + ), ) n.global_constraints.drop("CO2Limit", inplace=True) + n.global_constraints.drop("co2_limit-DE", inplace=True) with memory_logger( filename=getattr(snakemake.log, "memory", None), interval=logging_frequency From 1cbe0f68143f2d57cd8c20679c67c000533c1588 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Mon, 1 Sep 2025 15:37:32 +0200 Subject: [PATCH 087/202] improve plots by swapping colors --- scripts/pypsa-de/plot_scenario_comparison.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/scripts/pypsa-de/plot_scenario_comparison.py b/scripts/pypsa-de/plot_scenario_comparison.py index a0bab12ba..1f1c92d97 100644 --- a/scripts/pypsa-de/plot_scenario_comparison.py +++ b/scripts/pypsa-de/plot_scenario_comparison.py @@ -34,7 +34,11 @@ def scenario_plot(df, output_dir, var): ) dfs = [] - for file in snakemake.input.exported_variables: + fns = snakemake.input.exported_variables + if "regret_variables" in fns[0]: + # reorder indices of fns as 0312 + fns = [fns[i] for i in [0, 3, 2, 1] if i < len(fns)] + for file in fns: _df = pd.read_excel( file, index_col=list(range(5)), sheet_name="data" ).droplevel(["Model", "Region"]) From a0f174afcefddbe9c91c1edc28b852d896431291 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Mon, 1 Sep 2025 15:46:26 +0200 Subject: [PATCH 088/202] simplify dropping of constraints --- scripts/pypsa-de/solve_regret.py | 33 +++++++++++--------------------- 1 file changed, 11 insertions(+), 22 deletions(-) diff --git a/scripts/pypsa-de/solve_regret.py b/scripts/pypsa-de/solve_regret.py index 30fcced05..2c12ea6f8 100644 --- a/scripts/pypsa-de/solve_regret.py +++ b/scripts/pypsa-de/solve_regret.py @@ -201,31 +201,20 @@ def fix_capacities(realization, decision, scope="DE", strict=False): snakemake.config["regret_run"] = True if snakemake.config["regret_run"]: - constraint_names = [ # TODO assert really everything gets dropped - "H2_production_limit_upper-DE", - "H2_production_limit_lower-DE", - "capacity_minimum-DE-Generator-solar", - "capacity_maximum-DE-Generator-onwind", - "capacity_maximum-DE-Generator-offwind", - "capacity_maximum-DE-Generator-solar", - "capacity_maximum-DE-Store-co2-sequestered", - "capacity_maximum-DE-Link-HVC-to-air", - "H2_import_limit-DE", - "H2_export_ban-DE", - "Electricity_import_limit-DE", - 
"renewable_oil_import_limit-DE", - "methanol_import_limit-DE", - "renewable_gas_import_limit-DE", - "H2_derivate_import_limit-DE", - "H2_derivate_meoh_gas_import_limit-DE", - "H2_derivate_oil_meoh_import_limit-DE", - "H2_derivate_oil_meoh_gas_import_limit-DE", - "H2_derivate_oil_gas_import_limit-DE", + to_keep = [ + "biomass limit", + "unsustainable biomass limit", + "co2_sequestration_limit", + "CO2Limit", + "co2_limit-DE", ] + + to_drop = n.global_constraints.index.difference(to_keep) + logger.info("Regret run detected. Dropping the following constraints:") - logger.info(constraint_names) + logger.info(to_drop) - n.global_constraints.drop(constraint_names, errors="ignore", inplace=True) + n.global_constraints.drop(to_drop, inplace=True) if solve_opts["post_discretization"].get("enable") and not solve_opts.get( "skip_iterations" From bead072dba87e27603f4c48739c7cf034555d8a5 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Mon, 1 Sep 2025 17:52:48 +0200 Subject: [PATCH 089/202] cheaper h2vent --- scripts/pypsa-de/solve_regret.py | 31 +++++++------------------------ 1 file changed, 7 insertions(+), 24 deletions(-) diff --git a/scripts/pypsa-de/solve_regret.py b/scripts/pypsa-de/solve_regret.py index 2c12ea6f8..e90f57557 100644 --- a/scripts/pypsa-de/solve_regret.py +++ b/scripts/pypsa-de/solve_regret.py @@ -152,8 +152,8 @@ def fix_capacities(realization, decision, scope="DE", strict=False): clusters=27, opts="", sector_opts="none", - planning_horizons="2035", - decision="AriadneDemand", + planning_horizons="2025", + decision="LowDemand", run="AriadneDemand", ) @@ -190,13 +190,14 @@ def fix_capacities(realization, decision, scope="DE", strict=False): n.add( "Generator", n.buses.query("carrier=='H2'").index, - " H2 vent", + " vent", bus=n.buses.query("carrier=='H2'").index, carrier="H2 vent", - sign=-1e-3, # Adjust sign to measure p and p_nom in kW instead of MW - marginal_cost=100, # Eur/kWh - p_nom=1e9, # kW + sign=-1, + marginal_cost=1, + p_nom=1e6, ) + # n.generators_t.p[n.generators.query("carrier == 'H2 vent'").index].T.mul(n.snapshot_weightings.generators).T.sum() snakemake.config["regret_run"] = True @@ -281,22 +282,4 @@ def fix_capacities(realization, decision, scope="DE", strict=False): n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards))) - constraint_diff = ( - (decision.global_constraints.mu - n.global_constraints.mu) - .round(2) - .sort_values() - ) - - logger.info( - "Difference in global constraints (decision - regret_network): %s", - constraint_diff, - ) - - if snakemake.input.realization == snakemake.input.decision: - if abs(constraint_diff["CO2Limit"]) > 1: - logger.error( - "Difference in CO2 price between long-term and short-term model is too high: %s", - constraint_diff["CO2Limit"], - ) - n.export_to_netcdf(snakemake.output.regret_network) From 88f77767392fa35e8150f69fadd523bc44fa98f5 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Mon, 1 Sep 2025 17:53:00 +0200 Subject: [PATCH 090/202] revert some of the pypsa-eur config changes --- config/config.de.yaml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/config/config.de.yaml b/config/config.de.yaml index 4f1a83b61..199d7e515 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -316,6 +316,7 @@ sector: urban decentral: true rural: true co2_spatial: true + co2_network: false biomass_spatial: true ammonia: false methanol: @@ -328,7 +329,10 @@ sector: regional_coal_demand: true #set to true if regional CO2 constraints needed gas_network: false regional_gas_demand: true + 
regional_co2_sequestration_potential: + enable: false H2_retrofit: true + hydrogen_turbine: false biogas_upgrading_cc: true biomass_to_liquid: true biomass_to_liquid_cc: true From 2f363572a88b58d1f57b5911a24809d5e4dbfa69 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Tue, 2 Sep 2025 13:02:24 +0200 Subject: [PATCH 091/202] activate power import limits in regret runs --- scripts/pypsa-de/additional_functionality.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/pypsa-de/additional_functionality.py b/scripts/pypsa-de/additional_functionality.py index b0e416383..94b14963e 100644 --- a/scripts/pypsa-de/additional_functionality.py +++ b/scripts/pypsa-de/additional_functionality.py @@ -840,8 +840,6 @@ def additional_functionality(n, snapshots, snakemake): n, investment_year, constraints["limits_capacity_max"], snakemake, "maximum" ) - add_power_limits(n, investment_year, constraints["limits_power_max"]) - if snakemake.wildcards.clusters != "1": h2_import_limits(n, investment_year, constraints["limits_volume_max"]) @@ -868,6 +866,8 @@ def additional_functionality(n, snapshots, snakemake): else: logger.warning("No national CO2 budget specified!") + add_power_limits(n, investment_year, constraints["limits_power_max"]) + # force_boiler_profiles_existing_per_load(n) force_boiler_profiles_existing_per_boiler(n) From 9846934734b11bb15308ff512d8c81c1e79699da Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Tue, 2 Sep 2025 16:29:08 +0200 Subject: [PATCH 092/202] split solve_regret and export a prenetwork --- Snakefile | 27 ++++- config/config.de.yaml | 2 + scripts/pypsa-de/additional_functionality.py | 2 +- ...ve_regret.py => prepare_regret_network.py} | 99 ++++++++----------- scripts/pypsa-de/solve_regret_network.py | 62 ++++++++++++ 5 files changed, 130 insertions(+), 62 deletions(-) rename scripts/pypsa-de/{solve_regret.py => prepare_regret_network.py} (76%) create mode 100644 scripts/pypsa-de/solve_regret_network.py diff --git a/Snakefile b/Snakefile index 37f90a999..dfff71b2b 100644 --- a/Snakefile +++ b/Snakefile @@ -977,21 +977,40 @@ rule ariadne_report_only: ), -rule solve_regret: +rule prepare_regret_network: params: solving=config_provider("solving"), foresight=config_provider("foresight"), co2_sequestration_potential=config_provider( "sector", "co2_sequestration_potential", default=200 ), - custom_extra_functionality=input_custom_extra_functionality, - energy_year=config_provider("energy", "energy_totals_year"), scope_to_fix=config_provider("iiasa_database", "regret_run", "scope_to_fix"), + h2_vent=config_provider("iiasa_database", "regret_run", "h2_vent"), + strict=config_provider("iiasa_database", "regret_run", "strict"), input: decision=RESULTS.replace("{run}", "{decision}") + "networks/base_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", realization=RESULTS + "networks/base_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", + output: + regret_prenetwork=RESULTS + + "regret_prenetworks/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", + log: + RESULTS + + "logs/regret_prenetwork_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.log", + script: + "scripts/pypsa-de/prepare_regret_network.py" + + +rule solve_regret: + params: + solving=config_provider("solving"), + regret_run=True, + energy_year=config_provider("energy", "energy_totals_year"), + custom_extra_functionality=input_custom_extra_functionality, + input: + regret_prenetwork=RESULTS + + 
"regret_prenetworks/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", co2_totals_name=resources("co2_totals.csv"), energy_totals=resources("energy_totals.csv"), output: @@ -1011,7 +1030,7 @@ rule solve_regret: mem_mb=config_provider("solving", "mem_mb"), runtime=config_provider("solving", "runtime", default="6h"), script: - "scripts/pypsa-de/solve_regret.py" + "scripts/pypsa-de/solve_regret_network.py" rule export_regret_variables: diff --git a/config/config.de.yaml b/config/config.de.yaml index 199d7e515..cae88fa74 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -47,6 +47,8 @@ iiasa_database: region: Deutschland regret_run: scope_to_fix: EU # Supported values are DE and EU + strict: false # If false, the model allows capacity expansion for virtual links and bottlenecks + h2_vent: true ageb_for_mobility: true # In 2020 use AGEB data for final energy demand and KBA for vehicles uba_for_mobility: false # For 2025–2035 use MWMS scenario from UBA Projektionsbericht 2025 uba_for_industry: false # For 2025–2035 use MWMS scenario from UBA Projektionsbericht 2025 diff --git a/scripts/pypsa-de/additional_functionality.py b/scripts/pypsa-de/additional_functionality.py index 94b14963e..60a4a4972 100644 --- a/scripts/pypsa-de/additional_functionality.py +++ b/scripts/pypsa-de/additional_functionality.py @@ -831,7 +831,7 @@ def additional_functionality(n, snapshots, snakemake): investment_year = int(snakemake.wildcards.planning_horizons[-4:]) constraints = snakemake.params.solving["constraints"] - if not snakemake.config.get("regret_run"): + if not snakemake.params.get("regret_run"): add_capacity_limits( n, investment_year, constraints["limits_capacity_min"], snakemake, "minimum" ) diff --git a/scripts/pypsa-de/solve_regret.py b/scripts/pypsa-de/prepare_regret_network.py similarity index 76% rename from scripts/pypsa-de/solve_regret.py rename to scripts/pypsa-de/prepare_regret_network.py index e90f57557..45f6198f1 100644 --- a/scripts/pypsa-de/solve_regret.py +++ b/scripts/pypsa-de/prepare_regret_network.py @@ -4,14 +4,13 @@ import numpy as np import pypsa -from scripts._benchmark import memory_logger from scripts._helpers import ( configure_logging, mock_snakemake, set_scenario_config, update_config_from_wildcards, ) -from scripts.solve_network import prepare_network, solve_network +from scripts.solve_network import prepare_network logger = logging.getLogger(__name__) @@ -162,7 +161,7 @@ def fix_capacities(realization, decision, scope="DE", strict=False): update_config_from_wildcards(snakemake.config, snakemake.wildcards) # Touch output file to ensure it exists - pathlib.Path(snakemake.output.regret_network).touch() + pathlib.Path(snakemake.output.regret_prenetwork).touch() realization = pypsa.Network(snakemake.input.realization) decision = pypsa.Network(snakemake.input.decision) @@ -177,50 +176,53 @@ def fix_capacities(realization, decision, scope="DE", strict=False): ) np.random.seed(solve_opts.get("seed", 123)) - strict = False + strict = snakemake.params["strict"] + scope_to_fix = snakemake.params["scope_to_fix"] + h2_vent = snakemake.params["h2_vent"] - n = fix_capacities( - realization, decision, scope=snakemake.params.scope_to_fix, strict=strict - ) + n = fix_capacities(realization, decision, scope=scope_to_fix, strict=strict) if strict: + logger.info( + "Strict regret run chosen. No capacities are extendable. Activating load shedding to prevent infeasibilites." 
+ ) snakemake.params.solving["options"]["load_shedding"] = True - n.add("Carrier", "H2 vent", color="#dd2e23", nice_name="H2 vent") - - n.add( - "Generator", - n.buses.query("carrier=='H2'").index, - " vent", - bus=n.buses.query("carrier=='H2'").index, - carrier="H2 vent", - sign=-1, - marginal_cost=1, - p_nom=1e6, - ) - # n.generators_t.p[n.generators.query("carrier == 'H2 vent'").index].T.mul(n.snapshot_weightings.generators).T.sum() + if h2_vent: + logger.info("H2 venting activated for regret run.") + n.add("Carrier", "H2 vent", color="#dd2e23", nice_name="H2 vent") - snakemake.config["regret_run"] = True + n.add( + "Generator", + n.buses.query("carrier=='H2'").index, + " vent", + bus=n.buses.query("carrier=='H2'").index, + carrier="H2 vent", + sign=-1, + marginal_cost=1, + p_nom=1e6, + ) + # n.generators_t.p[n.generators.query("carrier == 'H2 vent'").index].T.mul(n.snapshot_weightings.generators).T.sum() - if snakemake.config["regret_run"]: - to_keep = [ - "biomass limit", - "unsustainable biomass limit", - "co2_sequestration_limit", - "CO2Limit", - "co2_limit-DE", - ] + # Manipulating the global constraints + to_keep = [ + "biomass limit", + "unsustainable biomass limit", + "co2_sequestration_limit", + "CO2Limit", + "co2_limit-DE", + ] - to_drop = n.global_constraints.index.difference(to_keep) + to_drop = n.global_constraints.index.difference(to_keep) - logger.info("Regret run detected. Dropping the following constraints:") - logger.info(to_drop) + logger.info("Regret run detected. Dropping the following constraints:") + logger.info(to_drop) - n.global_constraints.drop(to_drop, inplace=True) + n.global_constraints.drop(to_drop, inplace=True) + # If running with post-discretization the last lines of optimize_transmission_expansion_iteratively have to be undone for the operational run if solve_opts["post_discretization"].get("enable") and not solve_opts.get( "skip_iterations" ): - # Undo the last lines of optimize transmission expansion iteratively n.lines.s_nom_extendable = False n.lines.s_nom = n.lines.s_nom_opt @@ -242,12 +244,11 @@ def fix_capacities(realization, decision, scope="DE", strict=False): regret_run=True, ) - if snakemake.params.scope_to_fix == "EU": + # These constraints have to be changed AFTER prepare_network + + if scope_to_fix == "EU": logger.info( - f"Fixing Scope 'EU' chosen. Setting the CO2 price to the price from the realization network to avoid infeasibilities: {realization.global_constraints.loc['CO2Limit', 'mu']} €/t_CO2" - ) - logger.warning( - "Please make sure that the long-term run with unchanged demand is consistent with the short-term run." + f"Fixing Scope 'EU' chosen. 
Setting the EU CO2 price to the sum of the EU and DE CO2 prices from the realization network: {realization.global_constraints.loc['CO2Limit', 'mu'] + realization.global_constraints.loc['co2_limit-DE', 'mu']} €/t_CO2" ) n.add( "Generator", @@ -262,24 +263,8 @@ def fix_capacities(realization, decision, scope="DE", strict=False): + realization.global_constraints.loc["co2_limit-DE", "mu"] ), ) + logger.info("Adding negative CO2 generator and dropping co2 limits.") n.global_constraints.drop("CO2Limit", inplace=True) n.global_constraints.drop("co2_limit-DE", inplace=True) - with memory_logger( - filename=getattr(snakemake.log, "memory", None), interval=logging_frequency - ) as mem: - solve_network( - n, - config=snakemake.config, - params=snakemake.params, - solving=snakemake.params.solving, - planning_horizons=planning_horizons, - rule_name=snakemake.rule, - log_fn=snakemake.log.solver, - snakemake=snakemake, - ) - logger.info(f"Maximum memory usage: {mem.mem_usage}") - - n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards))) - - n.export_to_netcdf(snakemake.output.regret_network) + n.export_to_netcdf(snakemake.output.regret_prenetwork) diff --git a/scripts/pypsa-de/solve_regret_network.py b/scripts/pypsa-de/solve_regret_network.py new file mode 100644 index 000000000..5affbaf65 --- /dev/null +++ b/scripts/pypsa-de/solve_regret_network.py @@ -0,0 +1,62 @@ +import logging +import pathlib + +import numpy as np +import pypsa + +from scripts._benchmark import memory_logger +from scripts._helpers import ( + configure_logging, + mock_snakemake, + set_scenario_config, + update_config_from_wildcards, +) +from scripts.solve_network import solve_network + +logger = logging.getLogger(__name__) + +if __name__ == "__main__": + if "snakemake" not in globals(): + snakemake = mock_snakemake( + "solve_regret", + clusters=27, + opts="", + sector_opts="none", + planning_horizons="2025", + decision="LowDemand", + run="AriadneDemand", + ) + + configure_logging(snakemake) + set_scenario_config(snakemake) + update_config_from_wildcards(snakemake.config, snakemake.wildcards) + + n = pypsa.Network(snakemake.input.regret_prenetwork) + + # Touch output file to ensure it exists + pathlib.Path(snakemake.output.regret_network).touch() + + planning_horizons = snakemake.wildcards.get("planning_horizons", None) + logging_frequency = snakemake.config.get("solving", {}).get( + "mem_logging_frequency", 30 + ) + np.random.seed(snakemake.params.solving["options"].get("seed", 123)) + + with memory_logger( + filename=getattr(snakemake.log, "memory", None), interval=logging_frequency + ) as mem: + solve_network( + n, + config=snakemake.config, + params=snakemake.params, + solving=snakemake.params.solving, + planning_horizons=planning_horizons, + rule_name=snakemake.rule, + log_fn=snakemake.log.solver, + snakemake=snakemake, + ) + logger.info(f"Maximum memory usage: {mem.mem_usage}") + + n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards))) + + n.export_to_netcdf(snakemake.output.regret_network) From 72402ac79739e0563e4960ff270755b868cb0297 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 3 Sep 2025 09:33:37 +0200 Subject: [PATCH 093/202] define no flex sensitivity rules --- Snakefile | 115 +++++++++++++++++++ scripts/pypsa-de/plot_scenario_comparison.py | 4 +- scripts/pypsa-de/solve_regret_network.py | 24 +++- 3 files changed, 140 insertions(+), 3 deletions(-) diff --git a/Snakefile b/Snakefile index dfff71b2b..d477709b9 100644 --- a/Snakefile +++ b/Snakefile @@ -1033,6 +1033,121 
@@ rule solve_regret: "scripts/pypsa-de/solve_regret_network.py" +rule solve_regret_no_flex: + params: + no_flex_sensitivity=True, + solving=config_provider("solving"), + regret_run=True, + energy_year=config_provider("energy", "energy_totals_year"), + custom_extra_functionality=input_custom_extra_functionality, + input: + regret_prenetwork=RESULTS + + "regret_prenetworks/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", + co2_totals_name=resources("co2_totals.csv"), + energy_totals=resources("energy_totals.csv"), + output: + regret_network=RESULTS + + "no_flex_regret_networks/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", + shadow: + shadow_config + log: + solver=RESULTS + + "logs/no_flex_decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}_solver.log", + memory=RESULTS + + "logs/no_flex_decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}_memory.log", + python=RESULTS + + "logs/no_flex_decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}_python.log", + threads: solver_threads + resources: + mem_mb=config_provider("solving", "mem_mb"), + runtime=config_provider("solving", "runtime", default="6h"), + script: + "scripts/pypsa-de/solve_regret_network.py" + + +rule export_regret_variables_no_flex: + params: + planning_horizons=config_provider("scenario", "planning_horizons"), + max_hours=config_provider("electricity", "max_hours"), + costs=config_provider("costs"), + config_industry=config_provider("industry"), + energy_totals_year=config_provider("energy", "energy_totals_year"), + co2_price_add_on_fossils=config_provider("co2_price_add_on_fossils"), + co2_sequestration_cost=config_provider("sector", "co2_sequestration_cost"), + post_discretization=config_provider("solving", "options", "post_discretization"), + NEP_year=config_provider("costs", "NEP"), + NEP_transmission=config_provider("costs", "transmission"), + input: + template="data/template_ariadne_database.xlsx", + industry_demands=expand( + resources( + "industrial_energy_demand_base_s_{clusters}_{planning_horizons}.csv" + ), + **config["scenario"], + allow_missing=True, + ), + networks=expand( + RESULTS + + "no_flex_regret_networks/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", + **config["scenario"], + allow_missing=True, + ), + costs=expand( + resources("costs_{planning_horizons}.csv"), + **config["scenario"], + allow_missing=True, + ), + industrial_production_per_country_tomorrow=expand( + resources( + "industrial_production_per_country_tomorrow_{planning_horizons}-modified.csv" + ), + **config["scenario"], + allow_missing=True, + ), + industry_sector_ratios=expand( + resources("industry_sector_ratios_{planning_horizons}.csv"), + **config["scenario"], + allow_missing=True, + ), + industrial_production=resources("industrial_production_per_country.csv"), + energy_totals=resources("energy_totals.csv"), + output: + exported_variables=RESULTS + + "no_flex_regret_variables/regret_variables_{decision}.xlsx", + exported_variables_full=RESULTS + + "no_flex_regret_variables/regret_variables_{decision}_full.xlsx", + resources: + mem_mb=16000, + log: + RESULTS + "logs/no_flex_export_regret_variables_{decision}.log", + script: + "scripts/pypsa-de/export_ariadne_variables.py" + + +rule regret_all_no_flex: + input: + regret_networks=expand( + RESULTS + + "no_flex_regret_networks/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", + run=config_provider("run", 
"name"), + decision=config_provider("run", "name"), + **config["scenario"], + ), + elec_capa_comp_de_2025=f"results/{run_prefix}/no_flex_regret_plots/Ariadne_vs_LowDemand_LT/elec_capa_comp_de_2025.png", + elec_price_comp_de=f"results/{run_prefix}/no_flex_regret_plots/Ariadne_vs_LowDemand/elec_price_comp_de.png", + + +rule regret_all_variables_no_flex: + input: + exported_variables=expand( + RESULTS + "no_flex_regret_variables/regret_variables_{decision}_full.xlsx", + run=config_provider("run", "name"), + decision=config_provider("run", "name"), + ), + script: + "scripts/pypsa-de/plot_scenario_comparison.py" + + rule export_regret_variables: params: planning_horizons=config_provider("scenario", "planning_horizons"), diff --git a/scripts/pypsa-de/plot_scenario_comparison.py b/scripts/pypsa-de/plot_scenario_comparison.py index 1f1c92d97..03582599c 100644 --- a/scripts/pypsa-de/plot_scenario_comparison.py +++ b/scripts/pypsa-de/plot_scenario_comparison.py @@ -49,7 +49,9 @@ def scenario_plot(df, output_dir, var): prefix = snakemake.config["run"]["prefix"] root_dir = snakemake.input[0][: snakemake.input[0].find(prefix)] comparison_dir = ( - "regret_comparison/" + "no_flex_regret_comparison/" + if "no_flex_regret_variables" in snakemake.input[0] + else "regret_comparison/" if "regret_variables" in snakemake.input[0] else "scenario_comparison/" ) diff --git a/scripts/pypsa-de/solve_regret_network.py b/scripts/pypsa-de/solve_regret_network.py index 5affbaf65..baaa38bab 100644 --- a/scripts/pypsa-de/solve_regret_network.py +++ b/scripts/pypsa-de/solve_regret_network.py @@ -18,11 +18,11 @@ if __name__ == "__main__": if "snakemake" not in globals(): snakemake = mock_snakemake( - "solve_regret", + "solve_regret_no_flex", clusters=27, opts="", sector_opts="none", - planning_horizons="2025", + planning_horizons="2035", decision="LowDemand", run="AriadneDemand", ) @@ -42,6 +42,26 @@ ) np.random.seed(snakemake.params.solving["options"].get("seed", 123)) + if snakemake.params.get("no_flex_sensitivity"): + carriers_to_drop = [ + "urban decentral water tanks charger", + "urban decentral water tanks discharger", + "urban decentral water tanks", + "rural water tanks charger", + "rural water tanks discharger", + "rural water tanks", + "battery charger", + "battery discharger", + "home battery charger", + "home battery discharger", + "battery", + "home battery", + "EV battery", + ] + n.remove("Link", n.links.query("carrier in @carriers_to_drop").index) + n.remove("Store", n.stores.query("carrier in @carriers_to_drop").index) + # n.remove("Bus", n.buses.query("carrier in @carriers_to_drop").index) + with memory_logger( filename=getattr(snakemake.log, "memory", None), interval=logging_frequency ) as mem: From bc2b4eb5b53dfc6cc0131a89420ad5a4af6377c2 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 3 Sep 2025 11:29:29 +0200 Subject: [PATCH 094/202] avoid code duplication with a wildcard --- Snakefile | 133 +++-------------------- scripts/pypsa-de/solve_regret_network.py | 5 +- 2 files changed, 21 insertions(+), 117 deletions(-) diff --git a/Snakefile b/Snakefile index d477709b9..ac48f86eb 100644 --- a/Snakefile +++ b/Snakefile @@ -1002,8 +1002,11 @@ rule prepare_regret_network: "scripts/pypsa-de/prepare_regret_network.py" -rule solve_regret: +rule solve_regret_network: params: + no_flex_sensitivity=lambda w: ( + True if w.regret_network == "no_flex_regret_networks" else False + ), solving=config_provider("solving"), regret_run=True, energy_year=config_provider("energy", "energy_totals_year"), @@ -1015,48 
+1018,16 @@ rule solve_regret: energy_totals=resources("energy_totals.csv"), output: regret_network=RESULTS - + "regret_networks/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", - shadow: - shadow_config - log: - solver=RESULTS - + "logs/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}_solver.log", - memory=RESULTS - + "logs/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}_memory.log", - python=RESULTS - + "logs/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}_python.log", - threads: solver_threads - resources: - mem_mb=config_provider("solving", "mem_mb"), - runtime=config_provider("solving", "runtime", default="6h"), - script: - "scripts/pypsa-de/solve_regret_network.py" - - -rule solve_regret_no_flex: - params: - no_flex_sensitivity=True, - solving=config_provider("solving"), - regret_run=True, - energy_year=config_provider("energy", "energy_totals_year"), - custom_extra_functionality=input_custom_extra_functionality, - input: - regret_prenetwork=RESULTS - + "regret_prenetworks/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", - co2_totals_name=resources("co2_totals.csv"), - energy_totals=resources("energy_totals.csv"), - output: - regret_network=RESULTS - + "no_flex_regret_networks/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", + + "{regret_network}/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", shadow: shadow_config log: solver=RESULTS - + "logs/no_flex_decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}_solver.log", + + "logs/{regret_network}/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}_solver.log", memory=RESULTS - + "logs/no_flex_decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}_memory.log", + + "logs/{regret_network}/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}_memory.log", python=RESULTS - + "logs/no_flex_decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}_python.log", + + "logs/{regret_network}/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}_python.log", threads: solver_threads resources: mem_mb=config_provider("solving", "mem_mb"), @@ -1065,7 +1036,7 @@ rule solve_regret_no_flex: "scripts/pypsa-de/solve_regret_network.py" -rule export_regret_variables_no_flex: +rule export_regret_variables: params: planning_horizons=config_provider("scenario", "planning_horizons"), max_hours=config_provider("electricity", "max_hours"), @@ -1088,7 +1059,7 @@ rule export_regret_variables_no_flex: ), networks=expand( RESULTS - + "no_flex_regret_networks/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", + + "{regret_dir}/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", **config["scenario"], allow_missing=True, ), @@ -1112,100 +1083,29 @@ rule export_regret_variables_no_flex: industrial_production=resources("industrial_production_per_country.csv"), energy_totals=resources("energy_totals.csv"), output: - exported_variables=RESULTS - + "no_flex_regret_variables/regret_variables_{decision}.xlsx", + exported_variables=RESULTS + "{regret_dir}/regret_variables_{decision}.xlsx", exported_variables_full=RESULTS - + "no_flex_regret_variables/regret_variables_{decision}_full.xlsx", + + "{regret_dir}/regret_variables_{decision}_full.xlsx", resources: mem_mb=16000, log: - RESULTS + 
"logs/no_flex_export_regret_variables_{decision}.log", + RESULTS + "{regret_dir}/logs/export_regret_variables_{decision}.log", script: "scripts/pypsa-de/export_ariadne_variables.py" -rule regret_all_no_flex: - input: - regret_networks=expand( - RESULTS - + "no_flex_regret_networks/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", - run=config_provider("run", "name"), - decision=config_provider("run", "name"), - **config["scenario"], - ), - elec_capa_comp_de_2025=f"results/{run_prefix}/no_flex_regret_plots/Ariadne_vs_LowDemand_LT/elec_capa_comp_de_2025.png", - elec_price_comp_de=f"results/{run_prefix}/no_flex_regret_plots/Ariadne_vs_LowDemand/elec_price_comp_de.png", - - rule regret_all_variables_no_flex: input: exported_variables=expand( - RESULTS + "no_flex_regret_variables/regret_variables_{decision}_full.xlsx", + RESULTS + "{regret_dir}/regret_variables_{decision}_full.xlsx", run=config_provider("run", "name"), decision=config_provider("run", "name"), + regret_dir="no_flex_regret_networks", ), script: "scripts/pypsa-de/plot_scenario_comparison.py" -rule export_regret_variables: - params: - planning_horizons=config_provider("scenario", "planning_horizons"), - max_hours=config_provider("electricity", "max_hours"), - costs=config_provider("costs"), - config_industry=config_provider("industry"), - energy_totals_year=config_provider("energy", "energy_totals_year"), - co2_price_add_on_fossils=config_provider("co2_price_add_on_fossils"), - co2_sequestration_cost=config_provider("sector", "co2_sequestration_cost"), - post_discretization=config_provider("solving", "options", "post_discretization"), - NEP_year=config_provider("costs", "NEP"), - NEP_transmission=config_provider("costs", "transmission"), - input: - template="data/template_ariadne_database.xlsx", - industry_demands=expand( - resources( - "industrial_energy_demand_base_s_{clusters}_{planning_horizons}.csv" - ), - **config["scenario"], - allow_missing=True, - ), - networks=expand( - RESULTS - + "regret_networks/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", - **config["scenario"], - allow_missing=True, - ), - costs=expand( - resources("costs_{planning_horizons}.csv"), - **config["scenario"], - allow_missing=True, - ), - industrial_production_per_country_tomorrow=expand( - resources( - "industrial_production_per_country_tomorrow_{planning_horizons}-modified.csv" - ), - **config["scenario"], - allow_missing=True, - ), - industry_sector_ratios=expand( - resources("industry_sector_ratios_{planning_horizons}.csv"), - **config["scenario"], - allow_missing=True, - ), - industrial_production=resources("industrial_production_per_country.csv"), - energy_totals=resources("energy_totals.csv"), - output: - exported_variables=RESULTS + "regret_variables/regret_variables_{decision}.xlsx", - exported_variables_full=RESULTS - + "regret_variables/regret_variables_{decision}_full.xlsx", - resources: - mem_mb=16000, - log: - RESULTS + "logs/export_regret_variables_{decision}.log", - script: - "scripts/pypsa-de/export_ariadne_variables.py" - - rule regret_all: input: regret_networks=expand( @@ -1222,9 +1122,10 @@ rule regret_all: rule regret_all_variables: input: exported_variables=expand( - RESULTS + "regret_variables/regret_variables_{decision}_full.xlsx", + RESULTS + "{regret_dir}/regret_variables_{decision}_full.xlsx", run=config_provider("run", "name"), decision=config_provider("run", "name"), + regret_dir="regret_networks", ), script: "scripts/pypsa-de/plot_scenario_comparison.py" diff 
--git a/scripts/pypsa-de/solve_regret_network.py b/scripts/pypsa-de/solve_regret_network.py index baaa38bab..285fa9a08 100644 --- a/scripts/pypsa-de/solve_regret_network.py +++ b/scripts/pypsa-de/solve_regret_network.py @@ -42,7 +42,10 @@ ) np.random.seed(snakemake.params.solving["options"].get("seed", 123)) - if snakemake.params.get("no_flex_sensitivity"): + if snakemake.params.get("no_flex_sensitivity") == True: + logger.info( + "No flexibility sensitivity analysis activated. Removing decentral TES, batteries, and BEV DSM from the network." + ) carriers_to_drop = [ "urban decentral water tanks charger", "urban decentral water tanks discharger", From e9e8e956da9bd895cf84c12bf733e759556c6b89 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 3 Sep 2025 11:50:56 +0200 Subject: [PATCH 095/202] streamline plotting rules --- Snakefile | 46 ++++++++++++++++++++++++---------------------- 1 file changed, 24 insertions(+), 22 deletions(-) diff --git a/Snakefile b/Snakefile index ac48f86eb..dec692162 100644 --- a/Snakefile +++ b/Snakefile @@ -42,10 +42,6 @@ cutout_dir = config["atlite"]["cutout_directory"] CDIR = Path(cutout_dir).joinpath("" if run["shared_cutouts"] else RDIR) RESULTS = "results/" + RDIR -run_prefix = config["run"]["prefix"] -regret_scenarios = ["AriadneDemand", "LowDemand"] -horizons = [2025, 2030, 2035] - localrules: purge, @@ -1110,13 +1106,14 @@ rule regret_all: input: regret_networks=expand( RESULTS - + "regret_networks/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", + + "{regret_dir}/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", run=config_provider("run", "name"), decision=config_provider("run", "name"), + regret_dir="regret_networks", **config["scenario"], ), - elec_capa_comp_de_2025=f"results/{run_prefix}/regret_plots/Ariadne_vs_LowDemand_LT/elec_capa_comp_de_2025.png", - elec_price_comp_de=f"results/{run_prefix}/regret_plots/Ariadne_vs_LowDemand/elec_price_comp_de.png", + elec_capa_comp_de_2025=f"results/{config["run"]["prefix"]}/regret_plots/Ariadne_vs_LowDemand_LT/elec_capa_comp_de_2025.png", + elec_price_comp_de=f"results/{config["run"]["prefix"]}/regret_plots/Ariadne_vs_LowDemand/elec_price_comp_de.png", rule regret_all_variables: @@ -1138,19 +1135,21 @@ rule regret_plots_lt: plotting=config_provider("plotting"), input: networks=expand( - "results/{run}/{scenario}/networks/base_s_27__none_{year}.nc", - run=run_prefix, - scenario=regret_scenarios, - year=horizons, + RESULTS + + "regret_networks/decision_{run}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", + **config["scenario"], + allow_missing=True, + run=config["run"]["name"], ), regret_variables=expand( - "results/{run}/{scenario}/regret_variables/regret_variables_{scenario}_full.xlsx", - run=run_prefix, - scenario=regret_scenarios, + RESULTS + "regret_networks/regret_variables_{run}_full.xlsx", + run=config["run"]["name"], ), output: - elec_capa_comp_de_2025=f"results/{run_prefix}/regret_plots/Ariadne_vs_LowDemand_LT/elec_capa_comp_de_2025.png", - dir=directory(f"results/{run_prefix}/regret_plots/Ariadne_vs_LowDemand_LT"), + elec_capa_comp_de_2025=f"results/{config["run"]["prefix"]}/regret_plots/Ariadne_vs_LowDemand_LT/elec_capa_comp_de_2025.png", + dir=directory( + f"results/{config["run"]["prefix"]}/regret_plots/Ariadne_vs_LowDemand_LT" + ), resources: mem_mb=32000, script: @@ -1164,15 +1163,18 @@ rule regret_plots: plotting=config_provider("plotting"), input: regret_networks=expand( - 
"results/{run}/{scenario}/regret_networks/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", - run=run_prefix, - scenario=regret_scenarios, - decision=config_provider("run", "name"), + RESULTS + + "regret_networks/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", **config["scenario"], + allow_missing=True, + run=config["run"]["name"], + decision=config["run"]["name"], ), output: - elec_price_comp_de=f"results/{run_prefix}/regret_plots/Ariadne_vs_LowDemand/elec_price_comp_de.png", - dir=directory(f"results/{run_prefix}/regret_plots/Ariadne_vs_LowDemand"), + elec_price_comp_de=f"results/{config["run"]["prefix"]}/regret_plots/Ariadne_vs_LowDemand/elec_price_comp_de.png", + dir=directory( + f"results/{config["run"]["prefix"]}/regret_plots/Ariadne_vs_LowDemand" + ), resources: mem_mb=32000, script: From 071b4365ca628c61b6688ef339c2e4b16de9b0d0 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 3 Sep 2025 14:36:11 +0200 Subject: [PATCH 096/202] further improvements to snakemake rules --- Snakefile | 93 +++++++++++++------- config/config.de.yaml | 2 +- scripts/pypsa-de/plot_scenario_comparison.py | 11 +-- scripts/pypsa-de/regret_plots_lt.py | 18 ++-- scripts/pypsa-de/solve_regret_network.py | 7 +- 5 files changed, 75 insertions(+), 56 deletions(-) diff --git a/Snakefile b/Snakefile index dec692162..99c56166e 100644 --- a/Snakefile +++ b/Snakefile @@ -849,8 +849,17 @@ rule plot_ariadne_variables: rule ariadne_all: + input: + price_carbon="results/" + + config["run"]["prefix"] + + "/scenario_comparison/Price-Carbon.png", + + +rule plot_ariadne_scenario_comparison: params: - output_dir="results/" + config["run"]["prefix"] + "/scenario_comparison/", + output_dir=directory( + "results/" + config["run"]["prefix"] + "/scenario_comparison/" + ), input: expand(RESULTS + "graphs/costs.svg", run=config_provider("run", "name")), # expand( @@ -868,6 +877,10 @@ rule ariadne_all: RESULTS + "ariadne/exported_variables_full.xlsx", run=config_provider("run", "name"), ), + output: + price_carbon="results/" + + config["run"]["prefix"] + + "/scenario_comparison/Price-Carbon.png", script: "scripts/pypsa-de/plot_scenario_comparison.py" @@ -1001,7 +1014,7 @@ rule prepare_regret_network: rule solve_regret_network: params: no_flex_sensitivity=lambda w: ( - True if w.regret_network == "no_flex_regret_networks" else False + True if w.regret_dir == "no_flex_regret_networks" else False ), solving=config_provider("solving"), regret_run=True, @@ -1014,16 +1027,16 @@ rule solve_regret_network: energy_totals=resources("energy_totals.csv"), output: regret_network=RESULTS - + "{regret_network}/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", + + "{regret_dir}/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", shadow: shadow_config log: solver=RESULTS - + "logs/{regret_network}/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}_solver.log", + + "logs/{regret_dir}/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}_solver.log", memory=RESULTS - + "logs/{regret_network}/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}_memory.log", + + "logs/{regret_dir}/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}_memory.log", python=RESULTS - + "logs/{regret_network}/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}_python.log", + + 
"logs/{regret_dir}/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}_python.log", threads: solver_threads resources: mem_mb=config_provider("solving", "mem_mb"), @@ -1090,40 +1103,49 @@ rule export_regret_variables: "scripts/pypsa-de/export_ariadne_variables.py" -rule regret_all_variables_no_flex: +rule regret_no_flex: input: - exported_variables=expand( - RESULTS + "{regret_dir}/regret_variables_{decision}_full.xlsx", - run=config_provider("run", "name"), - decision=config_provider("run", "name"), - regret_dir="no_flex_regret_networks", - ), - script: - "scripts/pypsa-de/plot_scenario_comparison.py" + "results/" + + config["run"]["prefix"] + + "/scenario_comparison/no_flex_regret_networks/Price-Carbon.png", + + +rule regret_base: + input: + "results/" + + config["run"]["prefix"] + + "/scenario_comparison/regret_networks/Price-Carbon.png", rule regret_all: input: - regret_networks=expand( - RESULTS - + "{regret_dir}/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", - run=config_provider("run", "name"), - decision=config_provider("run", "name"), - regret_dir="regret_networks", - **config["scenario"], + expand( + "results/" + + config["run"]["prefix"] + + "/scenario_comparison/{regret_dir}/Price-Carbon.png", + regret_dir=["no_flex_regret_networks", "regret_networks"], ), - elec_capa_comp_de_2025=f"results/{config["run"]["prefix"]}/regret_plots/Ariadne_vs_LowDemand_LT/elec_capa_comp_de_2025.png", - elec_price_comp_de=f"results/{config["run"]["prefix"]}/regret_plots/Ariadne_vs_LowDemand/elec_price_comp_de.png", + f"results/{config['run']['prefix']}/regret_plots/LT_comparison/elec_capa_comp_de_2025.png", + # expand("results/" + config["run"]["prefix"] + "/regret_plots/{regret_dir}/ST_comparison/elec_price_comp_de.png", + # regret_dir=["no_flex_regret_networks", "regret_networks"]), -rule regret_all_variables: +rule plot_scenario_comparison_regrets: + params: + output_dir=directory( + "results/" + config["run"]["prefix"] + "/scenario_comparison/{regret_dir}" + ), input: exported_variables=expand( RESULTS + "{regret_dir}/regret_variables_{decision}_full.xlsx", run=config_provider("run", "name"), decision=config_provider("run", "name"), - regret_dir="regret_networks", + allow_missing=True, ), + output: + price_carbon="results/" + + config["run"]["prefix"] + + "/scenario_comparison/{regret_dir}/Price-Carbon.png", script: "scripts/pypsa-de/plot_scenario_comparison.py" @@ -1133,12 +1155,14 @@ rule regret_plots_lt: scenarios=get_scenarios(run), planning_horizons=config_provider("scenario", "planning_horizons"), plotting=config_provider("plotting"), + output_dir=directory( + f"results/{config['run']['prefix']}/regret_plots/LT_comparison" + ), input: networks=expand( RESULTS + "regret_networks/decision_{run}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", **config["scenario"], - allow_missing=True, run=config["run"]["name"], ), regret_variables=expand( @@ -1146,10 +1170,7 @@ rule regret_plots_lt: run=config["run"]["name"], ), output: - elec_capa_comp_de_2025=f"results/{config["run"]["prefix"]}/regret_plots/Ariadne_vs_LowDemand_LT/elec_capa_comp_de_2025.png", - dir=directory( - f"results/{config["run"]["prefix"]}/regret_plots/Ariadne_vs_LowDemand_LT" - ), + elec_capa_comp_de_2025=f"results/{config['run']['prefix']}/regret_plots/LT_comparison/elec_capa_comp_de_2025.png", resources: mem_mb=32000, script: @@ -1164,16 +1185,20 @@ rule regret_plots: input: regret_networks=expand( RESULTS - + 
"regret_networks/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", + + "{regret_dir}/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", **config["scenario"], allow_missing=True, run=config["run"]["name"], decision=config["run"]["name"], ), output: - elec_price_comp_de=f"results/{config["run"]["prefix"]}/regret_plots/Ariadne_vs_LowDemand/elec_price_comp_de.png", + elec_price_comp_de="results/" + + config["run"]["prefix"] + + "/regret_plots/{regret_dir}/ST_comparison/elec_price_comp_de.png", dir=directory( - f"results/{config["run"]["prefix"]}/regret_plots/Ariadne_vs_LowDemand" + f"results/" + + config["run"]["prefix"] + + "/regret_plots/{regret_dir}/ST_comparison" ), resources: mem_mb=32000, diff --git a/config/config.de.yaml b/config/config.de.yaml index cae88fa74..02c082899 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -70,7 +70,7 @@ scenario: #- 2020 - 2025 - 2030 - - 2035 + # - 2035 # - 2040 # - 2045 # - 2050 diff --git a/scripts/pypsa-de/plot_scenario_comparison.py b/scripts/pypsa-de/plot_scenario_comparison.py index 03582599c..eb6843fa3 100644 --- a/scripts/pypsa-de/plot_scenario_comparison.py +++ b/scripts/pypsa-de/plot_scenario_comparison.py @@ -46,16 +46,7 @@ def scenario_plot(df, output_dir, var): df = pd.concat(dfs, axis=0) - prefix = snakemake.config["run"]["prefix"] - root_dir = snakemake.input[0][: snakemake.input[0].find(prefix)] - comparison_dir = ( - "no_flex_regret_comparison/" - if "no_flex_regret_variables" in snakemake.input[0] - else "regret_comparison/" - if "regret_variables" in snakemake.input[0] - else "scenario_comparison/" - ) - output_dir = root_dir + prefix + "/" + comparison_dir + output_dir = snakemake.params.output_dir if not os.path.exists(output_dir): os.makedirs(output_dir) diff --git a/scripts/pypsa-de/regret_plots_lt.py b/scripts/pypsa-de/regret_plots_lt.py index a77928800..491b04eeb 100644 --- a/scripts/pypsa-de/regret_plots_lt.py +++ b/scripts/pypsa-de/regret_plots_lt.py @@ -256,8 +256,8 @@ def plot_capacity_comparison( vars_dict["LowDemand"] = df # ensure output directory exist - if not os.path.exists(snakemake.output[-1]): - os.makedirs(snakemake.output[-1]) + if not os.path.exists(snakemake.params.output_dir): + os.makedirs(snakemake.params.output_dir) # Capacity plot DE @@ -308,7 +308,7 @@ def plot_capacity_comparison( title=f"Electricity capacities in DE: {year}", save_path=snakemake.output.elec_capa_comp_de_2025 if year == 2025 - else snakemake.output[-1] + f"/elec_capa_comp_de_{year}.png", + else snakemake.params.output_dir + f"/elec_capa_comp_de_{year}.png", ) plot_capacity_comparison( @@ -317,7 +317,7 @@ def plot_capacity_comparison( tech_colors=tech_colors, plot_diff=True, title=f"Difference of electricity capacities in DE: {year}", - save_path=snakemake.output[-1] + f"/elec_capa_diff_de_{year}.png", + save_path=snakemake.params.output_dir + f"/elec_capa_diff_de_{year}.png", ) # Capacity plot outside DE @@ -353,7 +353,7 @@ def plot_capacity_comparison( tech_colors=tech_colors, plot_diff=False, title=f"Electricity capacities in EU (outside DE): {year}", - save_path=snakemake.output[-1] + f"/elec_capa_comp_eu_{year}.png", + save_path=snakemake.params.output_dir + f"/elec_capa_comp_eu_{year}.png", ) plot_capacity_comparison( @@ -362,7 +362,7 @@ def plot_capacity_comparison( tech_colors=tech_colors, plot_diff=True, title=f"Difference of electricity capacities in EU (outside DE): {year}", - save_path=snakemake.output[-1] + f"/elec_capa_diff_eu_{year}.png", + 
save_path=snakemake.params.output_dir + f"/elec_capa_diff_eu_{year}.png", ) # Electricity demand as bar plot @@ -394,7 +394,7 @@ def plot_capacity_comparison( plot_diff=False, title=f"Electricity demand in DE: {year}", ylabel="TWh", - save_path=snakemake.output[-1] + f"/elec_demand_comp_de_{year}.png", + save_path=snakemake.params.output_dir + f"/elec_demand_comp_de_{year}.png", ) plot_capacity_comparison( @@ -404,7 +404,7 @@ def plot_capacity_comparison( plot_diff=True, title=f"Difference of electricity demand in DE: {year}", ylabel="TWh", - save_path=snakemake.output[-1] + f"/elec_demand_diff_de_{year}.png", + save_path=snakemake.params.output_dir + f"/elec_demand_diff_de_{year}.png", ) # ToDo @@ -609,5 +609,5 @@ def plot_capacity_comparison( # Adjust y-axis limits to accommodate top labels y_max = max(max(bottom_low), max(bottom_ariadne)) ax.set_ylim(0, y_max * 1.1) - plt.savefig(snakemake.output[-1] + "/capex_de.png", bbox_inches="tight") + plt.savefig(snakemake.params.output_dir + "/capex_de.png", bbox_inches="tight") plt.close() diff --git a/scripts/pypsa-de/solve_regret_network.py b/scripts/pypsa-de/solve_regret_network.py index 285fa9a08..1363b8949 100644 --- a/scripts/pypsa-de/solve_regret_network.py +++ b/scripts/pypsa-de/solve_regret_network.py @@ -18,13 +18,14 @@ if __name__ == "__main__": if "snakemake" not in globals(): snakemake = mock_snakemake( - "solve_regret_no_flex", + "solve_regret_network", clusters=27, opts="", sector_opts="none", planning_horizons="2035", decision="LowDemand", run="AriadneDemand", + regret_dir="no_flex_regret_networks", ) configure_logging(snakemake) @@ -63,7 +64,9 @@ ] n.remove("Link", n.links.query("carrier in @carriers_to_drop").index) n.remove("Store", n.stores.query("carrier in @carriers_to_drop").index) - # n.remove("Bus", n.buses.query("carrier in @carriers_to_drop").index) + # Need to keep the EV battery bus + carriers_to_drop.remove("EV battery") + n.remove("Bus", n.buses.query("carrier in @carriers_to_drop").index) with memory_logger( filename=getattr(snakemake.log, "memory", None), interval=logging_frequency From e87cdfdcf4f5b11aedaafc8cfb2fcf9836786944 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 3 Sep 2025 14:51:33 +0200 Subject: [PATCH 097/202] revert to default pypsa-de electricity imports --- config/config.de.yaml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/config/config.de.yaml b/config/config.de.yaml index 02c082899..be096fbbc 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -596,12 +596,12 @@ solving: electricity_import: DE: 2020: -20 - 2025: 25 - 2030: 50 - 2035: 75 - 2040: 100 + 2025: 0 + 2030: 0 + 2035: 40 + 2040: 80 2045: 125 - 2050: 150 + 2050: 125 electrolysis: # boundary condition lower? 
DE: From 4aaa57e5fa4ce18721ef46f5d1bd0736505c7876 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 3 Sep 2025 14:53:01 +0200 Subject: [PATCH 098/202] delay offwind expansion by two years to match onwind targets (which are for 2028) --- config/scenarios.manual.yaml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/config/scenarios.manual.yaml b/config/scenarios.manual.yaml index 8ab1eea0a..96474e555 100644 --- a/config/scenarios.manual.yaml +++ b/config/scenarios.manual.yaml @@ -78,6 +78,8 @@ AriadneDemand: options: transmission_losses: 0 noisy_costs: false + offshore_nep_force: + delay_years: 2 # electricity: # transmission_limit: v1 @@ -86,6 +88,8 @@ LowDemand: reference_scenario: ExPol # e.g., CO2 emissions from REMIND will follow ExPol uba_for_industry: true uba_for_mobility: true + offshore_nep_force: + delay_years: 2 # electricity: # transmission_limit: v1 solving: From 869bede03270fdf03758167511faad98df60be70 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 3 Sep 2025 15:18:17 +0200 Subject: [PATCH 099/202] update prefix --- config/config.de.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config/config.de.yaml b/config/config.de.yaml index be096fbbc..cce393e5f 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -4,7 +4,7 @@ # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#run run: - prefix: 20250826_regrets + prefix: 20250903_regret_less_offwind name: # - ExPol # - KN2045_Mix From fd19750fab989b54ab4693413427926a194e75ee Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Thu, 4 Sep 2025 13:57:12 +0200 Subject: [PATCH 100/202] more RES capacities in HighDemand; AriadneDemand -> HighDemand rename --- config/config.de.yaml | 2 +- config/scenarios.manual.yaml | 104 +++++++++++-------- scripts/pypsa-de/export_ariadne_variables.py | 3 +- scripts/pypsa-de/prepare_regret_network.py | 2 +- scripts/pypsa-de/regret_plots.py | 6 +- scripts/pypsa-de/regret_plots_lt.py | 30 +++--- scripts/pypsa-de/solve_regret_network.py | 2 +- 7 files changed, 84 insertions(+), 65 deletions(-) diff --git a/config/config.de.yaml b/config/config.de.yaml index cce393e5f..c21ff304b 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -8,7 +8,7 @@ run: name: # - ExPol # - KN2045_Mix - - AriadneDemand + - HighDemand - LowDemand # - KN2045_Elek # - KN2045_H2 diff --git a/config/scenarios.manual.yaml b/config/scenarios.manual.yaml index 96474e555..451e1018a 100644 --- a/config/scenarios.manual.yaml +++ b/config/scenarios.manual.yaml @@ -2,6 +2,67 @@ # SPDX-FileCopyrightText: : 2017-2023 The PyPSA-Eur Authors # # SPDX-License-Identifier: MIT +HighDemand: + solving: + options: + transmission_losses: 0 + noisy_costs: false + constraints: + limits_capacity_min: + Generator: + onwind: + DE: + 2030: 115 # Wind-an-Land Law + 2035: 157 # Wind-an-Land Law + 2040: 160 # Wind-an-Land Law + offwind: + DE: + 2035: 40 # 40 Wind-auf-See Law + 2040: 50 # assuming 1/3 of buildout reached in 2040 + 2045: 70 # 70 Wind-auf-See Law + solar: + DE: + # EEG2023; Ziel for 2024: 88 GW and for 2026: 128 GW, + 2025: 101 # assuming at least 1/3 of buildout reached in 2025 + 2030: 215 # PV strategy + 2035: 309 + 2040: 400 # PV strategy + + +LowDemand: + iiasa_database: + reference_scenario: ExPol # e.g., CO2 emissions from REMIND will follow ExPol + uba_for_industry: true + uba_for_mobility: true + offshore_nep_force: + delay_years: 2 + solving: + options: + transmission_losses: 0 + noisy_costs: false + constraints: + decentral_heat_pump_budgets: + DE: # 
UBA MWMS, Projektionsbericht 2025, Abbildung 48 + 2025: 12.0 + 2030: 26.0 + 2035: 38.0 + limits_capacity_min: + Generator: + onwind: + DE: + 2030: 0 + 2035: 0 + offwind: + DE: + 2030: 0 + 2035: 0 + Link: + H2 electrolysis: + DE: + 2030: 0 + + + ExPol: # UBA CO2 pathway instead of KSG targets # Europen CO2 pathway fixed at 2030 for 2035-2050 @@ -73,49 +134,6 @@ KN2045_Mix: # Importe erneuerbar erzeugter Energien auf mittlerem Niveau # dient als Referenzszenario in der Familie der Ariadne-Szenarien -AriadneDemand: - solving: - options: - transmission_losses: 0 - noisy_costs: false - offshore_nep_force: - delay_years: 2 - # electricity: - # transmission_limit: v1 - -LowDemand: - iiasa_database: - reference_scenario: ExPol # e.g., CO2 emissions from REMIND will follow ExPol - uba_for_industry: true - uba_for_mobility: true - offshore_nep_force: - delay_years: 2 - # electricity: - # transmission_limit: v1 - solving: - options: - transmission_losses: 0 - noisy_costs: false - constraints: - decentral_heat_pump_budgets: - DE: # UBA MWMS, Projektionsbericht 2025, Abbildung 48 - 2025: 12.0 - 2030: 26.0 - 2035: 38.0 - limits_capacity_min: - Generator: - onwind: - DE: - 2030: 0 - 2035: 0 - offwind: - DE: - 2030: 0 - 2035: 0 - Link: - H2 electrolysis: - DE: - 2030: 0 KN2045_Elek: # Fokus auf dem Hochlauf von Technologien zur direkten Elektrifizierung der Sektoren diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index 21ad4940e..70637b06b 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -5471,8 +5471,9 @@ def get_data( opts="", ll="vopt", sector_opts="None", - run="AriadneDemand", + run="HighDemand", decision="LowDemand", + regret_dir="regret_networks", ) configure_logging(snakemake) config = snakemake.config diff --git a/scripts/pypsa-de/prepare_regret_network.py b/scripts/pypsa-de/prepare_regret_network.py index 45f6198f1..e8bc9d61e 100644 --- a/scripts/pypsa-de/prepare_regret_network.py +++ b/scripts/pypsa-de/prepare_regret_network.py @@ -153,7 +153,7 @@ def fix_capacities(realization, decision, scope="DE", strict=False): sector_opts="none", planning_horizons="2025", decision="LowDemand", - run="AriadneDemand", + run="HighDemand", ) configure_logging(snakemake) diff --git a/scripts/pypsa-de/regret_plots.py b/scripts/pypsa-de/regret_plots.py index a5724f70a..a46c78801 100644 --- a/scripts/pypsa-de/regret_plots.py +++ b/scripts/pypsa-de/regret_plots.py @@ -71,7 +71,7 @@ def aggregate_by_keywords(opex_comp_agg, groups): configure_logging(snakemake) config = snakemake.config planning_horizons = snakemake.params.planning_horizons - scenarios = ["AriadneDemand", "LowDemand"] + scenarios = ["HighDemand", "LowDemand"] tech_colors = snakemake.params.plotting["tech_colors"] # Nested dict: networks[year][scenario][decision] = Network @@ -114,8 +114,8 @@ def aggregate_by_keywords(opex_comp_agg, groups): ax = ax.flatten() years = [2025, 2030, 2035] - scenarios = ["AriadneDemand", "LowDemand"] - decisions = ["decision_AriadneDemand", "decision_LowDemand"] + scenarios = ["HighDemand", "LowDemand"] + decisions = ["decision_HighDemand", "decision_LowDemand"] for i, year in enumerate(years): for scenario, decision in itertools.product(scenarios, decisions): diff --git a/scripts/pypsa-de/regret_plots_lt.py b/scripts/pypsa-de/regret_plots_lt.py index 491b04eeb..9780b5a25 100644 --- a/scripts/pypsa-de/regret_plots_lt.py +++ b/scripts/pypsa-de/regret_plots_lt.py @@ -58,7 +58,7 @@ def 
aggregate_by_keywords(df, groups): def plot_capacity_comparison( df, - scenarios=("AriadneDemand", "LowDemand"), + scenarios=("HighDemand", "LowDemand"), tech_colors=None, plot_diff=False, title="Electricity capacities", @@ -224,7 +224,7 @@ def plot_capacity_comparison( configure_logging(snakemake) config = snakemake.config planning_horizons = snakemake.params.planning_horizons - scenarios = ["AriadneDemand", "LowDemand"] + scenarios = ["HighDemand", "LowDemand"] tech_colors = snakemake.params.plotting["tech_colors"] # Load networks @@ -250,8 +250,8 @@ def plot_capacity_comparison( .sum() ).round(5) - if "AriadneDemand" in fn: - vars_dict["AriadneDemand"] = df + if "HighDemand" in fn: + vars_dict["HighDemand"] = df elif "LowDemand" in fn: vars_dict["LowDemand"] = df @@ -302,7 +302,7 @@ def plot_capacity_comparison( plot_capacity_comparison( df=capa_comp_agg, - scenarios=["AriadneDemand", "LowDemand"], + scenarios=["HighDemand", "LowDemand"], tech_colors=tech_colors, plot_diff=False, title=f"Electricity capacities in DE: {year}", @@ -313,7 +313,7 @@ def plot_capacity_comparison( plot_capacity_comparison( df=capa_comp_agg, - scenarios=["AriadneDemand", "LowDemand"], + scenarios=["HighDemand", "LowDemand"], tech_colors=tech_colors, plot_diff=True, title=f"Difference of electricity capacities in DE: {year}", @@ -349,7 +349,7 @@ def plot_capacity_comparison( plot_capacity_comparison( df=capa_comp_agg, - scenarios=["AriadneDemand", "LowDemand"], + scenarios=["HighDemand", "LowDemand"], tech_colors=tech_colors, plot_diff=False, title=f"Electricity capacities in EU (outside DE): {year}", @@ -358,7 +358,7 @@ def plot_capacity_comparison( plot_capacity_comparison( df=capa_comp_agg, - scenarios=["AriadneDemand", "LowDemand"], + scenarios=["HighDemand", "LowDemand"], tech_colors=tech_colors, plot_diff=True, title=f"Difference of electricity capacities in EU (outside DE): {year}", @@ -389,7 +389,7 @@ def plot_capacity_comparison( plot_capacity_comparison( df=demand_comp_agg, - scenarios=["AriadneDemand", "LowDemand"], + scenarios=["HighDemand", "LowDemand"], tech_colors=tech_colors, plot_diff=False, title=f"Electricity demand in DE: {year}", @@ -399,7 +399,7 @@ def plot_capacity_comparison( plot_capacity_comparison( df=demand_comp_agg, - scenarios=["AriadneDemand", "LowDemand"], + scenarios=["HighDemand", "LowDemand"], tech_colors=tech_colors, plot_diff=True, title=f"Difference of electricity demand in DE: {year}", @@ -446,7 +446,7 @@ def plot_capacity_comparison( # Extract for easier access capex_low_plot = capex_data["LowDemand"] - capex_ariadne_plot = capex_data["AriadneDemand"] + capex_ariadne_plot = capex_data["HighDemand"] # Set up the plot fig, ax = plt.subplots(figsize=(12, 8)) @@ -483,7 +483,7 @@ def plot_capacity_comparison( capex_ariadne_plot.loc[category, col] for col in year_cols ] - # Plot bars - LowDemand with hatching, AriadneDemand without + # Plot bars - LowDemand with hatching, HighDemand without ax.bar( x - width / 2, low_values, @@ -518,7 +518,7 @@ def plot_capacity_comparison( color="white", ) - # AriadneDemand category values + # HighDemand category values if ariadne_values[j] > 0.5: # Only show if value is significant ax.text( j + width / 2, @@ -539,7 +539,7 @@ def plot_capacity_comparison( ax.set_xlabel("Year", fontsize=12) ax.set_ylabel("billion €", fontsize=12) ax.set_title( - "System Cost CAPEX Comparison: LowDemand vs AriadneDemand", + "System Cost CAPEX Comparison: LowDemand vs HighDemand", fontsize=14, fontweight="bold", ) @@ -563,7 +563,7 @@ def plot_capacity_comparison( 
hatch="///", label="LowDemand", ), - Rectangle((0, 0), 1, 1, facecolor="gray", alpha=0.8, label="AriadneDemand"), + Rectangle((0, 0), 1, 1, facecolor="gray", alpha=0.8, label="HighDemand"), ] # Create separate legends diff --git a/scripts/pypsa-de/solve_regret_network.py b/scripts/pypsa-de/solve_regret_network.py index 1363b8949..1093be6b5 100644 --- a/scripts/pypsa-de/solve_regret_network.py +++ b/scripts/pypsa-de/solve_regret_network.py @@ -24,7 +24,7 @@ sector_opts="none", planning_horizons="2035", decision="LowDemand", - run="AriadneDemand", + run="HighDemand", regret_dir="no_flex_regret_networks", ) From e74e691c1f97d56c6ca8c8971d82e24ae783c781 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Thu, 4 Sep 2025 13:57:35 +0200 Subject: [PATCH 101/202] minor --- config/scenarios.manual.yaml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/config/scenarios.manual.yaml b/config/scenarios.manual.yaml index 451e1018a..4259ac880 100644 --- a/config/scenarios.manual.yaml +++ b/config/scenarios.manual.yaml @@ -21,8 +21,7 @@ HighDemand: 2040: 50 # assuming 1/3 of buildout reached in 2040 2045: 70 # 70 Wind-auf-See Law solar: - DE: - # EEG2023; Ziel for 2024: 88 GW and for 2026: 128 GW, + DE: # EEG2023; Ziel for 2024: 88 GW and for 2026: 128 GW, 2025: 101 # assuming at least 1/3 of buildout reached in 2025 2030: 215 # PV strategy 2035: 309 From 482d6e5c38274fac520aa400b60e712e9299e292 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Mon, 8 Sep 2025 11:51:44 +0200 Subject: [PATCH 102/202] add a config for triggering short-term no_flex runs --- Snakefile | 10 ++++++++-- config/config.de.yaml | 1 + 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/Snakefile b/Snakefile index 99c56166e..f4ceec158 100644 --- a/Snakefile +++ b/Snakefile @@ -1119,11 +1119,17 @@ rule regret_base: rule regret_all: input: - expand( + lambda w: expand( "results/" + config["run"]["prefix"] + "/scenario_comparison/{regret_dir}/Price-Carbon.png", - regret_dir=["no_flex_regret_networks", "regret_networks"], + regret_dir=( + ["regret_networks", "no_flex_regret_networks"] + if config_provider( + "iiasa_database", "regret_run", "no_flex_sensitivity" + )(w) + else ["regret_networks"] + ), ), f"results/{config['run']['prefix']}/regret_plots/LT_comparison/elec_capa_comp_de_2025.png", # expand("results/" + config["run"]["prefix"] + "/regret_plots/{regret_dir}/ST_comparison/elec_price_comp_de.png", diff --git a/config/config.de.yaml b/config/config.de.yaml index c21ff304b..926e687fe 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -49,6 +49,7 @@ iiasa_database: scope_to_fix: EU # Supported values are DE and EU strict: false # If false, the model allows capacity expansion for virtual links and bottlenecks h2_vent: true + no_flex_sensitivity: false # CAVEAT: This behaves like a wildcard ageb_for_mobility: true # In 2020 use AGEB data for final energy demand and KBA for vehicles uba_for_mobility: false # For 2025–2035 use MWMS scenario from UBA Projektionsbericht 2025 uba_for_industry: false # For 2025–2035 use MWMS scenario from UBA Projektionsbericht 2025 From 194e9181d2e64b84e558dc8e088fe1dbea5d4b98 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Mon, 8 Sep 2025 13:23:44 +0200 Subject: [PATCH 103/202] default to UBA projection data for 2025 --- config/config.de.yaml | 6 ++++-- config/scenarios.manual.yaml | 15 ++++++++++++--- scripts/pypsa-de/build_exogenous_mobility_data.py | 10 +++++----- scripts/pypsa-de/modify_prenetwork.py | 4 ++-- 4 files changed, 23 insertions(+), 12 
deletions(-) diff --git a/config/config.de.yaml b/config/config.de.yaml index 926e687fe..3fa88c4ca 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -51,8 +51,10 @@ iiasa_database: h2_vent: true no_flex_sensitivity: false # CAVEAT: This behaves like a wildcard ageb_for_mobility: true # In 2020 use AGEB data for final energy demand and KBA for vehicles - uba_for_mobility: false # For 2025–2035 use MWMS scenario from UBA Projektionsbericht 2025 - uba_for_industry: false # For 2025–2035 use MWMS scenario from UBA Projektionsbericht 2025 + uba_for_mobility: # For 2025–2035 use MWMS scenario from UBA Projektionsbericht 2025 + - 2025 + uba_for_industry: # For 2025–2035 use MWMS scenario from UBA Projektionsbericht 2025 + - 2025 scale_industry_non_energy: false # Scale non-energy industry demand directly proportional to energy demand # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#foresight diff --git a/config/scenarios.manual.yaml b/config/scenarios.manual.yaml index 4259ac880..85dd9d639 100644 --- a/config/scenarios.manual.yaml +++ b/config/scenarios.manual.yaml @@ -8,6 +8,9 @@ HighDemand: transmission_losses: 0 noisy_costs: false constraints: + decentral_heat_pump_budgets: + DE: # UBA MWMS, Projektionsbericht 2025, Abbildung 48 + 2025: 12.0 limits_capacity_min: Generator: onwind: @@ -31,8 +34,14 @@ HighDemand: LowDemand: iiasa_database: reference_scenario: ExPol # e.g., CO2 emissions from REMIND will follow ExPol - uba_for_industry: true - uba_for_mobility: true + uba_for_industry: + - 2025 + - 2030 + - 2035 + uba_for_mobility: + - 2025 + - 2030 + - 2035 offshore_nep_force: delay_years: 2 solving: @@ -64,7 +73,7 @@ LowDemand: ExPol: # UBA CO2 pathway instead of KSG targets -# Europen CO2 pathway fixed at 2030 for 2035-2050 +# European CO2 pathway fixed at 2030 for 2035-2050 # Kernnetz is not assumed to be built # Not forcing renewables after 2030 diff --git a/scripts/pypsa-de/build_exogenous_mobility_data.py b/scripts/pypsa-de/build_exogenous_mobility_data.py index 2d316baea..9698266ab 100644 --- a/scripts/pypsa-de/build_exogenous_mobility_data.py +++ b/scripts/pypsa-de/build_exogenous_mobility_data.py @@ -12,7 +12,7 @@ def get_mobility_data( year, non_land_liquids, ageb_for_mobility=True, - uba_for_mobility=False, + uba_for_mobility="", ): """ Retrieve the German mobility demand from the transport model. @@ -62,7 +62,7 @@ def get_mobility_data( # FZ27_202101, table FZ 27.2, 1. 
January 2021: mobility_data["million_EVs"] = 0.358498 + 0.280149 - elif year == "2025" and uba_for_mobility: + elif year == "2025" and year in uba_for_mobility: # https://www.umweltbundesamt.de/sites/default/files/medien/11850/publikationen/projektionsbericht_2025.pdf, Abbildung 64 & 59, mobility_data = pd.Series( { @@ -77,7 +77,7 @@ def get_mobility_data( mobility_data = mobility_data.mul(1e6) # convert TWh to MWh mobility_data["million_EVs"] = 2.7 + 1.2 # BEV + PHEV - elif year == "2030" and uba_for_mobility: + elif year == "2030" and year in uba_for_mobility: mobility_data = pd.Series( { "Electricity": 57.0, @@ -89,7 +89,7 @@ def get_mobility_data( mobility_data = mobility_data.mul(1e6) mobility_data["million_EVs"] = 8.7 + 1.8 - elif year == "2035" and uba_for_mobility: + elif year == "2035" and year in uba_for_mobility: mobility_data = pd.Series( { "Electricity": 117.0, @@ -102,7 +102,7 @@ def get_mobility_data( mobility_data["million_EVs"] = 18.9 + 1.8 else: - if uba_for_mobility: + if year in uba_for_mobility: # here year > 2035 logger.error( f"Year {year} is not supported for UBA mobility projections. Please use only 2020, 2025, 2030, 2035." ) diff --git a/scripts/pypsa-de/modify_prenetwork.py b/scripts/pypsa-de/modify_prenetwork.py index ffacab32c..cb2d5d3e9 100644 --- a/scripts/pypsa-de/modify_prenetwork.py +++ b/scripts/pypsa-de/modify_prenetwork.py @@ -1476,8 +1476,8 @@ def modify_industry_demand( sanitize_custom_columns(n) - if snakemake.params.uba_for_industry and current_year >= 2025: - if current_year >= 2040: + if current_year in snakemake.params.uba_for_industry: + if current_year not in [2025, 2030, 2035]: logger.error( "The UBA for industry data is only available for 2025, 2030 and 2035. Please check your config." ) From 4caf484b50a9ac40ed6c89538a01a4e0fe575b7b Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Mon, 8 Sep 2025 17:30:03 +0200 Subject: [PATCH 104/202] preliminary long term no flex scenario setup --- Snakefile | 1 + config/config.de.yaml | 7 ++--- config/scenarios.manual.yaml | 31 ++++++++++++++++++-- scripts/pypsa-de/modify_prenetwork.py | 28 ++++++++++++++++++ scripts/pypsa-de/plot_scenario_comparison.py | 9 ++++-- scripts/pypsa-de/prepare_regret_network.py | 29 +++++++++++++++--- 6 files changed, 93 insertions(+), 12 deletions(-) diff --git a/Snakefile b/Snakefile index f4ceec158..75269f7ca 100644 --- a/Snakefile +++ b/Snakefile @@ -523,6 +523,7 @@ rule modify_district_heat_share: rule modify_prenetwork: params: + no_flex_lt_run=config_provider("iiasa-database", "no_flex_lt_run"), efuel_export_ban=config_provider("solving", "constraints", "efuel_export_ban"), enable_kernnetz=config_provider("wasserstoff_kernnetz", "enable"), costs=config_provider("costs"), diff --git a/config/config.de.yaml b/config/config.de.yaml index 3fa88c4ca..9bbf23081 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -4,15 +4,13 @@ # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#run run: - prefix: 20250903_regret_less_offwind + prefix: 20250908_regret_no_flex_lt_run name: # - ExPol # - KN2045_Mix - HighDemand - LowDemand - # - KN2045_Elek - # - KN2045_H2 - # - KN2045_NFniedrig + - HighDemandNoFlex # !!! CAVEAT The 'NoFlex' substring in the scenario name determines how the regrets are computed!!! 
# - KN2045_NFhoch scenarios: enable: true @@ -55,6 +53,7 @@ iiasa_database: - 2025 uba_for_industry: # For 2025–2035 use MWMS scenario from UBA Projektionsbericht 2025 - 2025 + no_flex_lt_run: false # If true, removes all flexibility options scale_industry_non_energy: false # Scale non-energy industry demand directly proportional to energy demand # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#foresight diff --git a/config/scenarios.manual.yaml b/config/scenarios.manual.yaml index 85dd9d639..ba3192934 100644 --- a/config/scenarios.manual.yaml +++ b/config/scenarios.manual.yaml @@ -30,6 +30,35 @@ HighDemand: 2035: 309 2040: 400 # PV strategy +HighDemandNoFlex: + iiasa_database: + no_flex_lt_run: true + solving: + options: + transmission_losses: 0 + noisy_costs: false + constraints: + decentral_heat_pump_budgets: + DE: # UBA MWMS, Projektionsbericht 2025, Abbildung 48 + 2025: 12.0 + limits_capacity_min: + Generator: + onwind: + DE: + 2030: 115 # Wind-an-Land Law + 2035: 157 # Wind-an-Land Law + 2040: 160 # Wind-an-Land Law + offwind: + DE: + 2035: 40 # 40 Wind-auf-See Law + 2040: 50 # assuming 1/3 of buildout reached in 2040 + 2045: 70 # 70 Wind-auf-See Law + solar: + DE: # EEG2023; Ziel for 2024: 88 GW and for 2026: 128 GW, + 2025: 101 # assuming at least 1/3 of buildout reached in 2025 + 2030: 215 # PV strategy + 2035: 309 + 2040: 400 # PV strategy LowDemand: iiasa_database: @@ -69,8 +98,6 @@ LowDemand: DE: 2030: 0 - - ExPol: # UBA CO2 pathway instead of KSG targets # European CO2 pathway fixed at 2030 for 2035-2050 diff --git a/scripts/pypsa-de/modify_prenetwork.py b/scripts/pypsa-de/modify_prenetwork.py index cb2d5d3e9..43f0f73ca 100644 --- a/scripts/pypsa-de/modify_prenetwork.py +++ b/scripts/pypsa-de/modify_prenetwork.py @@ -1394,6 +1394,30 @@ def modify_industry_demand( ) +def remove_flexibility_options(n): + logger.info("Removing decentral TES, batteries, and BEV DSM from the network.") + carriers_to_drop = [ + "urban decentral water tanks charger", + "urban decentral water tanks discharger", + "urban decentral water tanks", + "rural water tanks charger", + "rural water tanks discharger", + "rural water tanks", + "battery charger", + "battery discharger", + "home battery charger", + "home battery discharger", + "battery", + "home battery", + "EV battery", + ] + n.remove("Link", n.links.query("carrier in @carriers_to_drop").index) + n.remove("Store", n.stores.query("carrier in @carriers_to_drop").index) + # Need to keep the EV battery bus + carriers_to_drop.remove("EV battery") + n.remove("Bus", n.buses.query("carrier in @carriers_to_drop").index) + + if __name__ == "__main__": if "snakemake" not in globals(): snakemake = mock_snakemake( @@ -1493,6 +1517,10 @@ def modify_industry_demand( # For regret runs deactivate_late_transmission_projects(n) + if snakemake.params.no_flex_lt_run: + logger.info("Run without flexibility options detected.") + remove_flexibility_options(n) + fix_transmission_DE(n) n.export_to_netcdf(snakemake.output.network) diff --git a/scripts/pypsa-de/plot_scenario_comparison.py b/scripts/pypsa-de/plot_scenario_comparison.py index eb6843fa3..670452d5c 100644 --- a/scripts/pypsa-de/plot_scenario_comparison.py +++ b/scripts/pypsa-de/plot_scenario_comparison.py @@ -23,7 +23,8 @@ def scenario_plot(df, output_dir, var): if __name__ == "__main__": if "snakemake" not in globals(): snakemake = mock_snakemake( - "ariadne_all", + "plot_scenario_comparison_regrets", + regret_dir="regret_networks", # simpl="", # clusters=22, # opts="", @@ -35,9 +36,13 @@ 
def scenario_plot(df, output_dir, var): dfs = [] fns = snakemake.input.exported_variables - if "regret_variables" in fns[0]: + if "regret_variables" in fns[0] and len(fns) == 4: # reorder indices of fns as 0312 fns = [fns[i] for i in [0, 3, 2, 1] if i < len(fns)] + if "regret_variables" in fns[0] and len(fns) == 8: + fns = [ + fn for fn in fns if not fn.contains("NoFlex/") + ] # !!! CAVEAT AGAIN DISPATCHING ON FILENAME for file in fns: _df = pd.read_excel( file, index_col=list(range(5)), sheet_name="data" diff --git a/scripts/pypsa-de/prepare_regret_network.py b/scripts/pypsa-de/prepare_regret_network.py index e8bc9d61e..e48636643 100644 --- a/scripts/pypsa-de/prepare_regret_network.py +++ b/scripts/pypsa-de/prepare_regret_network.py @@ -1,3 +1,5 @@ +# Import the function dynamically since the folder name contains a hyphen which is invalid in a module name. +import importlib.util import logging import pathlib @@ -12,6 +14,14 @@ ) from scripts.solve_network import prepare_network +_spec_path = pathlib.Path(__file__).resolve().parent / "modify_prenetwork.py" +_spec = importlib.util.spec_from_file_location( + "scripts.pypsa_de.modify_prenetwork", _spec_path +) +_modify_prenetwork = importlib.util.module_from_spec(_spec) +_spec.loader.exec_module(_modify_prenetwork) +remove_flexibility_options = _modify_prenetwork.remove_flexibility_options + logger = logging.getLogger(__name__) @@ -82,7 +92,7 @@ def _unfix_bottlenecks(new, deci, name, extendable_i): return -def fix_capacities(realization, decision, scope="DE", strict=False): +def fix_capacities(realization, decision, scope="DE", strict=False, no_flex=False): logger.info(f"Fixing all capacities for scope: {scope}") if scope == "EU": scope = "" @@ -141,17 +151,20 @@ def fix_capacities(realization, decision, scope="DE", strict=False): new.loc[_idx, attr] = deci.loc[_idx, attr] + if no_flex: + logger.info("Realization network is from a run without flexibility.") + remove_flexibility_options(n) return n if __name__ == "__main__": if "snakemake" not in globals(): snakemake = mock_snakemake( - "solve_regret", + "prepare_regret_network", clusters=27, opts="", sector_opts="none", - planning_horizons="2025", + planning_horizons="2030", decision="LowDemand", run="HighDemand", ) @@ -180,7 +193,15 @@ def fix_capacities(realization, decision, scope="DE", strict=False): scope_to_fix = snakemake.params["scope_to_fix"] h2_vent = snakemake.params["h2_vent"] - n = fix_capacities(realization, decision, scope=scope_to_fix, strict=strict) + # CAVEAT The 'NoFlex' string in the scenario actually controls the behavior of this function + n = fix_capacities( + realization, + decision, + scope=scope_to_fix, + strict=strict, + no_flex="NoFlex" in snakemake.input.decision, + ) + if strict: logger.info( "Strict regret run chosen. No capacities are extendable. Activating load shedding to prevent infeasibilites." 
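A note on the dynamic import introduced in this patch: the scripts live under scripts/pypsa-de/, and a hyphen is not valid in a Python module name, so prepare_regret_network.py cannot simply write "import modify_prenetwork"; instead it loads the file by path with importlib.util. Below is a minimal sketch of that pattern, not the exact project code. It assumes only that a modify_prenetwork.py defining remove_flexibility_options sits next to the importing script; the module name passed to spec_from_file_location is arbitrary as long as it is a valid identifier.

    import importlib.util
    import pathlib

    # Locate the neighbouring file that cannot be imported the usual way
    # because its parent directory name ("pypsa-de") contains a hyphen.
    module_path = pathlib.Path(__file__).resolve().parent / "modify_prenetwork.py"

    # Build a spec from the file location and materialise a module object from it.
    spec = importlib.util.spec_from_file_location("modify_prenetwork", module_path)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)  # executes the file, populating the module namespace

    # Attributes can then be pulled out as with a regular import.
    remove_flexibility_options = module.remove_flexibility_options

The same construction is added to solve_regret_network.py a few patches later, so the long-term and short-term no-flex code paths share the single remove_flexibility_options implementation in modify_prenetwork.py rather than duplicating the list of carriers to drop.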
From d4533864ca2d916774350251b39dd74dc9dc7c76 Mon Sep 17 00:00:00 2001 From: JulianGeis Date: Tue, 9 Sep 2025 17:52:33 +0200 Subject: [PATCH 105/202] plotting fixes --- Snakefile | 16 ++++++---------- scripts/pypsa-de/regret_plots.py | 17 ++++++++--------- 2 files changed, 14 insertions(+), 19 deletions(-) diff --git a/Snakefile b/Snakefile index 75269f7ca..f077a151a 100644 --- a/Snakefile +++ b/Snakefile @@ -1163,7 +1163,7 @@ rule regret_plots_lt: planning_horizons=config_provider("scenario", "planning_horizons"), plotting=config_provider("plotting"), output_dir=directory( - f"results/{config['run']['prefix']}/regret_plots/LT_comparison" + "results/" + config['run']['prefix'] + "/regret_plots/LT_comparison" ), input: networks=expand( @@ -1189,24 +1189,20 @@ rule regret_plots: scenarios=get_scenarios(run), planning_horizons=config_provider("scenario", "planning_horizons"), plotting=config_provider("plotting"), + output_dir=directory( + f"results/{config['run']['prefix']}/regret_plots/ST_comparison" + ), input: regret_networks=expand( RESULTS - + "{regret_dir}/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", + + "regret_networks/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", **config["scenario"], allow_missing=True, run=config["run"]["name"], decision=config["run"]["name"], ), output: - elec_price_comp_de="results/" - + config["run"]["prefix"] - + "/regret_plots/{regret_dir}/ST_comparison/elec_price_comp_de.png", - dir=directory( - f"results/" - + config["run"]["prefix"] - + "/regret_plots/{regret_dir}/ST_comparison" - ), + elec_price_comp_de=f"results/{config['run']['prefix']}/regret_plots/ST_comparison/elec_price_comp_de.png", resources: mem_mb=32000, script: diff --git a/scripts/pypsa-de/regret_plots.py b/scripts/pypsa-de/regret_plots.py index a46c78801..fd16ce6f4 100644 --- a/scripts/pypsa-de/regret_plots.py +++ b/scripts/pypsa-de/regret_plots.py @@ -105,19 +105,17 @@ def aggregate_by_keywords(opex_comp_agg, groups): networks[year][scenario][decision] = pypsa.Network(fn) # ensure output directory exist - if not os.path.exists(snakemake.output[-1]): - os.makedirs(snakemake.output[-1]) + if not os.path.exists(snakemake.params.output_dir): + os.makedirs(snakemake.params.output_dir) # Plot electricity price duration curves - fig, ax = plt.subplots(figsize=(10, 15), nrows=3, ncols=1) + fig, ax = plt.subplots(figsize=(10, 5 * len(planning_horizons)), nrows=len(planning_horizons), ncols=1) ax = ax.flatten() - years = [2025, 2030, 2035] - scenarios = ["HighDemand", "LowDemand"] decisions = ["decision_HighDemand", "decision_LowDemand"] - for i, year in enumerate(years): + for i, year in enumerate(planning_horizons): for scenario, decision in itertools.product(scenarios, decisions): n = networks[year][scenario][decision] lmps = n.buses_t.marginal_price.loc[ @@ -136,6 +134,7 @@ def aggregate_by_keywords(opex_comp_agg, groups): label=f"{scenario}_{decision} (avg: {lmps_sorted['lmp'].mean():.2f})", ) + ax[i].set_ylim(-50, 300) ax[i].legend() ax[i].set_xlabel("Percentage of time") ax[i].set_ylabel("€/MWh") @@ -162,10 +161,10 @@ def aggregate_by_keywords(opex_comp_agg, groups): "nice_names": False, } - fig, axes = plt.subplots(nrows=3, ncols=1, figsize=(12, 18)) + fig, axes = plt.subplots(nrows=len(planning_horizons), ncols=1, figsize=(12, 6 * len(planning_horizons))) axes = axes.flatten() - for i, year in enumerate(years): + for i, year in enumerate(planning_horizons): opex_comp = pd.DataFrame( columns=["_".join(tup) for tup in 
itertools.product(scenarios, decisions)] ) @@ -242,5 +241,5 @@ def aggregate_by_keywords(opex_comp_agg, groups): # Legend outside axes[-1].legend(loc="upper left", bbox_to_anchor=(1, 1)) - plt.savefig(snakemake.output[-1] + "/opex_comp_de.png", bbox_inches="tight") + plt.savefig(snakemake.params.output_dir + "/opex_comp_de.png", bbox_inches="tight") plt.close() From 94dbc094f207087fce7413e9e42c6d10ed50140c Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 10 Sep 2025 17:02:48 +0200 Subject: [PATCH 106/202] fix type of year --- scripts/pypsa-de/build_exogenous_mobility_data.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/scripts/pypsa-de/build_exogenous_mobility_data.py b/scripts/pypsa-de/build_exogenous_mobility_data.py index 9698266ab..553b343aa 100644 --- a/scripts/pypsa-de/build_exogenous_mobility_data.py +++ b/scripts/pypsa-de/build_exogenous_mobility_data.py @@ -62,7 +62,7 @@ def get_mobility_data( # FZ27_202101, table FZ 27.2, 1. January 2021: mobility_data["million_EVs"] = 0.358498 + 0.280149 - elif year == "2025" and year in uba_for_mobility: + elif year == "2025" and int(year) in uba_for_mobility: # https://www.umweltbundesamt.de/sites/default/files/medien/11850/publikationen/projektionsbericht_2025.pdf, Abbildung 64 & 59, mobility_data = pd.Series( { @@ -77,7 +77,7 @@ def get_mobility_data( mobility_data = mobility_data.mul(1e6) # convert TWh to MWh mobility_data["million_EVs"] = 2.7 + 1.2 # BEV + PHEV - elif year == "2030" and year in uba_for_mobility: + elif year == "2030" and int(year) in uba_for_mobility: mobility_data = pd.Series( { "Electricity": 57.0, @@ -89,7 +89,7 @@ def get_mobility_data( mobility_data = mobility_data.mul(1e6) mobility_data["million_EVs"] = 8.7 + 1.8 - elif year == "2035" and year in uba_for_mobility: + elif year == "2035" and int(year) in uba_for_mobility: mobility_data = pd.Series( { "Electricity": 117.0, @@ -102,7 +102,7 @@ def get_mobility_data( mobility_data["million_EVs"] = 18.9 + 1.8 else: - if year in uba_for_mobility: # here year > 2035 + if int(year) in uba_for_mobility: # here year > 2035 logger.error( f"Year {year} is not supported for UBA mobility projections. Please use only 2020, 2025, 2030, 2035." 
) @@ -132,8 +132,8 @@ def get_mobility_data( opts="", ll="vopt", sector_opts="none", - planning_horizons="2020", - run="KN2045_Mix", + planning_horizons="2025", + run="LowDemand", ) configure_logging(snakemake) From 24f9180e6630dfb788cf6e33c3172374b4325365 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Thu, 11 Sep 2025 15:05:12 +0200 Subject: [PATCH 107/202] limit urban central water tanks/pits expansion in 2025 --- config/config.de.yaml | 7 +++++++ scripts/pypsa-de/modify_prenetwork.py | 7 +++++-- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/config/config.de.yaml b/config/config.de.yaml index 9bbf23081..649e47a26 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -277,6 +277,10 @@ first_technology_occurrence: H2 pipeline: 2025 H2 Electrolysis: 2025 H2 pipeline retrofitted: 2025 + urban central water pits charger: 2030 + urban central water pits discharger: 2030 + Store: + urban central water pits: 2030 costs: horizon: "mean" # "optimist", "pessimist" or "mean" @@ -519,6 +523,9 @@ solving: 2040: 50000 2045: 80000 2050: 80000 + urban central water tanks: + DE: + 2025: 120 # GWh, https://www.hamburg-institut.com/wp-content/uploads/2023/12/Referenzblatt_SysGF-1.pdf Link: methanolisation: DE: diff --git a/scripts/pypsa-de/modify_prenetwork.py b/scripts/pypsa-de/modify_prenetwork.py index 43f0f73ca..8a9592e1b 100644 --- a/scripts/pypsa-de/modify_prenetwork.py +++ b/scripts/pypsa-de/modify_prenetwork.py @@ -22,8 +22,11 @@ def first_technology_occurrence(n): for c, carriers in snakemake.params.technology_occurrence.items(): for carrier, first_year in carriers.items(): if int(snakemake.wildcards.planning_horizons) < first_year: - logger.info(f"{carrier} not extendable before {first_year}.") - n.df(c).loc[n.df(c).carrier == carrier, "p_nom_extendable"] = False + attr = "e_nom" if c == "Store" else "p_nom" + logger.info( + f"{carrier} is configured to be not extendable before {first_year}." 
+ ) + n.df(c).loc[n.df(c).carrier == carrier, f"{attr}_extendable"] = False def fix_new_boiler_profiles(n): From fbe22d7bbe2836fcc13fde007d9567605ae7d49e Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Thu, 11 Sep 2025 16:58:46 +0200 Subject: [PATCH 108/202] define a noflex lowdemand lt run --- config/scenarios.manual.yaml | 39 ++++++++++++++++++++ scripts/pypsa-de/plot_scenario_comparison.py | 2 +- 2 files changed, 40 insertions(+), 1 deletion(-) diff --git a/config/scenarios.manual.yaml b/config/scenarios.manual.yaml index ba3192934..228066d12 100644 --- a/config/scenarios.manual.yaml +++ b/config/scenarios.manual.yaml @@ -98,6 +98,45 @@ LowDemand: DE: 2030: 0 +LowDemandNoFlex: + iiasa_database: + no_flex_lt_run: true + reference_scenario: ExPol # e.g., CO2 emissions from REMIND will follow ExPol + uba_for_industry: + - 2025 + - 2030 + - 2035 + uba_for_mobility: + - 2025 + - 2030 + - 2035 + offshore_nep_force: + delay_years: 2 + solving: + options: + transmission_losses: 0 + noisy_costs: false + constraints: + decentral_heat_pump_budgets: + DE: # UBA MWMS, Projektionsbericht 2025, Abbildung 48 + 2025: 12.0 + 2030: 26.0 + 2035: 38.0 + limits_capacity_min: + Generator: + onwind: + DE: + 2030: 0 + 2035: 0 + offwind: + DE: + 2030: 0 + 2035: 0 + Link: + H2 electrolysis: + DE: + 2030: 0 + ExPol: # UBA CO2 pathway instead of KSG targets # European CO2 pathway fixed at 2030 for 2035-2050 diff --git a/scripts/pypsa-de/plot_scenario_comparison.py b/scripts/pypsa-de/plot_scenario_comparison.py index 670452d5c..9a7dea4ce 100644 --- a/scripts/pypsa-de/plot_scenario_comparison.py +++ b/scripts/pypsa-de/plot_scenario_comparison.py @@ -39,7 +39,7 @@ def scenario_plot(df, output_dir, var): if "regret_variables" in fns[0] and len(fns) == 4: # reorder indices of fns as 0312 fns = [fns[i] for i in [0, 3, 2, 1] if i < len(fns)] - if "regret_variables" in fns[0] and len(fns) == 8: + if "regret_variables" in fns[0] and len(fns) in [9, 16]: fns = [ fn for fn in fns if not fn.contains("NoFlex/") ] # !!! CAVEAT AGAIN DISPATCHING ON FILENAME From ea7c3a76f78de740c3232b4b2a4a9b57b7009cfb Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Thu, 11 Sep 2025 17:00:41 +0200 Subject: [PATCH 109/202] small fix --- scripts/pypsa-de/plot_scenario_comparison.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/pypsa-de/plot_scenario_comparison.py b/scripts/pypsa-de/plot_scenario_comparison.py index 9a7dea4ce..cbfcb9824 100644 --- a/scripts/pypsa-de/plot_scenario_comparison.py +++ b/scripts/pypsa-de/plot_scenario_comparison.py @@ -41,7 +41,7 @@ def scenario_plot(df, output_dir, var): fns = [fns[i] for i in [0, 3, 2, 1] if i < len(fns)] if "regret_variables" in fns[0] and len(fns) in [9, 16]: fns = [ - fn for fn in fns if not fn.contains("NoFlex/") + fn for fn in fns if "NoFlex/" not in fn ] # !!! 
CAVEAT AGAIN DISPATCHING ON FILENAME for file in fns: _df = pd.read_excel( From c5ec0c2ba50c2d6fd5552423665998fdef0d8654 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 12 Sep 2025 10:16:26 +0200 Subject: [PATCH 110/202] clip extreme scarcity prices for electricity --- scripts/pypsa-de/export_ariadne_variables.py | 22 +++++++++++++------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index 70637b06b..07c4773e0 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -3192,7 +3192,7 @@ def get_nodal_supply(n, bus_carrier, query="index == index or index != index"): return result -def price_load(n, load_carrier, region): +def price_load(n, load_carrier, region, clip=None): """ Calculate the average price of a specific load carrier in a given region. @@ -3213,7 +3213,8 @@ def price_load(n, load_carrier, region): if n.loads_t.p[load.index].values.sum() < 1: return np.nan, 0 result = ( - n.loads_t.p[load.index] * n.buses_t.marginal_price[load.bus].values + n.loads_t.p[load.index] + * n.buses_t.marginal_price[load.bus].clip(upper=clip).values ).values.sum() result /= n.loads_t.p[load.index].values.sum() return result, n.loads_t.p[load.index].values.sum() @@ -3473,10 +3474,13 @@ def get_prices(n, region): # reported: 8/14 # Price|Secondary Energy|Electricity + max_elec_price = 4000 # to clip extreme prices during scarcity nodal_flows_ac = get_nodal_flows( n, "AC", region, query="not carrier.str.contains('gas')" ) - nodal_prices_ac = n.buses_t.marginal_price[nodal_flows_ac.columns] + nodal_prices_ac = n.buses_t.marginal_price[nodal_flows_ac.columns].clip( + upper=max_elec_price + ) var["Price|Secondary Energy|Electricity"] = ( nodal_flows_ac.mul(nodal_prices_ac).values.sum() / nodal_flows_ac.values.sum() @@ -3596,7 +3600,7 @@ def get_prices(n, region): # Price|Final Energy|Transportation|Passenger|Electricity var["Price|Final Energy|Transportation|Passenger|Electricity"] = price_load( - n, "land transport EV", region + n, "land transport EV", region, clip=max_elec_price )[0] # Price|Final Energy|Transportation|Passenger|Gases # Price|Final Energy|Transportation|Passenger|Hydrogen @@ -3656,14 +3660,14 @@ def get_prices(n, region): # Price|Final Energy|Transportation|Electricity var["Price|Final Energy|Transportation|Electricity"] = price_load( - n, "land transport EV", region + n, "land transport EV", region, clip=max_elec_price )[0] # Price|Final Energy|Transportation|Electricity|Sales Margin # Price|Final Energy|Transportation|Electricity|Transport and Distribution # Price|Final Energy|Transportation|Electricity|Other Taxes # Price|Final Energy|Transportation|Liquids|Kerosene - var["Price|Final Energy|Transportation|Electricity"] = price_load( + var["Price|Final Energy|Transportation|Liquids|Kerosene"] = price_load( n, "kerosene for aviation", region )[0] # Price|Final Energy|Transportation|Liquids|Kerosene|Sales Margin @@ -3784,7 +3788,9 @@ def get_prices(n, region): "& not carrier.str.contains('industry')" "& not carrier.str.contains('urban central')", ) - nodal_prices_lv = n.buses_t.marginal_price[nodal_flows_lv.columns] + nodal_prices_lv = n.buses_t.marginal_price[nodal_flows_lv.columns].clip( + upper=max_elec_price + ) var["Price|Final Energy|Residential and Commercial|Electricity"] = ( nodal_flows_lv.mul(nodal_prices_lv).values.sum() / nodal_flows_lv.values.sum() ) @@ -3883,7 +3889,7 @@ def get_prices(n, region): # 
Price|Final Energy|Industry|Electricity var["Price|Final Energy|Industry|Electricity"] = price_load( - n, "industry electricity", region + n, "industry electricity", region, clip=max_elec_price )[0] # Price|Final Energy|Industry|Electricity|Sales Margin # Price|Final Energy|Industry|Electricity|Transport and Distribution From 9b76a385e28b8e54e204cc6e4dd54b0d114875ba Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Tue, 16 Sep 2025 17:16:20 +0200 Subject: [PATCH 111/202] access scenario config via n.meta --- scripts/pypsa-de/plot_scenario_comparison.py | 2 +- scripts/pypsa-de/prepare_regret_network.py | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/scripts/pypsa-de/plot_scenario_comparison.py b/scripts/pypsa-de/plot_scenario_comparison.py index cbfcb9824..6eb639b28 100644 --- a/scripts/pypsa-de/plot_scenario_comparison.py +++ b/scripts/pypsa-de/plot_scenario_comparison.py @@ -42,7 +42,7 @@ def scenario_plot(df, output_dir, var): if "regret_variables" in fns[0] and len(fns) in [9, 16]: fns = [ fn for fn in fns if "NoFlex/" not in fn - ] # !!! CAVEAT AGAIN DISPATCHING ON FILENAME + ] # !!! CAVEAT DISPATCHING ON FILENAME for file in fns: _df = pd.read_excel( file, index_col=list(range(5)), sheet_name="data" diff --git a/scripts/pypsa-de/prepare_regret_network.py b/scripts/pypsa-de/prepare_regret_network.py index e48636643..410a41cd9 100644 --- a/scripts/pypsa-de/prepare_regret_network.py +++ b/scripts/pypsa-de/prepare_regret_network.py @@ -193,13 +193,12 @@ def fix_capacities(realization, decision, scope="DE", strict=False, no_flex=Fals scope_to_fix = snakemake.params["scope_to_fix"] h2_vent = snakemake.params["h2_vent"] - # CAVEAT The 'NoFlex' string in the scenario actually controls the behavior of this function n = fix_capacities( realization, decision, scope=scope_to_fix, strict=strict, - no_flex="NoFlex" in snakemake.input.decision, + no_flex=decision.meta.get("iiasa_database").get("no_flex_lt_run", False), ) if strict: From 78177065301ebe85e4c593ffd06ce5e4835eba87 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Tue, 16 Sep 2025 17:23:58 +0200 Subject: [PATCH 112/202] rename config for short term runs without no flexibility --- Snakefile | 16 +++++++--------- config/config.de.yaml | 2 +- scripts/pypsa-de/solve_regret_network.py | 4 ++-- 3 files changed, 10 insertions(+), 12 deletions(-) diff --git a/Snakefile b/Snakefile index f077a151a..0c70a3ec2 100644 --- a/Snakefile +++ b/Snakefile @@ -1014,8 +1014,8 @@ rule prepare_regret_network: rule solve_regret_network: params: - no_flex_sensitivity=lambda w: ( - True if w.regret_dir == "no_flex_regret_networks" else False + no_flex_st_run=lambda w: ( + True if w.regret_dir == "no_flex_st_regret_networks" else False ), solving=config_provider("solving"), regret_run=True, @@ -1108,7 +1108,7 @@ rule regret_no_flex: input: "results/" + config["run"]["prefix"] - + "/scenario_comparison/no_flex_regret_networks/Price-Carbon.png", + + "/scenario_comparison/no_flex_st_regret_networks/Price-Carbon.png", rule regret_base: @@ -1125,16 +1125,14 @@ rule regret_all: + config["run"]["prefix"] + "/scenario_comparison/{regret_dir}/Price-Carbon.png", regret_dir=( - ["regret_networks", "no_flex_regret_networks"] - if config_provider( - "iiasa_database", "regret_run", "no_flex_sensitivity" - )(w) + ["regret_networks", "no_flex_st_regret_networks"] + if config_provider("iiasa_database", "regret_run", "no_flex_st_run")(w) else ["regret_networks"] ), ), 
f"results/{config['run']['prefix']}/regret_plots/LT_comparison/elec_capa_comp_de_2025.png", # expand("results/" + config["run"]["prefix"] + "/regret_plots/{regret_dir}/ST_comparison/elec_price_comp_de.png", - # regret_dir=["no_flex_regret_networks", "regret_networks"]), + # regret_dir=["no_flex_st_regret_networks", "regret_networks"]), rule plot_scenario_comparison_regrets: @@ -1163,7 +1161,7 @@ rule regret_plots_lt: planning_horizons=config_provider("scenario", "planning_horizons"), plotting=config_provider("plotting"), output_dir=directory( - "results/" + config['run']['prefix'] + "/regret_plots/LT_comparison" + "results/" + config["run"]["prefix"] + "/regret_plots/LT_comparison" ), input: networks=expand( diff --git a/config/config.de.yaml b/config/config.de.yaml index 649e47a26..3d1ec2bb3 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -47,7 +47,7 @@ iiasa_database: scope_to_fix: EU # Supported values are DE and EU strict: false # If false, the model allows capacity expansion for virtual links and bottlenecks h2_vent: true - no_flex_sensitivity: false # CAVEAT: This behaves like a wildcard + no_flex_st_run: false # CAVEAT: This behaves like a wildcard. Activates a run without flexibility for all combinations of long term scenarios. ageb_for_mobility: true # In 2020 use AGEB data for final energy demand and KBA for vehicles uba_for_mobility: # For 2025–2035 use MWMS scenario from UBA Projektionsbericht 2025 - 2025 diff --git a/scripts/pypsa-de/solve_regret_network.py b/scripts/pypsa-de/solve_regret_network.py index 1093be6b5..b313dbed6 100644 --- a/scripts/pypsa-de/solve_regret_network.py +++ b/scripts/pypsa-de/solve_regret_network.py @@ -25,7 +25,7 @@ planning_horizons="2035", decision="LowDemand", run="HighDemand", - regret_dir="no_flex_regret_networks", + regret_dir="no_flex_st_regret_networks", ) configure_logging(snakemake) @@ -43,7 +43,7 @@ ) np.random.seed(snakemake.params.solving["options"].get("seed", 123)) - if snakemake.params.get("no_flex_sensitivity") == True: + if snakemake.params.get("no_flex_st_run") == True: logger.info( "No flexibility sensitivity analysis activated. Removing decentral TES, batteries, and BEV DSM from the network." ) From 4e20492a94d9898c32a78006e9e2668d177d3035 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Tue, 16 Sep 2025 17:34:02 +0200 Subject: [PATCH 113/202] import function for removing flexibility in st runs --- scripts/pypsa-de/solve_regret_network.py | 35 ++++++++---------------- 1 file changed, 12 insertions(+), 23 deletions(-) diff --git a/scripts/pypsa-de/solve_regret_network.py b/scripts/pypsa-de/solve_regret_network.py index b313dbed6..cec6245d1 100644 --- a/scripts/pypsa-de/solve_regret_network.py +++ b/scripts/pypsa-de/solve_regret_network.py @@ -1,3 +1,4 @@ +import importlib.util import logging import pathlib @@ -13,6 +14,15 @@ ) from scripts.solve_network import solve_network +_spec_path = pathlib.Path(__file__).resolve().parent / "modify_prenetwork.py" +_spec = importlib.util.spec_from_file_location( + "scripts.pypsa_de.modify_prenetwork", _spec_path +) +_modify_prenetwork = importlib.util.module_from_spec(_spec) +_spec.loader.exec_module(_modify_prenetwork) +remove_flexibility_options = _modify_prenetwork.remove_flexibility_options + + logger = logging.getLogger(__name__) if __name__ == "__main__": @@ -44,29 +54,8 @@ np.random.seed(snakemake.params.solving["options"].get("seed", 123)) if snakemake.params.get("no_flex_st_run") == True: - logger.info( - "No flexibility sensitivity analysis activated. 
Removing decentral TES, batteries, and BEV DSM from the network." - ) - carriers_to_drop = [ - "urban decentral water tanks charger", - "urban decentral water tanks discharger", - "urban decentral water tanks", - "rural water tanks charger", - "rural water tanks discharger", - "rural water tanks", - "battery charger", - "battery discharger", - "home battery charger", - "home battery discharger", - "battery", - "home battery", - "EV battery", - ] - n.remove("Link", n.links.query("carrier in @carriers_to_drop").index) - n.remove("Store", n.stores.query("carrier in @carriers_to_drop").index) - # Need to keep the EV battery bus - carriers_to_drop.remove("EV battery") - n.remove("Bus", n.buses.query("carrier in @carriers_to_drop").index) + logger.info("No flexibility short term analysis activated.") + remove_flexibility_options(n) with memory_logger( filename=getattr(snakemake.log, "memory", None), interval=logging_frequency From 89904ed0e7147eec433bd5cad4c2048c844d1a31 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 17 Sep 2025 11:21:33 +0200 Subject: [PATCH 114/202] improve workflwo for st sensitivites; add gas_price sensitivity --- Snakefile | 48 +++++++++++--------- config/config.de.yaml | 5 +- scripts/pypsa-de/export_ariadne_variables.py | 2 +- scripts/pypsa-de/plot_scenario_comparison.py | 2 +- scripts/pypsa-de/solve_regret_network.py | 21 +++++++-- 5 files changed, 49 insertions(+), 29 deletions(-) diff --git a/Snakefile b/Snakefile index 0c70a3ec2..7b195b887 100644 --- a/Snakefile +++ b/Snakefile @@ -1014,9 +1014,7 @@ rule prepare_regret_network: rule solve_regret_network: params: - no_flex_st_run=lambda w: ( - True if w.regret_dir == "no_flex_st_regret_networks" else False - ), + st_sensitivity="{sensitivity}", solving=config_provider("solving"), regret_run=True, energy_year=config_provider("energy", "energy_totals_year"), @@ -1028,16 +1026,16 @@ rule solve_regret_network: energy_totals=resources("energy_totals.csv"), output: regret_network=RESULTS - + "{regret_dir}/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", + + "regret_networks/{sensitivity}/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", shadow: shadow_config log: solver=RESULTS - + "logs/{regret_dir}/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}_solver.log", + + "logs/regret_networks/{sensitivity}/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}_solver.log", memory=RESULTS - + "logs/{regret_dir}/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}_memory.log", + + "logs/regret_networks/{sensitivity}/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}_memory.log", python=RESULTS - + "logs/{regret_dir}/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}_python.log", + + "logs/regret_networks/{sensitivity}/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}_python.log", threads: solver_threads resources: mem_mb=config_provider("solving", "mem_mb"), @@ -1069,7 +1067,7 @@ rule export_regret_variables: ), networks=expand( RESULTS - + "{regret_dir}/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", + + "regret_networks/{sensitivity}/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", **config["scenario"], allow_missing=True, ), @@ -1093,13 +1091,15 @@ rule export_regret_variables: industrial_production=resources("industrial_production_per_country.csv"), 
energy_totals=resources("energy_totals.csv"), output: - exported_variables=RESULTS + "{regret_dir}/regret_variables_{decision}.xlsx", + exported_variables=RESULTS + + "regret_variables/{sensitivity}/regret_variables_{decision}.xlsx", exported_variables_full=RESULTS - + "{regret_dir}/regret_variables_{decision}_full.xlsx", + + "regret_variables/{sensitivity}/regret_variables_{decision}_full.xlsx", resources: mem_mb=16000, log: - RESULTS + "{regret_dir}/logs/export_regret_variables_{decision}.log", + RESULTS + + "regret_variables/{sensitivity}/logs/export_regret_variables_{decision}.log", script: "scripts/pypsa-de/export_ariadne_variables.py" @@ -1118,31 +1118,35 @@ rule regret_base: + "/scenario_comparison/regret_networks/Price-Carbon.png", +def get_st_sensitivities(w): + dirs = ["base"] + for sens in config_provider("iiasa_database", "regret_run", "st_sensitivities")(w): + dirs.append(f"st_sensitivities/{sens}") + return dirs + + rule regret_all: input: lambda w: expand( "results/" + config["run"]["prefix"] - + "/scenario_comparison/{regret_dir}/Price-Carbon.png", - regret_dir=( - ["regret_networks", "no_flex_st_regret_networks"] - if config_provider("iiasa_database", "regret_run", "no_flex_st_run")(w) - else ["regret_networks"] - ), + + "/scenario_comparison/{sensitivity}/Price-Carbon.png", + sensitivity=get_st_sensitivities, ), f"results/{config['run']['prefix']}/regret_plots/LT_comparison/elec_capa_comp_de_2025.png", - # expand("results/" + config["run"]["prefix"] + "/regret_plots/{regret_dir}/ST_comparison/elec_price_comp_de.png", - # regret_dir=["no_flex_st_regret_networks", "regret_networks"]), + # expand("results/" + config["run"]["prefix"] + "/regret_plots/{sensitivity}/ST_comparison/elec_price_comp_de.png", + # sensitivity=get_st_sensitivities), rule plot_scenario_comparison_regrets: params: output_dir=directory( - "results/" + config["run"]["prefix"] + "/scenario_comparison/{regret_dir}" + "results/" + config["run"]["prefix"] + "/scenario_comparison/{sensitivity}" ), input: exported_variables=expand( - RESULTS + "{regret_dir}/regret_variables_{decision}_full.xlsx", + RESULTS + + "regret_variables/{sensitivity}/regret_variables_{decision}_full.xlsx", run=config_provider("run", "name"), decision=config_provider("run", "name"), allow_missing=True, @@ -1150,7 +1154,7 @@ rule plot_scenario_comparison_regrets: output: price_carbon="results/" + config["run"]["prefix"] - + "/scenario_comparison/{regret_dir}/Price-Carbon.png", + + "/scenario_comparison/{sensitivity}/Price-Carbon.png", script: "scripts/pypsa-de/plot_scenario_comparison.py" diff --git a/config/config.de.yaml b/config/config.de.yaml index 3d1ec2bb3..8fc441864 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -47,7 +47,10 @@ iiasa_database: scope_to_fix: EU # Supported values are DE and EU strict: false # If false, the model allows capacity expansion for virtual links and bottlenecks h2_vent: true - no_flex_st_run: false # CAVEAT: This behaves like a wildcard. Activates a run without flexibility for all combinations of long term scenarios. 
+ st_sensitivities: # CAVEAT: These behave like wildcards + #- no_flex + - gas_price_60 + #- gas_price_80 ageb_for_mobility: true # In 2020 use AGEB data for final energy demand and KBA for vehicles uba_for_mobility: # For 2025–2035 use MWMS scenario from UBA Projektionsbericht 2025 - 2025 diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index 07c4773e0..72627c2ed 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -5479,7 +5479,7 @@ def get_data( sector_opts="None", run="HighDemand", decision="LowDemand", - regret_dir="regret_networks", + sensitivity="base", ) configure_logging(snakemake) config = snakemake.config diff --git a/scripts/pypsa-de/plot_scenario_comparison.py b/scripts/pypsa-de/plot_scenario_comparison.py index 6eb639b28..1ba2fe5b7 100644 --- a/scripts/pypsa-de/plot_scenario_comparison.py +++ b/scripts/pypsa-de/plot_scenario_comparison.py @@ -24,7 +24,7 @@ def scenario_plot(df, output_dir, var): if "snakemake" not in globals(): snakemake = mock_snakemake( "plot_scenario_comparison_regrets", - regret_dir="regret_networks", + sensitivity="base", # simpl="", # clusters=22, # opts="", diff --git a/scripts/pypsa-de/solve_regret_network.py b/scripts/pypsa-de/solve_regret_network.py index cec6245d1..846a2b3b1 100644 --- a/scripts/pypsa-de/solve_regret_network.py +++ b/scripts/pypsa-de/solve_regret_network.py @@ -1,6 +1,7 @@ import importlib.util import logging import pathlib +import re import numpy as np import pypsa @@ -32,10 +33,10 @@ clusters=27, opts="", sector_opts="none", - planning_horizons="2035", + planning_horizons="2025", decision="LowDemand", run="HighDemand", - regret_dir="no_flex_st_regret_networks", + sensitivity="gas_price_50", ) configure_logging(snakemake) @@ -53,10 +54,22 @@ ) np.random.seed(snakemake.params.solving["options"].get("seed", 123)) - if snakemake.params.get("no_flex_st_run") == True: - logger.info("No flexibility short term analysis activated.") + if "no_flex" in snakemake.params.st_sensitivity: + logger.info( + "Running sensitivity of the short term model with less flexibility options." + ) remove_flexibility_options(n) + gas_price = re.findall(r"gas_price_(\d{2,3})", snakemake.params.st_sensitivity) + if gas_price: + gas_price = int(gas_price[0]) + logger.info( + f"Running sensitivity of the short term model with gas price set to {gas_price} €/MWh." 
+ ) + n.generators.loc[n.generators.carrier == "gas primary", "marginal_cost"] = ( + gas_price + ) + with memory_logger( filename=getattr(snakemake.log, "memory", None), interval=logging_frequency ) as mem: From c9c0de83d5b2e982de6af14a87d5ccdf35817e76 Mon Sep 17 00:00:00 2001 From: JulianGeis Date: Wed, 17 Sep 2025 11:30:16 +0200 Subject: [PATCH 115/202] fix lt plots --- Snakefile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Snakefile b/Snakefile index 7b195b887..1559f572e 100644 --- a/Snakefile +++ b/Snakefile @@ -1170,12 +1170,12 @@ rule regret_plots_lt: input: networks=expand( RESULTS - + "regret_networks/decision_{run}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", + + "regret_networks/base/decision_{run}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", **config["scenario"], run=config["run"]["name"], ), regret_variables=expand( - RESULTS + "regret_networks/regret_variables_{run}_full.xlsx", + RESULTS + "regret_variables/base/regret_variables_{run}_full.xlsx", run=config["run"]["name"], ), output: From c9fdd0d813ca55bb5ef95daae188c518a53b72d9 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 17 Sep 2025 15:42:47 +0200 Subject: [PATCH 116/202] simplify directories further --- Snakefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Snakefile b/Snakefile index 1559f572e..9cf8ff945 100644 --- a/Snakefile +++ b/Snakefile @@ -1121,7 +1121,7 @@ rule regret_base: def get_st_sensitivities(w): dirs = ["base"] for sens in config_provider("iiasa_database", "regret_run", "st_sensitivities")(w): - dirs.append(f"st_sensitivities/{sens}") + dirs.append(f"{sens}") return dirs From 70e729c3ede4c1e25fd2d6a2c43b389c0f9777e2 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 17 Sep 2025 15:42:57 +0200 Subject: [PATCH 117/202] add VRE market values to exporter --- scripts/pypsa-de/export_ariadne_variables.py | 59 ++++++++++++++++++++ 1 file changed, 59 insertions(+) diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index 72627c2ed..6875550a4 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -3353,6 +3353,64 @@ def get_weighted_costs(costs, flows): return result +def get_vre_market_values(n, region): + def get_einheitspreis(n, region): + ac_buses = n.buses.query( + f"index.str.startswith('{region}') and carrier == 'AC'" + ).index + nodal_prices = n.buses_t.marginal_price[ac_buses] + + nodal_flows = ( + n.statistics.withdrawal( + bus_carrier="AC", + groupby=["name", "bus", "carrier"], + aggregate_time=False, + ) + .groupby("bus") + .sum() + .T.filter( + like="DE", + axis=1, + ) + ) + + weighted_mean_nodal_price = ( + nodal_flows.mul(nodal_prices).sum(axis=1).div(nodal_flows.sum(axis=1)) + ) + + return weighted_mean_nodal_price + + def get_vre_market_value(n, region, carriers): + idx = n.generators.query( + f"carrier in {carriers} and bus.str.contains('{region}')" + ).index + gen = n.generators_t.p[idx].sum(axis=1) + einheitspreis = get_einheitspreis(n, region) + + return einheitspreis.mul(gen).sum() / gen.sum() + + var = pd.Series() + var["Market Value|Electricity|Wind|Onshore"] = get_vre_market_value( + n, region, ["onwind"] + ) + var["Market Value|Electricity|Wind|Offshore|AC"] = get_vre_market_value( + n, region, ["offwind-ac"] + ) + var["Market Value|Electricity|Wind|Offshore|DC"] = get_vre_market_value( + n, region, ["offwind-dc"] + ) + var["Market Value|Electricity|Wind|Offshore"] = get_vre_market_value( + 
n, region, ["offwind-ac", "offwind-dc"] + ) + var["Market Value|Electricity|Solar|PV"] = get_vre_market_value( + n, region, ["solar", "solar-hsat"] + ) + var["Market Value|Electricity|Solar|Rooftop"] = get_vre_market_value( + n, region, ["solar rooftop"] + ) + return var + + def get_prices(n, region): """ Calculate the prices of various energy sources in the Ariadne model. @@ -5360,6 +5418,7 @@ def get_ariadne_var( industry_production, ), get_prices(n, region), + get_vre_market_values(n, region), get_emissions(n, region, energy_totals, industry_demand), get_policy(n, year), get_trade(n, region), From 21ba40abb8ab349443e8bf60e8ed7c166a9b7c72 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 17 Sep 2025 15:50:03 +0200 Subject: [PATCH 118/202] add more market values --- scripts/pypsa-de/export_ariadne_variables.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index 6875550a4..947b58861 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -3408,6 +3408,14 @@ def get_vre_market_value(n, region, carriers): var["Market Value|Electricity|Solar|Rooftop"] = get_vre_market_value( n, region, ["solar rooftop"] ) + var["Market Value|Electricity|Solar"] = get_vre_market_value( + n, region, ["solar", "solar-hsat", "solar rooftop"] + ) + var["Market Value|Electricity|VRE"] = get_vre_market_value( + n, + region, + ["onwind", "offwind-ac", "offwind-dc", "solar", "solar-hsat", "solar rooftop"], + ) return var From 9e154bed93b6ab433bc980e3099364de62565718 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 17 Sep 2025 15:53:32 +0200 Subject: [PATCH 119/202] prevent memory issues with too many ST scenarios --- scripts/pypsa-de/plot_scenario_comparison.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/scripts/pypsa-de/plot_scenario_comparison.py b/scripts/pypsa-de/plot_scenario_comparison.py index 1ba2fe5b7..9559514ce 100644 --- a/scripts/pypsa-de/plot_scenario_comparison.py +++ b/scripts/pypsa-de/plot_scenario_comparison.py @@ -43,6 +43,9 @@ def scenario_plot(df, output_dir, var): fns = [ fn for fn in fns if "NoFlex/" not in fn ] # !!! 
CAVEAT DISPATCHING ON FILENAME + if len(fns) > 16: + raise ValueError("Too many files to plot together.") + for file in fns: _df = pd.read_excel( file, index_col=list(range(5)), sheet_name="data" From 375b53f2847fa803b9fca53fae206779d9239611 Mon Sep 17 00:00:00 2001 From: JulianGeis Date: Wed, 17 Sep 2025 17:10:51 +0200 Subject: [PATCH 120/202] add unit commitment --- Snakefile | 1 + config/config.de.yaml | 3 +- scripts/pypsa-de/prepare_regret_network.py | 124 +++++++++++++++++++++ scripts/pypsa-de/regret_plots_lt.py | 4 +- 4 files changed, 129 insertions(+), 3 deletions(-) diff --git a/Snakefile b/Snakefile index 9cf8ff945..50dbe18d1 100644 --- a/Snakefile +++ b/Snakefile @@ -997,6 +997,7 @@ rule prepare_regret_network: scope_to_fix=config_provider("iiasa_database", "regret_run", "scope_to_fix"), h2_vent=config_provider("iiasa_database", "regret_run", "h2_vent"), strict=config_provider("iiasa_database", "regret_run", "strict"), + unit_commitment=config_provider("iiasa_database", "regret_run", "unit_commitment"), input: decision=RESULTS.replace("{run}", "{decision}") + "networks/base_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", diff --git a/config/config.de.yaml b/config/config.de.yaml index 8fc441864..96c5a0e4e 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -4,7 +4,7 @@ # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#run run: - prefix: 20250908_regret_no_flex_lt_run + prefix: 20250917_regret_uc name: # - ExPol # - KN2045_Mix @@ -51,6 +51,7 @@ iiasa_database: #- no_flex - gas_price_60 #- gas_price_80 + unit_commitment: true ageb_for_mobility: true # In 2020 use AGEB data for final energy demand and KBA for vehicles uba_for_mobility: # For 2025–2035 use MWMS scenario from UBA Projektionsbericht 2025 - 2025 diff --git a/scripts/pypsa-de/prepare_regret_network.py b/scripts/pypsa-de/prepare_regret_network.py index 410a41cd9..75f25cd91 100644 --- a/scripts/pypsa-de/prepare_regret_network.py +++ b/scripts/pypsa-de/prepare_regret_network.py @@ -25,6 +25,124 @@ logger = logging.getLogger(__name__) +def add_unit_commitment(n, carriers=["OCGT", "CCGT", "coal", "lignite", "nuclear", "oil","urban central solid biomass CHP"], regions=["DE"]): + """ + Add unit commitment for conventionals + based on + https://discord.com/channels/914472852571426846/1042037164088766494/1042395972438868030 + from DIW + [1] https://www.diw.de/documents/publikationen/73/diw_01.c.424566.de/diw_datadoc_2013-068.pdf + + [2] update with p.48 https://www.agora-energiewende.de/fileadmin/Projekte/2017/Flexibility_in_thermal_plants/115_flexibility-report-WEB.pdf + + [3] SI Schill et al. 
p.26 https://static-content.springer.com/esm/art%3A10.1038%2Fnenergy.2017.50/MediaObjects/41560_2017_BFnenergy201750_MOESM196_ESM.pdf + [4] MA https://zenodo.org/record/6421682 + """ + + # Helper function to filter links by carrier and region + def get_filtered_links(carrier_list): + # Filter by carrier + carrier_mask = n.links.carrier.isin(carrier_list) + + # Filter by region - check if bus0 or bus1 is in the specified regions + region_mask = ( + n.links.bus0.str.contains('|'.join(regions), na=False) | + n.links.bus1.str.contains('|'.join(regions), na=False) + ) + + return n.links[carrier_mask & region_mask].index + + # Only process carriers that are both requested and available + available_carriers = set(carriers) & set(n.links.carrier.unique()) + + # OCGT + if "OCGT" in available_carriers: + links_i = get_filtered_links(["OCGT"]) + if len(links_i) > 0: + n.links.loc[links_i, "p_min_pu"] = 0.2 # [3] # removed since otherwise NL is not solving + n.links.loc[links_i, "start_up_cost"] = 24 * 0.4 # [3] start-up depreciation costs Eur/MW + n.links.loc[links_i, "ramp_limit_up"] = 1 # [2] 8-12% per min + n.links.loc[links_i, "ramp_limit_start_up"] = 0.2 # [4] p.41 + n.links.loc[links_i, "ramp_limit_shut_down"] = 0.2 # [4] p.41 + # cold/warm start up time within minutes, complete ramp up within one hour + + # CCGT + if "CCGT" in available_carriers: + links_i = get_filtered_links(["CCGT"]) + if len(links_i) > 0: + n.links.loc[links_i, "p_min_pu"] = 0.45 # [2] mean of Minimum load Most commonly used power plants + n.links.loc[links_i, "start_up_cost"] = 144 * 0.57 # [3] start-up depreciation costs Eur/MW, in [4] 144 + n.links.loc[links_i, "min_up_time"] = 3 # mean of "Cold start-up time" [2] Most commonly used power plants + n.links.loc[links_i, "min_down_time"] = 2 # [3] Minimum offtime [hours] + n.links.loc[links_i, "ramp_limit_up"] = 1 # [2] 2-4% per min + n.links.loc[links_i, "ramp_limit_start_up"] = 0.45 # [4] p.41 + n.links.loc[links_i, "ramp_limit_shut_down"] = 0.45 # [4] p.41 + + # coal + if "coal" in available_carriers: + links_i = get_filtered_links(["coal"]) + if len(links_i) > 0: + n.links.loc[links_i, "p_min_pu"] = 0.325 # [2] mean of Minimum load Most commonly used power plants + n.links.loc[links_i, "start_up_cost"] = 108 * 0.33 # [4] p.41 + n.links.loc[links_i, "min_up_time"] = 5 # mean of "Cold start-up time" [2] Most commonly used power plants + n.links.loc[links_i, "min_down_time"] = 6 # [3] Minimum offtime [hours], large plant + n.links.loc[links_i, "ramp_limit_up"] = 1 # [2] 1.5-4% per minute + n.links.loc[links_i, "ramp_limit_start_up"] = 0.38 # [4] p.41 + n.links.loc[links_i, "ramp_limit_shut_down"] = 0.38 # [4] p.41 + + # lignite + if "lignite" in available_carriers: + links_i = get_filtered_links(["lignite"]) + if len(links_i) > 0: + n.links.loc[links_i, "p_min_pu"] = 0.325 # 0.4 # [3] + n.links.loc[links_i, "start_up_cost"] = 58 * 0.33 # [4] p.41 + n.links.loc[links_i, "min_up_time"] = 7 # mean of "Cold start-up time" [2] Most commonly used power plants + n.links.loc[links_i, "min_down_time"] = 6 # [3] Minimum offtime [hours], large plant + n.links.loc[links_i, "ramp_limit_up"] = 1 # [2] 1-2% per minute + n.links.loc[links_i, "ramp_limit_start_up"] = 0.4 # [4] p.41 + n.links.loc[links_i, "ramp_limit_shut_down"] = 0.4 # [4] p.41 + + # nuclear + if "nuclear" in available_carriers: + links_i = get_filtered_links(["nuclear"]) + if len(links_i) > 0: + n.links.loc[links_i, "p_min_pu"] = 0.5 # [3] + n.links.loc[links_i, "start_up_cost"] = 50 * 0.33 # [3] start-up depreciation 
costs Eur/MW + n.links.loc[links_i, "min_up_time"] = 6 # [1] + n.links.loc[links_i, "ramp_limit_up"] = 0.3 # [4] + n.links.loc[links_i, "min_down_time"] = 10 # [3] Minimum offtime [hours] + n.links.loc[links_i, "ramp_limit_start_up"] = 0.5 # [4] p.41 + n.links.loc[links_i, "ramp_limit_shut_down"] = 0.5 # [4] p.41 + + # oil + if "oil" in available_carriers: + links_i = get_filtered_links(["oil"]) + if len(links_i) > 0: + n.links.loc[links_i, "p_min_pu"] = 0.2 # [4] + n.links.loc[links_i, "start_up_cost"] = 1 * 0.35 # [4] start-up depreciation costs Eur/MW + n.links.loc[links_i, "ramp_limit_start_up"] = 0.2 # [4] p.41 + n.links.loc[links_i, "ramp_limit_shut_down"] = 0.2 # [4] p.41 + + # biomass + if "urban central solid biomass CHP" in available_carriers: + links_i = get_filtered_links(["urban central solid biomass CHP"]) + if len(links_i) > 0: + n.links.loc[links_i, "p_min_pu"] = 0.38 # [4] + n.links.loc[links_i, "start_up_cost"] = 78 * 0.27 # [4] + n.links.loc[links_i, "min_up_time"] = 2 # [4] + n.links.loc[links_i, "min_down_time"] = 2 # [4] + n.links.loc[links_i, "ramp_limit_start_up"] = 0.38 # [4] p.41 + n.links.loc[links_i, "ramp_limit_shut_down"] = 0.38 # [4] p.41 + + # Set committable flag for all processed carriers in specified regions + processed_carriers = [c for c in carriers if c in available_carriers] + if processed_carriers: + links_i = get_filtered_links(processed_carriers) + if len(links_i) > 0: + n.links.loc[links_i, "committable"] = True + + + def _unfix_bottlenecks(new, deci, name, extendable_i): if name == "links": # Links that have 0-cost and are extendable @@ -201,6 +319,12 @@ def fix_capacities(realization, decision, scope="DE", strict=False, no_flex=Fals no_flex=decision.meta.get("iiasa_database").get("no_flex_lt_run", False), ) + unit_commitment = snakemake.params.get("unit_commitment", False) + + if unit_commitment: + logger.info("Add unit commitment to the network.") + add_unit_commitment(n, carriers=["coal", "lignite"], regions=["DE"]) + if strict: logger.info( "Strict regret run chosen. No capacities are extendable. Activating load shedding to prevent infeasibilites." 
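The attributes written onto the filtered links above (committable, p_min_pu, start_up_cost, min_up_time, min_down_time and the ramp limits) are standard PyPSA unit-commitment fields; setting committable = True is what turns the linear dispatch of those links into a mixed-integer problem with status and start-up variables. A minimal, self-contained sketch of that mechanism, assuming a PyPSA version in which Links support unit commitment (as this patch presumes) and a MILP-capable solver such as HiGHS; all component names and numbers below are illustrative only, not taken from the patch:

import pypsa

n = pypsa.Network()
n.set_snapshots([0, 1, 2, 3])
n.add("Bus", "gas")
n.add("Bus", "elec")
n.add("Generator", "gas supply", bus="gas", p_nom=1000, marginal_cost=30)
n.add("Load", "demand", bus="elec", p_set=[100, 300, 100, 300])
n.add(
    "Link",
    "OCGT",
    bus0="gas",
    bus1="elec",
    p_nom=1000,          # thermal input capacity; electric output is p_nom * efficiency
    efficiency=0.4,
    committable=True,    # adds binary on/off and start-up variables for this link
    p_min_pu=0.2,        # minimum stable loading while committed
    start_up_cost=2000,  # cost per start-up event (currency units in this toy example)
    min_up_time=2,       # counted in snapshots
    min_down_time=2,
    ramp_limit_up=1.0,
)
n.optimize()             # solved as a MILP, e.g. with the default HiGHS solver

After solving, the per-snapshot on/off pattern of the committable link is reported alongside its dispatch (n.links_t.status), mirroring how committable generators are handled.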
diff --git a/scripts/pypsa-de/regret_plots_lt.py b/scripts/pypsa-de/regret_plots_lt.py index 9780b5a25..95fbd0b47 100644 --- a/scripts/pypsa-de/regret_plots_lt.py +++ b/scripts/pypsa-de/regret_plots_lt.py @@ -224,14 +224,14 @@ def plot_capacity_comparison( configure_logging(snakemake) config = snakemake.config planning_horizons = snakemake.params.planning_horizons - scenarios = ["HighDemand", "LowDemand"] + scenarios = ["HighDemand", "LowDemand"] #config["run"]["name"] tech_colors = snakemake.params.plotting["tech_colors"] # Load networks networks = defaultdict(dict) for fn in snakemake.input.networks: - scenario = fn.split(os.sep)[-3] + scenario = fn.split(os.sep)[-4] year = int(re.search(r"_(\d{4})\.nc$", fn).group(1)) networks[scenario][year] = pypsa.Network(fn) From f6447f22b42b6db8c773461b64b1530f7955faa7 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Thu, 18 Sep 2025 16:48:47 +0200 Subject: [PATCH 121/202] harmonize co2 emissions across scenario and with UBA projektionsbericht --- config/scenarios.manual.yaml | 2 -- scripts/pypsa-de/build_scenarios.py | 22 ++++++++++------------ 2 files changed, 10 insertions(+), 14 deletions(-) diff --git a/config/scenarios.manual.yaml b/config/scenarios.manual.yaml index 228066d12..bf84165af 100644 --- a/config/scenarios.manual.yaml +++ b/config/scenarios.manual.yaml @@ -62,7 +62,6 @@ HighDemandNoFlex: LowDemand: iiasa_database: - reference_scenario: ExPol # e.g., CO2 emissions from REMIND will follow ExPol uba_for_industry: - 2025 - 2030 @@ -101,7 +100,6 @@ LowDemand: LowDemandNoFlex: iiasa_database: no_flex_lt_run: true - reference_scenario: ExPol # e.g., CO2 emissions from REMIND will follow ExPol uba_for_industry: - 2025 - 2030 diff --git a/scripts/pypsa-de/build_scenarios.py b/scripts/pypsa-de/build_scenarios.py index 29a030937..87c2cfba6 100644 --- a/scripts/pypsa-de/build_scenarios.py +++ b/scripts/pypsa-de/build_scenarios.py @@ -66,7 +66,11 @@ def get_co2_budget(df, source): ## GHG targets according to KSG initial_years_co2 = pd.Series( index=[2020, 2025, 2030], - data=[813, 643, 438], + data=[ + 813, + 629, # From UBA Projektionsbericht 2025 + 438, + ], ) later_years_co2 = pd.Series( @@ -85,18 +89,8 @@ def get_co2_budget(df, source): ) else: raise ValueError("Invalid source for CO2 budget.") - ## Compute nonco2 from Ariadne-Leitmodell (REMIND) - # co2 = ( - # df.loc["Emissions|CO2 incl Bunkers","Mt CO2/yr"] - # - df.loc["Emissions|CO2|Land-Use Change","Mt CO2-equiv/yr"] - # - df.loc["Emissions|CO2|Energy|Demand|Bunkers","Mt CO2/yr"] - # ) - # ghg = ( - # df.loc["Emissions|Kyoto Gases","Mt CO2-equiv/yr"] - # - df.loc["Emissions|Kyoto Gases|Land-Use Change","Mt CO2-equiv/yr"] - # # No Kyoto Gas emissions for Bunkers recorded in Ariadne DB - # ) + ## Compute nonco2 from Ariadne-Leitmodell (REMIND) try: co2_land_use_change = df.loc["Emissions|CO2|Land-Use Change", "Mt CO2-equiv/yr"] @@ -121,6 +115,10 @@ def get_co2_budget(df, source): targets_pypsa = targets_co2 - nonco2 + logger.info("Non-CO2 GHG emissions assumed (in Mt CO2-equiv/yr):") + for year in nonco2.index: + logger.info(f"{year}: {nonco2.loc[year]:.1f}") + target_fractions_pypsa = targets_pypsa.loc[targets_co2.index] / baseline_pypsa target_fractions_pypsa[2020] = ( 0.671 # Hard-coded based on REMIND data from ariadne2-internal DB From 7b40c40917bc2ff1aade284302a447530dd7759b Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Thu, 18 Sep 2025 16:50:47 +0200 Subject: [PATCH 122/202] update prefix --- config/config.de.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) 
diff --git a/config/config.de.yaml b/config/config.de.yaml index 96c5a0e4e..ea1315238 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -4,7 +4,7 @@ # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#run run: - prefix: 20250917_regret_uc + prefix: 20250918_regret_same_nonco2 name: # - ExPol # - KN2045_Mix From 37e6e1a1dadc79cb9df17081a0931457b6fdb716 Mon Sep 17 00:00:00 2001 From: JulianGeis Date: Fri, 19 Sep 2025 11:09:21 +0200 Subject: [PATCH 123/202] improve unit commitment representation --- config/config.de.yaml | 8 +- scripts/pypsa-de/prepare_regret_network.py | 264 +++++++++++++-------- 2 files changed, 165 insertions(+), 107 deletions(-) diff --git a/config/config.de.yaml b/config/config.de.yaml index ea1315238..082e82d2c 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -4,7 +4,7 @@ # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#run run: - prefix: 20250918_regret_same_nonco2 + prefix: 20250919_regret_uc name: # - ExPol # - KN2045_Mix @@ -51,7 +51,11 @@ iiasa_database: #- no_flex - gas_price_60 #- gas_price_80 - unit_commitment: true + unit_commitment: + enable: false + params: custom # options: conservative, average, optimistic, custom + carriers: ["OCGT", "coal", "lignite", "urban central solid biomass CHP"] # subset of ["OCGT", "CCGT", "coal", "lignite", "nuclear", "oil","urban central solid biomass CHP"] + regions: ["DE"] ageb_for_mobility: true # In 2020 use AGEB data for final energy demand and KBA for vehicles uba_for_mobility: # For 2025–2035 use MWMS scenario from UBA Projektionsbericht 2025 - 2025 diff --git a/scripts/pypsa-de/prepare_regret_network.py b/scripts/pypsa-de/prepare_regret_network.py index 75f25cd91..2e8461e9d 100644 --- a/scripts/pypsa-de/prepare_regret_network.py +++ b/scripts/pypsa-de/prepare_regret_network.py @@ -24,122 +24,164 @@ logger = logging.getLogger(__name__) - -def add_unit_commitment(n, carriers=["OCGT", "CCGT", "coal", "lignite", "nuclear", "oil","urban central solid biomass CHP"], regions=["DE"]): +uc_params_custom = { + "OCGT": { + "p_min_pu": 0.2, + "start_up_cost": 20, + "min_up_time": 1, + "min_down_time": 1, + "ramp_limit_up": 1, + }, + "CCGT": { + "p_min_pu": 0.45, + "start_up_cost": 80, + "min_up_time": 3, + "min_down_time": 2, + "ramp_limit_up": 1, + }, + "coal": { + "p_min_pu": 0.325, + "start_up_cost": 60, + "min_up_time": 6, + "min_down_time": 6, + "ramp_limit_up": 1, + }, + "lignite": { + "p_min_pu": 0.325, + "start_up_cost": 80, + "min_up_time": 10, + "min_down_time": 10, + "ramp_limit_up": 1, + }, + "nuclear": { + "p_min_pu": 0.5, + "start_up_cost": 100, + "min_up_time": 8, + "min_down_time": 10, + }, + "oil": { + "p_min_pu": 0.2, + "start_up_cost": 30, + "min_up_time": 1, + "min_down_time": 1, + "ramp_limit_up": 1, + }, + "urban central solid biomass CHP": { + "p_min_pu": 0.38, + "start_up_cost": 50, + "min_up_time": 2, + "min_down_time": 2, + } +} + +uc_params_optimistic = { + "OCGT": {"p_min_pu": 0.15, "start_up_cost": 20, "min_up_time": 1, "min_down_time": 1, + "ramp_limit_up": 1, "ramp_limit_start_up": 0.3, "ramp_limit_shut_down": 0.3}, + "CCGT": {"p_min_pu": 0.4, "start_up_cost": 100, "min_up_time": 2, "min_down_time": 2, + "ramp_limit_up": 1, "ramp_limit_start_up": 0.5, "ramp_limit_shut_down": 0.5}, + "coal": {"p_min_pu": 0.3, "start_up_cost": 80, "min_up_time": 4, "min_down_time": 4, + "ramp_limit_up": 0.8, "ramp_limit_start_up": 0.3, "ramp_limit_shut_down": 0.3}, + "lignite": {"p_min_pu": 0.3, "start_up_cost": 120, "min_up_time": 5, 
"min_down_time": 5, + "ramp_limit_up": 0.7, "ramp_limit_start_up": 0.3, "ramp_limit_shut_down": 0.3}, + "nuclear": {"p_min_pu": 0.45, "start_up_cost": 200, "min_up_time": 6, "min_down_time": 8, + "ramp_limit_up": 0.2, "ramp_limit_start_up": 0.2, "ramp_limit_shut_down": 0.2}, + "oil": {"p_min_pu": 0.2, "start_up_cost": 30, "min_up_time": 1, "min_down_time": 1, + "ramp_limit_up": 1, "ramp_limit_start_up": 0.3, "ramp_limit_shut_down": 0.3}, + "urban central solid biomass CHP": {"p_min_pu": 0.35, "start_up_cost": 50, "min_up_time": 1, "min_down_time": 1, + "ramp_limit_up": 0.9, "ramp_limit_start_up": 0.4, "ramp_limit_shut_down": 0.4}, +} + +uc_params_average = { + "OCGT": {"p_min_pu": 0.2, "start_up_cost": 40, "min_up_time": 1, "min_down_time": 1, + "ramp_limit_up": 1, "ramp_limit_start_up": 0.2, "ramp_limit_shut_down": 0.2}, + "CCGT": {"p_min_pu": 0.45, "start_up_cost": 150, "min_up_time": 3, "min_down_time": 2, + "ramp_limit_up": 1, "ramp_limit_start_up": 0.45, "ramp_limit_shut_down": 0.45}, + "coal": {"p_min_pu": 0.325, "start_up_cost": 120, "min_up_time": 5, "min_down_time": 6, + "ramp_limit_up": 0.7, "ramp_limit_start_up": 0.38, "ramp_limit_shut_down": 0.38}, + "lignite": {"p_min_pu": 0.325, "start_up_cost": 150, "min_up_time": 7, "min_down_time": 6, + "ramp_limit_up": 0.6, "ramp_limit_start_up": 0.4, "ramp_limit_shut_down": 0.4}, + "nuclear": {"p_min_pu": 0.5, "start_up_cost": 250, "min_up_time": 6, "min_down_time": 10, + "ramp_limit_up": 0.3, "ramp_limit_start_up": 0.3, "ramp_limit_shut_down": 0.3}, + "oil": {"p_min_pu": 0.2, "start_up_cost": 50, "min_up_time": 1, "min_down_time": 1, + "ramp_limit_up": 0.8, "ramp_limit_start_up": 0.2, "ramp_limit_shut_down": 0.2}, + "urban central solid biomass CHP": {"p_min_pu": 0.38, "start_up_cost": 80, "min_up_time": 2, "min_down_time": 2, + "ramp_limit_up": 0.7, "ramp_limit_start_up": 0.38, "ramp_limit_shut_down": 0.38}, +} + +uc_params_conservative = { + "OCGT": {"p_min_pu": 0.25, "start_up_cost": 60, "min_up_time": 2, "min_down_time": 2, + "ramp_limit_up": 0.9, "ramp_limit_start_up": 0.15, "ramp_limit_shut_down": 0.15}, + "CCGT": {"p_min_pu": 0.5, "start_up_cost": 200, "min_up_time": 4, "min_down_time": 3, + "ramp_limit_up": 0.8, "ramp_limit_start_up": 0.35, "ramp_limit_shut_down": 0.35}, + "coal": {"p_min_pu": 0.35, "start_up_cost": 160, "min_up_time": 6, "min_down_time": 8, + "ramp_limit_up": 0.5, "ramp_limit_start_up": 0.25, "ramp_limit_shut_down": 0.25}, + "lignite": {"p_min_pu": 0.35, "start_up_cost": 200, "min_up_time": 8, "min_down_time": 10, + "ramp_limit_up": 0.4, "ramp_limit_start_up": 0.2, "ramp_limit_shut_down": 0.2}, + "nuclear": {"p_min_pu": 0.55, "start_up_cost": 400, "min_up_time": 10, "min_down_time": 12, + "ramp_limit_up": 0.15, "ramp_limit_start_up": 0.15, "ramp_limit_shut_down": 0.15}, + "oil": {"p_min_pu": 0.25, "start_up_cost": 80, "min_up_time": 2, "min_down_time": 2, + "ramp_limit_up": 0.6, "ramp_limit_start_up": 0.15, "ramp_limit_shut_down": 0.15}, + "urban central solid biomass CHP": {"p_min_pu": 0.4, "start_up_cost": 120, "min_up_time": 3, "min_down_time": 3, + "ramp_limit_up": 0.5, "ramp_limit_start_up": 0.3, "ramp_limit_shut_down": 0.3}, +} + +def add_unit_commitment(n, uc_params=uc_params_average, carriers=["OCGT", "coal", "lignite", "urban central solid biomass CHP"], regions=["DE"]): """ - Add unit commitment for conventionals - based on - https://discord.com/channels/914472852571426846/1042037164088766494/1042395972438868030 - from DIW - [1] 
https://www.diw.de/documents/publikationen/73/diw_01.c.424566.de/diw_datadoc_2013-068.pdf + Add unit commitment parameters to links in the network based on a UC parameter dictionary. + + Parameters + ---------- + n : pypsa.Network + The PyPSA network. + + uc_params : dict + Nested dict with carrier names as keys and dict of UC parameters as values. + Example: + { + "OCGT": { + "p_min_pu": 0.2, + "start_up_cost": 40, + "min_up_time": 1, + "min_down_time": 1, + "ramp_limit_up": 1, + "ramp_limit_start_up": 0.2, + "ramp_limit_shut_down": 0.2, + }, + ... + } - [2] update with p.48 https://www.agora-energiewende.de/fileadmin/Projekte/2017/Flexibility_in_thermal_plants/115_flexibility-report-WEB.pdf + carriers : list, optional + List of carriers to process (default = all carriers in uc_params). - [3] SI Schill et al. p.26 https://static-content.springer.com/esm/art%3A10.1038%2Fnenergy.2017.50/MediaObjects/41560_2017_BFnenergy201750_MOESM196_ESM.pdf - [4] MA https://zenodo.org/record/6421682 + regions : list + List of region codes to filter buses (default = ["DE"]). """ - # Helper function to filter links by carrier and region def get_filtered_links(carrier_list): - # Filter by carrier carrier_mask = n.links.carrier.isin(carrier_list) - - # Filter by region - check if bus0 or bus1 is in the specified regions region_mask = ( n.links.bus0.str.contains('|'.join(regions), na=False) | n.links.bus1.str.contains('|'.join(regions), na=False) ) - return n.links[carrier_mask & region_mask].index - # Only process carriers that are both requested and available - available_carriers = set(carriers) & set(n.links.carrier.unique()) - - # OCGT - if "OCGT" in available_carriers: - links_i = get_filtered_links(["OCGT"]) - if len(links_i) > 0: - n.links.loc[links_i, "p_min_pu"] = 0.2 # [3] # removed since otherwise NL is not solving - n.links.loc[links_i, "start_up_cost"] = 24 * 0.4 # [3] start-up depreciation costs Eur/MW - n.links.loc[links_i, "ramp_limit_up"] = 1 # [2] 8-12% per min - n.links.loc[links_i, "ramp_limit_start_up"] = 0.2 # [4] p.41 - n.links.loc[links_i, "ramp_limit_shut_down"] = 0.2 # [4] p.41 - # cold/warm start up time within minutes, complete ramp up within one hour - - # CCGT - if "CCGT" in available_carriers: - links_i = get_filtered_links(["CCGT"]) - if len(links_i) > 0: - n.links.loc[links_i, "p_min_pu"] = 0.45 # [2] mean of Minimum load Most commonly used power plants - n.links.loc[links_i, "start_up_cost"] = 144 * 0.57 # [3] start-up depreciation costs Eur/MW, in [4] 144 - n.links.loc[links_i, "min_up_time"] = 3 # mean of "Cold start-up time" [2] Most commonly used power plants - n.links.loc[links_i, "min_down_time"] = 2 # [3] Minimum offtime [hours] - n.links.loc[links_i, "ramp_limit_up"] = 1 # [2] 2-4% per min - n.links.loc[links_i, "ramp_limit_start_up"] = 0.45 # [4] p.41 - n.links.loc[links_i, "ramp_limit_shut_down"] = 0.45 # [4] p.41 - - # coal - if "coal" in available_carriers: - links_i = get_filtered_links(["coal"]) - if len(links_i) > 0: - n.links.loc[links_i, "p_min_pu"] = 0.325 # [2] mean of Minimum load Most commonly used power plants - n.links.loc[links_i, "start_up_cost"] = 108 * 0.33 # [4] p.41 - n.links.loc[links_i, "min_up_time"] = 5 # mean of "Cold start-up time" [2] Most commonly used power plants - n.links.loc[links_i, "min_down_time"] = 6 # [3] Minimum offtime [hours], large plant - n.links.loc[links_i, "ramp_limit_up"] = 1 # [2] 1.5-4% per minute - n.links.loc[links_i, "ramp_limit_start_up"] = 0.38 # [4] p.41 - n.links.loc[links_i, "ramp_limit_shut_down"] = 0.38 # [4] 
p.41 + # If no carriers specified, use all available in uc_params + carriers_to_process = carriers if carriers is not None else list(uc_params.keys()) - # lignite - if "lignite" in available_carriers: - links_i = get_filtered_links(["lignite"]) - if len(links_i) > 0: - n.links.loc[links_i, "p_min_pu"] = 0.325 # 0.4 # [3] - n.links.loc[links_i, "start_up_cost"] = 58 * 0.33 # [4] p.41 - n.links.loc[links_i, "min_up_time"] = 7 # mean of "Cold start-up time" [2] Most commonly used power plants - n.links.loc[links_i, "min_down_time"] = 6 # [3] Minimum offtime [hours], large plant - n.links.loc[links_i, "ramp_limit_up"] = 1 # [2] 1-2% per minute - n.links.loc[links_i, "ramp_limit_start_up"] = 0.4 # [4] p.41 - n.links.loc[links_i, "ramp_limit_shut_down"] = 0.4 # [4] p.41 + available_carriers = set(carriers_to_process) & set(n.links.carrier.unique()) - # nuclear - if "nuclear" in available_carriers: - links_i = get_filtered_links(["nuclear"]) - if len(links_i) > 0: - n.links.loc[links_i, "p_min_pu"] = 0.5 # [3] - n.links.loc[links_i, "start_up_cost"] = 50 * 0.33 # [3] start-up depreciation costs Eur/MW - n.links.loc[links_i, "min_up_time"] = 6 # [1] - n.links.loc[links_i, "ramp_limit_up"] = 0.3 # [4] - n.links.loc[links_i, "min_down_time"] = 10 # [3] Minimum offtime [hours] - n.links.loc[links_i, "ramp_limit_start_up"] = 0.5 # [4] p.41 - n.links.loc[links_i, "ramp_limit_shut_down"] = 0.5 # [4] p.41 - - # oil - if "oil" in available_carriers: - links_i = get_filtered_links(["oil"]) - if len(links_i) > 0: - n.links.loc[links_i, "p_min_pu"] = 0.2 # [4] - n.links.loc[links_i, "start_up_cost"] = 1 * 0.35 # [4] start-up depreciation costs Eur/MW - n.links.loc[links_i, "ramp_limit_start_up"] = 0.2 # [4] p.41 - n.links.loc[links_i, "ramp_limit_shut_down"] = 0.2 # [4] p.41 - - # biomass - if "urban central solid biomass CHP" in available_carriers: - links_i = get_filtered_links(["urban central solid biomass CHP"]) - if len(links_i) > 0: - n.links.loc[links_i, "p_min_pu"] = 0.38 # [4] - n.links.loc[links_i, "start_up_cost"] = 78 * 0.27 # [4] - n.links.loc[links_i, "min_up_time"] = 2 # [4] - n.links.loc[links_i, "min_down_time"] = 2 # [4] - n.links.loc[links_i, "ramp_limit_start_up"] = 0.38 # [4] p.41 - n.links.loc[links_i, "ramp_limit_shut_down"] = 0.38 # [4] p.41 - - # Set committable flag for all processed carriers in specified regions - processed_carriers = [c for c in carriers if c in available_carriers] - if processed_carriers: - links_i = get_filtered_links(processed_carriers) - if len(links_i) > 0: - n.links.loc[links_i, "committable"] = True + for carrier in available_carriers: + links_i = get_filtered_links([carrier]) + if len(links_i) == 0: + continue + + # apply UC parameters from dict + for param, value in uc_params[carrier].items(): + if param in n.links.columns: + n.links.loc[links_i, param] = value + + # ensure committable flag + n.links.loc[links_i, "committable"] = True @@ -319,11 +361,23 @@ def fix_capacities(realization, decision, scope="DE", strict=False, no_flex=Fals no_flex=decision.meta.get("iiasa_database").get("no_flex_lt_run", False), ) - unit_commitment = snakemake.params.get("unit_commitment", False) + unit_commitment = snakemake.params.get("unit_commitment") + + if unit_commitment["enable"]: + logger.info(f"Add unit commitment in {unit_commitment['regions']} for carriers {unit_commitment['carriers']} with parameter set '{unit_commitment['params']}' to the network.") + + uc_params_str = unit_commitment["params"] + if uc_params_str == "custom": + uc_params = uc_params_custom + 
elif uc_params_str == "optimistic": + uc_params = uc_params_optimistic + elif uc_params_str == "conservative": + uc_params = uc_params_conservative + elif uc_params_str == "average": + uc_params = uc_params_average + + add_unit_commitment(n, uc_params, carriers=unit_commitment["carriers"], regions=unit_commitment["regions"]) - if unit_commitment: - logger.info("Add unit commitment to the network.") - add_unit_commitment(n, carriers=["coal", "lignite"], regions=["DE"]) if strict: logger.info( From ad631bf2da831265ac5919185ca9dca813854fd0 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 19 Sep 2025 14:12:57 +0200 Subject: [PATCH 124/202] copy loads_t from realization network; deactivate PTES via first_occurence year --- .gitignore | 1 + config/config.de.yaml | 6 +- config/scenarios.manual.yaml | 29 +++++ scripts/pypsa-de/prepare_regret_network.py | 133 +++++++++++++-------- scripts/pypsa-de/solve_regret_network.py | 4 +- 5 files changed, 119 insertions(+), 54 deletions(-) diff --git a/.gitignore b/.gitignore index d3b9924c9..173c9dfbe 100644 --- a/.gitignore +++ b/.gitignore @@ -85,6 +85,7 @@ cutouts # custom local files local +figures # private dev folder dev/* diff --git a/config/config.de.yaml b/config/config.de.yaml index ea1315238..6827101c1 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -281,10 +281,10 @@ first_technology_occurrence: H2 pipeline: 2025 H2 Electrolysis: 2025 H2 pipeline retrofitted: 2025 - urban central water pits charger: 2030 - urban central water pits discharger: 2030 + urban central water pits charger: 2035 + urban central water pits discharger: 2035 Store: - urban central water pits: 2030 + urban central water pits: 2035 costs: horizon: "mean" # "optimist", "pessimist" or "mean" diff --git a/config/scenarios.manual.yaml b/config/scenarios.manual.yaml index bf84165af..dbebe068a 100644 --- a/config/scenarios.manual.yaml +++ b/config/scenarios.manual.yaml @@ -29,6 +29,12 @@ HighDemand: 2030: 215 # PV strategy 2035: 309 2040: 400 # PV strategy + first_technology_occurrence: + Link: + urban central water pits charger: 2040 # essentially moving pits out of the optimization horizon to forbid them + urban central water pits discharger: 2040 + Store: + urban central water pits: 2040 HighDemandNoFlex: iiasa_database: @@ -59,6 +65,12 @@ HighDemandNoFlex: 2030: 215 # PV strategy 2035: 309 2040: 400 # PV strategy + first_technology_occurrence: + Link: + urban central water pits charger: 2040 # essentially moving pits out of the optimization horizon to forbid them + urban central water pits discharger: 2040 + Store: + urban central water pits: 2040 LowDemand: iiasa_database: @@ -82,6 +94,11 @@ LowDemand: 2025: 12.0 2030: 26.0 2035: 38.0 + # central_heat_pump_budgets: + # DE: # UBA MWMS, Projektionsbericht 2025, Abbildung 22 + # 2025: 7.0 + # 2030: 26.0 + # 2035: 52.0 limits_capacity_min: Generator: onwind: @@ -96,6 +113,12 @@ LowDemand: H2 electrolysis: DE: 2030: 0 + first_technology_occurrence: + Link: + urban central water pits charger: 2040 # essentially moving pits out of the optimization horizon to forbid them + urban central water pits discharger: 2040 + Store: + urban central water pits: 2040 LowDemandNoFlex: iiasa_database: @@ -134,6 +157,12 @@ LowDemandNoFlex: H2 electrolysis: DE: 2030: 0 + first_technology_occurrence: + Link: + urban central water pits charger: 2040 # essentially moving pits out of the optimization horizon to forbid them + urban central water pits discharger: 2040 + Store: + urban central water pits: 2040 ExPol: # UBA CO2 
pathway instead of KSG targets diff --git a/scripts/pypsa-de/prepare_regret_network.py b/scripts/pypsa-de/prepare_regret_network.py index 75f25cd91..5d657d70b 100644 --- a/scripts/pypsa-de/prepare_regret_network.py +++ b/scripts/pypsa-de/prepare_regret_network.py @@ -25,115 +25,150 @@ logger = logging.getLogger(__name__) -def add_unit_commitment(n, carriers=["OCGT", "CCGT", "coal", "lignite", "nuclear", "oil","urban central solid biomass CHP"], regions=["DE"]): +def add_unit_commitment( + n, + carriers=[ + "OCGT", + "CCGT", + "coal", + "lignite", + "nuclear", + "oil", + "urban central solid biomass CHP", + ], + regions=["DE"], +): """ Add unit commitment for conventionals based on https://discord.com/channels/914472852571426846/1042037164088766494/1042395972438868030 from DIW [1] https://www.diw.de/documents/publikationen/73/diw_01.c.424566.de/diw_datadoc_2013-068.pdf - + [2] update with p.48 https://www.agora-energiewende.de/fileadmin/Projekte/2017/Flexibility_in_thermal_plants/115_flexibility-report-WEB.pdf - + [3] SI Schill et al. p.26 https://static-content.springer.com/esm/art%3A10.1038%2Fnenergy.2017.50/MediaObjects/41560_2017_BFnenergy201750_MOESM196_ESM.pdf [4] MA https://zenodo.org/record/6421682 """ - + # Helper function to filter links by carrier and region def get_filtered_links(carrier_list): # Filter by carrier carrier_mask = n.links.carrier.isin(carrier_list) - + # Filter by region - check if bus0 or bus1 is in the specified regions - region_mask = ( - n.links.bus0.str.contains('|'.join(regions), na=False) | - n.links.bus1.str.contains('|'.join(regions), na=False) - ) - + region_mask = n.links.bus0.str.contains( + "|".join(regions), na=False + ) | n.links.bus1.str.contains("|".join(regions), na=False) + return n.links[carrier_mask & region_mask].index - + # Only process carriers that are both requested and available available_carriers = set(carriers) & set(n.links.carrier.unique()) - + # OCGT if "OCGT" in available_carriers: links_i = get_filtered_links(["OCGT"]) if len(links_i) > 0: - n.links.loc[links_i, "p_min_pu"] = 0.2 # [3] # removed since otherwise NL is not solving - n.links.loc[links_i, "start_up_cost"] = 24 * 0.4 # [3] start-up depreciation costs Eur/MW + n.links.loc[links_i, "p_min_pu"] = ( + 0.2 # [3] # removed since otherwise NL is not solving + ) + n.links.loc[links_i, "start_up_cost"] = ( + 24 * 0.4 + ) # [3] start-up depreciation costs Eur/MW n.links.loc[links_i, "ramp_limit_up"] = 1 # [2] 8-12% per min n.links.loc[links_i, "ramp_limit_start_up"] = 0.2 # [4] p.41 n.links.loc[links_i, "ramp_limit_shut_down"] = 0.2 # [4] p.41 # cold/warm start up time within minutes, complete ramp up within one hour - + # CCGT if "CCGT" in available_carriers: links_i = get_filtered_links(["CCGT"]) if len(links_i) > 0: - n.links.loc[links_i, "p_min_pu"] = 0.45 # [2] mean of Minimum load Most commonly used power plants - n.links.loc[links_i, "start_up_cost"] = 144 * 0.57 # [3] start-up depreciation costs Eur/MW, in [4] 144 - n.links.loc[links_i, "min_up_time"] = 3 # mean of "Cold start-up time" [2] Most commonly used power plants - n.links.loc[links_i, "min_down_time"] = 2 # [3] Minimum offtime [hours] + n.links.loc[links_i, "p_min_pu"] = ( + 0.45 # [2] mean of Minimum load Most commonly used power plants + ) + n.links.loc[links_i, "start_up_cost"] = ( + 144 * 0.57 + ) # [3] start-up depreciation costs Eur/MW, in [4] 144 + n.links.loc[links_i, "min_up_time"] = ( + 3 # mean of "Cold start-up time" [2] Most commonly used power plants + ) + n.links.loc[links_i, "min_down_time"] = 2 
# [3] Minimum offtime [hours] n.links.loc[links_i, "ramp_limit_up"] = 1 # [2] 2-4% per min n.links.loc[links_i, "ramp_limit_start_up"] = 0.45 # [4] p.41 - n.links.loc[links_i, "ramp_limit_shut_down"] = 0.45 # [4] p.41 - + n.links.loc[links_i, "ramp_limit_shut_down"] = 0.45 # [4] p.41 + # coal if "coal" in available_carriers: links_i = get_filtered_links(["coal"]) if len(links_i) > 0: - n.links.loc[links_i, "p_min_pu"] = 0.325 # [2] mean of Minimum load Most commonly used power plants - n.links.loc[links_i, "start_up_cost"] = 108 * 0.33 # [4] p.41 - n.links.loc[links_i, "min_up_time"] = 5 # mean of "Cold start-up time" [2] Most commonly used power plants - n.links.loc[links_i, "min_down_time"] = 6 # [3] Minimum offtime [hours], large plant + n.links.loc[links_i, "p_min_pu"] = ( + 0.325 # [2] mean of Minimum load Most commonly used power plants + ) + n.links.loc[links_i, "start_up_cost"] = 108 * 0.33 # [4] p.41 + n.links.loc[links_i, "min_up_time"] = ( + 5 # mean of "Cold start-up time" [2] Most commonly used power plants + ) + n.links.loc[links_i, "min_down_time"] = ( + 6 # [3] Minimum offtime [hours], large plant + ) n.links.loc[links_i, "ramp_limit_up"] = 1 # [2] 1.5-4% per minute - n.links.loc[links_i, "ramp_limit_start_up"] = 0.38 # [4] p.41 - n.links.loc[links_i, "ramp_limit_shut_down"] = 0.38 # [4] p.41 - + n.links.loc[links_i, "ramp_limit_start_up"] = 0.38 # [4] p.41 + n.links.loc[links_i, "ramp_limit_shut_down"] = 0.38 # [4] p.41 + # lignite if "lignite" in available_carriers: links_i = get_filtered_links(["lignite"]) if len(links_i) > 0: - n.links.loc[links_i, "p_min_pu"] = 0.325 # 0.4 # [3] - n.links.loc[links_i, "start_up_cost"] = 58 * 0.33 # [4] p.41 - n.links.loc[links_i, "min_up_time"] = 7 # mean of "Cold start-up time" [2] Most commonly used power plants - n.links.loc[links_i, "min_down_time"] = 6 # [3] Minimum offtime [hours], large plant + n.links.loc[links_i, "p_min_pu"] = 0.325 # 0.4 # [3] + n.links.loc[links_i, "start_up_cost"] = 58 * 0.33 # [4] p.41 + n.links.loc[links_i, "min_up_time"] = ( + 7 # mean of "Cold start-up time" [2] Most commonly used power plants + ) + n.links.loc[links_i, "min_down_time"] = ( + 6 # [3] Minimum offtime [hours], large plant + ) n.links.loc[links_i, "ramp_limit_up"] = 1 # [2] 1-2% per minute - n.links.loc[links_i, "ramp_limit_start_up"] = 0.4 # [4] p.41 - n.links.loc[links_i, "ramp_limit_shut_down"] = 0.4 # [4] p.41 - + n.links.loc[links_i, "ramp_limit_start_up"] = 0.4 # [4] p.41 + n.links.loc[links_i, "ramp_limit_shut_down"] = 0.4 # [4] p.41 + # nuclear if "nuclear" in available_carriers: links_i = get_filtered_links(["nuclear"]) if len(links_i) > 0: - n.links.loc[links_i, "p_min_pu"] = 0.5 # [3] - n.links.loc[links_i, "start_up_cost"] = 50 * 0.33 # [3] start-up depreciation costs Eur/MW - n.links.loc[links_i, "min_up_time"] = 6 # [1] + n.links.loc[links_i, "p_min_pu"] = 0.5 # [3] + n.links.loc[links_i, "start_up_cost"] = ( + 50 * 0.33 + ) # [3] start-up depreciation costs Eur/MW + n.links.loc[links_i, "min_up_time"] = 6 # [1] n.links.loc[links_i, "ramp_limit_up"] = 0.3 # [4] n.links.loc[links_i, "min_down_time"] = 10 # [3] Minimum offtime [hours] n.links.loc[links_i, "ramp_limit_start_up"] = 0.5 # [4] p.41 - n.links.loc[links_i, "ramp_limit_shut_down"] = 0.5 # [4] p.41 - + n.links.loc[links_i, "ramp_limit_shut_down"] = 0.5 # [4] p.41 + # oil if "oil" in available_carriers: links_i = get_filtered_links(["oil"]) if len(links_i) > 0: - n.links.loc[links_i, "p_min_pu"] = 0.2 # [4] - n.links.loc[links_i, "start_up_cost"] = 1 * 0.35 # [4] 
start-up depreciation costs Eur/MW + n.links.loc[links_i, "p_min_pu"] = 0.2 # [4] + n.links.loc[links_i, "start_up_cost"] = ( + 1 * 0.35 + ) # [4] start-up depreciation costs Eur/MW n.links.loc[links_i, "ramp_limit_start_up"] = 0.2 # [4] p.41 - n.links.loc[links_i, "ramp_limit_shut_down"] = 0.2 # [4] p.41 + n.links.loc[links_i, "ramp_limit_shut_down"] = 0.2 # [4] p.41 # biomass if "urban central solid biomass CHP" in available_carriers: links_i = get_filtered_links(["urban central solid biomass CHP"]) if len(links_i) > 0: - n.links.loc[links_i, "p_min_pu"] = 0.38 # [4] - n.links.loc[links_i, "start_up_cost"] = 78 * 0.27 # [4] - n.links.loc[links_i, "min_up_time"] = 2 # [4] + n.links.loc[links_i, "p_min_pu"] = 0.38 # [4] + n.links.loc[links_i, "start_up_cost"] = 78 * 0.27 # [4] + n.links.loc[links_i, "min_up_time"] = 2 # [4] n.links.loc[links_i, "min_down_time"] = 2 # [4] n.links.loc[links_i, "ramp_limit_start_up"] = 0.38 # [4] p.41 - n.links.loc[links_i, "ramp_limit_shut_down"] = 0.38 # [4] p.41 - + n.links.loc[links_i, "ramp_limit_shut_down"] = 0.38 # [4] p.41 + # Set committable flag for all processed carriers in specified regions processed_carriers = [c for c in carriers if c in available_carriers] if processed_carriers: @@ -142,7 +177,6 @@ def get_filtered_links(carrier_list): n.links.loc[links_i, "committable"] = True - def _unfix_bottlenecks(new, deci, name, extendable_i): if name == "links": # Links that have 0-cost and are extendable @@ -223,6 +257,7 @@ def fix_capacities(realization, decision, scope="DE", strict=False, no_flex=Fals # The constraints and loads are taken from the realization network n.global_constraints = realization.global_constraints.copy() n.loads = realization.loads.copy() + n.loads_t = realization.loads_t.copy() nominal_attrs = { "generators": "p_nom", diff --git a/scripts/pypsa-de/solve_regret_network.py b/scripts/pypsa-de/solve_regret_network.py index 846a2b3b1..fa5f2ca1a 100644 --- a/scripts/pypsa-de/solve_regret_network.py +++ b/scripts/pypsa-de/solve_regret_network.py @@ -33,10 +33,10 @@ clusters=27, opts="", sector_opts="none", - planning_horizons="2025", + planning_horizons="2030", decision="LowDemand", run="HighDemand", - sensitivity="gas_price_50", + sensitivity="base", ) configure_logging(snakemake) From 2a082a80a1e00abf39fdcf60446aded9c534b192 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 19 Sep 2025 12:20:12 +0000 Subject: [PATCH 125/202] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- Snakefile | 4 +++- scripts/pypsa-de/regret_plots.py | 8 ++++++-- scripts/pypsa-de/regret_plots_lt.py | 2 +- 3 files changed, 10 insertions(+), 4 deletions(-) diff --git a/Snakefile b/Snakefile index 50dbe18d1..1753ba8f2 100644 --- a/Snakefile +++ b/Snakefile @@ -997,7 +997,9 @@ rule prepare_regret_network: scope_to_fix=config_provider("iiasa_database", "regret_run", "scope_to_fix"), h2_vent=config_provider("iiasa_database", "regret_run", "h2_vent"), strict=config_provider("iiasa_database", "regret_run", "strict"), - unit_commitment=config_provider("iiasa_database", "regret_run", "unit_commitment"), + unit_commitment=config_provider( + "iiasa_database", "regret_run", "unit_commitment" + ), input: decision=RESULTS.replace("{run}", "{decision}") + "networks/base_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", diff --git a/scripts/pypsa-de/regret_plots.py b/scripts/pypsa-de/regret_plots.py index fd16ce6f4..4a1b1733b 100644 --- 
a/scripts/pypsa-de/regret_plots.py +++ b/scripts/pypsa-de/regret_plots.py @@ -110,7 +110,9 @@ def aggregate_by_keywords(opex_comp_agg, groups): # Plot electricity price duration curves - fig, ax = plt.subplots(figsize=(10, 5 * len(planning_horizons)), nrows=len(planning_horizons), ncols=1) + fig, ax = plt.subplots( + figsize=(10, 5 * len(planning_horizons)), nrows=len(planning_horizons), ncols=1 + ) ax = ax.flatten() decisions = ["decision_HighDemand", "decision_LowDemand"] @@ -161,7 +163,9 @@ def aggregate_by_keywords(opex_comp_agg, groups): "nice_names": False, } - fig, axes = plt.subplots(nrows=len(planning_horizons), ncols=1, figsize=(12, 6 * len(planning_horizons))) + fig, axes = plt.subplots( + nrows=len(planning_horizons), ncols=1, figsize=(12, 6 * len(planning_horizons)) + ) axes = axes.flatten() for i, year in enumerate(planning_horizons): diff --git a/scripts/pypsa-de/regret_plots_lt.py b/scripts/pypsa-de/regret_plots_lt.py index 95fbd0b47..4911a66b5 100644 --- a/scripts/pypsa-de/regret_plots_lt.py +++ b/scripts/pypsa-de/regret_plots_lt.py @@ -224,7 +224,7 @@ def plot_capacity_comparison( configure_logging(snakemake) config = snakemake.config planning_horizons = snakemake.params.planning_horizons - scenarios = ["HighDemand", "LowDemand"] #config["run"]["name"] + scenarios = ["HighDemand", "LowDemand"] # config["run"]["name"] tech_colors = snakemake.params.plotting["tech_colors"] # Load networks From 9c503bad3cb94181032d0601a43279fff5f45152 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 19 Sep 2025 15:46:05 +0200 Subject: [PATCH 126/202] re-activate transmission losses (because we optimize iteratively now) --- config/scenarios.manual.yaml | 4 ---- 1 file changed, 4 deletions(-) diff --git a/config/scenarios.manual.yaml b/config/scenarios.manual.yaml index dbebe068a..476064fcd 100644 --- a/config/scenarios.manual.yaml +++ b/config/scenarios.manual.yaml @@ -5,7 +5,6 @@ HighDemand: solving: options: - transmission_losses: 0 noisy_costs: false constraints: decentral_heat_pump_budgets: @@ -41,7 +40,6 @@ HighDemandNoFlex: no_flex_lt_run: true solving: options: - transmission_losses: 0 noisy_costs: false constraints: decentral_heat_pump_budgets: @@ -86,7 +84,6 @@ LowDemand: delay_years: 2 solving: options: - transmission_losses: 0 noisy_costs: false constraints: decentral_heat_pump_budgets: @@ -135,7 +132,6 @@ LowDemandNoFlex: delay_years: 2 solving: options: - transmission_losses: 0 noisy_costs: false constraints: decentral_heat_pump_budgets: From be35dece36eed69d73ef59787b377a7c61231fbd Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 19 Sep 2025 16:13:54 +0200 Subject: [PATCH 127/202] fix biomass trade variable --- scripts/pypsa-de/export_ariadne_variables.py | 47 ++++++++------------ 1 file changed, 18 insertions(+), 29 deletions(-) diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index 947b58861..bbb1ea433 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -4922,44 +4922,33 @@ def get_export_import_links(n, region, carriers): # Biomass Trade - biomass_potential_DE = ( - n.stores.query("carrier.str.contains('solid biomass')") - .filter(like=region, axis=0) - .e_nom.sum() + biomass_primary_gens = n.generators.query( + f"index.str.startswith('{region}') and index.str.endswith('solid biomass')" + ) # use endswith to avoid the biomass transport generators + biomass_transport_gens = n.generators.query( + f"index.str.startswith('{region}') and 
index.str.endswith('solid biomass transported')" ) - biomass_usage_local = ( - n.stores_t.p[ - n.stores.query("carrier.str.contains('solid biomass')") - .filter(like=region, axis=0) - .index - ] - .sum() - .multiply(n.snapshot_weightings["stores"].unique().item()) - .sum() - ) + local_biomass_potential = biomass_primary_gens.e_sum_max.sum() - biomass_usage_transported = ( - n.generators_t.p[ - n.generators.query("carrier.str.contains('solid biomass')") - .filter(like=region, axis=0) - .index - ] - .sum() - .multiply(n.snapshot_weightings["generators"].unique().item()) + local_biomass_usage = ( + ( + n.generators_t.p[ + biomass_primary_gens.index.union(biomass_transport_gens.index) + ] + ) + .sum(axis=1) + .multiply(n.snapshot_weightings.generators) .sum() ) - biomass_net_exports = ( - biomass_potential_DE - biomass_usage_local - biomass_usage_transported - ) - var["Trade|Primary Energy|Biomass|Volume"] = biomass_net_exports + biomass_imports = local_biomass_usage - local_biomass_potential + + var["Trade|Primary Energy|Biomass|Net Imports"] = biomass_imports logger.info( f"""Share of imported biomass: { - round( - -biomass_net_exports / (biomass_potential_DE + biomass_net_exports), 3 - ) + round(biomass_imports / local_biomass_usage, 3) }""" ) From 4109c8688d6fc249d8fc64f2bab0cf216f1796b4 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Mon, 22 Sep 2025 12:19:06 +0200 Subject: [PATCH 128/202] first attempts at fixing the cluster runs --- config/config.de.yaml | 2 +- scripts/pypsa-de/additional_functionality.py | 50 -------------------- scripts/pypsa-de/modify_prenetwork.py | 12 ++--- scripts/pypsa-de/prepare_regret_network.py | 3 +- 4 files changed, 8 insertions(+), 59 deletions(-) diff --git a/config/config.de.yaml b/config/config.de.yaml index ba57f9885..52b1be9ad 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -498,7 +498,7 @@ solving: DE: 2020: 54.5 2025: 69 - 2030: 115 # EEG2023 Ziel für 2030 + 2030: 120 # EEG2023 Ziel für 2030 + 5 GW 2035: 160 # EEG2023 Ziel für 2040 2040: 250 2045: 250 diff --git a/scripts/pypsa-de/additional_functionality.py b/scripts/pypsa-de/additional_functionality.py index 60a4a4972..80d6a4ab6 100644 --- a/scripts/pypsa-de/additional_functionality.py +++ b/scripts/pypsa-de/additional_functionality.py @@ -610,55 +610,6 @@ def add_decentral_heat_pump_budgets(n, decentral_heat_pump_budgets, investment_y ) -def force_boiler_profiles_existing_per_load(n): - """ - This scales the boiler dispatch to the load profile with a factor common to - all boilers at load. 
- """ - - logger.info("Forcing boiler profiles for existing ones") - - decentral_boilers = n.links.index[ - n.links.carrier.str.contains("boiler") - & ~n.links.carrier.str.contains("urban central") - & ~n.links.p_nom_extendable - ] - - if decentral_boilers.empty: - return - - boiler_loads = n.links.loc[decentral_boilers, "bus1"] - boiler_loads = boiler_loads[boiler_loads.isin(n.loads_t.p_set.columns)] - decentral_boilers = boiler_loads.index - boiler_profiles_pu = n.loads_t.p_set[boiler_loads].div( - n.loads_t.p_set[boiler_loads].max(), axis=1 - ) - boiler_profiles_pu.columns = decentral_boilers - boiler_profiles = DataArray( - boiler_profiles_pu.multiply(n.links.loc[decentral_boilers, "p_nom"], axis=1) - ) - - boiler_load_index = pd.Index(boiler_loads.unique()) - boiler_load_index.name = "Load" - - # per load scaling factor - n.model.add_variables(coords=[boiler_load_index], name="Load-profile_factor") - - # clumsy indicator matrix to map boilers to loads - df = pd.DataFrame(index=boiler_load_index, columns=decentral_boilers, data=0.0) - for k, v in boiler_loads.items(): - df.loc[v, k] = 1.0 - - lhs = n.model["Link-p"].loc[:, decentral_boilers] - ( - boiler_profiles * DataArray(df) * n.model["Load-profile_factor"] - ).sum("Load") - - n.model.add_constraints(lhs, "=", 0, "Link-fixed_profile") - - # hack so that PyPSA doesn't complain there is nowhere to store the variable - n.loads["profile_factor_opt"] = 0.0 - - def force_boiler_profiles_existing_per_boiler(n): """ This scales each boiler dispatch to be proportional to the load profile. @@ -868,7 +819,6 @@ def additional_functionality(n, snapshots, snakemake): add_power_limits(n, investment_year, constraints["limits_power_max"]) - # force_boiler_profiles_existing_per_load(n) force_boiler_profiles_existing_per_boiler(n) if isinstance(constraints.get("decentral_heat_pump_budgets"), dict): diff --git a/scripts/pypsa-de/modify_prenetwork.py b/scripts/pypsa-de/modify_prenetwork.py index 8a9592e1b..961fb6af3 100644 --- a/scripts/pypsa-de/modify_prenetwork.py +++ b/scripts/pypsa-de/modify_prenetwork.py @@ -15,18 +15,16 @@ def first_technology_occurrence(n): """ - Sets p_nom_extendable to false for carriers with configured first - occurrence if investment year is before configured year. + Drop configured technologies before configured year. """ for c, carriers in snakemake.params.technology_occurrence.items(): for carrier, first_year in carriers.items(): if int(snakemake.wildcards.planning_horizons) < first_year: - attr = "e_nom" if c == "Store" else "p_nom" - logger.info( - f"{carrier} is configured to be not extendable before {first_year}." 
- ) - n.df(c).loc[n.df(c).carrier == carrier, f"{attr}_extendable"] = False + to_drop = n.df(c).query(f"carrier == '{carrier}'").index + if to_drop.empty: + continue + n.remove(c, to_drop) def fix_new_boiler_profiles(n): diff --git a/scripts/pypsa-de/prepare_regret_network.py b/scripts/pypsa-de/prepare_regret_network.py index 58d9bb77a..298075254 100644 --- a/scripts/pypsa-de/prepare_regret_network.py +++ b/scripts/pypsa-de/prepare_regret_network.py @@ -416,7 +416,8 @@ def fix_capacities(realization, decision, scope="DE", strict=False, no_flex=Fals # The constraints and loads are taken from the realization network n.global_constraints = realization.global_constraints.copy() n.loads = realization.loads.copy() - n.loads_t = realization.loads_t.copy() + # Copy the whole realization network, because copying loads_t directly is not type stable + n.loads_t = realization.copy().loads_t nominal_attrs = { "generators": "p_nom", From 345695ac27d55ac41becfdde874453eddff9ca20 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Tue, 23 Sep 2025 16:33:58 +0200 Subject: [PATCH 129/202] Limiting interconnectors: no transmission losses, no line expansion - only NEP, no post-discretization --- config/config.de.yaml | 7 ++++--- config/scenarios.manual.yaml | 12 ++++++++++++ scripts/pypsa-de/modify_prenetwork.py | 14 +++++++++----- 3 files changed, 25 insertions(+), 8 deletions(-) diff --git a/config/config.de.yaml b/config/config.de.yaml index 52b1be9ad..218da3721 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -4,13 +4,14 @@ # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#run run: - prefix: 20250919_regret_uc + prefix: 20250919_regret_less_interconnectors name: # - ExPol # - KN2045_Mix - HighDemand - LowDemand - - HighDemandNoFlex # !!! CAVEAT The 'NoFlex' substring in the scenario name determines how the regrets are computed!!!
+ - LowDemandNoFlex # - KN2045_NFhoch scenarios: enable: true @@ -460,7 +461,7 @@ solving: options: assign_all_duals: true load_shedding: false - skip_iterations: false # settings for post-discretization: false + skip_iterations: true # settings for post-discretization: false min_iterations: 1 # settings for post-discretization: 1 max_iterations: 1 # settings for post-discretization: 1 post_discretization: diff --git a/config/scenarios.manual.yaml b/config/scenarios.manual.yaml index 476064fcd..8ebe81846 100644 --- a/config/scenarios.manual.yaml +++ b/config/scenarios.manual.yaml @@ -3,8 +3,11 @@ # # SPDX-License-Identifier: MIT HighDemand: + electricity: + transmission_limit: v1 solving: options: + transmission_losses: 0 noisy_costs: false constraints: decentral_heat_pump_budgets: @@ -36,10 +39,13 @@ HighDemand: urban central water pits: 2040 HighDemandNoFlex: + electricity: + transmission_limit: v1 iiasa_database: no_flex_lt_run: true solving: options: + transmission_losses: 0 noisy_costs: false constraints: decentral_heat_pump_budgets: @@ -71,6 +77,8 @@ HighDemandNoFlex: urban central water pits: 2040 LowDemand: + electricity: + transmission_limit: v1 iiasa_database: uba_for_industry: - 2025 @@ -84,6 +92,7 @@ LowDemand: delay_years: 2 solving: options: + transmission_losses: 0 noisy_costs: false constraints: decentral_heat_pump_budgets: @@ -118,6 +127,8 @@ LowDemand: urban central water pits: 2040 LowDemandNoFlex: + electricity: + transmission_limit: v1 iiasa_database: no_flex_lt_run: true uba_for_industry: @@ -132,6 +143,7 @@ LowDemandNoFlex: delay_years: 2 solving: options: + transmission_losses: 0 noisy_costs: false constraints: decentral_heat_pump_budgets: diff --git a/scripts/pypsa-de/modify_prenetwork.py b/scripts/pypsa-de/modify_prenetwork.py index 961fb6af3..c2864e187 100644 --- a/scripts/pypsa-de/modify_prenetwork.py +++ b/scripts/pypsa-de/modify_prenetwork.py @@ -1203,13 +1203,17 @@ def drop_duplicate_transmission_projects(n): n.links.loc[to_deactivate, "active"] = False -def deactivate_late_transmission_projects(n): - year = snakemake.params.onshore_nep_force["cutout_year"] +def deactivate_late_transmission_projects(n, current_year): + nep_year = snakemake.params.onshore_nep_force["cutout_year"] - to_deactivate = n.links.query(f"carrier == 'DC' and build_year > {year}").index + cutout_year = min(nep_year, current_year) + + to_deactivate = n.links.query( + f"carrier == 'DC' and build_year > {cutout_year}" + ).index n.links.loc[to_deactivate, "active"] = False - to_deactivate = n.lines.query(f"build_year > {year}").index + to_deactivate = n.lines.query(f"build_year > {cutout_year}").index n.lines.loc[to_deactivate, "active"] = False @@ -1516,7 +1520,7 @@ def remove_flexibility_options(n): ) # For regret runs - deactivate_late_transmission_projects(n) + deactivate_late_transmission_projects(n, current_year) if snakemake.params.no_flex_lt_run: logger.info("Run without flexibility options detected.") From 29daaf8376cd78519e0e904f6707a0b1ba7b2b49 Mon Sep 17 00:00:00 2001 From: JulianGeis Date: Tue, 23 Sep 2025 16:49:40 +0200 Subject: [PATCH 130/202] unit commitment and scaling of cross border elec capa --- Snakefile | 3 + config/config.de.yaml | 5 +- scripts/pypsa-de/prepare_regret_network.py | 174 +++++++++++++++++++-- 3 files changed, 168 insertions(+), 14 deletions(-) diff --git a/Snakefile b/Snakefile index 1753ba8f2..ffe8e8ccf 100644 --- a/Snakefile +++ b/Snakefile @@ -1000,6 +1000,9 @@ rule prepare_regret_network: unit_commitment=config_provider( "iiasa_database", 
"regret_run", "unit_commitment" ), + scale_cross_border_elec_capa=config_provider( + "iiasa_database", "regret_run", "scale_cross_border_elec_capa" + ), input: decision=RESULTS.replace("{run}", "{decision}") + "networks/base_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", diff --git a/config/config.de.yaml b/config/config.de.yaml index 218da3721..ff17a95fb 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -55,8 +55,9 @@ iiasa_database: unit_commitment: enable: false params: custom # options: conservative, average, optimistic, custom - carriers: ["OCGT", "coal", "lignite", "urban central solid biomass CHP"] # subset of ["OCGT", "CCGT", "coal", "lignite", "nuclear", "oil","urban central solid biomass CHP"] - regions: ["DE"] + carriers: ["OCGT", "CCGT", "coal", "lignite", "nuclear", "oil", "urban central solid biomass CHP"] # subset of ["OCGT", "CCGT", "coal", "lignite", "nuclear", "oil","urban central solid biomass CHP"] + regions: ['DE'] # subset of ['AT', 'BE', 'CH', 'CZ', 'DE', 'DK', 'FR', 'GB', 'LU', 'NL', 'NO', 'PL', 'SE', 'ES', 'IT'] + scale_cross_border_elec_capa: false # If true, scales cross-border electricity capacities to target values given in prepare_regret_network.py ageb_for_mobility: true # In 2020 use AGEB data for final energy demand and KBA for vehicles uba_for_mobility: # For 2025–2035 use MWMS scenario from UBA Projektionsbericht 2025 - 2025 diff --git a/scripts/pypsa-de/prepare_regret_network.py b/scripts/pypsa-de/prepare_regret_network.py index 298075254..51b796dbf 100644 --- a/scripts/pypsa-de/prepare_regret_network.py +++ b/scripts/pypsa-de/prepare_regret_network.py @@ -5,6 +5,7 @@ import numpy as np import pypsa +import pandas as pd from scripts._helpers import ( configure_logging, @@ -28,6 +29,7 @@ "OCGT": { "p_min_pu": 0.2, "start_up_cost": 20, + "shut_down_cost": 20, "min_up_time": 1, "min_down_time": 1, "ramp_limit_up": 1, @@ -35,42 +37,48 @@ "CCGT": { "p_min_pu": 0.45, "start_up_cost": 80, + "shut_down_cost": 80, "min_up_time": 3, "min_down_time": 2, "ramp_limit_up": 1, }, "coal": { - "p_min_pu": 0.325, - "start_up_cost": 60, - "min_up_time": 6, - "min_down_time": 6, + "p_min_pu": 0.5, + "start_up_cost": 200, + "shut_down_cost": 200, + "min_up_time": 24, + "min_down_time": 24, "ramp_limit_up": 1, }, "lignite": { - "p_min_pu": 0.325, - "start_up_cost": 80, - "min_up_time": 10, - "min_down_time": 10, + "p_min_pu": 0.5, + "start_up_cost": 200, + "shut_down_cost": 200, + "min_up_time": 24, + "min_down_time": 24, "ramp_limit_up": 1, }, "nuclear": { "p_min_pu": 0.5, "start_up_cost": 100, + "shut_down_cost": 100, "min_up_time": 8, "min_down_time": 10, }, "oil": { "p_min_pu": 0.2, "start_up_cost": 30, + "shut_down_cost": 30, "min_up_time": 1, "min_down_time": 1, "ramp_limit_up": 1, }, "urban central solid biomass CHP": { - "p_min_pu": 0.38, - "start_up_cost": 50, - "min_up_time": 2, - "min_down_time": 2, + "p_min_pu": 0.5, + "start_up_cost": 150, + "shut_down_cost": 150, + "min_up_time": 5, + "min_down_time": 5, }, } @@ -336,6 +344,140 @@ def get_filtered_links(carrier_list): n.links.loc[links_i, "committable"] = True +target_caps = { + "GB": 0, + "CH": 8000, + "CZ": 5500, + "NL": 8000, + "FR": 9500, + "PL": 4500, + "NO": 2500, + "BE": 0, + "DK": 6000, + "AT": 6000, + "LU": 1500, + "SE": 1000, +} + + +def scale_transmission_capacity(n, target_capacities): + """ + Scale transmission capacities in PyPSA network to match target values. 
+ + Parameters + ---------- + n : pypsa.Network + PyPSA network object + target_capacities : pd.Series or dict + Target transmission capacities by country in MW (already rounded) + """ + + if isinstance(target_capacities, dict): + target_capacities = pd.Series(target_capacities) + + # Calculate current capacities (without reversed links) + countries_ac = ["AT", "CH", "CZ", "DK", "FR", "LU", "NL", "PL"] + countries_dc = ["BE", "FR", "GB", "DK", "SE", "NO", "CH"] + + current_capa = pd.DataFrame( + data=0, + index=list(set(countries_ac + countries_dc)), + columns=["AC_MW", "DC_MW", "Total_MW"], + ) + + # AC lines + for ct in countries_ac: + ac_lines = n.lines # No need to filter out reversed for AC lines + ind = ac_lines[ + (ac_lines.bus0.str.startswith("DE") & ac_lines.bus1.str.startswith(ct)) + | (ac_lines.bus0.str.startswith(ct) & ac_lines.bus1.str.startswith("DE")) + ].index + current_capa.loc[ct, "AC_MW"] = n.lines.loc[ind, "s_nom_opt"].sum() + + # DC links + for ct in countries_dc: + dc_links = n.links[ + n.links.carrier.isin(["DC"]) & ~n.links.index.str.contains("reversed") + ] + ind = dc_links[ + (dc_links.bus0.str.startswith("DE") & dc_links.bus1.str.startswith(ct)) + | (dc_links.bus0.str.startswith(ct) & dc_links.bus1.str.startswith("DE")) + ].index + current_capa.loc[ct, "DC_MW"] = n.links.loc[ind, "p_nom_opt"].sum() + + current_capa["Total_MW"] = current_capa["AC_MW"] + current_capa["DC_MW"] + + # Calculate scaling factors + for country in target_capacities.index: + if country in current_capa.index: + current_total = current_capa.loc[country, "Total_MW"] + target_total = target_capacities[country] + + if current_total > 0: + scaling_factor = target_total / current_total + + print( + f"{country}: {current_total:.0f} MW -> {target_total:.0f} MW (factor: {scaling_factor:.3f})" + ) + + # Scale AC lines (no reversed links for AC) + if current_capa.loc[country, "AC_MW"] > 0: + ac_lines = n.lines # No need to filter out reversed for AC lines + ac_ind = ac_lines[ + ( + ac_lines.bus0.str.startswith("DE") + & ac_lines.bus1.str.startswith(country) + ) + | ( + ac_lines.bus0.str.startswith(country) + & ac_lines.bus1.str.startswith("DE") + ) + ].index + + # Scale AC lines + n.lines.loc[ac_ind, "s_nom_opt"] *= scaling_factor + n.lines.loc[ac_ind, "s_nom"] *= scaling_factor + + # Scale DC links + if current_capa.loc[country, "DC_MW"] > 0: + dc_links = n.links[ + n.links.carrier.isin(["DC"]) + & ~n.links.index.str.contains("reversed") + ] + dc_ind = dc_links[ + ( + dc_links.bus0.str.startswith("DE") + & dc_links.bus1.str.startswith(country) + ) + | ( + dc_links.bus0.str.startswith(country) + & dc_links.bus1.str.startswith("DE") + ) + ].index + + # Scale main DC links + n.links.loc[dc_ind, "p_nom_opt"] *= scaling_factor + n.links.loc[dc_ind, "p_nom"] *= scaling_factor + + # Scale reversed DC links to match + for link_id in dc_ind: + reversed_id = link_id + "-reversed" + if reversed_id in n.links.index: + n.links.loc[reversed_id, "p_nom_opt"] = n.links.loc[ + link_id, "p_nom_opt" + ] + n.links.loc[reversed_id, "p_nom"] = n.links.loc[ + link_id, "p_nom" + ] + + elif target_total > 0: + logger.info( + f"WARNING: {country} has target capacity {target_total:.0f} MW but no current capacity to scale" + ) + else: + logger.info(f"{country}: Target is 0 MW - no scaling needed") + + def _unfix_bottlenecks(new, deci, name, extendable_i): if name == "links": # Links that have 0-cost and are extendable @@ -538,6 +680,14 @@ def fix_capacities(realization, decision, scope="DE", strict=False, no_flex=Fals 
regions=unit_commitment["regions"], ) + scale_cross_border_elec_capa = snakemake.params.get( + "scale_cross_border_elec_capa", False + ) + + if scale_cross_border_elec_capa: + logger.info("Scaling cross-border electricity capacities to target values.") + scale_transmission_capacity(n, target_caps) + if strict: logger.info( "Strict regret run chosen. No capacities are extendable. Activating load shedding to prevent infeasibilites." From cc5ef355904c72c05e24944181e394335be33ec9 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 23 Sep 2025 14:49:58 +0000 Subject: [PATCH 131/202] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/pypsa-de/prepare_regret_network.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/pypsa-de/prepare_regret_network.py b/scripts/pypsa-de/prepare_regret_network.py index 51b796dbf..f499fd6c2 100644 --- a/scripts/pypsa-de/prepare_regret_network.py +++ b/scripts/pypsa-de/prepare_regret_network.py @@ -4,8 +4,8 @@ import pathlib import numpy as np -import pypsa import pandas as pd +import pypsa from scripts._helpers import ( configure_logging, From 8e277b470720f122bab3526146da956d29e4137e Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Tue, 23 Sep 2025 16:56:55 +0200 Subject: [PATCH 132/202] export hypothetical subsidy for limiting electricity price to 4000 --- scripts/pypsa-de/export_ariadne_variables.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index bbb1ea433..b5fb220df 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -3547,11 +3547,16 @@ def get_prices(n, region): nodal_prices_ac = n.buses_t.marginal_price[nodal_flows_ac.columns].clip( upper=max_elec_price ) + nodal_subsidies_ac = ( + n.buses_t.marginal_price[nodal_flows_ac.columns] - nodal_prices_ac + ) var["Price|Secondary Energy|Electricity"] = ( nodal_flows_ac.mul(nodal_prices_ac).values.sum() / nodal_flows_ac.values.sum() ) + var["Subsidy|Electricity"] = nodal_flows_ac.mul(nodal_subsidies_ac).values.sum() + # Price|Secondary Energy|Gases|Natural Gas var["Price|Secondary Energy|Gases|Natural Gas"] = ( costs_gen_generators(n, region, "gas")[0] + co2_cost_gas From 4679b1cc3b47597985ac43e08878fa218ef653f9 Mon Sep 17 00:00:00 2001 From: JulianGeis Date: Tue, 23 Sep 2025 17:13:22 +0200 Subject: [PATCH 133/202] restrict cross border flows of ac lines --- Snakefile | 3 +++ config/config.de.yaml | 5 ++++- scripts/pypsa-de/modify_prenetwork.py | 16 +++++++++++++++- 3 files changed, 22 insertions(+), 2 deletions(-) diff --git a/Snakefile b/Snakefile index ffe8e8ccf..646672a8a 100644 --- a/Snakefile +++ b/Snakefile @@ -560,6 +560,9 @@ rule modify_prenetwork: scale_industry_non_energy=config_provider( "iiasa_database", "scale_industry_non_energy" ), + restrict_cross_border_flows=config_provider( + "iiasa_database", "restrict_cross_border_flows" + ), input: costs_modifications="ariadne-data/costs_{planning_horizons}-modifications.csv", network=resources( diff --git a/config/config.de.yaml b/config/config.de.yaml index ff17a95fb..d512f262f 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -4,7 +4,7 @@ # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#run run: - prefix: 20250919_regret_less_interconnectors + prefix: 20250923_regret_uc_scaleCBF name: # - ExPol # - 
KN2045_Mix @@ -65,6 +65,9 @@ iiasa_database: - 2025 no_flex_lt_run: false # If true, removes all flexibility options scale_industry_non_energy: false # Scale non-energy industry demand directly proportional to energy demand + restrict_cross_border_flows: + 2025: 0.5 + 2030: 0.6 # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#foresight foresight: myopic diff --git a/scripts/pypsa-de/modify_prenetwork.py b/scripts/pypsa-de/modify_prenetwork.py index c2864e187..8323ec749 100644 --- a/scripts/pypsa-de/modify_prenetwork.py +++ b/scripts/pypsa-de/modify_prenetwork.py @@ -1423,6 +1423,15 @@ def remove_flexibility_options(n): n.remove("Bus", n.buses.query("carrier in @carriers_to_drop").index) +def restrict_cross_border_flows(n, s_max_pu): + logger.info(f"Restricting cross-border flows to {s_max_pu}.") + ind = n.lines[ + (n.lines.bus0.str.startswith("DE") & ~n.lines.bus1.str.startswith("DE")) + | (~n.lines.bus0.str.startswith("DE") & n.lines.bus1.str.startswith("DE")) + ].index + n.lines.loc[ind, "s_max_pu"] = s_max_pu + + if __name__ == "__main__": if "snakemake" not in globals(): snakemake = mock_snakemake( @@ -1433,7 +1442,7 @@ def remove_flexibility_options(n): ll="vopt", sector_opts="none", planning_horizons="2025", - run="KN2045_Mix", + run="HighDemand", ) configure_logging(snakemake) @@ -1528,4 +1537,9 @@ def remove_flexibility_options(n): fix_transmission_DE(n) + if current_year in snakemake.params.restrict_cross_border_flows: + restrict_cross_border_flows( + n, snakemake.params.restrict_cross_border_flows[current_year] + ) + n.export_to_netcdf(snakemake.output.network) From 15e387601ff15d8dbc74286736508866a603e047 Mon Sep 17 00:00:00 2001 From: JulianGeis Date: Tue, 23 Sep 2025 18:09:48 +0200 Subject: [PATCH 134/202] splot power limit for elec trade into import and export constraint --- scripts/pypsa-de/additional_functionality.py | 31 ++++++++------------ 1 file changed, 12 insertions(+), 19 deletions(-) diff --git a/scripts/pypsa-de/additional_functionality.py b/scripts/pypsa-de/additional_functionality.py index 80d6a4ab6..f2a1b67a9 100644 --- a/scripts/pypsa-de/additional_functionality.py +++ b/scripts/pypsa-de/additional_functionality.py @@ -106,16 +106,14 @@ def add_capacity_limits( def add_power_limits(n, investment_year, limits_power_max): """ - " Restricts the maximum inflow/outflow of electricity from/to a country. + Restricts the maximum inflow and outflow of electricity from/to a country separately. 
""" for ct in limits_power_max: if investment_year not in limits_power_max[ct].keys(): continue - limit = 1e3 * limits_power_max[ct][investment_year] / 10 - logger.info( - f"Adding constraint on electricity import/export from/to {ct} to be < {limit} MW" + f"Adding constraints on electricity import and export from/to {ct} to be < {limit * 10} MW each" ) incoming_line = n.lines.index[ (n.lines.carrier == "AC") @@ -127,7 +125,6 @@ def add_power_limits(n, investment_year, limits_power_max): & (n.lines.bus0.str[:2] == ct) & (n.lines.bus1.str[:2] != ct) ] - incoming_link = n.links.index[ (n.links.carrier == "DC") & (n.links.bus0.str[:2] != ct) @@ -139,29 +136,25 @@ def add_power_limits(n, investment_year, limits_power_max): & (n.links.bus1.str[:2] != ct) ] - # iterate over snapshots - otherwise exporting of postnetwork fails since - # the constraints are time dependent + # iterate over snapshots for t in n.snapshots: incoming_line_p = n.model["Line-s"].loc[t, incoming_line] outgoing_line_p = n.model["Line-s"].loc[t, outgoing_line] incoming_link_p = n.model["Link-p"].loc[t, incoming_link] outgoing_link_p = n.model["Link-p"].loc[t, outgoing_link] - lhs = ( - incoming_link_p.sum() - - outgoing_link_p.sum() - + incoming_line_p.sum() - - outgoing_line_p.sum() - ) / 10 - # divide by 10 to avoid numerical issues + # Total inflow (imports) - only positive flows into the country + inflow_lhs = (incoming_link_p.sum() + incoming_line_p.sum()) / 10 - cname_upper = f"Power-import-limit-{ct}-{t}" - cname_lower = f"Power-export-limit-{ct}-{t}" + # Total outflow (exports) - only positive flows out of the country + outflow_lhs = (outgoing_link_p.sum() + outgoing_line_p.sum()) / 10 - n.model.add_constraints(lhs <= limit, name=cname_upper) - n.model.add_constraints(lhs >= -limit, name=cname_lower) + # Separate constraints for inflow and outflow + cname_inflow = f"Power-inflow-limit-{ct}-{t}" + cname_outflow = f"Power-outflow-limit-{ct}-{t}" - # not adding to network as the shadow prices are not needed + n.model.add_constraints(inflow_lhs <= limit, name=cname_inflow) + n.model.add_constraints(outflow_lhs <= limit, name=cname_outflow) def h2_import_limits(n, investment_year, limits_volume_max): From a32e38dbe6c147981e39c2826e8bb215ed4975b8 Mon Sep 17 00:00:00 2001 From: JulianGeis Date: Tue, 23 Sep 2025 18:22:35 +0200 Subject: [PATCH 135/202] fix regret ST plots --- Snakefile | 20 ++++++++++++-------- config/config.de.yaml | 2 +- scripts/pypsa-de/regret_plots.py | 18 +++++++++--------- 3 files changed, 22 insertions(+), 18 deletions(-) diff --git a/Snakefile b/Snakefile index 646672a8a..3aef346d9 100644 --- a/Snakefile +++ b/Snakefile @@ -1197,23 +1197,27 @@ rule regret_plots_lt: rule regret_plots: params: - scenarios=get_scenarios(run), + scenarios=config["run"]["name"], + scenarios_config=get_scenarios(run), planning_horizons=config_provider("scenario", "planning_horizons"), plotting=config_provider("plotting"), output_dir=directory( - f"results/{config['run']['prefix']}/regret_plots/ST_comparison" + f"results/{config['run']['prefix']}/regret_plots/{{sensitivity}}/ST_comparison" ), + input: - regret_networks=expand( - RESULTS - + "regret_networks/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", - **config["scenario"], - allow_missing=True, + regret_networks=lambda wildcards: expand( + f"results/{config['run']['prefix']}/{{run}}/regret_networks/{wildcards.sensitivity}/decision_{{decision}}_s_{{clusters}}_{{opts}}_{{sector_opts}}_{{planning_horizons}}.nc", run=config["run"]["name"], 
decision=config["run"]["name"], + clusters=config["scenario"]["clusters"], + opts=config["scenario"]["opts"], + sector_opts=config["scenario"]["sector_opts"], + planning_horizons=config["scenario"]["planning_horizons"], + allow_missing=True, ), output: - elec_price_comp_de=f"results/{config['run']['prefix']}/regret_plots/ST_comparison/elec_price_comp_de.png", + elec_price_comp_de=f"results/{config['run']['prefix']}/regret_plots/{{sensitivity}}/ST_comparison/elec_price_comp_de.png", resources: mem_mb=32000, script: diff --git a/config/config.de.yaml b/config/config.de.yaml index d512f262f..67f1f43a9 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -4,7 +4,7 @@ # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#run run: - prefix: 20250923_regret_uc_scaleCBF + prefix: 20250923_split_power_limit name: # - ExPol # - KN2045_Mix diff --git a/scripts/pypsa-de/regret_plots.py b/scripts/pypsa-de/regret_plots.py index 4a1b1733b..c3759bffe 100644 --- a/scripts/pypsa-de/regret_plots.py +++ b/scripts/pypsa-de/regret_plots.py @@ -64,14 +64,16 @@ def aggregate_by_keywords(opex_comp_agg, groups): from _helpers import mock_snakemake - snakemake = mock_snakemake( - "regret_plots", - ) + snakemake = mock_snakemake( + "regret_plots", + sensitivity="gas_price_60", + ) configure_logging(snakemake) config = snakemake.config planning_horizons = snakemake.params.planning_horizons - scenarios = ["HighDemand", "LowDemand"] + scenarios = snakemake.params.scenarios + decisions = ["decision_" + d for d in scenarios] tech_colors = snakemake.params.plotting["tech_colors"] # Nested dict: networks[year][scenario][decision] = Network @@ -81,7 +83,7 @@ def aggregate_by_keywords(opex_comp_agg, groups): parts = fn.split(os.sep) # scenario is the folder name 2 levels up - scenario = parts[-3] + scenario = parts[-4] if scenario not in scenarios: raise ValueError( f"Unexpected scenario '{scenario}' in {fn}. 
Allowed: {scenarios}" @@ -111,12 +113,10 @@ def aggregate_by_keywords(opex_comp_agg, groups): # Plot electricity price duration curves fig, ax = plt.subplots( - figsize=(10, 5 * len(planning_horizons)), nrows=len(planning_horizons), ncols=1 + figsize=(4*len(scenarios), 5 * len(planning_horizons)), nrows=len(planning_horizons), ncols=1 ) ax = ax.flatten() - decisions = ["decision_HighDemand", "decision_LowDemand"] - for i, year in enumerate(planning_horizons): for scenario, decision in itertools.product(scenarios, decisions): n = networks[year][scenario][decision] @@ -164,7 +164,7 @@ def aggregate_by_keywords(opex_comp_agg, groups): } fig, axes = plt.subplots( - nrows=len(planning_horizons), ncols=1, figsize=(12, 6 * len(planning_horizons)) + nrows=len(planning_horizons), ncols=1, figsize=(6*len(scenarios), 6 * len(planning_horizons)) ) axes = axes.flatten() From 1f18c4dca9d2229dd7ca34e8e1790e61d7fbb43c Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 23 Sep 2025 16:22:52 +0000 Subject: [PATCH 136/202] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- Snakefile | 1 - scripts/pypsa-de/regret_plots.py | 10 +++++++--- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/Snakefile b/Snakefile index 3aef346d9..52346aa2c 100644 --- a/Snakefile +++ b/Snakefile @@ -1204,7 +1204,6 @@ rule regret_plots: output_dir=directory( f"results/{config['run']['prefix']}/regret_plots/{{sensitivity}}/ST_comparison" ), - input: regret_networks=lambda wildcards: expand( f"results/{config['run']['prefix']}/{{run}}/regret_networks/{wildcards.sensitivity}/decision_{{decision}}_s_{{clusters}}_{{opts}}_{{sector_opts}}_{{planning_horizons}}.nc", diff --git a/scripts/pypsa-de/regret_plots.py b/scripts/pypsa-de/regret_plots.py index c3759bffe..6b21f22b8 100644 --- a/scripts/pypsa-de/regret_plots.py +++ b/scripts/pypsa-de/regret_plots.py @@ -66,7 +66,7 @@ def aggregate_by_keywords(opex_comp_agg, groups): snakemake = mock_snakemake( "regret_plots", - sensitivity="gas_price_60", + sensitivity="gas_price_60", ) configure_logging(snakemake) @@ -113,7 +113,9 @@ def aggregate_by_keywords(opex_comp_agg, groups): # Plot electricity price duration curves fig, ax = plt.subplots( - figsize=(4*len(scenarios), 5 * len(planning_horizons)), nrows=len(planning_horizons), ncols=1 + figsize=(4 * len(scenarios), 5 * len(planning_horizons)), + nrows=len(planning_horizons), + ncols=1, ) ax = ax.flatten() @@ -164,7 +166,9 @@ def aggregate_by_keywords(opex_comp_agg, groups): } fig, axes = plt.subplots( - nrows=len(planning_horizons), ncols=1, figsize=(6*len(scenarios), 6 * len(planning_horizons)) + nrows=len(planning_horizons), + ncols=1, + figsize=(6 * len(scenarios), 6 * len(planning_horizons)), ) axes = axes.flatten() From 7e47c0ec2acf45dec77c6a24c5572c45ecb33307 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Thu, 25 Sep 2025 15:27:17 +0200 Subject: [PATCH 137/202] refactor trade accounting --- scripts/pypsa-de/export_ariadne_variables.py | 327 +++++++------------ 1 file changed, 126 insertions(+), 201 deletions(-) diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index b5fb220df..536d2e8fa 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -4559,246 +4559,171 @@ def get_tsc(n, country): return tsc - def get_link_opex(n, carriers, region, sw, add_congestion_rent=False): - # get flow of 
electricity/hydrogen... - # multiply it with the marginal costs - supplying = n.links[ - (n.links.carrier.isin(carriers)) - & (n.links.bus0.str.startswith(region)) - & (~n.links.bus1.str.startswith(region)) - ].index - - receiving = n.links[ - (n.links.carrier.isin(carriers)) - & (~n.links.bus0.str.startswith(region)) - & (n.links.bus1.str.startswith(region)) - ].index - - trade_out = 0 - for index in supplying: - # price of energy in trade country - marg_price = n.buses_t.marginal_price[n.links.loc[index].bus0] - if add_congestion_rent: - marg_price = ( - marg_price + n.buses_t.marginal_price[n.links.loc[index].bus1] - ) / 2 - trade = n.links_t.p1[index].mul(sw) - trade_out += marg_price.mul(trade).sum() - - trade_in = 0 - for index in receiving: - # price of energy in Germany - marg_price = n.buses_t.marginal_price[n.links.loc[index].bus0] - if add_congestion_rent: - marg_price = ( - marg_price + n.buses_t.marginal_price[n.links.loc[index].bus1] - ) / 2 - trade = n.links_t.p1[index].mul(sw) - trade_in += marg_price.mul(trade).sum() - return abs(trade_in) - abs(trade_out) - # > 0: costs for Germany - # < 0: profit for Germany - - def get_line_opex(n, region, sw, add_congestion_rent=False): - supplying = n.lines[ - (n.lines.carrier.isin(["AC"])) - & (n.lines.bus0.str.startswith(region)) - & (~n.lines.bus1.str.startswith(region)) - ].index - receiving = n.lines[ - (n.lines.carrier.isin(["AC"])) - & (~n.lines.bus0.str.startswith(region)) - & (n.lines.bus1.str.startswith(region)) - ].index - - # i have to clip the trade - net_out = 0 - for index in supplying: - trade = n.lines_t.p1[index].mul(sw) - trade_out = trade.clip(lower=0) # positive - trade_in = trade.clip(upper=0) # negative - marg_price_DE = n.buses_t.marginal_price[n.lines.loc[index].bus0] - marg_price_EU = n.buses_t.marginal_price[n.lines.loc[index].bus1] - price_out = marg_price_DE - price_in = marg_price_EU - if add_congestion_rent: - price_out = price_in = (marg_price_DE + marg_price_EU) / 2 - net_out += trade_out.mul(price_out).sum() + trade_in.mul(price_in).sum() - # net_out > 0: Germany is exporting more electricity - # net_out < 0: Germany is importing more electricity - - net_in = 0 - for index in receiving: - trade = n.lines_t.p1[index].mul(sw) - trade_in = trade.clip(lower=0) # positive - trade_out = trade.clip(upper=0) # negative - trade_out = trade_out.clip(upper=0) - marg_price_EU = n.buses_t.marginal_price[n.lines.loc[index].bus0] - marg_price_DE = n.buses_t.marginal_price[n.lines.loc[index].bus1] - price_out = marg_price_DE - price_in = marg_price_EU - if add_congestion_rent: - price_out = price_in = (marg_price_DE + marg_price_EU) / 2 - net_in += trade_in.mul(price_in).sum() + trade_out.mul(price_out).sum() - # net_in > 0: Germany is importing more electricity - # net_in < 0: Germany is exporting more electricity - - return -net_out + net_in - - trade_carriers = [ - "DC", - "H2 pipeline", - "H2 pipeline (Kernnetz)", - "H2 pipeline retrofitted", - "renewable oil", - "renewable gas", - "methanol", - ] + def get_trade_cost(n, region, carriers): + """ + Positive values mean cost for the domestic energy system (imports > exports) + Negative values mean revenue for the domestic energy system (exports > imports) + """ + export_revenue, import_cost = get_export_import(n, region, carriers, unit="€") + return import_cost - export_revenue - sw = n.snapshot_weightings.generators - tsc = get_tsc(n, region).sum().sum() - trade_costs = get_link_opex(n, trade_carriers, region, sw) + get_line_opex( - n, region, sw - ) - 
var["Cost|Total Energy System Cost|Trade"] = trade_costs / 1e9 var["Cost|Total Energy System Cost|Trade|Electricity"] = ( - get_line_opex(n, region, sw) / 1e9 + get_link_opex(n, ["DC"], region, sw) / 1e9 - ) + get_trade_cost(n, region, ["AC"]) + get_trade_cost(n, region, ["DC"]) + ) / 1e9 var["Cost|Total Energy System Cost|Trade|Efuels"] = ( - get_link_opex(n, ["renewable oil", "renewable gas", "methanol"], region, sw) - / 1e9 + get_trade_cost(n, region, ["renewable oil", "renewable gas", "methanol"]) / 1e9 ) var["Cost|Total Energy System Cost|Trade|Hydrogen"] = ( - get_link_opex( + get_trade_cost( n, - ["H2 pipeline", "H2 pipeline (Kernnetz)", "H2 pipeline retrofitted"], region, - sw, + ["H2 pipeline", "H2 pipeline (Kernnetz)", "H2 pipeline retrofitted"], ) / 1e9 ) - if not isclose( - var["Cost|Total Energy System Cost|Trade"], + var["Cost|Total Energy System Cost|Trade"] = ( var["Cost|Total Energy System Cost|Trade|Electricity"] + var["Cost|Total Energy System Cost|Trade|Efuels"] - + var["Cost|Total Energy System Cost|Trade|Hydrogen"], - ): - logger.error( - "Total Energy System Cost|Trade does not equal the sum of its components. This should be fixed!" - ) - + + var["Cost|Total Energy System Cost|Trade|Hydrogen"] + ) # Cost|Total Energy System Cost in billion EUR2020/yr - var["Cost|Total Energy System Cost"] = round((tsc + trade_costs) / 1e9, 4) - - return var - - -def get_trade(n, region): - var = pd.Series() - - def get_export_import_links(n, region, carriers): - # note: links can also used bidirectional if efficiency=1 (e.g. "H2 pipeline retrofitted") - outgoing = n.links.index[ - (n.links.carrier.isin(carriers)) - & (n.links.bus0.str[:2] == region) - & (n.links.bus1.str[:2] != region) - ] + var["Cost|Total Energy System Cost|Non Trade"] = ( + get_tsc(n, region).sum().sum() / 1e9 + ) - incoming = n.links.index[ - (n.links.carrier.isin(carriers)) - & (n.links.bus0.str[:2] != region) - & (n.links.bus1.str[:2] == region) - ] + var["Cost|Total Energy System Cost"] = ( + var["Cost|Total Energy System Cost|Non Trade"] + + var["Cost|Total Energy System Cost|Trade"] + ) - exporting_p = ( - # if p0 > 0 (=clip(lower=0)) system is withdrawing from bus0 (DE) and feeding into bus1 (non-DE) -> export - n.links_t.p0.loc[:, outgoing] - .clip(lower=0) - .multiply(n.snapshot_weightings.generators, axis=0) - .values.sum() - + - # if p1 > 0 system is withdrawing from bus1 (DE) and feeding into bus0 (non-DE) -> export - n.links_t.p1.loc[:, incoming] - .clip(lower=0) - .multiply(n.snapshot_weightings.generators, axis=0) - .values.sum() - ) + return var - importing_p = ( - # if p1 < 0 (=clip(upper=0)) system is feeding into bus1 (DE) and withdrawing from bus0 (non-DE) -> import (with negative sign here) - n.links_t.p1.loc[:, incoming] - .clip(upper=0) - .multiply(n.snapshot_weightings.generators, axis=0) - .values.sum() - * -1 - + - # if p0 < 0 (=clip(upper=0)) system is feeding into bus0 (DE) and withdrawing from bus1 (non-DE) -> import (with negative sign here) - n.links_t.p0.loc[:, outgoing] - .clip(upper=0) - .multiply(n.snapshot_weightings.generators, axis=0) - .values.sum() - * -1 - ) - return exporting_p, importing_p +def get_export_import(n, region, carriers, aggregate=True, unit="MWh"): + # note: links can also used bidirectional if efficiency=1 (e.g. "H2 pipeline retrofitted") + if "AC" not in carriers: + df = n.links + df_t = n.links_t + if "AC" in carriers: + if len(carriers) > 1: + raise NotImplementedError( + "AC lines cannot be combined with other carriers. 
Use carrier=['AC'] to get export_import for n.lines." + ) + df = n.lines + df_t = n.lines_t - # Trade|Secondary Energy|Electricity|Volume - outgoing_ac = n.lines.index[ - (n.lines.carrier == "AC") - & (n.lines.bus0.str[:2] == region) - & (n.lines.bus1.str[:2] != region) + outgoing = df[ + (df.carrier.isin(carriers)) + & (df.bus0.str[:2] == region) + & (df.bus1.str[:2] != region) ] - incoming_ac = n.lines.index[ - (n.lines.carrier == "AC") - & (n.lines.bus0.str[:2] != region) - & (n.lines.bus1.str[:2] == region) + incoming = df[ + (df.carrier.isin(carriers)) + & (df.bus0.str[:2] != region) + & (df.bus1.str[:2] == region) ] - - exporting_p_ac = ( - # if p0 > 0 (=clip(lower=0)) system is withdrawing from bus0 (DE) and feeding into bus1 (non-DE) -> export - n.lines_t.p0.loc[:, outgoing_ac] + # if p0 > 0 (=clip(lower=0)) system is withdrawing from bus0 (DE) and feeding into bus1 (non-DE) -> export + export_outgoing = ( + df_t.p0.loc[:, outgoing.index] .clip(lower=0) .multiply(n.snapshot_weightings.generators, axis=0) - .values.sum() - + - # if p1 > 0 system is withdrawing from bus1 (DE) and feeding into bus0 (non-DE) -> export - n.lines_t.p1.loc[:, incoming_ac] + ) + if unit == "€": + # bus0 is DE for outgoing links + domestic_prices = pd.concat( + [ + n.buses_t.marginal_price[bus].rename(link) + for link, bus in outgoing.bus0.items() + ], + axis=1, + ) + export_outgoing *= domestic_prices + + # if p1 > 0 system is withdrawing from bus1 (DE) and feeding into bus0 (non-DE) -> export + export_incoming = ( + df_t.p1.loc[:, incoming.index] .clip(lower=0) .multiply(n.snapshot_weightings.generators, axis=0) - .values.sum() ) + if unit == "€": + # bus1 is DE for incoming links + domestic_prices = pd.concat( + [ + n.buses_t.marginal_price[bus].rename(link) + for link, bus in incoming.bus1.items() + ], + axis=1, + ) + export_incoming *= domestic_prices - importing_p_ac = ( - # if p1 < 0 (=clip(upper=0)) system is feeding into bus1 (DE) and withdrawing from bus0 (non-DE) -> import (with negative sign here) - n.lines_t.p1.loc[:, incoming_ac] + exporting_p = pd.concat([export_outgoing, export_incoming], axis=1) + if aggregate: + exporting_p = exporting_p.values.sum() + + # if p1 < 0 (=clip(upper=0)) system is feeding into bus1 (DE) and withdrawing from bus0 (non-DE) -> import (with negative sign here) + import_incoming = ( + df_t.p1.loc[:, incoming.index] .clip(upper=0) .multiply(n.snapshot_weightings.generators, axis=0) - .values.sum() * -1 - + - # if p0 < 0 (=clip(upper=0)) system is feeding into bus0 (DE) and withdrawing from bus1 (non-DE) -> import (with negative sign here) - n.lines_t.p0.loc[:, outgoing_ac] + ) + if unit == "€": + # bus1 is DE for incoming links + domestic_prices = pd.concat( + [ + n.buses_t.marginal_price[bus].rename(link) + for link, bus in incoming.bus1.items() + ], + axis=1, + ) + import_incoming *= domestic_prices + + # if p0 < 0 (=clip(upper=0)) system is feeding into bus0 (DE) and withdrawing from bus1 (non-DE) -> import (with negative sign here) + import_outgoing = ( + df_t.p0.loc[:, outgoing.index] .clip(upper=0) .multiply(n.snapshot_weightings.generators, axis=0) - .values.sum() * -1 ) + if unit == "€": + # bus0 is DE for outgoing links + domestic_prices = pd.concat( + [ + n.buses_t.marginal_price[bus].rename(link) + for link, bus in outgoing.bus0.items() + ], + axis=1, + ) + import_outgoing *= domestic_prices - exports_dc, imports_dc = get_export_import_links(n, region, ["DC"]) + importing_p = pd.concat([import_outgoing, import_incoming], axis=1) + if aggregate: + 
importing_p = importing_p.values.sum() - var["Trade|Secondary Energy|Electricity|Volume"] = ( - exporting_p_ac - importing_p_ac - ) + (exports_dc - imports_dc) + return exporting_p, importing_p + + +def get_trade(n, region): + var = pd.Series() + + # Trade|Secondary Energy|Electricity|Volume + exports_ac, imports_ac = get_export_import(n, region, ["AC"]) + + exports_dc, imports_dc = get_export_import(n, region, ["DC"]) + + var["Trade|Secondary Energy|Electricity|Volume"] = (exports_ac - imports_ac) + ( + exports_dc - imports_dc + ) var["Trade|Secondary Energy|Electricity|Gross Import|Volume"] = ( - importing_p_ac + imports_dc + imports_ac + imports_dc ) # var["Trade|Secondary Energy|Electricity|Volume|Exports"] = \ # (exporting_p_ac + exports_dc) # Trade|Secondary Energy|Hydrogen|Volume h2_carriers = ["H2 pipeline", "H2 pipeline (Kernnetz)", "H2 pipeline retrofitted"] - exports_h2, imports_h2 = get_export_import_links(n, region, h2_carriers) + exports_h2, imports_h2 = get_export_import(n, region, h2_carriers) var["Trade|Secondary Energy|Hydrogen|Volume"] = exports_h2 - imports_h2 var["Trade|Secondary Energy|Hydrogen|Gross Import|Volume"] = imports_h2 # var["Trade|Secondary Energy|Hydrogen|Volume|Exports"] = \ @@ -4831,7 +4756,7 @@ def get_export_import_links(n, region, carriers): EU_renewable_oil.filter(like="bio").sum() / EU_renewable_oil.sum() ) - exports_oil_renew, imports_oil_renew = get_export_import_links( + exports_oil_renew, imports_oil_renew = get_export_import( n, region, ["renewable oil"] ) @@ -4843,7 +4768,7 @@ def get_export_import_links(n, region, carriers): imports_oil_renew * EU_bio_fraction ) - exports_meoh, imports_meoh = get_export_import_links(n, region, ["methanol"]) + exports_meoh, imports_meoh = get_export_import(n, region, ["methanol"]) var["Trade|Secondary Energy|Liquids|Hydrogen|Volume"] = ( exports_oil_renew * (1 - DE_bio_fraction) @@ -4887,7 +4812,7 @@ def get_export_import_links(n, region, carriers): assert region == "DE" # only DE is implemented at the moment - exports_gas_renew, imports_gas_renew = get_export_import_links( + exports_gas_renew, imports_gas_renew = get_export_import( n, region, ["renewable gas"] ) var["Trade|Secondary Energy|Gases|Hydrogen|Volume"] = exports_gas_renew * ( @@ -4910,7 +4835,7 @@ def get_export_import_links(n, region, carriers): gas_fractions = _get_fuel_fractions(n, region, "gas") if "gas pipeline" in n.links.carrier.unique(): - exports_gas, imports_gas = get_export_import_links( + exports_gas, imports_gas = get_export_import( n, region, ["gas pipeline", "gas pipeline new"] ) var["Trade|Primary Energy|Gas|Volume"] = ( From c02a5561ae5a3b08ef093014eddac6fc80760fdc Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Thu, 25 Sep 2025 15:28:00 +0200 Subject: [PATCH 138/202] less sensitivities, more robust workflow --- Snakefile | 7 ++++++- config/config.de.yaml | 4 ++-- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/Snakefile b/Snakefile index 646672a8a..d2cc95540 100644 --- a/Snakefile +++ b/Snakefile @@ -1129,7 +1129,12 @@ rule regret_base: def get_st_sensitivities(w): dirs = ["base"] - for sens in config_provider("iiasa_database", "regret_run", "st_sensitivities")(w): + sensitivities = config_provider("iiasa_database", "regret_run", "st_sensitivities")( + w + ) + if sensitivities is None: + return dirs + for sens in sensitivities: dirs.append(f"{sens}") return dirs diff --git a/config/config.de.yaml b/config/config.de.yaml index d512f262f..7386e7951 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ 
-4,7 +4,7 @@ # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#run run: - prefix: 20250923_regret_uc_scaleCBF + prefix: 20250925_line_expansion name: # - ExPol # - KN2045_Mix @@ -50,7 +50,7 @@ iiasa_database: h2_vent: true st_sensitivities: # CAVEAT: These behave like wildcards #- no_flex - - gas_price_60 + #- gas_price_60 #- gas_price_80 unit_commitment: enable: false From df3b2389df6ce87703794731ab68b3276a9e5e94 Mon Sep 17 00:00:00 2001 From: JulianGeis Date: Thu, 25 Sep 2025 17:18:31 +0200 Subject: [PATCH 139/202] revert splitting of power_limits constraint; add restriction of cross border flows to whole system --- config/config.de.yaml | 4 +-- scripts/pypsa-de/additional_functionality.py | 31 ++++++++++++-------- scripts/pypsa-de/modify_prenetwork.py | 15 ++++++---- 3 files changed, 30 insertions(+), 20 deletions(-) diff --git a/config/config.de.yaml b/config/config.de.yaml index 7386e7951..2693a38b8 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -4,7 +4,7 @@ # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#run run: - prefix: 20250925_line_expansion + prefix: 20250925_cross_border_flows name: # - ExPol # - KN2045_Mix @@ -65,7 +65,7 @@ iiasa_database: - 2025 no_flex_lt_run: false # If true, removes all flexibility options scale_industry_non_energy: false # Scale non-energy industry demand directly proportional to energy demand - restrict_cross_border_flows: + restrict_cross_border_flows: # restricts cross border flows between all countries (AC) 2025: 0.5 2030: 0.6 diff --git a/scripts/pypsa-de/additional_functionality.py b/scripts/pypsa-de/additional_functionality.py index f2a1b67a9..80d6a4ab6 100644 --- a/scripts/pypsa-de/additional_functionality.py +++ b/scripts/pypsa-de/additional_functionality.py @@ -106,14 +106,16 @@ def add_capacity_limits( def add_power_limits(n, investment_year, limits_power_max): """ - Restricts the maximum inflow and outflow of electricity from/to a country separately. + " Restricts the maximum inflow/outflow of electricity from/to a country. 
""" for ct in limits_power_max: if investment_year not in limits_power_max[ct].keys(): continue + limit = 1e3 * limits_power_max[ct][investment_year] / 10 + logger.info( - f"Adding constraints on electricity import and export from/to {ct} to be < {limit * 10} MW each" + f"Adding constraint on electricity import/export from/to {ct} to be < {limit} MW" ) incoming_line = n.lines.index[ (n.lines.carrier == "AC") @@ -125,6 +127,7 @@ def add_power_limits(n, investment_year, limits_power_max): & (n.lines.bus0.str[:2] == ct) & (n.lines.bus1.str[:2] != ct) ] + incoming_link = n.links.index[ (n.links.carrier == "DC") & (n.links.bus0.str[:2] != ct) @@ -136,25 +139,29 @@ def add_power_limits(n, investment_year, limits_power_max): & (n.links.bus1.str[:2] != ct) ] - # iterate over snapshots + # iterate over snapshots - otherwise exporting of postnetwork fails since + # the constraints are time dependent for t in n.snapshots: incoming_line_p = n.model["Line-s"].loc[t, incoming_line] outgoing_line_p = n.model["Line-s"].loc[t, outgoing_line] incoming_link_p = n.model["Link-p"].loc[t, incoming_link] outgoing_link_p = n.model["Link-p"].loc[t, outgoing_link] - # Total inflow (imports) - only positive flows into the country - inflow_lhs = (incoming_link_p.sum() + incoming_line_p.sum()) / 10 + lhs = ( + incoming_link_p.sum() + - outgoing_link_p.sum() + + incoming_line_p.sum() + - outgoing_line_p.sum() + ) / 10 + # divide by 10 to avoid numerical issues - # Total outflow (exports) - only positive flows out of the country - outflow_lhs = (outgoing_link_p.sum() + outgoing_line_p.sum()) / 10 + cname_upper = f"Power-import-limit-{ct}-{t}" + cname_lower = f"Power-export-limit-{ct}-{t}" - # Separate constraints for inflow and outflow - cname_inflow = f"Power-inflow-limit-{ct}-{t}" - cname_outflow = f"Power-outflow-limit-{ct}-{t}" + n.model.add_constraints(lhs <= limit, name=cname_upper) + n.model.add_constraints(lhs >= -limit, name=cname_lower) - n.model.add_constraints(inflow_lhs <= limit, name=cname_inflow) - n.model.add_constraints(outflow_lhs <= limit, name=cname_outflow) + # not adding to network as the shadow prices are not needed def h2_import_limits(n, investment_year, limits_volume_max): diff --git a/scripts/pypsa-de/modify_prenetwork.py b/scripts/pypsa-de/modify_prenetwork.py index 8323ec749..3ff9ad121 100644 --- a/scripts/pypsa-de/modify_prenetwork.py +++ b/scripts/pypsa-de/modify_prenetwork.py @@ -1424,12 +1424,15 @@ def remove_flexibility_options(n): def restrict_cross_border_flows(n, s_max_pu): - logger.info(f"Restricting cross-border flows to {s_max_pu}.") - ind = n.lines[ - (n.lines.bus0.str.startswith("DE") & ~n.lines.bus1.str.startswith("DE")) - | (~n.lines.bus0.str.startswith("DE") & n.lines.bus1.str.startswith("DE")) - ].index - n.lines.loc[ind, "s_max_pu"] = s_max_pu + logger.info( + f"Restricting cross-border flows between all countries (AC) to {s_max_pu}." 
+ ) + cross_border_lines = n.lines.index[ + (n.lines.active) + & (n.lines.carrier == "AC") + & (n.lines.bus0.str[:2] != n.lines.bus1.str[:2]) + ] + n.lines.loc[cross_border_lines, "s_max_pu"] = s_max_pu if __name__ == "__main__": From 71dbec814bd2ac6ed9860907f730c671667a75c4 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 26 Sep 2025 14:01:25 +0200 Subject: [PATCH 140/202] apply power limit to gross imports/exports --- CHANGELOG.md | 1 + config/config.de.yaml | 2 +- scripts/pypsa-de/additional_functionality.py | 146 +++++++++++++++---- 3 files changed, 122 insertions(+), 27 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6eb4a94bc..0ab12607d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,7 @@ # Changelog - Added an option to source industry energy demand from UBA MWMS (Projektionsbericht 2025) for the years 2025-2035 - renamed some scripts +- Bugfix: Enforce stricter power import limit to avoid that import from one country compensate from exports to another - Bugfix: Enforce stricter H2 derivative import limit to avoid that exports of one type of derivative compensate for imports of another - Added an option to source mobility demand from UBA MWMS (Projektionsbericht 2025) for the years 2025-2035 - Renamed functions and script for exogenous mobility demand diff --git a/config/config.de.yaml b/config/config.de.yaml index 2693a38b8..446e83910 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -4,7 +4,7 @@ # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#run run: - prefix: 20250925_cross_border_flows + prefix: 20250926_improve_power_limits name: # - ExPol # - KN2045_Mix diff --git a/scripts/pypsa-de/additional_functionality.py b/scripts/pypsa-de/additional_functionality.py index 80d6a4ab6..5c4afc97e 100644 --- a/scripts/pypsa-de/additional_functionality.py +++ b/scripts/pypsa-de/additional_functionality.py @@ -112,56 +112,150 @@ def add_power_limits(n, investment_year, limits_power_max): if investment_year not in limits_power_max[ct].keys(): continue - limit = 1e3 * limits_power_max[ct][investment_year] / 10 + lim = 1e3 * limits_power_max[ct][investment_year] # in MW logger.info( - f"Adding constraint on electricity import/export from/to {ct} to be < {limit} MW" + f"Adding constraint on electricity import/export from/to {ct} to be < {lim} MW" ) - incoming_line = n.lines.index[ + # identify interconnectors + + incoming_lines = n.lines[ (n.lines.carrier == "AC") & (n.lines.bus0.str[:2] != ct) & (n.lines.bus1.str[:2] == ct) + & n.lines.active ] - outgoing_line = n.lines.index[ + outgoing_lines = n.lines[ (n.lines.carrier == "AC") & (n.lines.bus0.str[:2] == ct) & (n.lines.bus1.str[:2] != ct) + & n.lines.active ] - - incoming_link = n.links.index[ + incoming_links = n.links[ (n.links.carrier == "DC") & (n.links.bus0.str[:2] != ct) & (n.links.bus1.str[:2] == ct) + & n.links.active ] - outgoing_link = n.links.index[ + outgoing_links = n.links[ (n.links.carrier == "DC") & (n.links.bus0.str[:2] == ct) & (n.links.bus1.str[:2] != ct) + & n.links.active ] - # iterate over snapshots - otherwise exporting of postnetwork fails since - # the constraints are time dependent for t in n.snapshots: - incoming_line_p = n.model["Line-s"].loc[t, incoming_line] - outgoing_line_p = n.model["Line-s"].loc[t, outgoing_line] - incoming_link_p = n.model["Link-p"].loc[t, incoming_link] - outgoing_link_p = n.model["Link-p"].loc[t, outgoing_link] - - lhs = ( - incoming_link_p.sum() - - outgoing_link_p.sum() - + incoming_line_p.sum() - - outgoing_line_p.sum() 
- ) / 10 - # divide by 10 to avoid numerical issues + # For incoming flows s > 0 means imports, s < 0 exports + # For outgoing flows s > 0 means exports, s < 0 imports + # to get the positive and negative parts separately, we use auxiliary variables + incoming_lines_var = n.model["Line-s"].loc[t, incoming_lines.index] + n.model.add_variables( + coords=[incoming_lines.index], + name=f"Line-s-incoming-{ct}-aux-pos-{t}", + lower=0, + upper=incoming_lines.s_nom_max, + ) + n.model.add_variables( + coords=[incoming_lines.index], + name=f"Line-s-incoming-{ct}-aux-neg-{t}", + lower=-incoming_lines.s_nom_max, + upper=0, + ) + n.model.add_constraints( + n.model[f"Line-s-incoming-{ct}-aux-pos-{t}"] >= incoming_lines_var, + name=f"Line-s-incoming-{ct}-aux-pos-constr-{t}", + ) + n.model.add_constraints( + n.model[f"Line-s-incoming-{ct}-aux-neg-{t}"] <= incoming_lines_var, + name=f"Line-s-incoming-{ct}-aux-neg-constr-{t}", + ) - cname_upper = f"Power-import-limit-{ct}-{t}" - cname_lower = f"Power-export-limit-{ct}-{t}" + outgoing_lines_var = n.model["Line-s"].loc[t, outgoing_lines.index] + n.model.add_variables( + coords=[outgoing_lines.index], + name=f"Line-s-outgoing-{ct}-aux-pos-{t}", + lower=0, + upper=outgoing_lines.s_nom_max, + ) + n.model.add_variables( + coords=[outgoing_lines.index], + name=f"Line-s-outgoing-{ct}-aux-neg-{t}", + lower=-outgoing_lines.s_nom_max, + upper=0, + ) + n.model.add_constraints( + n.model[f"Line-s-outgoing-{ct}-aux-pos-{t}"] >= outgoing_lines_var, + name=f"Line-s-outgoing-{ct}-aux-pos-constr-{t}", + ) + n.model.add_constraints( + n.model[f"Line-s-outgoing-{ct}-aux-neg-{t}"] <= outgoing_lines_var, + name=f"Line-s-outgoing-{ct}-aux-neg-constr-{t}", + ) - n.model.add_constraints(lhs <= limit, name=cname_upper) - n.model.add_constraints(lhs >= -limit, name=cname_lower) + incoming_links_var = n.model["Link-p"].loc[t, incoming_links.index] + n.model.add_variables( + coords=[incoming_links.index], + name=f"Link-p-incoming-{ct}-aux-pos-{t}", + lower=0, + upper=incoming_links.p_nom_max, + ) + n.model.add_variables( + coords=[incoming_links.index], + name=f"Link-p-incoming-{ct}-aux-neg-{t}", + lower=-incoming_links.p_nom_max, + upper=0, + ) + n.model.add_constraints( + n.model[f"Link-p-incoming-{ct}-aux-pos-{t}"] >= incoming_links_var, + name=f"Link-p-incoming-{ct}-aux-pos-constr-{t}", + ) + n.model.add_constraints( + n.model[f"Link-p-incoming-{ct}-aux-neg-{t}"] <= incoming_links_var, + name=f"Link-p-incoming-{ct}-aux-neg-constr-{t}", + ) - # not adding to network as the shadow prices are not needed + outgoing_links_var = n.model["Link-p"].loc[t, outgoing_links.index] + n.model.add_variables( + coords=[outgoing_links.index], + name=f"Link-p-outgoing-{ct}-aux-pos-{t}", + lower=0, + upper=outgoing_links.p_nom_max, + ) + n.model.add_variables( + coords=[outgoing_links.index], + name=f"Link-p-outgoing-{ct}-aux-neg-{t}", + lower=-outgoing_links.p_nom_max, + upper=0, + ) + n.model.add_constraints( + n.model[f"Link-p-outgoing-{ct}-aux-pos-{t}"] >= outgoing_links_var, + name=f"Link-p-outgoing-{ct}-aux-pos-constr-{t}", + ) + n.model.add_constraints( + n.model[f"Link-p-outgoing-{ct}-aux-neg-{t}"] <= outgoing_links_var, + name=f"Link-p-outgoing-{ct}-aux-neg-constr-{t}", + ) + # To constraint the absolute values of imports and exports, we have to sum the + # corresponding positive and negative flows separately, using auxiliary variables + import_lhs = ( + n.model[f"Link-p-incoming-{ct}-aux-pos-{t}"].sum() + + n.model[f"Line-s-incoming-{ct}-aux-pos-{t}"].sum() + - 
n.model[f"Link-p-outgoing-{ct}-aux-neg-{t}"].sum() + - n.model[f"Line-s-outgoing-{ct}-aux-neg-{t}"].sum() + ) / 10 # divide by 10 to improve numerical stability + export_lhs = ( + n.model[f"Link-p-outgoing-{ct}-aux-pos-{t}"].sum() + + n.model[f"Line-s-outgoing-{ct}-aux-pos-{t}"].sum() + - n.model[f"Link-p-incoming-{ct}-aux-neg-{t}"].sum() + - n.model[f"Line-s-incoming-{ct}-aux-neg-{t}"].sum() + ) / 10 + + n.model.add_constraints( + import_lhs <= lim / 10, name=f"Power-import-limit-{ct}-{t}" + ) + n.model.add_constraints( + export_lhs <= lim / 10, name=f"Power-export-limit-{ct}-{t}" + ) def h2_import_limits(n, investment_year, limits_volume_max): From 42317959b45c3dd10f61ce26bd3ce7c5278b808f Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 26 Sep 2025 14:43:11 +0200 Subject: [PATCH 141/202] simplify indexing --- scripts/pypsa-de/modify_prenetwork.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/scripts/pypsa-de/modify_prenetwork.py b/scripts/pypsa-de/modify_prenetwork.py index 3ff9ad121..f563ecfcc 100644 --- a/scripts/pypsa-de/modify_prenetwork.py +++ b/scripts/pypsa-de/modify_prenetwork.py @@ -1427,11 +1427,7 @@ def restrict_cross_border_flows(n, s_max_pu): logger.info( f"Restricting cross-border flows between all countries (AC) to {s_max_pu}." ) - cross_border_lines = n.lines.index[ - (n.lines.active) - & (n.lines.carrier == "AC") - & (n.lines.bus0.str[:2] != n.lines.bus1.str[:2]) - ] + cross_border_lines = n.lines.index[n.lines.bus0.str[:2] != n.lines.bus1.str[:2]] n.lines.loc[cross_border_lines, "s_max_pu"] = s_max_pu From 546a718ddabc3a52f90b1c20ffc16fcb55245430 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 26 Sep 2025 16:46:47 +0200 Subject: [PATCH 142/202] define an decidedly LOW RES scenario --- config/config.de.yaml | 3 +- config/scenarios.manual.yaml | 61 ++++++++++++++++++++++++++++++++++++ 2 files changed, 63 insertions(+), 1 deletion(-) diff --git a/config/config.de.yaml b/config/config.de.yaml index 446e83910..298ecb9ed 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -10,8 +10,9 @@ run: # - KN2045_Mix - HighDemand - LowDemand + - LowDemandLowRES #- HighDemandNoFlex # !!! CAVEAT The 'NoFlex' substring in the scenario name determines how the regrets are computed!!! 
- - LowDemandNoFlex + #- LowDemandNoFlex # - KN2045_NFhoch scenarios: enable: true diff --git a/config/scenarios.manual.yaml b/config/scenarios.manual.yaml index 8ebe81846..8d9b71f3c 100644 --- a/config/scenarios.manual.yaml +++ b/config/scenarios.manual.yaml @@ -126,6 +126,67 @@ LowDemand: Store: urban central water pits: 2040 +LowDemandLowRES: + electricity: + transmission_limit: v1 + iiasa_database: + uba_for_industry: + - 2025 + - 2030 + - 2035 + uba_for_mobility: + - 2025 + - 2030 + - 2035 + offshore_nep_force: + delay_years: 2 + solving: + options: + transmission_losses: 0 + noisy_costs: false + constraints: + decentral_heat_pump_budgets: + DE: # UBA MWMS, Projektionsbericht 2025, Abbildung 48 + 2025: 12.0 + 2030: 26.0 + 2035: 38.0 + # central_heat_pump_budgets: + # DE: # UBA MWMS, Projektionsbericht 2025, Abbildung 22 + # 2025: 7.0 + # 2030: 26.0 + # 2035: 52.0 + limits_capacity_min: + Generator: + onwind: + DE: + 2030: 0 + 2035: 0 + offwind: + DE: + 2030: 0 + 2035: 0 + Link: + H2 electrolysis: + DE: + 2030: 0 + limits_capacity_max: + Generator: + onwind: + DE: + 2030: 100 + offwind: + DE: + 2030: 20 + solar: + DE: + 2030: 150 + first_technology_occurrence: + Link: + urban central water pits charger: 2040 # essentially moving pits out of the optimization horizon to forbid them + urban central water pits discharger: 2040 + Store: + urban central water pits: 2040 + LowDemandNoFlex: electricity: transmission_limit: v1 From d1ecf2339b215d58d200259aa77d9214c5abc0a3 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 26 Sep 2025 17:03:50 +0200 Subject: [PATCH 143/202] regret_runs only for those scens that will be analysed --- Snakefile | 6 +++++- config/config.de.yaml | 4 ++-- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/Snakefile b/Snakefile index cc99c15a8..654cb2102 100644 --- a/Snakefile +++ b/Snakefile @@ -1161,7 +1161,11 @@ rule plot_scenario_comparison_regrets: exported_variables=expand( RESULTS + "regret_variables/{sensitivity}/regret_variables_{decision}_full.xlsx", - run=config_provider("run", "name"), + run=lambda w: [ + r + for r in config_provider("run", "name")(w) + if r in ["LowDemand", "HighDemand"] + ], decision=config_provider("run", "name"), allow_missing=True, ), diff --git a/config/config.de.yaml b/config/config.de.yaml index 298ecb9ed..189dcd9aa 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -4,11 +4,11 @@ # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#run run: - prefix: 20250926_improve_power_limits + prefix: 20250926_less_regret_runs name: # - ExPol # - KN2045_Mix - - HighDemand + # - HighDemand - LowDemand - LowDemandLowRES #- HighDemandNoFlex # !!! CAVEAT The 'NoFlex' substring in the scenario name determines how the regrets are computed!!! 
From f680e7ef4f78f5e9078641c340efb569873839f4 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 26 Sep 2025 17:19:03 +0200 Subject: [PATCH 144/202] fix RAM bottlenack --- Snakefile | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Snakefile b/Snakefile index 654cb2102..fe2f28708 100644 --- a/Snakefile +++ b/Snakefile @@ -1014,6 +1014,8 @@ rule prepare_regret_network: output: regret_prenetwork=RESULTS + "regret_prenetworks/decision_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", + resources: + mem_mb=16000, log: RESULTS + "logs/regret_prenetwork_{decision}_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.log", From 0e68848b9b9f168490650f39ba9b43775b898de5 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Mon, 29 Sep 2025 17:10:28 +0200 Subject: [PATCH 145/202] update solar-hsat costs in myopic optimization and small refactor --- scripts/prepare_sector_network.py | 27 +++++++++++++++------------ 1 file changed, 15 insertions(+), 12 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 478c30191..eaa59b920 100755 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -471,19 +471,22 @@ def update_wind_solar_costs( # NB: solar costs are also manipulated for rooftop # when distribution grid is inserted - n.generators.loc[n.generators.carrier == "solar", "capital_cost"] = costs.at[ - "solar-utility", "capital_cost" - ] - n.generators.loc[n.generators.carrier == "solar", "overnight_cost"] = costs.at[ - "solar-utility", "investment" - ] + carrier_cost_dict = { + "solar": "solar-utility", + "solar-hsat": "solar-hsat", + "onwind": "onwind", + } + + for carrier, cost_key in carrier_cost_dict.items(): + if carrier not in n.generators.carrier.values: + continue + n.generators.loc[n.generators.carrier == carrier, "lifetime"] = costs.at[ + cost_key, "lifetime" + ] + n.generators.loc[n.generators.carrier == carrier, "efficiency"] = costs.at[ + cost_key, "efficiency" + ] - n.generators.loc[n.generators.carrier == "onwind", "capital_cost"] = costs.at[ - "onwind", "capital_cost" - ] - n.generators.loc[n.generators.carrier == "onwind", "overnight_cost"] = costs.at[ - "onwind", "investment" - ] # for offshore wind, need to calculated connection costs for key, fn in profiles.items(): tech = key[len("profile_") :] From c3bb8cbc9591f84fc155cb5fbe3d0ef5097eb9c8 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Mon, 29 Sep 2025 17:35:13 +0200 Subject: [PATCH 146/202] fix chatgpt garbage --- scripts/pypsa-de/export_ariadne_variables.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index 536d2e8fa..494eac28e 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -5463,7 +5463,7 @@ def get_data( opts="", ll="vopt", sector_opts="None", - run="HighDemand", + run="LowDemand", decision="LowDemand", sensitivity="base", ) From 5b680a3464e0d7dd7f0d0e3526fcb54a8259d3b8 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Mon, 29 Sep 2025 17:38:32 +0200 Subject: [PATCH 147/202] change the correct cost attributes --- scripts/prepare_sector_network.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index eaa59b920..70a58db7d 100755 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -480,11 +480,11 @@ def 
update_wind_solar_costs( for carrier, cost_key in carrier_cost_dict.items(): if carrier not in n.generators.carrier.values: continue - n.generators.loc[n.generators.carrier == carrier, "lifetime"] = costs.at[ - cost_key, "lifetime" + n.generators.loc[n.generators.carrier == carrier, "capital_cost"] = costs.at[ + cost_key, "capital_cost" ] - n.generators.loc[n.generators.carrier == carrier, "efficiency"] = costs.at[ - cost_key, "efficiency" + n.generators.loc[n.generators.carrier == carrier, "overnight_cost"] = costs.at[ + cost_key, "investment" ] # for offshore wind, need to calculated connection costs From ee5069d1822cb6f9a00556e95a6b3000300444b2 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Tue, 30 Sep 2025 13:51:43 +0200 Subject: [PATCH 148/202] add variables for decentral boilers --- scripts/pypsa-de/export_ariadne_variables.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index 494eac28e..3edf8c6fb 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -1000,6 +1000,14 @@ def _get_capacities(n, region, cap_func, cap_string="Capacity|"): like="heat pump" ).sum() + var[cap_string + "Decentral Heat|Biomass"] = capacities_decentral_heat.filter( + like="biomass" + ).sum() + + var[cap_string + "Decentral Heat|Gas"] = capacities_decentral_heat.filter( + like="gas" + ).sum() + capacities_h2 = ( cap_func( bus_carrier="H2", From e84d49b73c968381db2cbffe664bf52370d8973a Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Tue, 30 Sep 2025 13:52:22 +0200 Subject: [PATCH 149/202] expand HVC to air instead of waste CHP in regret runs --- scripts/pypsa-de/prepare_regret_network.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/scripts/pypsa-de/prepare_regret_network.py b/scripts/pypsa-de/prepare_regret_network.py index f499fd6c2..d1b05ce12 100644 --- a/scripts/pypsa-de/prepare_regret_network.py +++ b/scripts/pypsa-de/prepare_regret_network.py @@ -504,7 +504,7 @@ def _unfix_bottlenecks(new, deci, name, extendable_i): # Bottleneck links can be extended, but not reduced to fix infeasibilities due to numerical inconsistencies bottleneck_links = [ "electricity distribution grid", - "waste CHP", + "HVC to air", # waste CHP would get used as a flexible energy source otherwise "SMR", # Boilers create bottlenecks AND should be extendable for fixed_profile_scaling constraints to be applied correctly "rural gas boiler", @@ -519,13 +519,6 @@ def _unfix_bottlenecks(new, deci, name, extendable_i): new.loc[_idx, "p_nom_extendable"] = True new.loc[_idx, "p_nom_min"] = deci.loc[_idx, "p_nom_opt"] - # Waste outside DE can also be burned directly - _idx = new.query( - "carrier == 'HVC to air' and not index.str.startswith('DE')" - ).index.intersection(extendable_i) - new.loc[_idx, "p_nom_extendable"] = True - new.loc[_idx, "p_nom_min"] = deci.loc[_idx, "p_nom_opt"] - if name == "generators": fuels = [ "lignite", From 7db3e85593b67982fcc640441203b42469f8789d Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Tue, 30 Sep 2025 14:06:50 +0200 Subject: [PATCH 150/202] latest gas price is somewhat lower in EUR2020 --- ariadne-data/costs_2025-modifications.csv | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ariadne-data/costs_2025-modifications.csv b/ariadne-data/costs_2025-modifications.csv index 8fe241d25..2c1dd0fcf 100644 --- a/ariadne-data/costs_2025-modifications.csv +++ 
b/ariadne-data/costs_2025-modifications.csv @@ -1,5 +1,5 @@ technology,parameter,value,unit,source,further description -gas,fuel,40,EUR/MWh_th,Ariadne, +gas,fuel,32.8,EUR/MWh_th,Ariadne,"40 EUR in 2025, converted to EUR2020 with 0.82" oil,fuel,32.9876,EUR2020/MWh,Ariadne,"$2020 = 0.8775 EUR2020, 1bbl = 1.6998MWh" coal,fuel,10.6694,EUR2020/MWh,Ariadne,"$2020 = 0.8775 EUR2020, 1t = 8.06 MWh" decentral air-sourced heat pump,investment,1604,EUR2020/kW_th,https://ariadneprojekt.de/media/2024/01/Ariadne-Analyse_HeizkostenEmissionenGebaeude_Januar2024.pdf https://www.enpal.de/waermepumpe/kosten/ https://www.bdew.de/media/documents/BDEW-HKV_Altbau.pdf and cost reduction from DEA From 059f0526b618e7a15bb1a74ad33fb7be862101d8 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 1 Oct 2025 09:46:43 +0200 Subject: [PATCH 151/202] export decentral heat capacities --- scripts/pypsa-de/export_ariadne_variables.py | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index 3edf8c6fb..5a3b838eb 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -1000,6 +1000,10 @@ def _get_capacities(n, region, cap_func, cap_string="Capacity|"): like="heat pump" ).sum() + var[cap_string + "Decentral Heat|Resistive Heater"] = ( + capacities_decentral_heat.filter(like="resistive heater").sum() + ) + var[cap_string + "Decentral Heat|Biomass"] = capacities_decentral_heat.filter( like="biomass" ).sum() @@ -1008,6 +1012,18 @@ def _get_capacities(n, region, cap_func, cap_string="Capacity|"): like="gas" ).sum() + var[cap_string + "Decentral Heat|Oil"] = capacities_decentral_heat.filter( + like="oil" + ).sum() + + var[cap_string + "Decentral Heat|Storage Converter"] = capacities_decentral_heat[ + capacities_decentral_heat.index.str.contains("water tanks discharger") + ].sum() + + var[cap_string + "Decentral Heat|Storage Reservoir"] = storage_capacities[ + storage_capacities.index.str.contains("(?:decentral|rural) water tanks") + ].sum() + capacities_h2 = ( cap_func( bus_carrier="H2", @@ -1039,7 +1055,9 @@ def _get_capacities(n, region, cap_func, cap_string="Capacity|"): capacities_h2.get("H2 Electrolysis", 0) + var[cap_string + "Hydrogen|Gas"] ) - var[cap_string + "Hydrogen|Reservoir"] = storage_capacities.get("H2 Store", 0) + var[cap_string + "Hydrogen|Storage Reservoir"] = var[ + cap_string + "Hydrogen|Reservoir" + ] = storage_capacities.get("H2 Store", 0) capacities_gas = ( cap_func( From 9fc0af744de184be8297120c7a7fa851f512bc3e Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 1 Oct 2025 10:05:01 +0200 Subject: [PATCH 152/202] adjust upper solar limit for 2025 slightly upward --- config/config.de.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config/config.de.yaml b/config/config.de.yaml index 189dcd9aa..d6d7765c0 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -521,7 +521,7 @@ solving: solar: DE: 2020: 53.7 - 2025: 110 # EEG2023; assumes for 2026: 128 GW, assuming a fair share reached by end of 2025 + 2025: 115 # EEG2023; assumes for 2026: 128 GW, assuming a fair share reached by end of 2025 2030: 235 # PV Ziel 2030 + 20 GW 2035: 400 2040: 800 From cab999d9c0a4ebdedd1fa74580acd84c384dd66f Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 1 Oct 2025 10:05:54 +0200 Subject: [PATCH 153/202] transmission losses, stricter upper limit for EE, new LowRES scenario --- 
config/scenarios.manual.yaml | 119 ++++++++++++++++++++++++++++++----- 1 file changed, 102 insertions(+), 17 deletions(-) diff --git a/config/scenarios.manual.yaml b/config/scenarios.manual.yaml index 8d9b71f3c..9b0a51dac 100644 --- a/config/scenarios.manual.yaml +++ b/config/scenarios.manual.yaml @@ -7,7 +7,6 @@ HighDemand: transmission_limit: v1 solving: options: - transmission_losses: 0 noisy_costs: false constraints: decentral_heat_pump_budgets: @@ -19,18 +18,23 @@ HighDemand: DE: 2030: 115 # Wind-an-Land Law 2035: 157 # Wind-an-Land Law - 2040: 160 # Wind-an-Land Law offwind: DE: 2035: 40 # 40 Wind-auf-See Law - 2040: 50 # assuming 1/3 of buildout reached in 2040 - 2045: 70 # 70 Wind-auf-See Law solar: DE: # EEG2023; Ziel for 2024: 88 GW and for 2026: 128 GW, 2025: 101 # assuming at least 1/3 of buildout reached in 2025 2030: 215 # PV strategy 2035: 309 - 2040: 400 # PV strategy + limits_capacity_max: + Generator: + onwind: + DE: + 2030: 115 # EEG2023 Ziel für 2030 + 5 GW + solar: + DE: + 2030: 215 # PV Ziel 2030 + 2035: 309 first_technology_occurrence: Link: urban central water pits charger: 2040 # essentially moving pits out of the optimization horizon to forbid them @@ -45,7 +49,6 @@ HighDemandNoFlex: no_flex_lt_run: true solving: options: - transmission_losses: 0 noisy_costs: false constraints: decentral_heat_pump_budgets: @@ -57,18 +60,23 @@ HighDemandNoFlex: DE: 2030: 115 # Wind-an-Land Law 2035: 157 # Wind-an-Land Law - 2040: 160 # Wind-an-Land Law offwind: DE: 2035: 40 # 40 Wind-auf-See Law - 2040: 50 # assuming 1/3 of buildout reached in 2040 - 2045: 70 # 70 Wind-auf-See Law solar: DE: # EEG2023; Ziel for 2024: 88 GW and for 2026: 128 GW, 2025: 101 # assuming at least 1/3 of buildout reached in 2025 2030: 215 # PV strategy 2035: 309 - 2040: 400 # PV strategy + limits_capacity_max: + Generator: + onwind: + DE: + 2030: 115 # EEG2023 Ziel für 2030 + 5 GW + solar: + DE: + 2030: 215 # PV Ziel 2030 + 2035: 309 first_technology_occurrence: Link: urban central water pits charger: 2040 # essentially moving pits out of the optimization horizon to forbid them @@ -92,7 +100,6 @@ LowDemand: delay_years: 2 solving: options: - transmission_losses: 0 noisy_costs: false constraints: decentral_heat_pump_budgets: @@ -119,6 +126,15 @@ LowDemand: H2 electrolysis: DE: 2030: 0 + limits_capacity_max: + Generator: + onwind: + DE: + 2030: 115 # EEG2023 Ziel für 2030 + 5 GW + solar: + DE: + 2030: 215 # PV Ziel 2030 + 2035: 309 first_technology_occurrence: Link: urban central water pits charger: 2040 # essentially moving pits out of the optimization horizon to forbid them @@ -126,7 +142,61 @@ LowDemand: Store: urban central water pits: 2040 -LowDemandLowRES: +LowDemandNoFlex: + electricity: + transmission_limit: v1 + iiasa_database: + no_flex_lt_run: true + uba_for_industry: + - 2025 + - 2030 + - 2035 + uba_for_mobility: + - 2025 + - 2030 + - 2035 + offshore_nep_force: + delay_years: 2 + solving: + options: + noisy_costs: false + constraints: + decentral_heat_pump_budgets: + DE: # UBA MWMS, Projektionsbericht 2025, Abbildung 48 + 2025: 12.0 + 2030: 26.0 + 2035: 38.0 + limits_capacity_min: + Generator: + onwind: + DE: + 2030: 0 + 2035: 0 + offwind: + DE: + 2030: 0 + 2035: 0 + Link: + H2 electrolysis: + DE: + 2030: 0 + limits_capacity_max: + Generator: + onwind: + DE: + 2030: 115 # EEG2023 Ziel für 2030 + 5 GW + solar: + DE: + 2030: 215 # PV Ziel 2030 + 2035: 309 + first_technology_occurrence: + Link: + urban central water pits charger: 2040 # essentially moving pits out of the optimization horizon to 
forbid them + urban central water pits discharger: 2040 + Store: + urban central water pits: 2040 + +LowRES: electricity: transmission_limit: v1 iiasa_database: @@ -142,7 +212,6 @@ LowDemandLowRES: delay_years: 2 solving: options: - transmission_losses: 0 noisy_costs: false constraints: decentral_heat_pump_budgets: @@ -176,10 +245,10 @@ LowDemandLowRES: 2030: 100 offwind: DE: - 2030: 20 + 2030: 21 solar: DE: - 2030: 150 + 2030: 180 first_technology_occurrence: Link: urban central water pits charger: 2040 # essentially moving pits out of the optimization horizon to forbid them @@ -187,7 +256,8 @@ LowDemandLowRES: Store: urban central water pits: 2040 -LowDemandNoFlex: + +LowRESNoFlex: electricity: transmission_limit: v1 iiasa_database: @@ -204,7 +274,6 @@ LowDemandNoFlex: delay_years: 2 solving: options: - transmission_losses: 0 noisy_costs: false constraints: decentral_heat_pump_budgets: @@ -212,6 +281,11 @@ LowDemandNoFlex: 2025: 12.0 2030: 26.0 2035: 38.0 + # central_heat_pump_budgets: + # DE: # UBA MWMS, Projektionsbericht 2025, Abbildung 22 + # 2025: 7.0 + # 2030: 26.0 + # 2035: 52.0 limits_capacity_min: Generator: onwind: @@ -226,6 +300,17 @@ LowDemandNoFlex: H2 electrolysis: DE: 2030: 0 + limits_capacity_max: + Generator: + onwind: + DE: + 2030: 100 + offwind: + DE: + 2030: 21 + solar: + DE: + 2030: 180 first_technology_occurrence: Link: urban central water pits charger: 2040 # essentially moving pits out of the optimization horizon to forbid them From 69d6134fc322681e4702a95dc4c43f1315887c10 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 1 Oct 2025 10:12:42 +0200 Subject: [PATCH 154/202] setup LowRES run --- Snakefile | 2 +- config/config.de.yaml | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Snakefile b/Snakefile index fe2f28708..a3d90159a 100644 --- a/Snakefile +++ b/Snakefile @@ -602,7 +602,7 @@ rule modify_prenetwork: "networks/base_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}_final.nc" ), resources: - mem_mb=4000, + mem_mb=8000, log: RESULTS + "logs/modify_prenetwork_base_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.log", diff --git a/config/config.de.yaml b/config/config.de.yaml index d6d7765c0..c7d1d2a10 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -4,13 +4,13 @@ # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#run run: - prefix: 20250926_less_regret_runs + prefix: 20251001_low_RES name: # - ExPol # - KN2045_Mix # - HighDemand - - LowDemand - - LowDemandLowRES + - HighDemand + - LowRES #- HighDemandNoFlex # !!! CAVEAT The 'NoFlex' substring in the scenario name determines how the regrets are computed!!! 
#- LowDemandNoFlex # - KN2045_NFhoch From 50b43a5e2e259f016a31491bb1aa40cd279afb20 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 1 Oct 2025 14:01:45 +0200 Subject: [PATCH 155/202] allow OCGT as a last resort in ST runs --- scripts/pypsa-de/prepare_regret_network.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/scripts/pypsa-de/prepare_regret_network.py b/scripts/pypsa-de/prepare_regret_network.py index d1b05ce12..3e06662ae 100644 --- a/scripts/pypsa-de/prepare_regret_network.py +++ b/scripts/pypsa-de/prepare_regret_network.py @@ -512,6 +512,9 @@ def _unfix_bottlenecks(new, deci, name, extendable_i): # Biomass for 2035 when gas is banned "rural biomass boiler", "urban decentral biomass boiler", + # OCGT as last resort to avoid load shedding + # (previously the model sometimes expanded waste CHPs) + "OCGT", ] _idx = new.loc[new.carrier.isin(bottleneck_links)].index.intersection( extendable_i From 0150144ffd2fdaa55223831d795ac53930c8ccb8 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 1 Oct 2025 14:35:42 +0200 Subject: [PATCH 156/202] restrict OCGT expansion in ST models to DE --- scripts/pypsa-de/prepare_regret_network.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/scripts/pypsa-de/prepare_regret_network.py b/scripts/pypsa-de/prepare_regret_network.py index 3e06662ae..baac54480 100644 --- a/scripts/pypsa-de/prepare_regret_network.py +++ b/scripts/pypsa-de/prepare_regret_network.py @@ -512,15 +512,20 @@ def _unfix_bottlenecks(new, deci, name, extendable_i): # Biomass for 2035 when gas is banned "rural biomass boiler", "urban decentral biomass boiler", - # OCGT as last resort to avoid load shedding - # (previously the model sometimes expanded waste CHPs) - "OCGT", ] _idx = new.loc[new.carrier.isin(bottleneck_links)].index.intersection( extendable_i ) new.loc[_idx, "p_nom_extendable"] = True new.loc[_idx, "p_nom_min"] = deci.loc[_idx, "p_nom_opt"] + # OCGT as last resort to avoid load shedding + # allowed only in DE + # (previously the model sometimes expanded waste CHPs) + _idx = new.loc[ + (new.carrier == "OCGT") & (new.index.str.startswith("DE")) + ].index.intersection(extendable_i) + new.loc[_idx, "p_nom_extendable"] = True + new.loc[_idx, "p_nom_min"] = deci.loc[_idx, "p_nom_opt"] if name == "generators": fuels = [ From 23c4240b2ee5347aaa913cae523eac7e9f7ad0a6 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 1 Oct 2025 14:54:08 +0200 Subject: [PATCH 157/202] move demand_baselines from snakefile to config --- Snakefile | 5 ++++- config/config.de.yaml | 3 +++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/Snakefile b/Snakefile index a3d90159a..09763f203 100644 --- a/Snakefile +++ b/Snakefile @@ -1166,7 +1166,10 @@ rule plot_scenario_comparison_regrets: run=lambda w: [ r for r in config_provider("run", "name")(w) - if r in ["LowDemand", "HighDemand"] + if r + in config_provider("iiasa_database", "regret_run", "demand_baselines")( + w + ) ], decision=config_provider("run", "name"), allow_missing=True, diff --git a/config/config.de.yaml b/config/config.de.yaml index c7d1d2a10..fba3aacf9 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -46,6 +46,9 @@ iiasa_database: reference_scenario: KN2045_Mix region: Deutschland regret_run: + demand_baselines: # names of scenarios that define the demand variations to be considered in the short-term runs + - HighDemand + - LowDemand scope_to_fix: EU # Supported values are DE and EU strict: false # If false, the model allows capacity expansion for virtual 
links and bottlenecks h2_vent: true From 5df46bb550d009a8bf651c7fd3f83699eef01579 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 1 Oct 2025 15:02:23 +0200 Subject: [PATCH 158/202] redefine capacities in the LowDemand Scenario --- config/config.de.yaml | 2 +- config/scenarios.manual.yaml | 51 +++++++++++++++++++++++++----------- 2 files changed, 36 insertions(+), 17 deletions(-) diff --git a/config/config.de.yaml b/config/config.de.yaml index fba3aacf9..a6d4fc97b 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -48,7 +48,7 @@ iiasa_database: regret_run: demand_baselines: # names of scenarios that define the demand variations to be considered in the short-term runs - HighDemand - - LowDemand + - LowRES scope_to_fix: EU # Supported values are DE and EU strict: false # If false, the model allows capacity expansion for virtual links and bottlenecks h2_vent: true diff --git a/config/scenarios.manual.yaml b/config/scenarios.manual.yaml index 9b0a51dac..7837b7bc0 100644 --- a/config/scenarios.manual.yaml +++ b/config/scenarios.manual.yaml @@ -84,7 +84,7 @@ HighDemandNoFlex: Store: urban central water pits: 2040 -LowDemand: +LowDemand: # Demand and capacities from UBA electricity: transmission_limit: v1 iiasa_database: @@ -112,16 +112,21 @@ LowDemand: # 2025: 7.0 # 2030: 26.0 # 2035: 52.0 + # Capacities from UBA are here: https://datacube.uba.de/vis?lc=de&df[ds]=dc-release&df[id]=DF_CROSS_PROJECTION_REPORT_CORE_INDICATORS_25&df[ag]=UBA&df[vs]=1.0&av=true&dq=..INSTALLIERTE_LEISTUNG_GROSSBATTERIESPEICHER%2BINSTALLIERTE_LEISTUNG_WIND_AN_LAND%2BINSTALLIERTE_LEISTUNG_WIND_AUF_SEE%2BINSTALLIERTE_LEISTUNG_PV...&pd=2025%2C&to[TIME_PERIOD]=false&pg=0&vw=tb limits_capacity_min: Generator: onwind: DE: - 2030: 0 - 2035: 0 + 2030: 103.99 + 2035: 145.41 offwind: DE: - 2030: 0 - 2035: 0 + 2030: 26.77 + 2035: 39.97 + solar: + DE: + 2030: 214.55 + 2035: 308.60 Link: H2 electrolysis: DE: @@ -130,11 +135,16 @@ LowDemand: Generator: onwind: DE: - 2030: 115 # EEG2023 Ziel für 2030 + 5 GW + 2030: 103.99 + 2035: 145.41 + offwind: + DE: + 2030: 26.77 + 2035: 39.97 solar: DE: - 2030: 215 # PV Ziel 2030 - 2035: 309 + 2030: 214.55 + 2035: 308.60 first_technology_occurrence: Link: urban central water pits charger: 2040 # essentially moving pits out of the optimization horizon to forbid them @@ -170,12 +180,16 @@ LowDemandNoFlex: Generator: onwind: DE: - 2030: 0 - 2035: 0 + 2030: 103.99 + 2035: 145.41 offwind: DE: - 2030: 0 - 2035: 0 + 2030: 26.77 + 2035: 39.97 + solar: + DE: + 2030: 214.55 + 2035: 308.60 Link: H2 electrolysis: DE: @@ -184,11 +198,16 @@ LowDemandNoFlex: Generator: onwind: DE: - 2030: 115 # EEG2023 Ziel für 2030 + 5 GW + 2030: 103.99 + 2035: 145.41 + offwind: + DE: + 2030: 26.77 + 2035: 39.97 solar: DE: - 2030: 215 # PV Ziel 2030 - 2035: 309 + 2030: 214.55 + 2035: 308.60 first_technology_occurrence: Link: urban central water pits charger: 2040 # essentially moving pits out of the optimization horizon to forbid them @@ -196,7 +215,7 @@ LowDemandNoFlex: Store: urban central water pits: 2040 -LowRES: +LowRES: # Demand from UBA, lower RES capacities electricity: transmission_limit: v1 iiasa_database: From 793b7b7bbb76f8a51f5a528a9a5c498382e76ef1 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 1 Oct 2025 17:15:58 +0200 Subject: [PATCH 159/202] use query to hopefully avoid fragmentation --- scripts/pypsa-de/additional_functionality.py | 36 +++++++------------- 1 file changed, 12 insertions(+), 24 deletions(-) diff --git a/scripts/pypsa-de/additional_functionality.py 
b/scripts/pypsa-de/additional_functionality.py index 5c4afc97e..c700fbbd0 100644 --- a/scripts/pypsa-de/additional_functionality.py +++ b/scripts/pypsa-de/additional_functionality.py @@ -119,30 +119,18 @@ def add_power_limits(n, investment_year, limits_power_max): ) # identify interconnectors - incoming_lines = n.lines[ - (n.lines.carrier == "AC") - & (n.lines.bus0.str[:2] != ct) - & (n.lines.bus1.str[:2] == ct) - & n.lines.active - ] - outgoing_lines = n.lines[ - (n.lines.carrier == "AC") - & (n.lines.bus0.str[:2] == ct) - & (n.lines.bus1.str[:2] != ct) - & n.lines.active - ] - incoming_links = n.links[ - (n.links.carrier == "DC") - & (n.links.bus0.str[:2] != ct) - & (n.links.bus1.str[:2] == ct) - & n.links.active - ] - outgoing_links = n.links[ - (n.links.carrier == "DC") - & (n.links.bus0.str[:2] == ct) - & (n.links.bus1.str[:2] != ct) - & n.links.active - ] + incoming_lines = n.lines.query( + f"carrier == 'AC' and active and bus0.str[:2] != {ct} and bus1.str[:2] == {ct}" + ) + outgoing_lines = n.lines.query( + f"carrier == 'AC' and active and bus0.str[:2] == {ct} and bus1.str[:2] != {ct}" + ) + incoming_links = n.links.query( + f"carrier == 'DC' and active and bus0.str[:2] != {ct} and bus1.str[:2] == {ct}" + ) + outgoing_links = n.links.query( + f"carrier == 'DC' and active and bus0.str[:2] == {ct} and bus1.str[:2] != {ct}" + ) for t in n.snapshots: # For incoming flows s > 0 means imports, s < 0 exports From de2e86c42abffffccae74ceef6c89a4b9d582d8a Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Thu, 2 Oct 2025 16:03:13 +0200 Subject: [PATCH 160/202] fix query --- scripts/pypsa-de/additional_functionality.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/scripts/pypsa-de/additional_functionality.py b/scripts/pypsa-de/additional_functionality.py index c700fbbd0..87a9b6433 100644 --- a/scripts/pypsa-de/additional_functionality.py +++ b/scripts/pypsa-de/additional_functionality.py @@ -120,16 +120,16 @@ def add_power_limits(n, investment_year, limits_power_max): # identify interconnectors incoming_lines = n.lines.query( - f"carrier == 'AC' and active and bus0.str[:2] != {ct} and bus1.str[:2] == {ct}" + f"not bus0.str.startswith('{ct}') and bus1.str.startswith('{ct}') and active" ) outgoing_lines = n.lines.query( - f"carrier == 'AC' and active and bus0.str[:2] == {ct} and bus1.str[:2] != {ct}" + f"bus0.str.startswith('{ct}') and not bus1.str.startswith('{ct}') and active" ) incoming_links = n.links.query( - f"carrier == 'DC' and active and bus0.str[:2] != {ct} and bus1.str[:2] == {ct}" + f"not bus0.str.startswith('{ct}') and bus1.str.startswith('{ct}') and carrier == 'DC' and active" ) outgoing_links = n.links.query( - f"carrier == 'DC' and active and bus0.str[:2] == {ct} and bus1.str[:2] != {ct}" + f"bus0.str.startswith('{ct}') and not bus1.str.startswith('{ct}') and carrier == 'DC' and active" ) for t in n.snapshots: From ce4571341f5d6083c2eda5f8e3c4afcf0ab04394 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Thu, 2 Oct 2025 16:29:21 +0200 Subject: [PATCH 161/202] basic setup for st_runs with lowered RES --- Snakefile | 147 +++++++++++- config/config.de.yaml | 2 + scripts/pypsa-de/additional_functionality.py | 6 +- .../pypsa-de/prepare_st_low_res_network.py | 227 ++++++++++++++++++ scripts/pypsa-de/solve_st_low_res_network.py | 87 +++++++ 5 files changed, 455 insertions(+), 14 deletions(-) create mode 100644 scripts/pypsa-de/prepare_st_low_res_network.py create mode 100644 scripts/pypsa-de/solve_st_low_res_network.py diff --git a/Snakefile b/Snakefile 
index 09763f203..5737c93e8 100644 --- a/Snakefile +++ b/Snakefile @@ -990,6 +990,141 @@ rule ariadne_report_only: ), +def get_st_sensitivities(w): + dirs = ["base"] + sensitivities = config_provider("iiasa_database", "regret_run", "st_sensitivities")( + w + ) + if sensitivities is None: + return dirs + for sens in sensitivities: + dirs.append(f"{sens}") + return dirs + + +rule prepare_st_low_res_network: + params: + solving=config_provider("solving"), + foresight=config_provider("foresight"), + co2_sequestration_potential=config_provider( + "sector", "co2_sequestration_potential", default=200 + ), + scope_to_fix=config_provider("iiasa_database", "regret_run", "scope_to_fix"), + h2_vent=config_provider("iiasa_database", "regret_run", "h2_vent"), + strict=config_provider("iiasa_database", "regret_run", "strict"), + unit_commitment=config_provider( + "iiasa_database", "regret_run", "unit_commitment" + ), + scale_cross_border_elec_capa=config_provider( + "iiasa_database", "regret_run", "scale_cross_border_elec_capa" + ), + input: + network=RESULTS + + "networks/base_s_{clusters}_{opts}_{sector_opts}_{st_years}.nc", + output: + st_low_res_prenetwork=RESULTS + + "st_low_res_prenetworks/base_s_{clusters}_{opts}_{sector_opts}_{st_years}.nc", + resources: + mem_mb=16000, + log: + RESULTS + + "logs/st_low_res_prenetwork_s_{clusters}_{opts}_{sector_opts}_{st_years}.log", + script: + "scripts/pypsa-de/prepare_st_low_res_network.py" + + +rule solve_st_low_res_network: + params: + st_sensitivity="{sensitivity}", + solving=config_provider("solving"), + regret_run=True, + energy_year=config_provider("energy", "energy_totals_year"), + custom_extra_functionality=input_custom_extra_functionality, + input: + st_low_res_prenetwork=RESULTS + + "st_low_res_prenetworks/base_s_{clusters}_{opts}_{sector_opts}_{st_years}.nc", + co2_totals_name=resources("co2_totals.csv"), + energy_totals=resources("energy_totals.csv"), + output: + st_low_res_network=RESULTS + + "st_low_res_networks/{sensitivity}/base_s_{clusters}_{opts}_{sector_opts}_{st_years}.nc", + shadow: + shadow_config + log: + solver=RESULTS + + "logs/st_low_res_networks/{sensitivity}/base_s_{clusters}_{opts}_{sector_opts}_{st_years}_solver.log", + memory=RESULTS + + "logs/st_low_res_networks/{sensitivity}/base_s_{clusters}_{opts}_{sector_opts}_{st_years}_memory.log", + python=RESULTS + + "logs/st_low_res_networks/{sensitivity}/base_s_{clusters}_{opts}_{sector_opts}_{st_years}_python.log", + threads: solver_threads + resources: + mem_mb=config_provider("solving", "mem_mb"), + runtime=config_provider("solving", "runtime", default="6h"), + script: + "scripts/pypsa-de/solve_st_low_res_network.py" + + +use rule export_ariadne_variables as export_st_variables with: + input: + template="data/template_ariadne_database.xlsx", + industry_demands=expand( + resources( + "industrial_energy_demand_base_s_{clusters}_{planning_horizons}.csv" + ), + **config["scenario"], + allow_missing=True, + ), + networks=expand( + RESULTS + + "networks/base_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", + **config["scenario"], + allow_missing=True, + ), + costs=expand( + resources("costs_{planning_horizons}.csv"), + **config["scenario"], + allow_missing=True, + ), + industrial_production_per_country_tomorrow=expand( + resources( + "industrial_production_per_country_tomorrow_{planning_horizons}-modified.csv" + ), + **config["scenario"], + allow_missing=True, + ), + industry_sector_ratios=expand( + resources("industry_sector_ratios_{planning_horizons}.csv"), + 
**config["scenario"], + allow_missing=True, + ), + industrial_production=resources("industrial_production_per_country.csv"), + energy_totals=resources("energy_totals.csv"), + st_low_res_networks=expand( + RESULTS + + "st_low_res_networks/{sensitivity}/base_s_{clusters}_{opts}_{sector_opts}_{st_years}.nc", + **config["scenario"], + st_years=config_provider("iiasa_database", "regret_run", "st_years"), + allow_missing=True, + ), + output: + exported_variables=RESULTS + + "st_low_res_variables/{sensitivity}/st_low_res_variables.xlsx", + exported_variables_full=RESULTS + + "st_low_res_variables/{sensitivity}/st_low_res_variables_full.xlsx", + log: + RESULTS + "logs/st_low_res_variables/{sensitivity}/st_low_res_variables.log", + + +rule st_all: + input: + expand( + RESULTS + "st_low_res_variables/{sensitivity}/st_low_res_variables.xlsx", + sensitivity=get_st_sensitivities, + run=config_provider("run", "name"), + ), + + rule prepare_regret_network: params: solving=config_provider("solving"), @@ -1129,18 +1264,6 @@ rule regret_base: + "/scenario_comparison/regret_networks/Price-Carbon.png", -def get_st_sensitivities(w): - dirs = ["base"] - sensitivities = config_provider("iiasa_database", "regret_run", "st_sensitivities")( - w - ) - if sensitivities is None: - return dirs - for sens in sensitivities: - dirs.append(f"{sens}") - return dirs - - rule regret_all: input: lambda w: expand( diff --git a/config/config.de.yaml b/config/config.de.yaml index a6d4fc97b..63edb16cc 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -56,6 +56,8 @@ iiasa_database: #- no_flex #- gas_price_60 #- gas_price_80 + st_years: + - 2030 unit_commitment: enable: false params: custom # options: conservative, average, optimistic, custom diff --git a/scripts/pypsa-de/additional_functionality.py b/scripts/pypsa-de/additional_functionality.py index 87a9b6433..5c1a262a2 100644 --- a/scripts/pypsa-de/additional_functionality.py +++ b/scripts/pypsa-de/additional_functionality.py @@ -860,8 +860,10 @@ def add_empty_co2_atmosphere_store_constraint(n): def additional_functionality(n, snapshots, snakemake): logger.info("Adding Ariadne-specific functionality") - - investment_year = int(snakemake.wildcards.planning_horizons[-4:]) + try: + investment_year = int(snakemake.wildcards.planning_horizons[-4:]) + except AttributeError: + investment_year = int(snakemake.wildcards.st_years[-4:]) constraints = snakemake.params.solving["constraints"] if not snakemake.params.get("regret_run"): diff --git a/scripts/pypsa-de/prepare_st_low_res_network.py b/scripts/pypsa-de/prepare_st_low_res_network.py new file mode 100644 index 000000000..53972de0f --- /dev/null +++ b/scripts/pypsa-de/prepare_st_low_res_network.py @@ -0,0 +1,227 @@ +import importlib.util +import logging +import pathlib + +import numpy as np +import pypsa + +from scripts._helpers import ( + configure_logging, + mock_snakemake, + set_scenario_config, + update_config_from_wildcards, +) +from scripts.solve_network import prepare_network + + +def _load_attr_from_file(filename: str, attr_name: str) -> object: + """ + Load attribute attr_name from a local python file given by filename (including '.py'). 
+ """ + if not filename.endswith(".py"): + raise ValueError("filename must include the '.py' extension") + module_stem = pathlib.Path(filename).stem + _spec_path = pathlib.Path(__file__).resolve().parent / filename + _spec = importlib.util.spec_from_file_location( + f"scripts.pypsa_de.{module_stem}", _spec_path + ) + _mod = importlib.util.module_from_spec(_spec) + assert _spec is not None and _spec.loader is not None + _spec.loader.exec_module(_mod) + return getattr(_mod, attr_name) + + +_unfix_bottlenecks = _load_attr_from_file( + "prepare_regret_network.py", "_unfix_bottlenecks" +) +remove_flexibility_options = _load_attr_from_file( + "modify_prenetwork.py", "remove_flexibility_options" +) + + +logger = logging.getLogger(__name__) + +eeg_targets = { + 2030: { + "solar": 215000, + "onwind": 115000, + "offwind": 30000, + } +} + +co2_prices = { + 2030: 220, +} + + +def scale_new_res_to_target(n, targets, year, ratio=1.0): + for tech, target in targets[year].items(): + logger.info( + f"Scaling installed capacity of {tech} in DE to target of {target * ratio} MW." + ) + gens = n.generators.query( + f"carrier.str.contains('{tech}') and not carrier.str.contains('solar thermal') and index.str.startswith('DE')" + ) + existing_gens = gens[gens.build_year < year] + new_gens = gens[gens.build_year == year] + existing_cap = existing_gens.p_nom_opt.sum() + new_cap = new_gens.p_nom_opt.sum() + assert np.isclose(existing_cap + new_cap, gens.p_nom_opt.sum()) + + scale_factor = (target * ratio - existing_cap) / new_cap + + n.generators.loc[new_gens.index, "p_nom_opt"] *= scale_factor + n.generators.loc[new_gens.index, "p_nom"] *= scale_factor + + +def fix_capacities(n_lt, no_flex=False): + n = n_lt.copy() + + nominal_attrs = { + "generators": "p_nom", + "lines": "s_nom", + "links": "p_nom", + "stores": "e_nom", + } + + for name, attr in nominal_attrs.items(): + new = getattr(n, name) + lt = getattr(n_lt, name) + + extendable_i = new.query(f"{attr}_extendable").index + + new.loc[extendable_i, attr + "_extendable"] = False + new.loc[extendable_i, attr] = new.loc[extendable_i, attr + "_opt"] + + _unfix_bottlenecks(new, lt, name, extendable_i) + + # The CO2 constraints on atmosphere and sequestration need extendable stores to work correctly + if name == "stores": + logger.info("Freeing co2 atmosphere and sequestered stores.") + # there is only one co2 atmosphere store which should always be extendable, hence no intersection with extendable_i needed + _idx = new.query("carrier == 'co2'").index + new.loc[_idx, "e_nom_extendable"] = True + # co2 sequestered stores from previous planning horizons should not be extendable + _idx = new.query("carrier == 'co2 sequestered'").index.intersection( + extendable_i + ) + new.loc[_idx, "e_nom_extendable"] = True + + # Above several assets are switched to extendable again, for these the p_nom value is restored to the value from the decision network + + _idx = new.query(f"{attr}_extendable").index + + new.loc[_idx, attr] = lt.loc[_idx, attr] + + if no_flex: + logger.info("Realization network is from a run without flexibility.") + remove_flexibility_options(n) + return n + + +if __name__ == "__main__": + if "snakemake" not in globals(): + snakemake = mock_snakemake( + "prepare_st_low_res_network", + clusters=27, + opts="", + sector_opts="none", + st_years="2030", + run="HighDemand", + ) + + configure_logging(snakemake) + + configure_logging(snakemake) + set_scenario_config(snakemake) + update_config_from_wildcards(snakemake.config, snakemake.wildcards) + + n_lt = 
pypsa.Network(snakemake.input.network) + st_years = snakemake.wildcards.get("st_years", None) + logging_frequency = snakemake.config.get("solving", {}).get( + "mem_logging_frequency", 30 + ) + solve_opts = snakemake.params.solving["options"] + assert solve_opts["noisy_costs"] == False, ( + "Noisy costs should not be used in regret runs." + ) + np.random.seed(solve_opts.get("seed", 123)) + + strict = snakemake.params["strict"] + scope_to_fix = snakemake.params["scope_to_fix"] + h2_vent = snakemake.params["h2_vent"] + + n = fix_capacities(n_lt, snakemake.params.get("no_flex_lt_run", False)) + + scale_new_res_to_target(n, eeg_targets, int(st_years), ratio=1.0) + + if h2_vent: + logger.info("H2 venting activated for short-term run.") + n.add("Carrier", "H2 vent", color="#dd2e23", nice_name="H2 vent") + + n.add( + "Generator", + n.buses.query("carrier=='H2'").index, + " vent", + bus=n.buses.query("carrier=='H2'").index, + carrier="H2 vent", + sign=-1, + marginal_cost=1, + p_nom=1e6, + ) + + # Manipulating the global constraints + to_keep = [ + "biomass limit", + "unsustainable biomass limit", + "co2_sequestration_limit", + "CO2Limit", + "co2_limit-DE", + ] + + to_drop = n.global_constraints.index.difference(to_keep) + logger.info("Short-term run detected. Dropping the following constraints:") + logger.info(to_drop) + + n.global_constraints.drop(to_drop, inplace=True) + + # If running with post-discretization the last lines of optimize_transmission_expansion_iteratively have to be undone for the operational run + if solve_opts["post_discretization"].get("enable") and not solve_opts.get( + "skip_iterations" + ): + n.lines.s_nom_extendable = False + n.lines.s_nom = n.lines.s_nom_opt + + discretized_links = n.links.query( + f"carrier in {list(solve_opts['post_discretization'].get('link_unit_size').keys())}" + ).index + n.links.loc[discretized_links, "p_nom_extendable"] = False + n.links.loc[discretized_links, "p_nom"] = n.links.loc[ + discretized_links, "p_nom_opt" + ] + + prepare_network( + n, + solve_opts=snakemake.params.solving["options"], + foresight=snakemake.params.foresight, + planning_horizons=st_years, + co2_sequestration_potential=snakemake.params["co2_sequestration_potential"], + limit_max_growth=snakemake.params.get("sector", {}).get("limit_max_growth"), + regret_run=True, + ) + + n.add( + "Generator", + "co2 atmosphere", + bus="co2 atmosphere", + p_min_pu=-1, + p_max_pu=0, + p_nom_extendable=True, + carrier="co2", + marginal_cost=co2_prices[int(st_years)], + ) + logger.info("Adding negative CO2 generator and dropping co2 limits.") + n.global_constraints.drop("CO2Limit", inplace=True) + n.global_constraints.drop("co2_limit-DE", inplace=True) + + n.export_to_netcdf(snakemake.output.st_low_res_prenetwork) diff --git a/scripts/pypsa-de/solve_st_low_res_network.py b/scripts/pypsa-de/solve_st_low_res_network.py new file mode 100644 index 000000000..a2e12f0b9 --- /dev/null +++ b/scripts/pypsa-de/solve_st_low_res_network.py @@ -0,0 +1,87 @@ +import importlib.util +import logging +import pathlib +import re + +import numpy as np +import pypsa + +from scripts._benchmark import memory_logger +from scripts._helpers import ( + configure_logging, + mock_snakemake, + set_scenario_config, + update_config_from_wildcards, +) +from scripts.solve_network import solve_network + +_spec_path = pathlib.Path(__file__).resolve().parent / "modify_prenetwork.py" +_spec = importlib.util.spec_from_file_location( + "scripts.pypsa_de.modify_prenetwork", _spec_path +) +_modify_prenetwork = 
importlib.util.module_from_spec(_spec) +_spec.loader.exec_module(_modify_prenetwork) +remove_flexibility_options = _modify_prenetwork.remove_flexibility_options + + +logger = logging.getLogger(__name__) + +if __name__ == "__main__": + if "snakemake" not in globals(): + snakemake = mock_snakemake( + "solve_st_low_res_network", + clusters=27, + opts="", + sector_opts="none", + planning_horizons="2030", + decision="LowDemand", + run="HighDemand", + sensitivity="base", + ) + + configure_logging(snakemake) + set_scenario_config(snakemake) + update_config_from_wildcards(snakemake.config, snakemake.wildcards) + + n = pypsa.Network(snakemake.input.st_low_res_prenetwork) + + planning_horizons = snakemake.wildcards.get("planning_horizons", None) + logging_frequency = snakemake.config.get("solving", {}).get( + "mem_logging_frequency", 30 + ) + np.random.seed(snakemake.params.solving["options"].get("seed", 123)) + + if "no_flex" in snakemake.params.st_sensitivity: + logger.info( + "Running sensitivity of the short term model with less flexibility options." + ) + remove_flexibility_options(n) + + gas_price = re.findall(r"gas_price_(\d{2,3})", snakemake.params.st_sensitivity) + if gas_price: + gas_price = int(gas_price[0]) + logger.info( + f"Running sensitivity of the short term model with gas price set to {gas_price} €/MWh." + ) + n.generators.loc[n.generators.carrier == "gas primary", "marginal_cost"] = ( + gas_price + ) + + with memory_logger( + filename=getattr(snakemake.log, "memory", None), interval=logging_frequency + ) as mem: + solve_network( + n, + config=snakemake.config, + params=snakemake.params, + solving=snakemake.params.solving, + planning_horizons=planning_horizons, + rule_name=snakemake.rule, + log_fn=snakemake.log.solver, + snakemake=snakemake, + ) + logger.info(f"Maximum memory usage: {mem.mem_usage}") + + n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards))) + + n.export_to_netcdf(snakemake.output.st_low_res_network) From 82f4fb95e82299bc35e311bf565175134c3fe1ee Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Thu, 2 Oct 2025 18:01:34 +0200 Subject: [PATCH 162/202] first version of st variations should work now --- Snakefile | 27 ++++++++++--------- config/config.de.yaml | 8 ++++++ scripts/pypsa-de/export_ariadne_variables.py | 4 +++ .../pypsa-de/prepare_st_low_res_network.py | 10 ++++--- 4 files changed, 34 insertions(+), 15 deletions(-) diff --git a/Snakefile b/Snakefile index 5737c93e8..5279d7434 100644 --- a/Snakefile +++ b/Snakefile @@ -1023,12 +1023,12 @@ rule prepare_st_low_res_network: + "networks/base_s_{clusters}_{opts}_{sector_opts}_{st_years}.nc", output: st_low_res_prenetwork=RESULTS - + "st_low_res_prenetworks/base_s_{clusters}_{opts}_{sector_opts}_{st_years}.nc", + + "st_low_res_prenetworks/base_s_{clusters}_{opts}_{sector_opts}_{st_years}_eeg_level_{eeg_level}.nc", resources: mem_mb=16000, log: RESULTS - + "logs/st_low_res_prenetwork_s_{clusters}_{opts}_{sector_opts}_{st_years}.log", + + "logs/st_low_res_prenetwork_s_{clusters}_{opts}_{sector_opts}_{st_years}_eeg_level_{eeg_level}.log", script: "scripts/pypsa-de/prepare_st_low_res_network.py" @@ -1042,21 +1042,21 @@ rule solve_st_low_res_network: custom_extra_functionality=input_custom_extra_functionality, input: st_low_res_prenetwork=RESULTS - + "st_low_res_prenetworks/base_s_{clusters}_{opts}_{sector_opts}_{st_years}.nc", + + "st_low_res_prenetworks/base_s_{clusters}_{opts}_{sector_opts}_{st_years}_eeg_level_{eeg_level}.nc", co2_totals_name=resources("co2_totals.csv"), 
energy_totals=resources("energy_totals.csv"), output: st_low_res_network=RESULTS - + "st_low_res_networks/{sensitivity}/base_s_{clusters}_{opts}_{sector_opts}_{st_years}.nc", + + "st_low_res_networks/{sensitivity}/base_s_{clusters}_{opts}_{sector_opts}_{st_years}_eeg_level_{eeg_level}.nc", shadow: shadow_config log: solver=RESULTS - + "logs/st_low_res_networks/{sensitivity}/base_s_{clusters}_{opts}_{sector_opts}_{st_years}_solver.log", + + "logs/st_low_res_networks/{sensitivity}/base_s_{clusters}_{opts}_{sector_opts}_{st_years}_eeg_level_{eeg_level}_solver.log", memory=RESULTS - + "logs/st_low_res_networks/{sensitivity}/base_s_{clusters}_{opts}_{sector_opts}_{st_years}_memory.log", + + "logs/st_low_res_networks/{sensitivity}/base_s_{clusters}_{opts}_{sector_opts}_{st_years}_eeg_level_{eeg_level}_memory.log", python=RESULTS - + "logs/st_low_res_networks/{sensitivity}/base_s_{clusters}_{opts}_{sector_opts}_{st_years}_python.log", + + "logs/st_low_res_networks/{sensitivity}/base_s_{clusters}_{opts}_{sector_opts}_{st_years}_eeg_level_{eeg_level}_python.log", threads: solver_threads resources: mem_mb=config_provider("solving", "mem_mb"), @@ -1102,25 +1102,28 @@ use rule export_ariadne_variables as export_st_variables with: energy_totals=resources("energy_totals.csv"), st_low_res_networks=expand( RESULTS - + "st_low_res_networks/{sensitivity}/base_s_{clusters}_{opts}_{sector_opts}_{st_years}.nc", + + "st_low_res_networks/{sensitivity}/base_s_{clusters}_{opts}_{sector_opts}_{st_years}_eeg_level_{eeg_level}.nc", **config["scenario"], st_years=config_provider("iiasa_database", "regret_run", "st_years"), allow_missing=True, ), output: exported_variables=RESULTS - + "st_low_res_variables/{sensitivity}/st_low_res_variables.xlsx", + + "st_low_res_variables/{sensitivity}/st_low_res_variables_eeg_level_{eeg_level}.xlsx", exported_variables_full=RESULTS - + "st_low_res_variables/{sensitivity}/st_low_res_variables_full.xlsx", + + "st_low_res_variables/{sensitivity}/st_low_res_variables_full_eeg_level_{eeg_level}.xlsx", log: - RESULTS + "logs/st_low_res_variables/{sensitivity}/st_low_res_variables.log", + RESULTS + + "logs/st_low_res_variables/{sensitivity}/st_low_res_variables_eeg_level_{eeg_level}.log", rule st_all: input: expand( - RESULTS + "st_low_res_variables/{sensitivity}/st_low_res_variables.xlsx", + RESULTS + + "st_low_res_variables/{sensitivity}/st_low_res_variables_eeg_level_{eeg_level}.xlsx", sensitivity=get_st_sensitivities, + eeg_level=config_provider("iiasa_database", "regret_run", "EEG_levels"), run=config_provider("run", "name"), ), diff --git a/config/config.de.yaml b/config/config.de.yaml index 63edb16cc..8e275f021 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -58,6 +58,14 @@ iiasa_database: #- gas_price_80 st_years: - 2030 + EEG_levels: + - 1.00 + # - 0.95 + # - 0.90 + - 0.85 + # - 0.80 + # - 0.75 + - 0.70 unit_commitment: enable: false params: custom # options: conservative, average, optimistic, custom diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index 5a3b838eb..a083e194a 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -5542,6 +5542,10 @@ def get_data( # Load data _networks = [pypsa.Network(fn) for fn in snakemake.input.networks] + if snakemake.input.st_low_res_networks: + _networks_st = [pypsa.Network(fn) for fn in snakemake.input.st_low_res_networks] + N_st = len(_networks_st) + _networks[-N_st:] = _networks_st nhours = 
_networks[0].snapshot_weightings.generators.sum() nyears = nhours / 8760 diff --git a/scripts/pypsa-de/prepare_st_low_res_network.py b/scripts/pypsa-de/prepare_st_low_res_network.py index 53972de0f..640b4c96c 100644 --- a/scripts/pypsa-de/prepare_st_low_res_network.py +++ b/scripts/pypsa-de/prepare_st_low_res_network.py @@ -128,6 +128,7 @@ def fix_capacities(n_lt, no_flex=False): sector_opts="none", st_years="2030", run="HighDemand", + eeg_level=0.7, ) configure_logging(snakemake) @@ -153,7 +154,9 @@ def fix_capacities(n_lt, no_flex=False): n = fix_capacities(n_lt, snakemake.params.get("no_flex_lt_run", False)) - scale_new_res_to_target(n, eeg_targets, int(st_years), ratio=1.0) + scale_new_res_to_target( + n, eeg_targets, int(st_years), ratio=float(snakemake.wildcards.eeg_level) + ) if h2_vent: logger.info("H2 venting activated for short-term run.") @@ -210,6 +213,8 @@ def fix_capacities(n_lt, no_flex=False): regret_run=True, ) + logger.info("Adding negative CO2 generator and dropping co2 limits.") + n.add( "Generator", "co2 atmosphere", @@ -218,9 +223,8 @@ def fix_capacities(n_lt, no_flex=False): p_max_pu=0, p_nom_extendable=True, carrier="co2", - marginal_cost=co2_prices[int(st_years)], + marginal_cost=-co2_prices[int(st_years)], ) - logger.info("Adding negative CO2 generator and dropping co2 limits.") n.global_constraints.drop("CO2Limit", inplace=True) n.global_constraints.drop("co2_limit-DE", inplace=True) From f126ab928ce5bc1dc17b8873816d16b73fd53f65 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Sun, 5 Oct 2025 14:09:19 +0200 Subject: [PATCH 163/202] fix no_flex_lt_runs --- Snakefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Snakefile b/Snakefile index 5279d7434..d7b89a05c 100644 --- a/Snakefile +++ b/Snakefile @@ -523,7 +523,7 @@ rule modify_district_heat_share: rule modify_prenetwork: params: - no_flex_lt_run=config_provider("iiasa-database", "no_flex_lt_run"), + no_flex_lt_run=config_provider("iiasa_database", "no_flex_lt_run"), efuel_export_ban=config_provider("solving", "constraints", "efuel_export_ban"), enable_kernnetz=config_provider("wasserstoff_kernnetz", "enable"), costs=config_provider("costs"), From a14504b55b4f9e09716dd401936f0fb63fa0b1cd Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Tue, 7 Oct 2025 14:32:42 +0200 Subject: [PATCH 164/202] improve linopy syntax --- scripts/pypsa-de/additional_functionality.py | 72 ++++++++++---------- 1 file changed, 36 insertions(+), 36 deletions(-) diff --git a/scripts/pypsa-de/additional_functionality.py b/scripts/pypsa-de/additional_functionality.py index 5c1a262a2..9198dc977 100644 --- a/scripts/pypsa-de/additional_functionality.py +++ b/scripts/pypsa-de/additional_functionality.py @@ -136,106 +136,106 @@ def add_power_limits(n, investment_year, limits_power_max): # For incoming flows s > 0 means imports, s < 0 exports # For outgoing flows s > 0 means exports, s < 0 imports # to get the positive and negative parts separately, we use auxiliary variables - incoming_lines_var = n.model["Line-s"].loc[t, incoming_lines.index] - n.model.add_variables( - coords=[incoming_lines.index], + incoming_lines_var = n.model["Line-s"].sel( + snapshot=t, Line=incoming_lines.index + ) + incoming_lines_aux_pos = n.model.add_variables( name=f"Line-s-incoming-{ct}-aux-pos-{t}", lower=0, upper=incoming_lines.s_nom_max, ) - n.model.add_variables( - coords=[incoming_lines.index], + incoming_lines_aux_neg = n.model.add_variables( name=f"Line-s-incoming-{ct}-aux-neg-{t}", lower=-incoming_lines.s_nom_max, upper=0, ) 
n.model.add_constraints( - n.model[f"Line-s-incoming-{ct}-aux-pos-{t}"] >= incoming_lines_var, + incoming_lines_aux_pos >= incoming_lines_var, name=f"Line-s-incoming-{ct}-aux-pos-constr-{t}", ) n.model.add_constraints( - n.model[f"Line-s-incoming-{ct}-aux-neg-{t}"] <= incoming_lines_var, + incoming_lines_aux_neg <= incoming_lines_var, name=f"Line-s-incoming-{ct}-aux-neg-constr-{t}", ) - outgoing_lines_var = n.model["Line-s"].loc[t, outgoing_lines.index] - n.model.add_variables( - coords=[outgoing_lines.index], + outgoing_lines_var = n.model["Line-s"].sel( + snapshot=t, Line=outgoing_lines.index + ) + outgoing_lines_aux_pos = n.model.add_variables( name=f"Line-s-outgoing-{ct}-aux-pos-{t}", lower=0, upper=outgoing_lines.s_nom_max, ) - n.model.add_variables( - coords=[outgoing_lines.index], + outgoing_lines_aux_neg = n.model.add_variables( name=f"Line-s-outgoing-{ct}-aux-neg-{t}", lower=-outgoing_lines.s_nom_max, upper=0, ) n.model.add_constraints( - n.model[f"Line-s-outgoing-{ct}-aux-pos-{t}"] >= outgoing_lines_var, + outgoing_lines_aux_pos >= outgoing_lines_var, name=f"Line-s-outgoing-{ct}-aux-pos-constr-{t}", ) n.model.add_constraints( - n.model[f"Line-s-outgoing-{ct}-aux-neg-{t}"] <= outgoing_lines_var, + outgoing_lines_aux_neg <= outgoing_lines_var, name=f"Line-s-outgoing-{ct}-aux-neg-constr-{t}", ) - incoming_links_var = n.model["Link-p"].loc[t, incoming_links.index] - n.model.add_variables( - coords=[incoming_links.index], + incoming_links_var = n.model["Link-p"].sel( + snapshot=t, Link=incoming_links.index + ) + incoming_links_aux_pos = n.model.add_variables( name=f"Link-p-incoming-{ct}-aux-pos-{t}", lower=0, upper=incoming_links.p_nom_max, ) - n.model.add_variables( - coords=[incoming_links.index], + incoming_links_aux_neg = n.model.add_variables( name=f"Link-p-incoming-{ct}-aux-neg-{t}", lower=-incoming_links.p_nom_max, upper=0, ) n.model.add_constraints( - n.model[f"Link-p-incoming-{ct}-aux-pos-{t}"] >= incoming_links_var, + incoming_links_aux_pos >= incoming_links_var, name=f"Link-p-incoming-{ct}-aux-pos-constr-{t}", ) n.model.add_constraints( - n.model[f"Link-p-incoming-{ct}-aux-neg-{t}"] <= incoming_links_var, + incoming_links_aux_neg <= incoming_links_var, name=f"Link-p-incoming-{ct}-aux-neg-constr-{t}", ) - outgoing_links_var = n.model["Link-p"].loc[t, outgoing_links.index] - n.model.add_variables( - coords=[outgoing_links.index], + outgoing_links_var = n.model["Link-p"].sel( + snapshot=t, Link=outgoing_links.index + ) + outgoing_links_aux_pos = n.model.add_variables( name=f"Link-p-outgoing-{ct}-aux-pos-{t}", lower=0, upper=outgoing_links.p_nom_max, ) - n.model.add_variables( - coords=[outgoing_links.index], + outgoing_links_aux_neg = n.model.add_variables( name=f"Link-p-outgoing-{ct}-aux-neg-{t}", lower=-outgoing_links.p_nom_max, upper=0, ) n.model.add_constraints( - n.model[f"Link-p-outgoing-{ct}-aux-pos-{t}"] >= outgoing_links_var, + outgoing_links_aux_pos >= outgoing_links_var, name=f"Link-p-outgoing-{ct}-aux-pos-constr-{t}", ) n.model.add_constraints( - n.model[f"Link-p-outgoing-{ct}-aux-neg-{t}"] <= outgoing_links_var, + outgoing_links_aux_neg <= outgoing_links_var, name=f"Link-p-outgoing-{ct}-aux-neg-constr-{t}", ) # To constraint the absolute values of imports and exports, we have to sum the # corresponding positive and negative flows separately, using auxiliary variables import_lhs = ( - n.model[f"Link-p-incoming-{ct}-aux-pos-{t}"].sum() - + n.model[f"Line-s-incoming-{ct}-aux-pos-{t}"].sum() - - n.model[f"Link-p-outgoing-{ct}-aux-neg-{t}"].sum() - - 
n.model[f"Line-s-outgoing-{ct}-aux-neg-{t}"].sum() + incoming_links_aux_pos.sum() + + incoming_lines_aux_pos.sum() + - outgoing_links_aux_neg.sum() + - outgoing_lines_aux_neg.sum() ) / 10 # divide by 10 to improve numerical stability export_lhs = ( - n.model[f"Link-p-outgoing-{ct}-aux-pos-{t}"].sum() - + n.model[f"Line-s-outgoing-{ct}-aux-pos-{t}"].sum() - - n.model[f"Link-p-incoming-{ct}-aux-neg-{t}"].sum() - - n.model[f"Line-s-incoming-{ct}-aux-neg-{t}"].sum() + outgoing_links_aux_pos.sum() + + outgoing_lines_aux_pos.sum() + - incoming_links_aux_neg.sum() + - incoming_lines_aux_neg.sum() ) / 10 n.model.add_constraints( From b2c303cd01dc87c3ec8971a9be689f805e37d320 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 8 Oct 2025 12:02:42 +0200 Subject: [PATCH 165/202] fix decentral oil boiler export --- scripts/pypsa-de/export_ariadne_variables.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index a083e194a..54ae059ec 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -1013,7 +1013,7 @@ def _get_capacities(n, region, cap_func, cap_string="Capacity|"): ).sum() var[cap_string + "Decentral Heat|Oil"] = capacities_decentral_heat.filter( - like="oil" + like="oil boiler" ).sum() var[cap_string + "Decentral Heat|Storage Converter"] = capacities_decentral_heat[ From adf6ea2a08d3c87428bb54594827e4047ef71bcd Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 8 Oct 2025 18:03:26 +0200 Subject: [PATCH 166/202] set up workflow for LT EEG sweeps --- Snakefile | 127 ++++++++++++++++-- config/config.de.yaml | 6 +- scripts/pypsa-de/additional_functionality.py | 22 ++- scripts/pypsa-de/export_ariadne_variables.py | 10 +- .../pypsa-de/prepare_st_low_res_network.py | 10 +- scripts/solve_network.py | 4 +- 6 files changed, 154 insertions(+), 25 deletions(-) diff --git a/Snakefile b/Snakefile index d7b89a05c..5512b96d7 100644 --- a/Snakefile +++ b/Snakefile @@ -1020,15 +1020,15 @@ rule prepare_st_low_res_network: ), input: network=RESULTS - + "networks/base_s_{clusters}_{opts}_{sector_opts}_{st_years}.nc", + + "networks/base_s_{clusters}_{opts}_{sector_opts}_{eeg_sweep_year}.nc", output: st_low_res_prenetwork=RESULTS - + "st_low_res_prenetworks/base_s_{clusters}_{opts}_{sector_opts}_{st_years}_eeg_level_{eeg_level}.nc", + + "st_low_res_prenetworks/base_s_{clusters}_{opts}_{sector_opts}_{eeg_sweep_year}_eeg_level_{eeg_level}.nc", resources: mem_mb=16000, log: RESULTS - + "logs/st_low_res_prenetwork_s_{clusters}_{opts}_{sector_opts}_{st_years}_eeg_level_{eeg_level}.log", + + "logs/st_low_res_prenetwork_s_{clusters}_{opts}_{sector_opts}_{eeg_sweep_year}_eeg_level_{eeg_level}.log", script: "scripts/pypsa-de/prepare_st_low_res_network.py" @@ -1042,21 +1042,21 @@ rule solve_st_low_res_network: custom_extra_functionality=input_custom_extra_functionality, input: st_low_res_prenetwork=RESULTS - + "st_low_res_prenetworks/base_s_{clusters}_{opts}_{sector_opts}_{st_years}_eeg_level_{eeg_level}.nc", + + "st_low_res_prenetworks/base_s_{clusters}_{opts}_{sector_opts}_{eeg_sweep_year}_eeg_level_{eeg_level}.nc", co2_totals_name=resources("co2_totals.csv"), energy_totals=resources("energy_totals.csv"), output: st_low_res_network=RESULTS - + "st_low_res_networks/{sensitivity}/base_s_{clusters}_{opts}_{sector_opts}_{st_years}_eeg_level_{eeg_level}.nc", + + 
"st_low_res_networks/{sensitivity}/base_s_{clusters}_{opts}_{sector_opts}_{eeg_sweep_year}_eeg_level_{eeg_level}.nc", shadow: shadow_config log: solver=RESULTS - + "logs/st_low_res_networks/{sensitivity}/base_s_{clusters}_{opts}_{sector_opts}_{st_years}_eeg_level_{eeg_level}_solver.log", + + "logs/st_low_res_networks/{sensitivity}/base_s_{clusters}_{opts}_{sector_opts}_{eeg_sweep_year}_eeg_level_{eeg_level}_solver.log", memory=RESULTS - + "logs/st_low_res_networks/{sensitivity}/base_s_{clusters}_{opts}_{sector_opts}_{st_years}_eeg_level_{eeg_level}_memory.log", + + "logs/st_low_res_networks/{sensitivity}/base_s_{clusters}_{opts}_{sector_opts}_{eeg_sweep_year}_eeg_level_{eeg_level}_memory.log", python=RESULTS - + "logs/st_low_res_networks/{sensitivity}/base_s_{clusters}_{opts}_{sector_opts}_{st_years}_eeg_level_{eeg_level}_python.log", + + "logs/st_low_res_networks/{sensitivity}/base_s_{clusters}_{opts}_{sector_opts}_{eeg_sweep_year}_eeg_level_{eeg_level}_python.log", threads: solver_threads resources: mem_mb=config_provider("solving", "mem_mb"), @@ -1102,9 +1102,11 @@ use rule export_ariadne_variables as export_st_variables with: energy_totals=resources("energy_totals.csv"), st_low_res_networks=expand( RESULTS - + "st_low_res_networks/{sensitivity}/base_s_{clusters}_{opts}_{sector_opts}_{st_years}_eeg_level_{eeg_level}.nc", + + "st_low_res_networks/{sensitivity}/base_s_{clusters}_{opts}_{sector_opts}_{eeg_sweep_year}_eeg_level_{eeg_level}.nc", **config["scenario"], - st_years=config_provider("iiasa_database", "regret_run", "st_years"), + eeg_sweep_year=config_provider( + "iiasa_database", "regret_run", "eeg_sweep_year" + ), allow_missing=True, ), output: @@ -1128,6 +1130,111 @@ rule st_all: ), +rule solve_eeg_sweep_lt: + params: + solving=config_provider("solving"), + foresight=config_provider("foresight"), + co2_sequestration_potential=config_provider( + "sector", "co2_sequestration_potential", default=200 + ), + custom_extra_functionality=input_custom_extra_functionality, + energy_year=config_provider("energy", "energy_totals_year"), + input: + network=resources( + "networks/base_s_{clusters}_{opts}_{sector_opts}_{eeg_sweep_year}_final.nc" + ), + co2_totals_name=resources("co2_totals.csv"), + energy_totals=resources("energy_totals.csv"), + output: + network=RESULTS + + "networks/base_s_{clusters}_{opts}_{sector_opts}_{eeg_sweep_year}_EEG_{eeg_level}.nc", + config=RESULTS + + "configs/config.base_s_{clusters}_{opts}_{sector_opts}_{eeg_sweep_year}_EEG_{eeg_level}.yaml", + shadow: + shadow_config + log: + solver=RESULTS + + "logs/base_s_{clusters}_{opts}_{sector_opts}_{eeg_sweep_year}_EEG_{eeg_level}_solver.log", + memory=RESULTS + + "logs/base_s_{clusters}_{opts}_{sector_opts}_{eeg_sweep_year}_EEG_{eeg_level}_memory.log", + python=RESULTS + + "logs/base_s_{clusters}_{opts}_{sector_opts}_{eeg_sweep_year}_EEG_{eeg_level}_python.log", + threads: solver_threads + resources: + mem_mb=config_provider("solving", "mem_mb"), + runtime=config_provider("solving", "runtime", default="6h"), + benchmark: + ( + RESULTS + + "benchmarks/solve_sector_network/base_s_{clusters}_{opts}_{sector_opts}_{eeg_sweep_year}_EEG_{eeg_level}" + ) + conda: + "envs/environment.yaml" + script: + "scripts/solve_network.py" + + +use rule export_ariadne_variables as export_eeg_sweep_lt_variables with: + input: + template="data/template_ariadne_database.xlsx", + industry_demands=expand( + resources( + "industrial_energy_demand_base_s_{clusters}_{planning_horizons}.csv" + ), + **config["scenario"], + allow_missing=True, + ), + 
networks=expand( + RESULTS + + "networks/base_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc", + **config["scenario"], + allow_missing=True, + ), + costs=expand( + resources("costs_{planning_horizons}.csv"), + **config["scenario"], + allow_missing=True, + ), + industrial_production_per_country_tomorrow=expand( + resources( + "industrial_production_per_country_tomorrow_{planning_horizons}-modified.csv" + ), + **config["scenario"], + allow_missing=True, + ), + industry_sector_ratios=expand( + resources("industry_sector_ratios_{planning_horizons}.csv"), + **config["scenario"], + allow_missing=True, + ), + industrial_production=resources("industrial_production_per_country.csv"), + energy_totals=resources("energy_totals.csv"), + eeg_sweep_networks=expand( + RESULTS + + "networks/base_s_{clusters}_{opts}_{sector_opts}_{eeg_sweep_year}_EEG_{eeg_level}.nc", + **config["scenario"], + eeg_sweep_year=config_provider( + "iiasa_database", "regret_run", "eeg_sweep_year" + ), + allow_missing=True, + ), + output: + exported_variables=RESULTS + "ariadne/exported_variables_EEG_{eeg_level}.xlsx", + exported_variables_full=RESULTS + + "ariadne/exported_variables_full_EEG_{eeg_level}.xlsx", + log: + RESULTS + "logs/export_ariadne_variables_EEG_{eeg_level}.log", + + +rule eeg_sweep: + input: + expand( + RESULTS + "ariadne/exported_variables_full_EEG_{eeg_level}.xlsx", + eeg_level=config_provider("iiasa_database", "regret_run", "EEG_levels"), + run=config_provider("run", "name"), + ), + + rule prepare_regret_network: params: solving=config_provider("solving"), diff --git a/config/config.de.yaml b/config/config.de.yaml index 8e275f021..b6c1500d0 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -56,13 +56,13 @@ iiasa_database: #- no_flex #- gas_price_60 #- gas_price_80 - st_years: + eeg_sweep_year: - 2030 EEG_levels: - 1.00 # - 0.95 # - 0.90 - - 0.85 + # - 0.85 # - 0.80 # - 0.75 - 0.70 @@ -776,7 +776,7 @@ onshore_nep_force: offshore_nep_force: cutin_year: 2025 cutout_year: 2030 - delay_years: 0 + delay_years: 2 scale_capacity: 2020: diff --git a/scripts/pypsa-de/additional_functionality.py b/scripts/pypsa-de/additional_functionality.py index 9198dc977..547802abb 100644 --- a/scripts/pypsa-de/additional_functionality.py +++ b/scripts/pypsa-de/additional_functionality.py @@ -863,9 +863,27 @@ def additional_functionality(n, snapshots, snakemake): try: investment_year = int(snakemake.wildcards.planning_horizons[-4:]) except AttributeError: - investment_year = int(snakemake.wildcards.st_years[-4:]) + investment_year = int(snakemake.wildcards.eeg_sweep_year) constraints = snakemake.params.solving["constraints"] - + if snakemake.wildcards.get("eeg_sweep_year"): + eeg_sweep_year = int(snakemake.wildcards.eeg_sweep_year) + assert eeg_sweep_year == 2030, "EEG sweep implemented only for 2030 " + lvl = float(snakemake.wildcards.eeg_level) + constraints["limits_capacity_min"]["Generator"]["onwind"]["DE"][ + eeg_sweep_year + ] = constraints["limits_capacity_max"]["Generator"]["onwind"]["DE"][ + eeg_sweep_year + ] = 115 * lvl + constraints["limits_capacity_min"]["Generator"]["offwind"]["DE"][ + eeg_sweep_year + ] = constraints["limits_capacity_max"]["Generator"]["offwind"]["DE"][ + eeg_sweep_year + ] = 30 * lvl + constraints["limits_capacity_min"]["Generator"]["solar"]["DE"][ + eeg_sweep_year + ] = constraints["limits_capacity_max"]["Generator"]["solar"]["DE"][ + eeg_sweep_year + ] = 215 * lvl if not snakemake.params.get("regret_run"): add_capacity_limits( n, investment_year, 
constraints["limits_capacity_min"], snakemake, "minimum" diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index 54ae059ec..7773fba47 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -5542,10 +5542,12 @@ def get_data( # Load data _networks = [pypsa.Network(fn) for fn in snakemake.input.networks] - if snakemake.input.st_low_res_networks: - _networks_st = [pypsa.Network(fn) for fn in snakemake.input.st_low_res_networks] - N_st = len(_networks_st) - _networks[-N_st:] = _networks_st + if snakemake.input.eeg_sweep_networks: + _sweep_networks = [ + pypsa.Network(fn) for fn in snakemake.input.eeg_sweep_networks + ] + N_st = len(_sweep_networks) + _networks[-N_st:] = _sweep_networks nhours = _networks[0].snapshot_weightings.generators.sum() nyears = nhours / 8760 diff --git a/scripts/pypsa-de/prepare_st_low_res_network.py b/scripts/pypsa-de/prepare_st_low_res_network.py index 640b4c96c..c01ead06b 100644 --- a/scripts/pypsa-de/prepare_st_low_res_network.py +++ b/scripts/pypsa-de/prepare_st_low_res_network.py @@ -126,7 +126,7 @@ def fix_capacities(n_lt, no_flex=False): clusters=27, opts="", sector_opts="none", - st_years="2030", + eeg_sweep_year="2030", run="HighDemand", eeg_level=0.7, ) @@ -138,7 +138,7 @@ def fix_capacities(n_lt, no_flex=False): update_config_from_wildcards(snakemake.config, snakemake.wildcards) n_lt = pypsa.Network(snakemake.input.network) - st_years = snakemake.wildcards.get("st_years", None) + eeg_sweep_year = snakemake.wildcards.get("eeg_sweep_year", None) logging_frequency = snakemake.config.get("solving", {}).get( "mem_logging_frequency", 30 ) @@ -155,7 +155,7 @@ def fix_capacities(n_lt, no_flex=False): n = fix_capacities(n_lt, snakemake.params.get("no_flex_lt_run", False)) scale_new_res_to_target( - n, eeg_targets, int(st_years), ratio=float(snakemake.wildcards.eeg_level) + n, eeg_targets, int(eeg_sweep_year), ratio=float(snakemake.wildcards.eeg_level) ) if h2_vent: @@ -207,7 +207,7 @@ def fix_capacities(n_lt, no_flex=False): n, solve_opts=snakemake.params.solving["options"], foresight=snakemake.params.foresight, - planning_horizons=st_years, + planning_horizons=eeg_sweep_year, co2_sequestration_potential=snakemake.params["co2_sequestration_potential"], limit_max_growth=snakemake.params.get("sector", {}).get("limit_max_growth"), regret_run=True, @@ -223,7 +223,7 @@ def fix_capacities(n_lt, no_flex=False): p_max_pu=0, p_nom_extendable=True, carrier="co2", - marginal_cost=-co2_prices[int(st_years)], + marginal_cost=-co2_prices[int(eeg_sweep_year)], ) n.global_constraints.drop("CO2Limit", inplace=True) n.global_constraints.drop("co2_limit-DE", inplace=True) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 91a345588..15e9ab519 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -1457,7 +1457,9 @@ def solve_network( np.random.seed(solve_opts.get("seed", 123)) n = pypsa.Network(snakemake.input.network) - planning_horizons = snakemake.wildcards.get("planning_horizons", None) + planning_horizons = snakemake.wildcards.get( + "planning_horizons", snakemake.wildcards.get("eeg_sweep_year", None) + ) prepare_network( n, From eafef433ebc4936bbb39034d91d00f0c5569e7e3 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Thu, 9 Oct 2025 14:00:34 +0200 Subject: [PATCH 167/202] add Eu wide TSC --- scripts/pypsa-de/export_ariadne_variables.py | 30 +++++++++++--------- 1 file changed, 16 insertions(+), 14 deletions(-) diff --git 
a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index 7773fba47..6d65830e7 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -4593,13 +4593,13 @@ def get_trade_cost(n, region, carriers): export_revenue, import_cost = get_export_import(n, region, carriers, unit="€") return import_cost - export_revenue - var["Cost|Total Energy System Cost|Trade|Electricity"] = ( + var["Total Energy System Cost|Trade|Electricity"] = ( get_trade_cost(n, region, ["AC"]) + get_trade_cost(n, region, ["DC"]) ) / 1e9 - var["Cost|Total Energy System Cost|Trade|Efuels"] = ( + var["Total Energy System Cost|Trade|Efuels"] = ( get_trade_cost(n, region, ["renewable oil", "renewable gas", "methanol"]) / 1e9 ) - var["Cost|Total Energy System Cost|Trade|Hydrogen"] = ( + var["Total Energy System Cost|Trade|Hydrogen"] = ( get_trade_cost( n, region, @@ -4607,21 +4607,23 @@ def get_trade_cost(n, region, carriers): ) / 1e9 ) - var["Cost|Total Energy System Cost|Trade"] = ( - var["Cost|Total Energy System Cost|Trade|Electricity"] - + var["Cost|Total Energy System Cost|Trade|Efuels"] - + var["Cost|Total Energy System Cost|Trade|Hydrogen"] - ) - # Cost|Total Energy System Cost in billion EUR2020/yr - var["Cost|Total Energy System Cost|Non Trade"] = ( - get_tsc(n, region).sum().sum() / 1e9 + var["Total Energy System Cost|Trade"] = ( + var["Total Energy System Cost|Trade|Electricity"] + + var["Total Energy System Cost|Trade|Efuels"] + + var["Total Energy System Cost|Trade|Hydrogen"] ) + # Total Energy System Cost in billion EUR2020/yr + var["Total Energy System Cost|Non Trade"] = get_tsc(n, region).sum().sum() / 1e9 - var["Cost|Total Energy System Cost"] = ( - var["Cost|Total Energy System Cost|Non Trade"] - + var["Cost|Total Energy System Cost|Trade"] + var["Total Energy System Cost"] = ( + var["Total Energy System Cost|Non Trade"] + + var["Total Energy System Cost|Trade"] ) + var["Total Energy System Cost|EU"] = ( + n.statistics.capex().sum() + n.statistics.opex().sum() + ) / 1e9 + return var From 0743625df68b0c599c5136406d75df89a1a73c97 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Thu, 9 Oct 2025 14:51:54 +0200 Subject: [PATCH 168/202] add cfd calculation to exporter --- scripts/pypsa-de/export_ariadne_variables.py | 129 ++++++++++++++++++- 1 file changed, 128 insertions(+), 1 deletion(-) diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index 6d65830e7..adbdbb715 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -4410,9 +4410,136 @@ def get_grid_investments( return var +def calculate_cfd_payments( + n, + region="DE", + vre_carriers=[ + "solar", + "solar-hsat", + "solar rooftop", + "offwind-ac", + "offwind-dc", + "onwind", + ], + einheitspreis=True, + rooftop_price_factor=2, + rooftop_self_consumption=0.2, +): + build_years = [ + y for y in n.generators.build_year.unique() if y in [2025, 2030, 2035] + ] + yearly_costs = {} + + for year in build_years: + one_sided = True if year < 2027 else False + print( + f"Calculating new EEG payments for year {year} with {'one-sided' if one_sided else 'two-sided'} contracts for difference" + ) + + cost = pd.Series(dtype=float) + for carrier in vre_carriers: + # Keep in mind that, e.g., the 2025 assets represent the 2020-2025 assets + gens = n.generators.query( + f"carrier == '{carrier}' and index.str.startswith('{region}') and build_year == {year}" + ) + if 
gens.empty: + print( + f"No {carrier} generators found for year {year} in region {region}." + ) + cost[carrier] = 0.0 + continue + + flh = n.generators_t.p_max_pu[gens.index].mean() * 8760 + strike_price = gens.capital_cost / flh + + # Not equal to n.generators_t.p[gens.index] because of curtailment + avail_generation = n.generators_t.p_max_pu[gens.index] * gens.p_nom_opt + + if einheitspreis: + nodal_prices = n.buses_t.marginal_price[ + n.buses.query( + f"index.str.startswith('{region}') and carrier == 'AC'" + ).index + ] + + nodal_flows = ( + n.statistics.withdrawal( + bus_carrier="AC", + groupby=["name", "bus", "carrier"], + aggregate_time=False, + ) + .groupby("bus") + .sum() + .T.filter( + like=region, + axis=1, + ) + ) + + weighted_mean_nodal_price = ( + nodal_flows.mul(nodal_prices) + .sum(axis=1) + .div(nodal_flows.sum(axis=1)) + ) + + price = pd.DataFrame( + {loc: weighted_mean_nodal_price for loc in strike_price.index}, + index=weighted_mean_nodal_price.index, + ) + else: + price = n.buses_t.marginal_price[gens.bus] + price.columns = gens.index + + print("average hourly price", carrier, round(price.mean().mean(), 2)) + print("average strike price", carrier, round(strike_price.mean(), 2)) + + if carrier == "solar rooftop": + print( + "Correcting rooftop PV strike price by factor", + rooftop_price_factor, + "and accounting self-consumption with a factor", + rooftop_self_consumption, + ) + strike_price *= rooftop_price_factor + avail_generation *= 1 - rooftop_self_consumption + + if year == 2025: + if price.mean().mean() < 75: + print("price seems to be low") + + if one_sided: + remuneration = strike_price - price.clip(upper=strike_price, axis=1) + else: + remuneration = strike_price - price + + cost[carrier] = (avail_generation * remuneration).multiply( + n.snapshot_weightings.generators, axis=0 + ).values.sum() / 1e9 # in bn € + yearly_costs[year] = cost + + print("\nNew EEG payment per carrier in bn €:\n") + + return pd.DataFrame(yearly_costs) + + def get_policy(n, investment_year): var = pd.Series() + cfds = calculate_cfd_payments(n) + var["Policy|Renewable Energy Support|CfD|Solar|Rooftop"] = ( + cfds.sum(axis=1).filter(like="solar rooftop").sum() + ) + var["Policy|Renewable Energy Support|CfD|Solar|Utility"] = ( + cfds.sum(axis=1).reindex(["solar", "solar-hsat"]).sum() + ) + var["Policy|Renewable Energy Support|CfD|Wind|Onshore"] = ( + cfds.sum(axis=1).filter(like="onwind").sum() + ) + var["Policy|Renewable Energy Support|CfD|Wind|Offshore"] = ( + cfds.sum(axis=1).filter(like="offwind").sum() + ) + var["Policy|Renewable Energy Support|CfD|Total"] = cfds.sum(axis=1).sum() + # add carbon component to fossil fuels if specified if (snakemake.params.co2_price_add_on_fossils is not None) and ( investment_year in snakemake.params.co2_price_add_on_fossils.keys() @@ -5544,7 +5671,7 @@ def get_data( # Load data _networks = [pypsa.Network(fn) for fn in snakemake.input.networks] - if snakemake.input.eeg_sweep_networks: + if snakemake.input.get("eeg_sweep_networks"): _sweep_networks = [ pypsa.Network(fn) for fn in snakemake.input.eeg_sweep_networks ] From dd82783723274b016c2d2071ba584ce0cfb54778 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 10 Oct 2025 16:16:31 +0200 Subject: [PATCH 169/202] improve auxiliary variables, solve fragmentation issue --- scripts/pypsa-de/additional_functionality.py | 180 ++++++++----------- 1 file changed, 71 insertions(+), 109 deletions(-) diff --git a/scripts/pypsa-de/additional_functionality.py b/scripts/pypsa-de/additional_functionality.py index 
547802abb..02b6a1b99 100644 --- a/scripts/pypsa-de/additional_functionality.py +++ b/scripts/pypsa-de/additional_functionality.py @@ -108,6 +108,43 @@ def add_power_limits(n, investment_year, limits_power_max): """ " Restricts the maximum inflow/outflow of electricity from/to a country. """ + + def add_pos_neg_aux_variables(n, idx, infix): + """ + For every snapshot in the network `n` this functions adds auxiliary variables corresponding to the positive and negative parts of the dynamical variables of the network components specified in the index `idx`. The `infix` parameter is used to create unique names for the auxiliary variables and constraints. + + Parameters + ---------- + n : pypsa.Network + The PyPSA network object containing the model. + idx : pandas.Index + The index of the network component (e.g., lines or links) for which to create auxiliary variables. + infix : str + A string used to create unique names for the auxiliary variables and constraints. + """ + + var_key = f"{idx.name}-{'s' if idx.name == 'Line' else 'p'}" + var = n.model[var_key].sel({idx.name: idx}) + aux_pos = n.model.add_variables( + name=f"{var_key}-{infix}-aux-pos", + lower=0, + coords=[n.snapshots, idx], + ) + aux_neg = n.model.add_variables( + name=f"{var_key}-{infix}-aux-neg", + upper=0, + coords=[n.snapshots, idx], + ) + n.model.add_constraints( + aux_pos >= var, + name=f"{var_key}-{infix}-aux-pos-constr", + ) + n.model.add_constraints( + aux_neg <= var, + name=f"{var_key}-{infix}-aux-neg-constr", + ) + return aux_pos, aux_neg + for ct in limits_power_max: if investment_year not in limits_power_max[ct].keys(): continue @@ -132,118 +169,43 @@ def add_power_limits(n, investment_year, limits_power_max): f"bus0.str.startswith('{ct}') and not bus1.str.startswith('{ct}') and carrier == 'DC' and active" ) - for t in n.snapshots: - # For incoming flows s > 0 means imports, s < 0 exports - # For outgoing flows s > 0 means exports, s < 0 imports - # to get the positive and negative parts separately, we use auxiliary variables - incoming_lines_var = n.model["Line-s"].sel( - snapshot=t, Line=incoming_lines.index - ) - incoming_lines_aux_pos = n.model.add_variables( - name=f"Line-s-incoming-{ct}-aux-pos-{t}", - lower=0, - upper=incoming_lines.s_nom_max, - ) - incoming_lines_aux_neg = n.model.add_variables( - name=f"Line-s-incoming-{ct}-aux-neg-{t}", - lower=-incoming_lines.s_nom_max, - upper=0, - ) - n.model.add_constraints( - incoming_lines_aux_pos >= incoming_lines_var, - name=f"Line-s-incoming-{ct}-aux-pos-constr-{t}", - ) - n.model.add_constraints( - incoming_lines_aux_neg <= incoming_lines_var, - name=f"Line-s-incoming-{ct}-aux-neg-constr-{t}", - ) + # define auxiliary variables for positive and negative parts of line and link flows - outgoing_lines_var = n.model["Line-s"].sel( - snapshot=t, Line=outgoing_lines.index - ) - outgoing_lines_aux_pos = n.model.add_variables( - name=f"Line-s-outgoing-{ct}-aux-pos-{t}", - lower=0, - upper=outgoing_lines.s_nom_max, - ) - outgoing_lines_aux_neg = n.model.add_variables( - name=f"Line-s-outgoing-{ct}-aux-neg-{t}", - lower=-outgoing_lines.s_nom_max, - upper=0, - ) - n.model.add_constraints( - outgoing_lines_aux_pos >= outgoing_lines_var, - name=f"Line-s-outgoing-{ct}-aux-pos-constr-{t}", - ) - n.model.add_constraints( - outgoing_lines_aux_neg <= outgoing_lines_var, - name=f"Line-s-outgoing-{ct}-aux-neg-constr-{t}", - ) + incoming_lines_aux_pos, incoming_lines_aux_neg = add_pos_neg_aux_variables( + n, incoming_lines.index, f"incoming-{ct}" + ) - incoming_links_var = 
n.model["Link-p"].sel( - snapshot=t, Link=incoming_links.index - ) - incoming_links_aux_pos = n.model.add_variables( - name=f"Link-p-incoming-{ct}-aux-pos-{t}", - lower=0, - upper=incoming_links.p_nom_max, - ) - incoming_links_aux_neg = n.model.add_variables( - name=f"Link-p-incoming-{ct}-aux-neg-{t}", - lower=-incoming_links.p_nom_max, - upper=0, - ) - n.model.add_constraints( - incoming_links_aux_pos >= incoming_links_var, - name=f"Link-p-incoming-{ct}-aux-pos-constr-{t}", - ) - n.model.add_constraints( - incoming_links_aux_neg <= incoming_links_var, - name=f"Link-p-incoming-{ct}-aux-neg-constr-{t}", - ) + outgoing_lines_aux_pos, outgoing_lines_aux_neg = add_pos_neg_aux_variables( + n, outgoing_lines.index, f"outgoing-{ct}" + ) - outgoing_links_var = n.model["Link-p"].sel( - snapshot=t, Link=outgoing_links.index - ) - outgoing_links_aux_pos = n.model.add_variables( - name=f"Link-p-outgoing-{ct}-aux-pos-{t}", - lower=0, - upper=outgoing_links.p_nom_max, - ) - outgoing_links_aux_neg = n.model.add_variables( - name=f"Link-p-outgoing-{ct}-aux-neg-{t}", - lower=-outgoing_links.p_nom_max, - upper=0, - ) - n.model.add_constraints( - outgoing_links_aux_pos >= outgoing_links_var, - name=f"Link-p-outgoing-{ct}-aux-pos-constr-{t}", - ) - n.model.add_constraints( - outgoing_links_aux_neg <= outgoing_links_var, - name=f"Link-p-outgoing-{ct}-aux-neg-constr-{t}", - ) - # To constraint the absolute values of imports and exports, we have to sum the - # corresponding positive and negative flows separately, using auxiliary variables - import_lhs = ( - incoming_links_aux_pos.sum() - + incoming_lines_aux_pos.sum() - - outgoing_links_aux_neg.sum() - - outgoing_lines_aux_neg.sum() - ) / 10 # divide by 10 to improve numerical stability - export_lhs = ( - outgoing_links_aux_pos.sum() - + outgoing_lines_aux_pos.sum() - - incoming_links_aux_neg.sum() - - incoming_lines_aux_neg.sum() - ) / 10 - - n.model.add_constraints( - import_lhs <= lim / 10, name=f"Power-import-limit-{ct}-{t}" - ) - n.model.add_constraints( - export_lhs <= lim / 10, name=f"Power-export-limit-{ct}-{t}" - ) + incoming_links_aux_pos, incoming_links_aux_neg = add_pos_neg_aux_variables( + n, incoming_links.index, f"incoming-{ct}" + ) + + outgoing_links_aux_pos, outgoing_links_aux_neg = add_pos_neg_aux_variables( + n, outgoing_links.index, f"outgoing-{ct}" + ) + + # To constraint the absolute values of imports and exports, we have to sum the + # corresponding positive and negative flows separately, using the auxiliary variables + + import_lhs = ( + incoming_links_aux_pos.sum(dim="Link") + + incoming_lines_aux_pos.sum(dim="Line") + - outgoing_links_aux_neg.sum(dim="Link") + - outgoing_lines_aux_neg.sum(dim="Line") + ) / 10 + + export_lhs = ( + outgoing_links_aux_pos.sum(dim="Link") + + outgoing_lines_aux_pos.sum(dim="Line") + - incoming_links_aux_neg.sum(dim="Link") + - incoming_lines_aux_neg.sum(dim="Line") + ) / 10 + + n.model.add_constraints(import_lhs <= lim / 10, name=f"Power-import-limit-{ct}") + n.model.add_constraints(export_lhs <= lim / 10, name=f"Power-export-limit-{ct}") def h2_import_limits(n, investment_year, limits_volume_max): From 958dcd495030e146ab1adf06fde13669e909febe Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 10 Oct 2025 16:29:37 +0200 Subject: [PATCH 170/202] more conservative assumptions regarding cross border trade --- config/config.de.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/config/config.de.yaml b/config/config.de.yaml index b6c1500d0..948086979 100644 --- a/config/config.de.yaml 
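# Illustrative, self-contained sketch of the positive/negative decomposition refactored in
# the patch above (names are invented; the linopy calls mirror those used in the patch).
# For a free flow f, aux_pos >= f with aux_pos >= 0 and aux_neg <= f with aux_neg <= 0, so
# bounding sum(aux_pos) caps the positive parts (imports) and bounding sum(aux_neg) from
# below caps the negative parts (exports); the patch adds n.snapshots as a further dimension
# via coords=[n.snapshots, idx] instead of looping over snapshots.
import linopy
import pandas as pd

m = linopy.Model()
lines = pd.Index(["line-1", "line-2"], name="Line")
flow = m.add_variables(lower=-100, upper=100, coords=[lines], name="Line-s")
aux_pos = m.add_variables(lower=0, coords=[lines], name="Line-s-aux-pos")
aux_neg = m.add_variables(upper=0, coords=[lines], name="Line-s-aux-neg")
m.add_constraints(aux_pos >= flow, name="aux-pos-def")
m.add_constraints(aux_neg <= flow, name="aux-neg-def")
m.add_constraints(aux_pos.sum(dim="Line") <= 50, name="import-limit")
m.add_constraints(aux_neg.sum(dim="Line") >= -50, name="export-limit")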
+++ b/config/config.de.yaml @@ -81,7 +81,8 @@ iiasa_database: scale_industry_non_energy: false # Scale non-energy industry demand directly proportional to energy demand restrict_cross_border_flows: # restricts cross border flows between all countries (AC) 2025: 0.5 - 2030: 0.6 + 2030: 0.5 + 2035: 0.5 # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#foresight foresight: myopic From 846c3485d25bc8288d8280864f464dd88b77ddce Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Mon, 13 Oct 2025 16:22:05 +0200 Subject: [PATCH 171/202] Use UBA instead of REMIND for non-CO2 GHG in 2020 and 2025 --- scripts/pypsa-de/build_scenarios.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/scripts/pypsa-de/build_scenarios.py b/scripts/pypsa-de/build_scenarios.py index 87c2cfba6..d65eb0618 100644 --- a/scripts/pypsa-de/build_scenarios.py +++ b/scripts/pypsa-de/build_scenarios.py @@ -111,6 +111,11 @@ def get_co2_budget(df, source): nonco2 = ghg - co2 + # Hard-code values for 2020 and 2025 from UBA reports / projections + # Table 1, https://reportnet.europa.eu/public/dataflow/1478, GHG - CO2 + nonco2[2020] = 733.7 - 647.9 + nonco2[2025] = 628.8 - 554.6 + ## PyPSA disregards nonco2 GHG emissions, but includes bunkers targets_pypsa = targets_co2 - nonco2 @@ -120,9 +125,6 @@ def get_co2_budget(df, source): logger.info(f"{year}: {nonco2.loc[year]:.1f}") target_fractions_pypsa = targets_pypsa.loc[targets_co2.index] / baseline_pypsa - target_fractions_pypsa[2020] = ( - 0.671 # Hard-coded based on REMIND data from ariadne2-internal DB - ) return target_fractions_pypsa.round(3) From 0fecfe202428959b415630ab58dfaa5ad44d9c0f Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Mon, 13 Oct 2025 16:22:56 +0200 Subject: [PATCH 172/202] add domestic aviation (bugfix) and subtract international navigation from national co2 limits --- scripts/pypsa-de/additional_functionality.py | 64 ++++++++++++++++---- 1 file changed, 53 insertions(+), 11 deletions(-) diff --git a/scripts/pypsa-de/additional_functionality.py b/scripts/pypsa-de/additional_functionality.py index 02b6a1b99..cc08eb273 100644 --- a/scripts/pypsa-de/additional_functionality.py +++ b/scripts/pypsa-de/additional_functionality.py @@ -423,6 +423,7 @@ def add_national_co2_budgets(n, snakemake, national_co2_budgets, investment_year nyears = nhours / 8760 sectors = determine_emission_sectors(n.config["sector"]) + energy_totals = pd.read_csv(snakemake.input.energy_totals, index_col=[0, 1]) # convert MtCO2 to tCO2 co2_totals = 1e6 * pd.read_csv(snakemake.input.co2_totals_name, index_col=0) @@ -447,8 +448,8 @@ def add_national_co2_budgets(n, snakemake, national_co2_budgets, investment_year links = n.links.index[ (n.links.index.str[:2] == ct) & (n.links[f"bus{port}"] == "co2 atmosphere") - & ( - n.links.carrier != "kerosene for aviation" + & ~n.links.carrier.str.contains( + "shipping|aviation" ) # first exclude aviation to multiply it with a domestic factor later ] @@ -472,27 +473,68 @@ def add_national_co2_budgets(n, snakemake, national_co2_budgets, investment_year ) # Aviation demand - energy_totals = pd.read_csv(snakemake.input.energy_totals, index_col=[0, 1]) domestic_aviation = energy_totals.loc[ (ct, snakemake.params.energy_year), "total domestic aviation" ] international_aviation = energy_totals.loc[ (ct, snakemake.params.energy_year), "total international aviation" ] - domestic_factor = domestic_aviation / ( + domestic_aviation_factor = domestic_aviation / ( domestic_aviation + international_aviation ) aviation_links 
= n.links[ (n.links.index.str[:2] == ct) & (n.links.carrier == "kerosene for aviation") ] - lhs.append - ( - n.model["Link-p"].loc[:, aviation_links.index] - * aviation_links.efficiency2 - * n.snapshot_weightings.generators - ).sum() * domestic_factor + lhs.append( + ( + n.model["Link-p"].loc[:, aviation_links.index] + * aviation_links.efficiency2 + * n.snapshot_weightings.generators + ).sum() + * domestic_aviation_factor + ) + logger.info( + f"Adding domestic aviation emissions for {ct} with a factor of {domestic_aviation_factor}" + ) + + # Shipping oil + domestic_navigation = energy_totals.loc[ + (ct, snakemake.params.energy_year), "total domestic navigation" + ] + international_navigation = energy_totals.loc[ + (ct, snakemake.params.energy_year), "total international navigation" + ] + domestic_navigation_factor = domestic_navigation / ( + domestic_navigation + international_navigation + ) + shipping_links = n.links[ + (n.links.index.str[:2] == ct) & (n.links.carrier == "shipping oil") + ] + lhs.append( + ( + n.model["Link-p"].loc[:, shipping_links.index] + * shipping_links.efficiency2 + * n.snapshot_weightings.generators + ).sum() + * domestic_navigation_factor + ) + + # Shipping methanol + shipping_meoh_links = n.links[ + (n.links.index.str[:2] == ct) & (n.links.carrier == "shipping methanol") + ] + if not shipping_meoh_links.empty: # no shipping methanol in 2025 + lhs.append( + ( + n.model["Link-p"].loc[:, shipping_meoh_links.index] + * shipping_meoh_links.efficiency2 + * n.snapshot_weightings.generators + ).sum() + * domestic_navigation_factor + ) + logger.info( - f"Adding domestic aviation emissions for {ct} with a factor of {domestic_factor}" + f"Adding domestic shipping emissions for {ct} with a factor of {domestic_navigation_factor}" ) # Adding Efuel imports and exports to constraint From 1884d52ea86dc17ebc4541b656b8709866863df2 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Mon, 13 Oct 2025 16:23:39 +0200 Subject: [PATCH 173/202] export the emissions according to the country constraint for analysis --- scripts/pypsa-de/export_ariadne_variables.py | 163 +++++++++++++++++++ 1 file changed, 163 insertions(+) diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index adbdbb715..0802ee36e 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -2627,6 +2627,167 @@ def get_final_energy( def get_emissions(n, region, _energy_totals, _industry_demand): + def get_constraint_emissions(n, ct): + lhs = [] + + for port in [col[3:] for col in n.links if col.startswith("bus")]: + links = n.links.index[ + (n.links.index.str[:2] == ct) + & (n.links[f"bus{port}"] == "co2 atmosphere") + & ~n.links.carrier.str.contains( + "shipping|aviation" + ) # first exclude aviation to multiply it with a domestic factor later + ] + + if port == "0": + efficiency = -1.0 + elif port == "1": + efficiency = n.links.loc[links, "efficiency"] + else: + efficiency = n.links.loc[links, f"efficiency{port}"] + + variables = ( + n.links_t.p0.loc[:, links] + .mul(efficiency) + .mul(n.snapshot_weightings.generators, axis=0) + .sum() + ) + if not variables.empty: + lhs.append(variables) + + # Aviation demand + energy_totals = pd.read_csv(snakemake.input.energy_totals, index_col=[0, 1]) + domestic_aviation = energy_totals.loc[ + (ct, snakemake.params.energy_totals_year), "total domestic aviation" + ] + international_aviation = energy_totals.loc[ + (ct, snakemake.params.energy_totals_year), "total international 
aviation" + ] + domestic_aviation_factor = domestic_aviation / ( + domestic_aviation + international_aviation + ) + aviation_links = n.links[ + (n.links.index.str[:2] == ct) & (n.links.carrier == "kerosene for aviation") + ] + lhs.append( + ( + n.links_t.p0.loc[:, aviation_links.index] + .mul(aviation_links.efficiency2) + .mul(n.snapshot_weightings.generators, axis=0) + ).sum() + * domestic_aviation_factor + ) + + # Shipping oil + domestic_navigation = energy_totals.loc[ + (ct, snakemake.params.energy_totals_year), "total domestic navigation" + ] + international_navigation = energy_totals.loc[ + (ct, snakemake.params.energy_totals_year), "total international navigation" + ] + domestic_navigation_factor = domestic_navigation / ( + domestic_navigation + international_navigation + ) + shipping_links = n.links[ + (n.links.index.str[:2] == ct) & (n.links.carrier == "shipping oil") + ] + lhs.append( + ( + n.links_t.p0.loc[:, shipping_links.index].mul( + n.snapshot_weightings.generators, axis=0 + ) + * shipping_links.efficiency2 + ).sum() + * domestic_navigation_factor + ) + + # Shipping methanol + shipping_meoh_links = n.links[ + (n.links.index.str[:2] == ct) & (n.links.carrier == "shipping methanol") + ] + lhs.append( + ( + n.links_t.p0.loc[:, shipping_meoh_links.index].mul( + n.snapshot_weightings.generators, axis=0 + ) + * shipping_meoh_links.efficiency2 + ).sum() + * domestic_navigation_factor + ) + + # Adding Efuel imports and exports to constraint + incoming_oil = n.links.index[n.links.index == f"EU renewable oil -> {ct} oil"] + outgoing_oil = n.links.index[n.links.index == f"{ct} renewable oil -> EU oil"] + + lhs.append( + ( + -1 + * n.links_t.p0.loc[:, incoming_oil].mul( + n.snapshot_weightings.generators, axis=0 + ) + * 0.2571 + ).sum() + ) + lhs.append( + ( + n.links_t.p0.loc[:, outgoing_oil].mul( + n.snapshot_weightings.generators, axis=0 + ) + * 0.2571 + ).sum() + ) + + incoming_methanol = n.links.index[ + n.links.index == f"EU methanol -> {ct} methanol" + ] + outgoing_methanol = n.links.index[ + n.links.index == f"{ct} methanol -> EU methanol" + ] + + lhs.append( + ( + -1 + * n.links_t.p0.loc[:, incoming_methanol].mul( + n.snapshot_weightings.generators, axis=0 + ) + / snakemake.config["sector"]["MWh_MeOH_per_tCO2"] + ).sum() + ) + + lhs.append( + ( + n.links_t.p0.loc[:, outgoing_methanol].mul( + n.snapshot_weightings.generators, axis=0 + ) + / snakemake.config["sector"]["MWh_MeOH_per_tCO2"] + ).sum() + ) + + # Methane + incoming_CH4 = n.links.index[n.links.index == f"EU renewable gas -> {ct} gas"] + outgoing_CH4 = n.links.index[n.links.index == f"{ct} renewable gas -> EU gas"] + + lhs.append( + ( + -1 + * n.links_t.p0.loc[:, incoming_CH4].mul( + n.snapshot_weightings.generators, axis=0 + ) + * 0.198 + ).sum() + ) + + lhs.append( + ( + n.links_t.p0.loc[:, outgoing_CH4].mul( + n.snapshot_weightings.generators, axis=0 + ) + * 0.198 + ).sum() + ) + + return pd.concat(lhs).sum() * t2Mt + energy_totals = _energy_totals.loc[region[0:2]] industry_demand = _industry_demand.filter( @@ -2641,6 +2802,8 @@ def get_emissions(n, region, _energy_totals, _industry_demand): var = pd.Series() + var["Emissions|CO2|Model|Constraint"] = get_constraint_emissions(n, region).sum() + co2_emissions = ( n.statistics.supply(bus_carrier="co2", **kwargs) .filter(like=region) From c7f64cf3893f07dbe936bc8a6ee47e5fc331a511 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Mon, 13 Oct 2025 16:25:12 +0200 Subject: [PATCH 174/202] rename and changelog --- CHANGELOG.md | 1 + config/config.de.yaml | 2 +- 2 files 
changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0ab12607d..59fa4b097 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,5 @@ # Changelog +- Minor improvements to the DE CO2 constraint - Added an option to source industry energy demand from UBA MWMS (Projektionsbericht 2025) for the years 2025-2035 - renamed some scripts - Bugfix: Enforce stricter power import limit to avoid that import from one country compensate from exports to another diff --git a/config/config.de.yaml b/config/config.de.yaml index 948086979..71baf4c1e 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -4,7 +4,7 @@ # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#run run: - prefix: 20251001_low_RES + prefix: 20251013_improve_co2_limit name: # - ExPol # - KN2045_Mix From f2bed5569436a6d233c14b690ec833a06deb160a Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Mon, 13 Oct 2025 16:25:35 +0200 Subject: [PATCH 175/202] minor config changes --- config/config.de.yaml | 9 ++++++--- scripts/pypsa-de/export_ariadne_variables.py | 5 +++-- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/config/config.de.yaml b/config/config.de.yaml index 71baf4c1e..f552842ec 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -9,8 +9,9 @@ run: # - ExPol # - KN2045_Mix # - HighDemand - - HighDemand + # - HighDemandNoFlex - LowRES + - LowRESNoFlex #- HighDemandNoFlex # !!! CAVEAT The 'NoFlex' substring in the scenario name determines how the regrets are computed!!! #- LowDemandNoFlex # - KN2045_NFhoch @@ -47,8 +48,10 @@ iiasa_database: region: Deutschland regret_run: demand_baselines: # names of scenarios that define the demand variations to be considered in the short-term runs - - HighDemand - - LowRES + # - HighDemand + # - HighDemandNoFlex + # - LowRES + # - LowRESNoFlex scope_to_fix: EU # Supported values are DE and EU strict: false # If false, the model allows capacity expansion for virtual links and bottlenecks h2_vent: true diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index 0802ee36e..9945c78b3 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -5775,15 +5775,16 @@ def get_data( if __name__ == "__main__": if "snakemake" not in globals(): snakemake = mock_snakemake( - "export_regret_variables", + "export_ariadne_variables", simpl="", clusters=27, opts="", ll="vopt", sector_opts="None", - run="LowDemand", + run="LowRES", decision="LowDemand", sensitivity="base", + eeg_level=0.7, ) configure_logging(snakemake) config = snakemake.config From 25dce1a54e5a34fbacdf90070629ddb47f7df00d Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Mon, 13 Oct 2025 16:49:59 +0200 Subject: [PATCH 176/202] no limits on electricity imports for now --- scripts/pypsa-de/additional_functionality.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/scripts/pypsa-de/additional_functionality.py b/scripts/pypsa-de/additional_functionality.py index cc08eb273..7260d044c 100644 --- a/scripts/pypsa-de/additional_functionality.py +++ b/scripts/pypsa-de/additional_functionality.py @@ -900,9 +900,11 @@ def additional_functionality(n, snapshots, snakemake): if snakemake.wildcards.clusters != "1": h2_import_limits(n, investment_year, constraints["limits_volume_max"]) - electricity_import_limits( - n, investment_year, constraints["limits_volume_max"] - ) + # deactivate elec import limit since it may lead to strange flows + # 
TODO evaluate if this is necessary + # electricity_import_limits( + # n, investment_year, constraints["limits_volume_max"] + # ) if investment_year >= 2025: h2_production_limits( From 00e45f21dbada0b34cd7ddd67c468e6136223349 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 15 Oct 2025 16:47:11 +0200 Subject: [PATCH 177/202] adjust Jahresemissionsmenge 2030 --- scripts/pypsa-de/build_scenarios.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/pypsa-de/build_scenarios.py b/scripts/pypsa-de/build_scenarios.py index d65eb0618..37afc6b54 100644 --- a/scripts/pypsa-de/build_scenarios.py +++ b/scripts/pypsa-de/build_scenarios.py @@ -69,7 +69,7 @@ def get_co2_budget(df, source): data=[ 813, 629, # From UBA Projektionsbericht 2025 - 438, + 456.8, # angepasste Jahresemissionsgesamtmenge laut UBA Projektionsbericht 2025, Tabelle 1 ], ) From 11cc8cded58f61d351e7fb62e76332aa5ad81814 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 15 Oct 2025 16:47:36 +0200 Subject: [PATCH 178/202] activate resource_classes --- config/config.de.yaml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/config/config.de.yaml b/config/config.de.yaml index f552842ec..f6fc261ec 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -4,7 +4,7 @@ # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#run run: - prefix: 20251013_improve_co2_limit + prefix: 20251015_resource_classes name: # - ExPol # - KN2045_Mix @@ -138,6 +138,7 @@ atlite: renewable: onwind: + resource_classes: 5 capacity_per_sqkm: 1.4 cutout: europe-2019-sarah3-era5 correction_factor: 0.95 @@ -161,6 +162,7 @@ renewable: hub_height: 100. P: 3.06 offwind-ac: + resource_classes: 5 capacity_per_sqkm: 6 landfall_length: 30 cutout: europe-2019-sarah3-era5 @@ -185,6 +187,7 @@ renewable: hub_height: 140. 
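# Illustrative note on the CO2 budget changes in build_scenarios.py above (not part of
# this config block): the non-CO2 share is now taken from UBA reporting instead of REMIND,
#   nonco2[2020] = 733.7 - 647.9 = 85.8 Mt CO2eq
#   nonco2[2025] = 628.8 - 554.6 = 74.2 Mt CO2eq
# and enters the budget as targets_pypsa = targets_co2 - nonco2 (bunkers stay included),
# while the 2030 value was raised from 438 to 456.8 Mt, the adjusted
# Jahresemissionsgesamtmenge from Tabelle 1 of the UBA Projektionsbericht 2025.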
P: 5.5 offwind-dc: + resource_classes: 5 capacity_per_sqkm: 6 landfall_length: 30 cutout: europe-2019-sarah3-era5 @@ -214,9 +217,11 @@ renewable: cutout: europe-2019-sarah3-era5 correction_factor: 0.95 solar: + resource_classes: 5 cutout: europe-2019-sarah3-era5 correction_factor: 0.9 # scaling to Abbildung 36 of https://www.ise.fraunhofer.de/de/veroeffentlichungen/studien/aktuelle-fakten-zur-photovoltaik-in-deutschland.html solar-hsat: + resource_classes: 5 cutout: europe-2019-sarah3-era5 correction_factor: 0.9 # scaling to Abbildung 36 of https://www.ise.fraunhofer.de/de/veroeffentlichungen/studien/aktuelle-fakten-zur-photovoltaik-in-deutschland.html hydro: From 539d1d1b7156124ce7e59aa3c263186b8e05f4ff Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Thu, 16 Oct 2025 10:00:59 +0200 Subject: [PATCH 179/202] more details on emissions in the energy sector --- scripts/pypsa-de/export_ariadne_variables.py | 81 ++++++++++++++++++-- 1 file changed, 76 insertions(+), 5 deletions(-) diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index 9945c78b3..9bf2df08d 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -3197,7 +3197,78 @@ def get_constraint_emissions(n, ct): var["Emissions|CO2|Energy|Demand"] + var["Emissions|CO2|Energy|Demand|Bunkers"] ) + CHP_emissions_E = CHP_emissions.multiply(CHP_E_fraction).groupby("carrier").sum() + CHP_emissions_H = ( + CHP_emissions.multiply(1 - CHP_E_fraction).groupby("carrier").sum() + ) + CHP_negative_emissions_E = ( + CHP_negative_emissions.multiply(negative_CHP_E_fraction) + .groupby("carrier") + .sum() + ) + CHP_negative_emissions_H = ( + CHP_negative_emissions.multiply(1 - negative_CHP_E_fraction) + .groupby("carrier") + .sum() + ) + + var["Emissions|Gross Fossil CO2|Energy|Supply|Electricity|Gas"] = ( + co2_emissions.reindex( + [ + "OCGT", + "CCGT", + ], + ).sum() + + CHP_emissions_E.filter(like="gas").sum() + ) + var["Emissions|CO2|Energy|Supply|Electricity|Gas"] = ( + var["Emissions|Gross Fossil CO2|Energy|Supply|Electricity|Gas"] + - CHP_negative_emissions_E.filter(like="gas").sum() + ) + + var["Emissions|CO2|Energy|Supply|Electricity|Coal"] = var[ + "Emissions|Gross Fossil CO2|Energy|Supply|Electricity|Coal" + ] = ( + co2_emissions.reindex( + [ + "coal", + "lignite", + ], + ).sum() + + CHP_emissions_E.filter(regex="coal|lignite").sum() + ) + var["Emissions|CO2|Energy|Supply|Electricity|Oil"] = var[ + "Emissions|Gross Fossil CO2|Energy|Supply|Electricity|Oil" + ] = ( + co2_emissions.reindex( + [ + "oil", + ], + ).sum() + + CHP_emissions_E.filter(like="oil").sum() + ) + + var["Emissions|CO2|Energy|Supply|Electricity|Biomass"] = ( + CHP_negative_emissions_E.filter(like="bio").sum() + ) + + var["Emissions|Gross Fossil CO2|Energy|Supply|Electricity|Waste"] = ( + CHP_emissions_E.filter(like="waste CHP").sum() + ) + var["Emissions|CO2|Energy|Supply|Electricity|Waste"] = ( + var["Emissions|Gross Fossil CO2|Energy|Supply|Electricity|Waste"] + - CHP_negative_emissions_E.filter(like="waste").sum() + ) + var["Emissions|Gross Fossil CO2|Energy|Supply|Electricity"] = ( + var["Emissions|Gross Fossil CO2|Energy|Supply|Electricity|Gas"] + + var["Emissions|Gross Fossil CO2|Energy|Supply|Electricity|Coal"] + + var["Emissions|Gross Fossil CO2|Energy|Supply|Electricity|Oil"] + + var["Emissions|Gross Fossil CO2|Energy|Supply|Electricity|Waste"] + ) + + assert isclose( + var["Emissions|Gross Fossil CO2|Energy|Supply|Electricity"], co2_emissions.reindex( [ 
"OCGT", @@ -3207,22 +3278,22 @@ def get_constraint_emissions(n, ct): "oil", ], ).sum() - + CHP_emissions.multiply(CHP_E_fraction).values.sum() + + CHP_emissions_E.sum(), ) var["Emissions|CO2|Energy|Supply|Electricity"] = ( var["Emissions|Gross Fossil CO2|Energy|Supply|Electricity"] - - CHP_negative_emissions.multiply(negative_CHP_E_fraction).values.sum() + - CHP_negative_emissions_E.sum() ) var["Emissions|Gross Fossil CO2|Energy|Supply|Heat"] = ( co2_emissions.filter(like="urban central").filter(like="boiler").sum() - + CHP_emissions.multiply(1 - CHP_E_fraction).values.sum() + + CHP_emissions_H.sum() ) var["Emissions|CO2|Energy|Supply|Heat"] = ( var["Emissions|Gross Fossil CO2|Energy|Supply|Heat"] - - CHP_negative_emissions.multiply(1 - negative_CHP_E_fraction).values.sum() + - CHP_negative_emissions_H.sum() ) var["Emissions|CO2|Energy|Supply|Electricity and Heat"] = ( @@ -5781,7 +5852,7 @@ def get_data( opts="", ll="vopt", sector_opts="None", - run="LowRES", + run="HighDemand", decision="LowDemand", sensitivity="base", eeg_level=0.7, From cf3bb1bcbc892e8d38a7ed175480638970cd6abe Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Thu, 16 Oct 2025 17:37:47 +0200 Subject: [PATCH 180/202] change weather year --- config/config.de.yaml | 2 +- config/scenarios.manual.yaml | 4 ++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/config/config.de.yaml b/config/config.de.yaml index f6fc261ec..c15d371a4 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -4,7 +4,7 @@ # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#run run: - prefix: 20251015_resource_classes + prefix: 20251016_wetter_2012 name: # - ExPol # - KN2045_Mix diff --git a/config/scenarios.manual.yaml b/config/scenarios.manual.yaml index 7837b7bc0..d3a589582 100644 --- a/config/scenarios.manual.yaml +++ b/config/scenarios.manual.yaml @@ -3,6 +3,7 @@ # # SPDX-License-Identifier: MIT HighDemand: + weather_year: 2012 electricity: transmission_limit: v1 solving: @@ -43,6 +44,7 @@ HighDemand: urban central water pits: 2040 HighDemandNoFlex: + weather_year: 2012 electricity: transmission_limit: v1 iiasa_database: @@ -85,6 +87,7 @@ HighDemandNoFlex: urban central water pits: 2040 LowDemand: # Demand and capacities from UBA + weather_year: 2012 electricity: transmission_limit: v1 iiasa_database: @@ -153,6 +156,7 @@ LowDemand: # Demand and capacities from UBA urban central water pits: 2040 LowDemandNoFlex: + weather_year: 2012 electricity: transmission_limit: v1 iiasa_database: From 5254cc802230186b282b247c4af9077768014f90 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 31 Oct 2025 16:16:29 +0100 Subject: [PATCH 181/202] add agriculture machinery electric to exporter --- scripts/pypsa-de/export_ariadne_variables.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index 9bf2df08d..b2f7db01a 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -2469,7 +2469,7 @@ def get_final_energy( var["Final Energy|Agriculture|Electricity"] = sum_load( n, "agriculture electricity", region - ) + ) + sum_load(n, "agriculture machinery electric", region) var["Final Energy|Agriculture|Heat"] = sum_load(n, "agriculture heat", region) var["Final Energy|Agriculture|Liquids"] = sum_load( n, "agriculture machinery oil", region From 079fa6ff310d81ebf18fd6cea045e61eca359355 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 31 Oct 2025 
16:16:51 +0100 Subject: [PATCH 182/202] restrict cross border flows more --- config/config.de.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/config/config.de.yaml b/config/config.de.yaml index c15d371a4..a175d7deb 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -83,8 +83,8 @@ iiasa_database: no_flex_lt_run: false # If true, removes all flexibility options scale_industry_non_energy: false # Scale non-energy industry demand directly proportional to energy demand restrict_cross_border_flows: # restricts cross border flows between all countries (AC) - 2025: 0.5 - 2030: 0.5 + 2025: 0.4 + 2030: 0.45 2035: 0.5 # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#foresight From b77345802c245a1d08b3fea2d19353e6dbf2afa2 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 31 Oct 2025 17:14:53 +0100 Subject: [PATCH 183/202] Fix the co2 price to 200 for this run --- Snakefile | 1 + config/config.de.yaml | 4 +++- scripts/pypsa-de/additional_functionality.py | 17 +++++++++++------ scripts/pypsa-de/modify_prenetwork.py | 17 +++++++++++++++++ 4 files changed, 32 insertions(+), 7 deletions(-) diff --git a/Snakefile b/Snakefile index 5512b96d7..d9353b62c 100644 --- a/Snakefile +++ b/Snakefile @@ -563,6 +563,7 @@ rule modify_prenetwork: restrict_cross_border_flows=config_provider( "iiasa_database", "restrict_cross_border_flows" ), + force_co2_price=config_provider("iiasa_database", "force_co2_price"), input: costs_modifications="ariadne-data/costs_{planning_horizons}-modifications.csv", network=resources( diff --git a/config/config.de.yaml b/config/config.de.yaml index a175d7deb..200b18335 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -4,7 +4,7 @@ # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#run run: - prefix: 20251016_wetter_2012 + prefix: 20251031_force_co2_price name: # - ExPol # - KN2045_Mix @@ -86,6 +86,8 @@ iiasa_database: 2025: 0.4 2030: 0.45 2035: 0.5 + force_co2_price: + 2030: 200.0 # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#foresight foresight: myopic diff --git a/scripts/pypsa-de/additional_functionality.py b/scripts/pypsa-de/additional_functionality.py index 7260d044c..c75587f74 100644 --- a/scripts/pypsa-de/additional_functionality.py +++ b/scripts/pypsa-de/additional_functionality.py @@ -916,12 +916,17 @@ def additional_functionality(n, snapshots, snakemake): add_h2_derivate_limit(n, investment_year, constraints["limits_volume_max"]) if isinstance(constraints["co2_budget_national"], dict): - add_national_co2_budgets( - n, - snakemake, - constraints["co2_budget_national"], - investment_year, - ) + if "co2 atmosphere" not in n.generators.index: + add_national_co2_budgets( + n, + snakemake, + constraints["co2_budget_national"], + investment_year, + ) + else: + logger.warning( + "CO2 atmosphere generator found. Skipping national CO2 budget constraints to avoid conflicts."
+ ) else: logger.warning("No national CO2 budget specified!") diff --git a/scripts/pypsa-de/modify_prenetwork.py b/scripts/pypsa-de/modify_prenetwork.py index f563ecfcc..03db2e6e5 100644 --- a/scripts/pypsa-de/modify_prenetwork.py +++ b/scripts/pypsa-de/modify_prenetwork.py @@ -1541,4 +1541,21 @@ def restrict_cross_border_flows(n, s_max_pu): n, snakemake.params.restrict_cross_border_flows[current_year] ) + if current_year in snakemake.params.get("force_co2_price", {}): + logger.info("Adding negative CO2 generator and dropping co2 limits.") + + n.add( + "Generator", + "co2 atmosphere", + bus="co2 atmosphere", + p_min_pu=-1, + p_max_pu=0, + p_nom_extendable=True, + carrier="co2", + marginal_cost=-snakemake.params.force_co2_price[current_year], + ) + n.global_constraints.drop("CO2Limit", inplace=True) + # Instead of removing it here, never add it in additional_functionality + # n.global_constraints.drop("co2_limit-DE", inplace=True) + n.export_to_netcdf(snakemake.output.network) From 0ef6a76696969d82e544fe999b38a265c172a607 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 31 Oct 2025 17:19:46 +0100 Subject: [PATCH 184/202] Assume that no more than 25 GW can be reached for offshore wind --- scripts/pypsa-de/additional_functionality.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/pypsa-de/additional_functionality.py b/scripts/pypsa-de/additional_functionality.py index c75587f74..3654b4166 100644 --- a/scripts/pypsa-de/additional_functionality.py +++ b/scripts/pypsa-de/additional_functionality.py @@ -882,7 +882,7 @@ def additional_functionality(n, snapshots, snakemake): eeg_sweep_year ] = constraints["limits_capacity_max"]["Generator"]["offwind"]["DE"][ eeg_sweep_year - ] = 30 * lvl + ] = 25 * lvl constraints["limits_capacity_min"]["Generator"]["solar"]["DE"][ eeg_sweep_year ] = constraints["limits_capacity_max"]["Generator"]["solar"]["DE"][ From 64a6ee1554a1530fccaaf69e5aa2a0bb62982ce2 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 31 Oct 2025 17:20:07 +0100 Subject: [PATCH 185/202] assume no more than 24 gw can be reached for offshore wind --- scripts/pypsa-de/additional_functionality.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/pypsa-de/additional_functionality.py b/scripts/pypsa-de/additional_functionality.py index 3654b4166..e13f80c52 100644 --- a/scripts/pypsa-de/additional_functionality.py +++ b/scripts/pypsa-de/additional_functionality.py @@ -882,7 +882,7 @@ def additional_functionality(n, snapshots, snakemake): eeg_sweep_year ] = constraints["limits_capacity_max"]["Generator"]["offwind"]["DE"][ eeg_sweep_year - ] = 25 * lvl + ] = 24 * lvl constraints["limits_capacity_min"]["Generator"]["solar"]["DE"][ eeg_sweep_year ] = constraints["limits_capacity_max"]["Generator"]["solar"]["DE"][ From 4b4bc8f249c126cbd321bd5fdee2adad3ecaa1cb Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Mon, 10 Nov 2025 16:22:41 +0100 Subject: [PATCH 186/202] fix curtailment calculation --- scripts/pypsa-de/export_ariadne_variables.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index b2f7db01a..f537e21fd 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -1614,7 +1614,19 @@ def get_secondary_energy(n, region, _industry_demand): var["Secondary Energy|Electricity|Curtailment"] = ( n.statistics.curtailment(bus_carrier=["AC", "low voltage"], 
**kwargs) .filter(like=region) - .values.sum() + .groupby("carrier") + .sum() + .reindex( + [ + "offwind-ac", + "offwind-dc", + "onwind", + "solar", + "solar rooftop", + "solar-hsat", + ] + ) + .sum() ) electricity_balance = ( From aedf7381ccabd5db96d8efbac35385fb02bc66b7 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Mon, 10 Nov 2025 17:39:46 +0100 Subject: [PATCH 187/202] add transmission losses --- scripts/pypsa-de/export_ariadne_variables.py | 37 +++++++++++++------- 1 file changed, 24 insertions(+), 13 deletions(-) diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index f537e21fd..19216c01e 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -1653,19 +1653,29 @@ def get_secondary_energy(n, region, _industry_demand): ).sum() ) - # TODO Compute transmission losses via links_t - # var["Secondary Energy|Electricity|Transmission Losses"] = \ - # n.statistics.withdrawal( - # bus_carrier=["AC", "low voltage"], **kwargs - # ).filter(like=region).groupby(["carrier"]).sum().get( - # ["AC", "DC", "electricity distribution grid"] - # ).subtract( - # n.statistics.supply( - # bus_carrier=["AC", "low voltage"], **kwargs - # ).filter(like=region).groupby(["carrier"]).sum().get( - # ["AC", "DC", "electricity distribution grid"] - # ) - # ).sum() + idx = n.lines.query( + f"bus0.str.contains('{region}') and bus1.str.contains('{region}')" + ).index + acl = ( + n.lines_t.p0[idx].sum(axis=1).multiply(n.snapshot_weightings.generators).sum() + + n.lines_t.p1[idx].sum(axis=1).multiply(n.snapshot_weightings.generators).sum() + ) + idx = n.links.query( + f"bus0.str.contains('{region}') and bus1.str.contains('{region}') and carrier == 'DC'" + ).index + dcl = ( + n.links_t.p0[idx].sum(axis=1).multiply(n.snapshot_weightings.generators).sum() + + n.links_t.p1[idx].sum(axis=1).multiply(n.snapshot_weightings.generators).sum() + ) + idx = n.links.query( + f"bus0.str.contains('{region}') and bus1.str.contains('{region}') and carrier == 'electricity distribution grid'" + ).index + distril = ( + n.links_t.p0[idx].sum(axis=1).multiply(n.snapshot_weightings.generators).sum() + + n.links_t.p1[idx].sum(axis=1).multiply(n.snapshot_weightings.generators).sum() + ) + + var["Secondary Energy|Electricity|Transmission Losses"] = acl + dcl + distril # supply - withdrawal # var["Secondary Energy|Electricity|Storage"] = \ @@ -5819,6 +5829,7 @@ def get_data( var["Demand|Electricity"] = var.reindex( [ "Secondary Energy|Electricity|Storage Losses", + "Secondary Energy|Electricity|Transmission Losses", "Secondary Energy Input|Electricity|Heat", "Secondary Energy Input|Electricity|Hydrogen", "Secondary Energy Input|Electricity|Liquids", From 9321b283d57b4d327aa14ef5b5c4b78ae4afd62c Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 12 Nov 2025 18:04:55 +0100 Subject: [PATCH 188/202] adjust rescom electricity to UBA and industry H2 back to pypsa --- Snakefile | 3 +++ config/scenarios.manual.yaml | 8 +++++++ scripts/pypsa-de/modify_prenetwork.py | 33 ++++++++++++++++++++++++++- 3 files changed, 43 insertions(+), 1 deletion(-) diff --git a/Snakefile b/Snakefile index d9353b62c..94c914b41 100644 --- a/Snakefile +++ b/Snakefile @@ -557,6 +557,9 @@ rule modify_prenetwork: bev_energy=config_provider("sector", "bev_energy"), bev_dsm_availability=config_provider("sector", "bev_dsm_availability"), uba_for_industry=config_provider("iiasa_database", "uba_for_industry"), + uba_for_rescom_electricity=config_provider( + 
"iiasa_database", "uba_for_rescom_electricity" + ), scale_industry_non_energy=config_provider( "iiasa_database", "scale_industry_non_energy" ), diff --git a/config/scenarios.manual.yaml b/config/scenarios.manual.yaml index d3a589582..fc0c7ff59 100644 --- a/config/scenarios.manual.yaml +++ b/config/scenarios.manual.yaml @@ -99,6 +99,8 @@ LowDemand: # Demand and capacities from UBA - 2025 - 2030 - 2035 + uba_for_rescom_electricity: + - 2030 offshore_nep_force: delay_years: 2 solving: @@ -169,6 +171,8 @@ LowDemandNoFlex: - 2025 - 2030 - 2035 + uba_for_rescom_electricity: + - 2030 offshore_nep_force: delay_years: 2 solving: @@ -231,6 +235,8 @@ LowRES: # Demand from UBA, lower RES capacities - 2025 - 2030 - 2035 + uba_for_rescom_electricity: + - 2030 offshore_nep_force: delay_years: 2 solving: @@ -293,6 +299,8 @@ LowRESNoFlex: - 2025 - 2030 - 2035 + uba_for_rescom_electricity: + - 2030 offshore_nep_force: delay_years: 2 solving: diff --git a/scripts/pypsa-de/modify_prenetwork.py b/scripts/pypsa-de/modify_prenetwork.py index 03db2e6e5..35bbb1d41 100644 --- a/scripts/pypsa-de/modify_prenetwork.py +++ b/scripts/pypsa-de/modify_prenetwork.py @@ -826,6 +826,31 @@ def must_run(n, params): n.links.loc[links_i, "p_min_pu"] = p_min_pu +def modify_rescom_demand(n): + logger.info( + "Modifying residential and commercial electricity demand in Germany towards UBA Projektionsbericht 2025." + ) + # UBA Projektionsbericht 2025, Tabelle 18, Verbrauch GHD + Haushalte ohne Wärmepumpen, 2030 + + uba_rescom = 94.9 - 3.1 + 143.5 - 23.6 + fraction_modelyear = n.snapshot_weightings.stores.sum() / 8760 + loads_i = n.loads[ + (n.loads.carrier == "electricity") & n.loads.index.str.startswith("DE") + ] + old_demand = ( + n.loads_t.p_set.loc[:, loads_i.index] + .sum(axis=1) + .mul(n.snapshot_weightings.stores) + .sum() + ) + new_demand = uba_rescom * fraction_modelyear + scale_factor = new_demand / old_demand + logger.info( + f"Scaling residential and commercial electricity loads in Germany by {scale_factor:.2f}.\nPrevious total demand: {old_demand:.2f} MWh/a, new total demand: {new_demand:.2f} MWh/a." + ) + n.loads_t.p_set.loc[:, loads_i.index] *= scale_factor + + def modify_mobility_demand(n, mobility_data_file): """ Change loads in Germany to use exogenous data for road demand. @@ -1361,7 +1386,7 @@ def modify_industry_demand( for carrier in [ "industry electricity", - "H2 for industry", + # "H2 for industry", # skip because UBA is too optimistic on H2 "solid biomass for industry", "low-temperature heat for industry", ]: @@ -1527,6 +1552,12 @@ def restrict_cross_border_flows(n, s_max_pu): scale_non_energy=snakemake.params.scale_industry_non_energy, ) + if current_year in snakemake.params.uba_for_rescom_electricity: + if current_year != 2030: + logger.error( + "The UBA for rescom electricity data is only available for 2030. Please check your config." 
+ ) + modify_rescom_demand(n) # For regret runs deactivate_late_transmission_projects(n, current_year) From a3ee1ef60e42d6efa4ad795e7342623b8af77f14 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 12 Nov 2025 18:13:28 +0100 Subject: [PATCH 189/202] use correct unit --- scripts/pypsa-de/modify_prenetwork.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/scripts/pypsa-de/modify_prenetwork.py b/scripts/pypsa-de/modify_prenetwork.py index 35bbb1d41..c8cf606ca 100644 --- a/scripts/pypsa-de/modify_prenetwork.py +++ b/scripts/pypsa-de/modify_prenetwork.py @@ -832,15 +832,15 @@ def modify_rescom_demand(n): ) # UBA Projektionsbericht 2025, Tabelle 18, Verbrauch GHD + Haushalte ohne Wärmepumpen, 2030 - uba_rescom = 94.9 - 3.1 + 143.5 - 23.6 - fraction_modelyear = n.snapshot_weightings.stores.sum() / 8760 + uba_rescom = (94.9 - 3.1 + 143.5 - 23.6) * 1e6 + fraction_modelyear = n.snapshot_weightings.generators.sum() / 8760 loads_i = n.loads[ (n.loads.carrier == "electricity") & n.loads.index.str.startswith("DE") ] old_demand = ( n.loads_t.p_set.loc[:, loads_i.index] .sum(axis=1) - .mul(n.snapshot_weightings.stores) + .mul(n.snapshot_weightings.generators) .sum() ) new_demand = uba_rescom * fraction_modelyear @@ -1465,8 +1465,8 @@ def restrict_cross_border_flows(n, s_max_pu): opts="", ll="vopt", sector_opts="none", - planning_horizons="2025", - run="HighDemand", + planning_horizons="2030", + run="LowRES", ) configure_logging(snakemake) From 1134cecc98c9dd9f2958315fb5daad2839a0df51 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Mon, 17 Nov 2025 13:17:52 +0100 Subject: [PATCH 190/202] allow the 2025 batteries in NoFlex --- config/config.de.yaml | 12 ++++++------ scripts/pypsa-de/modify_prenetwork.py | 28 +++++++++++++++++++++------ 2 files changed, 28 insertions(+), 12 deletions(-) diff --git a/config/config.de.yaml b/config/config.de.yaml index 200b18335..8b2658fca 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -4,17 +4,17 @@ # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#run run: - prefix: 20251031_force_co2_price + prefix: 20251117_improve_no_flex name: # - ExPol # - KN2045_Mix # - HighDemand # - HighDemandNoFlex - - LowRES - - LowRESNoFlex - #- HighDemandNoFlex # !!! CAVEAT The 'NoFlex' substring in the scenario name determines how the regrets are computed!!! - #- LowDemandNoFlex - # - KN2045_NFhoch + # - LowRES + # - LowRESNoFlex + # - HighDemandNoFlex # !!! CAVEAT The 'NoFlex' substring in the scenario name determines how the regrets are computed!!! 
+ # - LowDemand + - LowDemandNoFlex scenarios: enable: true manual_file: config/scenarios.manual.yaml diff --git a/scripts/pypsa-de/modify_prenetwork.py b/scripts/pypsa-de/modify_prenetwork.py index c8cf606ca..8b7a7d0c8 100644 --- a/scripts/pypsa-de/modify_prenetwork.py +++ b/scripts/pypsa-de/modify_prenetwork.py @@ -1424,8 +1424,8 @@ def modify_industry_demand( ) -def remove_flexibility_options(n): - logger.info("Removing decentral TES, batteries, and BEV DSM from the network.") +def remove_flexibility_options(n, current_year): + logger.info("Removing decentral TES, home batteries, and BEV DSM from the network.") carriers_to_drop = [ "urban decentral water tanks charger", "urban decentral water tanks discharger", @@ -1433,11 +1433,8 @@ def remove_flexibility_options(n): "rural water tanks charger", "rural water tanks discharger", "rural water tanks", - "battery charger", - "battery discharger", "home battery charger", "home battery discharger", - "battery", "home battery", "EV battery", ] @@ -1447,6 +1444,25 @@ def remove_flexibility_options(n): carriers_to_drop.remove("EV battery") n.remove("Bus", n.buses.query("carrier in @carriers_to_drop").index) + if current_year == 2030: + carriers_to_drop = [ + "battery charger", + "battery discharger", + "battery", + ] + n.remove( + "Link", + n.links.query( + f"carrier in {carriers_to_drop} and build_year == {current_year}" + ).index, + ) + n.remove( + "Store", + n.stores.query( + f"carrier in {carriers_to_drop} and build_year == {current_year}" + ).index, + ) + def restrict_cross_border_flows(n, s_max_pu): logger.info( @@ -1563,7 +1579,7 @@ def restrict_cross_border_flows(n, s_max_pu): if snakemake.params.no_flex_lt_run: logger.info("Run without flexibility options detected.") - remove_flexibility_options(n) + remove_flexibility_options(n, current_year) fix_transmission_DE(n) From 832fcff3511cd05d53fa61fa4817dd8ec5516616 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Mon, 17 Nov 2025 13:20:03 +0100 Subject: [PATCH 191/202] avoid config being underspecified --- config/config.de.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/config/config.de.yaml b/config/config.de.yaml index 8b2658fca..ba07971a9 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -80,6 +80,7 @@ iiasa_database: - 2025 uba_for_industry: # For 2025–2035 use MWMS scenario from UBA Projektionsbericht 2025 - 2025 + uba_for_rescom_electricity: false no_flex_lt_run: false # If true, removes all flexibility options scale_industry_non_energy: false # Scale non-energy industry demand directly proportional to energy demand restrict_cross_border_flows: # restricts cross border flows between all countries (AC) From 1da3c4a1df596b30f90e54b55f9a70f1745975d7 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Tue, 18 Nov 2025 17:30:04 +0100 Subject: [PATCH 192/202] allow 2025 batteries in NoFlex scenario --- scripts/pypsa-de/modify_prenetwork.py | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/scripts/pypsa-de/modify_prenetwork.py b/scripts/pypsa-de/modify_prenetwork.py index 8b7a7d0c8..9d403305d 100644 --- a/scripts/pypsa-de/modify_prenetwork.py +++ b/scripts/pypsa-de/modify_prenetwork.py @@ -1425,7 +1425,8 @@ def modify_industry_demand( def remove_flexibility_options(n, current_year): - logger.info("Removing decentral TES, home batteries, and BEV DSM from the network.") + logger.info("Removing decentral TES and BEV DSM from the network.") + n.remove("Store", n.stores.query("carrier == 'EV battery'").index) carriers_to_drop = [ 
"urban decentral water tanks charger", "urban decentral water tanks discharger", @@ -1433,19 +1434,17 @@ def remove_flexibility_options(n, current_year): "rural water tanks charger", "rural water tanks discharger", "rural water tanks", - "home battery charger", - "home battery discharger", - "home battery", - "EV battery", ] - n.remove("Link", n.links.query("carrier in @carriers_to_drop").index) - n.remove("Store", n.stores.query("carrier in @carriers_to_drop").index) - # Need to keep the EV battery bus - carriers_to_drop.remove("EV battery") - n.remove("Bus", n.buses.query("carrier in @carriers_to_drop").index) + n.remove("Link", n.links.query(f"carrier in {carriers_to_drop}").index) + n.remove("Store", n.stores.query(f"carrier in {carriers_to_drop}").index) + n.remove("Bus", n.buses.query(f"carrier in {carriers_to_drop}").index) if current_year == 2030: + logger.info("Removing decentral TES and batteries from the network.") carriers_to_drop = [ + "home battery charger", + "home battery discharger", + "home battery", "battery charger", "battery discharger", "battery", From 2ccec72b04b6fb21194d366735246ff53a3e26df Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 21 Nov 2025 15:46:25 +0100 Subject: [PATCH 193/202] minor fix for rescom elec scaling --- config/config.de.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/config/config.de.yaml b/config/config.de.yaml index ba07971a9..7cd018c01 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -80,7 +80,8 @@ iiasa_database: - 2025 uba_for_industry: # For 2025–2035 use MWMS scenario from UBA Projektionsbericht 2025 - 2025 - uba_for_rescom_electricity: false + uba_for_rescom_electricity: + - 1900 # dummy entry to disable UBA scaling for rescom electricity demand no_flex_lt_run: false # If true, removes all flexibility options scale_industry_non_energy: false # Scale non-energy industry demand directly proportional to energy demand restrict_cross_border_flows: # restricts cross border flows between all countries (AC) From dfe20c7328aa4c8cc2f7d24dbb162352686d047c Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 21 Nov 2025 15:47:59 +0100 Subject: [PATCH 194/202] improve decentral heat budgets --- config/config.de.yaml | 9 +- config/scenarios.manual.yaml | 110 ++++++++++++---- scripts/pypsa-de/additional_functionality.py | 125 ++++++++++--------- 3 files changed, 155 insertions(+), 89 deletions(-) diff --git a/config/config.de.yaml b/config/config.de.yaml index 7cd018c01..c75efc621 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -4,16 +4,15 @@ # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#run run: - prefix: 20251117_improve_no_flex + prefix: 20251121_improve_decentral_heat name: # - ExPol # - KN2045_Mix - # - HighDemand - # - HighDemandNoFlex + - HighDemand + - HighDemandNoFlex # - LowRES # - LowRESNoFlex - # - HighDemandNoFlex # !!! CAVEAT The 'NoFlex' substring in the scenario name determines how the regrets are computed!!! 
- # - LowDemand + - LowDemand - LowDemandNoFlex scenarios: enable: true diff --git a/config/scenarios.manual.yaml b/config/scenarios.manual.yaml index fc0c7ff59..996a6eba1 100644 --- a/config/scenarios.manual.yaml +++ b/config/scenarios.manual.yaml @@ -10,9 +10,16 @@ HighDemand: options: noisy_costs: false constraints: - decentral_heat_pump_budgets: - DE: # UBA MWMS, Projektionsbericht 2025, Abbildung 48 - 2025: 12.0 + decentral_heat_budgets: # UBA MWMS, Projektionsbericht 2025, Abbildung 48 + heat_pump: + DE: + 2025: 12.0 + resistive_heater: + DE: + 2025: 30.0 + biomass_boiler: + DE: + 2025: 103.0 limits_capacity_min: Generator: onwind: @@ -53,9 +60,16 @@ HighDemandNoFlex: options: noisy_costs: false constraints: - decentral_heat_pump_budgets: - DE: # UBA MWMS, Projektionsbericht 2025, Abbildung 48 - 2025: 12.0 + decentral_heat_budgets: # UBA MWMS, Projektionsbericht 2025, Abbildung 48 + heat_pump: + DE: + 2025: 12.0 + resistive_heater: + DE: + 2025: 30.0 + biomass_boiler: + DE: + 2025: 103.0 limits_capacity_min: Generator: onwind: @@ -107,11 +121,22 @@ LowDemand: # Demand and capacities from UBA options: noisy_costs: false constraints: - decentral_heat_pump_budgets: - DE: # UBA MWMS, Projektionsbericht 2025, Abbildung 48 - 2025: 12.0 - 2030: 26.0 - 2035: 38.0 + decentral_heat_budgets: # UBA MWMS, Projektionsbericht 2025, Abbildung 48 + heat_pump: + DE: + 2025: 12.0 + 2030: 26.0 + 2035: 38.0 + resistive_heater: + DE: + 2025: 30.0 + 2030: 23.0 + 2035: 19.0 + biomass_boiler: + DE: + 2025: 103.0 + 2030: 90.0 + 2035: 76.0 # central_heat_pump_budgets: # DE: # UBA MWMS, Projektionsbericht 2025, Abbildung 22 # 2025: 7.0 @@ -179,11 +204,22 @@ LowDemandNoFlex: options: noisy_costs: false constraints: - decentral_heat_pump_budgets: - DE: # UBA MWMS, Projektionsbericht 2025, Abbildung 48 - 2025: 12.0 - 2030: 26.0 - 2035: 38.0 + decentral_heat_budgets: # UBA MWMS, Projektionsbericht 2025, Abbildung 48 + heat_pump: + DE: + 2025: 12.0 + 2030: 26.0 + 2035: 38.0 + resistive_heater: + DE: + 2025: 30.0 + 2030: 23.0 + 2035: 19.0 + biomass_boiler: + DE: + 2025: 103.0 + 2030: 90.0 + 2035: 76.0 limits_capacity_min: Generator: onwind: @@ -243,11 +279,22 @@ LowRES: # Demand from UBA, lower RES capacities options: noisy_costs: false constraints: - decentral_heat_pump_budgets: - DE: # UBA MWMS, Projektionsbericht 2025, Abbildung 48 - 2025: 12.0 - 2030: 26.0 - 2035: 38.0 + decentral_heat_budgets: # UBA MWMS, Projektionsbericht 2025, Abbildung 48 + heat_pump: + DE: + 2025: 12.0 + 2030: 26.0 + 2035: 38.0 + resistive_heater: + DE: + 2025: 30.0 + 2030: 23.0 + 2035: 19.0 + biomass_boiler: + DE: + 2025: 103.0 + 2030: 90.0 + 2035: 76.0 # central_heat_pump_budgets: # DE: # UBA MWMS, Projektionsbericht 2025, Abbildung 22 # 2025: 7.0 @@ -307,11 +354,22 @@ LowRESNoFlex: options: noisy_costs: false constraints: - decentral_heat_pump_budgets: - DE: # UBA MWMS, Projektionsbericht 2025, Abbildung 48 - 2025: 12.0 - 2030: 26.0 - 2035: 38.0 + decentral_heat_budgets: # UBA MWMS, Projektionsbericht 2025, Abbildung 48 + heat_pump: + DE: + 2025: 12.0 + 2030: 26.0 + 2035: 38.0 + resistive_heater: + DE: + 2025: 30.0 + 2030: 23.0 + 2035: 19.0 + biomass_boiler: + DE: + 2025: 103.0 + 2030: 90.0 + 2035: 76.0 # central_heat_pump_budgets: # DE: # UBA MWMS, Projektionsbericht 2025, Abbildung 22 # 2025: 7.0 diff --git a/scripts/pypsa-de/additional_functionality.py b/scripts/pypsa-de/additional_functionality.py index e13f80c52..7abee22fb 100644 --- a/scripts/pypsa-de/additional_functionality.py +++ b/scripts/pypsa-de/additional_functionality.py @@ 
-627,73 +627,82 @@ def add_national_co2_budgets(n, snakemake, national_co2_budgets, investment_year ) -def add_decentral_heat_pump_budgets(n, decentral_heat_pump_budgets, investment_year): - carriers = [ - "rural air heat pump", - "rural ground heat pump", - "urban decentral air heat pump", - "rural resistive heater", - "urban decentral resistive heater", - ] +def add_decentral_heat_budgets(n, decentral_heat_budgets, investment_year): + carrier_dict = { + "heat pump": [ + "rural air heat pump", + "rural ground heat pump", + "urban decentral air heat pump", + ], + "resistive heater": [ + "rural resistive heater", + "urban decentral resistive heater", + ], + "biomass boiler": [ + "rural biomass boiler", + "urban decentral biomass boiler", + ], + } + + for asset_type, budget_dict in decentral_heat_budgets.items(): + assets = n.links.index[n.links.carrier.isin(carrier_dict[asset_type])] + + if assets.empty: + logger.warning( + f"No {asset_type}s found in the network. Skipping decentral {asset_type} budgets." + ) + return - heat_pumps = n.links.index[n.links.carrier.isin(carriers)] + if investment_year not in budget_dict["DE"].keys(): + logger.warning( + f"No decentral {asset_type} budget for {investment_year} found in the config file. Skipping." + ) + return - if heat_pumps.empty: - logger.warning( - "No heat pumps found in the network. Skipping decentral heat pump budgets." - ) - return + logger.info(f"Adding decentral {asset_type} budgets") - if investment_year not in decentral_heat_pump_budgets["DE"].keys(): - logger.warning( - f"No decentral heat pump budget for {investment_year} found in the config file. Skipping." - ) - return + for ct in budget_dict: + if ct != "DE": + logger.error( + f"{asset_type.capitalize()} budget for countries other than `DE` is not yet supported. Found country {ct}. Please check the config file." + ) - logger.info("Adding decentral heat pump budgets") + limit = budget_dict[ct][investment_year] * 1e6 - for ct in decentral_heat_pump_budgets: - if ct != "DE": - logger.error( - f"Heat pump budget for countries other than `DE` is not yet supported. Found country {ct}. Please check the config file." + logger.info( + f"Limiting decentral {asset_type} electricity consumption in country {ct} to {budget_dict[ct][investment_year]:.1%} MWh.", ) + assets = assets[assets.str.startswith(ct)] - limit = decentral_heat_pump_budgets[ct][investment_year] * 1e6 + lhs = [] - logger.info( - f"Limiting decentral heat pump electricity consumption in country {ct} to {decentral_heat_pump_budgets[ct][investment_year]:.1%} MWh.", - ) - heat_pumps = heat_pumps[heat_pumps.str.startswith(ct)] - - lhs = [] + lhs.append( + ( + n.model["Link-p"].loc[:, assets] * n.snapshot_weightings.generators + ).sum() + ) - lhs.append( - ( - n.model["Link-p"].loc[:, heat_pumps] * n.snapshot_weightings.generators - ).sum() - ) + lhs = sum(lhs) - lhs = sum(lhs) + cname = f"decentral_{asset_type}_limit-{ct}" + if cname in n.global_constraints.index: + logger.warning( + f"Global constraint {cname} already exists. Dropping and adding it again." + ) + n.global_constraints.drop(cname, inplace=True) - cname = f"decentral_heat_pump_limit-{ct}" - if cname in n.global_constraints.index: - logger.warning( - f"Global constraint {cname} already exists. Dropping and adding it again." 
+ n.model.add_constraints( + lhs <= limit, + name=f"GlobalConstraint-{cname}", + ) + n.add( + "GlobalConstraint", + cname, + constant=limit, + sense="<=", + type="", + carrier_attribute="", ) - n.global_constraints.drop(cname, inplace=True) - - n.model.add_constraints( - lhs <= limit, - name=f"GlobalConstraint-{cname}", - ) - n.add( - "GlobalConstraint", - cname, - constant=limit, - sense="<=", - type="", - carrier_attribute="", - ) def force_boiler_profiles_existing_per_boiler(n): @@ -934,10 +943,10 @@ def additional_functionality(n, snapshots, snakemake): force_boiler_profiles_existing_per_boiler(n) - if isinstance(constraints.get("decentral_heat_pump_budgets"), dict): - add_decentral_heat_pump_budgets( + if isinstance(constraints.get("decentral_heat_budgets"), dict): + add_decentral_heat_budgets( n, - constraints["decentral_heat_pump_budgets"], + constraints["decentral_heat_budgets"], investment_year, ) From 9edaa6ba40845d9fc71c63f62996319cda1ef571 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 21 Nov 2025 16:28:50 +0100 Subject: [PATCH 195/202] fix whitespace --- scripts/pypsa-de/additional_functionality.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/pypsa-de/additional_functionality.py b/scripts/pypsa-de/additional_functionality.py index 7abee22fb..9f49ab9b4 100644 --- a/scripts/pypsa-de/additional_functionality.py +++ b/scripts/pypsa-de/additional_functionality.py @@ -629,16 +629,16 @@ def add_national_co2_budgets(n, snakemake, national_co2_budgets, investment_year def add_decentral_heat_budgets(n, decentral_heat_budgets, investment_year): carrier_dict = { - "heat pump": [ + "heat_pump": [ "rural air heat pump", "rural ground heat pump", "urban decentral air heat pump", ], - "resistive heater": [ + "resistive_heater": [ "rural resistive heater", "urban decentral resistive heater", ], - "biomass boiler": [ + "biomass_boiler": [ "rural biomass boiler", "urban decentral biomass boiler", ], From e3b9c1b65f13f49cf825f258113cd113f9a3ca1a Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 26 Nov 2025 14:13:57 +0100 Subject: [PATCH 196/202] add producer rents to exporter --- scripts/pypsa-de/export_ariadne_variables.py | 240 +++++++++++++++++++ 1 file changed, 240 insertions(+) diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index 19216c01e..8016a60a2 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -410,6 +410,246 @@ def fill_if_lifetime_inf(n, carrier, lifetime, component="links"): # n.links.query("carrier=='DC' and index.str.startswith('DC')")[["carrier","annuity","capital_cost","lifetime","FOM","build_year"]].sort_values("FOM") +def get_producer_rents(n, region): + carrier_dict = { + "Non-Biomass Renewables": [ + ( + "Generator", + "offwind-ac", + ), + ( + "Generator", + "offwind-dc", + ), + ( + "Generator", + "onwind", + ), + ( + "Generator", + "ror", + ), + ( + "Generator", + "solar rooftop", + ), + ("Generator", "solar-hsat"), + ( + "Link", + "hydro", + ), + ], + "Consumers": [ + ( + "Link", + "BEV charger", + ), + ( + "Link", + "DAC", + ), + ( + "Link", + "H2 Electrolysis", + ), + ( + "Link", + "H2 Fuel Cell", + ), + ( + "Link", + "methanolisation", + ), + ( + "Link", + "rural air heat pump", + ), + ( + "Link", + "rural ground heat pump", + ), + ( + "Link", + "rural resistive heater", + ), + ( + "Link", + "urban central air heat pump", + ), + ( + "Link", + "urban central resistive heater", + ), + ( + "Link", + 
"urban decentral air heat pump", + ), + ( + "Link", + "urban decentral resistive heater", + ), + ( + "Load", + "agriculture electricity", + ), + ( + "Load", + "agriculture machinery electric", + ), + ( + "Load", + "electricity", + ), + ( + "Load", + "industry electricity", + ), + ], + "Conventional": [ + ( + "Link", + "CCGT", + ), + ( + "Link", + "OCGT", + ), + ( + "Link", + "coal", + ), + ( + "Link", + "lignite", + ), + ( + "Link", + "nuclear", + ), + ("Link", "oil"), + ("Link", "urban central coal CHP"), + ("Link", "urban central gas CHP"), + ("Link", "urban central gas CHP CC"), + ("Link", "urban central lignite CHP"), + ("Link", "urban central oil CHP"), + ], + "Transmission": [ # is this correct? + ( + "Line", + "AC", + ), + ( + "Link", + "DC", + ), + # ("Link",'H2 pipeline',), + # ("Link","H2 pipeline (Kernnetz)",), + ( + "Link", + "electricity distribution grid", + ), + ], + "Biomass": [ + ( + "Link", + "solid biomass", + ), + ( + "Link", + "urban central solid biomass CHP", + ), + ( + "Link", + "urban central solid biomass CHP CC", + ), + ], + "Stores": [ + ( + "StorageUnit", + "PHS", + ), + ( + "Link", + "home battery charger", + ), + ( + "Link", + "home battery discharger", + ), + ( + "Link", + "battery charger", + ), + ( + "Link", + "battery discharger", + ), + ], + "Waste": [ + ( + "Link", + "waste CHP", + ), + ( + "Link", + "waste CHP CC", + ), + ], + } + # Invert the dictionary to map specific carriers to their general categories + carrier_map = {val: key for key, values in carrier_dict.items() for val in values} + revenue = ( + n.statistics.revenue(groupby=["bus", "carrier"], nice_names=False) + .filter(like="DE") + .groupby(level=["component", "carrier"]) + .sum() + ) + capex = ( + n.statistics.capex(groupby=["bus", "carrier"], nice_names=False) + .filter(like="DE") + .groupby(level=["component", "carrier"]) + .sum() + ) + capex_by_year = n.statistics.capex( + groupby=["bus", "carrier", "build_year"], nice_names=False + ) + _capex20 = capex_by_year[ + capex_by_year.index.get_level_values("build_year") >= (2030 - 20) + ] + capex20 = _capex20.filter(like="DE").groupby(level=["component", "carrier"]).sum() + excapex = ( + n.statistics.expanded_capex(groupby=["bus", "carrier"], nice_names=False) + .filter(like="DE") + .groupby(level=["component", "carrier"]) + .sum() + ) + opex = ( + n.statistics.opex(groupby=["bus", "carrier"], nice_names=False) + .filter(like="DE") + .groupby(level=["component", "carrier"]) + .sum() + ) + + pr_expanded = revenue.sub(excapex, fill_value=0).sub(opex, fill_value=0) + pr_expanded = pr_expanded.groupby(pr_expanded.index.map(carrier_map)).sum() + pr_expanded.index = ( + "Production Rent|Annual|Expanded Assets Only|Electricity|" + pr_expanded.index + ) + pr_20 = revenue.sub(capex20, fill_value=0).sub(opex, fill_value=0) + pr_20 = pr_20.groupby(pr_20.index.map(carrier_map)).sum() + pr_20.index = ( + "Production Rent|Annual|20-year Assets Only|Electricity|" + pr_20.index + ) + pr = revenue.sub(capex, fill_value=0).sub(opex, fill_value=0) + pr = pr.groupby(pr.index.map(carrier_map)).sum() + pr.index = "Production Rent|Annual|All Assets|Electricity|" + pr.index + pr_st = revenue.sub(opex, fill_value=0) + pr_st = pr_st.groupby(pr_st.index.map(carrier_map)).sum() + pr_st.index = "Production Rent|Short-term|Electricity|" + pr_st.index + + return pd.concat([pr_expanded, pr_20, pr, pr_st]) + + """ get_system_cost(n, region) From 85df67ced0181b72703bd1fb46cd26711a84d12a Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 26 Nov 2025 14:14:58 +0100 
Subject: [PATCH 197/202] add variable for electricity from biomethane --- scripts/pypsa-de/export_ariadne_variables.py | 23 +++++++++++++++----- 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index 8016a60a2..61ea70dad 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -1783,15 +1783,26 @@ def get_secondary_energy(n, region, _industry_demand): ], ).sum() + gas_fractions = _get_fuel_fractions(n, region, "gas") + var["Secondary Energy|Electricity|Gas|Fossil"] = ( + var["Secondary Energy|Electricity|Gas"] * gas_fractions["Natural Gas"] + ) + var["Secondary Energy|Electricity|Gas|Biomass"] = ( + var["Secondary Energy|Electricity|Gas"] * gas_fractions["Biomass"] + ) + var["Secondary Energy|Electricity|Fossil"] = ( - var["Secondary Energy|Electricity|Gas"] + var["Secondary Energy|Electricity|Gas|Fossil"] + var["Secondary Energy|Electricity|Oil"] + var["Secondary Energy|Electricity|Coal"] ) - var["Secondary Energy|Electricity|Biomass|w/o CCS"] = electricity_supply.reindex( - ["urban central solid biomass CHP", "solid biomass", "biogas"] - ).sum() + var["Secondary Energy|Electricity|Biomass|w/o CCS"] = ( + electricity_supply.reindex( + ["urban central solid biomass CHP", "solid biomass", "biogas"] + ).sum() + + var["Secondary Energy|Electricity|Gas|Biomass"] + ) var["Secondary Energy|Electricity|Biomass|w/ CCS"] = electricity_supply.get( "urban central solid biomass CHP CC", 0 ) @@ -1799,8 +1810,8 @@ def get_secondary_energy(n, region, _industry_demand): like="solid biomass" ).sum() var["Secondary Energy|Electricity|Biomass|Gaseous and Liquid"] = ( - electricity_supply.get("biogas") - ) + electricity_supply.get("biogas", 0) + ) + var["Secondary Energy|Electricity|Gas|Biomass"] var["Secondary Energy|Electricity|Biomass"] = ( var["Secondary Energy|Electricity|Biomass|w/o CCS"] + var["Secondary Energy|Electricity|Biomass|w/ CCS"] From 27d0a95c99264bbf9475ceaeb9fd18b89832bb1f Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Wed, 26 Nov 2025 16:56:21 +0100 Subject: [PATCH 198/202] Actually export producer rent -.- --- scripts/pypsa-de/export_ariadne_variables.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index 61ea70dad..9e83f99d6 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -633,19 +633,17 @@ def get_producer_rents(n, region): pr_expanded = revenue.sub(excapex, fill_value=0).sub(opex, fill_value=0) pr_expanded = pr_expanded.groupby(pr_expanded.index.map(carrier_map)).sum() pr_expanded.index = ( - "Production Rent|Annual|Expanded Assets Only|Electricity|" + pr_expanded.index + "Producer Rent|Annual|Expanded Assets Only|Electricity|" + pr_expanded.index ) pr_20 = revenue.sub(capex20, fill_value=0).sub(opex, fill_value=0) pr_20 = pr_20.groupby(pr_20.index.map(carrier_map)).sum() - pr_20.index = ( - "Production Rent|Annual|20-year Assets Only|Electricity|" + pr_20.index - ) + pr_20.index = "Producer Rent|Annual|20-year Assets Only|Electricity|" + pr_20.index pr = revenue.sub(capex, fill_value=0).sub(opex, fill_value=0) pr = pr.groupby(pr.index.map(carrier_map)).sum() - pr.index = "Production Rent|Annual|All Assets|Electricity|" + pr.index + pr.index = "Producer Rent|Annual|All Assets|Electricity|" + pr.index pr_st = revenue.sub(opex, 
fill_value=0) pr_st = pr_st.groupby(pr_st.index.map(carrier_map)).sum() - pr_st.index = "Production Rent|Short-term|Electricity|" + pr_st.index + pr_st.index = "Producer Rent|Short-term|Electricity|" + pr_st.index return pd.concat([pr_expanded, pr_20, pr, pr_st]) @@ -6013,6 +6011,7 @@ def get_ariadne_var( get_trade(n, region), get_economy(n, region), get_system_cost(n, region), + get_producer_rents(n, region), ] ) From ea1bcc6477db2dc498e06da8a730a1840d740f76 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Thu, 27 Nov 2025 15:05:27 +0100 Subject: [PATCH 199/202] add elec system cost --- scripts/pypsa-de/export_ariadne_variables.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index 9e83f99d6..a92e06e9b 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -645,7 +645,11 @@ def get_producer_rents(n, region): pr_st = pr_st.groupby(pr_st.index.map(carrier_map)).sum() pr_st.index = "Producer Rent|Short-term|Electricity|" + pr_st.index - return pd.concat([pr_expanded, pr_20, pr, pr_st]) + var = pd.concat([pr_expanded, pr_20, pr, pr_st]) + + var["Electricity System Cost|CAPEX"] = capex.sum() + var["Electricity System Cost|OPEX"] = opex.sum() + return var """ @@ -6091,6 +6095,12 @@ def get_data( ] ).sum() + var["Electricity System Cost"] = ( + var["Total Energy System Cost|Trade|Electricity"] + + var["Electricity System Cost|CAPEX"] + + var["Electricity System Cost|OPEX"] + ) + data = [] for v in var.index: try: From 58e811a4a074b5ab52da877cd12c1f3806c32763 Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Thu, 27 Nov 2025 15:43:02 +0100 Subject: [PATCH 200/202] fix: restrict elec system cost to elec system --- scripts/pypsa-de/export_ariadne_variables.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index a92e06e9b..8d11e4337 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -647,8 +647,12 @@ def get_producer_rents(n, region): var = pd.concat([pr_expanded, pr_20, pr, pr_st]) - var["Electricity System Cost|CAPEX"] = capex.sum() - var["Electricity System Cost|OPEX"] = opex.sum() + var["Electricity System Cost|CAPEX"] = ( + capex.groupby(capex.index.map(carrier_map)).sum().sum() + ) + var["Electricity System Cost|OPEX"] = ( + opex.groupby(opex.index.map(carrier_map)).sum().sum() + ) return var From e188984446bcecf325b2dec4adc13599b19b64fa Mon Sep 17 00:00:00 2001 From: Michael Lindner Date: Fri, 28 Nov 2025 13:57:05 +0100 Subject: [PATCH 201/202] compute TESC via loads --- scripts/pypsa-de/export_ariadne_variables.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index 8d11e4337..f138f17d7 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -5264,6 +5264,11 @@ def get_trade_cost(n, region, carriers): n.statistics.capex().sum() + n.statistics.opex().sum() ) / 1e9 + rev = n.statistics.revenue(groupby=["bus"]).loc["Load"] + var["Total Energy System Cost|Load Revenue"] = ( + rev[rev.index.str.startswith(region)].sum() / 1e9 + ) + return var From 6c8d93c02d6985be84804974ac0702c0041cd879 Mon Sep 17 00:00:00 2001 From: Michael LINDNER Date: Thu, 8 Jan 2026 17:09:42 +0100 Subject: 
[PATCH 202/202] config for final run --- config/config.de.yaml | 84 +++++++++++++++++++++---------------------- 1 file changed, 42 insertions(+), 42 deletions(-) diff --git a/config/config.de.yaml b/config/config.de.yaml index c75efc621..4b31d1af8 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -4,7 +4,7 @@ # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#run run: - prefix: 20251121_improve_decentral_heat + prefix: 20251113_lower_rescom_merge name: # - ExPol # - KN2045_Mix @@ -19,7 +19,7 @@ run: manual_file: config/scenarios.manual.yaml file: config/scenarios.automated.yaml shared_resources: - policy: base #stops recalculating + policy: false #stops recalculating exclude: - existing_heating.csv # specify files which should not be shared between scenarios - costs @@ -62,11 +62,11 @@ iiasa_database: - 2030 EEG_levels: - 1.00 - # - 0.95 - # - 0.90 - # - 0.85 - # - 0.80 - # - 0.75 + - 0.95 + - 0.90 + - 0.85 + - 0.80 + - 0.75 - 0.70 unit_commitment: enable: false @@ -97,7 +97,7 @@ foresight: myopic # Wildcard docs in https://pypsa-eur.readthedocs.io/en/latest/wildcards.html scenario: clusters: - - 27 #current options: 27, 49 + - 49 #current options: 27, 49 opts: - '' sector_opts: @@ -244,40 +244,40 @@ clustering: # print(fw.div(fw.sum()).subtract(5e-5).round(4).to_dict().__repr__().replace(",","\n")) focus_weights: # 27 nodes: 8 for Germany, 3 for Italy, 2 each for Denmark, UK and Spain, 1 per each of other 10 "Stromnachbarn" - 'DE': 0.2966 - 'AT': 0.0370 - 'BE': 0.0370 - 'CH': 0.0370 - 'CZ': 0.0370 - 'DK': 0.0741 - 'FR': 0.0370 - 'GB': 0.0741 - 'LU': 0.0370 - 'NL': 0.0370 - 'NO': 0.0370 - 'PL': 0.0370 - 'SE': 0.0370 - 'ES': 0.0741 - 'IT': 0.1111 + # 'DE': 0.2966 + # 'AT': 0.0370 + # 'BE': 0.0370 + # 'CH': 0.0370 + # 'CZ': 0.0370 + # 'DK': 0.0741 + # 'FR': 0.0370 + # 'GB': 0.0741 + # 'LU': 0.0370 + # 'NL': 0.0370 + # 'NO': 0.0370 + # 'PL': 0.0370 + # 'SE': 0.0370 + # 'ES': 0.0741 + # 'IT': 0.1111 # high spatial resolution: change clusters to 49 # 49 nodes: 30 for Germany, 3 for Italy, 2 each for Denmark, UK and Spain, 1 per each of other 10 "Stromnachbarn" - # 'DE': 0.6124 - # 'AT': 0.0204 - # 'BE': 0.0204 - # 'CH': 0.0204 - # 'CZ': 0.0204 - # 'DK': 0.0408 - # 'FR': 0.0204 - # 'GB': 0.0408 - # 'LU': 0.0204 - # 'NL': 0.0204 - # 'NO': 0.0204 - # 'PL': 0.0204 - # 'SE': 0.0204 - # 'ES': 0.0408 - # 'IT': 0.0612 + 'DE': 0.6124 + 'AT': 0.0204 + 'BE': 0.0204 + 'CH': 0.0204 + 'CZ': 0.0204 + 'DK': 0.0408 + 'FR': 0.0204 + 'GB': 0.0408 + 'LU': 0.0204 + 'NL': 0.0204 + 'NO': 0.0204 + 'PL': 0.0204 + 'SE': 0.0204 + 'ES': 0.0408 + 'IT': 0.0612 temporal: - resolution_sector: 365H + resolution_sector: 3H # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#co2-budget co2_budget: @@ -330,7 +330,7 @@ costs: # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#sector sector: v2g: false - solar_thermal: false + solar_thermal: true district_heating: potential: 0.3 progress: @@ -486,8 +486,8 @@ industry: # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#solving solving: - runtime: 12h - mem_mb: 70000 #30000 is OK for 22 nodes, 365H; 140000 for 22 nodes 3H; 400000 for 44 nodes 3H + runtime: 24h + mem_mb: 190000 #30000 is OK for 22 nodes, 365H; 140000 for 22 nodes 3H; 400000 for 44 nodes 3H options: assign_all_duals: true load_shedding: false
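The producer-rent and electricity-system-cost variables added in PATCH 196-201 all come down to one identity per asset category: market revenue minus annualised capital cost minus operating cost, evaluated with PyPSA's statistics module and aggregated through a carrier-to-category map. A minimal sketch of that identity on a solved network follows; the network path and the abridged carrier map are illustrative assumptions here, the full map is the carrier_dict defined in get_producer_rents().

# Minimal sketch of the producer-rent identity from export_ariadne_variables.py.
# Assumptions: the network path is a placeholder and the carrier->category map
# is abridged; the complete map lives in get_producer_rents().
import pypsa

n = pypsa.Network("results/postnetworks/base_s_49___2030.nc")  # placeholder path

carrier_map = {  # abridged (component, carrier) -> category map
    ("Generator", "onwind"): "Non-Biomass Renewables",
    ("Generator", "solar-hsat"): "Non-Biomass Renewables",
    ("Link", "CCGT"): "Conventional",
    ("Link", "battery discharger"): "Stores",
}


def de_by_component_carrier(metric):
    # evaluate a statistics metric, keep German buses, sum per (component, carrier)
    s = metric(groupby=["bus", "carrier"], nice_names=False).filter(like="DE")
    return s.groupby(level=["component", "carrier"]).sum()


revenue = de_by_component_carrier(n.statistics.revenue)
capex = de_by_component_carrier(n.statistics.capex)
opex = de_by_component_carrier(n.statistics.opex)

# producer rent over all assets: revenue net of annualised capex and opex,
# aggregated to the reporting categories
rent = revenue.sub(capex, fill_value=0).sub(opex, fill_value=0)
rent_by_category = rent.groupby(rent.index.map(carrier_map)).sum()
print(rent_by_category)

Swapping capex for expanded_capex, or restricting capex to assets whose build_year lies within the last 20 years, reproduces the "Expanded Assets Only" and "20-year Assets Only" variants in the same way.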