diff --git a/Snakefile b/Snakefile index 3bac19229..59fc95e0e 100644 --- a/Snakefile +++ b/Snakefile @@ -346,6 +346,102 @@ rule build_egon_data: "scripts/pypsa-de/build_egon_data.py" +rule prepare_district_heating_subnodes: + params: + district_heating=config_provider("sector", "district_heating"), + baseyear=config_provider("scenario", "planning_horizons", 0), + input: + heating_technologies_nuts3=resources("heating_technologies_nuts3.geojson"), + regions_onshore=resources("regions_onshore_base_s_{clusters}.geojson"), + fernwaermeatlas="data/fernwaermeatlas/fernwaermeatlas.xlsx", + cities="data/fernwaermeatlas/cities_geolocations.geojson", + lau_regions="data/lau_regions.zip", + census=storage( + "https://www.zensus2022.de/static/Zensus_Veroeffentlichung/Zensus2022_Heizungsart.zip", + keep_local=True, + ), + osm_land_cover=storage( + "https://heidata.uni-heidelberg.de/api/access/datafile/23053?format=original&gbrecs=true", + keep_local=True, + ), + natura=ancient("data/bundle/natura/natura.tiff"), + groundwater_depth=storage( + "http://thredds-gfnl.usc.es/thredds/fileServer/GLOBALWTDFTP/annualmeans/EURASIA_WTD_annualmean.nc", + keep_local=True, + ), + output: + district_heating_subnodes=resources( + "district_heating_subnodes_base_s_{clusters}.geojson" + ), + regions_onshore_extended=resources( + "regions_onshore_base-extended_s_{clusters}.geojson" + ), + regions_onshore_restricted=resources( + "regions_onshore_base-restricted_s_{clusters}.geojson" + ), + resources: + mem_mb=20000, + script: + "scripts/pypsa-de/prepare_district_heating_subnodes.py" + + +def baseyear_value(wildcards): + return config_provider("scenario", "planning_horizons", 0)(wildcards) + + +rule add_district_heating_subnodes: + params: + district_heating=config_provider("sector", "district_heating"), + baseyear=config_provider("scenario", "planning_horizons", 0), + sector=config_provider("sector"), + heat_pump_sources=config_provider( + "sector", "heat_pump_sources", "urban central" + ), + 
heat_utilisation_potentials=config_provider(
+            "sector", "district_heating", "heat_utilisation_potentials"
+        ),
+        direct_utilisation_heat_sources=config_provider(
+            "sector", "district_heating", "direct_utilisation_heat_sources"
+        ),
+        adjustments=config_provider("adjustments", "sector"),
+    input:
+        unpack(input_heat_source_power),
+        network=resources(
+            "networks/base_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc"
+        ),
+        subnodes=resources("district_heating_subnodes_base_s_{clusters}.geojson"),
+        nuts3=resources("nuts3_shapes.geojson"),
+        regions_onshore=resources("regions_onshore_base_s_{clusters}.geojson"),
+        fernwaermeatlas="data/fernwaermeatlas/fernwaermeatlas.xlsx",
+        cities="data/fernwaermeatlas/cities_geolocations.geojson",
+        cop_profiles=resources("cop_profiles_base_s_{clusters}_{planning_horizons}.nc"),
+        direct_heat_source_utilisation_profiles=resources(
+            "direct_heat_source_utilisation_profiles_base_s_{clusters}_{planning_horizons}.nc"
+        ),
+        existing_heating_distribution=lambda w: resources(
+            f"existing_heating_distribution_base_s_{{clusters}}_{baseyear_value(w)}.csv"
+        ),
+        lau_regions="data/lau_regions.zip",
+    output:
+        network=resources(
+            "networks/base-extended_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc"
+        ),
+        district_heating_subnodes=resources(
+            "district_heating_subnodes_base_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.geojson"
+        ),
+        # NOTE(review): rule outputs cannot depend on wildcard values via functions,
+        # and the previous guard compared the function object `baseyear_value` to
+        # the literal string "{planning_horizons}", which is always True. Declare
+        # the output unconditionally to match the actual (only possible) behavior.
+        existing_heating_distribution_extended=resources(
+            "existing_heating_distribution_base-extended_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.csv"
+        ),
+    resources:
+        mem_mb=10000,
+    script:
+        "scripts/pypsa-de/add_district_heating_subnodes.py"
+
+
 ruleorder: modify_district_heat_share > build_district_heat_share
@@ -481,6 +577,10 @@ rule retrieve_mastr:
 rule build_existing_chp_de:
+    params:
+        district_heating_subnodes=config_provider(
+            "sector", "district_heating", "subnodes"
+        ),
     input:
mastr_biomass="data/mastr/bnetza_open_mastr_2023-08-08_B_biomass.csv", mastr_combustion="data/mastr/bnetza_open_mastr_2023-08-08_B_combustion.csv", @@ -489,8 +589,13 @@ rule build_existing_chp_de: keep_local=True, ), regions=resources("regions_onshore_base_s_{clusters}.geojson"), + district_heating_subnodes=lambda w: ( + resources("district_heating_subnodes_base_s_{clusters}.geojson") + if config_provider("sector", "district_heating", "subnodes", "enable")(w) + else [] + ), output: - german_chp=resources("german_chp_{clusters}.csv"), + german_chp=resources("german_chp_base_s_{clusters}.csv"), log: logs("build_existing_chp_de_{clusters}.log"), script: diff --git a/config/config.de.yaml b/config/config.de.yaml index 5e2e0f148..97aaa3327 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -4,7 +4,7 @@ # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#run run: - prefix: 20250507_simple_scenarios + prefix: 20250514_dhsubnodes name: # - ExPol - KN2045_Mix @@ -289,6 +289,23 @@ sector: 2040: 0.6 2045: 0.8 2050: 1.0 + subnodes: + enable: false + nlargest: 40 + census_areas: + enable: true + min_district_heating_share: 0.01 + processing: + min_area: [10000, 100000, 0, 1000000] + buffer_factor: [0.01, 0.05, 0.05, 0] + limit_ptes_potential: + enable: true + limit_mother_nodes: true + excluder_resolution: 10 + osm_landcover_codes: [21, 23, 32, 33] + max_groundwater_depth: -10 + min_area: 10000 + default_capacity: 4500 heat_vent: urban central: true urban decentral: true diff --git a/config/config.sysgf.yaml b/config/config.sysgf.yaml new file mode 100644 index 000000000..3208c1d34 --- /dev/null +++ b/config/config.sysgf.yaml @@ -0,0 +1,97 @@ +# SPDX-FileCopyrightText: : 2017-2024 The PyPSA-Eur Authors +# +# SPDX-License-Identifier: CC0-1.0 + +run: + prefix: 20250514_dhsubnodes + name: + - Baseline + # - No_PTES + # - LowPTESCAPEX + # - HighPTESCAPEX + # - LowStandingLosses + # - HighStandingLosses + # - LowGroundWaterDepth + # - 
HighGroundWaterDepth + # - LowEtPRatio + # - HighEtPRatio + scenarios: + enable: true + manual_file: config/scenarios.sysgf.yaml + file: config/scenarios.automated.yaml + shared_resources: + policy: base #stops recalculating + exclude: + - existing_heating.csv # specify files which should not be shared between scenarios + - costs + - retrieve_cost # This is necessary to save retrieve_cost_data_{year}.log in the correct folder + - industry_sector_ratios + - build_industry_sector_ratios # This is necessary to save build_industry_sector_ratios_data.log in the correct folder + - modify_existing_heating + +foresight: myopic + +scenario: + ll: + - vopt + clusters: + - 27 #current options: 27, 49 + opts: + - '' + sector_opts: + - none + planning_horizons: + # - 2020 + # - 2025 + # - 2030 + # - 2035 + # - 2040 + - 2045 + +clustering: + temporal: + resolution_sector: 365H + +adjustments: + electricity: false + sector: + absolute: + StorageUnit: + urban central water pits: + standing_loss: 0.00012121 + urban central water tanks: + standing_loss: 0.00015476 + +wasserstoff_kernnetz: + enable: false + +sector: + district_heating: + potential: 0.5 + subnodes: + enable: true + limited_heat_sources: + geothermal: + ignore_missing_regions: true + heat_pump_sources: + urban central: + - air + - geothermal + urban decentral: + - air + rural: + - air + - ground + +solving: + constraints: + CCL: false + EQ: false + BAU: false + SAFE: false + efuel_export_ban: false + limits_capacity_max: {} + limits_capacity_min: {} + limits_volume_max: {} + limits_volume_min: {} + limits_power_max: {} diff --git a/config/scenarios.sysgf.yaml b/config/scenarios.sysgf.yaml new file mode 100644 index 000000000..a7edf4961 --- /dev/null +++ b/config/scenarios.sysgf.yaml @@ -0,0 +1,99 @@ +# -*- coding: utf-8 -*- +# SPDX-FileCopyrightText: : 2017-2023 The PyPSA-Eur Authors +# +# SPDX-License-Identifier: MIT + +Baseline: + foresight: myopic + iiasa_database: + reference_scenario: KN2045_Mix + 
co2_budget_DE_source: UBA + +No_PTES: + adjustments: + electricity: false + sector: + absolute: + Store: + urban central water pits: + e_nom_max: 0 + +LowPTESCAPEX: + adjustments: + electricity: false + sector: + factor: + Store: + urban central water pits: + capital_cost: 0.5 + absolute: + Store: + urban central water pits: + standing_loss: 0.00012121 + urban central water tanks: + standing_loss: 0.00015476 + +HighPTESCAPEX: + adjustments: + electricity: false + sector: + factor: + Store: + urban central water pits: + capital_cost: 2 + absolute: + Store: + urban central water pits: + standing_loss: 0.00012121 + urban central water tanks: + standing_loss: 0.00015476 + +LowStandingLosses: + adjustments: + electricity: false + sector: + absolute: + Store: + urban central water pits: + standing_loss: 0 + +HighStandingLosses: + adjustments: + electricity: false + sector: + absolute: + Store: + urban central water pits: + standing_loss: 0.0012121 + +LowGroundWaterDepth: + sector: + district_heating: + subnodes: + limit_ptes_potential: + max_groundwater_depth: -25 + +HighGroundWaterDepth: + sector: + district_heating: + subnodes: + limit_ptes_potential: + max_groundwater_depth: 0 + +LowEtPRatio: + adjustments: + electricity: false + sector: + factor: + Link: + urban central water pits charger: + energy to power ratio: 0.5 + +HighEtPRatio: + adjustments: + electricity: false + sector: + factor: + Link: + urban central water pits charger: + energy to power ratio: 2 diff --git a/data/fernwaermeatlas/cities_geolocations.geojson b/data/fernwaermeatlas/cities_geolocations.geojson new file mode 100755 index 000000000..b67c63b0d --- /dev/null +++ b/data/fernwaermeatlas/cities_geolocations.geojson @@ -0,0 +1,167 @@ +{ +"type": "FeatureCollection", +"name": "cities_geolocation", +"crs": { "type": "name", "properties": { "name": "urn:ogc:def:crs:OGC:1.3:CRS84" } }, +"features": [ +{ "type": "Feature", "properties": { "Stadt": "Berlin" }, "geometry": { "type": "Point", 
"coordinates": [ 13.3989421, 52.5108638 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Hamburg" }, "geometry": { "type": "Point", "coordinates": [ 10.000654, 53.550341 ] } }, +{ "type": "Feature", "properties": { "Stadt": "München" }, "geometry": { "type": "Point", "coordinates": [ 11.5753822, 48.1371079 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Köln" }, "geometry": { "type": "Point", "coordinates": [ 6.959974, 50.938361 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Mülheim an der Ruhr" }, "geometry": { "type": "Point", "coordinates": [ 6.8829192, 51.4272925 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Leverkusen" }, "geometry": { "type": "Point", "coordinates": [ 6.9881194, 51.0324743 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Bonn" }, "geometry": { "type": "Point", "coordinates": [ 7.10066, 50.735851 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Bergisch Gladbach" }, "geometry": { "type": "Point", "coordinates": [ 7.1277379, 50.9929303 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Frankfurt am Main" }, "geometry": { "type": "Point", "coordinates": [ 8.6820917, 50.1106444 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Offenbach am Main" }, "geometry": { "type": "Point", "coordinates": [ 8.7610698, 50.1055002 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Stuttgart" }, "geometry": { "type": "Point", "coordinates": [ 9.1800132, 48.7784485 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Düsseldorf" }, "geometry": { "type": "Point", "coordinates": [ 6.7763137, 51.2254018 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Neuss" }, "geometry": { "type": "Point", "coordinates": [ 6.6916476, 51.1981778 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Dortmund" }, "geometry": { "type": "Point", "coordinates": [ 7.4652789, 51.5142273 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Essen" }, "geometry": { "type": "Point", "coordinates": [ 7.0158171, 51.4582235 ] } 
}, +{ "type": "Feature", "properties": { "Stadt": "Duisburg" }, "geometry": { "type": "Point", "coordinates": [ 6.759562, 51.434999 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Bochum" }, "geometry": { "type": "Point", "coordinates": [ 7.2196635, 51.4818111 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Krefeld" }, "geometry": { "type": "Point", "coordinates": [ 6.5623343, 51.3331205 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Oberhausen" }, "geometry": { "type": "Point", "coordinates": [ 6.8514435, 51.4696137 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Herne" }, "geometry": { "type": "Point", "coordinates": [ 7.219985, 51.5380394 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Gelsenkirchen" }, "geometry": { "type": "Point", "coordinates": [ 7.0960124, 51.5110321 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Bottrop" }, "geometry": { "type": "Point", "coordinates": [ 6.929204, 51.521581 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Recklinghausen" }, "geometry": { "type": "Point", "coordinates": [ 7.1978546, 51.6143815 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Moers" }, "geometry": { "type": "Point", "coordinates": [ 6.62843, 51.451283 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Leipzig" }, "geometry": { "type": "Point", "coordinates": [ 12.3747329, 51.3406321 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Bremen" }, "geometry": { "type": "Point", "coordinates": [ 8.8071646, 53.0758196 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Dresden" }, "geometry": { "type": "Point", "coordinates": [ 13.7381437, 51.0493286 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Hannover" }, "geometry": { "type": "Point", "coordinates": [ 9.7385532, 52.3744779 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Nürnberg" }, "geometry": { "type": "Point", "coordinates": [ 11.077298, 49.453872 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Fürth" }, "geometry": { 
"type": "Point", "coordinates": [ 10.9893626, 49.4772475 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Wuppertal" }, "geometry": { "type": "Point", "coordinates": [ 7.1780374, 51.264018 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Hagen" }, "geometry": { "type": "Point", "coordinates": [ 7.473296, 51.3582945 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Solingen" }, "geometry": { "type": "Point", "coordinates": [ 7.0845893, 51.1721629 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Remscheid" }, "geometry": { "type": "Point", "coordinates": [ 7.228287474564793, 51.184517 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Bielefeld" }, "geometry": { "type": "Point", "coordinates": [ 8.531007, 52.0191005 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Münster" }, "geometry": { "type": "Point", "coordinates": [ 7.6251879, 51.9625101 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Karlsruhe" }, "geometry": { "type": "Point", "coordinates": [ 8.4034195, 49.0068705 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Mannheim" }, "geometry": { "type": "Point", "coordinates": [ 8.4673098, 49.4892913 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Ludwigshafen am Rhein" }, "geometry": { "type": "Point", "coordinates": [ 8.4381568, 49.4704113 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Augsburg" }, "geometry": { "type": "Point", "coordinates": [ 10.8979522, 48.3690341 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Wiesbaden" }, "geometry": { "type": "Point", "coordinates": [ 8.2416556, 50.0820384 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Mainz" }, "geometry": { "type": "Point", "coordinates": [ 8.2762513, 50.0012314 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Mönchengladbach" }, "geometry": { "type": "Point", "coordinates": [ 6.4353792, 51.1947131 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Braunschweig" }, "geometry": { "type": "Point", "coordinates": [ 
10.5236066, 52.2646577 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Kiel" }, "geometry": { "type": "Point", "coordinates": [ 10.135555, 54.3227085 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Chemnitz" }, "geometry": { "type": "Point", "coordinates": [ 12.918914, 50.8323531 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Aachen" }, "geometry": { "type": "Point", "coordinates": [ 6.083862, 50.776351 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Magdeburg" }, "geometry": { "type": "Point", "coordinates": [ 11.6399609, 52.1315889 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Halle" }, "geometry": { "type": "Point", "coordinates": [ 11.9705452, 51.4825041 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Freiburg im Breisgau" }, "geometry": { "type": "Point", "coordinates": [ 7.8494005, 47.9960901 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Lübeck" }, "geometry": { "type": "Point", "coordinates": [ 10.684738, 53.866444 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Erfurt" }, "geometry": { "type": "Point", "coordinates": [ 11.0287364, 50.9777974 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Rostock" }, "geometry": { "type": "Point", "coordinates": [ 12.1400211, 54.0886707 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Kassel" }, "geometry": { "type": "Point", "coordinates": [ 9.4924096, 51.3154546 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Saarbrücken" }, "geometry": { "type": "Point", "coordinates": [ 6.996379, 49.234362 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Hamm" }, "geometry": { "type": "Point", "coordinates": [ 7.815197, 51.6804093 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Potsdam" }, "geometry": { "type": "Point", "coordinates": [ 13.0591397, 52.4009309 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Oldenburg in Holstein" }, "geometry": { "type": "Point", "coordinates": [ 10.8809805, 54.2922574 ] } }, +{ "type": "Feature", 
"properties": { "Stadt": "Osnabrück" }, "geometry": { "type": "Point", "coordinates": [ 8.047635, 52.2719595 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Heidelberg" }, "geometry": { "type": "Point", "coordinates": [ 8.694724, 49.4093582 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Darmstadt" }, "geometry": { "type": "Point", "coordinates": [ 8.6736295, 49.8851869 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Paderborn" }, "geometry": { "type": "Point", "coordinates": [ 8.764869778177559, 51.71895955 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Regensburg" }, "geometry": { "type": "Point", "coordinates": [ 12.0974869, 49.0195333 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Ingolstadt" }, "geometry": { "type": "Point", "coordinates": [ 11.4250395, 48.7630165 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Würzburg" }, "geometry": { "type": "Point", "coordinates": [ 9.9309779, 49.7933723 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Ulm" }, "geometry": { "type": "Point", "coordinates": [ 9.9912458, 48.3984968 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Wolfsburg" }, "geometry": { "type": "Point", "coordinates": [ 10.7861682, 52.4205588 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Heilbronn" }, "geometry": { "type": "Point", "coordinates": [ 9.218655, 49.142291 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Pforzheim" }, "geometry": { "type": "Point", "coordinates": [ 8.7029532, 48.8908846 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Göttingen" }, "geometry": { "type": "Point", "coordinates": [ 9.9351811, 51.5328328 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Reutlingen" }, "geometry": { "type": "Point", "coordinates": [ 9.2114144, 48.4919508 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Koblenz" }, "geometry": { "type": "Point", "coordinates": [ 7.5943951, 50.3533278 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Bremerhaven" }, 
"geometry": { "type": "Point", "coordinates": [ 8.5851945, 53.5505392 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Jena" }, "geometry": { "type": "Point", "coordinates": [ 11.5879359, 50.9281717 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Erlangen" }, "geometry": { "type": "Point", "coordinates": [ 11.0056, 49.5928616 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Trier" }, "geometry": { "type": "Point", "coordinates": [ 6.6441878, 49.7596208 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Salzgitter" }, "geometry": { "type": "Point", "coordinates": [ 10.3593147, 52.1503721 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Siegen" }, "geometry": { "type": "Point", "coordinates": [ 8.0256131, 50.8751175 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Hildesheim" }, "geometry": { "type": "Point", "coordinates": [ 9.9513046, 52.1521636 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Cottbus" }, "geometry": { "type": "Point", "coordinates": [ 14.3357307, 51.7567447 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Aschaffenburg" }, "geometry": { "type": "Point", "coordinates": [ 9.1493636, 49.9740542 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Baunatal" }, "geometry": { "type": "Point", "coordinates": [ 9.4119007, 51.2550775 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Bergkamen" }, "geometry": { "type": "Point", "coordinates": [ 7.6362876, 51.6149389 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Brühl" }, "geometry": { "type": "Point", "coordinates": [ 6.9037057, 50.8291313 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Datteln" }, "geometry": { "type": "Point", "coordinates": [ 7.3385906, 51.651468 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Dillingen-Saar" }, "geometry": { "type": "Point", "coordinates": [ 6.7243197, 49.3552721 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Dinslaken" }, "geometry": { "type": "Point", "coordinates": [ 6.7345106, 
51.5623618 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Eisenhüttenstadt" }, "geometry": { "type": "Point", "coordinates": [ 14.6294413, 52.1448863 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Ensdorf" }, "geometry": { "type": "Point", "coordinates": [ 11.9353839, 49.3435347 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Esslingen am Neckar" }, "geometry": { "type": "Point", "coordinates": [ 9.3071685, 48.7427584 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Flensburg" }, "geometry": { "type": "Point", "coordinates": [ 9.4333264, 54.7833021 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Grevenbroich" }, "geometry": { "type": "Point", "coordinates": [ 6.584893682540318, 51.0862467 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Gladbeck" }, "geometry": { "type": "Point", "coordinates": [ 6.9877343, 51.5718665 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Glücksburg (Ostsee)" }, "geometry": { "type": "Point", "coordinates": [ 9.562033402693672, 54.84544485 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Hanau" }, "geometry": { "type": "Point", "coordinates": [ 8.9169797, 50.132881 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Harrislee" }, "geometry": { "type": "Point", "coordinates": [ 9.3915374, 54.8047109 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Herten" }, "geometry": { "type": "Point", "coordinates": [ 7.1368071, 51.5942009 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Hohenmölsen" }, "geometry": { "type": "Point", "coordinates": [ 12.0981844, 51.1573976 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Hünxe" }, "geometry": { "type": "Point", "coordinates": [ 6.7660319, 51.6414581 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Hürth" }, "geometry": { "type": "Point", "coordinates": [ 6.876568, 50.8807379 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Ketsch" }, "geometry": { "type": "Point", "coordinates": [ 8.5237178, 49.367538 ] } }, +{ 
"type": "Feature", "properties": { "Stadt": "Langballig" }, "geometry": { "type": "Point", "coordinates": [ 9.663334714510913, 54.7919719 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Lünen" }, "geometry": { "type": "Point", "coordinates": [ 7.5228088, 51.6142482 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Marl" }, "geometry": { "type": "Point", "coordinates": [ 7.0829054, 51.6485843 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Merseburg" }, "geometry": { "type": "Point", "coordinates": [ 11.996148, 51.3564413 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Neukirchen-Vluyn" }, "geometry": { "type": "Point", "coordinates": [ 6.5467641, 51.4413742 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Neu-Ulm" }, "geometry": { "type": "Point", "coordinates": [ 9.9987169, 48.3943949 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Püttlingen" }, "geometry": { "type": "Point", "coordinates": [ 6.8827786, 49.287307 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Quierschied" }, "geometry": { "type": "Point", "coordinates": [ 7.052847, 49.3260163 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Saarlouis" }, "geometry": { "type": "Point", "coordinates": [ 6.749846, 49.3164661 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Schongau" }, "geometry": { "type": "Point", "coordinates": [ 10.8967857, 47.8134583 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Schwedt-Oder" }, "geometry": { "type": "Point", "coordinates": [ 14.2840858, 53.0586366 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Schwetzingen" }, "geometry": { "type": "Point", "coordinates": [ 8.5735135, 49.3832919 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Sindelfingen" }, "geometry": { "type": "Point", "coordinates": [ 9.0035455, 48.7084162 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Speyer" }, "geometry": { "type": "Point", "coordinates": [ 8.433615, 49.3165553 ] } }, +{ "type": "Feature", "properties": { 
"Stadt": "Spremberg" }, "geometry": { "type": "Point", "coordinates": [ 14.3804302, 51.5714513 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Süderbrarup" }, "geometry": { "type": "Point", "coordinates": [ 9.775192, 54.6354193 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Tarp" }, "geometry": { "type": "Point", "coordinates": [ 9.4026852, 54.6641816 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Voerde (Niederrhein)" }, "geometry": { "type": "Point", "coordinates": [ 6.6811994, 51.5975224 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Völklingen" }, "geometry": { "type": "Point", "coordinates": [ 6.859519, 49.2522866 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Wadgassen" }, "geometry": { "type": "Point", "coordinates": [ 6.7922183, 49.2634657 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Wallerfangen" }, "geometry": { "type": "Point", "coordinates": [ 6.7183652, 49.3277048 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Wees" }, "geometry": { "type": "Point", "coordinates": [ 9.5186181, 54.8060252 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Zolling" }, "geometry": { "type": "Point", "coordinates": [ 11.7727339, 48.4514051 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Stendal" }, "geometry": { "type": "Point", "coordinates": [ 11.8594279, 52.6050782 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Schwerin" }, "geometry": { "type": "Point", "coordinates": [ 11.4148038, 53.6288297 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Schweinfurt" }, "geometry": { "type": "Point", "coordinates": [ 10.233302, 50.0499945 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Neumünster" }, "geometry": { "type": "Point", "coordinates": [ 9.9815377, 54.0757442 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Weißwasser-O.L." 
}, "geometry": { "type": "Point", "coordinates": [ 14.6373221, 51.5028807 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Lemgo" }, "geometry": { "type": "Point", "coordinates": [ 8.9012894, 52.0280674 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Bergheim" }, "geometry": { "type": "Point", "coordinates": [ 6.6410004, 50.9540457 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Brandenburg an der Havel" }, "geometry": { "type": "Point", "coordinates": [ 12.5497933, 52.4108261 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Castrop-Rauxel" }, "geometry": { "type": "Point", "coordinates": [ 7.3106175, 51.5646195 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Dessau-Roßlau" }, "geometry": { "type": "Point", "coordinates": [ 12.2312238, 51.8465924 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Frankfurt (Oder)" }, "geometry": { "type": "Point", "coordinates": [ 14.549452, 52.3412273 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Gera" }, "geometry": { "type": "Point", "coordinates": [ 12.0832666, 50.8765537 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Greifswald" }, "geometry": { "type": "Point", "coordinates": [ 13.3815238, 54.095791 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Hameln" }, "geometry": { "type": "Point", "coordinates": [ 9.3561569, 52.1039941 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Iserlohn" }, "geometry": { "type": "Point", "coordinates": [ 7.6999713, 51.3746778 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Kaiserslautern" }, "geometry": { "type": "Point", "coordinates": [ 7.7689951, 49.4432174 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Kamp-Lintfort" }, "geometry": { "type": "Point", "coordinates": [ 6.547923, 51.5017981 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Altenholz" }, "geometry": { "type": "Point", "coordinates": [ 10.1187695, 54.3956762 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Annaberg-Buchholz" }, "geometry": 
{ "type": "Point", "coordinates": [ 13.0106108, 50.5788781 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Bad Homburg v.d. Höhe" }, "geometry": { "type": "Point", "coordinates": [ 8.6169093, 50.2267699 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Bad Mergentheim" }, "geometry": { "type": "Point", "coordinates": [ 9.7730692, 49.490532 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Bernburg (Saale)" }, "geometry": { "type": "Point", "coordinates": [ 11.7391606, 51.7930788 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Coswig" }, "geometry": { "type": "Point", "coordinates": [ 12.458638, 51.8803541 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Dreieich" }, "geometry": { "type": "Point", "coordinates": [ 8.7123912, 50.011974 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Flensburg" }, "geometry": { "type": "Point", "coordinates": [ 9.4333264, 54.7833021 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Gelsenkirchen" }, "geometry": { "type": "Point", "coordinates": [ 7.0960124, 51.5110321 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Görlitz" }, "geometry": { "type": "Point", "coordinates": [ 14.991018, 51.1563185 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Kamen" }, "geometry": { "type": "Point", "coordinates": [ 7.6616804, 51.5918019 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Kiel" }, "geometry": { "type": "Point", "coordinates": [ 10.135555, 54.3227085 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Neustrelitz" }, "geometry": { "type": "Point", "coordinates": [ 13.0630004, 53.3617163 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Pfaffenhofen a.d. Ilm" }, "geometry": { "type": "Point", "coordinates": [ 11.5084954, 48.5296743 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Pullach i. 
Isartal" }, "geometry": { "type": "Point", "coordinates": [ 11.5217455, 48.0556122 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Rottweil" }, "geometry": { "type": "Point", "coordinates": [ 8.6251283, 48.165531 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Soltau" }, "geometry": { "type": "Point", "coordinates": [ 9.8433909, 52.9859666 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Traunreut" }, "geometry": { "type": "Point", "coordinates": [ 12.5952942, 47.9627599 ] } }, +{ "type": "Feature", "properties": { "Stadt": "Zittau" }, "geometry": { "type": "Point", "coordinates": [ 14.8064807, 50.8960964 ] } } +] +} diff --git a/data/fernwaermeatlas/fernwaermeatlas.xlsx b/data/fernwaermeatlas/fernwaermeatlas.xlsx new file mode 100755 index 000000000..4edd34dfa Binary files /dev/null and b/data/fernwaermeatlas/fernwaermeatlas.xlsx differ diff --git a/rules/build_sector.smk b/rules/build_sector.smk index 4035639ec..391e07b98 100755 --- a/rules/build_sector.smk +++ b/rules/build_sector.smk @@ -199,7 +199,11 @@ rule build_temperature_profiles: drop_leap_day=config_provider("enable", "drop_leap_day"), input: pop_layout=resources("pop_layout_total.nc"), - regions_onshore=resources("regions_onshore_base_s_{clusters}.geojson"), + regions_onshore=lambda w: ( + resources("regions_onshore_base-extended_s_{clusters}.geojson") + if config_provider("sector", "district_heating", "subnodes", "enable")(w) + else resources("regions_onshore_base_s_{clusters}.geojson") + ), cutout=lambda w: input_cutout( w, config_provider("sector", "heat_demand_cutout")(w) ), @@ -268,7 +272,11 @@ rule build_central_heating_temperature_profiles: energy_totals_year=config_provider("energy", "energy_totals_year"), input: temp_air_total=resources("temp_air_total_base_s_{clusters}.nc"), - regions_onshore=resources("regions_onshore_base_s_{clusters}.geojson"), + regions_onshore=lambda w: ( + resources("regions_onshore_base-extended_s_{clusters}.geojson") + if config_provider("sector", 
"district_heating", "subnodes", "enable")(w) + else resources("regions_onshore_base_s_{clusters}.geojson") + ), output: central_heating_forward_temperature_profiles=resources( "central_heating_forward_temperature_profiles_base_s_{clusters}_{planning_horizons}.nc" @@ -312,7 +320,11 @@ rule build_geothermal_heat_potential: ), input: isi_heat_potentials="data/isi_heat_utilisation_potentials.xlsx", - regions_onshore=resources("regions_onshore_base_s_{clusters}.geojson"), + regions_onshore=lambda w: ( + resources("regions_onshore_base-restricted_s_{clusters}.geojson") + if config_provider("sector", "district_heating", "subnodes", "enable")(w) + else resources("regions_onshore_base_s_{clusters}.geojson") + ), lau_regions="data/lau_regions.zip", output: heat_source_power=resources( @@ -355,7 +367,11 @@ rule build_cop_profiles: ), temp_soil_total=resources("temp_soil_total_base_s_{clusters}.nc"), temp_air_total=resources("temp_air_total_base_s_{clusters}.nc"), - regions_onshore=resources("regions_onshore_base_s_{clusters}.geojson"), + regions_onshore=lambda w: ( + resources("regions_onshore_base-extended_s_{clusters}.geojson") + if config_provider("sector", "district_heating", "subnodes", "enable")(w) + else resources("regions_onshore_base_s_{clusters}.geojson") + ), output: cop_profiles=resources("cop_profiles_base_s_{clusters}_{planning_horizons}.nc"), resources: diff --git a/rules/retrieve.smk b/rules/retrieve.smk index b233c9769..96628bdd7 100755 --- a/rules/retrieve.smk +++ b/rules/retrieve.smk @@ -687,7 +687,7 @@ if config["enable"]["retrieve"]: keep_local=True, ), output: - lau_regions="data/lau_regions.geojson", + lau_regions="data/lau_regions.zip", log: "logs/retrieve_lau_regions.log", lau_regions="data/lau_regions.zip", diff --git a/rules/solve_myopic.smk b/rules/solve_myopic.smk index daeaef7ad..1df6afeec 100644 --- a/rules/solve_myopic.smk +++ b/rules/solve_myopic.smk @@ -11,9 +11,18 @@ rule add_existing_baseyear: costs=config_provider("costs"), 
heat_pump_sources=config_provider("sector", "heat_pump_sources"), energy_totals_year=config_provider("energy", "energy_totals_year"), + add_district_heating_subnodes=config_provider( + "sector", "district_heating", "subnodes", "enable" + ), input: - network=resources( - "networks/base_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc" + network=lambda w: ( + resources( + "networks/base-extended_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc" + ) + if config_provider("sector", "district_heating", "subnodes", "enable")(w) + else resources( + "networks/base_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc" + ) ), powerplants=resources("powerplants_s_{clusters}.csv"), busmap_s=resources("busmap_base_s.csv"), @@ -25,11 +34,17 @@ rule add_existing_baseyear: ) ), cop_profiles=resources("cop_profiles_base_s_{clusters}_{planning_horizons}.nc"), - existing_heating_distribution=resources( - "existing_heating_distribution_base_s_{clusters}_{planning_horizons}.csv" + existing_heating_distribution=lambda w: ( + resources( + "existing_heating_distribution_base-extended_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.csv" + ) + if config_provider("sector", "district_heating", "subnodes", "enable")(w) + else resources( + "existing_heating_distribution_base_s_{clusters}_{planning_horizons}.csv" + ) ), heating_efficiencies=resources("heating_efficiencies.csv"), - custom_powerplants=resources("german_chp_{clusters}.csv"), + custom_powerplants=resources("german_chp_base_s_{clusters}.csv"), output: resources( "networks/base_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}_brownfield.nc" @@ -83,8 +98,14 @@ rule add_brownfield: unpack(input_profile_tech_brownfield), simplify_busmap=resources("busmap_base_s.csv"), cluster_busmap=resources("busmap_base_s_{clusters}.csv"), - network=resources( - "networks/base_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc" + network=lambda w: ( + resources( + 
"networks/base-extended_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc" + ) + if config_provider("sector", "district_heating", "subnodes", "enable")(w) + else resources( + "networks/base_s_{clusters}_{opts}_{sector_opts}_{planning_horizons}.nc" + ) ), network_p=solved_previous_horizon, #solved network at previous time step costs=resources("costs_{planning_horizons}.csv"), diff --git a/scripts/add_brownfield.py b/scripts/add_brownfield.py index 65035b585..c756c082b 100644 --- a/scripts/add_brownfield.py +++ b/scripts/add_brownfield.py @@ -264,12 +264,7 @@ def update_heat_pump_efficiency(n: pypsa.Network, n_p: pypsa.Network, year: int) # get names of heat pumps in previous iteration that cannot be replaced by direct utilisation in this iteration heat_pump_idx_previous_iteration = n_p.links.index[ - n_p.links.index.str.contains("heat pump") - & n_p.links.index.str[:-4].isin( - n.links_t.efficiency.columns.str.rstrip( # sources that can be directly used are no longer represented by heat pumps in the dynamic efficiency dataframe - str(year) - ) - ) + n_p.links.index.str.contains("urban central.*heat pump") ] # construct names of same-technology heat pumps in the current iteration corresponding_idx_this_iteration = heat_pump_idx_previous_iteration.str[:-4] + str( @@ -277,18 +272,22 @@ def update_heat_pump_efficiency(n: pypsa.Network, n_p: pypsa.Network, year: int) ) # update efficiency of heat pumps in previous iteration in-place to efficiency in this iteration n_p.links_t["efficiency"].loc[:, heat_pump_idx_previous_iteration] = ( - n.links_t["efficiency"].loc[:, corresponding_idx_this_iteration].values + n.links_t["efficiency"] + .reindex(columns=corresponding_idx_this_iteration, fill_value=0) + .values ) # Change efficiency2 for heat pumps that use an explicitly modelled heat source previous_iteration_columns = heat_pump_idx_previous_iteration.intersection( n_p.links_t["efficiency2"].columns ) - current_iteration_columns = 
corresponding_idx_this_iteration.intersection( - n.links_t["efficiency2"].columns + corresponding_columns_this_iteration = previous_iteration_columns.str[:-4] + str( + year ) n_p.links_t["efficiency2"].loc[:, previous_iteration_columns] = ( - n.links_t["efficiency2"].loc[:, current_iteration_columns].values + n.links_t["efficiency2"] + .reindex(columns=corresponding_columns_this_iteration, fill_value=0) + .values ) diff --git a/scripts/add_existing_baseyear.py b/scripts/add_existing_baseyear.py index b621f1ced..e2302ae03 100644 --- a/scripts/add_existing_baseyear.py +++ b/scripts/add_existing_baseyear.py @@ -611,6 +611,8 @@ def add_chp_plants(n, grouping_years, costs, baseyear): n.links.loc[bus + suffix, "efficiency2"] = efficiency_heat.loc[bus] continue + # bus1 represents electricity transmission node + bus1 = " ".join(bus.split()[:2]) if generator != "urban central solid biomass CHP": # lignite CHPs are not in DEA database - use coal CHP parameters key = keys[generator] @@ -618,12 +620,13 @@ def add_chp_plants(n, grouping_years, costs, baseyear): bus0 = vars(spatial)[generator].nodes[0] else: bus0 = vars(spatial)[generator].df.loc[bus, "nodes"] + n.add( "Link", bus, suffix=f" urban central {generator} CHP-{grouping_year}", bus0=bus0, - bus1=bus, + bus1=bus1, bus2=bus + " urban central heat", bus3="co2 atmosphere", carrier=f"urban central {generator} CHP", @@ -645,8 +648,8 @@ def add_chp_plants(n, grouping_years, costs, baseyear): "Link", bus, suffix=f" urban {key}-{grouping_year}", - bus0=spatial.biomass.df.loc[bus]["nodes"], - bus1=bus, + bus0=spatial.biomass.df.loc[bus1]["nodes"], + bus1=bus1, bus2=bus + " urban central heat", carrier=generator, p_nom=p_nom[bus], @@ -686,6 +689,8 @@ def add_chp_plants(n, grouping_years, costs, baseyear): n.links.loc[bus + suffix, "p_nom"] = p_nom.loc[bus] continue + # bus1 represents electricity transmission node + bus1 = " ".join(bus.split()[:2]) # CHPs are represented as EOP if no urban central heat bus is available if 
f"{bus} urban central heat" in n.buses.index: bus2 = bus + " urban central heat" @@ -707,7 +712,7 @@ def add_chp_plants(n, grouping_years, costs, baseyear): bus, suffix=f" urban central {generator} CHP-{grouping_year}", bus0=bus0, - bus1=bus, + bus1=bus1, bus2=bus2, bus3="co2 atmosphere", carrier=f"urban central {generator} CHP", @@ -729,8 +734,8 @@ def add_chp_plants(n, grouping_years, costs, baseyear): "Link", p_nom.index, suffix=f" urban {key}-{grouping_year}", - bus0=spatial.biomass.df.loc[p_nom.index]["nodes"], - bus1=bus, + bus0=spatial.biomass.df.loc[bus1]["nodes"], + bus1=bus1, bus2=bus2, carrier=generator, p_nom=p_nom[bus] / costs.at[key, "efficiency"], @@ -864,8 +869,10 @@ def add_heating_capacities_installed_before_baseyear( not heat_system == HeatSystem.URBAN_CENTRAL ) and use_electricity_distribution_grid: nodes_elec = nodes + " low voltage" + nodes_biomass = nodes else: - nodes_elec = nodes + nodes_elec = nodes.str.split().str[:2].str.join(" ") + nodes_biomass = nodes_elec too_large_grouping_years = [ gy for gy in grouping_years if gy >= int(baseyear) @@ -888,7 +895,8 @@ def add_heating_capacities_installed_before_baseyear( # get number of years of each interval _years = valid_grouping_years.diff() # Fill NA from .diff() with value for the first interval - _years[0] = valid_grouping_years[0] - baseyear + default_lifetime + if valid_grouping_years.size > 1: + _years[0] = valid_grouping_years[0] - baseyear + default_lifetime # Installation is assumed to be linear for the past ratios = _years / _years.sum() @@ -1023,7 +1031,7 @@ def add_heating_capacities_installed_before_baseyear( "Link", nodes, suffix=f" {heat_system} biomass boiler-{grouping_year}", - bus0=spatial.biomass.df.loc[nodes, "nodes"].values, + bus0=spatial.biomass.df.loc[nodes_biomass, "nodes"].values, bus1=nodes + " " + heat_system.value + " heat", carrier=heat_system.value + " biomass boiler", efficiency=costs.at["biomass boiler", "efficiency"], diff --git 
a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 111c10d02..9cbe86edb 100755 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -2812,7 +2812,7 @@ def add_land_transport( p_set = transport[nodes] # temperature for correction factor for heating/cooling - temperature = xr.open_dataarray(temp_air_total_file).to_pandas() + temperature = xr.open_dataarray(temp_air_total_file).to_pandas()[spatial.nodes] if shares["electric"] > 0: add_EVs( @@ -3381,6 +3381,7 @@ def add_heat( carrier=f"{heat_system} water pits", standing_loss=1 - np.exp(-1 / 24 / tes_time_constant_days), capital_cost=costs.at["central water pit storage", "capital_cost"], + overnight_cost=costs.at["central water pit storage", "investment"], lifetime=costs.at["central water pit storage", "lifetime"], ) @@ -4934,7 +4935,7 @@ def add_industry( ) n.add( "Link", - spatial.nodes + " waste CHP", + spatial.nodes + " urban central waste CHP", bus0=waste_source, bus1=spatial.nodes, bus2=urban_central_nodes, @@ -6593,9 +6594,10 @@ def add_import_options( if options["cluster_heat_buses"] and not first_year_myopic: cluster_heat_buses(n) - maybe_adjust_costs_and_potentials( - n, snakemake.params["adjustments"], investment_year - ) + if not options["district_heating"]["subnodes"]["enable"]: + maybe_adjust_costs_and_potentials( + n, snakemake.params["adjustments"], investment_year + ) n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards))) diff --git a/scripts/pypsa-de/add_district_heating_subnodes.py b/scripts/pypsa-de/add_district_heating_subnodes.py new file mode 100644 index 000000000..ad083a6ef --- /dev/null +++ b/scripts/pypsa-de/add_district_heating_subnodes.py @@ -0,0 +1,730 @@ +import logging + +logger = logging.getLogger(__name__) + +import os +import sys +from typing import Dict, List + +import geopandas as gpd +import pandas as pd +import pypsa +import xarray as xr + +sys.path.append(os.path.join(os.path.dirname(__file__), "..", 
"..")) + +from scripts._helpers import ( + configure_logging, + set_scenario_config, + update_config_from_wildcards, +) +from scripts.prepare_network import maybe_adjust_costs_and_potentials + + +def add_buses(n: pypsa.Network, subnode: pd.Series, name: str) -> None: + """ + Add buses for a district heating subnode. + + Parameters + ---------- + n : pypsa.Network + The PyPSA network object to which buses will be added. + subnode : pd.Series + Series containing information about the district heating subnode. + name : str + Name prefix for the buses. + + Returns + ------- + None + """ + buses = ( + n.buses.filter(like=f"{subnode['cluster']} urban central", axis=0) + .reset_index() + .replace( + { + f"{subnode['cluster']} urban central": name, + f"{subnode['cluster']}$": f"{subnode['cluster']} {subnode['Stadt']}", + }, + regex=True, + ) + .set_index("Bus") + ) + n.add("Bus", buses.index, **buses) + + +def get_district_heating_loads(n: pypsa.Network): + """ + Get the district heating loads from the network. + + Parameters + ---------- + n : pypsa.Network + The PyPSA network object from which to extract district heating loads. + + Returns + ------- + float + The total district heating load in MWh/a. + """ + return ( + n.snapshot_weightings.generators + @ n.loads_t.p_set.filter( + like="urban central heat", + ) + ).sum() + n.loads.filter(like="low-temperature heat for industry", axis=0)[ + "p_set" + ].sum() * 8760 + + +def add_loads( + n: pypsa.Network, + n_copy: pypsa.Network, + subnode: pd.Series, + name: str, + subnodes_head: gpd.GeoDataFrame, +) -> None: + """ + Add loads for a district heating subnode and adjust mother node loads. + + Parameters + ---------- + n : pypsa.Network + The PyPSA network object to which loads will be added. + n_copy : pypsa.Network + Copy of the original network. + subnode : pd.Series + Series containing data about the district heating subnode to be added. + name : str + Name prefix for the loads. 
+ subnodes_head : gpd.GeoDataFrame + GeoDataFrame containing data about largest district heating systems. + + Returns + ------- + None + """ + # Get heat loads for urban central heat and low-temperature heat for industry + urban_central_heat_load_cluster = ( + n_copy.snapshot_weightings.generators + @ n_copy.loads_t.p_set[f"{subnode['cluster']} urban central heat"] + ) + low_temperature_heat_for_industry_load_cluster = ( + n_copy.loads.loc[ + f"{subnode['cluster']} low-temperature heat for industry", "p_set" + ] + * 8760 + ) + + # Calculate share of low-temperature heat for industry in total district heating load of cluster + dh_load_cluster = ( + urban_central_heat_load_cluster + low_temperature_heat_for_industry_load_cluster + ) + + dh_load_cluster_subnodes = subnodes_head.loc[ + subnodes_head.cluster == subnode["cluster"], "yearly_heat_demand_MWh" + ].sum() + lost_load = dh_load_cluster_subnodes - dh_load_cluster + + # District heating demand from Fernwärmeatlas exceeding the original cluster load is disregarded. The shares of the subsystems are set according to Fernwärmeatlas, while the aggregate load of cluster is preserved. + if lost_load > 0: + logger.warning( + f"Aggregated district heating load of systems within {subnode['cluster']} exceeds load of cluster." 
+ ) + demand_ratio = subnode["yearly_heat_demand_MWh"] / dh_load_cluster_subnodes + + urban_central_heat_load = demand_ratio * n_copy.loads_t.p_set.filter( + regex=f"{subnode['cluster']}.*urban central heat" + ).sum(1).rename(f"{subnode['cluster']} {subnode['Stadt']} urban central heat") + + low_temperature_heat_for_industry_load = ( + demand_ratio + * n_copy.loads.filter( + regex=f"{subnode['cluster']}.*low-temperature heat for industry", + axis=0, + )["p_set"].sum() + ) + else: + # Calculate demand ratio between load of subnode according to Fernwärmeatlas and remaining load of assigned cluster + demand_ratio = subnode["yearly_heat_demand_MWh"] / dh_load_cluster + + urban_central_heat_load = demand_ratio * n_copy.loads_t.p_set[ + f"{subnode['cluster']} urban central heat" + ].rename(f"{subnode['cluster']} {subnode['Stadt']} urban central heat") + + low_temperature_heat_for_industry_load = ( + demand_ratio + * n_copy.loads.loc[ + f"{subnode['cluster']} low-temperature heat for industry", "p_set" + ] + ) + + # Add load components to subnode preserving the share of low-temperature heat for industry of the cluster + n.add( + "Load", + f"{name} heat", + bus=f"{name} heat", + p_set=urban_central_heat_load, + carrier="urban central heat", + ) + + n.add( + "Load", + f"{subnode['cluster']} {subnode['Stadt']} low-temperature heat for industry", + bus=f"{name} heat", + p_set=low_temperature_heat_for_industry_load, + carrier="low-temperature heat for industry", + ) + + # Adjust loads of cluster buses + n.loads_t.p_set.loc[:, f"{subnode['cluster']} urban central heat"] -= ( + urban_central_heat_load + ) + + n.loads.loc[f"{subnode['cluster']} low-temperature heat for industry", "p_set"] -= ( + low_temperature_heat_for_industry_load + ) + + if lost_load > 0: + lost_load_subnode = subnode["yearly_heat_demand_MWh"] - ( + n.snapshot_weightings.generators @ urban_central_heat_load + + low_temperature_heat_for_industry_load * 8760 + ) + logger.warning( + f"District heating load of 
{subnode['cluster']} {subnode['Stadt']} is reduced by {lost_load_subnode} MWh/a." + ) + + +def add_stores( + n: pypsa.Network, + subnode: pd.Series, + name: str, + subnodes_rest: gpd.GeoDataFrame, + dynamic_ptes_capacity: bool = False, + limit_ptes_potential_mother_nodes: bool = False, +) -> None: + """ + Add stores for a district heating subnode. + + Parameters + ---------- + n : pypsa.Network + The PyPSA network object to which stores will be added. + subnode : pd.Series + Series containing information about the district heating subnode. + name : str + Name prefix for the stores. + subnodes_rest : gpd.GeoDataFrame + GeoDataFrame containing information about remaining district heating subnodes. + dynamic_ptes_capacity : bool, optional + Whether to use dynamic PTES capacity, by default False + limit_ptes_potential_mother_nodes : bool, optional + Whether to limit PTES potential in mother nodes, by default False + + Returns + ------- + None + """ + # Replicate district heating stores of mother node for subnodes + stores = ( + n.stores.filter(like=f"{subnode['cluster']} urban central", axis=0) + .reset_index() + .replace( + {f"{subnode['cluster']} urban central": name}, + regex=True, + ) + .set_index("Store") + ) + + # Restrict PTES capacity in subnodes + stores.loc[stores.carrier.str.contains("pits$").index, "e_nom_max"] = subnode[ + "ptes_pot_mwh" + ] + + if dynamic_ptes_capacity: + e_max_pu_static = stores.e_max_pu + e_max_pu = ( + n.stores_t.e_max_pu[f"{subnode['cluster']} urban central water pits"] + .rename(f"{name} water pits") + .to_frame() + .reindex(columns=stores.index) + .fillna(e_max_pu_static) + ) + n.add( + "Store", + stores.index, + e_max_pu=e_max_pu, + **stores.drop("e_max_pu", axis=1), + ) + else: + n.add("Store", stores.index, **stores) + + # Limit storage potential in mother nodes + if limit_ptes_potential_mother_nodes: + mother_nodes_ptes_pot = subnodes_rest.groupby("cluster").ptes_pot_mwh.sum() + + mother_nodes_ptes_pot.index = ( + 
mother_nodes_ptes_pot.index + " urban central water pits" + ) + n.stores.loc[mother_nodes_ptes_pot.index, "e_nom_max"] = mother_nodes_ptes_pot + + +def add_storage_units(n: pypsa.Network, subnode: pd.Series, name: str) -> None: + """ + Add storage units for a district heating subnode. + + Parameters + ---------- + n : pypsa.Network + The PyPSA network object to which storage units will be added. + subnode : pd.Series + Series containing information about the district heating subnode. + name : str + Name prefix for the storage units. + + Returns + ------- + None + """ + # Replicate district heating storage units of mother node for subnodes + storage_units = ( + n.storage_units.filter(like=f"{subnode['cluster']} urban central", axis=0) + .reset_index() + .replace( + {f"{subnode['cluster']} urban central": name}, + regex=True, + ) + .set_index("StorageUnit") + ) + + n.add("StorageUnit", storage_units.index, **storage_units) + + +def add_generators(n: pypsa.Network, subnode: pd.Series, name: str) -> None: + """ + Add generators for a district heating subnode. + + Parameters + ---------- + n : pypsa.Network + The PyPSA network object to which generators will be added. + subnode : pd.Series + Series containing information about the district heating subnode. + name : str + Name prefix for the generators. 
+ + Returns + ------- + None + """ + # Replicate district heating generators of mother node for subnodes + generators = ( + n.generators.filter(like=f"{subnode['cluster']} urban central", axis=0) + .reset_index() + .replace( + {f"{subnode['cluster']} urban central": name}, + regex=True, + ) + .set_index("Generator") + ) + n.add("Generator", generators.index, **generators) + + +def add_links( + n: pypsa.Network, + subnode: pd.Series, + name: str, + cop: xr.DataArray, + direct_heat_source_utilisation_profile: xr.DataArray, + heat_pump_sources: List[str], + direct_utilisation_heat_sources: List[str], + time_dep_hp_cop: bool, + limited_heat_sources: List[str], + heat_source_potentials: Dict[str, str], +) -> None: + """ + Add links for a district heating subnode. + + Parameters + ---------- + n : pypsa.Network + The PyPSA network object to which links will be added. + subnode : pd.Series + Series containing information about the district heating subnode. + name : str + Name prefix for the links. + cop : xr.DataArray + COPs for heat pumps. + direct_heat_source_utilisation_profile : xr.DataArray + Direct heat source utilisation profiles. + heat_pump_sources : List[str] + List of heat pump sources. + direct_utilisation_heat_sources : List[str] + List of heat sources that can be directly utilized. + time_dep_hp_cop : bool + Whether to use time-dependent COPs for heat pumps. + limited_heat_sources : List[str] + List of heat sources with limited potential. + heat_source_potentials : Dict[str, str] + Dictionary mapping heat sources to paths with potential data. 
+ + Returns + ------- + None + """ + # Replicate district heating links of mother node for subnodes with separate treatment for links with dynamic efficiencies + links = ( + n.links.loc[~n.links.carrier.str.contains("heat pump|direct", regex=True)] + .filter(like=f"{subnode['cluster']} urban central", axis=0) + .reset_index() + .replace( + {f"{subnode['cluster']} urban central": name}, + regex=True, + ) + .set_index("Link") + ) + n.add("Link", links.index, **links) + + # Add heat pumps and direct heat source utilization to subnode + for heat_source in heat_pump_sources: + cop_heat_pump = ( + cop.sel( + heat_system="urban central", + heat_source=heat_source, + name=f"{subnode['cluster']} {subnode['Stadt']}", + ) + .to_pandas() + .to_frame(name=f"{name} {heat_source} heat pump") + .reindex(index=n.snapshots) + if time_dep_hp_cop + else n.links.filter(like=heat_source, axis=0).efficiency.mode() + ) + + heat_pump = ( + n.links.filter( + regex=f"{subnode['cluster']} urban central.*{heat_source}.*heat pump", + axis=0, + ) + .reset_index() + .replace( + {f"{subnode['cluster']} urban central": name}, + regex=True, + ) + .drop(["efficiency", "efficiency2"], axis=1) + .set_index("Link") + ) + if heat_pump["bus2"].str.match("$").any(): + n.add("Link", heat_pump.index, efficiency=cop_heat_pump, **heat_pump) + else: + n.add( + "Link", + heat_pump.index, + efficiency=-(cop_heat_pump - 1), + efficiency2=cop_heat_pump, + **heat_pump, + ) + + if heat_source in direct_utilisation_heat_sources: + # Add direct heat source utilization to subnode + efficiency_direct_utilisation = ( + direct_heat_source_utilisation_profile.sel( + heat_source=heat_source, + name=f"{subnode['cluster']} {subnode['Stadt']}", + ) + .to_pandas() + .to_frame(name=f"{name} {heat_source} heat direct utilisation") + .reindex(index=n.snapshots) + ) + + direct_utilization = ( + n.links.filter( + regex=f"{subnode['cluster']} urban central.*{heat_source}.*direct", + axis=0, + ) + .reset_index() + .replace( + 
{f"{subnode['cluster']} urban central": name}, + regex=True, + ) + .set_index("Link") + .drop("efficiency", axis=1) + ) + + n.add( + "Link", + direct_utilization.index, + efficiency=efficiency_direct_utilisation, + **direct_utilization, + ) + + # Restrict heat source potential in subnodes + if heat_source in limited_heat_sources: + # get potential + p_max_source = pd.read_csv( + heat_source_potentials[heat_source], + index_col=0, + ).squeeze()[f"{subnode['cluster']} {subnode['Stadt']}"] + # add potential to generator + n.generators.loc[ + f"{subnode['cluster']} {subnode['Stadt']} urban central {heat_source} heat", + "p_nom_max", + ] = p_max_source + + +def add_subnodes( + n: pypsa.Network, + subnodes: gpd.GeoDataFrame, + cop: xr.DataArray, + direct_heat_source_utilisation_profile: xr.DataArray, + head: int = 40, + dynamic_ptes_capacity: bool = False, + limit_ptes_potential_mother_nodes: bool = True, + heat_pump_sources: List[str] = None, + direct_utilisation_heat_sources: List[str] = None, + time_dep_hp_cop: bool = False, + limited_heat_sources: List[str] = None, + heat_source_potentials: Dict[str, str] = None, + output_path: str = None, +) -> None: + """ + Add the largest district heating systems as subnodes to the network based on + their heat demand. For each subnode, create individual district heating components + from the corresponding mother node template, including heat sources, storage options, + and heat pumps. Adjust loads of original mother nodes to maintain the overall + energy balance. + + They are initialized with: + - the total annual heat demand taken from the mother node, that is assigned to urban central heat and low-temperature heat for industry, + - the heat demand profiles taken from the mother node, + - the district heating investment options (stores, storage units, links, generators) from the mother node, + The district heating loads in the mother nodes are reduced accordingly. 
+ + Parameters + ---------- + n : pypsa.Network + The PyPSA network object to which subnodes will be added. + subnodes : gpd.GeoDataFrame + GeoDataFrame containing information about district heating subnodes. + cop : xr.DataArray + COPs for heat pumps. + direct_heat_source_utilisation_profile : xr.DataArray + Direct heat source utilisation profiles. + head : int + Number of largest district heating systems to be added as subnodes. + dynamic_ptes_capacity : bool + Whether to use dynamic PTES capacity. + limit_ptes_potential_mother_nodes : bool + Whether to limit PTES potential in mother nodes. + heat_pump_sources : List[str] + List of heat pump sources. + direct_utilisation_heat_sources : List[str] + List of heat sources that can be directly utilized. + time_dep_hp_cop : bool + Whether to use time-dependent COPs for heat pumps. + limited_heat_sources : List[str] + List of heat sources with limited potential. + heat_source_potentials : Dict[str, str] + Dictionary mapping heat sources to paths with potential data. + output_path : str + Path to save the subnodes_head GeoDataFrame. 
+ + Returns + ------- + None + """ + + # Keep only n largest district heating networks according to head parameter + subnodes_head = subnodes.sort_values( + by="Wärmeeinspeisung in GWh/a", ascending=False + ).head(head) + + if output_path: + subnodes_head.to_file(output_path, driver="GeoJSON") + + subnodes_rest = subnodes[~subnodes.index.isin(subnodes_head.index)] + + n_copy = n.copy() + + dh_loads_before = get_district_heating_loads(n) + # Add subnodes to network + for _, subnode in subnodes_head.iterrows(): + name = f"{subnode['cluster']} {subnode['Stadt']} urban central" + + # Add different component types + add_buses(n, subnode, name) + add_loads(n, n_copy, subnode, name, subnodes_head) + add_stores( + n, + subnode, + name, + subnodes_rest, + dynamic_ptes_capacity, + limit_ptes_potential_mother_nodes, + ) + add_storage_units(n, subnode, name) + add_generators(n, subnode, name) + add_links( + n, + subnode, + name, + cop, + direct_heat_source_utilisation_profile, + heat_pump_sources, + direct_utilisation_heat_sources, + time_dep_hp_cop, + limited_heat_sources, + heat_source_potentials, + ) + dh_loads_after = get_district_heating_loads(n) + # Check if the total district heating load is preserved + assert dh_loads_before == dh_loads_after, ( + "Total district heating load is not preserved after adding subnodes." + ) + + +def extend_heating_distribution( + existing_heating_distribution: pd.DataFrame, subnodes: gpd.GeoDataFrame +) -> pd.DataFrame: + """ + Extend heating distribution by subnodes mirroring the distribution of the + corresponding mother node. + + Parameters + ---------- + existing_heating_distribution : pd.DataFrame + DataFrame containing the existing heating distribution. + subnodes : gpd.GeoDataFrame + GeoDataFrame containing information about district heating subnodes. + + Returns + ------- + pd.DataFrame + Extended DataFrame with heating distribution for subnodes. 
+ """ + # Merge the existing heating distribution with subnodes on the cluster name + mother_nodes = ( + existing_heating_distribution.loc[subnodes.cluster.unique()] + .unstack(-1) + .to_frame() + ) + cities_within_cluster = subnodes.groupby("cluster")["Stadt"].apply(list) + mother_nodes["cities"] = mother_nodes.apply( + lambda i: cities_within_cluster[i.name[2]], axis=1 + ) + # Explode the list of cities + mother_nodes = mother_nodes.explode("cities") + + # Reset index to temporarily flatten it + mother_nodes_reset = mother_nodes.reset_index() + + # Append city name to the third level of the index + mother_nodes_reset["name"] = ( + mother_nodes_reset["name"] + " " + mother_nodes_reset["cities"] + ) + + # Set the index back + mother_nodes = mother_nodes_reset.set_index(["heat name", "technology", "name"]) + + # Drop the temporary 'cities' column + mother_nodes.drop("cities", axis=1, inplace=True) + + # Reformat to match the existing heating distribution + mother_nodes = mother_nodes.squeeze().unstack(-1).T + + # Combine the exploded data with the existing heating distribution + existing_heating_distribution_extended = pd.concat( + [existing_heating_distribution, mother_nodes] + ) + return existing_heating_distribution_extended + + +if __name__ == "__main__": + if "snakemake" not in globals(): + from scripts._helpers import mock_snakemake + + # Change directory to this script directory + os.chdir(os.path.dirname(os.path.realpath(__file__))) + + snakemake = mock_snakemake( + "add_district_heating_subnodes", + simpl="", + clusters=27, + opts="", + ll="vopt", + sector_opts="none", + planning_horizons="2045", + run="Baseline", + ) + + configure_logging(snakemake) + set_scenario_config(snakemake) + update_config_from_wildcards(snakemake.config, snakemake.wildcards) + + logger.info("Adding SysGF-specific functionality") + + n = pypsa.Network(snakemake.input.network) + + lau = gpd.read_file( + f"{snakemake.input.lau_regions}!LAU_RG_01M_2019_3035.geojson", + 
crs="EPSG:3035", + ).to_crs("EPSG:4326") + + fernwaermeatlas = pd.read_excel( + snakemake.input.fernwaermeatlas, + sheet_name="Fernwärmeatlas_öffentlich", + ) + cities = gpd.read_file(snakemake.input.cities) + regions_onshore = gpd.read_file(snakemake.input.regions_onshore).set_index("name") + + subnodes = gpd.read_file(snakemake.input.subnodes) + + # Create a dictionary of heat source potentials for the limited heat sources + heat_source_potentials = {} + for source in snakemake.params.district_heating["limited_heat_sources"]: + heat_source_potentials[source] = snakemake.input[source] + + add_subnodes( + n, + subnodes, + cop=xr.open_dataarray(snakemake.input.cop_profiles), + direct_heat_source_utilisation_profile=xr.open_dataarray( + snakemake.input.direct_heat_source_utilisation_profiles + ), + head=snakemake.params.district_heating["subnodes"]["nlargest"], + dynamic_ptes_capacity=snakemake.params.district_heating["ptes"][ + "dynamic_capacity" + ], + limit_ptes_potential_mother_nodes=snakemake.params.district_heating["subnodes"][ + "limit_ptes_potential" + ]["limit_mother_nodes"], + heat_pump_sources=snakemake.params.heat_pump_sources, + direct_utilisation_heat_sources=snakemake.params.district_heating[ + "direct_utilisation_heat_sources" + ], + time_dep_hp_cop=snakemake.params.sector["time_dep_hp_cop"], + limited_heat_sources=snakemake.params.district_heating["limited_heat_sources"], + heat_source_potentials=heat_source_potentials, + output_path=snakemake.output.district_heating_subnodes, + ) + + if snakemake.wildcards.planning_horizons == str(snakemake.params["baseyear"]): + existing_heating_distribution = pd.read_csv( + snakemake.input.existing_heating_distribution, + header=[0, 1], + index_col=0, + ) + existing_heating_distribution_extended = extend_heating_distribution( + existing_heating_distribution, subnodes + ) + existing_heating_distribution_extended.to_csv( + snakemake.output.existing_heating_distribution_extended + ) + else: + # write empty file to 
output + with open(snakemake.output.existing_heating_distribution_extended, "w") as f: + pass + + maybe_adjust_costs_and_potentials( + n, snakemake.params["adjustments"], snakemake.wildcards.planning_horizons + ) + n.export_to_netcdf(snakemake.output.network) diff --git a/scripts/pypsa-de/build_existing_chp_de.py b/scripts/pypsa-de/build_existing_chp_de.py index 9d965f81a..8eb92868d 100644 --- a/scripts/pypsa-de/build_existing_chp_de.py +++ b/scripts/pypsa-de/build_existing_chp_de.py @@ -206,6 +206,56 @@ def BP(cap, year): return CHP_de +def assign_subnode( + CHP_de: pd.DataFrame, subnodes: gpd.GeoDataFrame, head: int = 40 +) -> pd.DataFrame: + """ + Assign subnodes to the CHP plants based on their location. + + Parameters + ---------- + CHP_de : pd.DataFrame + DataFrame containing CHP plant data with latitude and longitude. + subnodes : gpd.GeoDataFrame + GeoDataFrame containing subnode data with geometries. + head : Union[bool, int] + If int, select the largest N subnodes. If True, use all subnodes. + + Returns + ------- + pd.DataFrame + DataFrame with assigned subnodes. 
+ """ + + # Make a geodataframe from CHP_de using the lat and lon columns + CHP_de = gpd.GeoDataFrame( + CHP_de, geometry=gpd.points_from_xy(CHP_de.lon, CHP_de.lat) + ) + # Set LAU shape column as geometry + subnodes["geometry"] = gpd.GeoSeries.from_wkt(subnodes["lau_shape"]) + subnodes.drop("lau_shape", axis=1, inplace=True) + + # Set CRS to WGS84 + CHP_de.crs = 4326 + # Transform to the same CRS as the subnodes + CHP_de = CHP_de.to_crs(subnodes.crs) + + # Select largest subnodes + subnodes = subnodes.sort_values(by="yearly_heat_demand_MWh", ascending=False).head( + head + ) + subnodes.index.rename("city", inplace=True) + + # Assign subnode to CHP plants based on the nuts3 region + CHP_de = CHP_de.sjoin(subnodes, how="left", predicate="within") + # Insert leading whitespace for citynames where not nan + CHP_de["city"] = CHP_de["city"].apply(lambda x: " " + x if pd.notna(x) else "") + CHP_de["bus"] = CHP_de["bus"] + CHP_de["city"] + CHP_de.drop("city", axis=1, inplace=True) + + return CHP_de + + if __name__ == "__main__": if "snakemake" not in globals(): snakemake = mock_snakemake( @@ -245,4 +295,15 @@ def BP(cap, year): gdf = gpd.GeoDataFrame(geometry=geometry, crs=4326) CHP_de["bus"] = gpd.sjoin_nearest(gdf, regions, how="left")["name"] + if snakemake.params.district_heating_subnodes["enable"]: + subnodes = gpd.read_file( + snakemake.input.district_heating_subnodes, + columns=["Stadt", "yearly_heat_demand_MWh", "lau_shape"], + ).set_index("Stadt") + CHP_de = assign_subnode( + CHP_de, + subnodes, + head=snakemake.params.district_heating_subnodes["nlargest"], + ) + CHP_de.to_csv(snakemake.output.german_chp, index=False) diff --git a/scripts/pypsa-de/export_ariadne_variables.py b/scripts/pypsa-de/export_ariadne_variables.py index f465656d1..9c812faa0 100644 --- a/scripts/pypsa-de/export_ariadne_variables.py +++ b/scripts/pypsa-de/export_ariadne_variables.py @@ -3567,7 +3567,6 @@ def get_prices(n, region): # Price|Final 
Energy|Transportation|Liquids|Petroleum|Transport and Distribution # Price|Final Energy|Transportation|Liquids|Petroleum|Carbon Price Component # Price|Final Energy|Transportation|Liquids|Petroleum|Other Taxes - # Price|Final Energy|Transportation|Liquids|Diesel # Price|Final Energy|Transportation|Liquids|Diesel|Sales Margin # Price|Final Energy|Transportation|Liquids|Diesel|Transport and Distribution diff --git a/scripts/pypsa-de/plot_ariadne_variables.py b/scripts/pypsa-de/plot_ariadne_variables.py index 9ca388c63..71701c9f9 100644 --- a/scripts/pypsa-de/plot_ariadne_variables.py +++ b/scripts/pypsa-de/plot_ariadne_variables.py @@ -699,7 +699,6 @@ def elec_val_plot(df, savepath): 7.86, 54.36, ] # https://energy-charts.info/charts/installed_power/chart.htm?l=en&c=DE&year=2020 - elec_capacities["pypsa"] = [ 0, df.loc[("Capacity|Electricity|Hydro", "GW"), 2020], diff --git a/scripts/pypsa-de/prepare_district_heating_subnodes.py b/scripts/pypsa-de/prepare_district_heating_subnodes.py new file mode 100644 index 000000000..b10e93bae --- /dev/null +++ b/scripts/pypsa-de/prepare_district_heating_subnodes.py @@ -0,0 +1,701 @@ +import logging + +logger = logging.getLogger(__name__) + +import os +import sys +import tempfile +import weakref +import zipfile + +import dask +import geopandas as gpd +import numpy as np +import pandas as pd +import rasterio +import shapely +import xarray as xr +from atlite.gis import ExclusionContainer, shape_availability +from dask.diagnostics import ProgressBar +from rasterio.windows import Window + +from scripts._helpers import ( + configure_logging, + set_scenario_config, + update_config_from_wildcards, +) + + +def load_census_data(census_path: str) -> gpd.GeoDataFrame: + """ + Load heating raster data of census from a CSV file, clean data, and return a GeoDataFrame. + + Parameters + ---------- + census_path : str + Path to the CSV file containing the census data. 
+ + Returns + ------- + gpd.GeoDataFrame + GeoDataFrame containing the census data. + """ + # Load and clean the census data + census = pd.read_csv(census_path, encoding="latin1", sep=";") + census = census.replace("\x96", 0) + + # Create a GeoDataFrame from the census data + census = gpd.GeoDataFrame( + census, + geometry=gpd.points_from_xy(census.x_mp_100m, census.y_mp_100m), + crs="EPSG:3035", + ) + + return census + + +def get_chunked_raster( + dataset_path: str, + bounds: tuple[float, float, float, float], + buffer_distance: float = 1000, +) -> rasterio.io.DatasetReader: + """ + Returns windowed data from a raster. + + Parameters + ---------- + dataset_path : str + Path to the raster dataset. + bounds : tuple + (min_x, min_y, max_x, max_y) in the dataset's CRS. + buffer_distance : float, optional + Buffer distance in the dataset's units to add around the bounds. + Default is 1000. + + Returns + ------- + rasterio.io.DatasetReader + A windowed raster dataset. + """ + + # Open the source dataset + with rasterio.open(dataset_path) as src: + # Buffer the bounds + buffered_bounds = ( + bounds[0] - buffer_distance, + bounds[1] - buffer_distance, + bounds[2] + buffer_distance, + bounds[3] + buffer_distance, + ) + + # Get window for the buffered bounds + window = rasterio.windows.from_bounds(*buffered_bounds, transform=src.transform) + + # Ensure window coordinates are valid integers + window = Window( + int(max(0, window.col_off)), + int(max(0, window.row_off)), + int(min(src.width - int(window.col_off), window.width)), + int(min(src.height - int(window.row_off), window.height)), + ) + + # Create a temporary file for the windowed data + with tempfile.NamedTemporaryFile(suffix=".tif", delete=False) as temp: + temp_path = temp.name + + # Create the output dataset with proper metadata + with rasterio.open( + temp_path, + "w", + driver="GTiff", + height=window.height, + width=window.width, + count=src.count, + dtype=src.dtypes[0], + crs=src.crs, + 
transform=src.window_transform(window), + nodata=src.nodata, + ) as dst: + # Read and write the windowed data + dst.write(src.read(window=window)) + + # Return a reader for the temporary file + # The OS will clean it up when the process exits or it's manually deleted + dataset = rasterio.open(temp_path) + + def cleanup(temp_path: str) -> None: + try: + os.unlink(temp_path) + except: + pass + + weakref.finalize(dataset, cleanup, temp_path) + + return dataset + + +def process_eligible_points( + x_coords: list[float], + y_coords: list[float], + values: np.ndarray, + crs: str, + lau_shapes: gpd.GeoDataFrame, +) -> gpd.GeoDataFrame: + """ + Process points to create eligible area geometries for PTES potential assessment. + + Parameters + ---------- + x_coords : list[float] + X-coordinates of eligible points. + y_coords : list[float] + Y-coordinates of eligible points. + values : np.ndarray + Values associated with each point (eligibility flags). + crs : str + Coordinate reference system of the input points. + lau_shapes : gpd.GeoDataFrame + GeoDataFrame containing LAU (Local Administrative Unit) shapes to intersect with eligible areas. + + Returns + ------- + gpd.GeoDataFrame + GeoDataFrame containing the intersection of eligible areas with LAU shapes. 
+ """ + eligible_areas = pd.DataFrame({"x": x_coords, "y": y_coords, "eligible": values}) + eligible_areas = gpd.GeoDataFrame( + eligible_areas, + geometry=gpd.points_from_xy(eligible_areas.x, eligible_areas.y), + crs=crs, + ) + + # Apply a 5 meter buffer to all geometries to yield 10m raster resolution + eligible_areas["geometry"] = eligible_areas.geometry.buffer(5, cap_style="square") + + # Use spatial indexing for more efficient overlay + merged_data = eligible_areas.union_all() + result = gpd.GeoDataFrame(geometry=[merged_data], crs=eligible_areas.crs) + result = result.explode(index_parts=False).reset_index(drop=True) + + # Overlay with dh_systems using spatial indexing + return gpd.overlay(result, lau_shapes, how="intersection") + + +def prepare_subnodes( + subnodes: pd.DataFrame, + cities: gpd.GeoDataFrame, + regions_onshore: gpd.GeoDataFrame, + lau: gpd.GeoDataFrame, + heat_techs: gpd.GeoDataFrame, +) -> gpd.GeoDataFrame: + """ + Prepare subnodes by assigning the corresponding LAU and onshore region shapes. + + Parameters + ---------- + subnodes : pd.DataFrame + DataFrame containing information about district heating systems. + cities : gpd.GeoDataFrame + GeoDataFrame containing city coordinates with columns 'Stadt' and 'geometry'. + regions_onshore : gpd.GeoDataFrame + GeoDataFrame containing onshore region geometries of clustered network. + lau : gpd.GeoDataFrame + GeoDataFrame containing LAU (Local Administrative Units) geometries and IDs. + heat_techs : gpd.GeoDataFrame + GeoDataFrame containing NUTS3 region geometries of heat technologies and data from eGo^N project. + + Returns + ------- + gpd.GeoDataFrame + GeoDataFrame with processed subnodes, including geometries, clusters, LAU IDs, and NUTS3 shapes. 
+ """ + + subnodes["Stadt"] = subnodes["Stadt"].str.split("_").str[0] + + # Drop duplicates if Gelsenkirchen, Kiel, or Flensburg is included and keep the one with higher Wärmeeinspeisung in GWh/a + subnodes = subnodes.drop_duplicates(subset="Stadt", keep="first") + + subnodes["yearly_heat_demand_MWh"] = subnodes["Wärmeeinspeisung in GWh/a"] * 1e3 + + logger.info( + f"The selected district heating networks have an overall yearly heat demand of {subnodes['yearly_heat_demand_MWh'].sum()} MWh/a. " + ) + + subnodes["geometry"] = subnodes["Stadt"].apply( + lambda s: cities.loc[cities["Stadt"] == s, "geometry"].values[0] + ) + + subnodes = subnodes.dropna(subset=["geometry"]) + # Convert the DataFrame to a GeoDataFrame + subnodes = gpd.GeoDataFrame(subnodes, crs="EPSG:4326") + # Rename geometry column to point_coords + subnodes = subnodes.rename(columns={"geometry": "point_coords"}) + subnodes = subnodes.set_geometry("point_coords") + + # Assign cluster to subnodes according to onshore regions + subnodes["cluster"] = subnodes.apply( + lambda x: regions_onshore.geometry.contains(x.point_coords).idxmax(), axis=1 + ) + + subnodes["name"] = subnodes["cluster"] + " " + subnodes["Stadt"] + + # For cities that are assigned to onshore regions outside Germany assign closest German cluster + subnodes.loc[~subnodes.cluster.str.contains("DE"), "cluster"] = subnodes.loc[ + ~subnodes.cluster.str.contains("DE") + ].apply( + lambda x: ( + regions_onshore.filter(like="DE", axis=0) + .geometry.distance(x.point_coords) + .idxmin() + ), + axis=1, + ) + subnodes["lau"] = subnodes.apply( + lambda x: lau.loc[lau.geometry.contains(x.point_coords).idxmax(), "LAU_ID"], + axis=1, + ) + subnodes["lau_shape"] = subnodes.apply( + lambda x: lau.loc[ + lau.geometry.contains(x.point_coords).idxmax(), "geometry" + ].wkt, + axis=1, + ) + subnodes["nuts3"] = subnodes.apply( + lambda x: heat_techs.geometry.contains(x.point_coords).idxmax(), + axis=1, + ) + subnodes["nuts3_shape"] = subnodes.apply( + lambda 
x: heat_techs.loc[ + heat_techs.geometry.contains(x.point_coords).idxmax(), "geometry" + ].wkt, + axis=1, + ) + + # Set LAU shapes as geometry and adjust CRS + subnodes["lau_shape"] = subnodes["lau_shape"].apply(shapely.wkt.loads) + subnodes = subnodes.set_geometry("lau_shape") + subnodes.crs = "EPSG:4326" + subnodes = subnodes.to_crs(3035) + + # Make point_coords wkt + subnodes["point_coords"] = subnodes["point_coords"].apply(lambda x: x.wkt) + + return subnodes + + +def process_district_heating_areas( + gdf: gpd.GeoDataFrame, + min_areas: list[float] = None, + buffer_factors: list[float] = None, +) -> gpd.GeoDataFrame: + """ + Process geometries in a GeoDataFrame by uniting polygons of same city and applying optional area filters and buffers to disjoint subpolygons. + Performs iterative processing with multiple min_area and buffer_factor values. + + Parameters + ---------- + gdf : gpd.GeoDataFrame + GeoDataFrame containing geometries to be processed. Must contain a 'Stadt' column for dissolving. + min_areas : list[float], optional + List of minimum area thresholds. Geometries smaller than these will be filtered out in each iteration. + buffer_factors : list[float], optional + List of factors used to calculate buffer distance as a proportion of the square root of geometry area. + + Returns + ------- + gpd.GeoDataFrame + Processed GeoDataFrame with optionally dissolved and exploded geometries. 
+ """ + + # Ensure both lists have the same length + iterations = max(len(min_areas), len(buffer_factors)) + + # Iterative processing with different parameters + for i in range(iterations): + gdf = gdf.explode() + # Get current parameters, use 0 if index out of bounds + min_area = min_areas[i] if i < len(min_areas) else 0 + buffer_factor = buffer_factors[i] if i < len(buffer_factors) else 0 + + gdf = gdf.loc[gdf.area > min_area] + gdf["geometry"] = gdf.geometry.buffer(np.sqrt(gdf.area) * buffer_factor) + + gdf = gdf.dissolve("Stadt").reset_index() + + return gdf + + +def refine_dh_areas_from_census_data( + subnodes: gpd.GeoDataFrame, + census: gpd.GeoDataFrame, + min_dh_share: float, + **processing_config: dict[str, any], +) -> gpd.GeoDataFrame: + """ + Refine district heating areas based on census raster data. + + Parameters + ---------- + subnodes : gpd.GeoDataFrame + GeoDataFrame containing information about district heating subnodes, including city and LAU shapes. + census : gpd.GeoDataFrame + GeoDataFrame containing census data about geographical distribution of heating systems. + min_dh_share : float + Minimum share of district heating required to include a raster cell of census data. + processing_config : dict[str, any] + Configuration parameters for processing district heating areas, including minimum area and buffer factor. + + Returns + ------- + gpd.GeoDataFrame + Updated GeoDataFrame with refined district heating areas, processed and filtered based on census data. 
+ """ + # Keep rows where share of district heating is larger than the specified threshold + census = census[ + census["Fernheizung"].astype(int) / census["Insgesamt_Heizungsart"].astype(int) + > min_dh_share + ] + + # Add buffer, so tiles are 100x100m + census["geometry"] = census.geometry.buffer(50, cap_style="square") + + # Union of conjoint geometries + census = census.union_all() + census = gpd.GeoDataFrame(geometry=[census], crs="EPSG:3035") + + # Explode to single geometries + census = census.explode().reset_index(drop=True) + # Assign to subnodes LAU regions + census = gpd.overlay(subnodes, census, how="intersection") + + # Add LAU shapes from subnodes to census + lau_shape_dict = dict(zip(subnodes["Stadt"], subnodes["lau_shape"])) + census["lau_shape"] = census["Stadt"].map(lau_shape_dict) + + # Take convex hull of census geometries + census["geometry"] = census.convex_hull + + # Process census geometries using passed configuration + census = process_district_heating_areas( + census, + processing_config["min_area"], + processing_config["buffer_factor"], + ) + + return census + + +def process_batch(batch, osm_land_cover_path, natura_path, excluder_resolution, codes): + # Get efficient chunked rasters for this batch + osm_dataset = get_chunked_raster(osm_land_cover_path, batch.total_bounds) + natura_dataset = get_chunked_raster(natura_path, batch.total_bounds) + + # Create exclusion container + excluder = ExclusionContainer(crs=3035, res=excluder_resolution) + excluder.add_raster(osm_dataset, codes=codes, invert=True, crs=3035) + excluder.add_raster(natura_dataset, codes=[1], invert=False, crs=3035) + + # Process batch shapes + batch_shapes = batch["geometry"] + band, transform = shape_availability(batch_shapes, excluder) + + # Extract valid points + row_indices, col_indices = np.where(band != osm_dataset.nodata) + values = band[row_indices, col_indices] + + x_coords, y_coords = rasterio.transform.xy(transform, row_indices, col_indices) + + # Process 
eligible points if any exist + if len(x_coords) > 0: + eligible_areas = process_eligible_points( + x_coords, y_coords, values, osm_dataset.crs, batch + ) + return eligible_areas + return None + + +def add_ptes_limit( + subnodes: gpd.GeoDataFrame, + osm_land_cover_path: rasterio.io.DatasetReader, + natura_path: rasterio.io.DatasetReader, + groundwater: xr.Dataset, + codes: list, + max_groundwater_depth: float, + excluder_resolution: int, + min_area: float = 10000, + default_capacity: float = 4500, +) -> gpd.GeoDataFrame: + """ + Add PTES limit to subnodes according to land availability within city regions. + + Parameters + ---------- + subnodes : gpd.GeoDataFrame + GeoDataFrame containing information about district heating subnodes. + osm_land_cover : rasterio.io.DatasetReader + OSM land cover raster dataset. + natura : rasterio.io.DatasetReader + NATURA 2000 protected areas raster dataset. + groundwater : xr.Dataset + Groundwater depth dataset. + codes : list + List of CORINE land cover codes to include. + max_groundwater_depth : float + Maximum allowable groundwater depth for PTES installation. + excluder_resolution : int + Resolution of the exclusion raster. + min_area : float, optional + Minimum area for eligible regions. Default is 10000 m². + default_capacity : float, optional + Default capacity for PTES potential calculation. Default comes from DEA data and is 4500 MWh. + + Returns + ------- + gpd.GeoDataFrame + Updated GeoDataFrame with PTES potential added. 
+ """ + # Increase batch size for better performance + batch_size = 1 + + # Create batches + batches = [] + for i in range(0, len(subnodes), batch_size): + batches.append(subnodes.iloc[i : i + batch_size]) + + # Create delayed tasks for each batch + delayed_results = [ + dask.delayed(process_batch)( + batch, osm_land_cover_path, natura_path, excluder_resolution, codes + ) + for batch in batches + ] + + # Execute tasks in parallel with progress bar + with ProgressBar(): + results = dask.compute(*delayed_results) + + # Filter out None results and combine + batch_results = [result for result in results if result is not None] + + # Combine results from all batches + if batch_results: + eligible_areas = pd.concat(batch_results, ignore_index=True) + + eligible_areas = gpd.sjoin( + eligible_areas, subnodes.drop("Stadt", axis=1), how="left", rsuffix="" + )[["Stadt", "geometry"]].set_geometry("geometry") + + # filter for eligible areas that are larger than min_area + eligible_areas = eligible_areas[eligible_areas.area > min_area] + + # Find closest value in groundwater dataset and kick out areas with groundwater level > threshold + eligible_areas["groundwater_level"] = eligible_areas.to_crs("EPSG:4326").apply( + lambda a: groundwater.sel( + lon=a.geometry.centroid.x, lat=a.geometry.centroid.y, method="nearest" + )["WTD"].values[0], + axis=1, + ) + eligible_areas = eligible_areas[ + eligible_areas.groundwater_level < max_groundwater_depth + ] + + # Combine eligible areas by city + eligible_areas = eligible_areas.dissolve("Stadt") + + # Calculate PTES potential according to storage configuration + eligible_areas["area_m2"] = eligible_areas.area + eligible_areas["nstorages_pot"] = eligible_areas.area_m2 / min_area + eligible_areas["storage_pot_mwh"] = ( + eligible_areas["nstorages_pot"] * default_capacity + ) + + subnodes.set_index("Stadt", inplace=True) + subnodes["ptes_pot_mwh"] = eligible_areas.loc[ + subnodes.index.intersection(eligible_areas.index) + ]["storage_pot_mwh"] 
+ subnodes["ptes_pot_mwh"] = subnodes["ptes_pot_mwh"].fillna(0) + subnodes.reset_index(inplace=True) + + return subnodes + + +def extend_regions_onshore( + regions_onshore: gpd.GeoDataFrame, + subnodes_all: gpd.GeoDataFrame, + head: int = 40, +) -> dict[str, gpd.GeoDataFrame]: + """ + Extend onshore regions to include city LAU regions and restrict onshore regions to + district heating areas of Fernwärmeatlas. + + Parameters + ---------- + regions_onshore : geopandas.GeoDataFrame + GeoDataFrame containing the onshore regions. + subnodes_all : pandas.DataFrame + DataFrame containing preprocessed district heating systems of Fernwärmeatlas. + head : int or bool, optional + Number of top cities to include based on yearly district heat feed-in. Default is 40. + + Returns + ------- + dict + Dictionary with two keys: + - 'extended': GeoDataFrame with extended onshore regions including city LAU regions. + - 'restricted': GeoDataFrame with restricted onshore regions, replacing geometries + with the district heating areas. 
+ """ + + # Extend regions_onshore to include the cities' lau regions + subnodes = ( + subnodes_all.sort_values(by="Wärmeeinspeisung in GWh/a", ascending=False) + .head(head)[["name", "cluster", "lau_shape"]] + .rename(columns={"lau_shape": "geometry"}) + ) + # Create GeoDataFrame with lau_shape as geometry and EPSG:4326 CRS + subnodes = gpd.GeoDataFrame(subnodes, geometry="geometry", crs="EPSG:3035") + subnodes = subnodes.to_crs("EPSG:4326") + + # Crop city regions from onshore regions + regions_onshore["geometry"] = regions_onshore.geometry.difference( + subnodes.union_all() + ) + + # Rename lau_shape to geometry + subnodes = subnodes.drop(columns=["cluster"]) + + # Concat regions_onshore and subnodal regions + regions_onshore_extended = pd.concat([regions_onshore, subnodes.set_index("name")]) + + # Restrict regions_onshore geometries to only consist of the remaining city areas + subnodes_rest = subnodes_all.loc[ + ~subnodes_all.Stadt.apply(lambda s: s in subnodes.name.str.cat()) + ] + + subnodes_rest_dissolved = ( + subnodes_rest.set_geometry("geometry").dissolve("cluster").to_crs("EPSG:4326") + ) + # regions_onshore_restricted should replace geometries of regions_onshore with the geometries of subnodes_rest + regions_onshore_restricted = regions_onshore_extended.copy() + regions_onshore_restricted.loc[subnodes_rest_dissolved.index, "geometry"] = ( + subnodes_rest_dissolved["geometry"] + ) + regions_onshore_restricted.loc[subnodes.name, "geometry"] = ( + subnodes_all.loc[subnodes.index].set_index("name").geometry.to_crs("EPSG:4326") + ) + + return { + "extended": regions_onshore_extended, + "restricted": regions_onshore_restricted, + } + + +if __name__ == "__main__": + if "snakemake" not in globals(): + import os + import sys + + os.chdir(os.path.dirname(os.path.abspath(__file__))) + + path = "../../" + sys.path.insert(0, os.path.abspath(path)) + from scripts._helpers import mock_snakemake + + snakemake = mock_snakemake( + "prepare_district_heating_subnodes", + 
simpl="", + clusters=27, + opts="", + ll="vopt", + sector_opts="none", + planning_horizons="2045", + run="LowGroundWaterDepth", + ) + + configure_logging(snakemake) + set_scenario_config(snakemake) + update_config_from_wildcards(snakemake.config, snakemake.wildcards) + logger.info("Adding SysGF-specific functionality") + + heat_techs = gpd.read_file(snakemake.input.heating_technologies_nuts3).set_index( + "index" + ) + lau = gpd.read_file( + f"{snakemake.input.lau_regions}!LAU_RG_01M_2019_3035.geojson", + crs="EPSG:3035", + ).to_crs("EPSG:4326") + + fernwaermeatlas = pd.read_excel( + snakemake.input.fernwaermeatlas, + sheet_name="Fernwärmeatlas_öffentlich", + ) + cities = gpd.read_file(snakemake.input.cities) + regions_onshore = gpd.read_file(snakemake.input.regions_onshore).set_index("name") + # Assign onshore region to heat techs based on geometry + heat_techs["cluster"] = heat_techs.apply( + lambda x: regions_onshore.geometry.contains(x.geometry).idxmax(), + axis=1, + ) + with zipfile.ZipFile(snakemake.input.census, "r") as z: + census = load_census_data(z.open("Zensus2022_Heizungsart_100m-Gitter.csv")) + + subnodes = prepare_subnodes( + fernwaermeatlas, + cities, + regions_onshore, + lau, + heat_techs, + ) + + if snakemake.params.district_heating["subnodes"]["census_areas"]["enable"]: + # Parameters for processing of census data is read from config file. + # Default values were chosen to yield district heating areas with high + # geographic accordance to the ones publicly available e.g. Berlin, Hamburg. 
+ min_dh_share = snakemake.params.district_heating["subnodes"]["census_areas"][ + "min_district_heating_share" + ] + processing_config = snakemake.params.district_heating["subnodes"][ + "census_areas" + ]["processing"] + subnodes = refine_dh_areas_from_census_data( + subnodes, census, min_dh_share, **processing_config + ) + + if snakemake.params.district_heating["subnodes"]["limit_ptes_potential"]["enable"]: + bounds = subnodes.to_crs("EPSG:4326").total_bounds # (minx, miny, maxx, maxy) + groundwater = xr.open_dataset(snakemake.input.groundwater_depth).sel( + lon=slice(bounds[0], bounds[2]), # minx to maxx + lat=slice(bounds[1], bounds[3]), # miny to maxy + ) + + subnodes = add_ptes_limit( + subnodes, + snakemake.input.osm_land_cover, + snakemake.input.natura, + groundwater, + snakemake.params.district_heating["subnodes"]["limit_ptes_potential"][ + "osm_landcover_codes" + ], + snakemake.params.district_heating["subnodes"]["limit_ptes_potential"][ + "max_groundwater_depth" + ], + snakemake.params.district_heating["subnodes"]["limit_ptes_potential"][ + "excluder_resolution" + ], + ) + + subnodes.to_file(snakemake.output.district_heating_subnodes, driver="GeoJSON") + + regions_onshore_modified = extend_regions_onshore( + regions_onshore, + subnodes, + head=snakemake.params.district_heating["subnodes"]["nlargest"], + ) + + regions_onshore_modified["extended"].to_file( + snakemake.output.regions_onshore_extended, driver="GeoJSON" + ) + + regions_onshore_modified["restricted"].to_file( + snakemake.output.regions_onshore_restricted, driver="GeoJSON" + )