diff --git a/CHANGELOG.md b/CHANGELOG.md
index e81c82d8..2d96062e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -8,7 +8,7 @@
 
 * **ADD** Spatial resolution that aligns with the regions defined by the [e-Highway 2050 project](https://cordis.europa.eu/project/id/308908/reporting) (`ehighways`) (#370).
 
-* **ADD** fully-electrified heat demand (#284).
+* **ADD** fully-electrified heat demand (#284, #343, #390, #391).
 
 * **ADD** fully-electrified road transportation (#270, #271, #358). A parameter allows to define the share of uncontrolled (timeseries) vs controlled charging (optimised) by the solver (#338). Data for controlled charging constraints is readily available (#356), but corresponding constraints are not yet implemented (#385).
 
diff --git a/config/default.yaml b/config/default.yaml
index 742d2d40..e325caa6 100644
--- a/config/default.yaml
+++ b/config/default.yaml
@@ -21,8 +21,7 @@ data-sources:
     swiss-industry-energy-balance: https://www.bfe.admin.ch/bfe/en/home/versorgung/statistik-und-geodaten/energiestatistiken/teilstatistiken.exturl.html/aHR0cHM6Ly9wdWJkYi5iZmUuYWRtaW4uY2gvZGUvcHVibGljYX/Rpb24vZG93bmxvYWQvODc4OA==.html
     controlled-ev-profiles: https://zenodo.org/record/6579421/files/ramp-ev-{dataset}.csv.gz?download=1
     uncontrolled-ev-profiles: https://sandbox.zenodo.org/records/45530/files/uncontrolled-charging-profiles.csv.gz?download=1 # TODO: convert into Zenodo repository
-    gridded-temperature-data: https://zenodo.org/records/6557643/files/temperature.nc?download=1
-    gridded-10m-windspeed-data: https://zenodo.org/records/6557643/files/wind10m.nc?download=1
+    gridded-weather-data: https://zenodo.org/records/11516744/files/{data_var}.nc
     when2heat-params: https://zenodo.org/records/10965295/files/{dataset}?download=1
     population: https://ec.europa.eu/eurostat/cache/GISCO/geodatafiles/JRC_GRID_2018.zip
 data-pre-processing:
diff --git a/config/schema.yaml b/config/schema.yaml
index f2d3dddf..e403f44e 100644
--- a/config/schema.yaml
+++ b/config/schema.yaml
@@ -111,14 +111,10 @@ properties:
                 type: string
                 pattern: ^(https?|http?):\/\/.+
                 description: Web address of electric light-duty vehicle uncontrolled charging.
-            gridded-temperature-data:
+            gridded-weather-data:
                 type: string
-                pattern: ^(https?|http?):\/\/.+
-                description: "Web address of gridded temperature data. Expecting a netCDF file with the coordinates [time, site], and the variables [temperature, lat, lon]."
-            gridded-10m-windspeed-data:
-                type: string
-                pattern: ^(https?|http?):\/\/.+
-                description: "Web address of gridded 10m wind speed data. Expecting a netCDF file with the coordinates [time, site], and the variables [temperature, lat, lon]."
+                pattern: ^(https?|http?):\/\/.+{data_var}.*\.(nc)$
+                description: "Web address of gridded weather data. Expecting a netCDF file with the coordinates [time, site], the variables [{data_var}, lat, lon], and the attribute 'unit'."
            when2heat-params:
                type: string
                pattern: ^(https?|http?):\/\/.+
diff --git a/rules/heat.smk b/rules/heat.smk
index 52e07c1a..106461b6 100644
--- a/rules/heat.smk
+++ b/rules/heat.smk
@@ -1,16 +1,7 @@
-rule download_gridded_temperature_data:
-    message: "Download gridded temperature data"
-    params: url = config["data-sources"]["gridded-temperature-data"]
-    output: protected("data/automatic/gridded-weather/temperature.nc")
-    conda: "../envs/shell.yaml"
-    localrule: True
-    shell: "curl -sSLo {output} '{params.url}'"
-
-
-rule download_gridded_10m_windspeed_data:
-    message: "Download gridded 10m wind speed data"
-    params: url = config["data-sources"]["gridded-10m-windspeed-data"]
-    output: protected("data/automatic/gridded-weather/wind10m.nc")
+rule download_gridded_weather_data:
+    message: "Download gridded {wildcards.data_var} data"
+    params: url = lambda wildcards: config["data-sources"]["gridded-weather-data"].format(data_var=wildcards.data_var)
+    output: protected("data/automatic/gridded-weather/{data_var}.nc")
     conda: "../envs/shell.yaml"
     localrule: True
     shell: "curl -sSLo {output} '{params.url}'"
@@ -87,10 +78,11 @@ use rule create_heat_demand_timeseries as create_heat_demand_timeseries_historic
         output: "build/models/{resolution}/timeseries/demand/heat-demand-historic-electrification.csv",
 
 
+
 rule population_per_weather_gridbox:
     message: "Get {wildcards.resolution} population information per weather data gridbox"
     input:
-        wind_speed = rules.download_gridded_10m_windspeed_data.output[0],
+        weather_grid = "data/automatic/gridded-weather/grid.nc",
        population = rules.raw_population_unzipped.output[0],
        locations = rules.units.output[0]
     params:
@@ -103,14 +95,15 @@ rule population_per_weather_gridbox:
 
 rule unscaled_heat_profiles:
     message: "Generate gridded heat demand profile shapes for {wildcards.year} from weather and population data"
+
     input:
         population = rules.population_per_weather_gridbox.output[0],
-        wind_speed = rules.download_gridded_10m_windspeed_data.output[0],
-        temperature = rules.download_gridded_temperature_data.output[0],
+        wind_speed = "data/automatic/gridded-weather/wind10m.nc",
+        temperature = "data/automatic/gridded-weather/temperature.nc",
        when2heat = rules.download_when2heat_params.output[0]
     params:
-        lat_name = "lat",
-        lon_name = "lon",
+        first_year = config["scope"]["temporal"]["first-year"],
+        final_year = config["scope"]["temporal"]["final-year"],
     conda: "../envs/default.yaml"
-    output: "build/data/{resolution}/gridded_hourly_unscaled_heat_demand_{year}.nc"
+    output: "build/data/{resolution}/hourly_unscaled_heat_demand.nc"
     script: "../scripts/heat/unscaled_heat_profiles.py"
diff --git a/scripts/heat/population_per_gridbox.py b/scripts/heat/population_per_gridbox.py
index c3b35ac2..b21bbb11 100644
--- a/scripts/heat/population_per_gridbox.py
+++ b/scripts/heat/population_per_gridbox.py
@@ -96,7 +96,7 @@ def population_on_weather_grid(
     population_on_weather_grid(
         path_to_population=snakemake.input.population,
         path_to_locations=snakemake.input.locations,
-        path_to_coordinates=snakemake.input.wind_speed,
+        path_to_coordinates=snakemake.input.weather_grid,
         lat_name=snakemake.params.lat_name,
         lon_name=snakemake.params.lon_name,
         out_path=snakemake.output[0],
diff --git a/scripts/heat/unscaled_heat_profiles.py b/scripts/heat/unscaled_heat_profiles.py
index 6f7b9438..57fbce05 100644
--- a/scripts/heat/unscaled_heat_profiles.py
+++ b/scripts/heat/unscaled_heat_profiles.py
@@ -1,49 +1,68 @@
 """
 Use weather data to simulate hourly heat profiles using When2Heat
 (https://github.com/oruhnau/when2heat) data processing pipeline.
-Functions attributable to When2Heat are explictily referenced as such
-in the function docstring.
+
+Functions attributable to When2Heat are explicitly referenced as such in the function docstring.
 """
 import os
-from typing import Callable, Union
+from typing import Callable, Literal, Union
 
 import numpy as np
 import pandas as pd
 import xarray as xr
 
+# ASSUME (from When2Heat):
+# 1. Below 15 °C, the water heating demand is not defined and assumed to stay constant
+HOT_WATER_LOWER_BOUND_TEMP = 15
+# 2. The reference temperature is always 30 °C for hot water demand calcs
+HOT_WATER_REF_TEMP = 30
+# 3. Locations are split into windy and normal regimes by their average wind speed, using a threshold of 4.4 m/s
+AVE_WIND_SPEED_THRESHOLD = 4.4
+
 
 def get_unscaled_heat_profiles(
     path_to_population: str,
     path_to_wind_speed: str,
     path_to_temperature: str,
     path_to_when2heat_params: str,
-    lat_name: str,
-    lon_name: str,
+    first_year: Union[str, int],
+    final_year: Union[str, int],
     out_path: str,
-    year: Union[str, int],
 ) -> None:
-    """
-    Produces time series of heat demand profiles with the correct shape, and consistent
-    within themselves, but without meaningful units.
-    The profiles need to be scaled so their magnitude matches annual heat demand data
-    in a subsequent step.
-
+    """Produces time series of heat demand profiles that have the correct shape and are internally consistent, but have no meaningful units.
+
+    The profiles need to be scaled so their magnitude matches annual heat demand data in a subsequent step.
+
+    Args:
+        path_to_population (str): Gridded population data, which will act as the weighting to go from grid-level profiles to profiles per model unit.
+        path_to_wind_speed (str): Gridded wind speed data in m/s.
+        path_to_temperature (str): Gridded air temperature data in degrees C.
+        path_to_when2heat_params (str): When2Heat parameters used to convert weather data into demand profiles.
+        first_year (Union[str, int]): First year of data to include in the profile (inclusive).
+        final_year (Union[str, int]): Final year of data to include in the profile (inclusive).
+        out_path (str): Path to which data will be saved.
""" population = xr.open_dataarray(path_to_population) - temperature_ds = xr.open_dataset(path_to_temperature).rename({ - lon_name: "x", - lat_name: "y", - }) + + # Weather data is subset by the geographic area covered by model run (given by available population sites) + temperature_ds = xr.open_dataset(path_to_temperature).sel(site=population.site) + wind_ds = xr.open_dataset(path_to_wind_speed).sel(site=population.site) + + # Check units + assert temperature_ds.attrs["unit"].lower() == "degrees c" + assert wind_ds.attrs["unit"].lower() == "m/s" + # Only need site-wide mean wind speed for this analysis - wind_ds = xr.open_dataset(path_to_wind_speed).rename({lon_name: "x", lat_name: "y"}) - average_wind_speed = wind_ds["wind_speed"].mean("time") + average_wind_speed = wind_ds["wind10m"].mean("time") # Subset temperature to the selected year extended by a couple of days either end, # so we don't compute values for years we don't need, but keep a buffer for the shifts # happening in get_reference_temperature() temperature_ds = temperature_ds.sel( - time=slice(str(int(year) - 1) + "-12-25", str(int(year) + 1) + "-01-05") + time=slice( + str(int(first_year) - 1) + "-12-25", str(int(final_year) + 1) + "-01-05" + ) ) # This is a weighted average temperature from 3 days prior to each day in the timeseries @@ -54,15 +73,20 @@ def get_unscaled_heat_profiles( ) # After running get_reference_temperature(), we now subset to get only the target year - reference_temperature = reference_temperature.sel(time=str(year)) + reference_temperature = reference_temperature.sel( + time=slice(str(first_year), str(final_year)) + ) # Parameters and how to apply them is based on [@BDEW:2015] daily_params = read_daily_parameters(path_to_when2heat_params) hourly_params = read_hourly_parameters(path_to_when2heat_params) # Get daily demand - daily_heat = get_daily_heat_demand( - reference_temperature, average_wind_speed, daily_params + daily_heat = daily( + reference_temperature, average_wind_speed, daily_params, _heat_function + ) + daily_hot_water = daily( + reference_temperature, average_wind_speed, daily_params, _water_function ) # Map profiles to daily demand @@ -70,30 +94,51 @@ def get_unscaled_heat_profiles( reference_temperature, daily_heat, hourly_params ) + hourly_hot_water = get_hourly_heat_profiles( + reference_temperature.clip(min=HOT_WATER_REF_TEMP), + daily_hot_water, + hourly_params, + ) + + # Space heating demand = total heating demand - hot water demand + hourly_space = (hourly_heat - hourly_hot_water).clip(min=0) + + # Sanity check that there is more space heating demand in winter than summer + hourly_space_monthly = hourly_space.groupby("time.month").sum() + hourly_space_winter = hourly_space_monthly.sel(month=[12, 1, 2]).sum("month") + hourly_space_summer = hourly_space_monthly.sel(month=[6, 7, 8]).sum("month") + assert (hourly_space_winter > hourly_space_summer).all() + # population weighted profiles. 
     # NOTE: profile magnitude is now only consistent within each region, not between them
     weight = population / population.sum(["site"])
-    # `hourly_heat` has dims [x, y, datetime], `weight` has dims [x, y, id],
-    # we want a final array with dims [id, datetime]
-    grouped_hourly_heat = xr.concat(
-        [(hourly_heat * weight.sel({"id": id})).sum(["site"]) for id in weight.id],
-        dim="id",
+    grouped_hourly_space = _group_gridcells(hourly_space, weight).rename("space_heat")
+    grouped_hourly_water = _group_gridcells(hourly_hot_water, weight).rename(
+        "hot_water"
     )
+    grouped_hourly_heat = xr.merge([grouped_hourly_space, grouped_hourly_water])
 
     grouped_hourly_heat.to_netcdf(out_path)
 
 
 def get_hourly_heat_profiles(
-    reference_temperature: Union[int, float],
+    reference_temperature: xr.DataArray,
     daily_heat: xr.DataArray,
-    hourly_params: pd.DataFrame,
+    hourly_params: pd.Series,
 ) -> xr.DataArray:
-    """
-    reference_temperature: temperature in degrees C
+    """Convert daily heat demand to hourly profiles.
+    Heavily modified from https://github.com/oruhnau/when2heat/blob/351bd1a2f9392ed50a7bdb732a103c9327c51846/scripts/demand.py
+    to work with xarray datasets and to improve efficiency.
+
+    Args:
+        reference_temperature (xr.DataArray): Daily reference temperature in degrees C.
+        daily_heat (xr.DataArray): Relative daily heat demand per site.
+        hourly_params (pd.Series): Parameters from When2Heat to convert from daily to hourly profiles.
+
+    Returns:
+        xr.DataArray: Hourly heat demand profiles (which are internally consistent but whose magnitudes are meaningless).
     """
-    # Heavily modified from https://github.com/oruhnau/when2heat/blob/351bd1a2f9392ed50a7bdb732a103c9327c51846/scripts/demand.py
-    # to work with xarray datasets and to improve efficiency
 
     # get temperature in 5C increments between -15C and +30C
     temperature_increments = (
@@ -107,53 +152,46 @@
         .assign(weekday=temperature_increments.index.get_level_values("time").dayofweek)
         .set_index(["temperature", "weekday"], append=True)
     )
-    aligned_increment_to_params = hourly_params.align(increment_index)[0]
+    aligned_increment_to_params = pd.merge(
+        hourly_params.rename("param"),
+        increment_index,
+        left_index=True,
+        right_index=True,
+    ).squeeze()
+
     hourly_params_at_all_locations = xr.DataArray.from_series(
         aligned_increment_to_params.droplevel(["weekday", "temperature"])
     )
 
-    # daily heat is multiplied by the hourly paramater value to get the relative heat demand for that hour
+    # daily heat is multiplied by the hourly parameter value to get the relative heat demand for that hour
     hourly_heat = hourly_params_at_all_locations * daily_heat
-    hourly_heat = hour_and_day_to_datetime(hourly_heat)
+    hourly_heat = _hour_and_day_to_datetime(hourly_heat)
 
     return hourly_heat
 
 
-def hour_and_day_to_datetime(da: xr.DataArray) -> xr.DataArray:
-    da = da.stack(new_time=["time", "hour"])
-    new_time = da.new_time.to_index()
-    da.coords["new_time"] = new_time.get_level_values(0) + pd.to_timedelta(
-        new_time.get_level_values(1), unit="H"
-    )
-    return da.rename({"new_time": "time"})
-
-
 def read_daily_parameters(input_path: str) -> pd.DataFrame:
-    # Direct copy from https://github.com/oruhnau/when2heat/blob/351bd1a2f9392ed50a7bdb732a103c9327c51846/scripts/read.py
+    """Load When2Heat daily parameters.
+
+    Direct copy from https://github.com/oruhnau/when2heat/blob/351bd1a2f9392ed50a7bdb732a103c9327c51846/scripts/read.py
+    """
     file = os.path.join(input_path, "daily_demand.csv")
     return pd.read_csv(file, sep=";", decimal=",", header=[0, 1], index_col=0)
 
 
 def read_hourly_parameters(input_path: str) -> pd.DataFrame:
-    # Modified from https://github.com/oruhnau/when2heat/blob/351bd1a2f9392ed50a7bdb732a103c9327c51846/scripts/read.py
-    # to set columns as integer
-    parameters = {}
+    """Load When2Heat hourly parameters.
 
-    def _csv_reader(building_type):
-        filename = f"hourly_factors_{building_type}.csv"
-        filepath = os.path.join(input_path, filename)
-
-        # MultiIndex for commercial heat because of weekday dependency
-        index_col = [0, 1] if building_type == "COM" else 0
-        return pd.read_csv(filepath, sep=";", decimal=",", index_col=index_col).apply(
-            pd.to_numeric, downcast="float"
-        )
+    Modified from https://github.com/oruhnau/when2heat/blob/351bd1a2f9392ed50a7bdb732a103c9327c51846/scripts/read.py
+    to set columns as integer.
+    """
+    parameters = {}
 
-    parameters["COM"] = _csv_reader("COM")
+    parameters["COM"] = _csv_reader("COM", input_path)
     for building_type in ["SFH", "MFH"]:
         parameters[building_type] = (
-            _csv_reader(building_type)
+            _csv_reader(building_type, input_path)
            .rename_axis(index="time")
            .align(parameters["COM"])[0]
         )
@@ -168,42 +206,38 @@ def _csv_reader(building_type):
         return combined_df.stack()
 
 
-def get_reference_temperature(temperature: xr.DataArray, time_dim: str = "time"):
-    # Modified from https://github.com/oruhnau/when2heat/blob/351bd1a2f9392ed50a7bdb732a103c9327c51846/scripts/demand.py
-    # to expect xarray not pandas
-
-    # Daily average
-    daily_average = temperature.resample(time="1D").mean(time_dim)
-
-    # Weighted mean, method for which is given in [@Ruhnau:2019]
-    return sum(
-        (0.5**i) * daily_average.shift({time_dim: i}).bfill(time_dim) for i in range(4)
-    ) / sum(0.5**i for i in range(4))
-
-
-def get_daily_heat_demand(
-    temperature: xr.DataArray, average_wind: xr.DataArray, all_parameters: pd.DataFrame
+def get_reference_temperature(
+    temperature: xr.DataArray, time_dim: str = "time"
 ) -> xr.DataArray:
-    # Direct copy from https://github.com/oruhnau/when2heat/blob/351bd1a2f9392ed50a7bdb732a103c9327c51846/scripts/demand.py
+    """Get daily reference temperature values which account for the temperature in preceding days using a weighted average.
 
-    def heat_function(t, parameters):
-        # BDEW et al. 2015 describes this function combining parameters
-        celsius = t - 273.15  # The temperature input is in Kelvin
+    Modified from https://github.com/oruhnau/when2heat/blob/351bd1a2f9392ed50a7bdb732a103c9327c51846/scripts/demand.py
+    to expect xarray not pandas.
 
-        sigmoid = (
-            parameters["A"]
-            / (1 + (parameters["B"] / (celsius - 40)) ** parameters["C"])
-            + parameters["D"]
-        )
+    Args:
+        temperature (xr.DataArray): Hourly temperature in degrees C.
+        time_dim (str, optional): The name of the hourly time dimension. Defaults to "time".
 
-        linear = xr.concat(
-            [parameters[f"m_{i}"] * celsius + parameters[f"b_{i}"] for i in ["s", "w"]],
-            dim="param",
-        ).max("param")
+    Returns:
+        xr.DataArray: Daily reference temperatures per site.
+    """
 
-        return sigmoid + linear
+    # Daily average
+    # pandas handles time resampling much more quickly than xarray, so we switch to a dataframe here.
+    daily_average = (
+        temperature.rename(time=time_dim)
+        .to_series()
+        .unstack("time")
+        .T.resample("1D")
+        .mean()
+        .stack()
+        .to_xarray()
+    )
 
-    return daily(temperature, average_wind, all_parameters, heat_function)
+    # Weighted mean, method for which is given in [@Ruhnau:2019]
+    return sum(
+        (0.5**i) * daily_average.shift({"time": i}).bfill("time") for i in range(4)
+    ) / sum(0.5**i for i in range(4))
 
 
 def daily(
@@ -224,9 +258,13 @@ def daily(
     return xr.concat(
         [
             func(
-                temperature.where(wind <= 4.4), all_parameters[(building, "normal")]
+                temperature.where(wind <= AVE_WIND_SPEED_THRESHOLD),
+                all_parameters[(building, "normal")],
             ).fillna(
-                func(temperature.where(wind > 4.4), all_parameters[(building, "windy")])
+                func(
+                    temperature.where(wind > AVE_WIND_SPEED_THRESHOLD),
+                    all_parameters[(building, "windy")],
+                )
             )
             for building in buildings
         ],
@@ -234,14 +272,93 @@
     )
 
 
+def _group_gridcells(gridded_data: xr.DataArray, weight: xr.DataArray) -> xr.DataArray:
+    # `gridded_data` has dims [site, time], `weight` has dims [site, id];
+    # we want a final array with dims [id, time]
+
+    return xr.concat(
+        [(gridded_data * weight.sel({"id": id})).sum(["site"]) for id in weight.id],
+        dim="id",
+    )
+
+
+def _hour_and_day_to_datetime(da: xr.DataArray) -> xr.DataArray:
+    "Combine hour and date (a.k.a. 'time') as two dimensions into one datetime ('time') dimension"
+    da = da.stack(new_time=["time", "hour"])
+    new_time = da.new_time.to_index()
+    da.coords["new_time"] = new_time.get_level_values(0) + pd.to_timedelta(
+        new_time.get_level_values(1), unit="H"
+    )
+    return da.rename({"new_time": "time"})
+
+
+def _csv_reader(
+    building_type: Literal["SFH", "MFH", "COM"], input_path: str
+) -> pd.DataFrame:
+    filename = f"hourly_factors_{building_type}.csv"
+    filepath = os.path.join(input_path, filename)
+
+    # MultiIndex for commercial heat because of weekday dependency
+    index_col = [0, 1] if building_type == "COM" else 0
+    return pd.read_csv(filepath, sep=";", decimal=",", index_col=index_col).apply(
+        pd.to_numeric, downcast="float"
+    )
+
+
+def _heat_function(temperature: xr.DataArray, parameters: pd.DataFrame) -> xr.DataArray:
+    """A function for the total (space + water) daily heating demand, derived from [@BDEW:2015].
+
+    Direct copy from https://github.com/oruhnau/when2heat/blob/351bd1a2f9392ed50a7bdb732a103c9327c51846/scripts/demand.py
+
+    Args:
+        temperature (xr.DataArray): Reference daily temperature in degrees C.
+        parameters (pd.DataFrame): Relevant parameters from When2Heat.
+
+    Returns:
+        xr.DataArray: Daily relative heat demand.
+    """
+
+    sigmoid = (
+        parameters["A"]
+        / (1 + (parameters["B"] / (temperature - 40)) ** parameters["C"])
+        + parameters["D"]
+    )
+
+    linear = xr.concat(
+        [parameters[f"m_{i}"] * temperature + parameters[f"b_{i}"] for i in ["s", "w"]],
+        dim="param",
+    ).max("param")
+
+    return sigmoid + linear
+
+
+def _water_function(
+    temperature: xr.DataArray, parameters: pd.DataFrame
+) -> xr.DataArray:
+    """A function for the daily water heating demand, derived from [@BDEW:2015].
+
+    Direct copy from https://github.com/oruhnau/when2heat/blob/351bd1a2f9392ed50a7bdb732a103c9327c51846/scripts/demand.py
+
+    Args:
+        temperature (xr.DataArray): Reference daily temperature in degrees C.
+        parameters (pd.DataFrame): Relevant parameters from When2Heat.
+
+    Returns:
+        xr.DataArray: Daily relative hot water demand.
+ + """ + celsius_clipped = temperature.clip(min=HOT_WATER_LOWER_BOUND_TEMP) + + return parameters["m_w"] * celsius_clipped + parameters["b_w"] + parameters["D"] + + if __name__ == "__main__": get_unscaled_heat_profiles( path_to_population=snakemake.input.population, path_to_wind_speed=snakemake.input.wind_speed, path_to_temperature=snakemake.input.temperature, path_to_when2heat_params=snakemake.input.when2heat, - lat_name=snakemake.params.lat_name, - lon_name=snakemake.params.lon_name, - year=snakemake.wildcards.year, + first_year=snakemake.params.first_year, + final_year=snakemake.params.final_year, out_path=snakemake.output[0], )