Commit 13a09629 authored by David Radu

reverting to previous status (indirect resampling after building the matrices)

parent 232333e4
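The reverted flow builds the full-resolution capacity-factor matrices first and only resamples them afterwards (the "indirect" resampling named in the message). A minimal sketch of that order of operations, using illustrative names and synthetic data rather than anything from the repository:

import numpy as np
import pandas as pd
import xarray as xr

rate = 3  # hours, cf. `resampling_rate` in the config below
hours = pd.date_range('2010-01-01T00:00', periods=24, freq='H')
capacity_factors = xr.DataArray(np.random.rand(24, 2),
                                coords={'time': hours, 'locations': ['site_a', 'site_b']},
                                dims=['time', 'locations'])
# The full hourly matrix is built first; resampling is applied only afterwards ("indirect").
resampled = capacity_factors.resample(time=f"{rate}H").mean(dim='time')
assert resampled.sizes['time'] == 24 // rate  # 8 three-hourly averages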
# Path to data folder
-#data_path: 'D:/ULg_PhD_work/datasets/resite_ip/'
-data_path: '/data/dcradu/resite_ip/'
+data_path: 'D:/ULg_PhD_work/datasets/resite_ip/'
+#data_path: '/data/dcradu/resite_ip/'
# Spatial resolution (in degrees) of the potential sites.
spatial_resolution: 0.25
# Start time and end time of the analysis.
time_slice: ['2010-01-01T00:00', '2019-12-31T23:00']
resampling_rate: 3
# Technologies to deploy.
regions: ['GB', 'NL', 'FR', 'DE', 'DK', 'NO', 'PL', 'IE', 'IT', 'SE', 'FI', 'ES', 'GR', 'PT', 'BE', 'LT', 'LV', 'EE', 'HR']
technologies: ['wind_offshore']
deployments: [[80], [60], [57], [36], [35], [30], [28], [22], [20], [20], [15], [13], [10], [9], [6], [4], [3], [1], [1]]
load_coverage: 0.3
siting_params:
# Defines how \alpha is considered in space and time.
......
# Path to data folder
-#data_path: 'D:/ULg_PhD_work/datasets/resite_ip/'
-data_path: '/data/dcradu/resite_ip/'
+data_path: 'D:/ULg_PhD_work/datasets/resite_ip/'
+#data_path: '/data/dcradu/resite_ip/'
# Spatial resolution (in degrees) of the potential sites.
spatial_resolution: 0.25
# Start time and end time of the analysis.
time_slice: ['2010-01-01T00:00', '2019-12-31T23:00']
resampling_rate: 3
# Technologies to deploy.
regions: ['EU']
technologies: ['wind_offshore']
deployments: [[468]]
load_coverage: 0.3
siting_params:
# Defines how \alpha is considered in space and time.
......
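For reference, a hedged sketch of how parameters such as resampling_rate, time_slice and load_coverage from a config like the one above can be read; the repository's own loader is read_inputs (used further down), whose internals are not shown here, so the direct yaml.safe_load call below is illustrative only:

import yaml

# Parse the parameters of interest straight from a YAML string (illustrative values from above).
cfg = yaml.safe_load("""
resampling_rate: 3
time_slice: ['2010-01-01T00:00', '2019-12-31T23:00']
load_coverage: 0.3
""")
rate = cfg['resampling_rate']    # hours between samples
start, end = cfg['time_slice']   # analysis horizon
coverage = cfg['load_coverage']  # fraction of demand to be covered, e.g. 0.3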
......@@ -25,7 +25,7 @@ wind_onshore:
wind_offshore:
where: 'offshore'
-filters: ['resource_quality', 'bathymetry', 'latitude', 'distance', 'legacy']
+filters: ['bathymetry', 'latitude', 'distance', 'legacy']
converter_IV: 'V90'
converter_III: 'V90'
converter_II: 'V164'
......@@ -38,7 +38,7 @@ wind_offshore:
protected_areas_selection: ['Ia', 'Ib', 'II', 'V']
protected_areas_distance_threshold: 5.
depth_threshold_low: 0.
-depth_threshold_high: 199.
+depth_threshold_high: 999.
altitude_threshold: 0.
terrain_slope_threshold: 1.
forestry_ratio_threshold: 1.
......
......@@ -16,7 +16,7 @@ import geopy
import logging
logging.basicConfig(level=logging.INFO, format=f"%(levelname)s %(asctime)s - %(message)s", datefmt='%Y-%m-%d %H:%M:%S')
-logging.disable(logging.CRITICAL)
+# logging.disable(logging.CRITICAL)
logger = logging.getLogger(__name__)
......@@ -425,14 +425,14 @@ def get_potential_per_site(input_dict, tech_parameters, spatial_resolution):
return output_dict
-def smooth_load_data(data_path, regions, date_slice, delta):
+def smooth_load_data(data_path, regions, date_slice, delta, resample_rate):
load_data_fn = join(data_path, 'input/load_data', 'load_entsoe_2006_2020_full.csv')
load_data = read_csv(load_data_fn, index_col=0)
# From MW to GW
load_data = load_data.divide(1e3)
load_data.index = to_datetime(load_data.index)
-load_data_sliced = load_data.loc[date_slice[0]:date_slice[1]]
+load_data_sliced = load_data.loc[date_slice[0]:date_slice[1]].resample(f"{resample_rate}H").mean()
regions_list = return_region_divisions(regions, data_path)
load_vector = load_data_sliced[regions_list]
......
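A standalone illustration of what the new resample_rate argument does in smooth_load_data: the hourly load slice is averaged down to the same rate as the capacity-factor data. The frame below is synthetic and only the divide/loc/resample chain mirrors the function:

import numpy as np
import pandas as pd

resample_rate = 3  # hours, passed in from the model config
idx = pd.date_range('2010-01-01T00:00', '2010-01-02T23:00', freq='H')
load_data = pd.DataFrame({'FR': np.random.rand(len(idx)) * 60_000}, index=idx)  # synthetic MW values

load_data = load_data.divide(1e3)  # MW -> GW, as in the function
load_sliced = load_data.loc['2010-01-01':'2010-01-02'].resample(f"{resample_rate}H").mean()
assert len(load_sliced) == len(idx) // resample_rate  # 48 hourly values -> 16 three-hourly means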
......@@ -5,6 +5,7 @@ from numpy import argmax, ceil, float64
import argparse
import pickle
+from copy import deepcopy
from helpers import read_inputs, init_folder, xarray_to_ndarray, generate_jl_input, \
get_potential_per_site, capacity_to_cardinality
from tools import read_database, return_filtered_coordinates, selected_data, return_output, resource_quality_mapping, \
......@@ -12,7 +13,7 @@ from tools import read_database, return_filtered_coordinates, selected_data, ret
import logging
logging.basicConfig(level=logging.INFO, format=f"%(levelname)s %(asctime)s - %(message)s", datefmt='%Y-%m-%d %H:%M:%S')
-logging.disable(logging.CRITICAL)
+# logging.disable(logging.CRITICAL)
logger = logging.getLogger(__name__)
......@@ -25,6 +26,8 @@ def parse_args():
parser.add_argument('--alpha_method', type=str, default=None)
parser.add_argument('--alpha_coverage', type=str, default=None)
parser.add_argument('--delta', type=int, default=None)
+parser.add_argument('--resampling_rate', type=str)
+parser.add_argument('--maxdepth', type=str)
parsed_args = vars(parser.parse_args())
......@@ -38,6 +41,7 @@ if __name__ == '__main__':
logger.info('Starting data pre-processing.')
model_parameters = read_inputs(f"../config_model_{args['k']}.yml")
+model_parameters['resampling_rate'] = args['resampling_rate']
siting_parameters = model_parameters['siting_params']
tech_parameters = read_inputs('../config_techs.yml')
......@@ -52,11 +56,17 @@ if __name__ == '__main__':
database = read_database(data_path, spatial_resolution)
-if isfile(join(data_path, f"input/capacity_factors_data_{args['k']}.p")):
-capacity_factors_data = pickle.load(open(join(data_path, f"input/capacity_factors_data_{args['k']}.p"), 'rb'))
-site_coordinates = pickle.load(open(join(data_path, f"input/site_coordinates_{args['k']}.p"), 'rb'))
-legacy_coordinates = pickle.load(open(join(data_path, f"input/legacy_coordinates_{args['k']}.p"), 'rb'))
+if isfile(join(data_path, f"input/capacity_factors_data_{args['k']}"
+f"_{args['resampling_rate']}h_{args['maxdepth']}m.p")):
+capacity_factors_data = \
+pickle.load(open(join(data_path,
+f"input/capacity_factors_data_{args['k']}_{args['resampling_rate']}h_"
+f"{args['maxdepth']}m.p"), 'rb'))
+site_coordinates = \
+pickle.load(open(join(data_path, f"input/site_coordinates_{args['k']}_{args['maxdepth']}m.p"), 'rb'))
+legacy_coordinates = \
+pickle.load(open(join(data_path, f"input/legacy_coordinates_{args['k']}_{args['maxdepth']}m.p"), 'rb'))
logger.info('Input files read from disk.')
else:
......@@ -65,12 +75,22 @@ if __name__ == '__main__':
truncated_data = selected_data(database, site_coordinates, time_horizon)
capacity_factors_data = return_output(truncated_data, data_path)
-pickle.dump(capacity_factors_data,
-open(join(data_path, f"input/capacity_factors_data_{args['k']}.p"), 'wb'), protocol=4)
+resampled_data = deepcopy(capacity_factors_data)
+rate = model_parameters['resampling_rate']
+for region in capacity_factors_data.keys():
+for tech in capacity_factors_data[region].keys():
+resampled_data[region][tech] = \
+capacity_factors_data[region][tech].resample(time=f"{rate}H").mean(dim='time')
+pickle.dump(resampled_data,
+open(join(data_path, f"input/capacity_factors_data_{args['k']}_{args['resampling_rate']}h_"
+f"{args['maxdepth']}m.p"), 'wb'), protocol=4)
pickle.dump(site_coordinates,
-open(join(data_path, f"input/site_coordinates_{args['k']}.p"), 'wb'), protocol=4)
+open(join(data_path, f"input/site_coordinates_{args['k']}_"
+f"{args['maxdepth']}m.p"), 'wb'), protocol=4)
pickle.dump(legacy_coordinates,
-open(join(data_path, f"input/legacy_coordinates_{args['k']}.p"), 'wb'), protocol=4)
+open(join(data_path, f"input/legacy_coordinates_{args['k']}_"
+f"{args['maxdepth']}m.p"), 'wb'), protocol=4)
logger.info('Input files written to disk.')
time_windows_data = resource_quality_mapping(capacity_factors_data, siting_parameters)
......@@ -84,7 +104,12 @@ if __name__ == '__main__':
jl_dict = generate_jl_input(deployment_dict, site_coordinates, site_positions, legacy_coordinates)
total_no_locs = sum(deployment_dict[r][t] for r in deployment_dict.keys() for t in deployment_dict[r].keys())
c = int(ceil(siting_parameters['c'] * total_no_locs))
-output_folder = init_folder(model_parameters, total_no_locs, c, suffix=f"_{args['alpha_method']}_{args['alpha_coverage']}_d{args['delta']}")
+import sys
+sys.exit()
+output_folder = init_folder(model_parameters, total_no_locs, c,
+suffix=f"_{args['alpha_method']}_{args['alpha_coverage']}_d{args['delta']}")
logger.info('Data pre-processing finished. Opening Julia instance.')
......@@ -101,14 +126,6 @@ if __name__ == '__main__':
params['neighborhood'], params['initial_temp'],
params['no_iterations'], params['no_epochs'], params['no_runs'])
-for r in range(jl_sel.shape[0]):
-x = jl_sel[r, :]
-assert sum(x) == total_no_locs, f"Total cardinality in run {r} is {sum(x)} instead of {total_no_locs}."
-for partition in jl_dict['deployment_dict'].keys():
-ids = [k-1 for k, v in jl_dict['index_dict'].items() if v == partition]
-assert sum(x[ids]) == jl_dict['deployment_dict'][partition], \
-f"Cardinality in {partition} is {sum(x[ids])} instead of {jl_dict['deployment_dict'][partition]}."
with open(join(output_folder, 'config_model.yaml'), 'w') as outfile:
yaml.dump(model_parameters, outfile, default_flow_style=False, sort_keys=False)
with open(join(output_folder, 'config_techs.yaml'), 'w') as outfile:
......
......@@ -25,7 +25,7 @@ from helpers import filter_onshore_offshore_locations, union_regions, return_coo
import logging
logging.basicConfig(level=logging.INFO, format=f"%(levelname)s %(asctime)s - %(message)s", datefmt='%Y-%m-%d %H:%M:%S')
-logging.disable(logging.CRITICAL)
+# logging.disable(logging.CRITICAL)
logger = logging.getLogger(__name__)
......@@ -608,9 +608,11 @@ def critical_window_mapping(time_windows_dict, potentials_dict, deployments_dict
regions = model_params['regions']
date_slice = model_params['time_slice']
+sampling_rate = model_params['resampling_rate']
alpha = model_params['siting_params']['alpha']
delta = model_params['siting_params']['delta']
data_path = model_params['data_path']
+load_coverage = model_params['load_coverage']
key_list = return_dict_keys(time_windows_dict)
output_dict = deepcopy(time_windows_dict)
......@@ -619,7 +621,7 @@ def critical_window_mapping(time_windows_dict, potentials_dict, deployments_dict
assert alpha['coverage'] in ['partition', 'system'], f"Criticality coverage {alpha['coverage']} not available."
assert alpha['norm'] in ['min', 'max'], f"Norm {alpha['norm']} not available."
-load_ds = smooth_load_data(data_path, regions, date_slice, delta)
+load_ds = smooth_load_data(data_path, regions, date_slice, delta, sampling_rate)
if alpha['coverage'] == 'system':
......@@ -628,7 +630,7 @@ def critical_window_mapping(time_windows_dict, potentials_dict, deployments_dict
if alpha['method'] == 'potential':
# Covering only a fraction of 30% of demand, as per EC expectations
-load_ds_system = load_ds_system.multiply(0.3)
+load_ds_system = load_ds_system.multiply(load_coverage)
deployments = sum(deployments_dict[key][subkey] for key in deployments_dict
for subkey in deployments_dict[key])
......@@ -656,8 +658,9 @@ def critical_window_mapping(time_windows_dict, potentials_dict, deployments_dict
if alpha['method'] == 'potential':
-# Covering only a fraction of the demand via offshore wind. EC suggests 30% EU-wide, no data per country currently available
-load_ds_region = load_ds_region.multiply(0.3)
+# Covering only a fraction of the demand via offshore wind. EC suggests 30% EU-wide,
+# no data per country currently available
+load_ds_region = load_ds_region.multiply(load_coverage)
deployments = sum(deployments_dict[key][subkey] for key in deployments_dict
for subkey in deployments_dict[key] if key == region)
......@@ -726,11 +729,13 @@ def retrieve_index_dict(deployment_vector, coordinate_dict):
def retrieve_site_data(model_parameters, capacity_factor_data, criticality_data, deployment_dict,
location_mapping, comp_site_coordinates, legacy_sites, output_folder, benchmark):
+sampling_rate = model_parameters['resampling_rate']
c = int(ceil(model_parameters['siting_params']['c'] * sum(deployment_dict[r][t] for r in deployment_dict.keys()
for t in deployment_dict[r].keys())))
output_by_tech = collapse_dict_region_level(capacity_factor_data)
-time_dt = date_range(start=model_parameters['time_slice'][0], end=model_parameters['time_slice'][1], freq='H')
+time_dt = date_range(start=model_parameters['time_slice'][0], end=model_parameters['time_slice'][1],
+freq=f"{sampling_rate}H")
for tech in output_by_tech:
_, index = unique(output_by_tech[tech].locations, return_index=True)
......
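A small sanity check of why the output time index now uses the resampling rate: a date_range built with freq=f"{sampling_rate}H" lines up with the resampled capacity-factor series, while the previous hourly index would be sampling_rate times longer. Values below are illustrative:

import pandas as pd

sampling_rate = 3
hourly = pd.date_range('2010-01-01T00:00', '2010-01-01T23:00', freq='H')
coarse = pd.date_range('2010-01-01T00:00', '2010-01-01T23:00', freq=f"{sampling_rate}H")
assert len(hourly) == sampling_rate * len(coarse)  # 24 hourly stamps vs. 8 three-hourly stamps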