Skip to content
Snippets Groups Projects
Commit 232333e4 authored by David Radu's avatar David Radu
Browse files

generation of two config models, RO-BG removed from EU, partitioned-based...

generation of two config models, RO-BG removed from EU, partition-based criticality covers a share of demand
parent 6755f502
No related branches found
No related tags found
No related merge requests found
# Path to data folder
#data_path: 'D:/ULg_PhD_work/datasets/resite_ip/'
data_path: '/data/dcradu/resite_ip/'
# Spatial resolution (in degrees) of the potential sites.
spatial_resolution: 0.25
# Start time and end time of the analysis.
time_slice: ['2010-01-01T00:00', '2019-12-31T23:00']
# Regions and technologies to deploy, with per-region deployment counts (one list entry per region).
regions: ['GB', 'NL', 'FR', 'DE', 'DK', 'NO', 'PL', 'IE', 'IT', 'SE', 'FI', 'ES', 'GR', 'PT', 'BE', 'LT', 'LV', 'EE', 'HR']
technologies: ['wind_offshore']
deployments: [[80], [60], [57], [36], [35], [30], [28], [22], [20], [20], [15], [13], [10], [9], [6], [4], [3], [1], [1]]
siting_params:
# Defines how \alpha is considered in space and time.
alpha:
method: 'load' # 'potentials'
coverage: 'system' # 'partition'
smoothing: 'mean' # 'median'
norm: 'min' # 'max'
# Time-window length used to compute the criticality indicator. Integer value.
delta: 1 # \in \mathbb{N}
# Threshold
c: 0.1 # \in [0, 1]
# Solution method: BB or HEU or RAND or GRED.
solution_method:
neighborhood: 1
no_iterations: 2000
no_epochs: 500
initial_temp: 100.
no_runs: 30
# Path to data folder
#data_path: 'D:/ULg_PhD_work/datasets/resite_ip/'
data_path: '/data/dcradu/resite_ip/'
# Spatial resolution (in degrees) of the potential sites.
spatial_resolution: 0.25
# Start time and end time of the analysis.
time_slice: ['2010-01-01T00:00', '2019-12-31T23:00']
# Regions and technologies to deploy, with per-region deployment counts (one list entry per region).
regions: ['EU']
technologies: ['wind_offshore']
deployments: [[468]]
siting_params:
# Defines how \alpha is considered in space and time.
alpha:
method: 'load' # 'potentials'
coverage: 'system' # 'partition'
smoothing: 'mean' # 'median'
norm: 'min' # 'max'
# Time-window length used to compute the criticality indicator. Integer value.
delta: 1 # \in \mathbb{N}
# Threshold
c: 0.1 # \in [0, 1]
# Solution method: BB or HEU or RAND or GRED.
solution_method:
neighborhood: 1
no_iterations: 2000
no_epochs: 500
initial_temp: 100.
no_runs: 30
......@@ -160,7 +160,7 @@ def return_region_divisions(region_list, data_path):
region_subdivisions = ['AT', 'BE', 'DE', 'DK', 'ES',
'FR', 'GB', 'IE', 'IT', 'LU',
'NL', 'NO', 'PT', 'SE', 'CH', 'CZ',
'EE', 'LV', 'RO', 'BG', 'HR', 'RS',
'EE', 'LV', 'HR',
'FI', 'GR', 'HR', 'HU', 'LT',
'PL', 'SI', 'SK']
elif region == 'NA':
......
......@@ -20,10 +20,10 @@ def parse_args():
parser = argparse.ArgumentParser(description='Command line arguments.')
parser.add_argument('--k', type=str, default=None)
parser.add_argument('--c', type=float)
parser.add_argument('--alpha_method', type=str, default=None)
parser.add_argument('--alpha_coverage', type=str, default=None)
parser.add_argument('--alpha_norm', type=str, default=None)
parser.add_argument('--delta', type=int, default=None)
parsed_args = vars(parser.parse_args())
......@@ -37,13 +37,12 @@ if __name__ == '__main__':
logger.info('Starting data pre-processing.')
model_parameters = read_inputs('../config_model.yml')
model_parameters = read_inputs(f"../config_model_{args['k']}.yml")
siting_parameters = model_parameters['siting_params']
tech_parameters = read_inputs('../config_techs.yml')
siting_parameters['alpha']['method'] = args['alpha_method']
siting_parameters['alpha']['coverage'] = args['alpha_coverage']
siting_parameters['alpha']['norm'] = args['alpha_norm']
siting_parameters['delta'] = int(args['delta'])
siting_parameters['c'] = args['c']
......@@ -53,11 +52,11 @@ if __name__ == '__main__':
database = read_database(data_path, spatial_resolution)
if isfile(join(data_path, 'input/capacity_factors_data_partitioned.p')):
if isfile(join(data_path, f"input/capacity_factors_data_{args['k']}.p")):
capacity_factors_data = pickle.load(open(join(data_path, 'input/capacity_factors_data_partitioned.p'), 'rb'))
site_coordinates = pickle.load(open(join(data_path, 'input/site_coordinates_partitioned.p'), 'rb'))
legacy_coordinates = pickle.load(open(join(data_path, 'input/legacy_coordinates_partitioned.p'), 'rb'))
capacity_factors_data = pickle.load(open(join(data_path, f"input/capacity_factors_data_{args['k']}.p"), 'rb'))
site_coordinates = pickle.load(open(join(data_path, f"input/site_coordinates_{args['k']}.p"), 'rb'))
legacy_coordinates = pickle.load(open(join(data_path, f"input/legacy_coordinates_{args['k']}.p"), 'rb'))
logger.info('Input files read from disk.')
else:
......@@ -67,11 +66,11 @@ if __name__ == '__main__':
capacity_factors_data = return_output(truncated_data, data_path)
pickle.dump(capacity_factors_data,
open(join(data_path, 'input/capacity_factors_data_partitioned.p'), 'wb'), protocol=4)
open(join(data_path, f"input/capacity_factors_data_{args['k']}.p"), 'wb'), protocol=4)
pickle.dump(site_coordinates,
open(join(data_path, 'input/site_coordinates_partitioned.p'), 'wb'), protocol=4)
open(join(data_path, f"input/site_coordinates_{args['k']}.p"), 'wb'), protocol=4)
pickle.dump(legacy_coordinates,
open(join(data_path, 'input/legacy_coordinates_partitioned.p'), 'wb'), protocol=4)
open(join(data_path, f"input/legacy_coordinates_{args['k']}.p"), 'wb'), protocol=4)
logger.info('Input files written to disk.')
time_windows_data = resource_quality_mapping(capacity_factors_data, siting_parameters)
......@@ -85,6 +84,7 @@ if __name__ == '__main__':
jl_dict = generate_jl_input(deployment_dict, site_coordinates, site_positions, legacy_coordinates)
total_no_locs = sum(deployment_dict[r][t] for r in deployment_dict.keys() for t in deployment_dict[r].keys())
c = int(ceil(siting_parameters['c'] * total_no_locs))
output_folder = init_folder(model_parameters, total_no_locs, c, suffix=f"_{args['alpha_method']}_{args['alpha_coverage']}_d{args['delta']}")
logger.info('Data pre-processing finished. Opening Julia instance.')
......@@ -109,10 +109,6 @@ if __name__ == '__main__':
assert sum(x[ids]) == jl_dict['deployment_dict'][partition], \
f"Cardinality in {partition} is {sum(x[ids])} instead of {jl_dict['deployment_dict'][partition]}."
output_folder = init_folder(model_parameters, total_no_locs, c,
suffix=f"_MIRSA_{args['alpha_method']}_{args['alpha_coverage']}"
f"_{args['alpha_norm']}_d{args['delta']}")
with open(join(output_folder, 'config_model.yaml'), 'w') as outfile:
yaml.dump(model_parameters, outfile, default_flow_style=False, sort_keys=False)
with open(join(output_folder, 'config_techs.yaml'), 'w') as outfile:
......@@ -125,7 +121,7 @@ if __name__ == '__main__':
locations_dict = retrieve_location_dict(jl_locations_vector, model_parameters, site_positions)
retrieve_site_data(model_parameters, capacity_factors_data, criticality_data, deployment_dict,
site_positions, locations_dict, legacy_coordinates, output_folder, benchmark=None)
site_positions, locations_dict, legacy_coordinates, output_folder, benchmark="PROD")
pickle.dump(jl_sel, open(join(output_folder, 'solution_matrix.p'), 'wb'))
pickle.dump(jl_obj, open(join(output_folder, 'objective_vector.p'), 'wb'))
......
......@@ -626,6 +626,10 @@ def critical_window_mapping(time_windows_dict, potentials_dict, deployments_dict
load_ds_system = load_ds.sum(axis=1)
if alpha['method'] == 'potential':
# Cover only a 30% share of demand, as per EC expectations
load_ds_system = load_ds_system.multiply(0.3)
deployments = sum(deployments_dict[key][subkey] for key in deployments_dict
for subkey in deployments_dict[key])
l_norm = norm_load_by_deployments(load_ds_system, deployments)
......@@ -651,6 +655,10 @@ def critical_window_mapping(time_windows_dict, potentials_dict, deployments_dict
load_ds_region = load_ds[region]
if alpha['method'] == 'potential':
# Cover only a fraction of demand via offshore wind. The EC suggests 30% EU-wide; no per-country data is currently available
load_ds_region = load_ds_region.multiply(0.3)
deployments = sum(deployments_dict[key][subkey] for key in deployments_dict
for subkey in deployments_dict[key] if key == region)
l_norm = norm_load_by_deployments(load_ds_region, deployments)
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment