diff --git a/config_model.yml b/config_model.yml
index ce2a8bd1c598c2b1c4f8ec25555815d875e12177..fb86ffd76ce8a253017c4dc7a2e4c6c6831e9cce 100644
--- a/config_model.yml
+++ b/config_model.yml
@@ -1,15 +1,15 @@
 # Path to data folder
-#data_path: 'D:/ULg_PhD_work/datasets/resite_ip/'
-data_path: '/data/dcradu/resite_ip/'
+data_path: 'D:/ULg_PhD_work/datasets/resite_ip/'
+#data_path: '/data/dcradu/resite_ip/'
 
 # Spatial resolution (in degrees) of the potential sites.
 spatial_resolution: 0.28
 # Start time and end time of the analysis.
-time_slice: ['2014-01-01T00:00', '2018-12-31T23:00']
+time_slice: ['2014-01-01T00:00', '2014-01-07T23:00']
 # Technologies to deploy.
-regions: ['FR']
+regions: ['FR', 'ES']
 technologies: ['wind_onshore']
-deployments: [[19]]
+deployments: [[19], [12]]
 
 siting_params:
   smooth_measure: 'mean'
diff --git a/src/main.py b/src/main.py
index 015a105290c71848eb64863d519fb8de778bf79a..8ac6d7297ad15242d3466e74e0df47139338dba3 100644
--- a/src/main.py
+++ b/src/main.py
@@ -1,7 +1,7 @@
 import pickle
 import yaml
 from os.path import join, isfile
-from numpy import array, argsort
+from numpy import array, argsort, sum
 from pyomo.opt import SolverFactory
 import time
 
@@ -24,13 +24,20 @@ if __name__ == '__main__':
                                     model_parameters['technologies'],
                                     model_parameters['deployments'])
 
-    if isfile(join(data_path, 'input_data/criticality_matrix.p')):
+    if isfile(join(data_path, 'input/criticality_matrix.p')):
 
-        custom_log(' WARNING! Instance data read from files. Make sure the files are the ones that you need.')
-        criticality_data = pickle.load(open(join(data_path, 'input_data/criticality_matrix.p', 'rb')))
-        site_coordinates = pickle.load(open(join(data_path, 'input_data/site_coordinates.p', 'rb')))
-        capacity_factors_data = pickle.load(open(join(data_path, 'input_data/capacity_factors_data.p', 'rb')))
-        site_positions = pickle.load(open(join(data_path, 'input_data/site_positions.p', 'rb')))
+        custom_log(' WARNING! Instance data read from files.')
+        criticality_data = pickle.load(open(join(data_path, 'input/criticality_matrix.p'), 'rb'))
+        site_coordinates = pickle.load(open(join(data_path, 'input/site_coordinates.p'), 'rb'))
+        capacity_factors_data = pickle.load(open(join(data_path, 'input/capacity_factors_data.p'), 'rb'))
+        site_positions = pickle.load(open(join(data_path, 'input/site_positions.p'), 'rb'))
+
+        r = list(site_coordinates.keys())
+        d = sum(model_parameters['deployments'])
+        t = model_parameters['technologies']
+        ts = len(capacity_factors_data[list(site_coordinates.keys())[0]][model_parameters['technologies'][0]].time)
+        custom_log(f" Reading data for a model with a spatial resolution of {float(spatial_resolution)}, "
+                   f"covering {r}, siting {d} {t} sites and {ts} time steps.")
 
     else:
 
@@ -44,10 +51,10 @@ if __name__ == '__main__':
         criticality_data = xarray_to_ndarray(critical_window_mapping(time_windows_data, model_parameters))
         site_positions = sites_position_mapping(time_windows_data)
 
-        pickle.dump(criticality_data, open(join(data_path, 'input_data/criticality_matrix.p', 'wb')), protocol=4)
-        pickle.dump(site_coordinates, open(join(data_path, 'input_data/site_coordinates.p', 'wb')), protocol=4)
-        pickle.dump(capacity_factors_data, open(join(data_path, 'input_data/capacity_factors_data.p', 'wb')), protocol=4)
-        pickle.dump(site_positions, open(join(data_path, 'input_data/site_positions.p', 'wb')), protocol=4)
+        pickle.dump(criticality_data, open(join(data_path, 'input/criticality_matrix.p'), 'wb'), protocol=4)
+        pickle.dump(site_coordinates, open(join(data_path, 'input/site_coordinates.p'), 'wb'), protocol=4)
+        pickle.dump(capacity_factors_data, open(join(data_path, 'input/capacity_factors_data.p'), 'wb'), protocol=4)
+        pickle.dump(site_positions, open(join(data_path, 'input/site_positions.p'), 'wb'), protocol=4)
 
     custom_log(' Data read. Building model.')
 