diff --git a/config_model.yml b/config_model.yml
index 8ef6219103fc88c9f6b975d62e9861af034801f3..3b4e8d9b0a725a16d3890d54d92efde8c843f7c8 100644
--- a/config_model.yml
+++ b/config_model.yml
@@ -5,7 +5,7 @@ data_path: '/home/dcradu/data/resite_ip/'
 # Spatial resolution (in degrees) of the potential sites.
 spatial_resolution: 0.25
 # Start time and end time of the analysis.
-time_slice: ['2010-01-01T00:00', '2019-12-31T23:00']
+time_slice: ['2011-01-01T00:00', '2020-12-31T23:00']
 # Technologies to deploy.
 regions: ['EU']
 technologies: ['wind_onshore']
@@ -20,17 +20,17 @@ siting_params:
   # Time-window length used to compute the criticality indicator. Integer value.
   delta: 1
   # Solution method: BB or HEU or RAND or GRED.
-  c: 112
+  c: 1
 
   solution_method:
     BB:
       # Branch & Bound
-      set: False
+      set: True
       mir: False
       solver: 'gurobi'
       mipgap: 0.01
-      timelimit: 43200
-      threads: 36
+      timelimit: 1800
+      threads: 0
     MIRSA:
       # Simulated Annealing with Local Search
       set: False
@@ -59,7 +59,7 @@ siting_params:
       algorithm: 'RS'
     SGHLS:
       # Stochastic greedy & simulated annealing
-      set: True
+      set: False
       neighborhood: 1
       no_iterations: 2000
       no_epochs: 500
diff --git a/src/helpers.py b/src/helpers.py
index 5069de505839b915d381845fd8fbd8bf036d2edc..c2e049540417462be7ae983d2d942f90b2a3fbe9 100644
--- a/src/helpers.py
+++ b/src/helpers.py
@@ -339,21 +339,26 @@ def return_coordinates_from_shapefiles(resource_dataset, shapefiles_region):
 
 def retrieve_load_data_partitions(data_path, date_slice, alpha, delta, regions, norm_type):
 
-    load_data_fn = join(data_path, 'input/load_data', 'load_2000_2019.csv')
+    load_data_fn = join(data_path, 'input/load_data', 'load_entsoe_2006_2020_patch.csv')
     load_data = read_csv(load_data_fn, index_col=0)
-    load_data.index = date_range('2000-01-02T00:00', '2019-12-31T23:00', freq='H')
+    load_data.index = to_datetime(load_data.index)
 
     load_data_sliced = load_data.loc[date_slice[0]:date_slice[1]]
 
     regions_list = return_region_divisions(regions, data_path)
+    load_data_sliced = load_data_sliced[regions_list].fillna(method='pad', axis='index')
+    nan_regions = load_data_sliced.columns[load_data_sliced.isna().any()].tolist()
+
+    if nan_regions:
+        raise ValueError(f"Regions {nan_regions} have missing load values. To be filled before proceeding.")
 
     if alpha == 'load_central':
-        load_vector = load_data_sliced[regions_list].sum(axis=1)
+        load_data_sliced = load_data_sliced.sum(axis=1)
     elif alpha == 'load_partition':
-        load_vector = load_data_sliced[regions_list]
+        pass
     else:
         raise ValueError(' This way of defining criticality is not available.')
 
-    load_vector_norm = return_filtered_and_normed(load_vector, delta, norm_type)
+    load_vector_norm = return_filtered_and_normed(load_data_sliced, delta, norm_type)
 
     return load_vector_norm
diff --git a/src/main.py b/src/main.py
index 1704c6c9b37a6adb104367a6b4258ae4e230d7eb..cd38fd1023ace5e7290811600d19d82dfb228332 100644
--- a/src/main.py
+++ b/src/main.py
@@ -18,6 +18,7 @@ def parse_args():
 
     parser.add_argument('--c', type=int)
     parser.add_argument('--run_BB', type=bool, default=False)
+    parser.add_argument('--run_MIR', type=bool, default=False)
    parser.add_argument('--run_MIRSA', type=bool, default=False)
     parser.add_argument('--run_GRED_DET', type=bool, default=False)
     parser.add_argument('--run_GRED_STO', type=bool, default=False)
@@ -85,6 +86,7 @@ if __name__ == '__main__':
     custom_log(' Data read. Building model.')
 
     siting_parameters['solution_method']['BB']['set'] = args['run_BB']
+    siting_parameters['solution_method']['BB']['mir'] = args['run_MIR']
     siting_parameters['solution_method']['MIRSA']['set'] = args['run_MIRSA']
     siting_parameters['solution_method']['GRED_DET']['set'] = args['run_GRED_DET']
     siting_parameters['solution_method']['GRED_STO']['set'] = args['run_GRED_STO']
@@ -102,7 +104,7 @@ if __name__ == '__main__':
 
         custom_log(' BB chosen to solve the IP.')
         params = siting_parameters['solution_method']['BB']
-        output_folder = init_folder(model_parameters, c, f"_BB_MIR_{params['mir']}")
+        output_folder = init_folder(model_parameters, c, f"_BB_MIR_{args['run_MIR']}")
         with open(join(output_folder, 'config_model.yaml'), 'w') as outfile:
             yaml.dump(model_parameters, outfile, default_flow_style=False, sort_keys=False)
         with open(join(output_folder, 'config_techs.yaml'), 'w') as outfile:
@@ -115,7 +117,7 @@ if __name__ == '__main__':
         opt.options['TimeLimit'] = params['timelimit']
 
         instance = build_ip_model(deployment_dict, site_coordinates, criticality_data,
                                   c, output_folder, args['run_MIR'])
-                                  c, output_folder, params['mir'])
+                                  c, output_folder, args['run_MIR'])
         custom_log(' Sending model to solver.')
 
         results = opt.solve(instance, tee=False, keepfiles=False,