Commit 36b073e1 authored by David Radu

code clean-up

parent 66c2dbb2
# Path to data folder
data_path: 'path_to_data'
# Spatial resolution (in degrees) of the potential sites.
spatial_resolution: 0.25
# Start time and end time of the analysis.
time_slice: ['2018-01-01T00:00', '2018-01-31T23:00']
# Regions and technologies to deploy.
regions: ['BE', 'LT', 'LV', 'EE', 'HR']
technologies: ['wind_offshore']
deployments: [[3], [4], [3], [2], [1]]
siting_params:
  smooth_measure: 'mean'  # 'median', percentiles
  # Defines how \alpha is considered in space and time.
  alpha: 'load_central'  # 'load_central', 'load_partition'
  # Normalization procedure (detailed in tools.py).
  norm_type: 'max'  # 'max', 'min'
  # Time-window length used to compute the criticality indicator. Integer value.
  delta: 1  # \in \mathbb{N}
  # Criticality threshold.
  c: 1  # < n, \in \mathbb{N}
  # Solution method ('SA' or 'SGH') and associated parameters.
  solution_method:
    SA:
      set: False
      neighborhood: 1
      no_iterations: 100
      no_epochs: 100
      initial_temp: 200.
      p: 0.05
      no_runs: 3
      no_runs_init: 5
      algorithm: 'MIR'  # 'MIR', 'SGH'
    SGH:
      set: True
      p: 0.05
      no_runs: 3
      algorithm: 'SGH'
\ No newline at end of file
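
For orientation, a config file like the one above is consumed as a plain nested dictionary. A minimal sketch of the loading step, assuming only PyYAML (the repository's read_inputs helper, imported in the main script further down, presumably does something equivalent):

import yaml

def read_inputs(path):
    """Load a YAML configuration file into a nested dict."""
    with open(path, 'r') as f:
        return yaml.safe_load(f)

params = read_inputs('config_model.yml')
siting = params['siting_params']
# Pick the heuristic whose 'set' flag is enabled (SGH in the file above).
method = next(name for name, cfg in siting['solution_method'].items() if cfg.get('set'))
print(method, siting['solution_method'][method]['no_runs'])  # SGH 3
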
@@ -6,10 +6,12 @@ data_path: '/data/dcradu/resite_ip/'
 spatial_resolution: 0.25
 # Start time and end time of the analysis.
 time_slice: ['2010-01-01T00:00', '2019-12-31T23:00']
+resampling_rate: 3
 # Technologies to deploy.
 regions: ['GB', 'NL', 'FR', 'DE', 'DK', 'NO', 'PL', 'IE', 'IT', 'SE', 'FI', 'ES', 'GR', 'PT', 'BE', 'LT', 'LV', 'EE', 'HR']
 technologies: ['wind_offshore']
 deployments: [[80], [60], [57], [36], [35], [30], [28], [22], [20], [20], [15], [13], [10], [9], [6], [4], [3], [1], [1]]
+load_coverage: 0.3
 siting_params:
   # Defines how \alpha is considered in space and time.
......
# Path to data folder
#data_path: 'D:/ULg_PhD_work/datasets/resite_ip/'
data_path: '/data/dcradu/resite_ip/'
# Spatial resolution (in degrees) of the potential sites.
spatial_resolution: 0.25
# Start time and end time of the analysis.
time_slice: ['2010-01-01T00:00', '2019-12-31T23:00']
# Resampling rate (in hours) applied to the hourly time series.
resampling_rate: 3
# Regions and technologies to deploy.
regions: ['GB', 'NL', 'FR', 'DE', 'DK', 'NO', 'PL', 'IE', 'IT', 'SE', 'FI', 'ES', 'GR', 'PT', 'BE', 'LT', 'LV', 'EE', 'HR']
technologies: ['wind_offshore']
deployments: [[80], [60], [57], [36], [35], [30], [28], [22], [20], [20], [15], [13], [10], [9], [6], [4], [3], [1], [1]]
# Share of load to be covered by the sited fleet.
load_coverage: 0.3
siting_params:
  # Defines how \alpha is considered in space and time.
  alpha:
    method: 'load'  # 'potentials'
    coverage: 'system'  # 'partition'
    smoothing: 'mean'  # 'median'
    norm: 'min'  # 'max'
  # Time-window length used to compute the criticality indicator. Integer value.
  delta: 1  # \in \mathbb{N}
  # Criticality threshold, as a share of the total number of deployed sites.
  c: 0.1  # \in [0, 1]
  # Solution method (simulated annealing) and associated parameters.
  solution_method:
    neighborhood: 1
    no_iterations: 5000
    no_epochs: 500
    initial_temp: 100.
    no_runs: 30
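
The resampling_rate entry above coarsens the hourly capacity-factor series before the criticality computation. A minimal, self-contained xarray sketch of that step, mirroring the .resample(time=f"{rate}H").mean(dim='time') call in the main script further down (the data here are made up):

import numpy as np
import pandas as pd
import xarray as xr

# 24 hourly capacity factors for one site, averaged into 3-hour blocks.
time = pd.date_range('2010-01-01', periods=24, freq='H')
cf = xr.DataArray(np.linspace(0., 1., 24), coords={'time': time}, dims='time')
cf_3h = cf.resample(time='3H').mean(dim='time')
print(cf_3h.sizes['time'])  # 8 three-hour windows
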
# Path to data folder
#data_path: 'D:/ULg_PhD_work/datasets/resite_ip/'
data_path: '/data/dcradu/resite_ip/'
# Spatial resolution (in degrees) of the potential sites.
spatial_resolution: 0.25
# Start time and end time of the analysis.
time_slice: ['2010-01-01T00:00', '2019-12-31T23:00']
# Resampling rate (in hours) applied to the hourly time series.
resampling_rate: 3
# Regions and technologies to deploy.
regions: ['EU']
technologies: ['wind_offshore']
deployments: [[468]]
# Share of load to be covered by the sited fleet.
load_coverage: 0.3
siting_params:
  # Defines how \alpha is considered in space and time.
  alpha:
    method: 'load'  # 'potentials'
    coverage: 'system'  # 'partition'
    smoothing: 'mean'  # 'median'
    norm: 'min'  # 'max'
  # Time-window length used to compute the criticality indicator. Integer value.
  delta: 1  # \in \mathbb{N}
  # Criticality threshold, as a share of the total number of deployed sites.
  c: 0.1  # \in [0, 1]
  # Solution method (simulated annealing) and associated parameters.
  solution_method:
    neighborhood: 1
    no_iterations: 5000
    no_epochs: 500
    initial_temp: 100.
    no_runs: 30
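
The delta and c entries drive the criticality indicator built by resource_quality_mapping and critical_window_mapping (see the main script below). As a rough illustration only, not the repository's implementation, one plausible reading with made-up data and an assumed reference level alpha:

import numpy as np

rng = np.random.default_rng(0)
cf = rng.uniform(0., 1., size=(48, 5))  # 48 hourly capacity factors, 5 candidate sites
delta, alpha = 1, 0.3                   # window length and assumed reference level

# Average capacity factors over delta-length windows (delta = 1 keeps hourly data).
windows = np.stack([cf[t:t + delta].mean(axis=0) for t in range(cf.shape[0] - delta + 1)])

# Binary (windows x sites) matrix: 1 when a site's smoothed output reaches alpha.
criticality = (windows >= alpha).astype(np.float64)
print(criticality.shape)  # (48, 5)

Something of this shape (criticality_data in the main script) is what the integer threshold derived from c is compared against by the heuristic.
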
# Config file for various conversion technologies.
wind_onshore:
  where: 'onshore'
  filters: ['resource_quality', 'population_density', 'orography', 'forestry', 'water_mask', 'latitude']
  converter_IV: 'V110'
  converter_III: 'E103'
  converter_II: 'V90'
  converter_I: 'E-126'
  resource: 'wind'
  deployment: 'onshore'
  resource_threshold: 4.  # m/s
  population_density_threshold_low: 0.
  population_density_threshold_high: 100.
  protected_areas_selection: ['Ia', 'Ib', 'II']
  protected_areas_distance_threshold: 10.
  depth_threshold_low: 0.
  depth_threshold_high: 0.
  altitude_threshold: 1500.
  terrain_slope_threshold: 0.03
  forestry_ratio_threshold: 0.8
  latitude_threshold: 65.
wind_offshore:
  where: 'offshore'
  filters: ['resource_quality', 'bathymetry', 'latitude', 'distance', 'legacy']
  converter_IV: 'V90'
  converter_III: 'V90'
  converter_II: 'V164'
  converter_I: 'V164'
  resource: 'wind'
  deployment: 'offshore'
  resource_threshold: 5.  # m/s
  population_density_threshold_low: 0.
  population_density_threshold_high: 100.
  protected_areas_selection: ['Ia', 'Ib', 'II', 'V']
  protected_areas_distance_threshold: 5.
  depth_threshold_low: 0.
  depth_threshold_high: 199.
  altitude_threshold: 0.
  terrain_slope_threshold: 1.
  forestry_ratio_threshold: 1.
  latitude_threshold: 70.
  distance_threshold_min: 22.2
  distance_threshold_max: 222.0  # 111.
  legacy_min: 0.1
wind_floating:
  where: 'offshore'
  filters: ['resource_quality', 'bathymetry', 'latitude', 'distance']
  converter_IV: 'V90'
  converter_III: 'V90'
  converter_II: 'V164'
  converter_I: 'V164'
  resource: 'wind'
  deployment: 'floating'
  resource_threshold: 9.  # m/s
  population_density_threshold_low: 0.
  population_density_threshold_high: 100.
  protected_areas_selection: ['Ia', 'Ib', 'II', 'V']
  protected_areas_distance_threshold: 5.
  depth_threshold_low: 200.
  depth_threshold_high: 990.
  altitude_threshold: 0.
  terrain_slope_threshold: 1.
  forestry_ratio_threshold: 1.
  latitude_threshold: 65.
  distance_threshold_min: 23.
  distance_threshold_max: 180.
pv_utility:
  where: 'onshore'
  filters: ['resource_quality', 'population_density', 'orography', 'forestry', 'water_mask', 'latitude']
  converter: 'DEG15MC'
  resource: 'solar'
  deployment: 'utility'
  resource_threshold: 130.  # W/m2
  population_density_threshold_low: 0.
  population_density_threshold_high: 100.
  protected_areas_selection: ['Ia', 'Ib', 'II', 'V']
  protected_areas_distance_threshold: 5.
  depth_threshold_low: 0.
  depth_threshold_high: 0.
  altitude_threshold: 1500.
  terrain_slope_threshold: 0.03
  forestry_ratio_threshold: 0.8
  latitude_threshold: 65.
# TODO: adjust pv_residential filters
pv_residential:
  where: 'onshore'
  filters: ['population_density', 'water_mask']
  converter: 'DD06M'
  resource: 'solar'
  deployment: 'residential'
  resource_threshold: 100.  # W/m2
  population_density_threshold_low: 10.
  population_density_threshold_high: 999999.
  protected_areas_selection: ['Ia', 'Ib', 'II', 'V']
  protected_areas_distance_threshold: 1.
  depth_threshold_low: 0.
  depth_threshold_high: 0.
  altitude_threshold: 3000.
  terrain_slope_threshold: 1.
  forestry_ratio_threshold: 1.
  latitude_threshold: 65.
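
Each technology's filters list names the masks applied to candidate sites, and the thresholds below it parameterize them. The actual logic lives in return_filtered_coordinates (used in the main script below); what follows is only a toy numpy illustration of two such masks for wind_offshore, with invented data:

import numpy as np

mean_wind = np.array([4.8, 6.1, 7.3, 5.5])  # m/s, one value per candidate site
depth = np.array([35., 250., 120., 180.])   # m, positive water depths

resource_threshold = 5.0      # wind_offshore 'resource_quality' filter
depth_threshold_high = 199.0  # wind_offshore 'bathymetry' filter

mask = (mean_wind >= resource_threshold) & (depth <= depth_threshold_high)
print(np.flatnonzero(mask))  # [2 3]: site 0 fails on resource, site 1 on depth
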
@@ -539,37 +539,6 @@ def get_partition_index(input_dict):
     return index_dict


-def init_folder(parameters, no_locs, c, suffix=None):
-    """Initialize an output folder.
-
-    Parameters:
-    ------------
-    parameters : dict
-        Parameters dictionary.
-
-    Returns:
-    ------------
-    path : str
-        Relative path of the folder.
-    """
-    output_data_path = join(parameters['data_path'], 'output')
-
-    no_locs = str(no_locs)
-    no_part = str(len(parameters['regions']))
-    no_yrs = str(int(round((to_datetime(parameters['time_slice'][1]) -
-                            to_datetime(parameters['time_slice'][0])) / timedelta64(1, 'Y'), 0)))
-    c = str(c)
-
-    if not isdir(output_data_path):
-        makedirs(abspath(output_data_path))
-
-    path = abspath(output_data_path + '/' + no_yrs + 'y_n' + no_locs + '_k' + no_part + '_c' + c + suffix)
-    makedirs(path)
-
-    return path


 def generate_jl_input(deployment_dict, filtered_coordinates, site_positions, legacy_sites):
     concat_deployment_dict = concatenate_dict_keys(deployment_dict)
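
For reference, the removed init_folder helper encoded run metadata (years, number of sites, partitions, threshold) in the folder name. With the EU configuration above and, say, c = 47, it would have produced something like:

parameters = {'data_path': '/data/dcradu/resite_ip/',
              'regions': ['EU'],
              'time_slice': ['2010-01-01T00:00', '2019-12-31T23:00']}
path = init_folder(parameters, no_locs=468, c=47, suffix='_load_system_d1')
# -> /data/dcradu/resite_ip/output/10y_n468_k1_c47_load_system_d1

The suffix value here is hypothetical, mirroring the f-string the main script used to pass (_{alpha_method}_{alpha_coverage}_d{delta}). The commit replaces this scheme with timestamped output directories, as the diff below shows.
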
......
 import yaml
 import julia
-from os.path import join, isfile
-from julia import Main
+from os.path import join, isdir
+from os import makedirs
 from numpy import argmax, ceil, float64
-import argparse
 import pickle
+from time import strftime
 from copy import deepcopy
-from helpers import read_inputs, init_folder, xarray_to_ndarray, generate_jl_input, \
-    get_potential_per_site, capacity_to_cardinality
+from helpers import read_inputs, xarray_to_ndarray, generate_jl_input, get_potential_per_site, capacity_to_cardinality
 from tools import read_database, return_filtered_coordinates, selected_data, return_output, resource_quality_mapping, \
     critical_window_mapping, sites_position_mapping, retrieve_location_dict, retrieve_site_data
@@ -16,82 +17,29 @@ logging.basicConfig(level=logging.INFO, format=f"%(levelname)s %(asctime)s - %(m
 logging.disable(logging.CRITICAL)

 logger = logging.getLogger(__name__)


-def parse_args():
-    parser = argparse.ArgumentParser(description='Command line arguments.')
-    parser.add_argument('--k', type=str, default=None)
-    parser.add_argument('--c', type=float)
-    parser.add_argument('--alpha_method', type=str, default=None)
-    parser.add_argument('--alpha_coverage', type=str, default=None)
-    parser.add_argument('--delta', type=int, default=None)
-    parser.add_argument('--resampling_rate', type=str)
-    parser.add_argument('--maxdepth', type=str)
-    parsed_args = vars(parser.parse_args())
-    return parsed_args


 if __name__ == '__main__':

-    args = parse_args()
     logger.info('Starting data pre-processing.')

-    model_parameters = read_inputs(f"../config_model_{args['k']}.yml")
-    model_parameters['resampling_rate'] = args['resampling_rate']
+    model_parameters = read_inputs(f"../config_model.yml")
     siting_parameters = model_parameters['siting_params']
     tech_parameters = read_inputs('../config_techs.yml')

-    siting_parameters['alpha']['method'] = args['alpha_method']
-    siting_parameters['alpha']['coverage'] = args['alpha_coverage']
-    siting_parameters['delta'] = int(args['delta'])
-    siting_parameters['c'] = args['c']

     data_path = model_parameters['data_path']
     spatial_resolution = model_parameters['spatial_resolution']
     time_horizon = model_parameters['time_slice']

     database = read_database(data_path, spatial_resolution)
+    site_coordinates, legacy_coordinates = return_filtered_coordinates(database, model_parameters, tech_parameters)
+    truncated_data = selected_data(database, site_coordinates, time_horizon)
+    capacity_factors_data = return_output(truncated_data, data_path)

-    if isfile(join(data_path, f"input/capacity_factors_data_{args['k']}"
-                              f"_{args['resampling_rate']}h_{args['maxdepth']}m.p")):
-        capacity_factors_data = \
-            pickle.load(open(join(data_path,
-                                  f"input/capacity_factors_data_{args['k']}_{args['resampling_rate']}h_"
-                                  f"{args['maxdepth']}m.p"), 'rb'))
-        site_coordinates = \
-            pickle.load(open(join(data_path, f"input/site_coordinates_{args['k']}_{args['maxdepth']}m.p"), 'rb'))
-        legacy_coordinates = \
-            pickle.load(open(join(data_path, f"input/legacy_coordinates_{args['k']}_{args['maxdepth']}m.p"), 'rb'))
-        logger.info('Input files read from disk.')
-    else:
-        site_coordinates, legacy_coordinates = return_filtered_coordinates(database, model_parameters, tech_parameters)
-        truncated_data = selected_data(database, site_coordinates, time_horizon)
-        capacity_factors_data = return_output(truncated_data, data_path)
-        resampled_data = deepcopy(capacity_factors_data)
-        rate = model_parameters['resampling_rate']
-        for region in capacity_factors_data.keys():
-            for tech in capacity_factors_data[region].keys():
-                resampled_data[region][tech] = \
-                    capacity_factors_data[region][tech].resample(time=f"{rate}H").mean(dim='time')
-        pickle.dump(resampled_data,
-                    open(join(data_path, f"input/capacity_factors_data_{args['k']}_{args['resampling_rate']}h_"
-                              f"{args['maxdepth']}m.p"), 'wb'), protocol=4)
-        pickle.dump(site_coordinates,
-                    open(join(data_path, f"input/site_coordinates_{args['k']}_"
-                              f"{args['maxdepth']}m.p"), 'wb'), protocol=4)
-        pickle.dump(legacy_coordinates,
-                    open(join(data_path, f"input/legacy_coordinates_{args['k']}_"
-                              f"{args['maxdepth']}m.p"), 'wb'), protocol=4)
-        logger.info('Input files written to disk.')

+    resampled_data = deepcopy(capacity_factors_data)
+    rate = model_parameters['resampling_rate']
+    for region in capacity_factors_data.keys():
+        for tech in capacity_factors_data[region].keys():
+            resampled_data[region][tech] = \
+                capacity_factors_data[region][tech].resample(time=f"{rate}H").mean(dim='time')

     time_windows_data = resource_quality_mapping(capacity_factors_data, siting_parameters)
     site_positions = sites_position_mapping(time_windows_data)
@@ -105,29 +53,27 @@ if __name__ == '__main__':
     total_no_locs = sum(deployment_dict[r][t] for r in deployment_dict.keys() for t in deployment_dict[r].keys())
     c = int(ceil(siting_parameters['c'] * total_no_locs))
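
With the EU configuration above (c: 0.1, deployments: [[468]]), this evaluates to c = ceil(0.1 * 468) = 47, the integer threshold handed to the Julia heuristic below.
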
-    output_folder = init_folder(model_parameters, total_no_locs, c,
-                                suffix=f"_{args['alpha_method']}_{args['alpha_coverage']}_d{args['delta']}")
+    output_dir = join(data_path, f"output/{strftime('%Y%m%d_%H%M%S')}/")
+    if not isdir(output_dir):
+        makedirs(output_dir)

+    with open(join(output_dir, 'config_model.yaml'), 'w') as outfile:
+        yaml.dump(model_parameters, outfile, default_flow_style=False, sort_keys=False)
+    with open(join(output_dir, 'config_techs.yaml'), 'w') as outfile:
+        yaml.dump(tech_parameters, outfile, default_flow_style=False, sort_keys=False)

     logger.info('Data pre-processing finished. Opening Julia instance.')

     j = julia.Julia(compiled_modules=False)
     from julia import Main
     Main.include("jl/SitingHeuristics.jl")

     params = siting_parameters['solution_method']
-    jl_sel, jl_obj, jl_tra = Main.main_SA(jl_dict['index_dict'],
-                                          jl_dict['deployment_dict'],
-                                          jl_dict['legacy_site_list'],
-                                          criticality_data.astype('float64'), float64(c),
-                                          params['neighborhood'], params['initial_temp'],
-                                          params['no_iterations'], params['no_epochs'], params['no_runs'])
-
-    with open(join(output_folder, 'config_model.yaml'), 'w') as outfile:
-        yaml.dump(model_parameters, outfile, default_flow_style=False, sort_keys=False)
-    with open(join(output_folder, 'config_techs.yaml'), 'w') as outfile:
-        yaml.dump(tech_parameters, outfile, default_flow_style=False, sort_keys=False)
+    jl_sel, jl_obj, _ = Main.main_SA(jl_dict['index_dict'],
+                                     jl_dict['deployment_dict'],
+                                     jl_dict['legacy_site_list'],
+                                     criticality_data.astype('float64'), float64(c),
+                                     params['neighborhood'], params['initial_temp'],
+                                     params['no_iterations'], params['no_epochs'], params['no_runs'])

     logger.info('Siting heuristics done. Writing results to disk.')

     jl_objective_pick = argmax(jl_obj)
@@ -135,10 +81,9 @@ if __name__ == '__main__':
     locations_dict = retrieve_location_dict(jl_locations_vector, model_parameters, site_positions)
     retrieve_site_data(model_parameters, capacity_factors_data, criticality_data, deployment_dict,
-                       site_positions, locations_dict, legacy_coordinates, output_folder, benchmark="PROD")
+                       site_positions, locations_dict, legacy_coordinates, output_dir, benchmark="PROD")

-    pickle.dump(jl_sel, open(join(output_folder, 'solution_matrix.p'), 'wb'))
-    pickle.dump(jl_obj, open(join(output_folder, 'objective_vector.p'), 'wb'))
-    pickle.dump(jl_tra, open(join(output_folder, 'trajectory_matrix.p'), 'wb'))
+    pickle.dump(jl_sel, open(join(output_dir, 'solution_matrix.p'), 'wb'))
+    pickle.dump(jl_obj, open(join(output_dir, 'objective_vector.p'), 'wb'))

-    logger.info(f"Results written to {output_folder}")
+    logger.info(f"Results written to {output_dir}")
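
The timestamped run directory then holds the dumped configs plus the solution and objective pickles. A minimal sketch for loading a finished run back (the folder name here is hypothetical):

import pickle
from os.path import join
from numpy import argmax

output_dir = '/data/dcradu/resite_ip/output/20210101_120000/'  # hypothetical run folder
jl_sel = pickle.load(open(join(output_dir, 'solution_matrix.p'), 'rb'))
jl_obj = pickle.load(open(join(output_dir, 'objective_vector.p'), 'rb'))
print(argmax(jl_obj))  # index of the best of the no_runs restarts, as in the script above
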