Commit 94fccd0d authored by David Radu

init commit

parent 66c2dbb2
# Path to data folder
-#data_path: 'D:/ULg_PhD_work/datasets/resite_ip/'
-data_path: '/data/dcradu/resite_ip/'
+data_path: 'D:/ULg_PhD_work/datasets/resite_ip/'
+#data_path: '/data/dcradu/resite_ip/'
# Spatial resolution (in degrees) of the potential sites.
spatial_resolution: 0.25
# Start time and end time of the analysis.
-time_slice: ['2010-01-01T00:00', '2019-12-31T23:00']
+time_slice: ['2018-01-01T00:00', '2018-01-01T23:00']
# Technologies to deploy.
regions: ['GB', 'NL', 'FR', 'DE', 'DK', 'NO', 'PL', 'IE', 'IT', 'SE', 'FI', 'ES', 'GR', 'PT', 'BE', 'LT', 'LV', 'EE', 'HR']
-technologies: ['wind_offshore']
-deployments: [[80], [60], [57], [36], [35], [30], [28], [22], [20], [20], [15], [13], [10], [9], [6], [4], [3], [1], [1]]
+technologies: ['wind_offshore', 'wind_onshore']
+deployments: [[80, 15], [60, 8], [57, 36], [36, 70], [35, 5], [30, 10], [28, 11], [22, 6], [20, 14], [20, 12], [15, 5],
+              [13, 35], [10, 6], [9, 7], [6, 4], [4, 1], [3, 1], [1, 1], [1, 2]]
load_coverage: 0.5
siting_params:
# Defines how \alpha is considered in space and time.
@@ -21,7 +23,7 @@ siting_params:
# Time-window length used to compute the criticality indicator. Integer value.
delta: 1 # \in \mathbb{N}
# Threshold
-c: 0.1 # \in [0, 1]
+c: 0.25 # \in [0, 1]
# Solution method: BB or HEU or RAND or GRED.
solution_method:
neighborhood: 1
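The c value modified in this hunk is a fractional threshold (see the comment above it); the pre-processing script further down converts it into an integer site count via c = int(ceil(siting_parameters['c'] * total_no_locs)). A small illustrative calculation in plain Python, not part of the repository, using the deployment figures from the two variants of this file:

from math import ceil

# Deployment totals implied by the two deployment lists above.
offshore_total = sum([80, 60, 57, 36, 35, 30, 28, 22, 20, 20, 15, 13, 10, 9, 6, 4, 3, 1, 1])
combined_total = offshore_total + sum([15, 8, 36, 70, 5, 10, 11, 6, 14, 12, 5, 35, 6, 7, 4, 1, 1, 1, 2])

print(offshore_total, combined_total)      # 450 699
print(int(ceil(0.10 * offshore_total)))    # 45 with c = 0.10 and the offshore-only deployments
print(int(ceil(0.25 * combined_total)))    # 175 with c = 0.25 and the offshore + onshore deployments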
# Path to data folder
#data_path: 'D:/ULg_PhD_work/datasets/resite_ip/'
data_path: '/data/dcradu/resite_ip/'
# Spatial resolution (in degrees) of the potential sites.
spatial_resolution: 0.25
# Start time and end time of the analysis.
time_slice: ['2010-01-01T00:00', '2019-12-31T23:00']
resampling_rate: 3
# Technologies to deploy.
regions: ['GB', 'NL', 'FR', 'DE', 'DK', 'NO', 'PL', 'IE', 'IT', 'SE', 'FI', 'ES', 'GR', 'PT', 'BE', 'LT', 'LV', 'EE', 'HR']
technologies: ['wind_offshore']
deployments: [[80], [60], [57], [36], [35], [30], [28], [22], [20], [20], [15], [13], [10], [9], [6], [4], [3], [1], [1]]
load_coverage: 0.3
siting_params:
# Defines how \alpha is considered in space and time.
alpha:
method: 'load' # 'potentials'
coverage: 'system' # 'partition'
smoothing: 'mean' # 'median'
norm: 'min' # 'max'
# Time-window length used to compute the criticality indicator. Integer value.
delta: 1 # \in \mathbb{N}
# Threshold
c: 0.1 # \in [0, 1]
# Solution method: BB or HEU or RAND or GRED.
solution_method:
neighborhood: 1
no_iterations: 5000
no_epochs: 500
initial_temp: 100.
no_runs: 30
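Configuration files such as the one above are read by the pre-processing script through read_inputs (the script below loads ../config_model.yml). A minimal sketch of such a loader, assuming it is essentially a thin wrapper around yaml.safe_load; the repository's actual helper may do more:

import yaml

def read_inputs(path):
    # Assumed stand-in for the repository's read_inputs helper: parse a YAML file into a dict.
    with open(path, 'r') as f:
        return yaml.safe_load(f)

# Usage, assuming the file above is saved next to the script:
params = read_inputs('config_model.yml')
print(params['spatial_resolution'], params['load_coverage'])  # 0.25 0.3 for the file above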
# Path to data folder
#data_path: 'D:/ULg_PhD_work/datasets/resite_ip/'
data_path: '/data/dcradu/resite_ip/'
# Spatial resolution (in degrees) of the potential sites.
spatial_resolution: 0.25
# Start time and end time of the analysis.
time_slice: ['2010-01-01T00:00', '2019-12-31T23:00']
resampling_rate: 3
# Technologies to deploy.
regions: ['EU']
technologies: ['wind_offshore']
deployments: [[468]]
load_coverage: 0.3
siting_params:
# Defines how \alpha is considered in space and time.
alpha:
method: 'load' # 'potentials'
coverage: 'system' # 'partition'
smoothing: 'mean' # 'median'
norm: 'min' # 'max'
# Time-window length used to compute the criticality indicator. Integer value.
delta: 1 # \in \mathbb{N}
# Threshold
c: 0.1 # \in [0, 1]
# Solution method: BB or HEU or RAND or GRED.
solution_method:
neighborhood: 1
no_iterations: 5000
no_epochs: 500
initial_temp: 100.
no_runs: 30
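In both model configurations, deployments is a nested list aligned with regions and technologies: one inner list per region, one value per technology (19 x 1 in the country-level file, 1 x 1 in the EU-wide file above). A small, hypothetical consistency check, not part of the repository, that makes the expected shape explicit:

def check_deployment_shape(params):
    # Hypothetical helper: one deployment list per region, one figure per technology.
    regions, technologies, deployments = params['regions'], params['technologies'], params['deployments']
    if len(deployments) != len(regions):
        raise ValueError('expected one deployment list per region')
    if any(len(entry) != len(technologies) for entry in deployments):
        raise ValueError('expected one deployment figure per technology in every region')

check_deployment_shape({'regions': ['EU'], 'technologies': ['wind_offshore'], 'deployments': [[468]]})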
# Config file for various conversion technologies.
wind_onshore:
where: 'onshore'
-filters: ['resource_quality', 'population_density', 'orography', 'forestry', 'water_mask', 'latitude']
+filters: ['population_density', 'orography', 'forestry', 'water_mask', 'latitude', 'legacy']
converter_IV: 'V110'
converter_III: 'E103'
converter_II: 'V90'
@@ -21,7 +21,7 @@ wind_onshore:
latitude_threshold: 65.
legacy_min: 0.1
power_density: 5. # MW/sqkm
-land_utilization_factor: 0.5
+land_utilization_factor: 0.3
wind_offshore:
where: 'offshore'
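For wind_onshore, power_density (MW per square km) and land_utilization_factor together bound the capacity a grid cell can host; the actual computation happens in get_potential_per_site in the pre-processing script below. A rough, back-of-the-envelope sketch, assuming the bound is simply density x usable fraction x cell area (the repository's formula may apply further filters and technology-specific corrections):

from math import cos, radians

def rough_cell_potential_mw(lat_deg, resolution_deg=0.25, power_density=5.0, land_utilization_factor=0.3):
    # Illustrative only: approximate the area of a resolution_deg x resolution_deg cell
    # and scale it by the deployable power density and the usable land fraction.
    km_per_deg_lat = 111.0
    km_per_deg_lon = 111.0 * cos(radians(lat_deg))
    cell_area_km2 = (resolution_deg * km_per_deg_lat) * (resolution_deg * km_per_deg_lon)
    return cell_area_km2 * power_density * land_utilization_factor

print(round(rough_cell_potential_mw(50.0)))  # ~740 MW for a 0.25-degree cell at 50 N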
import yaml
import julia
from os.path import join, isfile
from os.path import join
from numpy import argmax, ceil, float64
import argparse
import pickle
from copy import deepcopy
@@ -17,86 +16,37 @@ logging.disable(logging.CRITICAL)
logger = logging.getLogger(__name__)
def parse_args():
parser = argparse.ArgumentParser(description='Command line arguments.')
parser.add_argument('--k', type=str, default=None)
parser.add_argument('--c', type=float)
parser.add_argument('--alpha_method', type=str, default=None)
parser.add_argument('--alpha_coverage', type=str, default=None)
parser.add_argument('--delta', type=int, default=None)
parser.add_argument('--resampling_rate', type=str)
parser.add_argument('--maxdepth', type=str)
parsed_args = vars(parser.parse_args())
return parsed_args
if __name__ == '__main__':
args = parse_args()
logger.info('Starting data pre-processing.')
model_parameters = read_inputs(f"../config_model_{args['k']}.yml")
model_parameters['resampling_rate'] = args['resampling_rate']
model_parameters = read_inputs(f"../config_model.yml")
model_parameters['resampling_rate'] = 1
siting_parameters = model_parameters['siting_params']
tech_parameters = read_inputs('../config_techs.yml')
siting_parameters['alpha']['method'] = args['alpha_method']
siting_parameters['alpha']['coverage'] = args['alpha_coverage']
siting_parameters['delta'] = int(args['delta'])
siting_parameters['c'] = args['c']
data_path = model_parameters['data_path']
spatial_resolution = model_parameters['spatial_resolution']
time_horizon = model_parameters['time_slice']
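# Load the underlying resource database at the configured spatial resolution; it is
# filtered to candidate sites and truncated to the requested time slice further below.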
database = read_database(data_path, spatial_resolution)
if isfile(join(data_path, f"input/capacity_factors_data_{args['k']}"
f"_{args['resampling_rate']}h_{args['maxdepth']}m.p")):
capacity_factors_data = \
pickle.load(open(join(data_path,
f"input/capacity_factors_data_{args['k']}_{args['resampling_rate']}h_"
f"{args['maxdepth']}m.p"), 'rb'))
site_coordinates = \
pickle.load(open(join(data_path, f"input/site_coordinates_{args['k']}_{args['maxdepth']}m.p"), 'rb'))
legacy_coordinates = \
pickle.load(open(join(data_path, f"input/legacy_coordinates_{args['k']}_{args['maxdepth']}m.p"), 'rb'))
logger.info('Input files read from disk.')
else:
site_coordinates, legacy_coordinates = return_filtered_coordinates(database, model_parameters, tech_parameters)
truncated_data = selected_data(database, site_coordinates, time_horizon)
capacity_factors_data = return_output(truncated_data, data_path)
resampled_data = deepcopy(capacity_factors_data)
rate = model_parameters['resampling_rate']
for region in capacity_factors_data.keys():
for tech in capacity_factors_data[region].keys():
resampled_data[region][tech] = \
capacity_factors_data[region][tech].resample(time=f"{rate}H").mean(dim='time')
pickle.dump(resampled_data,
open(join(data_path, f"input/capacity_factors_data_{args['k']}_{args['resampling_rate']}h_"
f"{args['maxdepth']}m.p"), 'wb'), protocol=4)
pickle.dump(site_coordinates,
open(join(data_path, f"input/site_coordinates_{args['k']}_"
f"{args['maxdepth']}m.p"), 'wb'), protocol=4)
pickle.dump(legacy_coordinates,
open(join(data_path, f"input/legacy_coordinates_{args['k']}_"
f"{args['maxdepth']}m.p"), 'wb'), protocol=4)
logger.info('Input files written to disk.')
site_coordinates, legacy_coordinates = return_filtered_coordinates(database, model_parameters, tech_parameters)
truncated_data = selected_data(database, site_coordinates, time_horizon)
capacity_factors_data = return_output(truncated_data, data_path)
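# Down-sample the hourly capacity factor series by averaging over rate-hour blocks
# (rate = model_parameters['resampling_rate']).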
resampled_data = deepcopy(capacity_factors_data)
rate = model_parameters['resampling_rate']
for region in capacity_factors_data.keys():
for tech in capacity_factors_data[region].keys():
resampled_data[region][tech] = \
capacity_factors_data[region][tech].resample(time=f"{rate}H").mean(dim='time')
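# Build the siting problem inputs: time-window resource quality (window length delta),
# site index positions, per-region site counts derived from the capacity targets, per-site
# potentials, and the criticality array used by the optimisation stage. Note that
# resource_quality_mapping is fed capacity_factors_data (hourly), not resampled_data.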
time_windows_data = resource_quality_mapping(capacity_factors_data, siting_parameters)
site_positions = sites_position_mapping(time_windows_data)
deployment_dict = capacity_to_cardinality(database, model_parameters, tech_parameters, site_coordinates,
legacy_coordinates)
site_potential_data = get_potential_per_site(time_windows_data, tech_parameters, spatial_resolution)
criticality_data = xarray_to_ndarray(critical_window_mapping(time_windows_data, site_potential_data,
deployment_dict, model_parameters))
@@ -105,8 +55,7 @@ if __name__ == '__main__':
total_no_locs = sum(deployment_dict[r][t] for r in deployment_dict.keys() for t in deployment_dict[r].keys())
c = int(ceil(siting_parameters['c'] * total_no_locs))
output_folder = init_folder(model_parameters, total_no_locs, c,
suffix=f"_{args['alpha_method']}_{args['alpha_coverage']}_d{args['delta']}")
output_folder = init_folder(model_parameters, total_no_locs, c, suffix='_test')
logger.info('Data pre-processing finished. Opening Julia instance.')