Commit 3c6c861e authored by David Radu

fixed read and write bug, verbose data read

parent bbc50c03
 # Path to data folder
-#data_path: 'D:/ULg_PhD_work/datasets/resite_ip/'
-data_path: '/data/dcradu/resite_ip/'
+data_path: 'D:/ULg_PhD_work/datasets/resite_ip/'
+#data_path: '/data/dcradu/resite_ip/'
 # Spatial resolution (in degrees) of the potential sites.
 spatial_resolution: 0.28
 # Start time and end time of the analysis.
-time_slice: ['2014-01-01T00:00', '2018-12-31T23:00']
+time_slice: ['2014-01-01T00:00', '2014-01-07T23:00']
 # Technologies to deploy.
-regions: ['FR']
+regions: ['FR', 'ES']
 technologies: ['wind_onshore']
-deployments: [[19]]
+deployments: [[19], [12]]
 siting_params:
   smooth_measure: 'mean'
......
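For context, the outer list in 'deployments' lines up entry-by-entry with 'regions', and each inner list with 'technologies', so the new values request 19 onshore wind sites in France and 12 in Spain. A minimal sketch of that pairing (the file name 'config_model.yml' and the zip-based loop are illustrative assumptions, not the repository's actual loader):

import yaml

# Hypothetical config file name, used here only for illustration.
with open('config_model.yml') as f:
    params = yaml.safe_load(f)

# The outer list of 'deployments' pairs with 'regions', the inner list with 'technologies'.
for region, counts in zip(params['regions'], params['deployments']):
    for tech, n in zip(params['technologies'], counts):
        print(f"{region}: {n} {tech} sites")
# With the values above: FR: 19 wind_onshore sites, ES: 12 wind_onshore sites.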
 import pickle
 import yaml
 from os.path import join, isfile
-from numpy import array, argsort
+from numpy import array, argsort, sum
 from pyomo.opt import SolverFactory
 import time
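A plausible reason for the widened numpy import (an inference from this diff, not something the commit states): the new verbose log below computes d = sum(model_parameters['deployments']), and with the nested per-region lists from the config the built-in sum() cannot add them, while numpy's sum() flattens the nesting first. A minimal sketch:

from numpy import sum as np_sum   # aliased here only to contrast with the built-in

deployments = [[19], [12]]        # per-region, per-technology counts from the config
print(np_sum(deployments))        # 31, the total number of sites to deploy
# The built-in sum(deployments) would raise TypeError: unsupported operand type(s) for +: 'int' and 'list'.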
......
@@ -24,13 +24,20 @@ if __name__ == '__main__':
         model_parameters['technologies'],
         model_parameters['deployments'])
-    if isfile(join(data_path, 'input_data/criticality_matrix.p')):
+    if isfile(join(data_path, 'input/criticality_matrix.p')):
-        custom_log(' WARNING! Instance data read from files. Make sure the files are the ones that you need.')
-        criticality_data = pickle.load(open(join(data_path, 'input_data/criticality_matrix.p', 'rb')))
-        site_coordinates = pickle.load(open(join(data_path, 'input_data/site_coordinates.p', 'rb')))
-        capacity_factors_data = pickle.load(open(join(data_path, 'input_data/capacity_factors_data.p', 'rb')))
-        site_positions = pickle.load(open(join(data_path, 'input_data/site_positions.p', 'rb')))
+        custom_log(' WARNING! Instance data read from files.')
+        criticality_data = pickle.load(open(join(data_path, 'input/criticality_matrix.p'), 'rb'))
+        site_coordinates = pickle.load(open(join(data_path, 'input/site_coordinates.p'), 'rb'))
+        capacity_factors_data = pickle.load(open(join(data_path, 'input/capacity_factors_data.p'), 'rb'))
+        site_positions = pickle.load(open(join(data_path, 'input/site_positions.p'), 'rb'))
+        r = list(site_coordinates.keys())
+        d = sum(model_parameters['deployments'])
+        t = model_parameters['technologies']
+        ts = len(capacity_factors_data[list(site_coordinates.keys())[0]][model_parameters['technologies'][0]].time)
+        custom_log(f" Reading data for a model with a spatial resolution of {float(spatial_resolution)}, "
+                   f"covering {r}, siting {d} {t} sites and {ts} time steps.")
     else:
......
@@ -44,10 +51,10 @@ if __name__ == '__main__':
         criticality_data = xarray_to_ndarray(critical_window_mapping(time_windows_data, model_parameters))
         site_positions = sites_position_mapping(time_windows_data)
-        pickle.dump(criticality_data, open(join(data_path, 'input_data/criticality_matrix.p', 'wb')), protocol=4)
-        pickle.dump(site_coordinates, open(join(data_path, 'input_data/site_coordinates.p', 'wb')), protocol=4)
-        pickle.dump(capacity_factors_data, open(join(data_path, 'input_data/capacity_factors_data.p', 'wb')), protocol=4)
-        pickle.dump(site_positions, open(join(data_path, 'input_data/site_positions.p', 'wb')), protocol=4)
+        pickle.dump(criticality_data, open(join(data_path, 'input/criticality_matrix.p'), 'wb'), protocol=4)
+        pickle.dump(site_coordinates, open(join(data_path, 'input/site_coordinates.p'), 'wb'), protocol=4)
+        pickle.dump(capacity_factors_data, open(join(data_path, 'input/capacity_factors_data.p'), 'wb'), protocol=4)
+        pickle.dump(site_positions, open(join(data_path, 'input/site_positions.p'), 'wb'), protocol=4)
     custom_log(' Data read. Building model.')
......
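The read-and-write bug fixed above, shown in isolation: in the old lines the file mode ('rb' or 'wb') was passed as a third argument to os.path.join() instead of to open(), so it became a spurious extra path component and open() fell back to its default text mode; the commit also moves the pickled inputs from input_data/ to input/. A minimal before/after sketch (the data_path value is taken from the config above):

from os.path import join
import pickle

data_path = '/data/dcradu/resite_ip/'

# Before: join() yields '.../criticality_matrix.p/rb', a path that does not exist,
# and open() receives no mode argument, so the call fails with FileNotFoundError.
# criticality_data = pickle.load(open(join(data_path, 'input_data/criticality_matrix.p', 'rb')))

# After: the mode goes to open(), the path stays intact, and the pickle loads in binary mode.
criticality_data = pickle.load(open(join(data_path, 'input/criticality_matrix.p'), 'rb'))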