Commit 72206444 authored by David Radu

testing candidate sites selection

parent ad3faa13
@@ -3,16 +3,13 @@
 data_path: '/data/dcradu/resite_ip/'
 # Spatial resolution (in degrees) of the potential sites.
-spatial_resolution: 0.28
+spatial_resolution: 0.25
 # Start time and end time of the analysis.
-time_slice: ['2016-01-01T00:00', '2018-12-31T23:00']
+time_slice: ['2018-01-01T00:00', '2018-01-31T23:00']
 # Technologies to deploy.
-regions: ['GB', 'NL', 'FR', 'DE', 'DK', 'NO', 'PL', 'IE', 'IT', 'SE', 'FI', 'ES', 'GR', 'PT', 'BE', 'LT', 'LV', 'EE']
+regions: ['GB', 'NL', 'FR', 'DE', 'DK', 'NO', 'PL', 'IE', 'IT', 'SE', 'FI', 'ES', 'GR', 'PT', 'BE', 'LT', 'LV', 'EE', 'HR']
 technologies: ['wind_offshore']
-deployments: [[48], [36], [35], [22], [21], [18], [17], [14], [12], [12], [9], [8], [6], [6], [3], [3], [2], [1]]
-# deployments: [[21], [16], [15], [10], [9], [8], [8], [6], [6], [6], [4], [4], [3], [3], [2], [2], [1], [1]]
-# deployments: [[14], [10], [10], [6], [6], [5], [5], [4], [4], [4], [3], [3], [2], [2], [1], [1], [1], [1]]
-# deployments: [[10], [8], [7], [5], [5], [4], [4], [3], [3], [3], [2], [2], [2], [2], [1], [1], [1], [1]]
+deployments: [[31], [23], [22], [14], [14], [12], [11], [9], [8], [8], [6], [5], [4], [4], [3], [2], [2], [1], [1]]
 siting_params:
   smooth_measure: 'mean'
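Note: the new deployments vector has 19 entries to match the 19 regions now that 'HR' is included. A minimal sanity check of that invariant, assuming the config above is stored in a YAML file and parsed with PyYAML (the file name config_model.yml is a placeholder):

import yaml

with open('config_model.yml') as f:          # placeholder file name
    cfg = yaml.safe_load(f)

# One deployment count per region for the single technology above.
assert len(cfg['deployments']) == len(cfg['regions']), \
    f"{len(cfg['deployments'])} deployment entries vs {len(cfg['regions'])} regions"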
@@ -35,13 +35,13 @@ wind_offshore:
   protected_areas_selection: ['Ia', 'Ib', 'II', 'V']
   protected_areas_distance_threshold: 5.
   depth_threshold_low: 0.
-  depth_threshold_high: 999.
+  depth_threshold_high: 99.
   altitude_threshold: 0.
   terrain_slope_threshold: 1.
   forestry_ratio_threshold: 1.
-  latitude_threshold: 70.
+  latitude_threshold: 65.
   distance_threshold_min: 22.2
-  distance_threshold_max: 222.2 # 111.
+  distance_threshold_max: 111.0 # 111.
 wind_floating:
   where: 'offshore'
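Note: the offshore siting thresholds are tightened here (maximum depth 99 m, maximum latitude 65 degrees, maximum shore distance 111 km). An illustrative sketch of how such thresholds translate into a candidate-site mask, using hypothetical per-site arrays (the repo derives these from bathymetry and coastline datasets):

import numpy as np

depth = np.array([12., 45., 250., 80.])      # water depth at each site (m)
distance = np.array([30., 15., 120., 60.])   # distance to shore (km)
latitude = np.array([54., 61., 68., 47.])    # site latitude (degrees)

keep = ((depth >= 0.) & (depth <= 99.) &
        (distance >= 22.2) & (distance <= 111.0) &
        (latitude <= 65.))
print(keep)  # -> [ True False False  True]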
@@ -346,9 +346,9 @@ def return_coordinates_from_shapefiles(resource_dataset, shapefiles_region):
 def retrieve_load_data_partitions(data_path, date_slice, alpha, delta, regions, norm_type):
-    load_data_fn = join(data_path, 'input/load_data', 'load_2009_2018.csv')
+    load_data_fn = join(data_path, 'input/load_data', 'load_entsoe_2006_2020_full.csv')
     load_data = read_csv(load_data_fn, index_col=0)
-    load_data.index = date_range('2009-01-01T00:00', '2018-12-31T23:00', freq='H')
+    load_data.index = to_datetime(load_data.index)
     load_data_sliced = load_data.loc[date_slice[0]:date_slice[1]]
     regions_list = return_region_divisions(regions, data_path)
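Note: parsing the stored timestamps with to_datetime lets the index follow whatever span the load file actually covers (the new CSV spans 2006-2020 rather than the hard-coded 2009-2018 range). A self-contained illustration of the same pattern on a toy frame (the 'DE' column and the values are made up):

import pandas as pd

# Toy load table with timestamps stored as strings in the index column,
# standing in for load_entsoe_2006_2020_full.csv.
load_data = pd.DataFrame({'DE': [40.1, 41.3, 39.8]},
                         index=['2018-01-01T00:00', '2018-01-01T01:00', '2018-01-01T02:00'])
load_data.index = pd.to_datetime(load_data.index)   # parse instead of hard-coding a date_range
load_data_sliced = load_data.loc['2018-01-01T00:00':'2018-01-01T01:00']
print(load_data_sliced)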
@@ -35,6 +35,11 @@ if __name__ == '__main__':
     criticality_data = xarray_to_ndarray(critical_window_mapping(time_windows_data, model_parameters))
     site_positions = sites_position_mapping(time_windows_data)
+    import pickle
+    pickle.dump(site_coordinates, open(join(data_path, 'input/site_coordinates.p'), 'wb'), protocol=4)
+    import sys
+    sys.exit()
     custom_log(' Data read. Building model.')
     if siting_parameters['solution_method']['BB']['set']:
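Note: the added lines dump the candidate site coordinates to a pickle and stop the run right after pre-processing, which matches the commit message about testing candidate site selection. One way to inspect the dump in a separate session (the structure of site_coordinates depends on the repo's mapping functions, so the dict handling below is an assumption):

import pickle
from os.path import join

data_path = '/data/dcradu/resite_ip/'   # as in the config above
with open(join(data_path, 'input/site_coordinates.p'), 'rb') as f:
    site_coordinates = pickle.load(f)

print(type(site_coordinates))
if isinstance(site_coordinates, dict):   # assumed layout: one entry per technology/region
    for key, coords in site_coordinates.items():
        print(key, len(coords), 'candidate sites')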
@@ -141,6 +141,7 @@ def filter_locations_by_layer(regions, start_coordinates, model_params, tech_par
     elif which == 'resource_quality':
         database = read_database(data_path, model_params['spatial_resolution'])
+        print(database)
         if tech_params['resource'] == 'wind':
             array_resource = xu.sqrt(database.u100 ** 2 +
@@ -148,7 +149,7 @@
         elif tech_params['resource'] == 'solar':
             array_resource = database.ssrd / 3600.
         else:
-            raise ValueError (" This resource is not available.")
+            raise ValueError(" This resource is not available.")
         array_resource_mean = array_resource.mean(dim='time')
         mask_resource = array_resource_mean.where(array_resource_mean.data < tech_params['resource_threshold'])
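Note: in the resource-quality branch shown above, the wind resource is the 100 m wind speed magnitude sqrt(u100**2 + v100**2) (the second term is cut off in the hunk but is presumably the v100 component), averaged over time and compared against resource_threshold. A self-contained sketch of the same filter on a toy xarray dataset (the real code reads the ERA5 database via read_database; np.sqrt stands in for the xu.sqrt call shown in the diff):

import numpy as np
import xarray as xr

rng = np.random.default_rng(0)
coords = {'time': np.arange(24), 'locations': ['site_a', 'site_b']}
u100 = xr.DataArray(rng.uniform(0, 10, (24, 2)), dims=('time', 'locations'), coords=coords)
v100 = xr.DataArray(rng.uniform(0, 10, (24, 2)), dims=('time', 'locations'), coords=coords)

wind_speed = np.sqrt(u100 ** 2 + v100 ** 2)          # wind speed magnitude at 100 m
mean_speed = wind_speed.mean(dim='time')
resource_threshold = 5.0                              # illustrative value
# Locations whose mean resource falls below the threshold are the ones filtered out.
mask_resource = mean_speed.where(mean_speed.data < resource_threshold)
print(mask_resource)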