diff --git a/src/era5data.py b/src/era5data.py
index 883255604834891505e493a5f422efcdefac0260..a1c002e889a212f5011db9888469af550931c4d8 100644
--- a/src/era5data.py
+++ b/src/era5data.py
@@ -12,7 +12,7 @@
 regions = {'EU': '75/-20/30/40'}
 years = ['2017', '2018']
 months = ['01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12']
 
-directory = join(data_path, 'input_data/resource_data/', str(spatial_resolution))
+directory = join(data_path, 'input/resource_data/', str(spatial_resolution))
 if not os.path.exists(directory):
     os.makedirs(directory)
@@ -22,9 +22,9 @@ for region, coords in regions.items():
         for month in months:
             c.retrieve(
                 'reanalysis-era5-single-levels',
-                {'variable':['100m_u_component_of_wind','100m_v_component_of_wind',
-                             '2m_temperature', 'surface_solar_radiation_downwards', 'forecast_surface_roughness'],
-                 'product_type':'reanalysis',
+                {'variable': ['100m_u_component_of_wind','100m_v_component_of_wind',
+                              '2m_temperature', 'surface_solar_radiation_downwards', 'forecast_surface_roughness'],
+                 'product_type': 'reanalysis',
                  'area': str(coords),
                  'grid': str(spatial_resolution)+'/'+str(spatial_resolution),
                  'year': year,
diff --git a/src/helpers.py b/src/helpers.py
index 6980055126c275d9d7cdf4237b8fb74d0e085da3..4dcd401e7168cebfaa54196a9777c0ffff756cf6 100644
--- a/src/helpers.py
+++ b/src/helpers.py
@@ -143,18 +143,23 @@ def get_deployment_vector(regions, technologies, deployments):
 
 
 def return_region_divisions(region_list, data_path):
 
-    onshore_shapes = get_onshore_shapes(region_list, data_path)
-    region_subdivisions = None
+    arcgis_fn = f"{data_path}input/shapefiles/Longitude_Graticules_and_World_Countries_Boundaries-shp/" \
+                f"99bfd9e7-bb42-4728-87b5-07f8c8ac631c2020328-1-1vef4ev.lu5nk.shp"
+    shapes = read_file(arcgis_fn)
+    shapes["CNTRY_NAME"] = shapes["CNTRY_NAME"].apply(convert_old_country_names)
+    shapes["iso2"] = Series(convert_country_codes(shapes["CNTRY_NAME"].values, "name", "alpha_2"))
+    shapes = shapes[notnull(shapes["iso2"])]
+    shapes = shapes.set_index("iso2")['geometry']
 
     regions = []
     for region in region_list:
         if region == 'EU':
             region_subdivisions = ['AT', 'BE', 'DE', 'DK', 'ES',
-                                   'FR', 'UK', 'IE', 'IT', 'LU',
+                                   'FR', 'GB', 'IE', 'IT', 'LU',
                                    'NL', 'NO', 'PT', 'SE', 'CH', 'CZ',
-                                   'EE', 'LV', 'RO', 'BG', 'EL', 'HR', 'RS',
-                                   'FI', 'EL', 'HR', 'HU', 'LT',
+                                   'EE', 'LV', 'RO', 'BG', 'HR', 'RS',
+                                   'FI', 'GR', 'HU', 'LT',
                                    'PL', 'SI', 'SK']
         elif region == 'NA':
             region_subdivisions = ['DZ', 'EG', 'MA', 'LY', 'TN']
@@ -163,8 +168,11 @@ def return_region_divisions(region_list, data_path):
                                    'PS', 'QA', 'SA', 'SY', 'YE']
         elif region == 'CWE':
             region_subdivisions = ['FR', 'BE', 'LU', 'NL', 'DE']
-        elif region in onshore_shapes.index:
+        elif region in shapes.index:
             region_subdivisions = [region]
+        else:
+            custom_log(f"{region} not in shapes list!")
+            continue
 
         regions.extend(region_subdivisions)
 
@@ -239,7 +247,7 @@ def get_onshore_shapes(regions, data_path):
 
     """
 
-    arcgis_fn = f"{data_path}input_data/shapefiles/Longitude_Graticules_and_World_Countries_Boundaries-shp/" \
+    arcgis_fn = f"{data_path}input/shapefiles/Longitude_Graticules_and_World_Countries_Boundaries-shp/" \
                 f"99bfd9e7-bb42-4728-87b5-07f8c8ac631c2020328-1-1vef4ev.lu5nk.shp"
     shapes = read_file(arcgis_fn)
     shapes["CNTRY_NAME"] = shapes["CNTRY_NAME"].apply(convert_old_country_names)
@@ -249,7 +257,7 @@ def get_onshore_shapes(regions, data_path):
 
     if regions is not None:
         missing_codes = set(regions) - set(shapes.index)
-        assert not missing_codes, f"Error: Shapes are not available for the " \
+        assert not missing_codes, f"Shapes are not available for the " \
not missing_codes, f"Shapes are not available for the " \ f"following codes: {sorted(list(missing_codes))}" shapes = shapes[regions] @@ -264,7 +272,7 @@ def get_offshore_shapes(regions, data_path): # Remove landlocked countries for which there is no offshore shapes iso_codes = remove_landlocked_countries(regions) - eez_fn = f"{data_path}input_data/shapefiles/eez/World_EEZ_v8_2014.shp" + eez_fn = f"{data_path}input/shapefiles/eez/World_EEZ_v8_2014.shp" eez_shapes = read_file(eez_fn) eez_shapes = eez_shapes[notnull(eez_shapes['ISO_3digit'])] @@ -334,7 +342,7 @@ def return_coordinates_from_shapefiles(resource_dataset, shapefiles_region): def retrieve_load_data_partitions(data_path, date_slice, alpha, delta, regions, norm_type): - load_data_fn = join(data_path, 'input_data/load_data', 'load_2009_2018.csv') + load_data_fn = join(data_path, 'input/load_data', 'load_2009_2018.csv') load_data = read_csv(load_data_fn, index_col=0) load_data.index = date_range('2009-01-01T00:00', '2018-12-31T23:00', freq='H') load_data_sliced = load_data.loc[date_slice[0]:date_slice[1]] @@ -383,7 +391,7 @@ def filter_onshore_offshore_locations(coordinates_in_region, data_path, spatial_ """ land_fn = 'ERA5_surface_characteristics_20181231_' + str(spatial_resolution) + '.nc' - land_path = join(data_path, 'input_data/land_data', land_fn) + land_path = join(data_path, 'input/land_data', land_fn) dataset = xr.open_dataset(land_path) dataset = dataset.sortby([dataset.longitude, dataset.latitude]) @@ -457,7 +465,7 @@ def init_folder(parameters, c, suffix=None): Relative path of the folder. """ - output_data_path = join(parameters['data_path'], 'output_data') + output_data_path = join(parameters['data_path'], 'output') no_locs = str(sum(parameters['deployments'])) no_part = str(len(parameters['regions'])) diff --git a/src/tools.py b/src/tools.py index de30e09d3981c7c48ef9d2159b3911c356f8f2ae..071e4054a5832954d742bf9d883ce6cf53aa42d3 100644 --- a/src/tools.py +++ b/src/tools.py @@ -36,7 +36,7 @@ def read_database(data_path, spatial_resolution): dataset: xarray.Dataset """ - file_path = join(data_path, 'input_data/resource_data', str(spatial_resolution)) + file_path = join(data_path, 'input/resource_data', str(spatial_resolution)) # Read through all files, extract the first 2 characters (giving the # macro-region) and append in a list that will keep the unique elements. 
     files = [f for f in listdir(file_path) if isfile(join(file_path, f))]
@@ -104,7 +104,7 @@ def filter_locations_by_layer(regions, start_coordinates, model_params, tech_par
         threshold_distance = tech_params['protected_areas_distance_threshold']
         coords_to_remove = []
 
-        areas_fn = join(data_path, 'input_data/land_data', 'WDPA_Feb2019-shapefile-points.shp')
+        areas_fn = join(data_path, 'input/land_data', 'WDPA_Feb2019-shapefile-points.shp')
         dataset = read_file(areas_fn)
 
         lons = []
@@ -191,7 +191,7 @@ def filter_locations_by_layer(regions, start_coordinates, model_params, tech_par
     elif which == 'orography':
 
         orography_fn = 'ERA5_orography_characteristics_20181231_' + str(model_params['spatial_resolution']) + '.nc'
-        orography_path = join(data_path, 'input_data/land_data', orography_fn)
+        orography_path = join(data_path, 'input/land_data', orography_fn)
         dataset = xr.open_dataset(orography_path).astype(float32)
         dataset = dataset.sortby([dataset.longitude, dataset.latitude])
         dataset = dataset.assign_coords(longitude=(((dataset.longitude
@@ -217,7 +217,7 @@ def filter_locations_by_layer(regions, start_coordinates, model_params, tech_par
     elif which in ['forestry', 'water_mask', 'bathymetry']:
 
         surface_fn = 'ERA5_surface_characteristics_20181231_' + str(model_params['spatial_resolution']) + '.nc'
-        surface_path = join(data_path, 'input_data/land_data', surface_fn)
+        surface_path = join(data_path, 'input/land_data', surface_fn)
         dataset = xr.open_dataset(surface_path).astype(float32)
         dataset = dataset.sortby([dataset.longitude, dataset.latitude])
         dataset = dataset.assign_coords(longitude=(((dataset.longitude
@@ -263,7 +263,7 @@ def filter_locations_by_layer(regions, start_coordinates, model_params, tech_par
    elif which == 'population_density':
 
         population_fn = 'gpw_v4_population_density_adjusted_rev11_0.5.nc'
-        population_path = join(data_path, 'input_data/population_density', population_fn)
+        population_path = join(data_path, 'input/population_density', population_fn)
         dataset = xr.open_dataset(population_path)
 
         varname = [item for item in dataset.data_vars][0]
@@ -435,9 +435,9 @@ def return_output(input_dict, data_path, smooth_wind_power_curve=True):
     output_dict = deepcopy(input_dict)
     tech_dict = read_inputs('../config_techs.yml')
 
-    wind_data_path = join(data_path, 'input_data/transfer_functions', 'data_wind_turbines.csv')
+    wind_data_path = join(data_path, 'input/transfer_functions', 'data_wind_turbines.csv')
     data_converter_wind = read_csv(wind_data_path, sep=';', index_col=0)
-    solar_data_path = join(data_path, 'input_data/transfer_functions', 'data_solar_modules.csv')
+    solar_data_path = join(data_path, 'input/transfer_functions', 'data_solar_modules.csv')
     data_converter_solar = read_csv(solar_data_path, sep=';', index_col=0)
 
     for region, tech in key_list:
@@ -753,7 +753,7 @@ def retrieve_site_data(model_parameters, deployment_dict, coordinates_dict, outp
 
     # Capacity credit sites.
 
-    load_data_fn = join(model_parameters['data_path'], 'input_data/load_data', 'load_2009_2018.csv')
+    load_data_fn = join(model_parameters['data_path'], 'input/load_data', 'load_2009_2018.csv')
     load_data = read_csv(load_data_fn, index_col=0)
     load_data.index = to_datetime(load_data.index)
     load_data = load_data[(load_data.index > time_slice[0]) & (load_data.index < time_slice[1])]