diff --git a/config_model.yml b/config_model.yml
index e3c396419a16ab74f2779e4e7bb9462a790e8d79..e9b99a3c5f7851c4cec5f9047cd5403941d062ff 100644
--- a/config_model.yml
+++ b/config_model.yml
@@ -5,7 +5,7 @@ data_path: '/home/dcradu/data/resite_ip/'
 # Spatial resolution (in degrees) of the potential sites.
 spatial_resolution: 0.25
 # Start time and end time of the analysis.
-time_slice: ['2017-01-01T00:00', '2019-12-31T23:00']
+time_slice: ['2010-01-01T00:00', '2019-12-31T23:00']
 # Technologies to deploy.
 regions: ['EU']
 technologies: ['wind_onshore']
@@ -20,36 +20,38 @@ siting_params:
   # Time-window length used to compute the criticality indicator. Integer value.
   delta: 1
   # Solution method: BB or HEU or RAND or GRED.
+  # Criticality threshold (single value replacing the former per-method lists; src/main.py takes c from its -c CLI argument).
+  c: 112
+
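+  # Note: the per-method 'set' flags below are overridden at runtime by the CLI flags parsed in src/main.py.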
   solution_method:
     BB:
       # Branch & Bound
-      set: True
+      set: False
       mir: True
-      c: [1, 112, 224, 336, 448, 560]
       solver: 'gurobi'
       mipgap: 0.01
       timelimit: 3600
       threads: 1
     MIRSA:
       # Simulated Annealing with Local Search
-      set: False
-      c: [1, 112, 224, 336, 448, 560]
+      set: True
       neighborhood: 1
-      no_iterations: 2000
+      no_iterations: 1000
       no_epochs: 500
       initial_temp: 200.
       no_runs: 1
-      algorithm: 'SALS' # 'GLS' (Greedy Local Search)
-    GRED:
+      algorithm: 'RSSA' # one of 'SALS', 'RSSA', 'GLS' (Greedy Local Search)
+    GRED_DET:
       set: False
-      p: 0.10 # [0.05, 0.10, 0.15, 0.20]
-      c: [1, 112, 224, 336, 448, 560]
-      no_runs: 1
-      algorithm: 'STGH' # TGH, STGH
+      no_runs: 5
+      algorithm: 'TGH'
+    GRED_STO:
+      set: False
+      p: 55 # 10% of 10150 locations
+      no_runs: 100
+      algorithm: 'STGH'
     RAND:
       # Random Search
       set: False
-      c: [1, 112, 224, 336, 448, 560]
       no_iterations: 50
       no_epochs: 500
       no_runs: 1
diff --git a/src/jl/SitingHeuristics.jl b/src/jl/SitingHeuristics.jl
index 99449f35ccb5c2fcf5921fd261e23a492ca5de35..d36e740984d9a2fe11b3089fcf45108023d78753 100644
--- a/src/jl/SitingHeuristics.jl
+++ b/src/jl/SitingHeuristics.jl
@@ -30,7 +30,6 @@ function main_MIRSA(index_dict, deployment_dict, D, c, N, I, E, T_init, R, run)
     x_init = solve_MILP_partitioning(D, c, n_partitions, index_dict, "Gurobi")
     
     for r = 1:R
-      println("Run ", r, "/", R)
       x_sol[r, :], LB_sol[r], obj_sol[r, :] = simulated_annealing_local_search_partition(D, c, n_partitions, N, I, E, x_init, T_init, index_dict)
     end
 
@@ -51,7 +50,6 @@ function main_MIRSA(index_dict, deployment_dict, D, c, N, I, E, T_init, R, run)
     x_init = convert.(Float64, x_init)
 
     for r = 1:R
-      println("Run ", r, "/", R)
       x_sol[r, :], LB_sol[r], obj_sol[r, :] = simulated_annealing_local_search_partition(D, c, n_partitions, N, I, E, x_init, T_init, index_dict)
     end
 
@@ -77,15 +75,14 @@ function main_GRED(deployment_dict, D, c, R, p, run)
     x_sol, LB_sol = Array{Float64, 2}(undef, R, L), Array{Float64, 1}(undef, R)
     n = convert(Float64, deployment_dict[1])
     for r = 1:R
-      println("Run ", r, "/", R)
       x_sol[r, :], LB_sol[r] = threshold_greedy_algorithm(D, c, n)
     end
+
   elseif run == "STGH"
     x_sol, LB_sol = Array{Float64, 2}(undef, R, L), Array{Float64, 1}(undef, R)
     n = convert(Float64, deployment_dict[1])
     p = convert(Float64, p)
     for r = 1:R
-      println("Run ", r, "/", R)
       x_sol[r, :], LB_sol[r] = stochastic_threshold_greedy_algorithm(D, c, n, p)
     end
   else
@@ -111,7 +108,6 @@ function main_RAND(deployment_dict, D, c, I, R, run)
     x_sol, LB_sol = Array{Float64, 2}(undef, R, L), Array{Float64, 1}(undef, R)
     n = convert(Float64, deployment_dict[1])
     for r = 1:R
-      println("Run ", r, "/", R)
       x_sol[r, :], LB_sol[r] = random_search(D, c, n, I)
     end
 
@@ -148,7 +144,6 @@ function main_LSEA(index_dict, deployment_dict, D, c, N, I, E, run)
     x_sol, LB_sol, obj_sol = Array{Float64, 2}(undef, R, L), Array{Float64, 1}(undef, R), Array{Float64, 2}(undef, R, I)
     x_init = solve_MILP_partitioning(D, c, n_partitions, index_dict, "Gurobi")
     for r = 1:R
-      println("Run ", r, "/", R)
       x_sol[r, :], LB_sol[r], obj_sol[r, :] = greedy_local_search_partition(D, c, n_partitions, N, I, E, x_init, index_dict)
     end
   else
diff --git a/src/main.py b/src/main.py
index 1cbc092b70051d2f016f610813dc5b202f487aef..f4ed0745d2f90e2e72ba166518aa7acd94646f83 100644
--- a/src/main.py
+++ b/src/main.py
@@ -1,17 +1,41 @@
 import pickle
 import yaml
 from os.path import join, isfile
-from numpy import array, argsort, sum
+from numpy import array, sum
 from pyomo.opt import SolverFactory
 import time
+import argparse
 
 from helpers import read_inputs, init_folder, custom_log, xarray_to_ndarray, generate_jl_input, get_deployment_vector
 from tools import read_database, return_filtered_coordinates, selected_data, return_output, resource_quality_mapping, \
-    critical_window_mapping, sites_position_mapping, retrieve_location_dict, retrieve_site_data
+    critical_window_mapping, sites_position_mapping
 from models import build_ip_model
 
+
+def parse_args():
+
+    parser = argparse.ArgumentParser(description='Command line arguments.')
+
+    parser.add_argument('-c', '--threshold', dest='c', type=int)
+    parser.add_argument('-run_BB', '--run_BB_algorithm', dest='run_BB', action='store_true')
+    parser.add_argument('-run_MIRSA', '--run_MIRSA_algorithm', dest='run_MIRSA', action='store_true')
+    parser.add_argument('-run_GRED_DET', '--run_GRED_DET_algorithm', dest='run_GRED_DET', action='store_true')
+    parser.add_argument('-run_GRED_STO', '--run_GRED_STO_algorithm', dest='run_GRED_STO', action='store_true')
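+
+    # Example invocation, assuming the flag names above (exactly one run_* flag is expected,
+    # see the single_true() check in the main block):
+    #   python main.py -c 112 -run_MIRSA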
+
+    parsed_args = vars(parser.parse_args())
+
+    return parsed_args
+
+
+def single_true(iterable):
+    # True iff exactly one element is truthy: the first any() consumes the iterator up to
+    # the first truthy element, the second any() verifies that no further truthy element remains.
+    i = iter(iterable)
+    return any(i) and not any(i)
+
+
 if __name__ == '__main__':
 
+    args = parse_args()
+
     model_parameters = read_inputs('../config_model.yml')
     siting_parameters = model_parameters['siting_params']
     tech_parameters = read_inputs('../config_techs.yml')
@@ -58,50 +82,51 @@ if __name__ == '__main__':
 
         custom_log(' Data read. Building model.')
 
+    siting_parameters['solution_method']['BB']['set'] = args['run_BB']
+    siting_parameters['solution_method']['MIRSA']['set'] = args['run_MIRSA']
+    siting_parameters['solution_method']['GRED_DET']['set'] = args['run_GRED_DET']
+    siting_parameters['solution_method']['GRED_STO']['set'] = args['run_GRED_STO']
+
+    c = args['c']
+
+    if not single_true([args['run_BB'], args['run_MIRSA'], args['run_GRED_DET'], args['run_GRED_STO']]):
+        raise ValueError(' Exactly one solution method must be selected via the command-line arguments.')
+
     if siting_parameters['solution_method']['BB']['set']:
 
         custom_log(' BB chosen to solve the IP.')
         params = siting_parameters['solution_method']['BB']
 
-        if not isinstance(params['c'], list):
-            raise ValueError(' Values of c have to be elements of a list for the Branch & Bound set-up.')
-
-        for c in params['c']:
+        output_folder = init_folder(model_parameters, c, f"_BB_MIR_str{params['mir']}")
+        with open(join(output_folder, 'config_model.yaml'), 'w') as outfile:
+            yaml.dump(model_parameters, outfile, default_flow_style=False, sort_keys=False)
+        with open(join(output_folder, 'config_techs.yaml'), 'w') as outfile:
+            yaml.dump(tech_parameters, outfile, default_flow_style=False, sort_keys=False)
 
-            output_folder = init_folder(model_parameters, c, f"_BB_c{c}")
-            with open(join(output_folder, 'config_model.yaml'), 'w') as outfile:
-                yaml.dump(model_parameters, outfile, default_flow_style=False, sort_keys=False)
-            with open(join(output_folder, 'config_techs.yaml'), 'w') as outfile:
-                yaml.dump(tech_parameters, outfile, default_flow_style=False, sort_keys=False)
+        # Solver options for the MIP problem
+        opt = SolverFactory(params['solver'])
+        opt.options['MIPGap'] = params['mipgap']
+        opt.options['Threads'] = params['threads']
+        opt.options['TimeLimit'] = params['timelimit']
 
-            # Solver options for the MIP problem
-            opt = SolverFactory(params['solver'])
-            opt.options['MIPGap'] = params['mipgap']
-            opt.options['Threads'] = params['threads']
-            opt.options['TimeLimit'] = params['timelimit']
+        instance = build_ip_model(deployment_dict, site_coordinates, criticality_data,
+                                  c, output_folder, params['mir'])
+        custom_log(' Sending model to solver.')
 
-            instance = build_ip_model(deployment_dict, site_coordinates, criticality_data,
-                                      c, output_folder, params['mir'])
-            custom_log(' Sending model to solver.')
+        results = opt.solve(instance, tee=False, keepfiles=False,
+                            report_timing=False, logfile=join(output_folder, 'solver_log.log'))
 
-            results = opt.solve(instance, tee=False, keepfiles=False,
-                                report_timing=False, logfile=join(output_folder, 'solver_log.log'))
+        objective = instance.objective()
+        x_values = array(list(instance.x.extract_values().values()))
 
-            objective = instance.objective()
-            x_values = array(list(instance.x.extract_values().values()))
-            comp_location_dict = retrieve_location_dict(x_values, model_parameters, site_positions)
-            retrieve_site_data(model_parameters, deployment_dict, site_coordinates, capacity_factors_data,
-                               criticality_data, site_positions, c, comp_location_dict, objective,
-                               output_folder, benchmarks=False)
+        pickle.dump(x_values, open(join(output_folder, 'solution_matrix.p'), 'wb'))
+        pickle.dump(objective, open(join(output_folder, 'objective_vector.p'), 'wb'))
 
     elif siting_parameters['solution_method']['MIRSA']['set']:
 
         custom_log(' MIRSA chosen to solve the IP. Opening a Julia instance.')
         params = siting_parameters['solution_method']['MIRSA']
 
-        if not isinstance(params['c'], list):
-            raise ValueError(' Values of c have to be elements of a list for the heuristic set-up.')
-
         jl_dict = generate_jl_input(deployment_dict, site_coordinates)
 
         import julia
@@ -109,42 +134,68 @@ if __name__ == '__main__':
         from julia import Main
         Main.include("jl/SitingHeuristics.jl")
 
-        for c in params['c']:
-            print('Running heuristic for c value of', c)
-            jl_selected, jl_objective, jl_traj = Main.main_MIRSA(jl_dict['index_dict'], jl_dict['deployment_dict'],
-                                                                 criticality_data, c, params['neighborhood'],
-                                                                 params['no_iterations'], params['no_epochs'],
-                                                                 params['initial_temp'], params['no_runs'],
-                                                                 params['algorithm'])
-            output_folder = init_folder(model_parameters, c, suffix='_MIRSA')
-
-            with open(join(output_folder, 'config_model.yaml'), 'w') as outfile:
-                yaml.dump(model_parameters, outfile, default_flow_style=False, sort_keys=False)
-            with open(join(output_folder, 'config_techs.yaml'), 'w') as outfile:
-                yaml.dump(tech_parameters, outfile, default_flow_style=False, sort_keys=False)
-
-            pickle.dump(jl_selected, open(join(output_folder, 'solution_matrix.p'), 'wb'))
-            pickle.dump(jl_objective, open(join(output_folder, 'objective_vector.p'), 'wb'))
-            pickle.dump(jl_traj, open(join(output_folder, 'trajectory_matrix.p'), 'wb'))
-
-            med_run = argsort(jl_objective)[params['no_runs']//2]
-            jl_selected_seed = jl_selected[med_run, :]
-            jl_objective_seed = jl_objective[med_run]
-
-            jl_locations = retrieve_location_dict(jl_selected_seed, model_parameters, site_positions)
-            retrieve_site_data(model_parameters, deployment_dict, site_coordinates, capacity_factors_data,
-                               criticality_data, site_positions, c, jl_locations, jl_objective_seed,
-                               output_folder, benchmarks=False)
-
-    elif siting_parameters['solution_method']['RAND']['set']:
-
-        custom_log(' Locations to be chosen via random search.')
-        params = siting_parameters['solution_method']['RAND']
-
-        if not isinstance(params['c'], list):
-            raise ValueError(' Values of c have to provided as list for the RAND set-up.')
-        if len(model_parameters['technologies']) > 1:
-            raise ValueError(' This method is currently implemented for one single technology only.')
+        start = time.time()
+        jl_selected, jl_objective, jl_traj = Main.main_MIRSA(jl_dict['index_dict'], jl_dict['deployment_dict'],
+                                                             criticality_data, c, params['neighborhood'],
+                                                             params['no_iterations'], params['no_epochs'],
+                                                             params['initial_temp'], params['no_runs'],
+                                                             params['algorithm'])
+        end = time.time()
+        print(f"Average CPU time for c={c}: {round((end-start)/params['no_runs'], 1)} s")
+
+        output_folder = init_folder(model_parameters, c, suffix=f"_MIRSA_{params['algorithm']}")
+
+        with open(join(output_folder, 'config_model.yaml'), 'w') as outfile:
+            yaml.dump(model_parameters, outfile, default_flow_style=False, sort_keys=False)
+        with open(join(output_folder, 'config_techs.yaml'), 'w') as outfile:
+            yaml.dump(tech_parameters, outfile, default_flow_style=False, sort_keys=False)
+
+        pickle.dump(jl_selected, open(join(output_folder, 'solution_matrix.p'), 'wb'))
+        pickle.dump(jl_objective, open(join(output_folder, 'objective_vector.p'), 'wb'))
+        pickle.dump(jl_traj, open(join(output_folder, 'trajectory_matrix.p'), 'wb'))
+
+    # elif siting_parameters['solution_method']['RAND']['set']:
+    #
+    #     custom_log(' Locations to be chosen via random search.')
+    #     params = siting_parameters['solution_method']['RAND']
+    #
+    #     if not isinstance(params['c'], list):
+    #         raise ValueError(' Values of c have to provided as list for the RAND set-up.')
+    #     if len(model_parameters['technologies']) > 1:
+    #         raise ValueError(' This method is currently implemented for one single technology only.')
+    #
+    #     jl_dict = generate_jl_input(deployment_dict, site_coordinates)
+    #
+    #     import julia
+    #     j = julia.Julia(compiled_modules=False)
+    #     from julia import Main
+    #     Main.include("jl/SitingHeuristics.jl")
+    #
+    #     for c in params['c']:
+    #         print('Running heuristic for c value of', c)
+    #
+    #         jl_selected, jl_objective = Main.main_RAND(jl_dict['deployment_dict'], criticality_data,
+    #                                                    c, params['no_iterations'], params['no_runs'],
+    #                                                    params['algorithm'])
+    #
+    #         output_folder = init_folder(model_parameters, c, suffix='_RS')
+    #
+    #         pickle.dump(jl_selected, open(join(output_folder, 'solution_matrix.p'), 'wb'))
+    #         pickle.dump(jl_objective, open(join(output_folder, 'objective_vector.p'), 'wb'))
+    #
+    #         med_run = argsort(jl_objective)[params['no_runs']//2]
+    #         jl_selected_seed = jl_selected[med_run, :]
+    #         jl_objective_seed = jl_objective[med_run]
+    #
+    #         jl_locations = retrieve_location_dict(jl_selected_seed, model_parameters, site_positions)
+    #         retrieve_site_data(model_parameters, deployment_dict, site_coordinates, capacity_factors_data,
+    #                            criticality_data, site_positions, c, jl_locations, jl_objective_seed,
+    #                            output_folder, benchmarks=False)
+
+    elif siting_parameters['solution_method']['GRED_DET']['set']:
+
+        params = siting_parameters['solution_method']['GRED_DET']
+        custom_log(f" GRED_{params['algorithm']} chosen to solve the IP. Opening a Julia instance.")
 
         jl_dict = generate_jl_input(deployment_dict, site_coordinates)
 
@@ -153,30 +204,20 @@ if __name__ == '__main__':
         from julia import Main
         Main.include("jl/SitingHeuristics.jl")
 
-        for c in params['c']:
-            print('Running heuristic for c value of', c)
-
-            jl_selected, jl_objective = Main.main_RAND(jl_dict['deployment_dict'], criticality_data,
-                                                       c, params['no_iterations'], params['no_runs'],
-                                                       params['algorithm'])
-
-            output_folder = init_folder(model_parameters, c, suffix='_RS')
+        start = time.time()
+        # GRED_DET has no 'p' entry in the config; pass a placeholder, since the deterministic 'TGH' run ignores p.
+        jl_selected, jl_objective = Main.main_GRED(jl_dict['deployment_dict'], criticality_data, c,
+                                                   params['no_runs'], params.get('p', 0), params['algorithm'])
+        end = time.time()
+        print(f"Average CPU time for c={c}: {round((end-start)/params['no_runs'], 1)} s")
 
-            pickle.dump(jl_selected, open(join(output_folder, 'solution_matrix.p'), 'wb'))
-            pickle.dump(jl_objective, open(join(output_folder, 'objective_vector.p'), 'wb'))
+        output_folder = init_folder(model_parameters, c, suffix=f"_GRED_{params['algorithm']}")
 
-            med_run = argsort(jl_objective)[params['no_runs']//2]
-            jl_selected_seed = jl_selected[med_run, :]
-            jl_objective_seed = jl_objective[med_run]
+        pickle.dump(jl_selected, open(join(output_folder, 'solution_matrix.p'), 'wb'))
+        pickle.dump(jl_objective, open(join(output_folder, 'objective_vector.p'), 'wb'))
 
-            jl_locations = retrieve_location_dict(jl_selected_seed, model_parameters, site_positions)
-            retrieve_site_data(model_parameters, deployment_dict, site_coordinates, capacity_factors_data,
-                               criticality_data, site_positions, c, jl_locations, jl_objective_seed,
-                               output_folder, benchmarks=False)
+    elif siting_parameters['solution_method']['GRED_STO']['set']:
 
-    elif siting_parameters['solution_method']['GRED']['set']:
-
-        params = siting_parameters['solution_method']['GRED']
+        params = siting_parameters['solution_method']['GRED_STO']
         custom_log(f" GRED_{params['algorithm']} chosen to solve the IP. Opening a Julia instance.")
 
         if not isinstance(params['c'], list):
@@ -189,27 +230,16 @@ if __name__ == '__main__':
         from julia import Main
         Main.include("jl/SitingHeuristics.jl")
 
-        for c in params['c']:
-            print('Running heuristic for c value of', c)
-            start = time.time()
-            jl_selected, jl_objective = Main.main_GRED(jl_dict['deployment_dict'], criticality_data, c,
-                                                       params['no_runs'], params['p'], params['algorithm'])
-            end = time.time()
-            print(f"Average CPU time: {round((end-start)/params['no_runs'], 1)} s")
-
-            output_folder = init_folder(model_parameters, c, suffix=f"_GRED_{params['algorithm']}_p{params['p']}")
-
-            pickle.dump(jl_selected, open(join(output_folder, 'solution_matrix.p'), 'wb'))
-            pickle.dump(jl_objective, open(join(output_folder, 'objective_vector.p'), 'wb'))
+        start = time.time()
+        jl_selected, jl_objective = Main.main_GRED(jl_dict['deployment_dict'], criticality_data, c,
+                                                   params['no_runs'], params['p'], params['algorithm'])
+        end = time.time()
+        print(f"Average CPU time for c={c}: {round((end-start)/params['no_runs'], 1)} s")
 
-            med_run = argsort(jl_objective)[params['no_runs']//2]
-            jl_selected_seed = jl_selected[med_run, :]
-            jl_objective_seed = jl_objective[med_run]
+        output_folder = init_folder(model_parameters, c, suffix=f"_GRED_{params['algorithm']}_p{params['p']}")
 
-            jl_locations = retrieve_location_dict(jl_selected_seed, model_parameters, site_positions)
-            retrieve_site_data(model_parameters, deployment_dict, site_coordinates, capacity_factors_data,
-                               criticality_data, site_positions, c, jl_locations, jl_objective_seed,
-                               output_folder, benchmarks=False)
+        pickle.dump(jl_selected, open(join(output_folder, 'solution_matrix.p'), 'wb'))
+        pickle.dump(jl_objective, open(join(output_folder, 'objective_vector.p'), 'wb'))
 
     else:
         raise ValueError(' This solution method is not available. ')