diff --git a/config_model.yml b/config_model.yml
index 3e6c9d541d0b091ee85821c4e207b201208211ef..1766d34390f4eff4e3741c2d58086ceab02d80da 100644
--- a/config_model.yml
+++ b/config_model.yml
@@ -65,4 +65,6 @@ siting_params:
       no_experiments: 50
       no_runs_per_experiment: 10
       criterion: 'max'
-      algorithm: 'custom' # 'k_fold'
+      method: 'custom' # 'k_fold'
+      algorithm: 'ga'
+
diff --git a/src/jl/SitingHeuristics.jl b/src/jl/SitingHeuristics.jl
index 1657d6117b0940c0092061ea1b9d956654514c23..7e76aa37b4688ab8d9d0c4a78dcd149bcc49b54b 100644
--- a/src/jl/SitingHeuristics.jl
+++ b/src/jl/SitingHeuristics.jl
@@ -156,7 +156,7 @@ function main_RAND(deployment_dict, D, c, I, R, run)
 
 end
 
-function main_CROSS(D, c, n, k, number_years, number_years_training, number_years_testing, number_experiments, number_runs_per_experiment, criterion, cross_validation_method)
+function main_CROSS(D, c, n, k, number_years, number_years_training, number_years_testing, number_experiments, number_runs_per_experiment, criterion, cross_validation_method, algorithm)
 
   D = convert.(Float64, D)
   c = convert(Float64, c)
@@ -169,11 +169,12 @@ function main_CROSS(D, c, n, k, number_years, number_years_training, number_year
   number_runs_per_experiment = convert(Int64, number_runs_per_experiment)
   criterion = convert(String, criterion)
   cross_validation_method = convert(String, cross_validation_method)
+  algorithm = convert(String, algorithm)
 
   if cross_validation_method == "custom"
-      obj_training, obj_testing, ind_training, ind_testing = custom_cross_validation(D, c, n, number_years, number_years_training, number_years_testing, number_experiments, number_runs_per_experiment, criterion);
+      obj_training, obj_testing, ind_training, ind_testing = custom_cross_validation(D, c, n, number_years, number_years_training, number_years_testing, number_experiments, number_runs_per_experiment, criterion, algorithm);
   elseif cross_validation_method == "k_fold"
-      obj_training, obj_testing, ind_training, ind_testing = k_fold_cross_validation(D, c, n, k, number_years, number_runs_per_experiment, criterion);
+      obj_training, obj_testing, ind_training, ind_testing = k_fold_cross_validation(D, c, n, k, number_years, number_runs_per_experiment, criterion, algorithm);
   end
 
   return obj_training, obj_testing, ind_training, ind_testing
diff --git a/src/jl/cross_validation_tools.jl b/src/jl/cross_validation_tools.jl
index 70483422bb4533956494ba7b958fa9c588bbf19f..d4d7b3b0da347055cdf2a214b25d6c61b6043d5f 100644
--- a/src/jl/cross_validation_tools.jl
+++ b/src/jl/cross_validation_tools.jl
@@ -80,6 +80,8 @@ function k_fold_cross_validation(D::Array{Float64, 2}, c::Float64, n::Int64, k::
                 ind_tmp .= mirsa(D_training, c, n, N, I, E, T_init)
             elseif algorithm == "rga"
                 ind_tmp .= randomised_greedy_algorithm(D_training, c, n, p)
+            elseif algorithm == "ga"
+                ind_tmp .= greedy_algorithm(D_training, c, n)
             end
             obj_tmp_training[run] = evaluate_obj(D_training, c, ind_tmp, col_tmp_training, col_zeros_training, y_tmp_training)
             ind_tmp_training[run,:] .= ind_tmp
@@ -100,6 +102,8 @@ function k_fold_cross_validation(D::Array{Float64, 2}, c::Float64, n::Int64, k::
                 ind_tmp .= mirsa(D_testing, c, n, N, I, E, T_init)
             elseif algorithm == "rga"
                 ind_tmp .= randomised_greedy_algorithm(D_testing, c, n, p)
+            elseif algorithm == "ga"
+                ind_tmp .= greedy_algorithm(D_testing, c, n)
             end
             obj_tmp_testing[run] = evaluate_obj(D_testing, c, ind_tmp, col_tmp_testing, col_zeros_testing, y_tmp_testing)
             ind_tmp_testing[run,:] .= ind_tmp
@@ -174,6 +178,8 @@ function custom_cross_validation(D::Array{Float64, 2}, c::Float64, n::Int64, num
                 ind_tmp .= mirsa(D_training, c, n, N, I, E, T_init)
             elseif algorithm == "rga"
                 ind_tmp .= randomised_greedy_algorithm(D_training, c, n, p)
+            elseif algorithm == "ga"
+                ind_tmp .= greedy_algorithm(D_training, c, n)
             end
             obj_tmp_training[run] = evaluate_obj(D_training, c, ind_tmp, col_tmp_training, col_zeros_training, y_tmp_training)
             ind_tmp_training[run,:] .= ind_tmp
@@ -194,6 +200,8 @@ function custom_cross_validation(D::Array{Float64, 2}, c::Float64, n::Int64, num
                 ind_tmp .= mirsa(D_testing, c, n, N, I, E, T_init)
             elseif algorithm == "rga"
                 ind_tmp .= randomised_greedy_algorithm(D_testing, c, n, p)
+            elseif algorithm == "ga"
+                ind_tmp .= greedy_algorithm(D_testing, c, n)
             end
             obj_tmp_testing[run] = evaluate_obj(D_testing, c, ind_tmp, col_tmp_testing, col_zeros_testing, y_tmp_testing)
             ind_tmp_testing[run,:] .= ind_tmp
@@ -403,3 +411,48 @@ function randomised_greedy_algorithm(D::Array{Float64,2}, c::Float64, n::Int64,
   end
   return ind_incumbent
 end
+
+
+function greedy_algorithm(D::Array{Float64,2}, c::Float64, n::Real)
+
+  W, L = size(D)
+  n = convert(Int64, n)
+  ind_compl_incumbent = [i for i in 1:L]
+  ind_incumbent = Vector{Int64}(undef, n)
+  Dx_incumbent = zeros(Float64, W)
+  obj_incumbent = 0.0
+  Dx_tmp = Vector{Float64}(undef, W)
+  y_tmp = Vector{Float64}(undef, W)
+  ind_candidate_list = zeros(Int64, L)
+  locations_added, threshold = 0, 0
+  @inbounds while locations_added < n
+    if locations_added < c
+      threshold = locations_added + 1
+      obj_candidate = 0.0
+    else
+      obj_candidate = obj_incumbent
+    end
+    ind_candidate_pointer = 1
+    @inbounds for ind in ind_compl_incumbent
+        Dx_tmp .= Dx_incumbent .+ view(D, :, ind)
+        y_tmp .= Dx_tmp .>= threshold
+        obj_tmp = sum(y_tmp)
+        if obj_tmp > obj_candidate
+          ind_candidate_pointer = 1
+          ind_candidate_list[ind_candidate_pointer] = ind
+          obj_candidate = obj_tmp
+          ind_candidate_pointer += 1
+        elseif obj_tmp == obj_candidate
+          ind_candidate_list[ind_candidate_pointer] = ind
+          ind_candidate_pointer += 1
+        end
+    end
+    ind_candidate = sample(view(ind_candidate_list, 1:ind_candidate_pointer-1))
+    ind_incumbent[locations_added+1] = ind_candidate
+    filter!(a -> a != ind_candidate, ind_compl_incumbent)
+    Dx_incumbent .= Dx_incumbent .+ view(D, :, ind_candidate)
+    obj_incumbent = obj_candidate
+    locations_added += 1
+  end
+  return ind_incumbent
+end
diff --git a/src/main.py b/src/main.py
index efa562c1eb27aee1a0832f378ee82420f7830152..e783da97823e9fb1c28ebdf8afb238718c5e6115 100644
--- a/src/main.py
+++ b/src/main.py
@@ -243,7 +243,7 @@ if __name__ == '__main__':
                                                 sum(model_parameters['deployments']), params['k'],
                                                 params['no_years'], params['no_years_train'], params['no_years_test'],
                                                 params['no_experiments'], params['no_runs_per_experiment'],
-                                                params['criterion'], params['algorithm'])
+                                                params['criterion'], params['method'], params['algorithm'])
 
         if params['no_years_train'] == params['no_years_test']:
             bal = 'balanced'