diff --git a/config_model.yml b/config_model.yml
index 255ab7dcf794f9f73f1e9fc603b466040f8d9041..cab10787e2417edb9831a8a92b0d97369822dcb5 100644
--- a/config_model.yml
+++ b/config_model.yml
@@ -52,7 +52,6 @@ siting_params:
     RAND:
       # Random Search
       set: False
-      no_iterations: 1000
-      no_epochs: 1000
+      no_iterations: 100000  # candidate samples drawn per run
       no_runs: 100
       algorithm: 'RS'
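
For reference, a minimal sketch of how the trimmed RAND block might be read on the Python side. It assumes the parameters are loaded with PyYAML; the snippet is illustrative and not the project's actual loader.

    import yaml

    # Load the model configuration and pick out the random-search block.
    with open('config_model.yml') as f:
        config = yaml.safe_load(f)

    rand_params = config['siting_params']['RAND']
    # Only no_iterations (100000) and no_runs (100) drive the search now;
    # the unused no_epochs key is gone, so nothing should read it.
    print(rand_params['set'], rand_params['no_iterations'],
          rand_params['no_runs'], rand_params['algorithm'])
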
diff --git a/src/jl/MCP_heuristics.jl b/src/jl/MCP_heuristics.jl
index c9c569c13809d4bb6aa87715e04fa029dfbcc8f2..f156feabc2e1ea0f1ec53b7a85ff7208aaaa7a4c 100644
--- a/src/jl/MCP_heuristics.jl
+++ b/src/jl/MCP_heuristics.jl
@@ -4,28 +4,31 @@ using Distributions
 #################### Random Search Algorithm #######################
 
 function random_search(D::Array{Float64, 2}, c::Float64, n::Float64, R::Int64)
-
   W, L = size(D)
+  n = convert(Int64, n)  # number of sites to select; cast once up front
   x_incumbent = zeros(Float64, L)
   ind_set = [l for l in 1:L]
-  ind_incumbent = Vector{Int64}(undef, convert(Int64, n))
-  ind_candidate = Vector{Int64}(undef, convert(Int64, n))
+  ind_incumbent = Vector{Int64}(undef, n)
+  ind_candidate = Vector{Int64}(undef, n)
   Dx_candidate = Vector{Float64}(undef, W)
+  Dx_init = zeros(Float64, W)  # zero vector used to reset the coverage accumulator each draw
   y_candidate = Vector{Float64}(undef, W)
-
-  LB_incumbent = 0
-  for r in 1:R
-    ind_candidate .= sample(ind_set, convert(Int64, n), replace=false)
-    Dx_candidate = sum(view(D, :, ind_candidate), dims = 2)
-    y_candidate = Dx_candidate .>= c
-    LB_candidate = sum(y_candidate)
-    if LB_candidate >= LB_incumbent
+  obj_incumbent = 0
+  @inbounds for r in 1:R  # R independent random draws; keep the best one
+    sample!(ind_set, ind_candidate, replace=false)  # draw n distinct candidate sites uniformly at random
+    Dx_candidate .= Dx_init  # reset the accumulated coverage for this draw
+    @inbounds for ind in ind_candidate
+      Dx_candidate .+= view(D, :, ind)  # add the selected site's column of D
+    end
+    y_candidate .= Dx_candidate .>= c  # 1.0 where a window meets the threshold, 0.0 otherwise
+    obj_candidate = sum(y_candidate)
+    if obj_candidate >= obj_incumbent
       ind_incumbent .= ind_candidate
-      LB_incumbent = LB_candidate
+      obj_incumbent = obj_candidate
     end
   end
   x_incumbent[ind_incumbent] .= 1.
-  return x_incumbent, LB_incumbent
+  return x_incumbent, obj_incumbent
 end
 
 #################### Greedy Local Search w/ Partitioning Constraints (Dict Implementation) #######################
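
A minimal sketch of exercising the rewritten random_search directly from Python. It assumes the pyjulia bridge (the `julia` package) with a local Julia that has Distributions installed, as MCP_heuristics.jl requires; the matrix sizes and threshold below are illustrative, not values from the repository.

    import numpy as np
    from julia import Main  # pyjulia bridge to an existing Julia installation

    # Make random_search available in the Julia Main module.
    Main.include('src/jl/MCP_heuristics.jl')

    W, L = 8760, 500                                      # illustrative: time windows x candidate sites
    D = (np.random.rand(W, L) < 0.3).astype(np.float64)   # hypothetical 0/1 criticality matrix
    c, n, R = 50.0, 100.0, 100000                         # coverage threshold, sites to select, draws

    # x is a 0/1 deployment vector of length L, obj the number of covered windows.
    x, obj = Main.random_search(D, c, n, R)
    print(int(x.sum()), obj)

Note that c and n are passed as floats to match the Float64 arguments in the Julia signature, and R as a plain int for the Int64 argument.
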
diff --git a/src/main.py b/src/main.py
index 32d1f4e13993843267e0f597bf20a3ce89bea5cf..989fba80b5a2c99a1f3356d86734e2e88ef2cd9a 100644
--- a/src/main.py
+++ b/src/main.py
@@ -181,7 +181,7 @@ if __name__ == '__main__':
                                                    c, params['no_iterations'], params['no_runs'],
                                                    params['algorithm'])
     
-        output_folder = init_folder(model_parameters, c, suffix='_RS')
+        output_folder = init_folder(model_parameters, c, suffix='_RS_bis')
     
         pickle.dump(jl_selected, open(join(output_folder, 'solution_matrix.p'), 'wb'))
         pickle.dump(jl_objective, open(join(output_folder, 'objective_vector.p'), 'wb'))
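
The two pickled artifacts written above can be read back for a quick sanity check. A sketch; the folder path is a placeholder, since the real one is whatever init_folder returned for the run.

    import pickle
    from os.path import join

    output_folder = 'path/to/run_RS_bis'   # placeholder: use the folder returned by init_folder
    with open(join(output_folder, 'solution_matrix.p'), 'rb') as f:
        jl_selected = pickle.load(f)
    with open(join(output_folder, 'objective_vector.p'), 'rb') as f:
        jl_objective = pickle.load(f)

    print(type(jl_selected), type(jl_objective))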