From 57b4539b8c3b28d794f91d773633e1b22a705ad8 Mon Sep 17 00:00:00 2001
From: dcradu <dcradu@uliege.be>
Date: Mon, 19 Apr 2021 08:13:52 +0200
Subject: [PATCH] Return binary siting vectors from heuristics and tighten solver settings

The stochastic threshold greedy heuristics and the simulated annealing local
search now build and return a length-L binary vector (x_incumbent) instead of
the incumbent index set. In SitingHeuristics.jl, legacy_index is typed as
Int64 and the deployment target is converted to Float64; solve_MILP tightens
the Gurobi MIPGap to 0.01 and disables console logging; the output folders
written by main.py gain a "_bis" suffix.
---
 src/jl/MCP_heuristics.jl      | 14 ++++++++++----
 src/jl/SitingHeuristics.jl    |  5 +++--
 src/jl/optimisation_models.jl |  2 +-
 src/main.py                   |  6 +++---
 4 files changed, 17 insertions(+), 10 deletions(-)
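
Note: a minimal Julia sketch of the new return convention; L and
ind_ones_incumbent stand for the number of candidate locations and the
selected indices in MCP_heuristics.jl, and the values below are illustrative
only:

    L = 10                           # number of candidate locations
    ind_ones_incumbent = [2, 5, 7]   # indices selected by the search
    x_incumbent = zeros(Float64, L)
    x_incumbent[ind_ones_incumbent] .= 1.
    # x_incumbent (a binary siting vector) is now returned instead of the
    # index set; its shape matches the per-run storage x_sol[r, :] in
    # SitingHeuristics.jl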

diff --git a/src/jl/MCP_heuristics.jl b/src/jl/MCP_heuristics.jl
index 035f66c..85fab6b 100644
--- a/src/jl/MCP_heuristics.jl
+++ b/src/jl/MCP_heuristics.jl
@@ -438,7 +438,9 @@ function stochastic_threshold_greedy_algorithm(D::Array{Float64,2}, c::Float64,
     obj_incumbent = obj_candidate
     locations_added += 1
   end
-  return ind_incumbent, obj_incumbent
+  x_incumbent = zeros(Float64, L)
+  x_incumbent[ind_ones_incumbent] .= 1.
+  return x_incumbent, obj_incumbent
 end
 
 function stochastic_threshold_greedy_algorithm_trajectory(D::Array{Float64,2}, c::Float64, n::Float64, p::Float64)
@@ -483,7 +485,9 @@ function stochastic_threshold_greedy_algorithm_trajectory(D::Array{Float64,2}, c
     obj_incumbent_vec[locations_added+1] = obj_incumbent
     locations_added += 1
   end
-  return ind_incumbent, obj_incumbent
+  x_incumbent = zeros(Float64, L)
+  x_incumbent[ind_ones_incumbent] .= 1.
+  return x_incumbent, obj_incumbent
 
 end
 
@@ -596,5 +600,7 @@ function simulated_annealing_local_search(D::Array{Float64, 2}, c::Float64, n::F
     end
   end
   obj_incumbent = sum(y_incumbent)
-  return ind_ones_incumbent, obj_incumbent, obj
-end
\ No newline at end of file
+  x_incumbent = zeros(Float64, L)
+  x_incumbent[ind_ones_incumbent] .= 1.
+  return x_incumbent, obj_incumbent, obj
+end
diff --git a/src/jl/SitingHeuristics.jl b/src/jl/SitingHeuristics.jl
index 6b9c88c..d2fc058 100644
--- a/src/jl/SitingHeuristics.jl
+++ b/src/jl/SitingHeuristics.jl
@@ -35,12 +35,12 @@ function main_MIRSA(index_dict, deployment_dict, D, c, N, I, E, T_init, R, run,
   run = string(run)
   p = string(p)
   data_path = string(data_path)
-  legacy_index = Vector{Float64}(undef, 0)
+  legacy_index = Vector{Int64}(undef, 0)
 
   W, L = size(D)
 
   P = maximum(values(index_dict))
-  n = deployment_dict[1]
+  n = convert(Float64, deployment_dict[1])
 
   if run == "MIR"
 
@@ -48,6 +48,7 @@ function main_MIRSA(index_dict, deployment_dict, D, c, N, I, E, T_init, R, run,
     x_init = solve_MILP(D, c, n, "Gurobi")
 
     for r = 1:R
+      println(r)
       x_sol[r, :], LB_sol[r], obj_sol[r, :] = simulated_annealing_local_search(D, c, n, N, I, E, x_init, T_init, legacy_index)
     end
 
diff --git a/src/jl/optimisation_models.jl b/src/jl/optimisation_models.jl
index eea6c7f..5a8823d 100644
--- a/src/jl/optimisation_models.jl
+++ b/src/jl/optimisation_models.jl
@@ -7,7 +7,7 @@ function solve_MILP(D::Array{Float64, 2}, c::Float64, n::Float64, solver::String
   L = size(D)[2]
 
   if solver == "Gurobi"
-    MILP_model = Model(optimizer_with_attributes(Gurobi.Optimizer, "TimeLimit" => 7200., "MIPGap" => 0.05))
+    MILP_model = Model(optimizer_with_attributes(Gurobi.Optimizer, "TimeLimit" => 7200., "MIPGap" => 0.01, "LogToConsole" => 0))
   else
       println("Please use Cbc or Gurobi")
       throw(ArgumentError)
diff --git a/src/main.py b/src/main.py
index e6b9cc4..0413fa5 100644
--- a/src/main.py
+++ b/src/main.py
@@ -154,7 +154,7 @@ if __name__ == '__main__':
         end = time.time()
         print(f"Average CPU time for c={c}: {round((end-start)/params['no_runs'], 1)} s")
 
-        output_folder = init_folder(model_parameters, c, suffix=f"_LS_{args['LS_init_algorithm']}")
+        output_folder = init_folder(model_parameters, c, suffix=f"_LS_{args['LS_init_algorithm']}_bis")
 
         with open(join(output_folder, 'config_model.yaml'), 'w') as outfile:
             yaml.dump(model_parameters, outfile, default_flow_style=False, sort_keys=False)
@@ -204,7 +204,7 @@ if __name__ == '__main__':
         end = time.time()
         print(f"Average CPU time for c={c}: {round((end-start)/params['no_runs'], 1)} s")
 
-        output_folder = init_folder(model_parameters, c, suffix=f"_GRED_{params['algorithm']}")
+        output_folder = init_folder(model_parameters, c, suffix=f"_GRED_{params['algorithm']}_bis")
 
         pickle.dump(jl_selected, open(join(output_folder, 'solution_matrix.p'), 'wb'))
         pickle.dump(jl_objective, open(join(output_folder, 'objective_vector.p'), 'wb'))
@@ -227,7 +227,7 @@ if __name__ == '__main__':
         end = time.time()
         print(f"Average CPU time for c={c}: {round((end-start)/params['no_runs'], 1)} s")
 
-        output_folder = init_folder(model_parameters, c, suffix=f"_GRED_{params['algorithm']}_p{params['p']}")
+        output_folder = init_folder(model_parameters, c, suffix=f"_GRED_{params['algorithm']}_p{params['p']}_bis")
 
         pickle.dump(jl_selected, open(join(output_folder, 'solution_matrix.p'), 'wb'))
         pickle.dump(jl_objective, open(join(output_folder, 'objective_vector.p'), 'wb'))
-- 
GitLab