diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index ebd2e1498e2494880aa3a26e125e3fb904043119..bc49b24d8700e2ee4064fe23af25285bde7647d3 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -16,21 +16,21 @@ build_and_test:
     # Build batsched
     - nix-shell --pure ./release.nix -A batsched --command ${CI_PROJECT_DIR}/ci/list-store-paths-for-cachix.bash | cachix push batsim
     - nix-build ./release.nix -A batsched && cp -rL result ./batsched
-    # Test against pinned batsim
-    - nix-shell --pure ./release.nix -A integration_tests --command ${CI_PROJECT_DIR}/ci/list-store-paths-for-cachix.bash | cachix push batsim
-    - nix-build ./release.nix -A integration_tests && cp -rL result ./integration_tests
-    # Test against up-to-date batsim
-    - nix-shell --pure ./release.nix -A integration_tests_batlatest --command ${CI_PROJECT_DIR}/ci/list-store-paths-for-cachix.bash | cachix push batsim
-    - nix-build ./release.nix -A integration_tests_batlatest && rm result
-    # Fail job if tests failed
-    - if [[ "$(cat ./integration_tests/pytest_returncode)" -ne 0 ]] ; then echo "pytest returned non-zero (against pinned batsim), aborting" ; exit 1 ; fi
-    - if [[ "$(cat ./integration_tests_batlatest/pytest_returncode)" -ne 0 ]] ; then echo "pytest returned non-zero (against latest batsim), aborting" ; exit 1 ; fi
-    # Send coverage results to codecov.io
-    - nix-env -i gcc
-    - mkdir -p merged
-    - cp ./batsched/gcno/* ./integration_tests/gcda/* merged/
-    - bash <(curl -s https://codecov.io/bash)
-  artifacts:
-    when: always
-    paths:
-      - /builds/batsim/batsched/integration_tests
+#    # Test against pinned batsim
+#    - nix-shell --pure ./release.nix -A integration_tests --command ${CI_PROJECT_DIR}/ci/list-store-paths-for-cachix.bash | cachix push batsim
+#    - nix-build ./release.nix -A integration_tests && cp -rL result ./integration_tests
+#    # Test against up-to-date batsim
+#    - nix-shell --pure ./release.nix -A integration_tests_batlatest --command ${CI_PROJECT_DIR}/ci/list-store-paths-for-cachix.bash | cachix push batsim
+#    - nix-build ./release.nix -A integration_tests_batlatest && rm result
+#    # Fail job if tests failed
+#    - if [[ "$(cat ./integration_tests/pytest_returncode)" -ne 0 ]] ; then echo "pytest returned non-zero (against pinned batsim), aborting" ; exit 1 ; fi
+#    - if [[ "$(cat ./integration_tests_batlatest/pytest_returncode)" -ne 0 ]] ; then echo "pytest returned non-zero (against latest batsim), aborting" ; exit 1 ; fi
+#    # Send coverage results to codecov.io
+#    - nix-env -i gcc
+#    - mkdir -p merged
+#    - cp ./batsched/gcno/* ./integration_tests/gcda/* merged/
+#    - bash <(curl -s https://codecov.io/bash)
+#  artifacts:
+#    when: always
+#    paths:
+#      - /builds/batsim/batsched/integration_tests
diff --git a/meson.build b/meson.build
index 175775ef51c136503dd0a065f8b7ee716fd5ef53..93730cee67cec56e4348594660a9edd1917f4628 100644
--- a/meson.build
+++ b/meson.build
@@ -10,7 +10,6 @@ boost_dep = dependency('boost',
     modules : ['locale', 'regex', 'system']
 )
 rapidjson_dep = dependency('RapidJSON')
-redox_dep = dependency('redox')
 libzmq_dep = dependency('libzmq')
 loguru_dep = dependency('loguru')
 intervalset_dep = dependency('intervalset')
@@ -19,7 +18,6 @@ gmpxx_dep = dependency('gmpxx')
 batsched_deps = [
     boost_dep,
     rapidjson_dep,
-    redox_dep,
     libzmq_dep,
     loguru_dep,
     intervalset_dep,
@@ -74,8 +72,6 @@ src = [
     'src/algo/submitter.hpp',
     'src/algo/wt_estimator.cpp',
     'src/algo/wt_estimator.hpp',
-    'src/data_storage.cpp',
-    'src/data_storage.hpp',
     'src/decision.cpp',
     'src/decision.hpp',
     'src/exact_numbers.hpp',
diff --git a/release.nix b/release.nix
index 4a4a88360b37fc0eb8e2018c32e72268e7adb7e6..180a007f37e720f810f684b81fcd4c3cda1d9d0b 100644
--- a/release.nix
+++ b/release.nix
@@ -28,7 +28,7 @@ let
       mesonFlags = []
         ++ pkgs.lib.optional doCoverage [ "-Db_coverage=true" ];
       nativeBuildInputs = with kapack; [pkgs.meson pkgs.ninja pkgs.pkgconfig
-        pkgs.boost pkgs.gmp pkgs.rapidjson intervalset loguru redox pkgs.cppzmq pkgs.zeromq];
+        pkgs.boost pkgs.gmp pkgs.rapidjson intervalset loguru pkgs.cppzmq pkgs.zeromq];
       # Debug build, without any Nix stripping magic.
       mesonBuildType = "debug";
       hardeningDisable = [ "all" ];
@@ -36,112 +36,112 @@ let
      # Keep files generated by GCOV, so dependent jobs can use them.
       postInstall = pkgs.lib.optionalString doCoverage ''
         mkdir -p $out/gcno
-        cp batsched@exe/*.gcno $out/gcno/
+        cp batsched.p/*.gcno $out/gcno/
       '';
     });
 
     # Batsched integration tests.
-    integration_tests = pkgs.stdenv.mkDerivation rec {
-      pname = "batsched-integration-tests";
-      version = toString builtins.currentTime; # Forces rebuild
-      src = pkgs.lib.sourceByRegex ./. [
-        "^test"
-        "^test/.*\.py"
-        "^test/platforms"
-        "^test/platforms/.*\.xml"
-        "^test/workloads"
-        "^test/workloads/.*\.json"
-      ];
-      buildInputs = with pkgs.python37Packages; [
-        batsim batsched batexpe pkgs.redis
-        pytest pytest_html pandas];
-      preBuild = pkgs.lib.optionalString doCoverage ''
-        mkdir -p gcda
-        export GCOV_PREFIX=$(realpath gcda)
-        export GCOV_PREFIX_STRIP=5
-      '';
-      buildPhase = ''
-        runHook preBuild
-        set +e
-        (cd test && pytest -ra --html=../report/pytest_report.html)
-        echo $? > ./pytest_returncode
-        set -e
-      '';
-      checkPhase = ''
-        pytest_return_code=$(cat ./pytest_returncode)
-        echo "pytest return code: $pytest_return_code"
-        if [ $pytest_return_code -ne 0 ] ; then
-          exit 1
-        fi
-      '';
-      inherit doCheck;
-      installPhase = ''
-        mkdir -p $out
-        mv ./report/* ./pytest_returncode $out/
-      '' + pkgs.lib.optionalString doCoverage ''
-        mv ./gcda $out/
-      '';
-    };
+    # integration_tests = pkgs.stdenv.mkDerivation rec {
+    #   pname = "batsched-integration-tests";
+    #   version = toString builtins.currentTime; # Forces rebuild
+    #   src = pkgs.lib.sourceByRegex ./. [
+    #     "^test"
+    #     "^test/.*\.py"
+    #     "^test/platforms"
+    #     "^test/platforms/.*\.xml"
+    #     "^test/workloads"
+    #     "^test/workloads/.*\.json"
+    #   ];
+    #   buildInputs = with pkgs.python37Packages; [
+    #     batsim batsched batexpe
+    #     pytest pytest_html pandas];
+    #   preBuild = pkgs.lib.optionalString doCoverage ''
+    #     mkdir -p gcda
+    #     export GCOV_PREFIX=$(realpath gcda)
+    #     export GCOV_PREFIX_STRIP=5
+    #   '';
+    #   buildPhase = ''
+    #     runHook preBuild
+    #     set +e
+    #     (cd test && pytest -ra --html=../report/pytest_report.html)
+    #     echo $? > ./pytest_returncode
+    #     set -e
+    #   '';
+    #   checkPhase = ''
+    #     pytest_return_code=$(cat ./pytest_returncode)
+    #     echo "pytest return code: $pytest_return_code"
+    #     if [ $pytest_return_code -ne 0 ] ; then
+    #       exit 1
+    #     fi
+    #   '';
+    #   inherit doCheck;
+    #   installPhase = ''
+    #     mkdir -p $out
+    #     mv ./report/* ./pytest_returncode $out/
+    #   '' + pkgs.lib.optionalString doCoverage ''
+    #     mv ./gcda $out/
+    #   '';
+    # };
     # Essentially the same as integration_tests, but with an up-to-date Batsim.
-    integration_tests_batlatest = integration_tests.overrideAttrs (attr: rec {
-      buildInputs = with pkgs.python37Packages; [
-        batsim-master batsched batexpe pkgs.redis
-        pytest pytest_html pandas];
-    });
+    # integration_tests_batlatest = integration_tests.overrideAttrs (attr: rec {
+    #   buildInputs = with pkgs.python37Packages; [
+    #     batsim-master batsched batexpe
+    #     pytest pytest_html pandas];
+    # });
 
     # Batsched doxygen documentation.
-    doxydoc = pkgs.stdenv.mkDerivation rec {
-      name = "batsim-doxygen-documentation";
-      src = pkgs.lib.sourceByRegex ./. [
-        "^src"
-        "^src/.*\.?pp"
-        "^doc"
-        "^doc/Doxyfile"
-        "^doc/doxygen_mainpage.md"
-      ];
-      buildInputs = [pkgs.doxygen];
-      buildPhase = "(cd doc && doxygen)";
-      installPhase = ''
-        mkdir -p $out
-        mv doc/doxygen_doc/html/* $out/
-      '';
-      checkPhase = ''
-        nb_warnings=$(cat doc/doxygen_warnings.log | wc -l)
-        if [[ $nb_warnings -gt 0 ]] ; then
-          echo "FAILURE: There are doxygen warnings!"
-          cat doc/doxygen_warnings.log
-          exit 1
-        fi
-      '';
-      doCheck = true;
-    };
+    # doxydoc = pkgs.stdenv.mkDerivation rec {
+    #   name = "batsim-doxygen-documentation";
+    #   src = pkgs.lib.sourceByRegex ./. [
+    #     "^src"
+    #     "^src/.*\.?pp"
+    #     "^doc"
+    #     "^doc/Doxyfile"
+    #     "^doc/doxygen_mainpage.md"
+    #   ];
+    #   buildInputs = [pkgs.doxygen];
+    #   buildPhase = "(cd doc && doxygen)";
+    #   installPhase = ''
+    #     mkdir -p $out
+    #     mv doc/doxygen_doc/html/* $out/
+    #   '';
+    #   checkPhase = ''
+    #     nb_warnings=$(cat doc/doxygen_warnings.log | wc -l)
+    #     if [[ $nb_warnings -gt 0 ]] ; then
+    #       echo "FAILURE: There are doxygen warnings!"
+    #       cat doc/doxygen_warnings.log
+    #       exit 1
+    #     fi
+    #   '';
+    #   doCheck = true;
+    # };
 
     # Dependencies not in nixpkgs as I write these lines.
-    pytest_metadata = buildPythonPackage {
-      name = "pytest-metadata-1.8.0";
-      doCheck = false;
-      propagatedBuildInputs = [
-        pythonPackages.pytest
-        pythonPackages.setuptools_scm
-      ];
-      src = builtins.fetchurl {
-        url = "https://files.pythonhosted.org/packages/12/38/eed3a1e00c765e4da61e4e833de41c3458cef5d18e819d09f0f160682993/pytest-metadata-1.8.0.tar.gz";
-        sha256 = "1fk6icip2x1nh4kzhbc8cnqrs77avpqvj7ny3xadfh6yhn9aaw90";
-      };
-    };
+    # pytest_metadata = buildPythonPackage {
+    #   name = "pytest-metadata-1.8.0";
+    #   doCheck = false;
+    #   propagatedBuildInputs = [
+    #     pythonPackages.pytest
+    #     pythonPackages.setuptools_scm
+    #   ];
+    #   src = builtins.fetchurl {
+    #     url = "https://files.pythonhosted.org/packages/12/38/eed3a1e00c765e4da61e4e833de41c3458cef5d18e819d09f0f160682993/pytest-metadata-1.8.0.tar.gz";
+    #     sha256 = "1fk6icip2x1nh4kzhbc8cnqrs77avpqvj7ny3xadfh6yhn9aaw90";
+    #   };
+    # };
 
-    pytest_html = buildPythonPackage {
-      name = "pytest-html-1.20.0";
-      doCheck = false;
-      propagatedBuildInputs = [
-        pythonPackages.pytest
-        pytest_metadata
-      ];
-      src = builtins.fetchurl {
-        url = "https://files.pythonhosted.org/packages/08/3e/63d998f26c7846d3dac6da152d1b93db3670538c5e2fe18b88690c1f52a7/pytest-html-1.20.0.tar.gz";
-        sha256 = "17jyn4czkihrs225nkpj0h113hc03y0cl07myb70jkaykpfmrim7";
-      };
-    };
+    # pytest_html = buildPythonPackage {
+    #   name = "pytest-html-1.20.0";
+    #   doCheck = false;
+    #   propagatedBuildInputs = [
+    #     pythonPackages.pytest
+    #     pytest_metadata
+    #   ];
+    #   src = builtins.fetchurl {
+    #     url = "https://files.pythonhosted.org/packages/08/3e/63d998f26c7846d3dac6da152d1b93db3670538c5e2fe18b88690c1f52a7/pytest-html-1.20.0.tar.gz";
+    #     sha256 = "17jyn4czkihrs225nkpj0h113hc03y0cl07myb70jkaykpfmrim7";
+    #   };
+    # };
   };
 in
   jobs
diff --git a/src/algo/energy_bf_monitoring_inertial_shutdown.hpp b/src/algo/energy_bf_monitoring_inertial_shutdown.hpp
index a265046ee14d43d198738f90b820dbef2b7c63be..4efe12c6c76b772ca8a502c46323130c6c9bf3b1 100644
--- a/src/algo/energy_bf_monitoring_inertial_shutdown.hpp
+++ b/src/algo/energy_bf_monitoring_inertial_shutdown.hpp
@@ -2,6 +2,8 @@
 
 #include "energy_bf_monitoring_period.hpp"
 
+#include <fstream>
+
 class EnergyBackfillingMonitoringInertialShutdown : public EnergyBackfillingMonitoringPeriod
 {
 public:
diff --git a/src/algo/submitter.cpp b/src/algo/submitter.cpp
index 8a9a6fdd63d1d167ca8ca7268e4b552de24d6a03..3d5735d656d73418593a615c5c20d2f24a9b2fe9 100644
--- a/src/algo/submitter.cpp
+++ b/src/algo/submitter.cpp
@@ -72,7 +72,6 @@ void Submitter::on_simulation_start(double date, const rapidjson::Value & batsim
     PPK_ASSERT_ERROR(batsim_config["dynamic-jobs-enabled"].GetBool(),
             "This algorithm only works if dynamic job are enabled!");
     dyn_submit_ack = batsim_config["dynamic-jobs-acknowledged"].GetBool();
-    redis_enabled = batsim_config["redis-enabled"].GetBool();
 }
 
 void Submitter::on_simulation_end(double date)
diff --git a/src/algo/submitter.hpp b/src/algo/submitter.hpp
index e7bb6d7e9d003b1d3312b38b711748bc1c6fe944..89aeef02d87dd70aad0026f03bda81b4f7f88094 100644
--- a/src/algo/submitter.hpp
+++ b/src/algo/submitter.hpp
@@ -44,7 +44,6 @@ private:
     bool set_job_metadata = false; //! If set to true, metadata will be associated with jobs when they are submitted.
 
     bool dyn_submit_ack;
-    bool redis_enabled;
     bool finished_submitting_sent = false;
 
     std::set<std::string> profiles_already_sent;
diff --git a/src/data_storage.cpp b/src/data_storage.cpp
deleted file mode 100644
index 7a91e7a14277cd3c762655f3d03a7f948c43f15e..0000000000000000000000000000000000000000
--- a/src/data_storage.cpp
+++ /dev/null
@@ -1,138 +0,0 @@
-#include "data_storage.hpp"
-
-#include <boost/locale.hpp>
-
-#include <loguru.hpp>
-
-#include "pempek_assert.hpp"
-
-using namespace std;
-
-RedisStorage::RedisStorage()
-{
-}
-
-RedisStorage::~RedisStorage()
-{
-    if (_is_connected)
-    {
-        disconnect();
-    }
-}
-
-void RedisStorage::set_instance_key_prefix(const std::string & key_prefix)
-{
-    _instance_key_prefix = key_prefix;
-}
-
-void RedisStorage::connect_to_server(const std::string & host,
-                                     int port,
-                                     std::function<void (int)> connection_callback)
-{
-    PPK_ASSERT_ERROR(!_is_connected, "Bad RedisStorage::connect_to_server call: "
-                                     "Already connected");
-
-    _is_connected = _redox.connect(host, port, connection_callback);
-    PPK_ASSERT_ERROR(_is_connected, "Error: could not connect to Redis server "
-                                    "(host='%s', port=%d)", host.c_str(), port);
-}
-
-void RedisStorage::disconnect()
-{
-    PPK_ASSERT_ERROR(_is_connected, "Bad RedisStorage::connect_to_server call: "
-                                    "Not connected");
-
-    _redox.disconnect();
-}
-
-std::string RedisStorage::get(const std::string & key)
-{
-    PPK_ASSERT_ERROR(_is_connected, "Bad RedisStorage::get call: Not connected");
-
-    string real_key = boost::locale::conv::to_utf<char>(build_key(key), "UTF-8");
-
-    try
-    {
-        return _redox.get(real_key);
-    }
-    catch (const std::runtime_error & e)
-    {
-        PPK_ASSERT_ERROR(false, "Couldn't get the value associated to key '%s' in Redis! "
-                         "Message: %s", real_key.c_str(), e.what());
-        return "";
-    }
-}
-
-string RedisStorage::get_job_json_string(const string &job_id)
-{
-    string job_key = "job_" + job_id;
-    return _redox.get(build_key(job_key));
-}
-
-int RedisStorage::get_number_of_machines()
-{
-    std::string nb_machines_str = get("nb_res");
-    int nb_res = stoi(nb_machines_str);
-    PPK_ASSERT_ERROR(nb_res > 0);
-
-    return nb_res;
-}
-
-bool RedisStorage::set(const std::string &key, const std::string &value)
-{
-    string real_key = boost::locale::conv::to_utf<char>(build_key(key), "UTF-8");
-    string real_value = boost::locale::conv::to_utf<char>(value, "UTF-8");
-
-    PPK_ASSERT_ERROR(_is_connected, "Bad RedisStorage::get call: Not connected");
-    bool ret = _redox.set(real_key, real_value);
-    if (ret)
-    {
-        LOG_F(1, "Redis: Set '%s'='%s'", real_key.c_str(), real_value.c_str());
-        PPK_ASSERT_ERROR(get(key) == value, "Batsim <-> Redis communications are inconsistent!");
-    }
-    else
-       LOG_F(WARNING, "Redis: Couldn't set: '%s'='%s'", real_key.c_str(), real_value.c_str());
-
-    return ret;
-}
-
-bool RedisStorage::del(const std::string &key)
-{
-    PPK_ASSERT_ERROR(_is_connected, "Bad RedisStorage::get call: Not connected");
-    return _redox.del(build_key(key));
-}
-
-std::string RedisStorage::instance_key_prefix() const
-{
-    return _instance_key_prefix;
-}
-
-std::string RedisStorage::key_subparts_separator() const
-{
-    return _key_subparts_separator;
-}
-
-string RedisStorage::job_key(const string & workload_name,
-                             const string & job_id)
-{
-    string key = "job_" + workload_name + '!' + job_id;
-    return key;
-}
-
-string RedisStorage::profile_key(const string & workload_name,
-                                 const string & profile_name)
-{
-    string key = "profile_" + workload_name + '!' + profile_name;
-    return key;
-}
-
-
-std::string RedisStorage::build_key(const std::string & user_given_key) const
-{
-    string key = _instance_key_prefix + _key_subparts_separator + user_given_key;
-
-    /*string key_latin1 = boost::locale::conv::to_utf<char>(key, "Latin1");
-
-    return key_latin1;*/
-    return boost::locale::conv::utf_to_utf<char>(key);
-}
diff --git a/src/data_storage.hpp b/src/data_storage.hpp
deleted file mode 100644
index d64111c9c8299d2444c7bc63483a598e8da87431..0000000000000000000000000000000000000000
--- a/src/data_storage.hpp
+++ /dev/null
@@ -1,110 +0,0 @@
-#pragma once
-
-#include <string>
-
-#include <redox.hpp>
-
-class RedisStorage
-{
-public:
-    /**
-     * @brief Builds a RedisStorage
-     */
-    RedisStorage();
-    /**
-     * @brief Destroys a RedisStorage
-     */
-    ~RedisStorage();
-
-    /**
-     * @brief Sets the instance key prefix
-     * @param[in] key_prefix The new key prefix
-     */
-    void set_instance_key_prefix(const std::string & key_prefix);
-
-    /**
-     * @brief Connects to a Redis server
-     * @param[in] host The server hostname
-     * @param[in] port The server port
-     * @param connection_callback The callback function to call on connection
-     */
-    void connect_to_server(const std::string & host = redox::REDIS_DEFAULT_HOST,
-                           int port = redox::REDIS_DEFAULT_PORT,
-                           std::function< void(int)> connection_callback = nullptr);
-    /**
-     * @brief Disconnects from the server
-     */
-    void disconnect();
-
-    /**
-     * @brief Gets the value associated with the given key
-     * @param[in] key The key
-     * @return The value associated with the given key
-     */
-    std::string get(const std::string & key);
-
-    std::string get_job_json_string(const std::string & job_id);
-
-    int get_number_of_machines();
-
-    /**
-     * @brief Sets a key-value in the Redis server
-     * @param[in] key The key
-     * @param[in] value The value to associate with the key
-     * @return true if succeeded, false otherwise.
-     */
-    bool set(const std::string & key,
-             const std::string & value);
-
-    /**
-     * @brief Deletes a key-value association from the Redis server
-     * @param[in] key The key which should be deleted from the Redis server
-     * @return true if succeeded, false otherwise.
-     */
-    bool del(const std::string & key);
-
-    /**
-     * @brief Returns the instance key prefix.
-     * @return The instance key prefix.
-     */
-    std::string instance_key_prefix() const;
-
-    /**
-     * @brief Returns the key subparts separator.
-     * @return The key subparts separator.
-     */
-    std::string key_subparts_separator() const;
-
-public:
-    /**
-     * @brief Returns the key in the data storage corresponding to a JobIdentifier
-     * @param[in] workload_name The workload name
-     * @param[in] job_id The job id (without workload! prefix)
-     * @return The key in the data storage corresponding to a JobIdentifier
-     */
-    static std::string job_key(const std::string & workload_name,
-                               const std::string & job_id);
-
-    /**
-     * @brief Returns the key in the data storage corresponding to a profile
-     * @param[in] workload_name The workload name
-     * @param[in] profile_name The profile name
-     * @return The key in the data storage corresponding to a profile
-     */
-    static std::string profile_key(const std::string & workload_name,
-                                   const std::string & profile_name);
-
-private:
-    /**
-     * @brief Build a final key from a user-given key.
-     * @param[in] user_given_key The user-given key
-     * @return The real key corresponding to the user-given key
-     */
-    std::string build_key(const std::string & user_given_key) const;
-
-private:
-    bool _is_connected = false; //!< True if and only if the instance is connected to a Redis server
-    redox::Redox _redox; //!< The Redox instance
-    std::string _instance_key_prefix = ""; //!< The instance key prefix, which is added before to every user-given key.
-    std::string _key_subparts_separator = ":"; //!< The key subparts separator, which is put between the instance key prefix and the user-given key.
-};
diff --git a/src/decision.cpp b/src/decision.cpp
index 698be7dd56cee2825dce24a503178dac4d04c397..245b18dca2b7fcfed90fff0cbf3d1aa9dbf7bd98 100644
--- a/src/decision.cpp
+++ b/src/decision.cpp
@@ -3,7 +3,6 @@
 #include "network.hpp"
 #include "pempek_assert.hpp"
 #include "protocol.hpp"
-#include "data_storage.hpp"
 
 namespace n = network;
 using namespace std;
@@ -47,22 +46,10 @@ void SchedulingDecision::add_submit_job(const string & workload_name,
 {
     string complete_job_id = workload_name + '!' + job_id;
 
-    if (_redis_enabled)
-    {
-        string job_key = RedisStorage::job_key(workload_name, job_id);
-        string profile_key = RedisStorage::profile_key(workload_name, profile_name);
-
-        PPK_ASSERT_ERROR(_redis != nullptr);
-        _redis->set(job_key, job_json_description);
-        _redis->set(profile_key, profile_json_description);
-
-        _proto_writer->append_register_job(complete_job_id, date, "", "", send_profile);
-    }
-    else
-        _proto_writer->append_register_job(complete_job_id, date,
-                                         job_json_description,
-                                         profile_json_description,
-                                         send_profile);
+    _proto_writer->append_register_job(complete_job_id, date,
+                                       job_json_description,
+                                       profile_json_description,
+                                       send_profile);
 }
 
 void SchedulingDecision::add_submit_profile(const string &workload_name,
@@ -124,9 +111,3 @@ double SchedulingDecision::last_date() const
 {
     return _proto_writer->last_date();
 }
-
-void SchedulingDecision::set_redis(bool enabled, RedisStorage *redis)
-{
-    _redis_enabled = enabled;
-    _redis = redis;
-}
diff --git a/src/decision.hpp b/src/decision.hpp
index da1795f61a737b10ac2d7b6888992454d9c02dae..20deeb9f0d1ab14830dbea17ed9c72652e29bc4f 100644
--- a/src/decision.hpp
+++ b/src/decision.hpp
@@ -6,7 +6,6 @@
 #include <intervalset.hpp>
 
 class AbstractProtocolWriter;
-class RedisStorage;
 
 class SchedulingDecision
 {
@@ -60,10 +59,6 @@ public:
     std::string content(double date);
     double last_date() const;
 
-    void set_redis(bool enabled, RedisStorage * redis);
-
 private:
     AbstractProtocolWriter * _proto_writer = nullptr;
-    bool _redis_enabled = false;
-    RedisStorage * _redis = nullptr;
 };
diff --git a/src/isalgorithm.cpp b/src/isalgorithm.cpp
index f282844d63aa3bd59e4623ac97bf850f9596d84f..ff5557e7de82a2e83928df4706c1bcee8b925642 100644
--- a/src/isalgorithm.cpp
+++ b/src/isalgorithm.cpp
@@ -10,12 +10,6 @@ void ISchedulingAlgorithm::set_nb_machines(int nb_machines)
     _nb_machines = nb_machines;
 }
 
-void ISchedulingAlgorithm::set_redis(RedisStorage *redis)
-{
-    PPK_ASSERT_ERROR(_redis == nullptr);
-    _redis = redis;
-}
-
 void ISchedulingAlgorithm::clear_recent_data_structures()
 {
     _jobs_released_recently.clear();
diff --git a/src/isalgorithm.hpp b/src/isalgorithm.hpp
index d2d43f63e119cec91edea3391057b72e8d6d9c93..16b171f10587bb3db48bff13e5406f5fd9d12f2e 100644
--- a/src/isalgorithm.hpp
+++ b/src/isalgorithm.hpp
@@ -135,12 +135,6 @@ public:
      */
     void set_nb_machines(int nb_machines);
 
-    /**
-     * @brief Allows to set the RedisStorage instance
-     * @param[in,out] redis The RedisStorage instance
-     */
-    void set_redis(RedisStorage * redis);
-
     /**
      * @brief Clears data structures used to store what happened between two make_decisions calls
      * @details This function should be called between make_decisions calls!
@@ -155,7 +149,6 @@ protected:
     double _rjms_delay = 0.0;
     rapidjson::Document * _variant_options = nullptr;
     int _nb_machines = -1;
-    RedisStorage * _redis = nullptr;
     bool _no_more_static_job_to_submit_received = false;
     bool _no_more_external_event_to_occur_received = false;
 
diff --git a/src/json_workload.cpp b/src/json_workload.cpp
index 38b5c61e7100f6db5ea42db795687eb74b7bbf60..39c203066c523e4342e1a2c5494d86f6a7edb851 100644
--- a/src/json_workload.cpp
+++ b/src/json_workload.cpp
@@ -39,22 +39,6 @@ void Workload::set_rjms_delay(Rational rjms_delay)
     _rjms_delay = rjms_delay;
 }
 
-void Workload::add_job_from_redis(RedisStorage & storage, const string &job_id, double submission_time)
-{
-    string job_json_desc_str = storage.get_job_json_string(job_id);
-    PPK_ASSERT_ERROR(job_json_desc_str != "", "Cannot retrieve job '%s'", job_id.c_str());
-
-    Job * job = job_from_json_description_string(job_json_desc_str);
-    job->id = job_id;
-    job->submission_time = submission_time;
-
-    // Let's apply the RJMS delay on the job
-    job->walltime += _rjms_delay;
-
-    PPK_ASSERT_ERROR(_jobs.count(job_id) == 0, "Job '%s' already exists in the Workload", job_id.c_str());
-    _jobs[job_id] = job;
-}
-
 void Workload::add_job_from_json_object(const Value &object, const string & job_id, double submission_time)
 {
     Job * job = job_from_json_object(object);
diff --git a/src/json_workload.hpp b/src/json_workload.hpp
index d320a76a2c3f0c9c0af928dcba425575332d6849..b9befd25165290cb8eddac93640f1840616a5fe1 100644
--- a/src/json_workload.hpp
+++ b/src/json_workload.hpp
@@ -5,7 +5,6 @@
 #include <rapidjson/document.h>
 
 #include "exact_numbers.hpp"
-#include "data_storage.hpp"
 #include <intervalset.hpp>
 
 struct JobAlloc;
@@ -49,7 +48,6 @@ public:
 
     void set_rjms_delay(Rational rjms_delay);
 
-    void add_job_from_redis(RedisStorage &storage, const std::string & job_id, double submission_time);
     void add_job_from_json_object(const rapidjson::Value & object, const std::string & job_id, double submission_time);
     void add_job_from_json_description_string(const std::string & json_string, const std::string & job_id, double submission_time);
 
diff --git a/src/main.cpp b/src/main.cpp
index 6b99cf72e5f6e19779922057580d4ea1fddadd5c..1827cb7fdfdc3bfaaa901fa73844098baeee76db 100644
--- a/src/main.cpp
+++ b/src/main.cpp
@@ -18,7 +18,6 @@
 #include "network.hpp"
 #include "json_workload.hpp"
 #include "pempek_assert.hpp"
-#include "data_storage.hpp"
 
 #include "algo/conservative_bf.hpp"
 #include "algo/crasher.hpp"
@@ -349,11 +348,6 @@ void run(Network & n, ISchedulingAlgorithm * algo, SchedulingDecision & d,
 {
     bool simulation_finished = false;
 
-    // Redis creation
-    RedisStorage redis;
-    bool redis_enabled = false;
-    algo->set_redis(&redis);
-
     while (!simulation_finished)
     {
         string received_message;
@@ -393,19 +387,6 @@ void run(Network & n, ISchedulingAlgorithm * algo, SchedulingDecision & d,
                 {
                     nb_resources = event_data["nb_resources"].GetInt();
                 }
-                redis_enabled = event_data["config"]["redis-enabled"].GetBool();
-
-                if (redis_enabled)
-                {
-                    string redis_hostname = event_data["config"]["redis-hostname"].GetString();
-                    int redis_port = event_data["config"]["redis-port"].GetInt();
-                    string redis_prefix = event_data["config"]["redis-prefix"].GetString();
-
-                    redis.connect_to_server(redis_hostname, redis_port, nullptr);
-                    redis.set_instance_key_prefix(redis_prefix);
-                }
-
-                d.set_redis(redis_enabled, &redis);
 
                 algo->set_nb_machines(nb_resources);
                 algo->on_simulation_start(current_date, event_data["config"]);
@@ -419,10 +400,7 @@ void run(Network & n, ISchedulingAlgorithm * algo, SchedulingDecision & d,
             {
                 string job_id = event_data["job_id"].GetString();
 
-                if (redis_enabled)
-                    workload.add_job_from_redis(redis, job_id, current_date);
-                else
-                    workload.add_job_from_json_object(event_data["job"], job_id, current_date);
+                workload.add_job_from_json_object(event_data["job"], job_id, current_date);
 
                 algo->on_job_release(current_date, {job_id});
             }
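
With the Redis lookup gone, the job-release handling above always reads the job description inline from the event's "job" field. A minimal, self-contained sketch of that parsing step, using RapidJSON as the project already does (event shape, field names, and values are assumed from the hunks above; illustration only, not part of the patch):

#include <rapidjson/document.h>
#include <cassert>
#include <cstdio>
#include <string>

int main()
{
    // Shape assumed from the handler above: a "job_id" plus an
    // inline "job" description object (no Redis round-trip).
    const char * event_data = R"({
        "job_id": "w0!1",
        "job": {"id": "w0!1", "res": 2, "profile": "delay_10s", "walltime": 3600.0}
    })";

    rapidjson::Document doc;
    doc.Parse(event_data);
    assert(!doc.HasParseError());

    std::string job_id = doc["job_id"].GetString();
    const rapidjson::Value & job = doc["job"]; // inline description

    std::printf("job %s needs %d resource(s)\n",
                job_id.c_str(), job["res"].GetInt());
    return 0;
}
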
diff --git a/src/protocol.cpp b/src/protocol.cpp
index 9b3fd2d187d6a9f3a35d1015e52552af14cb84dd..d10e8b8b8e105d7af62b33eedcbeb0f287a32396 100644
--- a/src/protocol.cpp
+++ b/src/protocol.cpp
@@ -91,23 +91,6 @@ void JsonProtocolWriter::append_register_job(const string &job_id,
       "data": {
         "job_id": "w12!45",
       }
-    }
-    With redis: {
-      "timestamp": 10.0,
-      "type": "REGISTER_JOB",
-      "data": {
-        "job_id": "dyn!my_new_job",
-        "job":{
-          "profile": "delay_10s",
-          "res": 1,
-          "id": "my_new_job",
-          "walltime": 12.0
-        },
-        "profile":{
-          "type": "delay",
-          "delay": 10
-        }
-      }
     } */
 
     PPK_ASSERT_ERROR(date >= _last_date, "Date inconsistency");
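
Since the unconditional append_register_job call in decision.cpp now always passes the job and profile JSON, the emitted REGISTER_JOB messages carry both descriptions inline, i.e. they resemble the example deleted above rather than the bare-job_id form. A self-contained RapidJSON sketch of such a payload (field names taken from the deleted example; values are placeholders, illustration only, not part of the patch):

#include <rapidjson/document.h>
#include <rapidjson/stringbuffer.h>
#include <rapidjson/writer.h>
#include <cstdio>

int main()
{
    using namespace rapidjson;

    Document msg(kObjectType);
    Document::AllocatorType & alloc = msg.GetAllocator();

    msg.AddMember("timestamp", 10.0, alloc);
    msg.AddMember("type", "REGISTER_JOB", alloc);

    Value job(kObjectType);       // job description travels inline
    job.AddMember("profile", "delay_10s", alloc);
    job.AddMember("res", 1, alloc);
    job.AddMember("id", "my_new_job", alloc);
    job.AddMember("walltime", 12.0, alloc);

    Value profile(kObjectType);   // profile description travels inline
    profile.AddMember("type", "delay", alloc);
    profile.AddMember("delay", 10, alloc);

    Value data(kObjectType);
    data.AddMember("job_id", "dyn!my_new_job", alloc);
    data.AddMember("job", job, alloc);
    data.AddMember("profile", profile, alloc);
    msg.AddMember("data", data, alloc);

    StringBuffer buf;
    Writer<StringBuffer> writer(buf);
    msg.Accept(writer);
    std::printf("%s\n", buf.GetString());
    return 0;
}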