diff --git a/nifty5/minimization/descent_minimizers.py b/nifty5/minimization/descent_minimizers.py
index e128da04eee22b3b6c275a4b79159a4aa6a8a55f..cbbc9e3507aa08502266fd5d4dbec3e9d17cd45b 100644
--- a/nifty5/minimization/descent_minimizers.py
+++ b/nifty5/minimization/descent_minimizers.py
@@ -161,7 +161,7 @@ class NewtonCG(DescentMinimizer):
     """
 
     def __init__(self, controller, napprox=0, line_searcher=None, name=None,
-                 nreset=20):
+                 nreset=20, file_name=None):
         if line_searcher is None:
             line_searcher = LineSearch(preferred_initial_step_size=1.)
         super(NewtonCG, self).__init__(controller=controller,
@@ -169,6 +169,7 @@ class NewtonCG(DescentMinimizer):
         self._napprox = napprox
         self._name = name
         self._nreset = nreset
+        self._file_name = file_name
 
     def get_descent_direction(self, energy, f_k_minus_1):
         if f_k_minus_1 is None:
@@ -177,7 +178,8 @@ class NewtonCG(DescentMinimizer):
             alpha = 0.1
             ediff = alpha*(f_k_minus_1 - energy.value)
             ic = AbsDeltaEnergyController(
-                ediff, iteration_limit=200, name=self._name)
+                ediff, iteration_limit=200, name=self._name,
+                file_name=self._file_name)
         e = QuadraticEnergy(0*energy.position, energy.metric, energy.gradient)
         p = None
         if self._napprox > 1:
diff --git a/nifty5/minimization/iteration_controllers.py b/nifty5/minimization/iteration_controllers.py
index efaaa14e896966991f92d3bc627c8ce004c488c4..3c569032d30da674173d9eb95cfbf1a98cf4d2d3 100644
--- a/nifty5/minimization/iteration_controllers.py
+++ b/nifty5/minimization/iteration_controllers.py
@@ -15,9 +15,12 @@
 #
 # NIFTy is being developed at the Max-Planck-Institut fuer Astrophysik.
 
+from time import time
+
+import numpy as np
+
 from ..logger import logger
 from ..utilities import NiftyMeta
-import numpy as np
 
 
 class IterationController(metaclass=NiftyMeta):
@@ -94,16 +97,17 @@ class GradientNormController(IterationController):
     """
 
     def __init__(self, tol_abs_gradnorm=None, tol_rel_gradnorm=None,
-                 convergence_level=1, iteration_limit=None, name=None, p=2):
+                 convergence_level=1, iteration_limit=None, name=None, p=2,
+                 file_name=None):
         self._tol_abs_gradnorm = tol_abs_gradnorm
         self._tol_rel_gradnorm = tol_rel_gradnorm
         self._convergence_level = convergence_level
         self._iteration_limit = iteration_limit
         self._name = name
         self._p = p
+        self._file_name = file_name
 
     def start(self, energy):
-        self.energyhistory = []
         self._itcount = -1
         self._ccount = 0
         if self._tol_rel_gradnorm is not None:
@@ -137,7 +141,6 @@ class GradientNormController(IterationController):
                 "{}: Iteration #{} energy={:.6E} gradnorm={:.2E} clvl={}"
                 .format(self._name, self._itcount, energy.value,
                         self._norm(energy), self._ccount))
-        self.energyhistory.append(energy.value)
 
         # Are we done?
         if self._iteration_limit is not None:
@@ -149,6 +152,12 @@ class GradientNormController(IterationController):
         if self._ccount >= self._convergence_level:
             return self.CONVERGED
 
+        # Append timestamp, energy value and gradient norm to the log file
+        if self._file_name is not None:
+            with open(self._file_name, 'a') as f:
+                f.write('{} {} {}\n'.format(time(), energy.value,
+                                            self._norm(energy)))
+
         return self.CONTINUE
 
 
@@ -171,11 +180,12 @@ class GradInfNormController(IterationController):
     """
 
     def __init__(self, tol, convergence_level=1, iteration_limit=None,
-                 name=None):
+                 name=None, file_name=None):
         self._tol = tol
         self._convergence_level = convergence_level
         self._iteration_limit = iteration_limit
         self._name = name
+        self._file_name = file_name
 
     def start(self, energy):
         self._itcount = -1
@@ -208,6 +218,12 @@ class GradInfNormController(IterationController):
         if self._ccount >= self._convergence_level:
             return self.CONVERGED
 
+        # Append timestamp, energy value and convergence criterion to file
+        if self._file_name is not None:
+            with open(self._file_name, 'a') as f:
+                f.write('{} {} {}\n'.format(time(), energy.value,
+                                            crit))
+
         return self.CONTINUE
 
 
@@ -298,12 +314,13 @@ class AbsDeltaEnergyController(IterationController):
         printed after every iteration
     """
 
-    def __init__(self, deltaE, convergence_level=1,
-                 iteration_limit=None, name=None):
+    def __init__(self, deltaE, convergence_level=1, iteration_limit=None,
+                 name=None, file_name=None):
         self._deltaE = deltaE
         self._convergence_level = convergence_level
         self._iteration_limit = iteration_limit
         self._name = name
+        self._file_name = file_name
 
     def start(self, energy):
         self._itcount = -1
@@ -342,4 +359,9 @@ class AbsDeltaEnergyController(IterationController):
         if self._ccount >= self._convergence_level:
             return self.CONVERGED
 
+        # Append timestamp, energy value and energy difference to file
+        if self._file_name is not None:
+            with open(self._file_name, 'a') as f:
+                f.write('{} {} {}\n'.format(time(), energy.value, diff))
+
         return self.CONTINUE
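
Usage sketch (not part of the patch): the new file_name keyword is accepted by
GradientNormController, GradInfNormController and AbsDeltaEnergyController, and
NewtonCG forwards it to the AbsDeltaEnergyController it builds internally for
the inner CG runs. Each iteration appends one line of the form
"<wall-clock time> <energy value> <convergence criterion>" to the given file.
The import paths below are the two modules touched by this patch; the output
file names are placeholders.

    from nifty5.minimization.descent_minimizers import NewtonCG
    from nifty5.minimization.iteration_controllers import GradientNormController

    # Outer Newton loop: each iteration appends
    # "<time> <energy> <gradient norm>" to newton_history.txt.
    ic = GradientNormController(iteration_limit=100, name='Newton',
                                file_name='newton_history.txt')

    # The NewtonCG keyword only affects the internal AbsDeltaEnergyController,
    # which logs "<time> <energy> <energy difference>" for the inner CG solves.
    minimizer = NewtonCG(ic, file_name='newton_cg_inner.txt')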