diff --git a/nifty/minimization/descent_minimizer.py b/nifty/minimization/descent_minimizer.py
index 5d224b3687da61842335e5e89587d467f1a9b746..f499a4ee557640331522167c13725812a1994a4e 100644
--- a/nifty/minimization/descent_minimizer.py
+++ b/nifty/minimization/descent_minimizer.py
@@ -146,14 +146,14 @@ class DescentMinimizer(Loggable, object):
                 break
 
             # current position is encoded in energy object
-            descend_direction = self.get_descend_direction(energy)
+            descent_direction = self.get_descent_direction(energy)
 
             # compute the step length, which minimizes energy.value along the
             # search direction
             step_length, f_k, new_energy = \
                 self.line_searcher.perform_line_search(
                                                energy=energy,
-                                               pk=descend_direction,
+                                               pk=descent_direction,
                                                f_k_minus_1=f_k_minus_1)
             f_k_minus_1 = energy.value
 
@@ -195,5 +195,5 @@ class DescentMinimizer(Loggable, object):
         return energy, convergence
 
     @abc.abstractmethod
-    def get_descend_direction(self, energy):
+    def get_descent_direction(self, energy):
         raise NotImplementedError
diff --git a/nifty/minimization/line_searching/line_search.py b/nifty/minimization/line_searching/line_search.py
index e17f5e787091f47c6fade5068aadbde747fae0db..6debbc5d2c18dfedf644f70a715feda7296d0ead 100644
--- a/nifty/minimization/line_searching/line_search.py
+++ b/nifty/minimization/line_searching/line_search.py
@@ -25,31 +25,31 @@ from nifty import LineEnergy
 
 class LineSearch(Loggable, object):
     """Class for determining the optimal step size along some descent direction.
-    
+
     Initialize the line search procedure which can be used by a specific line
-    search method. Its finds the step size in a specific direction in the
+    search method. It finds the step size in a specific direction in the
     minimization process.
-    
+
     Attributes
     ----------
     line_energy : LineEnergy Object
         LineEnergy object from which we can extract energy at a specific point.
-    f_k_minus_1 : Field
-        Value of the field at the k-1 iteration of the line search procedure.
-    prefered_initial_step_size : float
+    f_k_minus_1 : float
+        Value of the energy at the k-1 iteration of the line search procedure.
+    preferred_initial_step_size : float
         Initial guess for the step length.
-    
+
     """
-    
+
     __metaclass__ = abc.ABCMeta
 
     def __init__(self):
 
-        
+
 
         self.line_energy = None
         self.f_k_minus_1 = None
-        self.prefered_initial_step_size = None
+        self.preferred_initial_step_size = None
 
     def _set_line_energy(self, energy, pk, f_k_minus_1=None):
         """Set the coordinates for a new line search.
@@ -58,13 +58,13 @@ class LineSearch(Loggable, object):
         ----------
         energy : Energy object
             Energy object from which we can calculate the energy, gradient and
-            curvature at a specific point.        
+            curvature at a specific point.
         pk : Field
-            Unit vector pointing into the search direction.
+            Unit vector pointing along the search direction.
         f_k_minus_1 : float
-            Value of the fuction (energy) which will be minimized at the k-1 
+            Value of the function (energy) which will be minimized at the k-1
             iteration of the line search procedure. (Default: None)
-            
+
         """
         self.line_energy = LineEnergy(position=0.,
                                       energy=energy,
diff --git a/nifty/minimization/line_searching/line_search_strong_wolfe.py b/nifty/minimization/line_searching/line_search_strong_wolfe.py
index 1d44f05225b19e498e224222aacff9145da1c204..9a4941e34de94096af886aa67419f1bb1c286257 100644
--- a/nifty/minimization/line_searching/line_search_strong_wolfe.py
+++ b/nifty/minimization/line_searching/line_search_strong_wolfe.py
@@ -120,8 +120,9 @@ class LineSearchStrongWolfe(LineSearch):
 
         # set alphas
         alpha0 = 0.
-        if self.prefered_initial_step_size is not None:
-            alpha1 = self.prefered_initial_step_size
+        if self.preferred_initial_step_size is not None:
+            alpha1 = self.preferred_initial_step_size
         elif old_phi_0 is not None and phiprime_0 != 0:
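+            # guess the step from the previous energy decrease, capped at 1.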
             alpha1 = min(1.0, 1.01*2*(phi_0 - old_phi_0)/phiprime_0)
             if alpha1 < 0:
diff --git a/nifty/minimization/relaxed_newton.py b/nifty/minimization/relaxed_newton.py
index 5d7767caecd1f8ef374e7f2468b6f93deda55182..508a9eb0978e953ce3df5ba23e977b6b2002477b 100644
--- a/nifty/minimization/relaxed_newton.py
+++ b/nifty/minimization/relaxed_newton.py
@@ -32,9 +32,9 @@ class RelaxedNewton(DescentMinimizer):
                                 convergence_level=convergence_level,
                                 iteration_limit=iteration_limit)
 
-        self.line_searcher.prefered_initial_step_size = 1.
+        self.line_searcher.preferred_initial_step_size = 1.
 
-    def get_descend_direction(self, energy):
+    def get_descent_direction(self, energy):
         """ Calculates the descent direction according to a Newton scheme.
 
         The descent direction is determined by weighting the gradient at the
@@ -50,12 +50,10 @@
 
         Returns
         -------
-        descend_direction : Field
+        descent_direction : Field
            Returns the descent direction with proposed step length. In a
            quadratic potential this corresponds to the optimal step.
 
         """
-        gradient = energy.gradient
-        curvature = energy.curvature
-        descend_direction = curvature.inverse_times(gradient)
-        return descend_direction * -1
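+        # Newton direction: descent_direction = -curvature^{-1}(gradient)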
+        return -energy.curvature.inverse_times(energy.gradient)
diff --git a/nifty/minimization/steepest_descent.py b/nifty/minimization/steepest_descent.py
index 97281588d5b3b63a37c7a7e5e3abb263f6f9312d..b72dda781a19224e8ef52853d08e7cb73fd78435 100644
--- a/nifty/minimization/steepest_descent.py
+++ b/nifty/minimization/steepest_descent.py
@@ -20,7 +20,7 @@ from .descent_minimizer import DescentMinimizer
 
 
 class SteepestDescent(DescentMinimizer):
-    def get_descend_direction(self, energy):
+    def get_descent_direction(self, energy):
         """ Implementation of the steepest descent minimization scheme.
 
         Also known as 'gradient descent'. This algorithm simply follows the
@@ -34,10 +34,9 @@ class SteepestDescent(DescentMinimizer):
 
         Returns
         -------
-        descend_direction : Field
+        descent_direction : Field
             Returns the descent direction.
 
         """
 
-        descend_direction = energy.gradient
-        return descend_direction * -1
+        return -energy.gradient
diff --git a/nifty/minimization/vl_bfgs.py b/nifty/minimization/vl_bfgs.py
index 1bf1b7a47c12bacb11f03fbd0ab1e52a41f979e6..a7a4770d56b6b366a0056027ed66c1f211ed182c 100644
--- a/nifty/minimization/vl_bfgs.py
+++ b/nifty/minimization/vl_bfgs.py
@@ -40,7 +40,7 @@ class VL_BFGS(DescentMinimizer):
         self._information_store = None
         return super(VL_BFGS, self).__call__(energy)
 
-    def get_descend_direction(self, energy):
+    def get_descent_direction(self, energy):
         """Implementation of the Vector-free L-BFGS minimization scheme.
 
         Find the descent direction by using the inverse Hessian.
@@ -57,7 +57,7 @@ class VL_BFGS(DescentMinimizer):
 
         Returns
         -------
-        descend_direction : Field
+        descent_direction : Field
             Returns the descent direction.
 
         References
@@ -80,11 +80,12 @@
         b = self._information_store.b
         delta = self._information_store.delta
 
-        descend_direction = delta[0] * b[0]
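+        # build the descent direction as the sum over delta[i] * b[i]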
+        descent_direction = delta[0] * b[0]
         for i in xrange(1, len(delta)):
-            descend_direction += delta[i] * b[i]
+            descent_direction += delta[i] * b[i]
 
-        return descend_direction
+        return descent_direction
 
 
 class InformationStore(object):