diff --git a/nifty5/library/adjust_variances.py b/nifty5/library/adjust_variances.py
index 7ef27c586fde0381f4e7797f4028db07f338f304..4a1802bb2fe099c57e5141895972957c72ccb0f7 100644
--- a/nifty5/library/adjust_variances.py
+++ b/nifty5/library/adjust_variances.py
@@ -1,13 +1,34 @@
-from ..operators.energy_operators import InverseGammaLikelihood,Hamiltonian
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# Copyright(C) 2013-2018 Max-Planck-Society
+#
+# NIFTy is being developed at the Max-Planck-Institut fuer Astrophysik
+# and financially supported by the Studienstiftung des deutschen Volkes.
+
+from __future__ import absolute_import, division, print_function
+
+from ..compat import *
+from ..operators.energy_operators import Hamiltonian, InverseGammaLikelihood
 from ..operators.scaling_operator import ScalingOperator
 
-def make_adjust_variances(a,xi,position,samples=[],scaling=None,ic_samp=None):
+
+def make_adjust_variances(a, xi, position, samples=[], scaling=None, ic_samp=None):
     """ Creates a Hamiltonian for constant likelihood optimizations.
-
-    Constructs a Hamiltonian to solve constant likelihood optimizations of the form
-    phi = a * xi
-    under the constraint that phi remains constant.
-
+
+    Constructs a Hamiltonian to solve constant likelihood optimizations of the
+    form phi = a * xi under the constraint that phi remains constant.
+
     Parameters
     ----------
     a : Operator
@@ -29,19 +50,19 @@ def make_adjust_variances(a,xi,position,samples=[],scaling=None,ic_samp=None):
         A Hamiltonian that can be used for further minimization
     """
 
-    d = a * xi
+    d = a*xi
     d = (d.conjugate()*d).real
     n = len(samples)
-    if n>0:
+    if n > 0:
         d_eval = 0.
         for i in range(n):
-            d_eval = d_eval + d(position+samples[i])
-        d_eval = d_eval / n
+            d_eval = d_eval + d(position + samples[i])
+        d_eval = d_eval/n
     else:
         d_eval = d(position)
 
     x = (a.conjugate()*a).real
     if scaling is not None:
-        x = ScalingOperator(scaling,x.target)(x)
+        x = ScalingOperator(scaling, x.target)(x)
 
-    return Hamiltonian(InverseGammaLikelihood(x,d_eval),ic_samp=ic_samp)
\ No newline at end of file
+    return Hamiltonian(InverseGammaLikelihood(x, d_eval), ic_samp=ic_samp)
diff --git a/test/test_energies/test_consistency.py b/test/test_energies/test_consistency.py
index d5f38ebffde894da77bdcd2fe6027633098e9363..91a090c75e1941fb605d9cae0ca3260e6414f7ff 100644
--- a/test/test_energies/test_consistency.py
+++ b/test/test_energies/test_consistency.py
@@ -56,15 +56,14 @@ class Energy_Tests(unittest.TestCase):
     #     energy = ift.QuadraticEnergy(s[0], ift.makeOp(s[1]), s[2])
     #     ift.extra.check_value_gradient_consistency(energy)
 
-    @expand(product(
-        [ift.GLSpace(15),
-         ift.RGSpace(64, distances=.789),
-         ift.RGSpace([32, 32], distances=.789)],
-        [4, 78, 23]
-    ))
+    @expand(
+        product([
+            ift.GLSpace(15),
+            ift.RGSpace(64, distances=.789),
+            ift.RGSpace([32, 32], distances=.789)
+        ], [4, 78, 23]))
     def testInverseGammaLikelihood(self, space, seed):
-        model = self.make_model(
-            space_key='s1', space=space, seed=seed)['s1']
+        model = self.make_model(space_key='s1', space=space, seed=seed)['s1']
         d = np.random.normal(10, size=space.shape)**2
         d = ift.Field.from_global_data(space, d)
         energy = ift.InverseGammaLikelihood(ift.exp, d)
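
For orientation, a minimal usage sketch of the make_adjust_variances helper touched by this patch is given below. The operators a_op and xi_op, the position field, and the GradientNormController settings are illustrative placeholders under the stated assumptions, not anything defined in this patch; the concrete model construction depends on the surrounding NIFTy5 code.

import nifty5 as ift
from nifty5.library.adjust_variances import make_adjust_variances

# Placeholders (assumptions): in a real run these come from the model,
# e.g. an amplitude operator and an excitation operator whose product
# phi = a*xi is the field of interest, plus the current position.
a_op = ...        # Operator giving the amplitude when evaluated at a position
xi_op = ...       # Operator giving the excitations when evaluated at a position
position = ...    # Field/MultiField at which the problem is evaluated

# Iteration controller passed through as ic_samp (assumed settings).
ic_samp = ift.GradientNormController(iteration_limit=100)

# Build the Hamiltonian for the constant-likelihood subproblem; with
# samples=[] the data term d = |a*xi|^2 is evaluated at `position` only,
# and scaling=None leaves the likelihood unrescaled.
ham = make_adjust_variances(a_op, xi_op, position, samples=[],
                            scaling=None, ic_samp=ic_samp)

The returned ham is a Hamiltonian wrapping an InverseGammaLikelihood, as in the patched return statement, so it can subsequently be handed to the usual NIFTy minimization machinery.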