ift issueshttps://gitlab.mpcdf.mpg.de/groups/ift/-/issues2024-03-25T09:30:45Zhttps://gitlab.mpcdf.mpg.de/ift/nifty/-/issues/370Implement Evidence Calculation2024-03-25T09:30:45ZGordian EdenhoferImplement Evidence CalculationImplement the evidence calculation in NIFTy.re akin to NIFTy using the existing tools in `NIFTy.re.num.lanczos`.
* [ ] Enable `NIFTy.re.num.lanczos` to work with pytrees
* [ ] ...Implement the evidence calculation in NIFTy.re akin to NIFTy using the existing tools in `NIFTy.re.num.lanczos`.
* [ ] Enable `NIFTy.re.num.lanczos` to work with pytrees
* [ ] ...Matteo GuardianiMatteo Guardiani2023-12-31https://gitlab.mpcdf.mpg.de/ift/nifty/-/issues/369Thread safety in optimize_kl2023-11-07T11:27:57ZLandman BesterThread safety in optimize_klThe following code attempts to run a number of optimize_kl instances in parallel, first with a process pool and then with a thread pool.
```python
import numpy as np
import nifty8 as ift
import concurrent.futures as cf
def function_ca...The following code attempts to run a number of optimize_kl instances in parallel, first with a process pool and then with a thread pool.
```python
import numpy as np
import nifty8 as ift
import concurrent.futures as cf
def function_calling_nifty(seed):
# set up correlated field model
npix = 512
pospace = ift.RGSpace((npix,))
spfreq = ift.RGSpace((npix,))
cfmaker = ift.CorrelatedFieldMaker('amplitude')
cfmaker.add_fluctuations(spfreq, (0.1, 1e-2), None, None, (-3, 1),
'f')
cfmaker.set_amplitude_total_offset(0., (1e-2, 1e-6))
cf = cfmaker.finalize()
normalized_amp = cfmaker.get_normalized_amplitudes()
pspec = normalized_amp[0]**2
# signal + fake data
signal = ift.exp(cf.real)
mask = np.random.binomial(1, 0.5, size=npix).astype(bool)
tmp = ift.makeField(signal.target, ~mask)
R = ift.MaskOperator(tmp)
signal_response = R(signal)
mock_position = ift.from_random(signal_response.domain, 'normal')
dspace = R.target
noise = .001
N = ift.ScalingOperator(dspace, noise, float)
data = signal_response(mock_position) + N.draw_sample()
# Minimization parameters
ic_sampling = ift.AbsDeltaEnergyController(deltaE=0.01, iteration_limit=50)
ic_newton = ift.AbsDeltaEnergyController(deltaE=0.01, iteration_limit=15)
minimizer = ift.NewtonCG(ic_newton, enable_logging=False)
# Set up likelihood energy and information Hamiltonian
likelihood_energy = ift.GaussianEnergy(data, inverse_covariance=N.inverse) @ signal_response
n_samples = 3
n_iterations = 5
with ift.random.Context(seed):
samples = ift.optimize_kl(likelihood_energy,
n_iterations,
n_samples,
minimizer,
ic_sampling,
None, # for GeoVI
plot_energy_history=False,
plot_minisanity_history=False)
return samples
if __name__=='__main__':
# processes
print("___________________________________Running with processes___________________________________")
nrun = 8
futures = []
with cf.ProcessPoolExecutor(max_workers=nrun) as executor:
for i in range(nrun):
future = executor.submit(function_calling_nifty, i)
futures.append(future)
for f in cf.as_completed(futures):
samples = f.result()
# threads
print("___________________________________Running with threads___________________________________")
futures = []
with cf.ThreadPoolExecutor(max_workers=nrun) as executor:
for i in range(nrun):
future = executor.submit(function_calling_nifty, i)
futures.append(future)
for f in cf.as_completed(futures):
samples = f.result()
```
The first run with processes is successful but the second one falls over with
```bash
Traceback (most recent call last):
File "/home/landman/software/scratch/nifty_threadsafety.py", line 43, in function_calling_nifty
samples = ift.optimize_kl(likelihood_energy,
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/landman/venvs/qcal/lib/python3.11/site-packages/nifty8/minimization/optimize_kl.py", line 368, in optimize_kl
e = SampledKLEnergy(
^^^^^^^^^^^^^^^^
File "/home/landman/venvs/qcal/lib/python3.11/site-packages/nifty8/minimization/kl_energies.py", line 290, in SampledKLEnergy
sample_list = draw_samples(position, ham_sampling, minimizer_sampling, n_samples,
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/landman/venvs/qcal/lib/python3.11/site-packages/nifty8/minimization/kl_energies.py", line 142, in draw_samples
with random.Context(sseq[i]):
File "/home/landman/venvs/qcal/lib/python3.11/site-packages/nifty8/random.py", line 290, in __exit__
raise RuntimeError("inconsistent RNG usage detected")
RuntimeError: inconsistent RNG usage detected
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/landman/software/scratch/nifty_threadsafety.py", line 78, in <module>
samples = f.result()
^^^^^^^^^^
File "/usr/lib/python3.11/concurrent/futures/_base.py", line 449, in result
return self.__get_result()
^^^^^^^^^^^^^^^^^^^
File "/usr/lib/python3.11/concurrent/futures/_base.py", line 401, in __get_result
raise self._exception
File "/usr/lib/python3.11/concurrent/futures/thread.py", line 58, in run
result = self.fn(*self.args, **self.kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/landman/software/scratch/nifty_threadsafety.py", line 42, in function_calling_nifty
with ift.random.Context(seed):
File "/home/landman/venvs/qcal/lib/python3.11/site-packages/nifty8/random.py", line 290, in __exit__
raise RuntimeError("inconsistent RNG usage detected")
RuntimeError: inconsistent RNG usage detected
```
I tried using the Context class as suggested in ```src/random.py``` but this doesn't seem to have the desired effect. Any suggestions on how to get this working with a thread pool would be much appreciated.https://gitlab.mpcdf.mpg.de/ift/nifty/-/issues/368Dictionary with .update instead of | for python 3.7 compatibility2023-10-24T14:46:50ZVincent EberleDictionary with .update instead of | for python 3.7 compatibility# Broken Interface for Python < 3.9 and Jax installed
If Jax is installed the Correlated Field of nifty tries to make an instance of the nifty.re.correlated field using |= which is a python 3.9 syntax. This breaks the interface for user...# Broken Interface for Python < 3.9 and Jax installed
If Jax is installed the Correlated Field of nifty tries to make an instance of the nifty.re.correlated field using |= which is a python 3.9 syntax. This breaks the interface for users having an older python version and jax as well.
IMHO we should go to the .update syntax instead. If we want to use this syntax, we can consider updating the minimum Python version required for using NIFTy8.
@gedenhof @wmarg @pfrank
```python
fluctuations = WrappedCall(fluctuations, name=prefix + "fluctuations")
ptree = fluctuations.domain
loglogavgslope = WrappedCall(loglogavgslope, name=prefix + "loglogavgslope")
ptree |= loglogavgslope.domain
if flexibility is not None:
flexibility = WrappedCall(flexibility, name=prefix + "flexibility")
ptree |= flexibility.domain
# Register the parameters for the spectrum
_safe_assert(log_vol is not None)
_safe_assert(rel_log_mode_len.ndim == log_vol.ndim == 1)
ptree |= {prefix + "spectrum": ShapeWithDtype((2, ) + log_vol.shape)}
if asperity is not None:
asperity = WrappedCall(asperity, name=prefix + "asperity")
ptree |= asperity.domain
```
[Go to File](src/re/correlated_field.py#L133-146)Gordian EdenhoferGordian Edenhoferhttps://gitlab.mpcdf.mpg.de/ift/nifty/-/issues/367Inverse Hartley transform for creating a GP covariance kernel based on a Four...2023-08-21T23:36:22ZJohannes BuchnerInverse Hartley transform for creating a GP covariance kernel based on a Fourier power spectrumThe demo of https://gitlab.mpcdf.mpg.de/ift/nifty/-/blob/NIFTy_8/demos/re/hmc_wiener_filter.py
uses `jft.correlated_field.hartley` to create a covariance kernel from a powerlaw Fourier power spectrum.
My understanding is that an inverse...The demo of https://gitlab.mpcdf.mpg.de/ift/nifty/-/blob/NIFTy_8/demos/re/hmc_wiener_filter.py
uses `jft.correlated_field.hartley` to create a covariance kernel from a powerlaw Fourier power spectrum.
My understanding is that an inverse Hartley transform would be needed to make the connection, since the
Fourier power spectrum is obtained by a Fourier transform.
However, in https://gitlab.mpcdf.mpg.de/ift/nifty/-/blob/NIFTy_8/src/re/correlated_field.py#L23
the "hartley" function is implemented with a (forward) FFT and summing real and imaginary components.
Why is that equivalent to an inverse Hartley transform?https://gitlab.mpcdf.mpg.de/ift/nifty/-/issues/365Update PyPi minor release2024-02-18T14:50:58ZMatteo GuardianiUpdate PyPi minor releaseThe last minor NIFTy8 release on PyPi (https://pypi.org/project/nifty8/#history) is 8.4 (from 29.9.22).
Since many commits have been merged in between (including many bugfixs), it would be good to release a new minor stable version to ke...The last minor NIFTy8 release on PyPi (https://pypi.org/project/nifty8/#history) is 8.4 (from 29.9.22).
Since many commits have been merged in between (including many bugfixes), it would be good to release a new minor stable version to keep the PyPI version more or less up to date.
@pfrank @gedenhof @mtrhttps://gitlab.mpcdf.mpg.de/ift/nifty/-/issues/363Wrong result of FFT.jax_expr2023-04-19T08:07:45ZJakob RothWrong result of FFT.jax_exprThe inverse application of the `FFT.jax_expr` seems to give wrong results. Here is a snippet to reproduce the error:
```
import numpy as np
import nifty8 as ift
sp = ift.RGSpace((100, 100), distances=0.1)
FFT = ift.FFTOperator(sp)
in...The inverse application of the `FFT.jax_expr` seems to give wrong results. Here is a snippet to reproduce the error:
```
import numpy as np
import nifty8 as ift
sp = ift.RGSpace((100, 100), distances=0.1)
FFT = ift.FFTOperator(sp)
inp = ift.from_random(FFT.domain)
res_nifty = FFT(inp)
res_jax = FFT.jax_expr(inp.val)
assert(np.allclose(res_nifty.val, res_jax)) # works
res_nifty_inv = FFT.inverse(res_nifty)
res_jax_inv = FFT.jax_expr(inp.val, inverse=True)
assert(np.allclose(res_nifty_inv.val, res_jax_inv)) # fails
res_nifty_inv = FFT.inverse(res_nifty)
res_jax_inv = FFT.inverse.jax_expr(inp.val, inverse=True)
assert(np.allclose(res_nifty_inv.val, res_jax_inv)) # fails
assert(np.allclose(res_nifty_inv.val, inp.val)) # works
assert(np.allclose(res_jax, inp.val)) # fails
```https://gitlab.mpcdf.mpg.de/ift/nifty/-/issues/361Failing docs pipeline2023-11-09T12:51:08ZPhilipp FrankFailing docs pipelineRecently the docs stage of the pipeline fails due to some ownership issues regarding the docs folder (see https://gitlab.mpcdf.mpg.de/ift/nifty/-/jobs/2023129#L497).
I am not entirely sure what is going on here, I suspect it may be somet...Recently the docs stage of the pipeline fails due to some ownership issues regarding the docs folder (see https://gitlab.mpcdf.mpg.de/ift/nifty/-/jobs/2023129#L497).
I am not entirely sure what is going on here, I suspect it may be something related to using a cached docker image.
@veberle, @gedenhof: Do you have any thoughts on this?https://gitlab.mpcdf.mpg.de/ift/nifty/-/issues/358Mean / Variance confusion in save_fits function of optimize_KL2023-01-13T09:35:36ZVincent EberleMean / Variance confusion in save_fits function of optimize_KLWhile finishing some plots I was surprised by a very high standard deviation and wanted to check if this could be true.
Then I found this:
```python
if mean or std:
m, s = self.sample_stat(op)
if mean:
se...While finishing some plots I was surprised by a very high standard deviation and wanted to check if this could be true.
Then I found this:
```python
if mean or std:
m, s = self.sample_stat(op)
if mean:
self._save_fits_2d(m, file_name_base + "_mean.fits", overwrite)
if std:
self._save_fits_2d(s, file_name_base + "_std.fits", overwrite)
```
here: https://gitlab.mpcdf.mpg.de/ift/nifty/-/blob/NIFTy_8/src/minimization/sample_list.py#L224
So in the 'if std:' we save something called standard deviation.
But sample_stat returns the mean and the variance:
```python
def sample_stat(self, op=None):
"""Compute mean and variance of samples after applying `op`.
Parameters
----------
op : callable or None
Callable that is applied to each item in the :class:`SampleListBase`
before it is used to compute mean and variance.
Returns
-------
tuple
A tuple with two items: the mean and the variance.
"""
from ..probing import StatCalculator
if self.n_samples == 1:
res = self.average(op)
return res, 0*res
sc = StatCalculator()
for ss in self.iterator(op):
sc.add(ss)
return sc.mean, sc.var
```
here: https://gitlab.mpcdf.mpg.de/ift/nifty/-/blob/NIFTy_8/src/minimization/sample_list.py#L348
So I think we should take the square root here.... @gedenhof @pfrank
I address also @parras, since he worked a lot on the whole optmize-kl and sample-list part of nifty.https://gitlab.mpcdf.mpg.de/ift/nifty/-/issues/357Un-deprecate CMAP registration2023-01-02T12:50:38ZGordian EdenhoferUn-deprecate CMAP registration```
test/test_plot.py::test_plots
/builds/ift/nifty/nifty8/plot.py:266: PendingDeprecationWarning: The register_cmap function will be deprecated in a future version. Use ``matplotlib.colormaps.register(name)`` instead.
```
See https:/...```
test/test_plot.py::test_plots
/builds/ift/nifty/nifty8/plot.py:266: PendingDeprecationWarning: The register_cmap function will be deprecated in a future version. Use ``matplotlib.colormaps.register(name)`` instead.
```
See https://gitlab.mpcdf.mpg.de/ift/nifty/-/jobs/1948616#L528https://gitlab.mpcdf.mpg.de/ift/nifty/-/issues/356Failing docs release2023-01-04T12:00:55ZGordian EdenhoferFailing docs releasehttps://gitlab.mpcdf.mpg.de/ift/nifty/-/jobs/1977018
@veberle As documentation master: Can you reproduce the failing documentation or do you have an idea of what is broken? For me the documentation builds just fine :|https://gitlab.mpcdf.mpg.de/ift/nifty/-/jobs/1977018
@veberle As documentation master: Can you reproduce the failing documentation or do you have an idea of what is broken? For me the documentation builds just fine :|https://gitlab.mpcdf.mpg.de/ift/nifty/-/issues/355Let's allow `assert`s again2022-12-08T19:15:49ZGordian EdenhoferLet's allow `assert`s again`assert`s in python are advertised as quick-and-easy checking tool to assert self-consistency within a program. They are used all over the place in the python eco-system and can help code be more readable (by making assumptions explicit)...`assert`s in python are advertised as quick-and-easy checking tool to assert self-consistency within a program. They are used all over the place in the python eco-system and can help code be more readable (by making assumptions explicit) and avoid errors. The latter being the main reason for their existence.
However, `assert`s are also sometimes misused to validate user-input. This is bad because `assert`s can be disabled via a python optimization flag. To err on the side of caution, we decided to ban the use of `assert`s in NIFTy.
I would like to revert this decision. I think the two upsides already mentioned - improved readability and avoiding errors - far outweigh the potential harm (basically **no one** enables these python optimizations [1]). I use a lot of `assert`s in my own code base and on multiple occasions had to remove asserts from code submitted to NIFTy to conform with this policy. In short: IMHO NIFTy does not get better by disallowing `assert`s.
@all What are your thoughts on this?
[1] `python -O` enables basic optimization and removes all `assert`s. With the `-O` flag, python yields `.pyo` files instead of `.pyc` files. On my machine (with many, many python packages) not a single package installed a `.pyo` file.https://gitlab.mpcdf.mpg.de/ift/nifty/-/issues/354Nifty Documentation 7->82022-12-01T10:32:32ZVincent EberleNifty Documentation 7->8# Changes in the Documentation
We have been thinking about changing the default [NIFTy documentation](http://ift.pages.mpcdf.de/nifty/) to nifty8 as it is somehow the stable version now.
I think of two different scenarios:
1) We drop th...# Changes in the Documentation
We have been thinking about changing the default [NIFTy documentation](http://ift.pages.mpcdf.de/nifty/) to nifty8 as it is somehow the stable version now.
I think of two different scenarios:
1) We drop the online nifty7 documentation
2) We move the nifty8 index.html in to the bottom directory and the nifty7 into a 'nifty7' directory. (exactly the other way around as it is now)
Neither is much work. Personally, I would prefer the first option.
@gedenhof @pfrank @mtrVincent EberleVincent Eberlehttps://gitlab.mpcdf.mpg.de/ift/nifty/-/issues/353Expensive sanity checks in optimize_kl.py?2022-11-09T13:10:23ZMartin ReineckeExpensive sanity checks in optimize_kl.py?The file `optimize_KL.py` contains several calls to `check_MPI_equality`, to ensure that certain objects are identical on all MPI tasks. While this is definitely useful, some of these calls try to compare objects that can become huge (wh...The file `optimize_KL.py` contains several calls to `check_MPI_equality`, to ensure that certain objects are identical on all MPI tasks. While this is definitely useful, some of these calls try to compare objects that can become huge (which leads to a lot of additional memory/time consumption) and will in extreme cases exceed 2GB, causing MPI failures.
Would it be possible to either remove these cases or simplify the comparison somehow, e.g. by hashing the data and comparing the hash across MPI tasks?
@pfrank, @gedenhof, @kjakohttps://gitlab.mpcdf.mpg.de/ift/nifty/-/issues/352Minisanity history broken if likelihoods with different names are used2022-05-13T14:06:13ZPhilipp Arrasparras@mpa-garching.mpg.deMinisanity history broken if likelihoods with different names are usedReproducer:
```python
import nifty8 as ift
dom = ift.RGSpace(2)
lh0 = ift.GaussianEnergy(domain=dom).ducktape("inp")
lh1 = ift.GaussianEnergy(domain=dom).ducktape("inp")
lh0.name = "First lh"
lh1.name = "Second lh"
ic = ift.GradientNo...Reproducer:
```python
import nifty8 as ift
dom = ift.RGSpace(2)
lh0 = ift.GaussianEnergy(domain=dom).ducktape("inp")
lh1 = ift.GaussianEnergy(domain=dom).ducktape("inp")
lh0.name = "First lh"
lh1.name = "Second lh"
ic = ift.GradientNormController(iteration_limit=2)
mini = ift.SteepestDescent(ic)
def lhs(iglobal):
if iglobal == 0:
return lh0
return lh1
ift.optimize_kl(lhs, 2, 0, mini, ic, None, overwrite=True)
```
Output:
```
Iteration limit reached. Assuming convergence
========================================================
reduced χ² mean # dof
--------------------------------------------------------
Data residuals
First lh 0.0 0.0 2
Latent space
inp 0.0 0.0 2
========================================================
/home/philipp/git/nifty/nifty8/plot.py:342: UserWarning: Attempting to set identical left == right == 0.0 results in singular transformations; automatically expanding.
ax.set_xlim((mi-delta, ma+delta))
========================================================
reduced χ² mean # dof
--------------------------------------------------------
Data residuals
Second lh 0.0 0.0 2
Latent space
inp 0.0 0.0 2
========================================================
Traceback (most recent call last):
File "/home/philipp/asdf.py", line 18, in <module>
ift.optimize_kl(lhs, 2, 0, mini, ic, None, overwrite=True)
File "/home/philipp/git/nifty/nifty8/minimization/optimize_kl.py", line 392, in optimize_kl
_minisanity(likelihood_energy, iglobal, sl, comm, plot_minisanity_history)
File "/home/philipp/git/nifty/nifty8/minimization/optimize_kl.py", line 627, in _minisanity
v = ms_val[k1][k2][k3]
KeyError: 'First lh'
```Lukas PlatzLukas Platzhttps://gitlab.mpcdf.mpg.de/ift/nifty/-/issues/351correlated Field model linearization adjoint very slow if total_N != 02022-03-29T18:52:49ZJakob Rothcorrelated Field model linearization adjoint very slow if total_N != 0As noticed by @parras @pfrank and me the adjoint of the linearization of the correlated field model is very slow if total_N != 0.
Here is a demo:
```
import nifty8 as ift
import numpy as np
sp1 = ift.RGSpace((4000, 4000))
cfmaker = ift...As noticed by @parras @pfrank and me the adjoint of the linearization of the correlated field model is very slow if total_N != 0.
Here is a demo:
```
import nifty8 as ift
import numpy as np
sp1 = ift.RGSpace((4000, 4000))
cfmaker = ift.CorrelatedFieldMaker('')
cfmaker.add_fluctuations(sp1, (0.1, 1e-2), (2, .2), (.01, .5), (-4, 2.),
'amp1')
cfmaker.set_amplitude_total_offset(0., (1e-2, 1e-6))
cf0 = cfmaker.finalize(0)
n_tot=1
cfmaker = ift.CorrelatedFieldMaker('', total_N=n_tot)
cfmaker.add_fluctuations(sp1, (0.1, 1e-2), (2, .2), (.01, .5), (-4, 2.),
'amp1', dofdex=np.arange(n_tot))
cfmaker.set_amplitude_total_offset(0., (1e-2, 1e-6), dofdex=np.arange(n_tot))
cf1 = cfmaker.finalize(0)
print("benchmark for total_N = 0")
ift.exec_time(cf0)
print("benchmark for total_N = 1")
ift.exec_time(cf1)
```
I had a quick look at the issue. The problem seems to be the _Distributor operator in the correlated field model file: https://gitlab.mpcdf.mpg.de/ift/nifty/-/blob/NIFTy_8/src/library/correlated_fields.py#L214
For the case of total_N != 0 this _Distributor is called multiple times. The call which is very slow in adjoint direction is in line: https://gitlab.mpcdf.mpg.de/ift/nifty/-/blob/NIFTy_8/src/library/correlated_fields.py#L362 and the line below.
Note this _Distibutor operator is only used for the case total_N != 0, and therefore the simple case with total_N = 0 is reasonably fast.https://gitlab.mpcdf.mpg.de/ift/nifty/-/issues/349Do not track ipynb and instead use jupytext2022-05-28T08:50:24ZGordian EdenhoferDo not track ipynb and instead use jupytextSee https://gitlab.mpcdf.mpg.de/ift/nifty/-/merge_requests/702#note_111326 .See https://gitlab.mpcdf.mpg.de/ift/nifty/-/merge_requests/702#note_111326 .Gordian EdenhoferGordian Edenhoferhttps://gitlab.mpcdf.mpg.de/ift/nifty/-/issues/348Pipeline broken?2022-01-27T11:26:59ZJakob RothPipeline broken?It seems to me that our pipeline is broken. The check_no_assert step fails for !734 although no assert was introduced.It seems to me that our pipeline is broken. The check_no_assert step fails for !734 although no assert was introduced.https://gitlab.mpcdf.mpg.de/ift/nifty/-/issues/347More MPI Bugs2022-02-01T08:47:37ZJakob RothMore MPI BugsWhen I execute the getting_started_3.py script with MPI `mpiexec -n 2 python getting_started_3.py` I get the following error message:
```
Traceback (most recent call last):
File "/home/jakob/nifty/demos/getting_started_3.py", line 163,...When I execute the getting_started_3.py script with MPI `mpiexec -n 2 python getting_started_3.py` I get the following error message:
```
Traceback (most recent call last):
File "/home/jakob/nifty/demos/getting_started_3.py", line 163, in <module>
main()
File "/home/jakob/nifty/demos/getting_started_3.py", line 153, in main
[pspec.force(mock_position), samples.average(logspec).exp()],
File "/home/jakob/nifty/nifty8/minimization/sample_list.py", line 306, in average
return utilities.allreduce_sum(res, self.comm) / n
File "/home/jakob/nifty/nifty8/utilities.py", line 371, in allreduce_sum
vals[j] = vals[j] + comm.recv(source=who[j+step])
File "/home/jakob/nifty/nifty8/field.py", line 726, in func2
return self._binary_op(other, op)
File "/home/jakob/nifty/nifty8/field.py", line 689, in _binary_op
utilities.check_object_identity(other._domain, self._domain)
File "/home/jakob/nifty/nifty8/utilities.py", line 419, in check_object_identity
raise ValueError(f"Mismatch:\n{obj0}\n{obj1}")
ValueError: Mismatch:
DomainTuple, len: 1
* PowerSpace(harmonic_partner=RGSpace(shape=(128, 128), distances=(1.0, 1.0), harmonic=True), binbounds=None)
DomainTuple, len: 1
* PowerSpace(harmonic_partner=RGSpace(shape=(128, 128), distances=(1.0, 1.0), harmonic=True), binbounds=None)
```
When executing without MPI I get no error.https://gitlab.mpcdf.mpg.de/ift/nifty/-/issues/346Nifty installation2022-11-25T17:58:16ZVincent EberleNifty installationInstallation Issues
-------------------
Jakob Roth and me found a concerning issue.
Recently it is not possible to `pip install -e .`
Somehow this returns this error [1].
But the installation works for `pip install .`
You can try to ...Installation Issues
-------------------
Jakob Roth and me found a concerning issue.
Recently it is not possible to `pip install -e .`
Somehow this returns this error [1].
But the installation works for `pip install .`
You can try to reproduce it by uninstalling nifty and reinstalling it..
Does someone know why this is happening?
@jroth @parras @mtr @gedenhof
[1]
`ERROR: Command errored out with exit status 1:
command: /usr/bin/python3 -c 'import sys, setuptools, tokenize; sys.argv[0] = '"'"'/home/veberle/MPA/nifty/setup.py'"'"'; __file__='"'"'/home/veberle/MPA/nifty/setup.py'"'"';f=getattr(tokenize, '"'"'open'"'"', open)(__file__);code=f.read().replace('"'"'\r\n'"'"', '"'"'\n'"'"');f.close();exec(compile(code, __file__, '"'"'exec'"'"'))' develop --no-deps --user --prefix=
cwd: /home/veberle/MPA/nifty/
Complete output (28 lines):
running develop
/tmp/pip-build-env-9z5pmolp/overlay/lib/python3.8/site-packages/setuptools/command/easy_install.py:156: EasyInstallDeprecationWarning: easy_install command is deprecated. Use build and pip and other standards-based tools.
warnings.warn(
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "/home/veberle/MPA/nifty/setup.py", line 33, in <module>
setup(name="nifty8",
File "/tmp/pip-build-env-9z5pmolp/overlay/lib/python3.8/site-packages/setuptools/__init__.py", line 153, in setup
return distutils.core.setup(**attrs)
File "/tmp/pip-build-env-9z5pmolp/overlay/lib/python3.8/site-packages/setuptools/_distutils/core.py", line 148, in setup
return run_commands(dist)
File "/tmp/pip-build-env-9z5pmolp/overlay/lib/python3.8/site-packages/setuptools/_distutils/core.py", line 163, in run_commands
dist.run_commands()
File "/tmp/pip-build-env-9z5pmolp/overlay/lib/python3.8/site-packages/setuptools/_distutils/dist.py", line 967, in run_commands
self.run_command(cmd)
File "/tmp/pip-build-env-9z5pmolp/overlay/lib/python3.8/site-packages/setuptools/_distutils/dist.py", line 985, in run_command
cmd_obj.ensure_finalized()
File "/tmp/pip-build-env-9z5pmolp/overlay/lib/python3.8/site-packages/setuptools/_distutils/cmd.py", line 107, in ensure_finalized
self.finalize_options()
File "/tmp/pip-build-env-9z5pmolp/overlay/lib/python3.8/site-packages/setuptools/command/develop.py", line 52, in finalize_options
easy_install.finalize_options(self)
File "/tmp/pip-build-env-9z5pmolp/overlay/lib/python3.8/site-packages/setuptools/command/easy_install.py", line 263, in finalize_options
self._fix_install_dir_for_user_site()
File "/tmp/pip-build-env-9z5pmolp/overlay/lib/python3.8/site-packages/setuptools/command/easy_install.py", line 375, in _fix_install_dir_for_user_site
self.select_scheme(scheme_name)
File "/tmp/pip-build-env-9z5pmolp/overlay/lib/python3.8/site-packages/setuptools/command/easy_install.py", line 716, in select_scheme
scheme = INSTALL_SCHEMES[name]
KeyError: 'unix_user'
----------------------------------------
ERROR: Can't roll back nifty8; was not uninstalled
ERROR: Command errored out with exit status 1: /usr/bin/python3 -c 'import sys, setuptools, tokenize; sys.argv[0] = '"'"'/home/veberle/MPA/nifty/setup.py'"'"'; __file__='"'"'/home/veberle/MPA/nifty/setup.py'"'"';f=getattr(tokenize, '"'"'open'"'"', open)(__file__);code=f.read().replace('"'"'\r\n'"'"', '"'"'\n'"'"');f.close();exec(compile(code, __file__, '"'"'exec'"'"'))' develop --no-deps --user --prefix= Check the logs for full command output.
`https://gitlab.mpcdf.mpg.de/ift/nifty/-/issues/345sample list average crashes for mpi run with map estimator2022-01-27T11:18:26ZJakob Rothsample list average crashes for mpi run with map estimatorWhen computing a map estimate with multiple MPI tasks the sample list average method crashes.
Here is an example code that works if executed normally with python, but crashes in an MPI run
```
import numpy as np
import nifty8 as ift
ift...When computing a map estimate with multiple MPI tasks the sample list average method crashes.
Here is an example code that works if executed normally with python, but crashes in an MPI run
```
import numpy as np
import nifty8 as ift
ift.random.push_sseq_from_seed(27)
try:
from mpi4py import MPI
comm = MPI.COMM_WORLD
master = comm.Get_rank() == 0
except ImportError:
comm = None
master = True
position_space = ift.RGSpace([128, 128])
op = ift.makeOp(ift.full(position_space, 10.))
noise = 0.1
N = ift.ScalingOperator(position_space, noise, np.float64)
mock_position = ift.from_random(op.domain)
data = op(mock_position) + N.draw_sample()
lh = ift.GaussianEnergy(mean=data, inverse_covariance=N.inverse) @ op
ic_sampling = ift.AbsDeltaEnergyController(
name="Sampling (linear)", deltaE=0.05, iteration_limit=10
)
ic_newton = ift.AbsDeltaEnergyController(
name="Newton", deltaE=0.5, convergence_level=2, iteration_limit=5
)
minimizer = ift.NewtonCG(ic_newton)
def callback(samples, i):
plot = ift.Plot()
mean = samples.average(op)
plot.add(mean, title="Reconstruction", zmin=0, zmax=1)
if master:
plot.output()
n_iterations = 3
n_samples = lambda iiter: 0 if iiter < 1 else 2
samples = ift.optimize_kl(
lh,
n_iterations,
n_samples,
minimizer,
ic_sampling,
None,
overwrite=True,
comm=comm,
callback=callback,
)
```