diff --git a/reconstruction.py b/reconstruction.py
index 02caf75f27ef8483082c3db272d04ffc15144445..3a20e2fbbf5639a92e3dc13c03ad5d539a93b0a0 100644
--- a/reconstruction.py
+++ b/reconstruction.py
@@ -25,9 +25,9 @@ from time import time
 import nifty6 as ift
 import src as vlbi
 from config import comm, nranks, rank, master
-from config import doms, dt, eps, min_timestamps_per_bin, npixt, nthreads
+from config import doms, eps, min_timestamps_per_bin, nthreads
 from config import sky_movie_mf as sky
-from config import startt, dt, npix
+from config import startt, dt, npixt
 
 def stat_plotting(pos, KL):
     if master:
@@ -79,8 +79,7 @@ def optimization_heuristic(ii, likelihoods):
         lh = lh_full_ph
     return minimizer, N_samples, N_iterations, lh
 
-
-def build_likelihood(rawdd, startt, npix, dt, mode):
+def build_likelihood(rawdd, startt, npixt, dt, mode):
     lh = []
     active_inds = []
     for freq in vlbi.data.FREQS:
@@ -103,13 +102,13 @@ def build_likelihood(rawdd, startt, npix, dt, mode):
             vis2closph, evalsph, _ = vlbi.Visibilities2ClosurePhases(dd)
             llh.append(ift.GaussianEnergy(mean=vis2closph(vis)) @ vis2closph)
         llh_op = reduce(add, llh) @ nfft.ducktape(ind)
-        ift.extra.check_jacobian_consistency(llh_op, ift.from_random(llh_op.domain),
-                                             tol=1e-5, ntries=10)
+        if mode == 'full' and freq == vlbi.data.FREQS[0]:
+            ift.extra.check_jacobian_consistency(llh_op, ift.from_random(llh_op.domain),
+                                                 tol=1e-5, ntries=10)
         lh.append(llh_op)
 
     conv = vlbi.DomainTuple2MultiField(sky.target, active_inds)
-    lh= reduce(add, lh) @ conv
-    return lh
+    return reduce(add, lh) @ conv
 
 def setup():
     if len(sys.argv) != 3:
@@ -124,13 +123,13 @@ def setup():
 
     pre_output = pre_data
 
-    lh_full = build_likelihood(rawd, startt, npix, dt,'full')
-    lh_full_ph = build_likelihood(rawd, startt, npix, dt,'ph')
-    lh_full_amp = build_likelihood(rawd, startt, npix, dt,'amp')
+    lh_full = build_likelihood(rawd, startt, npixt, dt, 'full')
+    lh_full_ph = build_likelihood(rawd, startt, npixt, dt, 'ph')
+    lh_full_amp = build_likelihood(rawd, startt, npixt, dt, 'amp')
 
-    lh_cut = build_likelihood(rawd, startt, npix//2, dt,'full')
-    lh_cut_ph = build_likelihood(rawd, startt, npix//2, dt,'ph')
-    lh_cut_amp = build_likelihood(rawd, startt, npix//2, dt,'amp')
+    lh_cut = build_likelihood(rawd, startt, npixt//2, dt, 'full')
+    lh_cut_ph = build_likelihood(rawd, startt, npixt//2, dt, 'ph')
+    lh_cut_amp = build_likelihood(rawd, startt, npixt//2, dt, 'amp')
 
     pos = vlbi.load_hdf5(fname_input, sky.domain) if master else None
     if nranks > 1:
@@ -151,7 +150,7 @@ def main():
     with open("time_averaging.txt", 'w') as f:
         # delete the file such that new lines can be appended
        f.write("min max avg med\n")
-    pos, sky, ic, pre_output, likelihoods= setup()
+    pos, sky, ic, pre_output, likelihoods = setup()
 
     for ii in range(60):
         gc.collect()
@@ -159,7 +158,7 @@ def main():
         if master:
             print(f'Iter: {ii}, N_samples: {N_samples}, N_iter: {N_iterations}')
-        
+
         ll = lh @ sky
         H = ift.StandardHamiltonian(ll, ic)
         KL = ift.MetricGaussianKL(pos, H, N_samples, comm=comm, mirror_samples=True)
 