From c1674d4d34682359dae2da5357e82662627a8a3a Mon Sep 17 00:00:00 2001
From: Julian Ruestig <jruestig@posteo.de>
Date: Mon, 3 Jun 2024 11:16:34 +0200
Subject: [PATCH] averaging: add time and frequency averaging

---
 resolve/data/averaging.py | 75 ++++++++++++++++++++++++++++++++++++++++++--
 1 file changed, 72 insertions(+), 3 deletions(-)

diff --git a/resolve/data/averaging.py b/resolve/data/averaging.py
index 7e69de65..6cd730d3 100644
--- a/resolve/data/averaging.py
+++ b/resolve/data/averaging.py
@@ -15,6 +15,7 @@
 # Author: Martin Reinecke
 
 import numpy as np
+from .observation import tmin_tmax, Observation
 
 
 def fair_share_averaging(ts_per_bin, times, gap_time):
@@ -46,7 +47,8 @@
     while i0 < nval:
         i = i0+1
         nscan = 1  # number of different time stamps in the scan
-        while i < nval and times[i]-times[i-1] < gap_time:  # as long as there are less than x seconds between time stamps, we assume we are in the same scan
+        # as long as consecutive time stamps are less than gap_time seconds apart, we assume we are still in the same scan
+        while i < nval and times[i]-times[i-1] < gap_time:
             if times[i] != times[i-1]:
                 nscan += 1
             i += 1
@@ -63,13 +65,15 @@
         if icnt < n:
             if icnt == n-1:
                 tbins += [(times[i0], times[i], times[i]-times[i0])]
-            times[i] = times[i0]  # give all values in this bin the time stamp of the first value
+            # give all values in this bin the time stamp of the first value
+            times[i] = times[i0]
             i += 1
         i0 = i
     tbsize = np.array([t[2] for t in tbins])
     print("Size time bins:")
     print(f" min: {np.min(tbsize):.1f}s\n max: {np.max(tbsize):.1f}s")
-    print(f" mean: {np.mean(tbsize):.1f}s\n median: {np.median(tbsize):.1f}s")
+    print(
+        f" mean: {np.mean(tbsize):.1f}s\n median: {np.median(tbsize):.1f}s")
     times = np.unique(times)
     times = np.hstack([times, np.inf])
     time_bins = np.array([times[:-1], times[1:]]).T
@@ -78,3 +82,68 @@
 
 def _fair_share(n, nshare, ishare):
     return n//nshare + (ishare < (n % nshare))
+
+
+def freq_average(obs, n_freq_chunks):
+    """Average an Observation down to n_freq_chunks frequency channels."""
+    split_freqs = np.array_split(obs.freq, n_freq_chunks)
+    split_obs = []
+    for ff in split_freqs:
+        split_obs.append(obs.restrict_by_freq(ff[0], ff[-1]))
+
+    # Average each chunk's visibilities and propagate the variance of the mean.
+    obs_avg = []
+    for obsi in split_obs:
+        new_vis = np.mean(obsi.vis.val, axis=2, keepdims=True)
+        cov = 1 / obsi.weight.val
+        new_cov = np.sum(cov, axis=2, keepdims=True) / (obsi.vis.shape[2]**2)
+        new_weight = 1 / new_cov
+        new_freq = np.array([np.mean(obsi.freq)])
+        new_obs = Observation(
+            obsi.antenna_positions,
+            new_vis,
+            new_weight,
+            obsi.polarization,
+            new_freq,
+            obs._auxiliary_tables)
+        obs_avg.append(new_obs)
+
+    # Stitch the single-channel observations back into one Observation.
+    new_freq = [oo.freq[0] for oo in obs_avg]
+    new_freq = np.array(new_freq)
+    new_vis_shape = (obs.vis.shape[0], obs.vis.shape[1], len(new_freq))
+    new_vis = np.zeros(new_vis_shape, obs.vis.dtype)
+    new_weight = np.zeros(new_vis_shape, obs.weight.dtype)
+    for ii, oo in enumerate(obs_avg):
+        new_vis[:, :, ii] = oo.vis.val[:, :, 0]
+        new_weight[:, :, ii] = oo.weight.val[:, :, 0]
+
+    obs_averaged = Observation(
+        obs.antenna_positions,
+        new_vis,
+        new_weight,
+        obs.polarization,
+        new_freq,
+        obs._auxiliary_tables)
+    return obs_averaged
+
+
+def time_average(obs, len_tbin):
+    """Average an Observation into time bins of length len_tbin seconds."""
+    tmin, tmax = tmin_tmax(obs)
+    assert tmin == 0
+    n_tbins = int(tmax // len_tbin + 2)
+    tbins_endpoints = np.arange(0, n_tbins*len_tbin, len_tbin)
+    unique_times = np.unique(obs.time)
+    # Keep only those bins that actually contain time stamps.
+    t_intervals = []
+    for ii in range(n_tbins-1):
+        start = tbins_endpoints[ii]
+        stop = tbins_endpoints[ii+1]
+        after_start = start <= unique_times
+        before_stop = unique_times < stop
+        bin_is_populated = np.any(np.logical_and(after_start, before_stop))
+        if bin_is_populated:
+            t_intervals.append([start, stop])
+
+    return obs.time_average(t_intervals)
-- 
GitLab
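
A minimal usage sketch for the two helpers added above. It assumes the Observation has already been read in; the ms2observations call and its arguments are an illustrative assumption, not part of this patch. Note that time_average asserts that the observation's time axis starts at zero.

    import resolve as rve
    from resolve.data.averaging import freq_average, time_average
    from resolve.data.observation import tmin_tmax

    # Loading step: this call and its arguments are assumed here; adapt to
    # however the Observation is produced in your pipeline.
    obs = rve.ms2observations("mydata.ms", "DATA", False, 0)[0]

    # time_average requires a time axis that starts at zero.
    tmin, _ = tmin_tmax(obs)
    assert tmin == 0

    # Collapse the frequency axis to 4 channels; each averaged channel gets
    # the noise variance of the mean of its input channels.
    obs_f = freq_average(obs, n_freq_chunks=4)

    # Average into 10-second time bins; bins without any time stamps are skipped.
    obs_ft = time_average(obs_f, len_tbin=10.0)

    print(obs.vis.shape, "->", obs_ft.vis.shape)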