From a0504f51e60d1b447b42e525f52e8dc1ae3d03fc Mon Sep 17 00:00:00 2001
From: Cristian C Lalescu <Cristian.Lalescu@ds.mpg.de>
Date: Wed, 6 Mar 2019 13:15:50 +0100
Subject: [PATCH] update reading of hdf5 dataset values

.value was deprecated in h5py, so I now use [...] indexing, which also
works with old enough h5py versions (the h5py documentation recommends
[()] instead).
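
For reference, a minimal sketch of the access patterns (file name is
made up; dataset names match ones touched by this patch; exact return
types depend on the h5py version):

    import h5py
    import numpy as np

    # create a small example file with a scalar and a 1D dataset
    with h5py.File('example.h5', 'w') as f:
        f['iteration'] = 32
        f['kspace/kshell'] = np.arange(8.)

    with h5py.File('example.h5', 'r') as f:
        # old, deprecated API:
        #     it = f['iteration'].value
        it = f['iteration'][...]          # 0-d numpy array; also works on older h5py
        it_scalar = f['iteration'][()]    # recommended form, returns a numpy scalar
        kshell = f['kspace/kshell'][...]  # reads the full array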
---
 bfps/DNS.py                          | 28 ++++++++++++++--------------
 bfps/PP.py                           | 26 +++++++++++++-------------
 bfps/_base.py                        |  4 ++--
 bfps/_code.py                        |  2 +-
 bfps/test/test_bfps_NSVEparticles.py | 12 ++++++------
 bfps/test/test_bfps_resize.py        |  4 ++--
 bfps/test/test_fftw.py               |  4 ++--
 bfps/test/test_interpolation.py      |  8 ++++----
 bfps/test/test_particle_clouds.py    | 16 ++++++++--------
 bfps/test/test_particles.py          | 12 ++++++------
 bfps/tools.py                        | 16 ++++++++--------
 tests/test_field_class.py            |  6 +++---
 tests/test_filters.py                | 10 +++++-----
 tests/test_particles.py              |  4 ++--
 14 files changed, 76 insertions(+), 76 deletions(-)

diff --git a/bfps/DNS.py b/bfps/DNS.py
index e0d83caf..6cc994ca 100644
--- a/bfps/DNS.py
+++ b/bfps/DNS.py
@@ -235,11 +235,11 @@ class DNS(_code):
                               'vel_max(t)',
                               'renergy(t)']:
                         if k in pp_file.keys():
-                            self.statistics[k] = pp_file[k].value
-                    self.statistics['kM'] = pp_file['kspace/kM'].value
-                    self.statistics['dk'] = pp_file['kspace/dk'].value
-                    self.statistics['kshell'] = pp_file['kspace/kshell'].value
-                    self.statistics['nshell'] = pp_file['kspace/nshell'].value
+                            self.statistics[k] = pp_file[k][...]
+                    self.statistics['kM'] = pp_file['kspace/kM'][...]
+                    self.statistics['dk'] = pp_file['kspace/dk'][...]
+                    self.statistics['kshell'] = pp_file['kspace/kshell'][...]
+                    self.statistics['nshell'] = pp_file['kspace/nshell'][...]
         else:
             self.read_parameters()
             with self.get_data_file() as data_file:
@@ -249,26 +249,26 @@ class DNS(_code):
                              self.parameters['niter_stat']-1),
                             iter0)
                 if type(iter1) == type(None):
-                    iter1 = data_file['iteration'].value
+                    iter1 = data_file['iteration'][...]
                 else:
-                    iter1 = min(data_file['iteration'].value, iter1)
+                    iter1 = min(data_file['iteration'][...], iter1)
                 ii0 = iter0 // self.parameters['niter_stat']
                 ii1 = iter1 // self.parameters['niter_stat']
-                self.statistics['kshell'] = data_file['kspace/kshell'].value
-                self.statistics['nshell'] = data_file['kspace/nshell'].value
+                self.statistics['kshell'] = data_file['kspace/kshell'][...]
+                self.statistics['nshell'] = data_file['kspace/nshell'][...]
                 for kk in [-1, -2]:
                     if (self.statistics['kshell'][kk] == 0):
                         self.statistics['kshell'][kk] = np.nan
-                self.statistics['kM'] = data_file['kspace/kM'].value
-                self.statistics['dk'] = data_file['kspace/dk'].value
+                self.statistics['kM'] = data_file['kspace/kM'][...]
+                self.statistics['dk'] = data_file['kspace/dk'][...]
                 computation_needed = True
                 pp_file = h5py.File(self.get_postprocess_file_name(), 'a')
                 if not ('parameters' in pp_file.keys()):
                     data_file.copy('parameters', pp_file)
                     data_file.copy('kspace', pp_file)
                 if 'ii0' in pp_file.keys():
-                    computation_needed =  not (ii0 == pp_file['ii0'].value and
-                                               ii1 == pp_file['ii1'].value)
+                    computation_needed =  not (ii0 == pp_file['ii0'][...] and
+                                               ii1 == pp_file['ii1'][...])
                     if computation_needed:
                         for k in ['t', 'vel_max(t)', 'renergy(t)',
                                   'energy(t)', 'enstrophy(t)',
@@ -312,7 +312,7 @@ class DNS(_code):
                   'vel_max(t)',
                   'renergy(t)']:
             if k in pp_file.keys():
-                self.statistics[k] = pp_file[k].value
+                self.statistics[k] = pp_file[k][...]
         # sanity check --- Parseval theorem check
         assert(np.max(np.abs(
                 self.statistics['renergy(t)'] -
diff --git a/bfps/PP.py b/bfps/PP.py
index 77bf9d6c..72e693d4 100644
--- a/bfps/PP.py
+++ b/bfps/PP.py
@@ -184,19 +184,19 @@ class PP(_code):
                          self.parameters['niter_stat']-1),
                         iter0)
             if type(iter1) == type(None):
-                iter1 = data_file['iteration'].value
+                iter1 = data_file['iteration'][...]
             else:
-                iter1 = min(data_file['iteration'].value, iter1)
+                iter1 = min(data_file['iteration'][...], iter1)
             ii0 = iter0 // self.parameters['niter_stat']
             ii1 = iter1 // self.parameters['niter_stat']
-            self.statistics['kshell'] = data_file['kspace/kshell'].value
-            self.statistics['kM'] = data_file['kspace/kM'].value
-            self.statistics['dk'] = data_file['kspace/dk'].value
+            self.statistics['kshell'] = data_file['kspace/kshell'][...]
+            self.statistics['kM'] = data_file['kspace/kM'][...]
+            self.statistics['dk'] = data_file['kspace/dk'][...]
             computation_needed = True
             pp_file = h5py.File(self.get_postprocess_file_name(), 'a')
             if 'ii0' in pp_file.keys():
-                computation_needed =  not (ii0 == pp_file['ii0'].value and
-                                           ii1 == pp_file['ii1'].value)
+                computation_needed =  not (ii0 == pp_file['ii0'][...] and
+                                           ii1 == pp_file['ii1'][...])
                 if computation_needed:
                     for k in pp_file.keys():
                         del pp_file[k]
@@ -224,7 +224,7 @@ class PP(_code):
                       'vel_max(t)',
                       'renergy(t)']:
                 if k in pp_file.keys():
-                    self.statistics[k] = pp_file[k].value
+                    self.statistics[k] = pp_file[k][...]
             self.compute_time_averages()
         return None
     def compute_time_averages(self):
@@ -503,7 +503,7 @@ class PP(_code):
         for k in self.pp_parameters.keys():
              self.parameters[k] = self.pp_parameters[k]
         self.pars_from_namespace(opt)
-        niter_out = self.get_data_file()['parameters/niter_out'].value
+        niter_out = self.get_data_file()['parameters/niter_out'][...]
         assert(opt.iter0 % niter_out == 0)
         self.pp_parameters['iteration_list'] = np.arange(
                 opt.iter0, opt.iter1+niter_out, niter_out, dtype = np.int)
@@ -672,7 +672,7 @@ class PP(_code):
                 parameters = self.pp_parameters,
                 get_sim_info = False)
         for kk in ['nx', 'ny', 'nz']:
-            self.parameters[kk] = self.get_data_file()['parameters/' + kk].value
+            self.parameters[kk] = self.get_data_file()['parameters/' + kk][...]
         n = self.parameters['nx']
         if self.dns_type in ['filtered_slices',
                              'filtered_acceleration']:
@@ -799,9 +799,9 @@ class PP(_code):
         if 'field_dtype' in df.keys():
             # we don't need to do anything, raw binary files are used
             return None
-        last_iteration = df['iteration'].value
-        cppf = df['parameters/checkpoints_per_file'].value
-        niter_out = df['parameters/niter_out'].value
+        last_iteration = df['iteration'][...]
+        cppf = df['parameters/checkpoints_per_file'][...]
+        niter_out = df['parameters/niter_out'][...]
         with h5py.File(os.path.join(self.work_dir, self.simname + '_fields.h5'), 'a') as ff:
             ff.require_group('vorticity')
             ff.require_group('vorticity/complex')
diff --git a/bfps/_base.py b/bfps/_base.py
index 3d7c747b..086adea3 100644
--- a/bfps/_base.py
+++ b/bfps/_base.py
@@ -231,9 +231,9 @@ class _base(object):
             for k in data_file['parameters'].keys():
                 if k in self.parameters.keys():
                     if type(self.parameters[k]) in [int, str, float]:
-                        self.parameters[k] = type(self.parameters[k])(data_file['parameters/' + k].value)
+                        self.parameters[k] = type(self.parameters[k])(data_file['parameters/' + k][...])
                     else:
-                        self.parameters[k] = data_file['parameters/' + k].value
+                        self.parameters[k] = data_file['parameters/' + k][...]
         return None
     def pars_from_namespace(
             self,
diff --git a/bfps/_code.py b/bfps/_code.py
index f997d651..d791be61 100644
--- a/bfps/_code.py
+++ b/bfps/_code.py
@@ -245,7 +245,7 @@ class _code(_base):
             no_debug = False):
         self.read_parameters()
         with h5py.File(os.path.join(self.work_dir, self.simname + '.h5'), 'r') as data_file:
-            iter0 = data_file['iteration'].value
+            iter0 = data_file['iteration'][...]
         if not os.path.isdir(self.work_dir):
             os.makedirs(self.work_dir)
         if not os.path.exists(os.path.join(self.work_dir, self.name)):
diff --git a/bfps/test/test_bfps_NSVEparticles.py b/bfps/test/test_bfps_NSVEparticles.py
index fe1e7875..e9abe672 100644
--- a/bfps/test/test_bfps_NSVEparticles.py
+++ b/bfps/test/test_bfps_NSVEparticles.py
@@ -66,14 +66,14 @@ def main():
             'r')
     f1 = h5py.File(c.get_checkpoint_0_fname(), 'r')
     for iteration in [0, 32, 64]:
-        field0 = f0['vorticity/complex/{0}'.format(iteration)].value
-        field1 = f1['vorticity/complex/{0}'.format(iteration)].value
+        field0 = f0['vorticity/complex/{0}'.format(iteration)][...]
+        field1 = f1['vorticity/complex/{0}'.format(iteration)][...]
         field_error = np.max(np.abs(field0 - field1))
-        x0 = f0['tracers0/state/{0}'.format(iteration)].value
-        x1 = f1['tracers0/state/{0}'.format(iteration)].value
+        x0 = f0['tracers0/state/{0}'.format(iteration)][...]
+        x1 = f1['tracers0/state/{0}'.format(iteration)][...]
         traj_error = np.max(np.abs(x0 - x1))
-        y0 = f0['tracers0/rhs/{0}'.format(iteration)].value
-        y1 = f1['tracers0/rhs/{0}'.format(iteration)].value
+        y0 = f0['tracers0/rhs/{0}'.format(iteration)][...]
+        y1 = f1['tracers0/rhs/{0}'.format(iteration)][...]
         rhs_error = np.max(np.abs(y0 - y1))
         assert(field_error < 1e-5)
         assert(traj_error < 1e-5)
diff --git a/bfps/test/test_bfps_resize.py b/bfps/test/test_bfps_resize.py
index ce0a051d..2dfb4c69 100644
--- a/bfps/test/test_bfps_resize.py
+++ b/bfps/test/test_bfps_resize.py
@@ -46,8 +46,8 @@ def main():
              sys.argv[1:])
     f0 = h5py.File(c.get_checkpoint_0_fname(), 'r')
     f1 = h5py.File('pp_resize_test_fields.h5', 'r')
-    d0 = f0['vorticity/complex/0'].value
-    d1 = f1['vorticity/complex/0'].value
+    d0 = f0['vorticity/complex/0'][...]
+    d1 = f1['vorticity/complex/0'][...]
     small_kdata = pyfftw.n_byte_align_empty(
             (32, 32, 17, 3),
             pyfftw.simd_alignment,
diff --git a/bfps/test/test_fftw.py b/bfps/test/test_fftw.py
index 3de2d97d..f129d57e 100644
--- a/bfps/test/test_fftw.py
+++ b/bfps/test/test_fftw.py
@@ -32,8 +32,8 @@ def main():
                  sys.argv[1:])
         df = h5py.File(c.simname + '.h5', 'r')
         df = h5py.File(c.simname + '_fields.h5', 'r')
-        field1_complex = df['field1/complex/0'].value
-        field1_real = df['field1/real/0'].value
+        field1_complex = df['field1/complex/0'][...]
+        field1_real = df['field1/real/0'][...]
         npoints = field1_real.size//3
 
         np_field1_real = np.fft.irfftn(field1_complex, axes = (0, 1, 2)).transpose(1, 0, 2, 3)
diff --git a/bfps/test/test_interpolation.py b/bfps/test/test_interpolation.py
index eeb40248..a4fea59f 100644
--- a/bfps/test/test_interpolation.py
+++ b/bfps/test/test_interpolation.py
@@ -32,11 +32,11 @@ def main():
     ofile = h5py.File(
             'test_output.h5',
             'r')
-    pos0 = ifile['tracers0/state/0'].value
-    pos1 = ofile['tracers0/position/0'].value
+    pos0 = ifile['tracers0/state/0'][...]
+    pos1 = ofile['tracers0/position/0'][...]
     assert(np.max(np.abs(pos0-pos1) / np.abs(pos0)) <= 1e-5)
-    vort0 = ofile['tracers0/vorticity/0'].value
-    vel_gradient = ofile['tracers0/velocity_gradient/0'].value
+    vort0 = ofile['tracers0/vorticity/0'][...]
+    vel_gradient = ofile['tracers0/velocity_gradient/0'][...]
     vort1 = vort0.copy()
     vort1[:, 0] = vel_gradient[:, 5] - vel_gradient[:, 7]
     vort1[:, 1] = vel_gradient[:, 6] - vel_gradient[:, 2]
diff --git a/bfps/test/test_particle_clouds.py b/bfps/test/test_particle_clouds.py
index 5d204539..aef70598 100644
--- a/bfps/test/test_particle_clouds.py
+++ b/bfps/test/test_particle_clouds.py
@@ -47,8 +47,8 @@ def main():
     del c.parameters['nparticles']
     del c.parameters['tracers1_integration_steps']
     ic_file = h5py.File(c.get_checkpoint_0_fname(), 'a')
-    ic_file['tracers0/state/0'] = ic_file['tracers1/state/0'].value.reshape(nclouds, nparticles_per_cloud, 3)
-    ic_file['tracers0/rhs/0'] = ic_file['tracers1/rhs/0'].value.reshape(4, nclouds, nparticles_per_cloud, 3)
+    ic_file['tracers0/state/0'] = ic_file['tracers1/state/0'][...].reshape(nclouds, nparticles_per_cloud, 3)
+    ic_file['tracers0/rhs/0'] = ic_file['tracers1/rhs/0'][...].reshape(4, nclouds, nparticles_per_cloud, 3)
     ic_file.close()
     c.launch(
             ['NSVEparticles',
@@ -73,14 +73,14 @@ def main():
             'r')
     f1 = h5py.File(c.get_checkpoint_0_fname(), 'r')
     for iteration in [0, 32, 64]:
-        field0 = f0['vorticity/complex/{0}'.format(iteration)].value
-        field1 = f1['vorticity/complex/{0}'.format(iteration)].value
+        field0 = f0['vorticity/complex/{0}'.format(iteration)][...]
+        field1 = f1['vorticity/complex/{0}'.format(iteration)][...]
         field_error = np.max(np.abs(field0 - field1))
-        x0 = f0['tracers0/state/{0}'.format(iteration)].value
-        x1 = f1['tracers0/state/{0}'.format(iteration)].value.reshape(x0.shape)
+        x0 = f0['tracers0/state/{0}'.format(iteration)][...]
+        x1 = f1['tracers0/state/{0}'.format(iteration)][...].reshape(x0.shape)
         traj_error = np.max(np.abs(x0 - x1))
-        y0 = f0['tracers0/rhs/{0}'.format(iteration)].value
-        y1 = f1['tracers0/rhs/{0}'.format(iteration)].value.reshape(y0.shape)
+        y0 = f0['tracers0/rhs/{0}'.format(iteration)][...]
+        y1 = f1['tracers0/rhs/{0}'.format(iteration)][...].reshape(y0.shape)
         rhs_error = np.max(np.abs(y0 - y1))
         assert(field_error < 1e-5)
         assert(traj_error < 1e-5)
diff --git a/bfps/test/test_particles.py b/bfps/test/test_particles.py
index 6d3abec1..c3249a98 100644
--- a/bfps/test/test_particles.py
+++ b/bfps/test/test_particles.py
@@ -68,7 +68,7 @@ def main():
             f = plt.figure()
             a = f.add_subplot(111)
             for iteration in range(0, niterations*njobs+1, niterations//2):
-                x = pf['tracers0/position/{0}'.format(iteration)].value
+                x = pf['tracers0/position/{0}'.format(iteration)][...]
                 hist, bins = np.histogram(
                         np.sum(x**2, axis = -1).flatten()**.5,
                         bins = 40)
@@ -83,7 +83,7 @@ def main():
             f = plt.figure()
             a = f.add_subplot(111)
             for iteration in range(0, niterations*njobs+1, niterations//2):
-                x = pf['tracers0/orientation/{0}'.format(iteration)].value
+                x = pf['tracers0/orientation/{0}'.format(iteration)][...]
                 hist, bins = np.histogram(
                         np.sum(x**2, axis = -1).flatten()**.5,
                         bins = np.linspace(0, 2, 40))
@@ -96,14 +96,14 @@ def main():
             plt.close(f)
             # compared sampled positions with checkpoint positions
             for iteration in range(0, niterations*njobs+1, niterations):
-                x = pf['tracers0/position/{0}'.format(iteration)].value
-                s = cf['tracers0/state/{0}'.format(iteration)].value
+                x = pf['tracers0/position/{0}'.format(iteration)][...]
+                s = cf['tracers0/state/{0}'.format(iteration)][...]
                 distance = (np.max(np.abs(x - s[..., :3]) /
                                    np.maximum(np.ones(x.shape),
                                               np.maximum(np.abs(x),
                                                          np.abs(s[..., :3])))))
                 assert(distance < 1e-14)
-                x = pf['tracers0/orientation/{0}'.format(iteration)].value
+                x = pf['tracers0/orientation/{0}'.format(iteration)][...]
                 distance = (np.max(np.abs(x - s[..., 3:]) /
                                    np.maximum(np.ones(x.shape),
                                               np.maximum(np.abs(x),
@@ -115,7 +115,7 @@ def main():
             f = plt.figure()
             a = f.add_subplot(111)
             for iteration in range(0, niterations*njobs+1, niterations//4):
-                x = pf['tracers0/orientation/{0}'.format(iteration)].value
+                x = pf['tracers0/orientation/{0}'.format(iteration)][...]
                 hist, bins = np.histogram(
                         x.flatten(),
                         bins = 100)
diff --git a/bfps/tools.py b/bfps/tools.py
index a3fbc9d9..9acf4e22 100644
--- a/bfps/tools.py
+++ b/bfps/tools.py
@@ -284,10 +284,10 @@ def particle_finite_diff_test(
         pf = c.get_particle_file()
         group = pf['tracers{0}'.format(species)]
     acc_on = 'acceleration' in group.keys()
-    pos = group['state'].value
-    vel = group['velocity'].value
+    pos = group['state'][...]
+    vel = group['velocity'][...]
     if acc_on:
-        acc = group['acceleration'].value
+        acc = group['acceleration'][...]
     n = m
     fc = get_fornberg_coeffs(0, range(-n, n+1))
     dt = c.parameters['dt']*c.parameters['niter_part']
@@ -311,7 +311,7 @@ def particle_finite_diff_test(
     if interp_name not in pars.keys():
         # old format
         interp_name = 'tracers{0}_field'.format(species)
-    interp_name = pars[interp_name].value
+    interp_name = pars[interp_name][...]
     if type(interp_name) == bytes:
         if sys.version_info[0] == 3:
             interp_name = str(interp_name, 'ASCII')
@@ -319,11 +319,11 @@ def particle_finite_diff_test(
             interp_name = str(interp_name)
     to_print = (
             'steps={0}, interp={1}, neighbours={2}, '.format(
-                pars['tracers{0}_integration_steps'.format(species)].value,
-                pars[interp_name + '_type'].value,
-                pars[interp_name + '_neighbours'].value))
+                pars['tracers{0}_integration_steps'.format(species)][...],
+                pars[interp_name + '_type'][...],
+                pars[interp_name + '_neighbours'][...]))
     if 'spline' in interp_name:
-        to_print += 'smoothness = {0}, '.format(pars[interp_name + '_smoothness'].value)
+        to_print += 'smoothness = {0}, '.format(pars[interp_name + '_smoothness'][...])
     to_print += (
             'SNR d1p-vel={0:.3f}'.format(np.mean(snr_vel1)))
     if acc_on:
diff --git a/tests/test_field_class.py b/tests/test_field_class.py
index 110d9be6..ef0cc851 100644
--- a/tests/test_field_class.py
+++ b/tests/test_field_class.py
@@ -142,9 +142,9 @@ def main():
              '--ncpu', '2'])
 
     f = h5py.File('field.h5', 'r')
-    #err0 = np.max(np.abs(f['scal_tmp/real/0'].value - rdata)) / np.mean(np.abs(rdata))
-    #err1 = np.max(np.abs(f['scal/real/0'].value/(n**3) - rdata)) / np.mean(np.abs(rdata))
-    #err2 = np.max(np.abs(f['scal_tmp/complex/0'].value/(n**3) - cdata)) / np.mean(np.abs(cdata))
+    #err0 = np.max(np.abs(f['scal_tmp/real/0'][...] - rdata)) / np.mean(np.abs(rdata))
+    #err1 = np.max(np.abs(f['scal/real/0'][...]/(n**3) - rdata)) / np.mean(np.abs(rdata))
+    #err2 = np.max(np.abs(f['scal_tmp/complex/0'][...]/(n**3) - cdata)) / np.mean(np.abs(cdata))
     #print(err0, err1, err2)
     #assert(err0 < 1e-5)
     #assert(err1 < 1e-5)
diff --git a/tests/test_filters.py b/tests/test_filters.py
index b6e2aedb..b3c24bf7 100644
--- a/tests/test_filters.py
+++ b/tests/test_filters.py
@@ -80,9 +80,9 @@ def filter_comparison(
         dd = None,
         base_name = 'filter_test_',
         dim = 0):
-    b = dd.df['ball/real/{0}'.format(dim)].value
-    g = dd.df['Gauss/real/{0}'.format(dim)].value
-    s = dd.df['sharp_Fourier_sphere/real/{0}'.format(dim)].value
+    b = dd.df['ball/real/{0}'.format(dim)][...]
+    g = dd.df['Gauss/real/{0}'.format(dim)][...]
+    s = dd.df['sharp_Fourier_sphere/real/{0}'.format(dim)][...]
     d3V = dd.grid_spacing['x']*dd.grid_spacing['y']*dd.grid_spacing['z']
     print(np.sum(b)*d3V)
     print(np.sum(g)*d3V)
@@ -164,7 +164,7 @@ def resolution_comparison(
     f = plt.figure(figsize = (6, 5))
     a = f.add_subplot(111)
     for dd in dlist:
-        s0 = dd.df[filter_type + '/real/{0}'.format(dim)].value
+        s0 = dd.df[filter_type + '/real/{0}'.format(dim)][...]
         a.plot(dd.get_coordinate('z'),
                s0[:, 0, 0],
                label = '{0}'.format(dd.simname))
@@ -182,7 +182,7 @@ class sim_data:
         pfile = h5py.File(simname + '.h5', 'r')
         self.parameters = {}
         for kk in pfile['parameters'].keys():
-            self.parameters[kk] = pfile['parameters/' + kk].value
+            self.parameters[kk] = pfile['parameters/' + kk][...]
         self.grid_spacing = {}
         for kk in ['x', 'y', 'z']:
             self.grid_spacing[kk] = 2*np.pi / (self.parameters['dk' + kk] * self.parameters['n' + kk])
diff --git a/tests/test_particles.py b/tests/test_particles.py
index 7c7c8661..6be07642 100644
--- a/tests/test_particles.py
+++ b/tests/test_particles.py
@@ -118,7 +118,7 @@ class err_finder:
                        for c in self.clist]
         self.ctraj = [None]
         for i in range(1, self.clist[0].particle_species):
-            self.ctraj.append([self.clist[j].get_particle_file()['tracers{0}/state'.format(i)].value.transpose((0, 2, 1))
+            self.ctraj.append([self.clist[j].get_particle_file()['tracers{0}/state'.format(i)][...].transpose((0, 2, 1))
                                for j in range(len(self.clist))])
         return None
     def get_AB_err(self, nsubsteps = 1):
@@ -178,7 +178,7 @@ if __name__ == '__main__':
     a = fig.add_subplot(111)
     for s in range(1, 5):
         ef.get_AB_err(s)
-        errlist = [np.average(np.abs(ef.clist[i].get_particle_file()['tracers{0}/state'.format(s)].value[-1, :, :3] - ef.xAB[i][-1].T))
+        errlist = [np.average(np.abs(ef.clist[i].get_particle_file()['tracers{0}/state'.format(s)][...][-1, :, :3] - ef.xAB[i][-1].T))
                    for i in range(len(ef.clist))]
         a.plot(ef.dtlist, errlist,
                label = 'directAB{0}'.format(s),
-- 
GitLab