diff --git a/TurTLE/DNS.py b/TurTLE/DNS.py
index d69af1ead1f0006a71c4b520a8ffa5a2df89a1f7..731cab3c6e86e667f1bf67e72e8907f51a4daaef 100644
--- a/TurTLE/DNS.py
+++ b/TurTLE/DNS.py
@@ -131,14 +131,6 @@ class DNS(_code):
         self.parameter_description['dky'] = 'Smallest wavenumber in the y direction for a pseudo-spectral run.'
         self.parameters['dkz'] = float(1.0)
         self.parameter_description['dkz'] = 'Smallest wavenumber in the z direction for a pseudo-spectral run.'
-        self.parameters['niter_todo'] = int(8)
-        self.parameter_description['niter_todo'] = 'Number of iterations to compute during a single run.'
-        self.parameters['niter_stat'] = int(1)
-        self.parameter_description['niter_stat'] = 'Interval (in iterations) over which to compute field statistics (i.e. call `do_stats`).'
-        self.parameters['niter_out'] = int(8)
-        self.parameter_description['niter_out'] = 'Output is performed every `NITER_OUT` iterations.'
-        self.parameters['checkpoints_per_file'] = int(1)
-        self.parameter_description['checkpoints_per_file'] = 'Number of checkpoints to store in a single checkpoint file. Rule of thumb: files should hold gigabytes of data, rather than megabytes.'
         self.parameters['dt'] = float(0.01)
         self.parameter_description['dt'] = 'Fixed timestep to use. It is strongly recommended not to change this value in between jobs.'
         self.parameters['nu'] = float(0.1)
@@ -238,22 +230,6 @@ class DNS(_code):
                 self.parameters['nz']//2 + 1).astype(np.float64)*self.parameters['dkz']
         kspace['kz'] = np.roll(kspace['kz'], self.parameters['nz']//2+1)
         return kspace
-    def get_data_file_name(self):
-        return os.path.join(self.work_dir, self.simname + '.h5')
-    def get_data_file(self):
-        return h5py.File(self.get_data_file_name(), 'r')
-    def get_particle_file_name(self):
-        return os.path.join(self.work_dir, self.simname + '_particles.h5')
-    def get_particle_file(self):
-        return h5py.File(self.get_particle_file_name(), 'r')
-    def get_cache_file_name(self):
-        return os.path.join(self.work_dir, self.simname + '_cache.h5')
-    def get_cache_file(self):
-        return h5py.File(self.get_cache_file_name(), 'r')
-    def get_postprocess_file_name(self):
-        return self.get_cache_file_name()
-    def get_postprocess_file(self):
-        return h5py.File(self.get_postprocess_file_name(), 'r')
     def compute_statistics(
             self,
             iter0 = 0,
@@ -878,15 +854,6 @@ class DNS(_code):
         opt = self.prepare_launch(args = args)
         self.launch_jobs(opt = opt, **kwargs)
         return None
-    def get_checkpoint_0_fname(self):
-        return os.path.join(
-                self.work_dir,
-                self.simname + '_checkpoint_0.h5')
-    def get_checkpoint_fname(self, iteration = 0):
-        checkpoint = (iteration // self.parameters['niter_out']) // self.parameters['checkpoints_per_file']
-        return os.path.join(
-                self.work_dir,
-                self.simname + '_checkpoint_{0}.h5'.format(checkpoint))
     def generate_tracer_state(
             self,
             rseed = None,
diff --git a/TurTLE/PP.py b/TurTLE/PP.py
index 4699b52d8cd9f662f380bf0eff337e3595f9572c..f653f5dbd9b9cb05fdad85529bf625a90f674b5a 100644
--- a/TurTLE/PP.py
+++ b/TurTLE/PP.py
@@ -152,18 +152,11 @@ class PP(_code):
         elif dns_type == 'get_rfields':
             pars['TrS2_on'] = int(0)
         return pars
-    def get_data_file_name(self):
-        return os.path.join(self.work_dir, self.simname + '.h5')
-    def get_data_file(self):
-        return h5py.File(self.get_data_file_name(), 'r')
-    def get_particle_file_name(self):
-        return os.path.join(self.work_dir, self.simname + '_particles.h5')
-    def get_particle_file(self):
-        return h5py.File(self.get_particle_file_name(), 'r')
     def get_postprocess_file_name(self):
+        """
+        Reimplemented from `_base` for postprocessing codes.
+        """
         return os.path.join(self.work_dir, self.simname + '_postprocess.h5')
-    def get_postprocess_file(self):
-        return h5py.File(self.get_postprocess_file_name(), 'r')
     def compute_statistics(self, iter0 = 0, iter1 = None):
         """Run basic postprocessing on raw data.
         The energy spectrum :math:`E(t, k)` and the enstrophy spectrum
@@ -779,10 +772,10 @@ class PP(_code):
         last_iteration = df['iteration'][()]
         cppf = df['parameters/checkpoints_per_file'][()]
         niter_out = df['parameters/niter_out'][()]
-        with h5py.File(os.path.join(self.work_dir, self.simname + '_fields.h5'), 'a') as ff:
+        with h5py.File(self.get_fields_fname(), 'a') as ff:
             ff.require_group('vorticity')
             ff.require_group('vorticity/complex')
-            checkpoint_file_list = [self.simname + '_checkpoint_{0}.h5'.format(cp)
+            checkpoint_file_list = [self.get_checkpoint_fname(checkpoint = cp)
                                     for cp in range(df['checkpoint'][()]+1)]
             for cpf_name in checkpoint_file_list:
                 if os.path.exists(cpf_name):
diff --git a/TurTLE/TEST.py b/TurTLE/TEST.py
index 925f71f65fec8e1e9474aa78dc5ce48bd3dc440a..0b7d8df5db4f05804d3ed0091f24dff2ec6ecff3 100644
--- a/TurTLE/TEST.py
+++ b/TurTLE/TEST.py
@@ -137,7 +137,6 @@ class TEST(_code):
             pars['nxparticles'] = 13
             pars['nzparticles'] = 11
             pars['dt'] = 0.125
-            pars['niter_todo'] = 128
         if dns_type == 'phase_shift_test':
             pars['random_phase_seed'] = 1
         if dns_type in ['dealias_test', 'Gauss_field_test']:
@@ -176,10 +175,6 @@ class TEST(_code):
                 self.parameters['nz']//2 + 1).astype(np.float64)*self.parameters['dkz']
         kspace['kz'] = np.roll(kspace['kz'], self.parameters['nz']//2+1)
         return kspace
-    def get_data_file_name(self):
-        return os.path.join(self.work_dir, self.simname + '.h5')
-    def get_data_file(self):
-        return h5py.File(self.get_data_file_name(), 'r')
     def write_par(
             self,
             iter0 = 0,
diff --git a/TurTLE/_base.py b/TurTLE/_base.py
index 2b882dd0bb0004640901bef83cac7d228dc3bb4a..da1c91fc8a44b3270d77e6c30c4c9124d9d0cc8b 100644
--- a/TurTLE/_base.py
+++ b/TurTLE/_base.py
@@ -41,11 +41,20 @@ class _base(object):
         ### simulation parameters
         self.parameters = {'nx' : 32,
                            'ny' : 32,
-                           'nz' : 32}
+                           'nz' : 32,
+                           'niter_todo' : int(8),
+                           'niter_stat' : int(1),
+                           'niter_out' : int(8),
+                           'checkpoints_per_file' : int(1),
+                           }
         self.parameter_description = {
-                'nx' : 'Number of real-space grid nodes in the x direction.',
-                'ny' : 'Number of real-space grid nodes in the y direction.',
-                'nz' : 'Number of real-space grid nodes in the z direction.',
+                'nx' : 'Number of real-space grid nodes in the x direction (fastest array index in real-space representation).',
+                'ny' : 'Number of real-space grid nodes in the y direction.',
+                'nz' : 'Number of real-space grid nodes in the z direction (slowest array index in real-space representation).',
+                'niter_todo' : 'Number of iterations to compute during a single run.',
+                'niter_stat' : 'Interval (in iterations) over which to compute field statistics (i.e. call `do_stats`).',
+                'niter_out' : 'Output is performed every `NITER_OUT` iterations.',
+                'checkpoints_per_file' : 'Number of checkpoints to store in a single checkpoint file. Rule of thumb: files should hold gigabytes of data, rather than megabytes.',
                 }
         self.string_length = 512
         self.work_dir = os.path.realpath(work_dir)
@@ -366,4 +375,32 @@ class _base(object):
                     help = description,
                     default = None)
         return None
+    def get_data_file_name(self):
+        return os.path.join(self.work_dir, self.simname + '.h5')
+    def get_data_file(self):
+        return h5py.File(self.get_data_file_name(), 'r')
+    def get_particle_file_name(self):
+        return os.path.join(self.work_dir, self.simname + '_particles.h5')
+    def get_particle_file(self):
+        return h5py.File(self.get_particle_file_name(), 'r')
+    def get_cache_file_name(self):
+        return os.path.join(self.work_dir, self.simname + '_cache.h5')
+    def get_cache_file(self):
+        return h5py.File(self.get_cache_file_name(), 'r')
+    def get_postprocess_file_name(self):
+        return self.get_cache_file_name()
+    def get_postprocess_file(self):
+        return h5py.File(self.get_postprocess_file_name(), 'r')
+    def get_checkpoint_0_fname(self):
+        return os.path.join(
+                self.work_dir,
+                self.simname + '_checkpoint_0.h5')
+    def get_checkpoint_fname(self, iteration = 0, checkpoint = None):
+        if type(checkpoint) == type(None):
+            checkpoint = (iteration // self.parameters['niter_out']) // self.parameters['checkpoints_per_file']
+        return os.path.join(
+                self.work_dir,
+                self.simname + '_checkpoint_{0}.h5'.format(checkpoint))
+    def get_fields_fname(self):
+        return os.path.join(self.work_dir, self.simname + '_fields.h5')
diff --git a/TurTLE/test/test_particle_integration.py b/TurTLE/test/test_particle_integration.py
index 93bb0f4af35a29174e53e60cbc95490554c1d355..def55e5bde50af678b758e49aef7c21d027678b0 100644
--- a/TurTLE/test/test_particle_integration.py
+++ b/TurTLE/test/test_particle_integration.py
@@ -11,7 +11,8 @@ def main():
     c.launch([
         'test_particle_integration',
         '--np', '4',
-        '--ntpp', '3'])
+        '--ntpp', '3',
+        '--niter_todo', '128'])
    data_file = h5py.File(c.simname + '_particles.h5', 'r')
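
The patch above consolidates the HDF5 file-name helpers (`get_data_file_name`, `get_checkpoint_fname`, `get_fields_fname`, etc.) and the iteration-control parameters (`niter_todo`, `niter_stat`, `niter_out`, `checkpoints_per_file`) in `_base`, so `DNS`, `PP` and `TEST` inherit a single implementation; `PP` keeps only its `get_postprocess_file_name` override, and the particle-integration test now passes `--niter_todo 128` explicitly because the `TEST`-specific default was dropped. A minimal usage sketch of the shared helpers follows, assuming a TurTLE build that includes this patch; the driver script and the `work_dir`/`simname` values are illustrative only, not part of the change:

```python
from TurTLE.DNS import DNS

# Hypothetical driver; './demo' and 'demo_run' are placeholder names.
c = DNS(work_dir = './demo', simname = 'demo_run')

# Iteration-control parameters now get their defaults from _base.parameters,
# shared by DNS, PP and TEST alike.
print(c.parameters['niter_todo'],            # 8
      c.parameters['niter_out'],             # 8
      c.parameters['checkpoints_per_file'])  # 1

# File-name helpers are inherited from _base.
print(c.get_data_file_name())   # <work_dir>/demo_run.h5
print(c.get_fields_fname())     # <work_dir>/demo_run_fields.h5

# get_checkpoint_fname accepts either an iteration (mapped through
# niter_out and checkpoints_per_file) or an explicit checkpoint index.
print(c.get_checkpoint_fname(iteration = 16))  # (16 // 8) // 1 -> checkpoint 2
print(c.get_checkpoint_fname(checkpoint = 3))  # <work_dir>/demo_run_checkpoint_3.h5
```

The explicit `checkpoint` keyword is what the `PP` change above relies on when it enumerates existing checkpoint files by index rather than by iteration.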