diff --git a/bfps/DNS.py b/bfps/DNS.py
index f360db8b8cad20578ffd5669770b92301b40a33c..b72527a85e52189f9e16104d5dac1813e65673a8 100644
--- a/bfps/DNS.py
+++ b/bfps/DNS.py
@@ -430,9 +430,7 @@ class DNS(_code):
         return None
     def write_par(
             self,
-            iter0 = 0,
-            particle_ic = None,
-            particles_off = False):
+            iter0 = 0):
         assert (self.parameters['niter_todo'] % self.parameters['niter_stat'] == 0)
         assert (self.parameters['niter_todo'] % self.parameters['niter_out'] == 0)
         assert (self.parameters['niter_out'] % self.parameters['niter_stat'] == 0)
@@ -479,40 +477,8 @@ class DNS(_code):
                          4),
                 dtype = np.int64)
             ofile['checkpoint'] = int(0)
-        if (self.dns_type in ['NSVE', 'NSVE_no_output']) or particles_off:
+        if (self.dns_type in ['NSVE', 'NSVE_no_output']):
             return None
-
-        if type(particle_ic) == type(None):
-            pbase_shape = (self.parameters['nparticles'],)
-            number_of_particles = self.parameters['nparticles']
-        else:
-            pbase_shape = particle_ic.shape[:-1]
-            assert(particle_ic.shape[-1] == 3)
-            number_of_particles = 1
-            for val in pbase_shape[1:]:
-                number_of_particles *= val
-        ncomponents = 3
-        if self.dns_type in ['NSVEcomplex_particles']:
-            ncomponents = 6
-        with h5py.File(self.get_checkpoint_0_fname(), 'a') as ofile:
-            s = 0
-            if not 'tracers{0}'.format(s) in ofile.keys():
-                ofile.create_group('tracers{0}'.format(s))
-                ofile.create_group('tracers{0}/rhs'.format(s))
-                ofile.create_group('tracers{0}/state'.format(s))
-            ofile['tracers{0}/rhs'.format(s)].create_dataset(
-                    '0',
-                    shape = (
-                        (self.parameters['tracers{0}_integration_steps'.format(s)],) +
-                        pbase_shape +
-                        (ncomponents,)),
-                    dtype = np.float)
-            ofile['tracers{0}/state'.format(s)].create_dataset(
-                    '0',
-                    shape = (
-                        pbase_shape +
-                        (ncomponents,)),
-                    dtype = np.float)
         return None
     def job_parser_arguments(
             self,
@@ -818,35 +784,56 @@ class DNS(_code):
     def generate_tracer_state(
             self,
             rseed = None,
-            species = 0):
-        with h5py.File(self.get_checkpoint_0_fname(), 'a') as data_file:
-            dset = data_file[
-                    'tracers{0}/state/0'.format(species)]
-            if not type(rseed) == type(None):
-                np.random.seed(rseed)
-            nn = self.parameters['nparticles']
-            cc = int(0)
-            batch_size = int(1e6)
-            def get_random_phases(npoints):
-                return np.random.random(
-                            (npoints, 3))*2*np.pi
-            def get_random_versors(npoints):
-                bla = np.random.normal(
-                        size = (npoints, 3))
-                bla /= np.sum(bla**2, axis = 1)[:, None]**.5
-                return bla
-            while nn > 0:
-                if nn > batch_size:
-                    dset[cc*batch_size:(cc+1)*batch_size, :3] = get_random_phases(batch_size)
-                    if dset.shape[1] == 6:
-                        dset[cc*batch_size:(cc+1)*batch_size, 3:] = get_random_versors(batch_size)
-                    nn -= batch_size
-                else:
-                    dset[cc*batch_size:cc*batch_size+nn, :3] = get_random_phases(nn)
-                    if dset.shape[1] == 6:
-                        dset[cc*batch_size:cc*batch_size+nn, 3:] = get_random_versors(nn)
-                    nn = 0
-                cc += 1
+            species = 0,
+            integration_steps = None,
+            ncomponents = 3):
+        try:
+            if type(integration_steps) == type(None):
+                integration_steps = self.NSVEp_extra_parameters['tracers0_integration_steps']
+            if 'tracers{0}_integration_steps'.format(species) in self.parameters.keys():
+                integration_steps = self.parameters['tracers{0}_integration_steps'.format(species)]
+            if self.dns_type == 'NSVEcomplex_particles' and species == 0:
+                ncomponents = 6
+            with h5py.File(self.get_checkpoint_0_fname(), 'a') as data_file:
+                nn = self.parameters['nparticles']
+                if not 'tracers{0}'.format(species) in data_file.keys():
+                    data_file.create_group('tracers{0}'.format(species))
+                    data_file.create_group('tracers{0}/rhs'.format(species))
+                    data_file.create_group('tracers{0}/state'.format(species))
+                data_file['tracers{0}/rhs'.format(species)].create_dataset(
+                        '0',
+                        shape = (integration_steps, nn, ncomponents,),
+                        dtype = np.float)
+                dset = data_file['tracers{0}/state'.format(species)].create_dataset(
+                        '0',
+                        shape = (nn, ncomponents,),
+                        dtype = np.float)
+                if not type(rseed) == type(None):
+                    np.random.seed(rseed)
+                cc = int(0)
+                batch_size = int(1e6)
+                def get_random_phases(npoints):
+                    return np.random.random(
+                                (npoints, 3))*2*np.pi
+                def get_random_versors(npoints):
+                    bla = np.random.normal(
+                            size = (npoints, 3))
+                    bla /= np.sum(bla**2, axis = 1)[:, None]**.5
+                    return bla
+                while nn > 0:
+                    if nn > batch_size:
+                        dset[cc*batch_size:(cc+1)*batch_size, :3] = get_random_phases(batch_size)
+                        if dset.shape[1] == 6:
+                            dset[cc*batch_size:(cc+1)*batch_size, 3:] = get_random_versors(batch_size)
+                        nn -= batch_size
+                    else:
+                        dset[cc*batch_size:cc*batch_size+nn, :3] = get_random_phases(nn)
+                        if dset.shape[1] == 6:
+                            dset[cc*batch_size:cc*batch_size+nn, 3:] = get_random_versors(nn)
+                        nn = 0
+                    cc += 1
+        except Exception as e:
+            print(e)
         return None
     def generate_vector_field(
             self,
@@ -995,66 +982,61 @@ class DNS(_code):
             particle_file.create_group('tracers0/pressure_gradient')
             particle_file.create_group('tracers0/pressure_Hessian')
         return None
+    def generate_initial_condition(
+            self,
+            opt = None):
+        # take care of fields' initial condition
+        # first, check if initial field exists
+        need_field = False
+        if not os.path.exists(self.get_checkpoint_0_fname()):
+            need_field = True
+        else:
+            f = h5py.File(self.get_checkpoint_0_fname(), 'r')
+            try:
+                dset = f['vorticity/complex/0']
+                need_field = (dset.shape != (self.parameters['ny'],
+                                             self.parameters['nz'],
+                                             self.parameters['nx']//2+1,
+                                             3))
+            except:
+                need_field = True
+            f.close()
+        if need_field:
+            f = h5py.File(self.get_checkpoint_0_fname(), 'a')
+            if len(opt.src_simname) > 0:
+                source_cp = 0
+                src_file = 'not_a_file'
+                while True:
+                    src_file = os.path.join(
+                            os.path.realpath(opt.src_work_dir),
+                            opt.src_simname + '_checkpoint_{0}.h5'.format(source_cp))
+                    f0 = h5py.File(src_file, 'r')
+                    if '{0}'.format(opt.src_iteration) in f0['vorticity/complex'].keys():
+                        f0.close()
+                        break
+                    source_cp += 1
+                self.copy_complex_field(
+                        src_file,
+                        'vorticity/complex/{0}'.format(opt.src_iteration),
+                        f,
+                        'vorticity/complex/{0}'.format(0))
+            else:
+                data = self.generate_vector_field(
+                        write_to_file = False,
+                        spectra_slope = 2.0,
+                        amplitude = 0.05)
+                f['vorticity/complex/{0}'.format(0)] = data
+            f.close()
+        # now take care of particles' initial condition
+        if self.dns_type in ['NSVEparticles', 'NSVEcomplex_particles', 'NSVEparticles_no_output', 'NSVEp_extra_sampling']:
+            self.generate_particle_data(opt = opt)
+        return None
     def launch_jobs(
             self,
-            opt = None,
-            particle_initial_condition = None):
-        if not os.path.exists(os.path.join(self.work_dir, self.simname + '.h5')):
-            # take care of fields' initial condition
-            if not os.path.exists(self.get_checkpoint_0_fname()):
-                f = h5py.File(self.get_checkpoint_0_fname(), 'w')
-                if len(opt.src_simname) > 0:
-                    source_cp = 0
-                    src_file = 'not_a_file'
-                    while True:
-                        src_file = os.path.join(
-                                os.path.realpath(opt.src_work_dir),
-                                opt.src_simname + '_checkpoint_{0}.h5'.format(source_cp))
-                        f0 = h5py.File(src_file, 'r')
-                        if '{0}'.format(opt.src_iteration) in f0['vorticity/complex'].keys():
-                            f0.close()
-                            break
-                        source_cp += 1
-                    self.copy_complex_field(
-                            src_file,
-                            'vorticity/complex/{0}'.format(opt.src_iteration),
-                            f,
-                            'vorticity/complex/{0}'.format(0))
-                else:
-                    data = self.generate_vector_field(
-                            write_to_file = False,
-                            spectra_slope = 2.0,
-                            amplitude = 0.05)
-                    f['vorticity/complex/{0}'.format(0)] = data
-                f.close()
-            ## take care of particles' initial condition
-            #if self.dns_type in ['NSVEparticles', 'NSVEparticles_no_output']:
-            #    if opt.pclouds > 1:
-            #        np.random.seed(opt.particle_rand_seed)
-            #        if opt.pcloud_type == 'random-cube':
-            #            particle_initial_condition = (
-            #                np.random.random((opt.pclouds, 1, 3))*2*np.pi +
-            #                np.random.random((1, self.parameters['nparticles'], 3))*opt.particle_cloud_size)
-            #        elif opt.pcloud_type == 'regular-cube':
-            #            onedarray = np.linspace(
-            #                    -opt.particle_cloud_size/2,
-            #                    opt.particle_cloud_size/2,
-            #                    self.parameters['nparticles'])
-            #            particle_initial_condition = np.zeros(
-            #                    (opt.pclouds,
-            #                     self.parameters['nparticles'],
-            #                     self.parameters['nparticles'],
-            #                     self.parameters['nparticles'], 3),
-            #                    dtype = np.float64)
-            #            particle_initial_condition[:] = \
-            #                np.random.random((opt.pclouds, 1, 1, 1, 3))*2*np.pi
-            #            particle_initial_condition[..., 0] += onedarray[None, None, None, :]
-            #            particle_initial_condition[..., 1] += onedarray[None, None, :, None]
-            #            particle_initial_condition[..., 2] += onedarray[None, :, None, None]
-            self.write_par(
-                    particle_ic = None)
-            if self.dns_type in ['NSVEparticles', 'NSVEcomplex_particles', 'NSVEparticles_no_output', 'NSVEp_extra_sampling']:
-                self.generate_particle_data(opt = opt)
+            opt = None):
+        if not os.path.exists(self.get_data_file_name()):
+            self.generate_initial_condition(opt = opt)
+            self.write_par()
         self.run(
                 nb_processes = opt.nb_processes,
                 nb_threads_per_process = opt.nb_threads_per_process,
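
Usage note for the DNS.py changes above: write_par() no longer creates the tracer datasets; generate_tracer_state() now both creates and fills tracers{s}/state/0 and tracers{s}/rhs/0, and the reworked C++ reader further down accepts any leading shape for these datasets. A cloud-shaped initial condition can therefore be injected by reshaping the checkpoint datasets after generation. A minimal sketch, assuming a simulation named 'sim' and an illustrative 10x100 cloud layout (neither is part of this patch):

    import h5py
    import numpy as np

    nclouds, npc = 10, 100  # illustrative cloud layout
    with h5py.File('sim_checkpoint_0.h5', 'a') as f:
        state = f['tracers0/state/0'][...]  # (nclouds*npc, 3), as created by generate_tracer_state
        rhs = f['tracers0/rhs/0'][...]      # (integration_steps, nclouds*npc, 3)
        del f['tracers0/state/0'], f['tracers0/rhs/0']
        # only the trailing component axis is fixed; leading axes are free
        f['tracers0/state/0'] = state.reshape(nclouds, npc, 3)
        f['tracers0/rhs/0'] = rhs.reshape(rhs.shape[0], nclouds, npc, 3)

The new bfps/test/test_particle_clouds.py at the end of this patch exercises exactly this pattern.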
diff --git a/bfps/__main__.py b/bfps/__main__.py
index 16a7cf7d099c49a39368a8ff09cb05bf890feb6f..cf269edbaea91acd4b3de595782b92e32e1cbd3b 100644
--- a/bfps/__main__.py
+++ b/bfps/__main__.py
@@ -33,7 +33,7 @@ from .PP import PP
 from .TEST import TEST

 def main():
-    parser = argparse.ArgumentParser(prog = 'bfps')
+    parser = argparse.ArgumentParser(prog = 'bfps', conflict_handler = 'resolve')
     parser.add_argument(
             '-v', '--version',
             action = 'version',
diff --git a/bfps/cpp/full_code/NSVEparticles.cpp b/bfps/cpp/full_code/NSVEparticles.cpp
index 90184d8bb6e9eea430e6b0af366132893d3c7136..e74716404f8bcbe7835d17ae93fa517fb0b25c38 100644
--- a/bfps/cpp/full_code/NSVEparticles.cpp
+++ b/bfps/cpp/full_code/NSVEparticles.cpp
@@ -1,4 +1,26 @@
-
+/**********************************************************************
+*                                                                     *
+*  Copyright 2019 Max Planck Institute                                *
+*                 for Dynamics and Self-Organization                  *
+*                                                                     *
+*  This file is part of bfps.                                         *
+*                                                                     *
+*  bfps is free software: you can redistribute it and/or modify       *
+*  it under the terms of the GNU General Public License as published  *
+*  by the Free Software Foundation, either version 3 of the License,  *
+*  or (at your option) any later version.                             *
+*                                                                     *
+*  bfps is distributed in the hope that it will be useful,            *
+*  but WITHOUT ANY WARRANTY; without even the implied warranty of     *
+*  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the      *
+*  GNU General Public License for more details.                       *
+*                                                                     *
+*  You should have received a copy of the GNU General Public License  *
+*  along with bfps.  If not, see <http://www.gnu.org/licenses/>       *
+*                                                                     *
+*  Contact: Cristian.Lalescu@ds.mpg.de                                *
+*                                                                     *
+**********************************************************************/
@@ -39,6 +61,7 @@ int NSVEparticles<rnumber>::initialize(void)
             "tracers0",
             nparticles,
             tracers0_integration_steps);
+    this->particles_output_writer_mpi->setParticleFileLayout(this->ps->getParticleFileLayout());
     this->particles_sample_writer_mpi = new particles_output_sampling_hdf5<
         long long int, double, 3>(
             MPI_COMM_WORLD,
@@ -46,6 +69,7 @@ int NSVEparticles<rnumber>::initialize(void)
             (this->simname + "_particles.h5"),
             "tracers0",
             "position/0");
+    this->particles_sample_writer_mpi->setParticleFileLayout(this->ps->getParticleFileLayout());
     return EXIT_SUCCESS;
 }

diff --git a/bfps/cpp/particles/abstract_particles_system.hpp b/bfps/cpp/particles/abstract_particles_system.hpp
index 5d49c8f4353fae570f6562b388dfd31a22f51f7a..67c46855a59f576c186216fa613c7f88523261df 100644
--- a/bfps/cpp/particles/abstract_particles_system.hpp
+++ b/bfps/cpp/particles/abstract_particles_system.hpp
@@ -96,6 +96,9 @@ public:
             completeLoopWithVelocityGradient(dt, extra_rhs.get());
         }
     }
+
+    virtual int setParticleFileLayout(std::vector<hsize_t>) = 0;
+    virtual std::vector<hsize_t> getParticleFileLayout() = 0;
 };

 #endif
diff --git a/bfps/cpp/particles/particles_input_hdf5.hpp b/bfps/cpp/particles/particles_input_hdf5.hpp
index 5231872d2f22dce1e8422cac05a0f7b6b72f9d10..e10377bff97812ebee186ee721aeeb32e91d8fec 100644
--- a/bfps/cpp/particles/particles_input_hdf5.hpp
+++ b/bfps/cpp/particles/particles_input_hdf5.hpp
@@ -22,16 +22,19 @@ class particles_input_hdf5 : public abstract_particles_input<partsize_t, real_nu
     int my_rank;
     int nb_processes;

-    hsize_t nb_total_particles;
+    hsize_t total_number_of_particles;
     hsize_t nb_rhs;
     partsize_t nb_particles_for_me;
+    std::vector<hsize_t> particle_file_layout; // to hold the shape of initial condition array

     std::unique_ptr<real_number[]> my_particles_positions;
     std::unique_ptr<partsize_t[]> my_particles_indexes;
     std::vector<std::unique_ptr<real_number[]>> my_particles_rhs;

-    static std::vector<real_number> BuildLimitsAllProcesses(MPI_Comm mpi_comm,
-                        const real_number my_spatial_low_limit, const real_number my_spatial_up_limit){
+    static std::vector<real_number> BuildLimitsAllProcesses(
+            MPI_Comm mpi_comm,
+            const real_number my_spatial_low_limit,
+            const real_number my_spatial_up_limit){
         int my_rank;
         int nb_processes;

@@ -41,8 +44,15 @@ class particles_input_hdf5 : public abstract_particles_input<partsize_t, real_nu
         std::vector<real_number> spatial_limit_per_proc(nb_processes*2);

         real_number intervalToSend[2] = {my_spatial_low_limit, my_spatial_up_limit};
-        AssertMpi(MPI_Allgather(intervalToSend, 2, particles_utils::GetMpiType(real_number()),
-                                spatial_limit_per_proc.data(), 2, particles_utils::GetMpiType(real_number()), mpi_comm));
+        AssertMpi(
+                MPI_Allgather(
+                    intervalToSend,
+                    2,
+                    particles_utils::GetMpiType(real_number()),
+                    spatial_limit_per_proc.data(),
+                    2,
+                    particles_utils::GetMpiType(real_number()),
+                    mpi_comm));

         for(int idx_proc = 0; idx_proc < nb_processes-1 ; ++idx_proc){
             assert(spatial_limit_per_proc[idx_proc*2] <= spatial_limit_per_proc[idx_proc*2+1]);
@@ -56,18 +66,35 @@ class particles_input_hdf5 : public abstract_particles_input<partsize_t, real_nu
     }

 public:
-    particles_input_hdf5(const MPI_Comm in_mpi_comm,const std::string& inFilename,
-                         const std::string& inDatanameState, const std::string& inDatanameRhs,
-                         const real_number my_spatial_low_limit, const real_number my_spatial_up_limit)
-        : particles_input_hdf5(in_mpi_comm, inFilename, inDatanameState, inDatanameRhs,
-                               BuildLimitsAllProcesses(in_mpi_comm, my_spatial_low_limit, my_spatial_up_limit)){
+    particles_input_hdf5(
+            const MPI_Comm in_mpi_comm,
+            const std::string& inFilename,
+            const std::string& inDatanameState,
+            const std::string& inDatanameRhs,
+            const real_number my_spatial_low_limit,
+            const real_number my_spatial_up_limit)
+        : particles_input_hdf5(
+                in_mpi_comm,
+                inFilename,
+                inDatanameState,
+                inDatanameRhs,
+                BuildLimitsAllProcesses(
+                    in_mpi_comm,
+                    my_spatial_low_limit,
+                    my_spatial_up_limit)){
     }

-    particles_input_hdf5(const MPI_Comm in_mpi_comm,const std::string& inFilename,
-                         const std::string& inDatanameState, const std::string& inDatanameRhs,
-                         const std::vector<real_number>& in_spatial_limit_per_proc)
+    particles_input_hdf5(
+            const MPI_Comm in_mpi_comm,
+            const std::string& inFilename,
+            const std::string& inDatanameState,
+            const std::string& inDatanameRhs,
+            const std::vector<real_number>& in_spatial_limit_per_proc)
         : filename(inFilename),
-          mpi_comm(in_mpi_comm), my_rank(-1), nb_processes(-1), nb_total_particles(0),
+          mpi_comm(in_mpi_comm),
+          my_rank(-1),
+          nb_processes(-1),
+          total_number_of_particles(0),
           nb_particles_for_me(0){
         TIMEZONE("particles_input_hdf5");

@@ -104,9 +131,12 @@ public:
         // Last value is the position dim of the particles
         assert(state_dim_array.back() == size_particle_positions);

-        nb_total_particles = 1;
+        // compute total number of particles, store initial condition array shape
+        total_number_of_particles = 1;
+        particle_file_layout.resize(state_dim_array.size()-1);
         for (size_t idx_dim = 0; idx_dim < state_dim_array.size()-1; ++idx_dim){
-            nb_total_particles *= state_dim_array[idx_dim];
+            total_number_of_particles *= state_dim_array[idx_dim];
+            particle_file_layout[idx_dim] = state_dim_array[idx_dim];
         }

         hdfret = H5Sclose(dspace);
@@ -141,7 +171,7 @@ public:
             assert(hdfret >= 0);
         }

-        particles_utils::IntervalSplitter<hsize_t> load_splitter(nb_total_particles, nb_processes, my_rank);
+        particles_utils::IntervalSplitter<hsize_t> load_splitter(total_number_of_particles, nb_processes, my_rank);

         static_assert(std::is_same<real_number, double>::value ||
                       std::is_same<real_number, float>::value,
                       "real_number must be double or float");
@@ -158,7 +188,8 @@ public:
             hid_t dset = H5Dopen(particle_file, inDatanameState.c_str(), H5P_DEFAULT);
             assert(dset >= 0);

-            hid_t rspace = H5Dget_space(dset);
+            hsize_t file_space_dims[2] = {total_number_of_particles, size_particle_positions};
+            hid_t rspace = H5Screate_simple(2, file_space_dims, NULL);
             assert(rspace >= 0);

             hsize_t offset[2] = {load_splitter.getMyOffset(), 0};
@@ -184,11 +215,11 @@ public:
             TIMEZONE("rhs-read");
             hid_t dset = H5Dopen(particle_file, inDatanameRhs.c_str(), H5P_DEFAULT);
             assert(dset >= 0);
+            hsize_t file_space_dims[3] = {nb_rhs, total_number_of_particles, size_particle_rhs};
+            hid_t rspace = H5Screate_simple(3, file_space_dims, NULL);
+            assert(rspace >= 0);

             for(hsize_t idx_rhs = 0 ; idx_rhs < nb_rhs ; ++idx_rhs){
-                hid_t rspace = H5Dget_space(dset);
-                assert(rspace >= 0);
-
                 if(load_splitter.getMySize()){
                     split_particles_rhs[idx_rhs].reset(new real_number[load_splitter.getMySize()*size_particle_rhs]);
                 }
@@ -208,11 +239,11 @@ public:

                 rethdf = H5Sclose(mspace);
                 assert(rethdf >= 0);
-
-                rethdf = H5Sclose(rspace);
-                assert(rethdf >= 0);
             }
-            int rethdf = H5Dclose(dset);
+
+            int rethdf = H5Sclose(rspace);
+            assert(rethdf >= 0);
+            rethdf = H5Dclose(dset);
             variable_used_only_in_assert(rethdf);
             assert(rethdf >= 0);
         }
@@ -302,7 +333,7 @@ public:
     }

     partsize_t getTotalNbParticles() final{
-        return partsize_t(nb_total_particles);
+        return partsize_t(total_number_of_particles);
     }

     partsize_t getLocalNbParticles() final{
@@ -327,6 +358,10 @@ public:
         assert(my_particles_indexes != nullptr || nb_particles_for_me == 0);
         return std::move(my_particles_indexes);
     }
+
+    std::vector<hsize_t> getParticleFileLayout(){
+        return std::vector<hsize_t>(this->particle_file_layout);
+    }
 };

 #endif
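
The central trick in the particles_input_hdf5.hpp changes above: instead of using the dataset's own, possibly N-dimensional, dataspace obtained from H5Dget_space, the reader records that shape in particle_file_layout and builds a flat {total_number_of_particles, 3} dataspace with H5Screate_simple, so each MPI process can keep selecting a contiguous 2-d hyperslab of rows. The flattening is consistent because HDF5 stores datasets in row-major (C) order; in numpy terms, with a hypothetical 10x100 cloud layout:

    import numpy as np

    layout = (10, 100)  # hypothetical shape of the leading axes of the state dataset
    data = np.arange(np.prod(layout)*3, dtype=np.float64).reshape(layout + (3,))
    flat = data.reshape(-1, 3)  # the 2-d view described by the manufactured dataspace
    # row-major order: tracer i of cloud c is row c*100 + i of the flat view
    assert np.array_equal(flat[3*100 + 7], data[3, 7])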
diff --git a/bfps/cpp/particles/particles_output_hdf5.hpp b/bfps/cpp/particles/particles_output_hdf5.hpp
index 0098ba542812d68e462fadf19c94d600f4637af3..d7c987eea444d82d5c1180cd0124601c191cbf84 100644
--- a/bfps/cpp/particles/particles_output_hdf5.hpp
+++ b/bfps/cpp/particles/particles_output_hdf5.hpp
@@ -22,6 +22,7 @@ class particles_output_hdf5 : public abstract_particles_output<partsize_t,

     hid_t file_id;
     const partsize_t total_nb_particles;
+    std::vector<hsize_t> particle_file_layout; // to hold the shape of initial condition array

     hid_t dset_id_state;
     hid_t dset_id_rhs;
@@ -204,12 +205,9 @@ public:
         }

         {
-            assert(total_nb_particles >= 0);
-            assert(size_particle_positions >= 0);
-            const hsize_t datacount[2] = {
-                    hsize_t(total_nb_particles),
-                    hsize_t(size_particle_positions)};
-            hid_t dataspace = H5Screate_simple(2, datacount, NULL);
+            std::vector<hsize_t> datacount = std::vector<hsize_t>(this->particle_file_layout);
+            datacount.push_back(size_particle_positions);
+            hid_t dataspace = H5Screate_simple(datacount.size(), &datacount.front(), NULL);
             assert(dataspace >= 0);

             hid_t dataset_id = H5Dcreate(
                     dset_id_state,
@@ -228,7 +226,12 @@ public:
             hid_t memspace = H5Screate_simple(2, count, NULL);
             assert(memspace >= 0);

-            hid_t filespace = H5Dget_space(dataset_id);
+            assert(total_nb_particles >= 0);
+            assert(size_particle_positions >= 0);
+            const hsize_t file_count[2] = {hsize_t(total_nb_particles), size_particle_positions};
+            hid_t filespace = H5Screate_simple(2, file_count, NULL);
+            assert(filespace >= 0);
+
             int rethdf = H5Sselect_hyperslab(
                     filespace,
                     H5S_SELECT_SET,
@@ -257,10 +260,10 @@ public:
         }
         {
             assert(size_particle_rhs >= 0);
-            const hsize_t datacount[3] = {hsize_t(Parent::getNbRhs()),
-                                          hsize_t(total_nb_particles),
-                                          hsize_t(size_particle_rhs)};
-            hid_t dataspace = H5Screate_simple(3, datacount, NULL);
+            std::vector<hsize_t> datacount = std::vector<hsize_t>(this->particle_file_layout);
+            datacount.insert(datacount.begin(), hsize_t(Parent::getNbRhs()));
+            datacount.push_back(size_particle_rhs);
+            hid_t dataspace = H5Screate_simple(datacount.size(), &datacount.front(), NULL);
             assert(dataspace >= 0);

             hid_t dataset_id = H5Dcreate(
                     dset_id_rhs,
@@ -285,8 +288,12 @@ public:
             hid_t memspace = H5Screate_simple(3, count, NULL);
             assert(memspace >= 0);

-            hid_t filespace = H5Dget_space(dataset_id);
+            assert(total_nb_particles >= 0);
+            assert(size_particle_rhs >= 0);
+            const hsize_t file_count[3] = {hsize_t(Parent::getNbRhs()), hsize_t(total_nb_particles), size_particle_rhs};
+            hid_t filespace = H5Screate_simple(3, file_count, NULL);
             assert(filespace >= 0);
+
             int rethdf = H5Sselect_hyperslab(
                     filespace,
                     H5S_SELECT_SET,
@@ -322,6 +329,17 @@ public:
             assert(rethdf >= 0);
         }
     }
+
+    int setParticleFileLayout(std::vector<hsize_t> input_layout){
+        this->particle_file_layout.resize(input_layout.size());
+        for (unsigned int i=0; i<this->particle_file_layout.size(); i++)
+            this->particle_file_layout[i] = input_layout[i];
+        return EXIT_SUCCESS;
+    }
+
+    std::vector<hsize_t> getParticleFileLayout(void){
+        return std::vector<hsize_t>(this->particle_file_layout);
+    }
 };

 #endif//PARTICLES_OUTPUT_HDF5_HPP
diff --git a/bfps/cpp/particles/particles_output_sampling_hdf5.hpp b/bfps/cpp/particles/particles_output_sampling_hdf5.hpp
index 22dafaedcd7095bd5214864f224003ea8d8be602..3693a587abe798d9f3fbb2ca731aee05608be9c7 100644
--- a/bfps/cpp/particles/particles_output_sampling_hdf5.hpp
+++ b/bfps/cpp/particles/particles_output_sampling_hdf5.hpp
@@ -19,6 +19,7 @@ class particles_output_sampling_hdf5 : public abstract_particles_output<

     hid_t file_id, pgroup_id;
     std::string dataset_name;
+    std::vector<hsize_t> particle_file_layout; // to hold the shape of initial condition array
     const bool use_collective_io;

 public:
@@ -191,9 +192,9 @@ public:
         }
         {
             assert(size_particle_rhs >= 0);
-            const hsize_t datacount[2] = {hsize_t(Parent::getTotalNbParticles()),
-                                          hsize_t(size_particle_rhs)};
-            hid_t dataspace = H5Screate_simple(2, datacount, NULL);
+            std::vector<hsize_t> datacount = std::vector<hsize_t>(this->particle_file_layout);
+            datacount.push_back(size_particle_rhs);
+            hid_t dataspace = H5Screate_simple(datacount.size(), &datacount.front(), NULL);
             assert(dataspace >= 0);

             hid_t dataset_id = H5Dcreate(
                     pgroup_id,
@@ -215,7 +216,8 @@ public:
             hid_t memspace = H5Screate_simple(2, count, NULL);
             assert(memspace >= 0);

-            hid_t filespace = H5Dget_space(dataset_id);
+            const hsize_t file_count[2] = {hsize_t(Parent::getTotalNbParticles()), size_particle_rhs};
+            hid_t filespace = H5Screate_simple(2, file_count, NULL);
             assert(filespace >= 0);
             int rethdf = H5Sselect_hyperslab(
                     filespace,
@@ -250,6 +252,17 @@ public:
             assert(rethdf >= 0);
         }
     }
+
+    int setParticleFileLayout(std::vector<hsize_t> input_layout){
+        this->particle_file_layout.resize(input_layout.size());
+        for (unsigned int i=0; i<this->particle_file_layout.size(); i++)
+            this->particle_file_layout[i] = input_layout[i];
+        return EXIT_SUCCESS;
+    }
+
+    std::vector<hsize_t> getParticleFileLayout(void){
+        return std::vector<hsize_t>(this->particle_file_layout);
+    }
 };

 #endif
diff --git a/bfps/cpp/particles/particles_system.hpp b/bfps/cpp/particles/particles_system.hpp
index ebc7e79a67f5349c0b8c711ed9ae229a1cddc3a4..db651904b7f3e752f73846c494a8b9f6ee41b988 100644
--- a/bfps/cpp/particles/particles_system.hpp
+++ b/bfps/cpp/particles/particles_system.hpp
@@ -48,6 +48,7 @@ class particles_system : public abstract_particles_system<partsize_t, real_numbe
     partsize_t my_nb_particles;
     const partsize_t total_nb_particles;
     std::vector<std::unique_ptr<real_number[]>> my_particles_rhs;
+    std::vector<hsize_t> particle_file_layout;

     int step_idx;

@@ -351,6 +352,17 @@ public:
         return int(my_particles_rhs.size());
     }

+    int setParticleFileLayout(std::vector<hsize_t> input_layout) final{
+        this->particle_file_layout.resize(input_layout.size());
+        for (unsigned int i=0; i<this->particle_file_layout.size(); i++)
+            this->particle_file_layout[i] = input_layout[i];
+        return EXIT_SUCCESS;
+    }
+
+    std::vector<hsize_t> getParticleFileLayout(void) final{
+        return std::vector<hsize_t>(this->particle_file_layout);
+    }
+
     void checkNan() const { // TODO remove
         for(partsize_t idx_part = 0 ; idx_part < my_nb_particles ; ++idx_part){ // TODO remove me
             assert(std::isnan(my_particles_positions[idx_part*size_particle_positions+IDXC_X]) == false);
diff --git a/bfps/cpp/particles/particles_system_builder.hpp b/bfps/cpp/particles/particles_system_builder.hpp
index a9a140ac9921905513c1adcf558e071093b5afbb..916ab4bfe0f72f53a9a9b5c356d4255c78877096 100644
--- a/bfps/cpp/particles/particles_system_builder.hpp
+++ b/bfps/cpp/particles/particles_system_builder.hpp
@@ -243,6 +243,9 @@ struct particles_system_build_container {

         assert(part_sys->getNbRhs() == nsteps);

+        // store particle file layout
+        part_sys->setParticleFileLayout(generator.getParticleFileLayout());
+
         // Return the created particles system
         return std::unique_ptr<abstract_particles_system<partsize_t, particles_rnumber>>(part_sys);
     }
diff --git a/bfps/test/test_bfps_NSVEparticles.py b/bfps/test/test_bfps_NSVEparticles.py
index f914ad7dbfa7f23466c19cfba64f730b9e4cda45..fe1e7875a651b17dd9180f3cbe6d6bfe1f1b5c27 100644
--- a/bfps/test/test_bfps_NSVEparticles.py
+++ b/bfps/test/test_bfps_NSVEparticles.py
@@ -1,4 +1,29 @@
 #! /usr/bin/env python
+#######################################################################
+#                                                                     #
+#  Copyright 2019 Max Planck Institute                                #
+#                 for Dynamics and Self-Organization                  #
+#                                                                     #
+#  This file is part of bfps.                                         #
+#                                                                     #
+#  bfps is free software: you can redistribute it and/or modify       #
+#  it under the terms of the GNU General Public License as published  #
+#  by the Free Software Foundation, either version 3 of the License,  #
+#  or (at your option) any later version.                             #
+#                                                                     #
+#  bfps is distributed in the hope that it will be useful,            #
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of     #
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the      #
+#  GNU General Public License for more details.                       #
+#                                                                     #
+#  You should have received a copy of the GNU General Public License  #
+#  along with bfps.  If not, see <http://www.gnu.org/licenses/>       #
+#                                                                     #
+#  Contact: Cristian.Lalescu@ds.mpg.de                                #
+#                                                                     #
+#######################################################################
+
+
 import os
 import numpy as np

diff --git a/bfps/test/test_particle_clouds.py b/bfps/test/test_particle_clouds.py
new file mode 100644
index 0000000000000000000000000000000000000000..5d2045390f51e7f529f78a3eb7037acb3fcae3b9
--- /dev/null
+++ b/bfps/test/test_particle_clouds.py
@@ -0,0 +1,93 @@
+#! /usr/bin/env python
+#######################################################################
+#                                                                     #
+#  Copyright 2019 Max Planck Institute                                #
+#                 for Dynamics and Self-Organization                  #
+#                                                                     #
+#  This file is part of bfps.                                         #
+#                                                                     #
+#  bfps is free software: you can redistribute it and/or modify       #
+#  it under the terms of the GNU General Public License as published  #
+#  by the Free Software Foundation, either version 3 of the License,  #
+#  or (at your option) any later version.                             #
+#                                                                     #
+#  bfps is distributed in the hope that it will be useful,            #
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of     #
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the      #
+#  GNU General Public License for more details.                       #
+#                                                                     #
+#  You should have received a copy of the GNU General Public License  #
+#  along with bfps.  If not, see <http://www.gnu.org/licenses/>       #
+#                                                                     #
+#  Contact: Cristian.Lalescu@ds.mpg.de                                #
+#                                                                     #
+#######################################################################
+
+
+
+import os
+import numpy as np
+import h5py
+import sys
+
+import bfps
+from bfps import DNS
+
+
+def main():
+    nclouds = 10
+    nparticles_per_cloud = 1000
+    nparticles = nclouds*nparticles_per_cloud
+    niterations = 32
+    c = DNS()
+    c.dns_type = 'NSVEparticles'
+    c.parameters['nparticles'] = nparticles
+    c.parameters['tracers1_integration_steps'] = 4
+    c.generate_tracer_state(rseed = 2, species = 1)
+    del c.parameters['nparticles']
+    del c.parameters['tracers1_integration_steps']
+    ic_file = h5py.File(c.get_checkpoint_0_fname(), 'a')
+    ic_file['tracers0/state/0'] = ic_file['tracers1/state/0'].value.reshape(nclouds, nparticles_per_cloud, 3)
+    ic_file['tracers0/rhs/0'] = ic_file['tracers1/rhs/0'].value.reshape(4, nclouds, nparticles_per_cloud, 3)
+    ic_file.close()
+    c.launch(
+            ['NSVEparticles',
+             '-n', '32',
+             '--src-simname', 'B32p1e4',
+             '--forcing_type', 'linear',
+             '--src-wd', bfps.lib_dir + '/test',
+             '--src-iteration', '0',
+             '--np', '4',
+             '--ntpp', '1',
+             '--fftw_plan_rigor', 'FFTW_PATIENT',
+             '--niter_todo', '{0}'.format(niterations),
+             '--niter_out', '{0}'.format(niterations),
+             '--niter_stat', '1',
+             '--nparticles', '{0}'.format(nparticles),
+             '--njobs', '2',
+             '--wd', './'])
+    f0 = h5py.File(
+            os.path.join(
+                os.path.join(bfps.lib_dir, 'test'),
+                'B32p1e4_checkpoint_0.h5'),
+            'r')
+    f1 = h5py.File(c.get_checkpoint_0_fname(), 'r')
+    for iteration in [0, 32, 64]:
+        field0 = f0['vorticity/complex/{0}'.format(iteration)].value
+        field1 = f1['vorticity/complex/{0}'.format(iteration)].value
+        field_error = np.max(np.abs(field0 - field1))
+        x0 = f0['tracers0/state/{0}'.format(iteration)].value
+        x1 = f1['tracers0/state/{0}'.format(iteration)].value.reshape(x0.shape)
+        traj_error = np.max(np.abs(x0 - x1))
+        y0 = f0['tracers0/rhs/{0}'.format(iteration)].value
+        y1 = f1['tracers0/rhs/{0}'.format(iteration)].value.reshape(y0.shape)
+        rhs_error = np.max(np.abs(y0 - y1))
+        assert(field_error < 1e-5)
+        assert(traj_error < 1e-5)
+        assert(rhs_error < 1e-5)
+    print('SUCCESS! Basic test passed.')
+    return None
+
+if __name__ == '__main__':
+    main()
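
Since the writers above create state datasets with shape particle_file_layout + (3,), and rhs datasets with the integration-step axis prepended, output files mirror the shape of the initial condition instead of flattening it. A quick sanity check on a finished run; the simname 'sim', the 10x100 layout and the output iteration 32 are illustrative assumptions:

    import h5py

    with h5py.File('sim_checkpoint_0.h5', 'r') as f:
        # written by particles_output_hdf5: prints (10, 100, 3),
        # not the flattened (1000, 3)
        print(f['tracers0/state/32'].shape)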