diff --git a/bfps/NSVorticityEquation.py b/bfps/NSVorticityEquation.py
index f67ba6aee16e93d1c4a8a9a710c3136eae678398..9d79eafb39cb5ca94ca545013c6b98e922da8a03 100644
--- a/bfps/NSVorticityEquation.py
+++ b/bfps/NSVorticityEquation.py
@@ -140,28 +140,34 @@ class NSVorticityEquation(_fluid_particle_base):
                 fs->kk, // (kspace object, contains dkx, dky, dkz)
                 tracers0_integration_steps, // to check coherency between parameters and hdf input file (nb rhs)
                 nparticles, // to check coherency between parameters and hdf input file
-                fname, // particles input filename
-                std::string("/tracers0/state/0"), // dataset name for initial input
-                std::string("/tracers0/rhs/0"), // dataset name for initial input
+                fs->get_current_fname(), // particles input filename
+                "/tracers0/state/0", // dataset name for initial input
+                "/tracers0/rhs/0", // dataset name for initial input
                 tracers0_neighbours, // parameter (interpolation no neighbours)
                 tracers0_smoothness, // parameter
                 MPI_COMM_WORLD);
-            particles_output_hdf5<double,3,3> particles_output_writer_mpi(MPI_COMM_WORLD, fname, nparticles, tracers0_integration_steps,
-                "/tracers0/state/", "/tracers0/rhs/");
+            particles_output_hdf5<double,3,3> particles_output_writer_mpi(
+                    MPI_COMM_WORLD,
+                    "tracers0",
+                    nparticles,
+                    tracers0_integration_steps);
             """
         self.particle_loop += """
                 fs->compute_velocity(fs->cvorticity);
                 fs->cvelocity->ift();
                 ps->completeLoop(dt);
             """
-        output_particles = """
-            particles_output_writer_mpi.save(ps->getParticlesPositions(),
-                                             ps->getParticlesRhs(),
-                                             ps->getParticlesIndexes(),
-                                             ps->getLocalNbParticles(),
-                                             iteration+1);
+        self.particle_output = """
+        {
+            particles_output_writer_mpi.open_file(fs->get_current_fname());
+            particles_output_writer_mpi.save(ps->getParticlesPositions(),
+                                             ps->getParticlesRhs(),
+                                             ps->getParticlesIndexes(),
+                                             ps->getLocalNbParticles(),
+                                             iteration+1);
+            particles_output_writer_mpi.close_file();
+        }
             """
-        self.fluid_output += output_particles
         self.particle_end += 'ps.release();\n'
         return None
     def create_stat_output(
@@ -356,6 +362,7 @@ class NSVorticityEquation(_fluid_particle_base):
         self.fluid_loop = 'fs->step(dt);\n'
         self.fluid_loop += ('if (fs->iteration % niter_out == 0)\n{\n' +
                             self.fluid_output +
+                            self.particle_output +
                             self.store_checkpoint +
                             '\n}\n' +
                             'if (stop_code_now){\n' +
@@ -363,6 +370,7 @@ class NSVorticityEquation(_fluid_particle_base):
                             'break;\n}\n')
         self.fluid_end = ('if (fs->iteration % niter_out != 0)\n{\n' +
                           self.fluid_output +
+                          self.particle_output +
                           self.store_checkpoint +
                           'DEBUG_MSG("checkpoint value is %d\\n", checkpoint);\n' +
                           '\n}\n' +
@@ -600,7 +608,7 @@ class NSVorticityEquation(_fluid_particle_base):
             number_of_particles = 1
             for val in pbase_shape[1:]:
                 number_of_particles *= val
-        with h5py.File(self.get_particle_file_name(), 'a') as ofile:
+        with h5py.File(self.get_checkpoint_0_fname(), 'a') as ofile:
             s = 0
             ofile.create_group('tracers{0}'.format(s))
             ofile.create_group('tracers{0}/rhs'.format(s))
@@ -746,6 +754,10 @@ class NSVorticityEquation(_fluid_particle_base):
         self.finalize_code()
         self.launch_jobs(opt = opt)
         return None
+    def get_checkpoint_0_fname(self):
+        return os.path.join(
+                self.work_dir,
+                self.simname + '_checkpoint_0.h5')
     def generate_tracer_state(
             self,
             rseed = None,
@@ -764,7 +776,7 @@ class NSVorticityEquation(_fluid_particle_base):
         if testing:
             #data[0] = np.array([3.26434, 4.24418, 3.12157])
             data[:] = np.array([ 0.72086101, 2.59043666, 6.27501953])
-        with h5py.File(self.get_particle_file_name(), 'r+') as data_file:
+        with h5py.File(self.get_checkpoint_0_fname(), 'a') as data_file:
             data_file['tracers{0}/state/0'.format(species)][:] = data
         if write_to_file:
             data.tofile(
@@ -776,6 +788,32 @@
             self,
             opt = None):
         if not os.path.exists(os.path.join(self.work_dir, self.simname + '.h5')):
+            # take care of fields' initial condition
+            if not os.path.exists(self.get_checkpoint_0_fname()):
+                f = h5py.File(self.get_checkpoint_0_fname(), 'w')
+                if len(opt.src_simname) > 0:
+                    source_cp = 0
+                    src_file = 'not_a_file'
+                    while True:
+                        src_file = os.path.join(
+                                os.path.realpath(opt.src_work_dir),
+                                opt.src_simname + '_checkpoint_{0}.h5'.format(source_cp))
+                        f0 = h5py.File(src_file, 'r')
+                        if '{0}'.format(opt.src_iteration) in f0['vorticity/complex'].keys():
+                            f0.close()
+                            break
+                        source_cp += 1
+                    f['vorticity/complex/{0}'.format(0)] = h5py.ExternalLink(
+                            src_file,
+                            'vorticity/complex/{0}'.format(opt.src_iteration))
+                else:
+                    data = self.generate_vector_field(
+                            write_to_file = False,
+                            spectra_slope = 2.0,
+                            amplitude = 0.05)
+                    f['vorticity/complex/{0}'.format(0)] = data
+                f.close()
+            # take care of particles' initial condition
             particle_initial_condition = None
             if opt.pclouds > 1:
                 np.random.seed(opt.particle_rand_seed)
@@ -808,33 +846,6 @@ class NSVorticityEquation(_fluid_particle_base):
                         data = particle_initial_condition)
                 for s in range(1, self.particle_species):
                     self.generate_tracer_state(species = s, data = data)
-            init_condition_file = os.path.join(
-                    self.work_dir,
-                    self.simname + '_checkpoint_0.h5')
-            if not os.path.exists(init_condition_file):
-                f = h5py.File(init_condition_file, 'w')
-                if len(opt.src_simname) > 0:
-                    source_cp = 0
-                    src_file = 'not_a_file'
-                    while True:
-                        src_file = os.path.join(
-                                os.path.realpath(opt.src_work_dir),
-                                opt.src_simname + '_checkpoint_{0}.h5'.format(source_cp))
-                        f0 = h5py.File(src_file, 'r')
-                        if '{0}'.format(opt.src_iteration) in f0['vorticity/complex'].keys():
-                            f0.close()
-                            break
-                        source_cp += 1
-                    f['vorticity/complex/{0}'.format(0)] = h5py.ExternalLink(
-                            src_file,
-                            'vorticity/complex/{0}'.format(opt.src_iteration))
-                else:
-                    data = self.generate_vector_field(
-                            write_to_file = False,
-                            spectra_slope = 2.0,
-                            amplitude = 0.05)
-                    f['vorticity/complex/{0}'.format(0)] = data
-                f.close()
         self.run(
                 nb_processes = opt.nb_processes,
                 nb_threads_per_process = opt.nb_threads_per_process,
diff --git a/bfps/_fluid_base.py b/bfps/_fluid_base.py
index dac5a581c73bc456adcbd82112a29b7353635075..e6980eb466ef3bcc08a67612666843af31c142f7 100644
--- a/bfps/_fluid_base.py
+++ b/bfps/_fluid_base.py
@@ -88,6 +88,7 @@ class _fluid_particle_base(_code):
         self.particle_definitions = ''
         self.particle_start = ''
         self.particle_loop = ''
+        self.particle_output = ''
         self.particle_end = ''
         self.particle_stat_src = ''
         self.file_datasets_grow = ''
@@ -297,9 +298,9 @@ class _fluid_particle_base(_code):
         self.main += self.fluid_loop
         self.main += output_time_difference.format('frame_index')
         self.main += '}\n'
+        self.main += self.fluid_end
         if self.particle_species > 0:
             self.main += self.particle_end
-        self.main += self.fluid_end
         return None
     def read_rfield(
             self,
diff --git a/bfps/cpp/particles/abstract_particles_output.hpp b/bfps/cpp/particles/abstract_particles_output.hpp
index 955f1e6fd07f98421837bd9bf359026ea9535b74..20f5de4aa31bd55f49620a5a2a721aaf89447d18 100644
--- a/bfps/cpp/particles/abstract_particles_output.hpp
+++ b/bfps/cpp/particles/abstract_particles_output.hpp
@@ -47,6 +47,10 @@ protected:
         return nb_rhs;
     }
 
+    int getMyRank(){
+        return this->my_rank;
+    }
+
 public:
     abstract_particles_output(MPI_Comm in_mpi_com, const int inTotalNbParticles, const int in_nb_rhs)
         : mpi_com(in_mpi_com), my_rank(-1), nb_processes(-1),
@@ -74,8 +78,12 @@ public:
         }
     }
 
-    void save(const real_number input_particles_positions[], const std::unique_ptr<real_number[]> input_particles_rhs[],
-              const int index_particles[], const int nb_particles, const int idx_time_step){
+    void save(
+            const real_number input_particles_positions[],
+            const std::unique_ptr<real_number[]> input_particles_rhs[],
+            const int index_particles[],
+            const int nb_particles,
+            const int idx_time_step){
         TIMEZONE("abstract_particles_output::save");
         assert(total_nb_particles != -1);
 
diff --git a/bfps/cpp/particles/particles_output_hdf5.hpp b/bfps/cpp/particles/particles_output_hdf5.hpp
index 49a0b69e3d0c0b888a7c685570d4ba7296a230f5..c85921c98ceae9591515e431ffc5469a51a1bb03 100644
--- a/bfps/cpp/particles/particles_output_hdf5.hpp
+++ b/bfps/cpp/particles/particles_output_hdf5.hpp
@@ -8,55 +8,79 @@
 #include "abstract_particles_output.hpp"
 #include "scope_timer.hpp"
 
-template <class real_number, int size_particle_positions, int size_particle_rhs>
-class particles_output_hdf5 : public abstract_particles_output<real_number, size_particle_positions, size_particle_rhs>{
-    using Parent = abstract_particles_output<real_number, size_particle_positions, size_particle_rhs>;
-
-    const std::string filename;
+template <class real_number,
+          int size_particle_positions,
+          int size_particle_rhs>
+class particles_output_hdf5 : public abstract_particles_output<real_number,
+                                                               size_particle_positions,
+                                                               size_particle_rhs>{
+    using Parent = abstract_particles_output<real_number,
+                                             size_particle_positions,
+                                             size_particle_rhs>;
 
     hid_t file_id;
     const int total_nb_particles;
 
-    const std::string datagroup_basename_state;
-    const std::string datagroup_basename_rhs;
+    const std::string particle_species_name;
 
     hid_t dset_id_state;
     hid_t dset_id_rhs;
 
 public:
-    particles_output_hdf5(MPI_Comm in_mpi_com, const std::string in_filename, const int inTotalNbParticles,
-                          const int in_nb_rhs, const std::string in_datagroup_basename_state,
-                          const std::string in_datagroup_basename_rhs)
-            : abstract_particles_output<real_number, size_particle_positions, size_particle_rhs>(in_mpi_com, inTotalNbParticles, in_nb_rhs),
-              filename(in_filename),
-              file_id(0), total_nb_particles(inTotalNbParticles), datagroup_basename_state(in_datagroup_basename_state),
-              datagroup_basename_rhs(in_datagroup_basename_rhs), dset_id_state(0), dset_id_rhs(0){
-        if(datagroup_basename_state == datagroup_basename_rhs){
-            DEBUG_MSG("The same dataset names have been passed to particles_output_hdf5 for the state and the rhs\n"
-                      "It will result into an undefined behavior.\n"
-                      "Dataset name = %s\n", datagroup_basename_state.c_str());
-        }
+    particles_output_hdf5(MPI_Comm in_mpi_com,
+                          const std::string ps_name,
+                          const int inTotalNbParticles,
+                          const int in_nb_rhs)
+            : abstract_particles_output<real_number,
+                                        size_particle_positions,
+                                        size_particle_rhs>(
+                                                in_mpi_com,
+                                                inTotalNbParticles,
+                                                in_nb_rhs),
+              particle_species_name(ps_name),
+              file_id(0),
+              total_nb_particles(inTotalNbParticles),
+              dset_id_state(0),
+              dset_id_rhs(0){}
+    int open_file(std::string filename){
+
+        TIMEZONE("particles_output_hdf5::open_file");
+
+        this->require_checkpoint_groups(filename);
 
-        TIMEZONE("particles_output_hdf5::H5Pcreate");
         hid_t plist_id_par = H5Pcreate(H5P_FILE_ACCESS);
         assert(plist_id_par >= 0);
-        int retTest = H5Pset_fapl_mpio(plist_id_par, Parent::getCom(), MPI_INFO_NULL);
+        int retTest = H5Pset_fapl_mpio(
+                plist_id_par,
+                Parent::getCom(),
+                MPI_INFO_NULL);
        assert(retTest >= 0);
 
         // Parallel HDF5 write
-        file_id = H5Fopen(filename.c_str(), H5F_ACC_RDWR | H5F_ACC_DEBUG, plist_id_par);
+        file_id = H5Fopen(
+                filename.c_str(),
+                H5F_ACC_RDWR | H5F_ACC_DEBUG,
+                plist_id_par);
         // file_id = H5Fcreate(filename.c_str(), H5F_ACC_TRUNC | H5F_ACC_DEBUG/*H5F_ACC_EXCL*/, H5P_DEFAULT/*H5F_ACC_RDWR*/, plist_id_par);
         assert(file_id >= 0);
         H5Pclose(plist_id_par);
 
-        dset_id_state = H5Gopen(file_id, datagroup_basename_state.c_str(), H5P_DEFAULT);
+        dset_id_state = H5Gopen(
+                file_id,
+                (this->particle_species_name + std::string("/state")).c_str(),
+                H5P_DEFAULT);
         assert(dset_id_state >= 0);
-        dset_id_rhs = H5Gopen(file_id, datagroup_basename_rhs.c_str(), H5P_DEFAULT);
+        dset_id_rhs = H5Gopen(
+                file_id,
+                (this->particle_species_name + std::string("/rhs")).c_str(),
+                H5P_DEFAULT);
         assert(dset_id_rhs >= 0);
+        return EXIT_SUCCESS;
     }
 
-    ~particles_output_hdf5(){
-        TIMEZONE("particles_output_hdf5::H5Dclose");
+    ~particles_output_hdf5(){}
+
+    int close_file(void){
+        TIMEZONE("particles_output_hdf5::close_file");
 
         int rethdf = H5Gclose(dset_id_state);
         assert(rethdf >= 0);
@@ -66,18 +90,88 @@ public:
         rethdf = H5Fclose(file_id);
         assert(rethdf >= 0);
+        return EXIT_SUCCESS;
+    }
+
+    void require_checkpoint_groups(std::string filename){
+        if (Parent::getMyRank() == 0)
+        {
+            hid_t file_id = H5Fopen(
+                    filename.c_str(),
+                    H5F_ACC_RDWR | H5F_ACC_DEBUG,
+                    H5P_DEFAULT);
+            assert(file_id >= 0);
+            bool group_exists = H5Lexists(
+                    file_id,
+                    this->particle_species_name.c_str(),
+                    H5P_DEFAULT);
+            if (!group_exists)
+            {
+                hid_t gg = H5Gcreate(
+                        file_id,
+                        this->particle_species_name.c_str(),
+                        H5P_DEFAULT,
+                        H5P_DEFAULT,
+                        H5P_DEFAULT);
+                assert(gg >= 0);
+                H5Gclose(gg);
+            }
+            hid_t gg = H5Gopen(
+                    file_id,
+                    this->particle_species_name.c_str(),
+                    H5P_DEFAULT);
+            assert(gg >= 0);
+            group_exists = H5Lexists(
+                    gg,
+                    "state",
+                    H5P_DEFAULT);
+            if (!group_exists)
+            {
+                hid_t ggg = H5Gcreate(
+                        gg,
+                        "state",
+                        H5P_DEFAULT,
+                        H5P_DEFAULT,
+                        H5P_DEFAULT);
+                assert(ggg >= 0);
+                H5Gclose(ggg);
+            }
+            group_exists = H5Lexists(
+                    gg,
+                    "rhs",
+                    H5P_DEFAULT);
+            if (!group_exists)
+            {
+                hid_t ggg = H5Gcreate(
+                        gg,
+                        "rhs",
+                        H5P_DEFAULT,
+                        H5P_DEFAULT,
+                        H5P_DEFAULT);
+                assert(ggg >= 0);
+                H5Gclose(ggg);
+            }
+            H5Gclose(gg);
+            H5Fclose(file_id);
+        }
+        MPI_Barrier(Parent::getCom());
     }
 
-    void write(const int idx_time_step, const real_number* particles_positions, const std::unique_ptr<real_number[]>* particles_rhs,
-               const int nb_particles, const int particles_idx_offset) final{
+    void write(
+            const int idx_time_step,
+            const real_number* particles_positions,
+            const std::unique_ptr<real_number[]>* particles_rhs,
+            const int nb_particles,
+            const int particles_idx_offset) final{
         TIMEZONE("particles_output_hdf5::write");
 
         assert(particles_idx_offset < Parent::getTotalNbParticles());
         assert(particles_idx_offset+nb_particles <= Parent::getTotalNbParticles());
 
-        static_assert(std::is_same<real_number, double>::value
-                      || std::is_same<real_number, float>::value, "real_number must be double or float");
-        const hid_t type_id = (sizeof(real_number) == 8?H5T_NATIVE_DOUBLE:H5T_NATIVE_FLOAT);
+        static_assert(std::is_same<real_number, double>::value ||
                      std::is_same<real_number, float>::value,
+                      "real_number must be double or float");
+        const hid_t type_id = (sizeof(real_number) == 8 ? H5T_NATIVE_DOUBLE : H5T_NATIVE_FLOAT);
 
         hid_t plist_id = H5Pcreate(H5P_DATASET_XFER);
         assert(plist_id >= 0);
 
@@ -89,12 +183,19 @@
         {
             assert(total_nb_particles >= 0);
             assert(size_particle_positions >= 0);
-            const hsize_t datacount[2] = {hsize_t(total_nb_particles), hsize_t(size_particle_positions)};
+            const hsize_t datacount[2] = {
+                    hsize_t(total_nb_particles),
+                    hsize_t(size_particle_positions)};
             hid_t dataspace = H5Screate_simple(2, datacount, NULL);
             assert(dataspace >= 0);
 
-            hid_t dataset_id = H5Dcreate( dset_id_state, std::to_string(idx_time_step).c_str(), type_id, dataspace, H5P_DEFAULT,
-                                          H5P_DEFAULT, H5P_DEFAULT);
+            hid_t dataset_id = H5Dcreate( dset_id_state,
+                                          std::to_string(idx_time_step).c_str(),
+                                          type_id,
+                                          dataspace,
+                                          H5P_DEFAULT,
+                                          H5P_DEFAULT,
+                                          H5P_DEFAULT);
             assert(dataset_id >= 0);
 
             assert(nb_particles >= 0);
@@ -105,11 +206,22 @@
             assert(memspace >= 0);
 
             hid_t filespace = H5Dget_space(dataset_id);
-            int rethdf = H5Sselect_hyperslab(filespace, H5S_SELECT_SET, offset, NULL, count, NULL);
+            int rethdf = H5Sselect_hyperslab(
+                    filespace,
+                    H5S_SELECT_SET,
+                    offset,
+                    NULL,
+                    count,
+                    NULL);
             assert(rethdf >= 0);
 
-            herr_t status = H5Dwrite(dataset_id, type_id, memspace, filespace,
-                                     plist_id, particles_positions);
+            herr_t status = H5Dwrite(
+                    dataset_id,
+                    type_id,
+                    memspace,
+                    filespace,
+                    plist_id,
+                    particles_positions);
             assert(status >= 0);
             rethdf = H5Sclose(memspace);
             assert(rethdf >= 0);
@@ -120,28 +232,52 @@
         }
         {
             assert(size_particle_rhs >= 0);
-            const hsize_t datacount[3] = {hsize_t(Parent::getNbRhs()), hsize_t(total_nb_particles), hsize_t(size_particle_rhs)};
+            const hsize_t datacount[3] = {hsize_t(Parent::getNbRhs()),
+                                          hsize_t(total_nb_particles),
+                                          hsize_t(size_particle_rhs)};
             hid_t dataspace = H5Screate_simple(3, datacount, NULL);
             assert(dataspace >= 0);
 
-            hid_t dataset_id = H5Dcreate( dset_id_rhs, std::to_string(idx_time_step).c_str(), type_id, dataspace, H5P_DEFAULT,
-                                          H5P_DEFAULT, H5P_DEFAULT);
+            hid_t dataset_id = H5Dcreate( dset_id_rhs,
+                                          std::to_string(idx_time_step).c_str(),
+                                          type_id,
+                                          dataspace,
+                                          H5P_DEFAULT,
+                                          H5P_DEFAULT,
+                                          H5P_DEFAULT);
             assert(dataset_id >= 0);
 
             assert(particles_idx_offset >= 0);
             for(int idx_rhs = 0 ; idx_rhs < Parent::getNbRhs() ; ++idx_rhs){
-                const hsize_t count[3] = {1, hsize_t(nb_particles), hsize_t(size_particle_rhs)};
-                const hsize_t offset[3] = {hsize_t(idx_rhs), hsize_t(particles_idx_offset), 0};
+                const hsize_t count[3] = {
+                        1,
+                        hsize_t(nb_particles),
+                        hsize_t(size_particle_rhs)};
+                const hsize_t offset[3] = {
+                        hsize_t(idx_rhs),
+                        hsize_t(particles_idx_offset),
+                        0};
                 hid_t memspace = H5Screate_simple(3, count, NULL);
                 assert(memspace >= 0);
 
                 hid_t filespace = H5Dget_space(dataset_id);
                 assert(filespace >= 0);
-                int rethdf = H5Sselect_hyperslab(filespace, H5S_SELECT_SET, offset, NULL, count, NULL);
+                int rethdf = H5Sselect_hyperslab(
+                        filespace,
+                        H5S_SELECT_SET,
+                        offset,
+                        NULL,
+                        count,
+                        NULL);
                 assert(rethdf >= 0);
 
-                herr_t status = H5Dwrite(dataset_id, type_id, memspace, filespace,
-                                         plist_id, particles_rhs[idx_rhs].get());
+                herr_t status = H5Dwrite(
+                        dataset_id,
+                        type_id,
+                        memspace,
+                        filespace,
+                        plist_id,
+                        particles_rhs[idx_rhs].get());
                 assert(status >= 0);
                 rethdf = H5Sclose(filespace);
                 assert(rethdf >= 0);
@@ -159,4 +295,5 @@
     }
 };
 
-#endif
+#endif//PARTICLES_OUTPUT_HDF5_HPP
+
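
Reviewer note (not part of the patch): a minimal sketch of the refactored writer lifecycle. The constructor now takes the particle species name instead of a filename plus dataset paths, and the file is opened and closed around each save(), so successive outputs can target different checkpoint files. The filename, particle counts, buffers, and include path below are placeholders; in the generated main they come from fs->get_current_fname() and the particles system object ps, and the checkpoint file is assumed to already exist (the Python layer creates it).

// usage sketch for the new particles_output_hdf5 API (assumed file/data names)
#include <mpi.h>
#include <memory>
#include "particles/particles_output_hdf5.hpp" // adjust to your include dirs

int main(int argc, char** argv)
{
    MPI_Init(&argc, &argv);

    const int nparticles = 4; // global particle count
    const int nb_rhs = 2;     // number of stored rhs time levels

    // one writer per particle species; the species name doubles as the HDF5 group
    particles_output_hdf5<double, 3, 3> writer(
            MPI_COMM_WORLD, "tracers0", nparticles, nb_rhs);

    // placeholder local data; in bfps these come from ps->getParticlesPositions() etc.
    std::unique_ptr<double[]> positions(new double[nparticles*3]());
    std::unique_ptr<double[]> rhs_data[2] = {
            std::unique_ptr<double[]>(new double[nparticles*3]()),
            std::unique_ptr<double[]>(new double[nparticles*3]())};
    int indexes[4] = {0, 1, 2, 3};

    // open per output event: state/rhs groups are created on demand, then
    // datasets named after the iteration are written collectively
    writer.open_file("tracers0_checkpoint_0.h5"); // placeholder checkpoint file
    writer.save(positions.get(), rhs_data, indexes, nparticles, 0 /*iteration*/);
    writer.close_file();

    MPI_Finalize();
    return 0;
}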