Commit ff410a69 authored by Berenger Bramas, committed by Cristian Lalescu

Update to have nparticles from system

parent c27efd2b
Part of 2 merge requests: !21 Bugfix/nansampling, !19 Feature/sampling new particles test (extension of "Update particles sampling for variable dim")
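
The change itself: the particle system now carries the global particle count and the current step index, exposed through abstract_particles_system::getGlobalNbParticles() and ::get_step_idx(), so sampling code no longer needs the total passed in from the HDF5 writer. A minimal, illustrative sketch (not taken from the repository) of calling code that relies only on the accessors visible in this diff; report_particle_counts and the include path are assumptions:

#include <iostream>
#include "abstract_particles_system.hpp" // header name assumed; adjust to the project layout

// Sketch only: uses just the const accessors declared in the abstract interface below.
template <class partsize_t, class real_number>
void report_particle_counts(const abstract_particles_system<partsize_t, real_number>& ps){
    const partsize_t local_count  = ps.getLocalNbParticles();   // particles owned by this rank
    const partsize_t global_count = ps.getGlobalNbParticles();  // total over all ranks (new in this commit)
    const int step = ps.get_step_idx();                         // current iteration (new in this commit)
    std::cout << "step " << step << ": "
              << local_count << " local / "
              << global_count << " global particles" << std::endl;
}
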
@@ -74,8 +74,8 @@ int NSVEparticles<rnumber>::do_stats()
// sample_from_particles_system(*this->fs->cvelocity,// field to save TODO
// this->ps,
// 0, // hdf5 datagroup TODO
// "TODO", // dataset basename TODO
// this->particles_output_writer_mpi->getTotalNbParticles());
// "TODO" // dataset basename TODO
// );
// }
return EXIT_SUCCESS;
}
......
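
For reference, under the updated helper signature (see the sampling hunk further down) the still-commented call in do_stats() would no longer forward particles_output_writer_mpi->getTotalNbParticles(). A hedged sketch of the re-enabled call; sampling_group_id and the basename "velocity" are placeholders for values the source still marks TODO:

// Sketch of the re-enabled call; sampling_group_id and "velocity" are illustrative placeholders.
sample_from_particles_system(*this->fs->cvelocity,  // field to sample
                             this->ps,              // particle system (provides count and step index)
                             sampling_group_id,     // hid_t HDF5 datagroup (TODO in the source)
                             "velocity");           // dataset basename (TODO in the source)
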
@@ -32,8 +32,12 @@ public:
virtual partsize_t getLocalNbParticles() const = 0;
virtual partsize_t getGlobalNbParticles() const = 0;
virtual int getNbRhs() const = 0;
virtual int get_step_idx() const = 0;
//- Not generic to enable sampling begin
virtual void sample_compute_field(const field<float, FFTW, ONE>& sample_field,
real_number sample_rhs[]) = 0;
......
@@ -60,7 +60,7 @@ public:
{
assert(size_particle_rhs >= 0);
const hsize_t datacount[3] = {hsize_t(Parent::getNbRhs()),
hsize_t(Parent::total_nb_particles),
hsize_t(Parent::getTotalNbParticles()),
hsize_t(size_particle_rhs)};
hid_t dataspace = H5Screate_simple(3, datacount, NULL);
assert(dataspace >= 0);
......
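
The hunk above switches the HDF5 output writer to the accessor getTotalNbParticles() when sizing its dataspace. As a stand-alone sketch of the resulting dataset shape (number of right-hand sides, global particle count, components per particle), using only the HDF5 C API; the three counts are illustrative numbers, not values from the code:

#include <hdf5.h>
#include <cassert>

int main(){
    // Illustrative shape: 1 right-hand side, 1000 particles globally, 3 components each.
    const hsize_t datacount[3] = {hsize_t(1), hsize_t(1000), hsize_t(3)};
    hid_t dataspace = H5Screate_simple(3, datacount, NULL);
    assert(dataspace >= 0);
    H5Sclose(dataspace);
    return 0;
}
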
@@ -15,25 +15,24 @@ template <class partsize_t, class particles_rnumber, class rnumber, field_backend be, field_components fc>
void sample_from_particles_system(const field<rnumber, be, fc>& in_field, // a pointer to a field<rnumber, FFTW, fc>
std::unique_ptr<abstract_particles_system<partsize_t, particles_rnumber>>& ps, // a pointer to an particles_system<double>
hid_t gid, // an hid_t identifying an HDF5 group
const std::string& fname,
const partsize_t totalNbParticles){
// const int size_particle_rhs = ncomp(fc);
// const partsize_t nb_particles = ps->getLocalNbParticles();
// std::unique_ptr<particles_rnumber[]> sample_rhs(new particles_rnumber[size_particle_rhs*nb_particles]);
// ps->sample_compute_field(in_field, sample_rhs.get());
// const std::string datasetname = fname + std::string("/") + std::to_string(ps->step_idx);
// particles_output_sampling_hdf5<partsize_t, particles_rnumber, 3, size_particle_rhs> outputclass(MPI_COMM_WORLD,
// totalNbParticles,
// gid,
// datasetname);
// outputclass.save(ps->getParticlesPositions(),
// &sample_rhs,
// ps->getParticlesIndexes(),
// ps->getLocalNbParticles(),
// in_field->iteration);
const std::string& fname){
const int size_particle_rhs = ncomp(fc);
const partsize_t nb_particles = ps->getLocalNbParticles();
std::unique_ptr<particles_rnumber[]> sample_rhs(new particles_rnumber[size_particle_rhs*nb_particles]);
ps->sample_compute_field(in_field, sample_rhs.get());
const std::string datasetname = fname + std::string("/") + std::to_string(ps->get_step_idx());
particles_output_sampling_hdf5<partsize_t, particles_rnumber, 3, size_particle_rhs> outputclass(MPI_COMM_WORLD,
ps->getGlobalNbParticles(),
gid,
datasetname);
outputclass.save(ps->getParticlesPositions(),
&sample_rhs,
ps->getParticlesIndexes(),
ps->getLocalNbParticles(),
ps->get_step_idx());
}
#endif
......
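
After this change the helper needs only the field, the particle system, an HDF5 group id and a dataset basename; the global particle count and the step index used for the dataset name come from the system itself. A hedged usage sketch, assuming a velocity field and a particle system built elsewhere; "simulation.h5", "particle_samples" and "velocity" are illustrative names, not from the repository:

// Usage sketch; velocity_field and particle_system are assumed to exist already.
hid_t file_id  = H5Fopen("simulation.h5", H5F_ACC_RDWR, H5P_DEFAULT);
hid_t group_id = H5Gopen(file_id, "particle_samples", H5P_DEFAULT);
sample_from_particles_system(velocity_field,   // field<rnumber, be, fc>
                             particle_system,  // std::unique_ptr<abstract_particles_system<...>>
                             group_id,
                             "velocity");      // data is written to "velocity/<step_idx>"
H5Gclose(group_id);
H5Fclose(file_id);
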
@@ -42,6 +42,7 @@ class particles_system : public abstract_particles_system<partsize_t, real_number>
std::unique_ptr<real_number[]> my_particles_positions;
std::unique_ptr<partsize_t[]> my_particles_positions_indexes;
partsize_t my_nb_particles;
const partsize_t total_nb_particles;
std::vector<std::unique_ptr<real_number[]>> my_particles_rhs;
int step_idx;
@@ -55,6 +56,7 @@ public:
const std::array<size_t,3>& in_local_field_offset,
const field_class& in_field,
MPI_Comm in_mpi_com,
const partsize_t in_total_nb_particles,
const int in_current_iteration = 1)
: mpi_com(in_mpi_com),
current_partition_interval({in_local_field_offset[IDX_Z], in_local_field_offset[IDX_Z] + in_local_field_dims[IDX_Z]}),
@@ -67,7 +69,7 @@ public:
default_field(in_field),
spatial_box_width(in_spatial_box_width), spatial_partition_width(in_spatial_partition_width),
my_spatial_low_limit(in_my_spatial_low_limit), my_spatial_up_limit(in_my_spatial_up_limit),
my_nb_particles(0), step_idx(in_current_iteration){
my_nb_particles(0), total_nb_particles(in_total_nb_particles), step_idx(in_current_iteration){
current_my_nb_particles_per_partition.reset(new partsize_t[partition_interval_size]);
current_offset_particles_for_partition.reset(new partsize_t[partition_interval_size+1]);
@@ -190,6 +192,10 @@ public:
step_idx += 1;
}
int get_step_idx() const final {
return step_idx;
}
void shift_rhs_vectors() final {
if(my_particles_rhs.size()){
std::unique_ptr<real_number[]> next_current(std::move(my_particles_rhs.back()));
@@ -226,6 +232,10 @@ public:
return my_nb_particles;
}
partsize_t getGlobalNbParticles() const final {
return total_nb_particles;
}
int getNbRhs() const final {
return int(my_particles_rhs.size());
}
......
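
particles_system now stores the global count handed to its constructor and reports it through getGlobalNbParticles(), while get_step_idx() exposes the iteration counter. A small consistency check one might run under MPI, assuming only the abstract interface from this diff and that the counts fit into long long; check_global_count is a hypothetical helper, not repository code:

#include <mpi.h>
#include <cassert>

// Sketch: the sum of per-rank counts should equal the stored global count.
template <class partsize_t, class real_number>
void check_global_count(const abstract_particles_system<partsize_t, real_number>& ps,
                        MPI_Comm comm){
    long long local_count = static_cast<long long>(ps.getLocalNbParticles());
    long long summed_count = 0;
    MPI_Allreduce(&local_count, &summed_count, 1, MPI_LONG_LONG, MPI_SUM, comm);
    assert(summed_count == static_cast<long long>(ps.getGlobalNbParticles()));
    (void)summed_count; // avoid an unused-variable warning when NDEBUG is defined
}
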
@@ -206,6 +206,7 @@ struct particles_system_build_container {
local_field_offset,
(*fs_field),
mpi_comm,
nparticles,
in_current_iteration);
// Load particles from hdf5
......