The output datasets of sample_from_particles_system and sample_particles_system_position should have the same shape as the corresponding datasets from the checkpoint file.
At the moment they have an extra dimension of size 1.
C++ code reads an initial condition of shape (..., d), and an initial rhs of shape (n, ..., d) (where n is the order of the multi-step Adams-Bashforth scheme).
C++ code asserts that the (...) is the same for both.
C++ code generates a particle state array with shape (N, d), where N is the total number of particles (the product of the values in (...)); the rhs array is similar.
C++ code does whatever computations.
C++ saves checkpoints, particle datasets are shaped (..., d) and (n, ..., d).
C++ saves samples, datasets are shaped (...) for scalar data, (..., 3) for vector data and (..., 3, 3) for tensor data.
What is needed: the shape (...) needs to be saved in the particle_system object (or something like that), and it should be accessible to particles_output and particles_output_sampling. Ideally, the shape should be set when the output objects are initialized, since the shape will not change once a DNS is started.
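Concretely, I imagine something along the lines of the following sketch (the class, member and method names here are made up for illustration; they are not the existing particles_system / particles_output interfaces):

#include <vector>
#include <hdf5.h>

// Shape (...) read from the initial condition, without the trailing component
// dimension d; it is fixed once the DNS has started.
class particle_shape_holder
{
    std::vector<hsize_t> dataShape;

public:
    void setParticleShape(const std::vector<hsize_t>& shape)
    {
        this->dataShape = shape;
    }

    const std::vector<hsize_t>& getParticleShape() const
    {
        return this->dataShape;
    }
};

// The output objects could then build the dataset dims at initialization time
// from the stored shape, instead of ending up with an extra dimension of size 1.
inline std::vector<hsize_t> state_dataset_dims(
        const std::vector<hsize_t>& dataShape,
        const hsize_t ncomponents)
{
    std::vector<hsize_t> dims = dataShape;  // (...)
    dims.push_back(ncomponents);            // (..., d); scalar samples would use dataShape alone
    return dims;
}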
I am currently looking into it, but it is a little difficult: either we end up with an unbalanced number of particles per process, or some process will have to access the data multiple times.
For example, let (m1, m2, ..., mk) be (10,3,5,8) (so N = 1200).
If P = 11, we should split to roughly 109 particles per process (1200 = 11*109 + 1, so one process gets 110).
How can we load 109 particles in one go? It will need multiple loads, right? Or maybe there is something that I did not see?
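For context, what I mean by a balanced split at the level of particle counts (this is just my own sketch of the usual interval splitting, not necessarily what particles_utils::IntervalSplitter in the code below does): the first N % P ranks get one extra particle, so for N = 1200 and P = 11, rank 0 gets 110 particles and ranks 1 to 10 get 109.

#include <cstddef>
#include <cstdio>

// Split nb_total items over nb_processes as evenly as possible: every rank
// gets nb_total / nb_processes items, and the first nb_total % nb_processes
// ranks get one extra.
static void split_interval(std::size_t nb_total, int nb_processes, int my_rank,
                           std::size_t* my_offset, std::size_t* my_size)
{
    const std::size_t base = nb_total / nb_processes;
    const std::size_t rest = nb_total % nb_processes;
    const std::size_t rank = static_cast<std::size_t>(my_rank);
    *my_size = base + (rank < rest ? 1 : 0);
    *my_offset = rank*base + (rank < rest ? rank : rest);
}

int main()
{
    const std::size_t N = 10*3*5*8; // 1200 particles
    const int P = 11;
    for (int rank = 0; rank < P; ++rank)
    {
        std::size_t offset, size;
        split_interval(N, P, rank, &offset, &size);
        std::printf("rank %2d: offset %4zu, count %zu\n", rank, offset, size);
    }
    return 0;
}

The difficulty is that such a flat range of particles does not in general correspond to a single hyperslab of the (m1, ..., mk, d) dataset, which is what the rest of this discussion is about.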
{TIMEZONE("state");hid_tdset=H5Dopen(particle_file,inDatanameState.c_str(),H5P_DEFAULT);assert(dset>=0);hid_tdspace=H5Dget_space(dset);// copy?assert(dspace>=0);hid_tspace_dim=H5Sget_simple_extent_ndims(dspace);assert(space_dim>=2);std::vector<hsize_t>state_dim_array(space_dim);inthdfret=H5Sget_simple_extent_dims(dspace,&state_dim_array[0],NULL);assert(hdfret>=0);// Last value is the position dim of the particlesassert(state_dim_array.back()==size_particle_positions);// The data shape contains the dims except the last onedataShape=state_dim_array;dataShape.pop_back();nb_total_particles=1;for(size_tidx_dim=0;idx_dim<state_dim_array.size()-1;++idx_dim){nb_total_particles*=state_dim_array[idx_dim];}hdfret=H5Sclose(dspace);assert(hdfret>=0);hdfret=H5Dclose(dset);assert(hdfret>=0);}{TIMEZONE("rhs");hid_tdset=H5Dopen(particle_file,inDatanameRhs.c_str(),H5P_DEFAULT);assert(dset>=0);hid_tdspace=H5Dget_space(dset);// copy?assert(dspace>=0);hid_trhs_dim=H5Sget_simple_extent_ndims(dspace);assert(rhs_dim>=0);std::vector<hsize_t>rhs_dim_array(rhs_dim);inthdfret=H5Sget_simple_extent_dims(dspace,&rhs_dim_array[0],NULL);assert(hdfret>=0);assert(rhs_dim_array.back()==size_particle_rhs);assert(rhs_dim_array.front()>=1);assert(rhs_dim==2+dataShape.size());for(size_tidx_shape=0;idx_shape<dataShape.size();++idx_shape){assert(dataShape[idx_shape]==rhs_dim_array[idx_shape+1]);}nb_rhs=rhs_dim_array[0];hdfret=H5Sclose(dspace);assert(hdfret>=0);hdfret=H5Dclose(dset);assert(hdfret>=0);}particles_utils::IntervalSplitter<hsize_t>load_splitter(nb_total_particles,nb_processes,my_rank);static_assert(std::is_same<real_number,double>::value||std::is_same<real_number,float>::value,"real_number must be double or float");consthid_ttype_id=(sizeof(real_number)==8?H5T_NATIVE_DOUBLE:H5T_NATIVE_FLOAT);/// Load the 
datastd::unique_ptr<real_number[]>split_particles_positions(newreal_number[load_splitter.getMySize()*size_particle_positions]);{TIMEZONE("state-read");hid_tdset=H5Dopen(particle_file,inDatanameState.c_str(),H5P_DEFAULT);assert(dset>=0);hid_trspace=H5Dget_space(dset);assert(rspace>=0);std::vector<hsize_t>offset=get_offset_with_shape(load_splitter.getMyOffset(),dataShape);offset.push_back(0);std::vector<hsize_t>mem_dims=get_size_with_shape(load_splitter.getMySize(),dataShape,offset);mem_dims.push_back(size_particle_positions);hid_tmspace=H5Screate_simple(mem_dims.size(),&mem_dims[0],NULL);assert(mspace>=0);intrethdf=H5Sselect_hyperslab(rspace,H5S_SELECT_SET,offset,NULL,mem_dims,NULL);assert(rethdf>=0);rethdf=H5Dread(dset,type_id,mspace,rspace,H5P_DEFAULT,split_particles_positions.get());assert(rethdf>=0);rethdf=H5Sclose(rspace);assert(rethdf>=0);rethdf=H5Dclose(dset);assert(rethdf>=0);}std::vector<std::unique_ptr<real_number[]>>split_particles_rhs(nb_rhs);{TIMEZONE("rhs-read");hid_tdset=H5Dopen(particle_file,inDatanameRhs.c_str(),H5P_DEFAULT);assert(dset>=0);std::vector<hsize_t>offset=get_offset_with_shape(load_splitter.getMyOffset(),dataShape);offset.push_front(0);offset.push_back(0);std::vector<hsize_t>mem_dims=get_size_with_shape(load_splitter.getMySize(),dataShape,offset);mem_dims.push_front(1);mem_dims.push_back(size_particle_positions);for(hsize_tidx_rhs=0;idx_rhs<nb_rhs;++idx_rhs){hid_trspace=H5Dget_space(dset);assert(rspace>=0);split_particles_rhs[idx_rhs].reset(newreal_number[load_splitter.getMySize()*size_particle_rhs]);offset[0]=idx_rhs;hid_tmspace=H5Screate_simple(mem_dims.size(),&mem_dims[0],NULL);assert(mspace>=0);intrethdf=H5Sselect_hyperslab(rspace,H5S_SELECT_SET,offset,NULL,mem_dims,NULL);assert(rethdf>=0);rethdf=H5Dread(dset,type_id,mspace,rspace,H5P_DEFAULT,split_particles_rhs[idx_rhs].get());assert(rethdf>=0);rethdf=H5Sclose(mspace);assert(rethdf>=0);rethdf=H5Sclose(rspace);assert(rethdf>=0);}intrethdf=H5Dclose(dset);assert(rethdf>=0);}
{TIMEZONE("state-read");hid_tdset=H5Dopen(particle_file,inDatanameState.c_str(),H5P_DEFAULT);assert(dset>=0);hid_trspace=H5Dget_space(dset);assert(rspace>=0);// get number of dimensionsintrspace_ndims=H5Sget_simple_extent_ndims(rspace);// get sizeshsize_t*rspace_dims=newhsize_t[rspace_ndims];H5Sget_simple_extent_dims(rspace,rspace_dims,NULL);hsize_t*rspace_offset=newhsize_t[rspace_ndims];// for state component, offset is always 0rspace_offset[0]=0;hsize_ttotal_nparticle_offset=load_splitter.getMyOffset();for(intdim_counter=1;dim_counter<rspace_ndims;dim_counter++){rspace_offset[dim_counter]=total_particle_offset%rspace_dims[dim_counter];total_particle_offset=total_particle_offset/rspace_dims[dim_counter];}hsize_tmem_dims[2]={load_splitter.getMySize(),size_particle_positions};hid_tmspace=H5Screate_simple(2,&mem_dims[0],NULL);assert(mspace>=0);intrethdf=H5Sselect_hyperslab(rspace,H5S_SELECT_SET,rspace_offset,NULL,mem_dims,NULL);assert(rethdf>=0);rethdf=H5Dread(dset,type_id,mspace,rspace,H5P_DEFAULT,split_particles_positions.get());assert(rethdf>=0);rethdf=H5Sclose(rspace);assert(rethdf>=0);rethdf=H5Dclose(dset);assert(rethdf>=0);delete[]rspace_dims;delete[]rspace_offset;}
mspace and mem_dims stay the same.
HDF5 will not complain, because the sizes will be compatible (unless I did something wrong with the computation of the offset).
{TIMEZONE("state-read");hid_tdset=H5Dopen(particle_file,inDatanameState.c_str(),H5P_DEFAULT);assert(dset>=0);hid_trspace=H5Dget_space(dset);assert(rspace>=0);// get number of dimensionsintrspace_ndims=H5Sget_simple_extent_ndims(rspace);// get sizeshsize_t*rspace_dims=newhsize_t[rspace_ndims];H5Sget_simple_extent_dims(rspace,rspace_dims,NULL);hsize_t*rspace_offset=newhsize_t[rspace_ndims];hsize_t*rspace_count=newhsize_t[rspace_ndims];// for state component, offset is always 0rspace_offset[0]=0;rspace_count[0]=size_particle_positions;hsize_ttotal_nparticle_offset=load_splitter.getMyOffset();hsize_ttotal_nparticle_count=load_splitter.getMySize();for(intdim_counter=1;dim_counter<rspace_ndims;dim_counter++){rspace_offset[dim_counter]=total_particle_offset%rspace_dims[dim_counter];total_particle_offset=total_particle_offset/rspace_dims[dim_counter];rspace_count[dim_counter]=total_particle_count%rspace_dims[dim_counter];total_particle_count=total_particle_count/rspace_dims[dim_counter];}hsize_tmem_dims[2]={load_splitter.getMySize(),size_particle_positions};hid_tmspace=H5Screate_simple(2,&mem_dims[0],NULL);assert(mspace>=0);intrethdf=H5Sselect_hyperslab(rspace,H5S_SELECT_SET,rspace_offset,NULL,rspace_count,NULL);assert(rethdf>=0);rethdf=H5Dread(dset,type_id,mspace,rspace,H5P_DEFAULT,split_particles_positions.get());assert(rethdf>=0);rethdf=H5Sclose(rspace);assert(rethdf>=0);rethdf=H5Dclose(dset);assert(rethdf>=0);delete[]rspace_dims;delete[]rspace_offset;}
As far as I can tell, my code selects contiguous chunks of the dataset, and then puts them into a differently shaped array of the same total size.
HDF5 should not complain:
see section 7.4.1.4 of http://davis.lbl.gov/Manuals/HDF5-1.8.7/UG/12_Dataspaces.html
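For what it is worth, here is a minimal standalone sketch of that point (file name and shapes invented just for the test, they are not from our code): a contiguous hyperslab of a (4, 5, 3) dataset is read into a (5, 3) memory buffer, which should go through because HDF5 only requires the two selections to contain the same number of elements.

#include <hdf5.h>
#include <cassert>
#include <cstddef>
#include <vector>

int main()
{
    // write a small (4, 5, 3) "state" dataset
    const hsize_t file_dims[3] = {4, 5, 3};
    std::vector<double> data(4*5*3);
    for (std::size_t i = 0; i < data.size(); ++i)
        data[i] = static_cast<double>(i);
    hid_t file = H5Fcreate("shape_test.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    hid_t fspace = H5Screate_simple(3, file_dims, NULL);
    hid_t dset = H5Dcreate(file, "state", H5T_NATIVE_DOUBLE, fspace,
                           H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    H5Dwrite(dset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, data.data());
    H5Sclose(fspace);

    // select the contiguous run of particles (1, 0) .. (1, 4) in the file
    hid_t rspace = H5Dget_space(dset);
    const hsize_t offset[3] = {1, 0, 0};
    const hsize_t count[3]  = {1, 5, 3};
    herr_t ret = H5Sselect_hyperslab(rspace, H5S_SELECT_SET, offset, NULL, count, NULL);
    assert(ret >= 0);

    // the memory dataspace has a different rank and shape, but the same
    // number of elements (15), so the read is allowed
    const hsize_t mem_dims[2] = {5, 3};
    hid_t mspace = H5Screate_simple(2, mem_dims, NULL);
    std::vector<double> buffer(5*3);
    ret = H5Dread(dset, H5T_NATIVE_DOUBLE, mspace, rspace, H5P_DEFAULT, buffer.data());
    assert(ret >= 0);

    H5Sclose(mspace);
    H5Sclose(rspace);
    H5Dclose(dset);
    H5Fclose(file);
    return 0;
}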
OK, I see, that is what I was missing; great, let me try to update my version. I use &mem_dims[0] because mem_dims is a std::vector, not a pointer (I could also use .data()).
For example, to turn a flat offset of 135 into per-dimension offsets (with every dimension of size 10):

1) I compute 135 % 10, which gives me 5; I divide 135 by 10, which gives me 13.
2) I compute 13 % 10, which gives me 3; I divide 13 by 10, which gives me 1.
3) I compute 1 % 10, which gives me 1.

So the flat offset 135 corresponds to the per-dimension offsets (1, 3, 5).
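In code, the same decomposition could look like the sketch below (I use a (10, 10, 10) shape here, since the arithmetic above divides by 10 at every step; for the (10, 3, 5, 8) example the moduli would be 8, 5 and 3 instead):

#include <cstddef>
#include <cstdio>
#include <vector>

// Turn a flat particle index into one index per dimension, assuming the last
// dimension varies fastest (C order): 135 in a (10, 10, 10) shape -> (1, 3, 5).
std::vector<std::size_t> unflatten(std::size_t flat_index,
                                   const std::vector<std::size_t>& shape)
{
    std::vector<std::size_t> indices(shape.size());
    for (std::size_t i = shape.size(); i-- > 0; )
    {
        indices[i] = flat_index % shape[i];
        flat_index /= shape[i];
    }
    return indices;
}

int main()
{
    const std::vector<std::size_t> idx = unflatten(135, {10, 10, 10});
    std::printf("%zu %zu %zu\n", idx[0], idx[1], idx[2]); // prints: 1 3 5
    return 0;
}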