diff --git a/bfps/NSVorticityEquation.py b/bfps/NSVorticityEquation.py
index 0a5678094075e4ccfbdeccd7da4535db17d76128..c50eab9778c76679ae40d72dc552787546643fda 100644
--- a/bfps/NSVorticityEquation.py
+++ b/bfps/NSVorticityEquation.py
@@ -141,7 +141,8 @@ class NSVorticityEquation(_fluid_particle_base):
                     tracers0_integration_steps, // to check coherency between parameters and hdf input file (nb rhs)
                     nparticles,                 // to check coherency between parameters and hdf input file
                     fname,                      // particles input filename
-                    "tracers0",                 // dataset name for initial input
+                    std::string("/tracers0/state/0"),                 // dataset name for initial input
+                    std::string("/tracers0/rhs/0"),                 // dataset name for rhs input
                     tracers0_neighbours,        // parameter (interpolation no neighbours)
                     tracers0_smoothness,        // parameter
                     MPI_COMM_WORLD);
@@ -543,6 +544,8 @@ class NSVorticityEquation(_fluid_particle_base):
         with h5py.File(self.get_particle_file_name(), 'a') as ofile:
             s = 0
             ofile.create_group('tracers{0}'.format(s))
+            ofile.create_group('tracers{0}/rhs'.format(s))
+            ofile.create_group('tracers{0}/state'.format(s))
             time_chunk = 2**20 // (8*3*number_of_particles)
             time_chunk = max(time_chunk, 1)
             dims = ((1,
@@ -555,7 +558,7 @@ class NSVorticityEquation(_fluid_particle_base):
                 chunks = (time_chunk, 1) + dims[2:]
             bfps.tools.create_alloc_early_dataset(
                     ofile,
-                    '/tracers{0}/rhs'.format(s),
+                    '/tracers{0}/rhs/0'.format(s),
                     dims, maxshape, chunks)
             if len(pbase_shape) > 1:
                 chunks = (time_chunk, 1) + pbase_shape[1:] + (3,)
@@ -563,7 +566,7 @@ class NSVorticityEquation(_fluid_particle_base):
                 chunks = (time_chunk, pbase_shape[0], 3)
             bfps.tools.create_alloc_early_dataset(
                     ofile,
-                    '/tracers{0}/state'.format(s),
+                    '/tracers{0}/state/0'.format(s),
                     (1,) + pbase_shape + (3,),
                     (h5py.h5s.UNLIMITED,) + pbase_shape + (3,),
                     chunks)
diff --git a/bfps/_fluid_base.py b/bfps/_fluid_base.py
index d599d967f291fa13214938e4c522cfe7ac161172..68787e2e9f3c252a10885721799286f5cb2744a9 100644
--- a/bfps/_fluid_base.py
+++ b/bfps/_fluid_base.py
@@ -442,7 +442,7 @@ class _fluid_particle_base(_code):
             #data[0] = np.array([3.26434, 4.24418, 3.12157])
             data[0] = np.array([ 0.72086101,  2.59043666,  6.27501953])
         with h5py.File(self.get_particle_file_name(), 'r+') as data_file:
-            data_file['tracers{0}/state'.format(species)][0] = data
+            data_file['tracers{0}/state/0'.format(species)][0] = data
         if write_to_file:
             data.tofile(
                     os.path.join(
diff --git a/bfps/cpp/particles/particles_input_hdf5.hpp b/bfps/cpp/particles/particles_input_hdf5.hpp
index 0f1fa1ada359806b2365ce3c784d35be0c329025..dbff9cb710c8b48d9570bcd81b59175a2941805a 100644
--- a/bfps/cpp/particles/particles_input_hdf5.hpp
+++ b/bfps/cpp/particles/particles_input_hdf5.hpp
@@ -17,7 +17,6 @@
 template <class real_number, int size_particle_positions, int size_particle_rhs>
 class particles_input_hdf5 : public abstract_particles_input<real_number> {
     const std::string filename;
-    const std::string dataname;
 
     MPI_Comm mpi_comm;
     int my_rank;
@@ -57,15 +56,17 @@ class particles_input_hdf5 : public abstract_particles_input<real_number> {
     }
 
 public:
-    particles_input_hdf5(const MPI_Comm in_mpi_comm,const std::string& inFilename, const std::string& inDataname,
+    particles_input_hdf5(const MPI_Comm in_mpi_comm,const std::string& inFilename,
+                         const std::string& inDatanameState, const std::string& inDatanameRhs,
                          const real_number my_spatial_low_limit, const real_number my_spatial_up_limit)
-        : particles_input_hdf5(in_mpi_comm, inFilename, inDataname,
+        : particles_input_hdf5(in_mpi_comm, inFilename, inDatanameState, inDatanameRhs,
                                BuildLimitsAllProcesses(in_mpi_comm, my_spatial_low_limit, my_spatial_up_limit)){
     }
 
-    particles_input_hdf5(const MPI_Comm in_mpi_comm,const std::string& inFilename, const std::string& inDataname,
+    particles_input_hdf5(const MPI_Comm in_mpi_comm,const std::string& inFilename,
+                         const std::string& inDatanameState, const std::string& inDatanameRhs,
                          const std::vector<real_number>& in_spatial_limit_per_proc)
-        : filename(inFilename), dataname(inDataname),
+        : filename(inFilename),
           mpi_comm(in_mpi_comm), my_rank(-1), nb_processes(-1), nb_total_particles(0),
           nb_particles_for_me(-1){
         TIMEZONE("particles_input_hdf5");
@@ -84,13 +85,9 @@ public:
         hid_t particle_file = H5Fopen(filename.c_str(), H5F_ACC_RDONLY, plist_id_par);
         assert(particle_file >= 0);
 
-        //hid_t dset, prop_list, dspace;
-        hid_t hdf5_group_id = H5Gopen(particle_file, dataname.c_str(), H5P_DEFAULT);
-        assert(hdf5_group_id >= 0);
-
         {
             TIMEZONE("state");
-            hid_t dset = H5Dopen(hdf5_group_id, "state", H5P_DEFAULT);
+            hid_t dset = H5Dopen(particle_file, inDatanameState.c_str(), H5P_DEFAULT);
             assert(dset >= 0);
 
             hid_t dspace = H5Dget_space(dset); // copy?
@@ -117,7 +114,7 @@ public:
         }
         {
             TIMEZONE("rhs");
-            hid_t dset = H5Dopen(hdf5_group_id, "rhs", H5P_DEFAULT);
+            hid_t dset = H5Dopen(particle_file, inDatanameRhs.c_str(), H5P_DEFAULT);
             assert(dset >= 0);
             hid_t dspace = H5Dget_space(dset); // copy?
             assert(dspace >= 0);
@@ -148,7 +145,7 @@ public:
         std::unique_ptr<real_number[]> split_particles_positions(new real_number[load_splitter.getMySize()*size_particle_positions]);
         {
             TIMEZONE("state-read");
-            hid_t dset = H5Dopen(hdf5_group_id, "state", H5P_DEFAULT);
+            hid_t dset = H5Dopen(particle_file, inDatanameState.c_str(), H5P_DEFAULT);
             assert(dset >= 0);
 
             hid_t rspace = H5Dget_space(dset);
@@ -174,7 +171,7 @@ public:
         std::vector<std::unique_ptr<real_number[]>> split_particles_rhs(nb_rhs);
         {
             TIMEZONE("rhs-read");
-            hid_t dset = H5Dopen(hdf5_group_id, "rhs", H5P_DEFAULT);
+            hid_t dset = H5Dopen(particle_file, inDatanameRhs.c_str(), H5P_DEFAULT);
             assert(dset >= 0);
 
             for(hsize_t idx_rhs = 0 ; idx_rhs < nb_rhs ; ++idx_rhs){
@@ -262,9 +259,7 @@ public:
 
         {
             TIMEZONE("close");
-            int hdfret = H5Gclose(hdf5_group_id);
-            assert(hdfret >= 0);
-            hdfret = H5Fclose(particle_file);
+            int hdfret = H5Fclose(particle_file);
             assert(hdfret >= 0);
             hdfret = H5Pclose(plist_id_par);
             assert(hdfret >= 0);
diff --git a/bfps/cpp/particles/particles_system_builder.hpp b/bfps/cpp/particles/particles_system_builder.hpp
index 5f262f2279fbe62498dd9a0bc332dfcc490bddb0..d314ab5001410ae0c2529395e7910614b432819a 100644
--- a/bfps/cpp/particles/particles_system_builder.hpp
+++ b/bfps/cpp/particles/particles_system_builder.hpp
@@ -117,7 +117,7 @@ struct particles_system_build_container {
              const int nsteps, // to check coherency between parameters and hdf input file (nb rhs)
              const int nparticles, // to check coherency between parameters and hdf input file
              const std::string& fname_input, // particles input filename
-             const std::string& dset_name, // dataset name for initial input
+             const std::string& inDatanameState, const std::string& inDatanameRhs, // input dataset names
              MPI_Comm mpi_comm){
 
         // The size of the field grid (global size) all_size seems
@@ -205,7 +205,7 @@ struct particles_system_build_container {
 
         // Load particles from hdf5
         particles_input_hdf5<particles_rnumber, 3,3> generator(mpi_comm, fname_input,
-                                            dset_name, my_spatial_low_limit_z, my_spatial_up_limit_z);
+                                            inDatanameState, inDatanameRhs, my_spatial_low_limit_z, my_spatial_up_limit_z);
 
         // Ensure parameters match the input file
         if(generator.getNbRhs() != nsteps){
@@ -236,7 +236,7 @@ inline std::unique_ptr<abstract_particles_system<particles_rnumber>> particles_s
         const int nsteps, // to check coherency between parameters and hdf input file (nb rhs)
         const int nparticles, // to check coherency between parameters and hdf input file
         const std::string& fname_input, // particles input filename
-        const std::string& dset_name, // dataset name for initial input
+        const std::string& inDatanameState, const std::string& inDatanameRhs, // input dataset names
         const int interpolation_size,
         const int spline_mode,
         MPI_Comm mpi_comm){
@@ -246,7 +246,7 @@ inline std::unique_ptr<abstract_particles_system<particles_rnumber>> particles_s
                        particles_system_build_container<field_rnumber,be,particles_rnumber>>(
                            interpolation_size, // template iterator 1
                            spline_mode, // template iterator 2
-                           fs_field,fs_kk, nsteps, nparticles, fname_input, dset_name, mpi_comm);
+                           fs_field,fs_kk, nsteps, nparticles, fname_input, inDatanameState, inDatanameRhs, mpi_comm);
 }