Commit 1c9cf2cf authored by Cristian Lalescu's avatar Cristian Lalescu
Browse files

reading arbitrary shaped particle IC works

parent a2b474e0
Pipeline #43272 canceled with stage
...@@ -479,38 +479,6 @@ class DNS(_code): ...@@ -479,38 +479,6 @@ class DNS(_code):
ofile['checkpoint'] = int(0) ofile['checkpoint'] = int(0)
if (self.dns_type in ['NSVE', 'NSVE_no_output']): if (self.dns_type in ['NSVE', 'NSVE_no_output']):
return None return None
if type(particle_ic) == type(None):
pbase_shape = (self.parameters['nparticles'],)
number_of_particles = self.parameters['nparticles']
else:
pbase_shape = particle_ic.shape[:-1]
assert(particle_ic.shape[-1] == 3)
number_of_particles = 1
for val in pbase_shape[1:]:
number_of_particles *= val
ncomponents = 3
if self.dns_type in ['NSVEcomplex_particles']:
ncomponents = 6
with h5py.File(self.get_checkpoint_0_fname(), 'a') as ofile:
s = 0
if not 'tracers{0}'.format(s) in ofile.keys():
ofile.create_group('tracers{0}'.format(s))
ofile.create_group('tracers{0}/rhs'.format(s))
ofile.create_group('tracers{0}/state'.format(s))
ofile['tracers{0}/rhs'.format(s)].create_dataset(
'0',
shape = (
(self.parameters['tracers{0}_integration_steps'.format(s)],) +
pbase_shape +
(ncomponents,)),
dtype = np.float)
ofile['tracers{0}/state'.format(s)].create_dataset(
'0',
shape = (
pbase_shape +
(ncomponents,)),
dtype = np.float)
return None return None
def job_parser_arguments( def job_parser_arguments(
self, self,
...@@ -817,34 +785,48 @@ class DNS(_code): ...@@ -817,34 +785,48 @@ class DNS(_code):
self, self,
rseed = None, rseed = None,
species = 0): species = 0):
with h5py.File(self.get_checkpoint_0_fname(), 'a') as data_file: try:
dset = data_file[ ncomponents = 3
'tracers{0}/state/0'.format(species)] if self.dns_type in ['NSVEcomplex_particles']:
if not type(rseed) == type(None): ncomponents = 6
np.random.seed(rseed) with h5py.File(self.get_checkpoint_0_fname(), 'a') as data_file:
nn = self.parameters['nparticles'] nn = self.parameters['nparticles']
cc = int(0) data_file['tracers{0}/rhs'.format(species)].create_dataset(
batch_size = int(1e6) '0',
def get_random_phases(npoints): shape = (
return np.random.random( (self.parameters['tracers{0}_integration_steps'.format(species)],) +
(npoints, 3))*2*np.pi (nn, ncomponents,)),
def get_random_versors(npoints): dtype = np.float)
bla = np.random.normal( dset = data_file['tracers{0}/state'.format(s)].create_dataset(
size = (npoints, 3)) '0',
bla /= np.sum(bla**2, axis = 1)[:, None]**.5 shape = (nn, ncomponents,),
return bla dtype = np.float)
while nn > 0: if not type(rseed) == type(None):
if nn > batch_size: np.random.seed(rseed)
dset[cc*batch_size:(cc+1)*batch_size, :3] = get_random_phases(batch_size) cc = int(0)
if dset.shape[1] == 6: batch_size = int(1e6)
dset[cc*batch_size:(cc+1)*batch_size, 3:] = get_random_versors(batch_size) def get_random_phases(npoints):
nn -= batch_size return np.random.random(
else: (npoints, 3))*2*np.pi
dset[cc*batch_size:cc*batch_size+nn, :3] = get_random_phases(nn) def get_random_versors(npoints):
if dset.shape[1] == 6: bla = np.random.normal(
dset[cc*batch_size:cc*batch_size+nn, 3:] = get_random_versors(nn) size = (npoints, 3))
nn = 0 bla /= np.sum(bla**2, axis = 1)[:, None]**.5
cc += 1 return bla
while nn > 0:
if nn > batch_size:
dset[cc*batch_size:(cc+1)*batch_size, :3] = get_random_phases(batch_size)
if dset.shape[1] == 6:
dset[cc*batch_size:(cc+1)*batch_size, 3:] = get_random_versors(batch_size)
nn -= batch_size
else:
dset[cc*batch_size:cc*batch_size+nn, :3] = get_random_phases(nn)
if dset.shape[1] == 6:
dset[cc*batch_size:cc*batch_size+nn, 3:] = get_random_versors(nn)
nn = 0
cc += 1
except Exception as e:
print(e)
return None return None
def generate_vector_field( def generate_vector_field(
self, self,
...@@ -976,6 +958,12 @@ class DNS(_code): ...@@ -976,6 +958,12 @@ class DNS(_code):
self, self,
opt = None): opt = None):
if self.parameters['nparticles'] > 0: if self.parameters['nparticles'] > 0:
with h5py.File(self.get_checkpoint_0_fname(), 'a') as ofile:
s = 0
if not 'tracers{0}'.format(s) in ofile.keys():
ofile.create_group('tracers{0}'.format(s))
ofile.create_group('tracers{0}/rhs'.format(s))
ofile.create_group('tracers{0}/state'.format(s))
self.generate_tracer_state( self.generate_tracer_state(
species = 0, species = 0,
rseed = opt.particle_rand_seed) rseed = opt.particle_rand_seed)
...@@ -1046,7 +1034,7 @@ class DNS(_code): ...@@ -1046,7 +1034,7 @@ class DNS(_code):
self, self,
opt = None): opt = None):
if not os.path.exists(self.get_data_file_name()): if not os.path.exists(self.get_data_file_name()):
self.generate_initial_condition() self.generate_initial_condition(opt = opt)
self.write_par() self.write_par()
self.run( self.run(
nb_processes = opt.nb_processes, nb_processes = opt.nb_processes,
......
...@@ -2,7 +2,7 @@ ...@@ -2,7 +2,7 @@
#define NDEBUG //#define NDEBUG
#include <string> #include <string>
#include <cmath> #include <cmath>
......
...@@ -158,7 +158,8 @@ public: ...@@ -158,7 +158,8 @@ public:
hid_t dset = H5Dopen(particle_file, inDatanameState.c_str(), H5P_DEFAULT); hid_t dset = H5Dopen(particle_file, inDatanameState.c_str(), H5P_DEFAULT);
assert(dset >= 0); assert(dset >= 0);
hid_t rspace = H5Dget_space(dset); hsize_t file_space_dims[2] = {nb_total_particles, size_particle_positions};
hid_t rspace = H5Screate_simple(2, file_space_dims, NULL);
assert(rspace >= 0); assert(rspace >= 0);
hsize_t offset[2] = {load_splitter.getMyOffset(), 0}; hsize_t offset[2] = {load_splitter.getMyOffset(), 0};
...@@ -184,11 +185,11 @@ public: ...@@ -184,11 +185,11 @@ public:
TIMEZONE("rhs-read"); TIMEZONE("rhs-read");
hid_t dset = H5Dopen(particle_file, inDatanameRhs.c_str(), H5P_DEFAULT); hid_t dset = H5Dopen(particle_file, inDatanameRhs.c_str(), H5P_DEFAULT);
assert(dset >= 0); assert(dset >= 0);
hsize_t file_space_dims[3] = {nb_rhs, nb_total_particles, size_particle_rhs};
hid_t rspace = H5Screate_simple(3, file_space_dims, NULL);
assert(rspace >= 0);
for(hsize_t idx_rhs = 0 ; idx_rhs < nb_rhs ; ++idx_rhs){ for(hsize_t idx_rhs = 0 ; idx_rhs < nb_rhs ; ++idx_rhs){
hid_t rspace = H5Dget_space(dset);
assert(rspace >= 0);
if(load_splitter.getMySize()){ if(load_splitter.getMySize()){
split_particles_rhs[idx_rhs].reset(new real_number[load_splitter.getMySize()*size_particle_rhs]); split_particles_rhs[idx_rhs].reset(new real_number[load_splitter.getMySize()*size_particle_rhs]);
} }
...@@ -203,16 +204,17 @@ public: ...@@ -203,16 +204,17 @@ public:
NULL, mem_dims, NULL); NULL, mem_dims, NULL);
variable_used_only_in_assert(rethdf); variable_used_only_in_assert(rethdf);
assert(rethdf >= 0); assert(rethdf >= 0);
//DEBUG_MSG("");
rethdf = H5Dread(dset, type_id, mspace, rspace, H5P_DEFAULT, split_particles_rhs[idx_rhs].get()); rethdf = H5Dread(dset, type_id, mspace, rspace, H5P_DEFAULT, split_particles_rhs[idx_rhs].get());
assert(rethdf >= 0); assert(rethdf >= 0);
rethdf = H5Sclose(mspace); rethdf = H5Sclose(mspace);
assert(rethdf >= 0); assert(rethdf >= 0);
rethdf = H5Sclose(rspace);
assert(rethdf >= 0);
} }
int rethdf = H5Dclose(dset);
int rethdf = H5Sclose(rspace);
assert(rethdf >= 0);
rethdf = H5Dclose(dset);
variable_used_only_in_assert(rethdf); variable_used_only_in_assert(rethdf);
assert(rethdf >= 0); assert(rethdf >= 0);
} }
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment