Commit 1c9cf2cf authored by Cristian Lalescu's avatar Cristian Lalescu

reading arbitrary shaped particle IC works

parent a2b474e0
Pipeline #43272 canceled with stage
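The Python-side hunks below move creation of the tracer 'state' and 'rhs' datasets into generate_tracer_state, and group creation into generate_initial_condition. A minimal standalone sketch of the checkpoint layout this produces, using the group/dataset names from the diff; the file name, particle count and integration-step count are placeholders:

    # Sketch only: reproduces the 'tracers0/state/0' and 'tracers0/rhs/0' layout
    # created by the diff below; the file name and the numbers are hypothetical.
    import numpy as np
    import h5py

    nparticles = 1000
    integration_steps = 4      # stands in for tracers0_integration_steps
    ncomponents = 3            # 6 for NSVEcomplex_particles (position + orientation)

    with h5py.File('checkpoint_0.h5', 'a') as ofile:
        ofile.require_group('tracers0/state')
        ofile.require_group('tracers0/rhs')
        ofile['tracers0/state'].create_dataset(
                '0', shape = (nparticles, ncomponents), dtype = np.float64)
        ofile['tracers0/rhs'].create_dataset(
                '0',
                shape = (integration_steps, nparticles, ncomponents),
                dtype = np.float64)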
......@@ -479,38 +479,6 @@ class DNS(_code):
ofile['checkpoint'] = int(0)
if (self.dns_type in ['NSVE', 'NSVE_no_output']):
return None
if type(particle_ic) == type(None):
pbase_shape = (self.parameters['nparticles'],)
number_of_particles = self.parameters['nparticles']
else:
pbase_shape = particle_ic.shape[:-1]
assert(particle_ic.shape[-1] == 3)
number_of_particles = 1
for val in pbase_shape[1:]:
number_of_particles *= val
ncomponents = 3
if self.dns_type in ['NSVEcomplex_particles']:
ncomponents = 6
with h5py.File(self.get_checkpoint_0_fname(), 'a') as ofile:
s = 0
if not 'tracers{0}'.format(s) in ofile.keys():
ofile.create_group('tracers{0}'.format(s))
ofile.create_group('tracers{0}/rhs'.format(s))
ofile.create_group('tracers{0}/state'.format(s))
ofile['tracers{0}/rhs'.format(s)].create_dataset(
'0',
shape = (
(self.parameters['tracers{0}_integration_steps'.format(s)],) +
pbase_shape +
(ncomponents,)),
dtype = np.float)
ofile['tracers{0}/state'.format(s)].create_dataset(
'0',
shape = (
pbase_shape +
(ncomponents,)),
dtype = np.float)
return None
def job_parser_arguments(
self,
......@@ -817,34 +785,48 @@ class DNS(_code):
self,
rseed = None,
species = 0):
with h5py.File(self.get_checkpoint_0_fname(), 'a') as data_file:
dset = data_file[
'tracers{0}/state/0'.format(species)]
if not type(rseed) == type(None):
np.random.seed(rseed)
nn = self.parameters['nparticles']
cc = int(0)
batch_size = int(1e6)
def get_random_phases(npoints):
return np.random.random(
(npoints, 3))*2*np.pi
def get_random_versors(npoints):
bla = np.random.normal(
size = (npoints, 3))
bla /= np.sum(bla**2, axis = 1)[:, None]**.5
return bla
while nn > 0:
if nn > batch_size:
dset[cc*batch_size:(cc+1)*batch_size, :3] = get_random_phases(batch_size)
if dset.shape[1] == 6:
dset[cc*batch_size:(cc+1)*batch_size, 3:] = get_random_versors(batch_size)
nn -= batch_size
else:
dset[cc*batch_size:cc*batch_size+nn, :3] = get_random_phases(nn)
if dset.shape[1] == 6:
dset[cc*batch_size:cc*batch_size+nn, 3:] = get_random_versors(nn)
nn = 0
cc += 1
try:
ncomponents = 3
if self.dns_type in ['NSVEcomplex_particles']:
ncomponents = 6
with h5py.File(self.get_checkpoint_0_fname(), 'a') as data_file:
nn = self.parameters['nparticles']
data_file['tracers{0}/rhs'.format(species)].create_dataset(
'0',
shape = (
(self.parameters['tracers{0}_integration_steps'.format(species)],) +
(nn, ncomponents,)),
dtype = np.float)
                dset = data_file['tracers{0}/state'.format(species)].create_dataset(
'0',
shape = (nn, ncomponents,),
dtype = np.float)
if not type(rseed) == type(None):
np.random.seed(rseed)
cc = int(0)
batch_size = int(1e6)
def get_random_phases(npoints):
return np.random.random(
(npoints, 3))*2*np.pi
def get_random_versors(npoints):
bla = np.random.normal(
size = (npoints, 3))
bla /= np.sum(bla**2, axis = 1)[:, None]**.5
return bla
while nn > 0:
if nn > batch_size:
dset[cc*batch_size:(cc+1)*batch_size, :3] = get_random_phases(batch_size)
if dset.shape[1] == 6:
dset[cc*batch_size:(cc+1)*batch_size, 3:] = get_random_versors(batch_size)
nn -= batch_size
else:
dset[cc*batch_size:cc*batch_size+nn, :3] = get_random_phases(nn)
if dset.shape[1] == 6:
dset[cc*batch_size:cc*batch_size+nn, 3:] = get_random_versors(nn)
nn = 0
cc += 1
except Exception as e:
print(e)
return None
def generate_vector_field(
self,
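generate_tracer_state above fills the state dataset in batches: the first three components are uniform random phases in [0, 2*pi) and, for six-component particles, the remaining three form a random unit vector. A small inspection sketch (file name hypothetical, dataset names from the diff) to confirm the layout and the unit-norm property after the state has been generated:

    # Inspection sketch; 'checkpoint_0.h5' is a placeholder file name.
    import numpy as np
    import h5py

    with h5py.File('checkpoint_0.h5', 'r') as f:
        state = f['tracers0/state/0'][...]
        print('state shape:', state.shape)      # (nparticles, 3) or (nparticles, 6)
        if state.shape[-1] == 6:
            norms = np.sum(state[:, 3:]**2, axis = 1)**0.5
            print('orientations are unit vectors:', np.allclose(norms, 1.0))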
......@@ -976,6 +958,12 @@ class DNS(_code):
self,
opt = None):
if self.parameters['nparticles'] > 0:
with h5py.File(self.get_checkpoint_0_fname(), 'a') as ofile:
s = 0
if not 'tracers{0}'.format(s) in ofile.keys():
ofile.create_group('tracers{0}'.format(s))
ofile.create_group('tracers{0}/rhs'.format(s))
ofile.create_group('tracers{0}/state'.format(s))
self.generate_tracer_state(
species = 0,
rseed = opt.particle_rand_seed)
......@@ -1046,7 +1034,7 @@ class DNS(_code):
self,
opt = None):
if not os.path.exists(self.get_data_file_name()):
self.generate_initial_condition()
self.generate_initial_condition(opt = opt)
self.write_par()
self.run(
nb_processes = opt.nb_processes,
......
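The C++ hunks that follow change the particle reader so that the file dataspace is built explicitly with H5Screate_simple from the total particle count, and each MPI process then selects its own slab by offset before reading. For orientation only, the same partial-read pattern expressed with h5py; the offset and size stand in for load_splitter.getMyOffset()/getMySize() and the file name is a placeholder:

    # h5py analogue of the per-process hyperslab read performed in the C++ code below.
    import h5py

    my_offset, my_size = 0, 1000                 # placeholder split for one process
    with h5py.File('checkpoint_0.h5', 'r') as f:
        dset = f['tracers0/state/0']
        # slicing selects a hyperslab on the file dataspace and reads only those rows
        local_state = dset[my_offset:my_offset + my_size, :]
        print(local_state.shape)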
......@@ -2,7 +2,7 @@
#define NDEBUG
//#define NDEBUG
#include <string>
#include <cmath>
......
......@@ -158,7 +158,8 @@ public:
hid_t dset = H5Dopen(particle_file, inDatanameState.c_str(), H5P_DEFAULT);
assert(dset >= 0);
hid_t rspace = H5Dget_space(dset);
hsize_t file_space_dims[2] = {nb_total_particles, size_particle_positions};
hid_t rspace = H5Screate_simple(2, file_space_dims, NULL);
assert(rspace >= 0);
hsize_t offset[2] = {load_splitter.getMyOffset(), 0};
......@@ -184,11 +185,11 @@ public:
TIMEZONE("rhs-read");
hid_t dset = H5Dopen(particle_file, inDatanameRhs.c_str(), H5P_DEFAULT);
assert(dset >= 0);
hsize_t file_space_dims[3] = {nb_rhs, nb_total_particles, size_particle_rhs};
hid_t rspace = H5Screate_simple(3, file_space_dims, NULL);
assert(rspace >= 0);
for(hsize_t idx_rhs = 0 ; idx_rhs < nb_rhs ; ++idx_rhs){
hid_t rspace = H5Dget_space(dset);
assert(rspace >= 0);
if(load_splitter.getMySize()){
split_particles_rhs[idx_rhs].reset(new real_number[load_splitter.getMySize()*size_particle_rhs]);
}
......@@ -203,16 +204,17 @@ public:
NULL, mem_dims, NULL);
variable_used_only_in_assert(rethdf);
assert(rethdf >= 0);
//DEBUG_MSG("");
rethdf = H5Dread(dset, type_id, mspace, rspace, H5P_DEFAULT, split_particles_rhs[idx_rhs].get());
assert(rethdf >= 0);
rethdf = H5Sclose(mspace);
assert(rethdf >= 0);
rethdf = H5Sclose(rspace);
assert(rethdf >= 0);
}
int rethdf = H5Dclose(dset);
int rethdf = H5Sclose(rspace);
assert(rethdf >= 0);
rethdf = H5Dclose(dset);
variable_used_only_in_assert(rethdf);
assert(rethdf >= 0);
}
......