Commit 3bf0ef8d authored by Cristian Lalescu

Merge branch 'feature/postprocessing-wrapper' into develop

parents 477fccc1 fc0d630c
Pipeline #13261 passed in 6 minutes and 6 seconds
@@ -559,8 +559,12 @@ class NavierStokes(_fluid_particle_base):
self.particle_stat_src += '}\n'
self.particle_species += nspecies
return None
def get_cache_file_name(self):
return os.path.join(self.work_dir, self.simname + '_cache.h5')
def get_cache_file(self):
return h5py.File(self.get_postprocess_file_name(), 'r')
def get_postprocess_file_name(self):
return self.get_cache_file_name()
def get_postprocess_file(self):
return h5py.File(self.get_postprocess_file_name(), 'r')
def compute_statistics(self, iter0 = 0, iter1 = None):
@@ -576,7 +580,7 @@ class NavierStokes(_fluid_particle_base):
tensors, and the enstrophy spectrum is also used to
compute the dissipation :math:`\\varepsilon(t)`.
These basic quantities are stored in a newly created HDF5 file,
``simname_cache.h5``.
"""
if len(list(self.statistics.keys())) > 0:
return None
@@ -640,7 +644,7 @@ class NavierStokes(_fluid_particle_base):
"""Compute easy stats.
Further computation of statistics based on the contents of
``simname_cache.h5``.
Standard quantities are as follows
(consistent with [Ishihara]_):
......
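Note: the net effect of the hunks above is that statistics are now cached in ``simname_cache.h5``, with the old postprocess-file accessors kept as thin aliases. A minimal standalone sketch of the resulting accessor pattern (the mixin class name is hypothetical; ``work_dir`` and ``simname`` are assumed to be set by the host class):

import os
import h5py

class CacheFileMixin:
    # sketch of the accessor pattern introduced in the diff above
    def get_cache_file_name(self):
        return os.path.join(self.work_dir, self.simname + '_cache.h5')
    def get_cache_file(self):
        return h5py.File(self.get_cache_file_name(), 'r')
    def get_postprocess_file_name(self):
        # backwards-compatible alias: old callers keep working,
        # but all paths now point at the cache file
        return self.get_cache_file_name()
    def get_postprocess_file(self):
        return h5py.File(self.get_postprocess_file_name(), 'r')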
@@ -359,21 +359,6 @@ class PP(_code):
'--simname',
type = str, dest = 'simname',
default = 'test')
parser.add_argument(
'-n', '--grid-size',
type = int,
dest = 'n',
default = 32,
metavar = 'N',
help = 'code is run by default in a grid of NxNxN')
for coord in ['x', 'y', 'z']:
parser.add_argument(
'--L{0}'.format(coord), '--box-length-{0}'.format(coord),
type = float,
dest = 'L{0}'.format(coord),
default = 2.0,
metavar = 'length{0}'.format(coord),
help = 'length of the box in the {0} direction will be `length{0} x pi`'.format(coord))
parser.add_argument(
'--wd',
type = str, dest = 'work_dir',
@@ -384,31 +369,15 @@ class PP(_code):
type = str,
default = 'single')
parser.add_argument(
'--src-wd',
type = str,
dest = 'src_work_dir',
default = '')
parser.add_argument(
'--src-simname',
type = str,
dest = 'src_simname',
default = '')
parser.add_argument(
'--iter0',
type = int,
dest = 'iter0',
default = 0)
parser.add_argument(
'--kMeta',
type = float,
dest = 'kMeta',
default = 2.0)
parser.add_argument(
        '--dtfactor',
        type = float,
        dest = 'dtfactor',
        default = 0.5,
        help = 'dt is computed as DTFACTOR / N')
parser.add_argument(
        '--iter1',
        type = int,
        dest = 'iter1',
        default = 0)
return None
def particle_parser_arguments(
self,
@@ -493,21 +462,10 @@ class PP(_code):
# merge parameters if needed
for k in self.pp_parameters.keys():
self.parameters[k] = self.pp_parameters[k]
if len(opt.src_work_dir) == 0:
opt.src_work_dir = os.path.realpath(opt.work_dir)
if type(opt.dkx) == type(None):
opt.dkx = 2. / opt.Lx
if type(opt.dky) == type(None):
opt.dky = 2. / opt.Ly
if type(opt.dkz) == type(None):
opt.dkz = 2. / opt.Lz
if type(opt.nx) == type(None):
opt.nx = opt.n
if type(opt.ny) == type(None):
opt.ny = opt.n
if type(opt.nz) == type(None):
opt.nz = opt.n
self.pars_from_namespace(opt)
niter_out = self.get_data_file()['parameters/niter_todo'].value
self.pp_parameters['iteration_list'] = np.arange(
opt.iter0, opt.iter1+niter_out, niter_out, dtype = np.int)
return opt
def launch(
self,
......
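The new ``--iter0``/``--iter1`` options replace the source-simulation flags: the postprocessing wrapper now derives its iteration list from the existing simulation output rather than from its own grid and timestep parameters. A sketch of the iteration-list recipe, with hypothetical values standing in for ``opt.iter0``, ``opt.iter1`` and the ``parameters/niter_todo`` dataset:

import numpy as np

iter0, iter1, niter_out = 0, 256, 64  # assumed values for illustration
# one entry per stored snapshot; the endpoint iter1 is included
# because the upper bound of arange is iter1 + niter_out
iteration_list = np.arange(iter0, iter1 + niter_out, niter_out, dtype=int)
print(iteration_list)  # [  0  64 128 192 256]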
@@ -175,9 +175,12 @@ class _base(object):
def rewrite_par(
self,
group = None,
parameters = None,
file_name = None):
assert(group != 'parameters')
if type(file_name) == type(None):
    file_name = os.path.join(self.work_dir, self.simname + '.h5')
ofile = h5py.File(file_name, 'a')
for k in parameters.keys():
if group not in ofile.keys():
ofile.create_group(group)
@@ -231,9 +234,10 @@ class _base(object):
if type(parameters) == type(None):
parameters = self.parameters
cmd_line_pars = vars(opt)
if 'n' in cmd_line_pars.keys():
    for k in ['nx', 'ny', 'nz']:
        if type(cmd_line_pars[k]) == type(None):
            cmd_line_pars[k] = opt.n
for k in parameters.keys():
if k in cmd_line_pars.keys():
if not type(cmd_line_pars[k]) == type(None):
......
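Opening the parameter file with mode ``'a'`` instead of ``'r+'`` means the target no longer has to exist, which is what lets the postprocessing wrapper write its parameters into a fresh file. A standalone sketch of the updated logic (the free function is hypothetical; the real method lives on ``_base``):

import os
import h5py

def rewrite_par(work_dir, simname, group, parameters, file_name = None):
    assert group != 'parameters'
    if type(file_name) == type(None):
        file_name = os.path.join(work_dir, simname + '.h5')
    # 'a' creates the file if missing; 'r+' would fail instead
    with h5py.File(file_name, 'a') as ofile:
        if group not in ofile.keys():
            ofile.create_group(group)
        for k in parameters.keys():
            # overwrite any existing dataset with the new value
            if k in ofile[group].keys():
                del ofile[group][k]
            ofile[group][k] = parameters[k]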
@@ -1040,23 +1040,36 @@ void compute_gradient(
assert(!src->real_space_representation);
assert((fc1 == ONE && fc2 == THREE) ||
(fc1 == THREE && fc2 == THREExTHREE));
std::fill_n(dst->get_rdata(), dst->rmemlayout->local_size, 0);
dst->real_space_representation = false;
switch(fc1)
{
case ONE:
kk->CLOOP_K2(
[&](ptrdiff_t cindex,
ptrdiff_t xindex,
ptrdiff_t yindex,
ptrdiff_t zindex,
double k2){
if (k2 < kk->kM2)
{
dst->cval(cindex, 0, 0) = -kk->kx[xindex]*src->cval(cindex, 1);
dst->cval(cindex, 0, 1) = kk->kx[xindex]*src->cval(cindex, 0);
dst->cval(cindex, 1, 0) = -kk->ky[yindex]*src->cval(cindex, 1);
dst->cval(cindex, 1, 1) = kk->ky[yindex]*src->cval(cindex, 0);
dst->cval(cindex, 2, 0) = -kk->kz[zindex]*src->cval(cindex, 1);
dst->cval(cindex, 2, 1) = kk->kz[zindex]*src->cval(cindex, 0);
}});
break;
case THREE:
kk->CLOOP_K2(
[&](ptrdiff_t cindex,
ptrdiff_t xindex,
ptrdiff_t yindex,
ptrdiff_t zindex,
double k2){
if (k2 < kk->kM2)
{
for (unsigned int field_component = 0;
field_component < ncomp(fc1);
field_component++)
@@ -1068,20 +1081,9 @@ void compute_gradient(
dst->cval(cindex, 2, field_component, 0) = -kk->kz[zindex]*src->cval(cindex, field_component, 1);
dst->cval(cindex, 2, field_component, 1) = kk->kz[zindex]*src->cval(cindex, field_component, 0);
}
}});
break;
}
}
template class field<float, FFTW, ONE>;
......
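The restructured loop applies the standard spectral derivative: each Fourier mode is multiplied by i*k, which is why every component pairs -k*imag with +k*real, and modes at or beyond the dealiasing cutoff ``kM2`` are left at the zero written by ``std::fill_n``. A numpy sketch of the same operation for the scalar (``ONE``) case, assuming a cubic 2*pi-periodic box:

import numpy as np

def spectral_gradient(scalar):
    # mirrors compute_gradient for fc1 == ONE: gradient via i*k in Fourier space
    n = scalar.shape[0]
    k = 2.0 * np.pi * np.fft.fftfreq(n, d = 2.0 * np.pi / n)
    kx, ky, kz = np.meshgrid(k, k, k, indexing = 'ij')
    shat = np.fft.fftn(scalar)
    # simple stand-in for the kM2 cutoff: the 2/3 dealiasing rule
    mask = kx**2 + ky**2 + kz**2 < (n / 3.0)**2
    grad = []
    for kc in (kx, ky, kz):
        # multiplying by 1j*kc turns (re, im) into (-kc*im, kc*re),
        # the exact sign pattern written out in the C++ loop
        grad.append(np.fft.ifftn(1j * kc * shat * mask).real)
    return np.array(grad)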
#include "hdf5_tools.hpp"
int hdf5_tools::require_size_single_dataset(hid_t dset, int tsize)
{
int ndims;
hid_t space;
space = H5Dget_space(dset);
ndims = H5Sget_simple_extent_ndims(space);
hsize_t *dims = new hsize_t[ndims];
hsize_t *maxdims = new hsize_t[ndims];
H5Sget_simple_extent_dims(space, dims, maxdims);
if (dims[0] < hsize_t(tsize) && maxdims[0] == H5S_UNLIMITED)
{
dims[0] = tsize;
H5Dset_extent(dset, dims);
}
H5Sclose(space);
delete[] maxdims;
delete[] dims;
return EXIT_SUCCESS;
}
int hdf5_tools::grow_single_dataset(hid_t dset, int tincrement)
{
int ndims;
@@ -7,14 +27,34 @@ int hdf5_tools::grow_single_dataset(hid_t dset, int tincrement)
space = H5Dget_space(dset);
ndims = H5Sget_simple_extent_ndims(space);
hsize_t *dims = new hsize_t[ndims];
H5Sget_simple_extent_dims(space, dims, NULL);
dims[0] += tincrement;
H5Dset_extent(dset, dims);
hsize_t *maxdims = new hsize_t[ndims];
H5Sget_simple_extent_dims(space, dims, maxdims);
if (maxdims[0] == H5S_UNLIMITED)
{
dims[0] += tincrement;
H5Dset_extent(dset, dims);
}
H5Sclose(space);
delete[] maxdims;
delete[] dims;
return EXIT_SUCCESS;
}
herr_t hdf5_tools::require_size_dataset_visitor(
hid_t o_id,
const char *name,
const H5O_info_t *info,
void *op_data)
{
if (info->type == H5O_TYPE_DATASET)
{
hid_t dset = H5Dopen(o_id, name, H5P_DEFAULT);
require_size_single_dataset(dset, *((int*)(op_data)));
H5Dclose(dset);
}
return EXIT_SUCCESS;
}
herr_t hdf5_tools::grow_dataset_visitor(
hid_t o_id,
const char *name,
@@ -50,6 +90,26 @@ int hdf5_tools::grow_file_datasets(
return file_problems;
}
int hdf5_tools::require_size_file_datasets(
const hid_t stat_file,
const std::string group_name,
int tsize)
{
int file_problems = 0;
hid_t group;
group = H5Gopen(stat_file, group_name.c_str(), H5P_DEFAULT);
H5Ovisit(
group,
H5_INDEX_NAME,
H5_ITER_NATIVE,
require_size_dataset_visitor,
&tsize);
H5Gclose(group);
return file_problems;
}
template <typename number>
std::vector<number> hdf5_tools::read_vector(
const hid_t group,
......
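Unlike ``grow_single_dataset``, which always adds ``tincrement`` rows, the new ``require_size`` helpers only extend a dataset's leading dimension up to a target size, and only when the dataset was created extensible (``maxdims[0] == H5S_UNLIMITED``). An h5py sketch of the same check (file and dataset names are hypothetical):

import h5py
import numpy as np

def require_size_single_dataset(dset, tsize):
    # grow the leading dimension to at least tsize, but only if the
    # dataset is extensible (maxshape entry None == H5S_UNLIMITED)
    if dset.shape[0] < tsize and dset.maxshape[0] is None:
        dset.resize(tsize, axis = 0)

with h5py.File('demo_cache.h5', 'w') as f:
    d = f.create_dataset('stats', shape = (4, 3), maxshape = (None, 3),
                         dtype = np.float64)
    require_size_single_dataset(d, 10)
    print(d.shape)  # (10, 3)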
@@ -48,6 +48,21 @@ namespace hdf5_tools
const std::string group_name,
int tincrement);
int require_size_single_dataset(
hid_t dset,
int tsize);
herr_t require_size_dataset_visitor(
hid_t o_id,
const char *name,
const H5O_info_t *info,
void *op_data);
int require_size_file_datasets(
const hid_t stat_file,
const std::string group_name,
int tsize);
template <typename number>
std::vector<number> read_vector(
const hid_t group,
......
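``require_size_file_datasets`` applies the single-dataset check to every dataset below a group via ``H5Ovisit``; in h5py the equivalent traversal is ``visititems``. A sketch of that visitor pattern (reuses the resizing logic from the previous sketch):

import h5py

def require_size_file_datasets(stat_file, group_name, tsize):
    # visititems plays the role of H5Ovisit + require_size_dataset_visitor
    def visitor(name, obj):
        # skip groups and scalar datasets, which have no leading dimension
        if isinstance(obj, h5py.Dataset) and len(obj.shape) > 0:
            if obj.shape[0] < tsize and obj.maxshape[0] is None:
                obj.resize(tsize, axis = 0)
    stat_file[group_name].visititems(visitor)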