TurTLE / Commits / 10fcb49b

Commit 10fcb49b, authored Mar 06, 2020 by Cristian Lalescu

Merge branch 'develop' into feature/collisions

Parents: afea23ca, ab9ad5bb
Changes: 98 files

CMakeLists.txt
@@ -3,20 +3,20 @@
 # Copyright 2019 Max Planck Institute                               #
 # for Dynamics and Self-Organization                                #
 #                                                                   #
-# This file is part of bfps.                                        #
+# This file is part of TurTLE.                                      #
 #                                                                   #
-# bfps is free software: you can redistribute it and/or modify      #
+# TurTLE is free software: you can redistribute it and/or modify    #
 # it under the terms of the GNU General Public License as published #
 # by the Free Software Foundation, either version 3 of the License, #
 # or (at your option) any later version.                            #
 #                                                                   #
-# bfps is distributed in the hope that it will be useful,           #
+# TurTLE is distributed in the hope that it will be useful,         #
 # but WITHOUT ANY WARRANTY; without even the implied warranty of    #
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the     #
 # GNU General Public License for more details.                      #
 #                                                                   #
 # You should have received a copy of the GNU General Public License #
-# along with bfps.  If not, see <http://www.gnu.org/licenses/>      #
+# along with TurTLE.  If not, see <http://www.gnu.org/licenses/>    #
 #                                                                   #
 # Contact: Cristian.Lalescu@ds.mpg.de                               #
 #                                                                   #
@@ -272,6 +272,7 @@ set(cpp_for_lib
     ${PROJECT_SOURCE_DIR}/cpp/full_code/symmetrize_test.cpp
     ${PROJECT_SOURCE_DIR}/cpp/full_code/field_output_test.cpp
     ${PROJECT_SOURCE_DIR}/cpp/full_code/get_rfields.cpp
+    ${PROJECT_SOURCE_DIR}/cpp/full_code/write_rpressure.cpp
     ${PROJECT_SOURCE_DIR}/cpp/full_code/bandpass_stats.cpp
     ${PROJECT_SOURCE_DIR}/cpp/full_code/field_single_to_double.cpp
     ${PROJECT_SOURCE_DIR}/cpp/full_code/resize.cpp
@@ -300,6 +301,7 @@ set(cpp_for_lib
     ${PROJECT_SOURCE_DIR}/cpp/full_code/test_interpolation.cpp
     ${PROJECT_SOURCE_DIR}/cpp/full_code/NSVEparticles.cpp
     ${PROJECT_SOURCE_DIR}/cpp/full_code/NSVEcomplex_particles.cpp
+    ${PROJECT_SOURCE_DIR}/cpp/full_code/NSVE_Stokes_particles.cpp
     ${PROJECT_SOURCE_DIR}/cpp/full_code/NSVEp_extra_sampling.cpp
     ${PROJECT_SOURCE_DIR}/cpp/particles/particles_inner_computer.cpp
     ${PROJECT_SOURCE_DIR}/cpp/full_code/ornstein_uhlenbeck_process.cpp
@@ -320,6 +322,7 @@ set(hpp_for_lib
     ${PROJECT_SOURCE_DIR}/cpp/full_code/symmetrize_test.hpp
     ${PROJECT_SOURCE_DIR}/cpp/full_code/field_output_test.hpp
     ${PROJECT_SOURCE_DIR}/cpp/full_code/get_rfields.hpp
+    ${PROJECT_SOURCE_DIR}/cpp/full_code/write_rpressure.hpp
     ${PROJECT_SOURCE_DIR}/cpp/full_code/bandpass_stats.hpp
     ${PROJECT_SOURCE_DIR}/cpp/full_code/field_single_to_double.hpp
     ${PROJECT_SOURCE_DIR}/cpp/full_code/resize.hpp
@@ -348,14 +351,16 @@ set(hpp_for_lib
     ${PROJECT_SOURCE_DIR}/cpp/full_code/test_interpolation.hpp
     ${PROJECT_SOURCE_DIR}/cpp/full_code/NSVEparticles.hpp
     ${PROJECT_SOURCE_DIR}/cpp/full_code/NSVEcomplex_particles.hpp
+    ${PROJECT_SOURCE_DIR}/cpp/full_code/NSVE_Stokes_particles.hpp
     ${PROJECT_SOURCE_DIR}/cpp/full_code/NSVEp_extra_sampling.hpp
     ${PROJECT_SOURCE_DIR}/cpp/particles/particles_inner_computer.hpp
+    ${PROJECT_SOURCE_DIR}/cpp/particles/particles_inner_computer_2nd_order.hpp
     ${PROJECT_SOURCE_DIR}/cpp/particles/particles_inner_computer_empty.hpp
     ${PROJECT_SOURCE_DIR}/cpp/particles/abstract_particles_input.hpp
     ${PROJECT_SOURCE_DIR}/cpp/particles/abstract_particles_output.hpp
     ${PROJECT_SOURCE_DIR}/cpp/particles/abstract_particles_system.hpp
     ${PROJECT_SOURCE_DIR}/cpp/particles/abstract_particles_system_with_p2p.hpp
     ${PROJECT_SOURCE_DIR}/cpp/particles/alltoall_exchanger.hpp
     ${PROJECT_SOURCE_DIR}/cpp/particles/env_utils.hpp
     ${PROJECT_SOURCE_DIR}/cpp/particles/lock_free_bool_array.hpp
     ${PROJECT_SOURCE_DIR}/cpp/particles/p2p_computer_empty.hpp
     ${PROJECT_SOURCE_DIR}/cpp/particles/p2p_computer.hpp
@@ -368,7 +373,6 @@ set(hpp_for_lib
     ${PROJECT_SOURCE_DIR}/cpp/particles/particles_distr_mpi.hpp
     ${PROJECT_SOURCE_DIR}/cpp/particles/particles_field_computer.hpp
     ${PROJECT_SOURCE_DIR}/cpp/particles/particles_generic_interp.hpp
-    ${PROJECT_SOURCE_DIR}/cpp/particles/particles_inner_computer_empty.hpp
     ${PROJECT_SOURCE_DIR}/cpp/particles/particles_input_hdf5.hpp
     ${PROJECT_SOURCE_DIR}/cpp/particles/particles_output_hdf5.hpp
     ${PROJECT_SOURCE_DIR}/cpp/particles/particles_output_mpiio.hpp
@@ -382,6 +386,7 @@ set(hpp_for_lib
     ${PROJECT_SOURCE_DIR}/cpp/full_code/NSVE_no_output.hpp
     ${PROJECT_SOURCE_DIR}/cpp/full_code/NSVEparticles_no_output.hpp
     ${PROJECT_SOURCE_DIR}/cpp/base.hpp
     ${PROJECT_SOURCE_DIR}/cpp/env_utils.hpp
     ${PROJECT_SOURCE_DIR}/cpp/fftw_interface.hpp
     ${PROJECT_SOURCE_DIR}/cpp/bfps_timer.hpp
     ${PROJECT_SOURCE_DIR}/cpp/omputils.hpp
@@ -408,6 +413,13 @@ configure_file(${PROJECT_SOURCE_DIR}/cmake/TurTLEConfig.cmake.in ${PROJECT_BINAR
 install(FILES "${PROJECT_BINARY_DIR}/TurTLEConfig.cmake"
         DESTINATION lib/)
 export(TARGETS TurTLE FILE "${PROJECT_BINARY_DIR}/TurTLELibraryDepends.cmake")
 install(EXPORT TURTLE_EXPORT DESTINATION lib/)
 if(EXISTS "${PROJECT_BINARY_DIR}/bash_setup_for_TurTLE.sh")
     install(FILES "${PROJECT_BINARY_DIR}/bash_setup_for_TurTLE.sh"
             PERMISSIONS OWNER_READ GROUP_READ WORLD_READ
             DESTINATION "lib/")
 endif()
 #####################################################################################
@@ -420,3 +432,70 @@ else()
     install(CODE "execute_process(COMMAND ${CMAKE_COMMAND} -E copy ${PROJECT_SOURCE_DIR}/pc_host_info.py ${PROJECT_BINARY_DIR}/python/TurTLE/host_info.py)")
 endif()
 install(CODE "execute_process(COMMAND python3 ${PROJECT_SOURCE_DIR}/setup.py install --force --prefix=${CMAKE_INSTALL_PREFIX} WORKING_DIRECTORY ${PROJECT_BINARY_DIR}/python/)")
+#####################################################################################
+## Add tests
+include(CTest)
+set(TEST_OUTPUT_DIRECTORY "${PROJECT_BINARY_DIR}/test_runs/")
+enable_testing()
+if(BUILD_TESTING)
+    file(MAKE_DIRECTORY ${TEST_OUTPUT_DIRECTORY})
+    ### basic functionality
+    add_test(NAME test_fftw
+             COMMAND turtle.test_fftw
+             WORKING_DIRECTORY ${TEST_OUTPUT_DIRECTORY})
+    add_test(NAME test_Parseval
+             COMMAND turtle.test_Parseval
+             WORKING_DIRECTORY ${TEST_OUTPUT_DIRECTORY})
+    ### compare DNS output to stored results
+    add_test(NAME test_NSVEparticles
+             COMMAND turtle.test_NSVEparticles --ntpp 2
+             WORKING_DIRECTORY ${TEST_OUTPUT_DIRECTORY})
+    ### simple runs of post-processing tools
+    add_test(NAME test_pp_single_to_double
+             COMMAND turtle PP field_single_to_double --simname dns_nsveparticles --iter0 32 --iter1 32
+             WORKING_DIRECTORY ${TEST_OUTPUT_DIRECTORY})
+    add_test(NAME test_pp_get_rfields
+             COMMAND turtle PP get_rfields --simname dns_nsveparticles --iter0 0 --iter1 64
+             WORKING_DIRECTORY ${TEST_OUTPUT_DIRECTORY})
+    add_test(NAME test_pp_write_rpressure
+             COMMAND turtle PP write_rpressure --simname dns_nsveparticles --iter0 0 --iter1 64
+             WORKING_DIRECTORY ${TEST_OUTPUT_DIRECTORY})
+    add_test(NAME test_pp_joint_acc_vel_stats
+             COMMAND turtle PP joint_acc_vel_stats --simname dns_nsveparticles --iter0 0 --iter1 64
+             WORKING_DIRECTORY ${TEST_OUTPUT_DIRECTORY})
+    add_test(NAME test_pp_resize
+             COMMAND turtle PP resize --simname dns_nsveparticles --new_nx 96 --new_ny 96 --new_nz 96 --new_simname dns_nsveparticles_resized
+             WORKING_DIRECTORY ${TEST_OUTPUT_DIRECTORY})
+    ### simple runs of different DNS
+    add_test(NAME test_NSVEp_extra_sampling
+             COMMAND turtle DNS NSVEp_extra_sampling -n 32 --src-simname dns_nsveparticles --src-iteration 32 --simname dns_nsvep_extra_sampling --nparticles 1000
+             WORKING_DIRECTORY ${TEST_OUTPUT_DIRECTORY})
+    add_test(NAME test_NSVEcomplex_particles
+             COMMAND turtle DNS NSVEcomplex_particles -n 32 --src-simname dns_nsveparticles --src-iteration 32 --simname dns_nsvecomplex_particles --nparticles 1000
+             WORKING_DIRECTORY ${TEST_OUTPUT_DIRECTORY})
+    add_test(NAME test_static_field
+             COMMAND turtle DNS static_field --simname dns_static_field --nparticles 10000
+             WORKING_DIRECTORY ${TEST_OUTPUT_DIRECTORY})
+    add_test(NAME test_kraichnan_field
+             COMMAND turtle DNS kraichnan_field --simname dns_kraichnan_field --dtfactor 0.05 --nparticles 10000 --ntpp 2
+             WORKING_DIRECTORY ${TEST_OUTPUT_DIRECTORY})
+    add_test(NAME test_NSVE_Stokes_particles
+             COMMAND turtle DNS NSVE_Stokes_particles -n 32 --src-simname dns_nsveparticles --src-iteration 32 --simname dns_nsve_tokes_particles --nparticles 10000
+             WORKING_DIRECTORY ${TEST_OUTPUT_DIRECTORY})
+endif(BUILD_TESTING)
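Note: the add_test entries above all go through the `turtle` command-line launcher. The same runs can also be driven from Python through the DNS class extended in this commit; a minimal sketch (the simname and parameter values below are illustrative, not the ones used by ctest):

    from TurTLE import DNS

    # illustrative launch of the newly registered Stokes-drag solver;
    # flags mirror the ctest command line above ('-n' sets the grid size)
    c = DNS()
    c.launch([
        'NSVE_Stokes_particles',
        '-n', '32',
        '--simname', 'dns_nsve_stokes_particles',
        '--nparticles', '100',
        '--niter_todo', '8',
        '--wd', './'])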

TurTLE/DNS.py
@@ -439,7 +439,14 @@ class DNS(_code):
         assert (self.parameters['niter_todo'] % self.parameters['niter_stat'] == 0)
         assert (self.parameters['niter_todo'] % self.parameters['niter_out'] == 0)
         assert (self.parameters['niter_out'] % self.parameters['niter_stat'] == 0)
-        if self.dns_type in ['NSVEparticles_no_output',
-                             'NSVEcomplex_particles',
-                             'NSVEparticles',
-                             'static_field',
-                             'static_field_with_ghost_collisions',
-                             'kraichnan_field']:
+        if self.dns_type in ['NSVEparticles_no_output',
+                             'NSVEcomplex_particles',
+                             'NSVE_Stokes_particles',
+                             'NSVEparticles',
+                             'static_field',
+                             'static_field_with_ghost_collisions',
+                             'kraichnan_field']:
             assert (self.parameters['niter_todo'] % self.parameters['niter_part'] == 0)
             assert (self.parameters['niter_out'] % self.parameters['niter_part'] == 0)
         _code.write_par(self, iter0 = iter0)
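Note: the extended assertion block above enforces that the output, statistics and particle sampling intervals nest evenly. A minimal illustration with hypothetical values:

    # hypothetical parameter values; the asserts mirror the checks above
    parameters = {'niter_todo': 64, 'niter_stat': 4, 'niter_out': 32, 'niter_part': 8}
    assert parameters['niter_todo'] % parameters['niter_stat'] == 0
    assert parameters['niter_todo'] % parameters['niter_out'] == 0
    assert parameters['niter_out'] % parameters['niter_stat'] == 0
    # particle-enabled runs (now including 'NSVE_Stokes_particles') additionally require:
    assert parameters['niter_todo'] % parameters['niter_part'] == 0
    assert parameters['niter_out'] % parameters['niter_part'] == 0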
@@ -654,40 +661,45 @@ class DNS(_code):
                 'NSVEparticles',
                 help = 'plain Navier-Stokes vorticity formulation, with basic fluid tracers')
+        parser_NSVE_Stokes_particles = subparsers.add_parser(
+                'NSVE_Stokes_particles',
+                help = 'plain Navier-Stokes vorticity formulation, with passive Stokes drag particles')
         parser_NSVEp2p = subparsers.add_parser(
                 'NSVEcomplex_particles',
                 help = 'plain Navier-Stokes vorticity formulation, with oriented active particles')
         parser_NSVEp_extra = subparsers.add_parser(
                 'NSVEp_extra_sampling',
                 help = 'plain Navier-Stokes vorticity formulation, with basic fluid tracers, that sample velocity gradient, as well as pressure and its derivatives.')
-        for parser in ['NSVE', 'NSVE_no_output', 'NSVEparticles_no_output', 'NSVEp2', 'NSVEp2p',
-                       'NSVEp_extra', 'static_field', 'static_field_with_ghost_collisions', 'kraichnan_field']:
-            eval('self.simulation_parser_arguments({0})'.format('parser_' + parser))
-            eval('self.job_parser_arguments({0})'.format('parser_' + parser))
-            eval('self.parameters_to_parser_arguments({0})'.format('parser_' + parser))
-            eval('self.parameters_to_parser_arguments('
-                 'parser_{0},'
-                 'self.generate_extra_parameters(\'{0}\'))'.format(parser))
-        for parser in ['NSVEparticles_no_output', 'NSVEp2', 'NSVEp2p',
-                       'NSVEp_extra', 'static_field', 'kraichnan_field']:
-            eval('self.particle_parser_arguments({0})'.format('parser_' + parser))
-            eval('self.parameters_to_parser_arguments('
-                 'parser_{0},'
-                 'self.NSVEp_extra_parameters)'.format(parser))
+        for pp in ['NSVE', 'NSVE_no_output', 'NSVEparticles_no_output', 'NSVEp2', 'NSVEp2p',
+                   'NSVE_Stokes_particles', 'NSVEp_extra', 'static_field',
+                   'static_field_with_ghost_collisions', 'kraichnan_field']:
+            eval('self.simulation_parser_arguments({0})'.format('parser_' + pp))
+            eval('self.job_parser_arguments({0})'.format('parser_' + pp))
+            eval('self.parameters_to_parser_arguments({0})'.format('parser_' + pp))
+            eval('self.parameters_to_parser_arguments('
+                 'parser_{0},'
+                 'self.generate_extra_parameters(\'{0}\'))'.format(pp))
+        for pp in ['NSVEparticles_no_output', 'NSVEp2', 'NSVEp2p', 'NSVE_Stokes_particles',
+                   'NSVEp_extra', 'static_field', 'kraichnan_field']:
+            eval('self.particle_parser_arguments({0})'.format('parser_' + pp))
+            eval('self.parameters_to_parser_arguments('
+                 'parser_{0},'
+                 'self.NSVEp_extra_parameters)'.format(pp))
         return None

     def generate_extra_parameters(
             self,
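Note: the loops above attach the same shared argument groups to every DNS subparser by eval-ing the parser variable name; the loop variable is renamed to `pp` so it no longer shadows the enclosing argparse parser. For readers unfamiliar with the pattern, an eval-free sketch of the same idea, with hypothetical helper names:

    import argparse

    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(dest='dns_type')

    def add_shared_arguments(p):
        # stand-in for simulation_parser_arguments / job_parser_arguments / etc.
        p.add_argument('--simname', type=str, default='test')
        p.add_argument('--nparticles', type=int, default=0)

    dns_parsers = {}
    for name, description in [
            ('NSVEparticles', 'plain Navier-Stokes vorticity formulation, with basic fluid tracers'),
            ('NSVE_Stokes_particles', 'plain Navier-Stokes vorticity formulation, with passive Stokes drag particles')]:
        dns_parsers[name] = subparsers.add_parser(name, help=description)
        add_shared_arguments(dns_parsers[name])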
@@ -698,7 +710,11 @@ class DNS(_code):
             pars['field_random_seed'] = int(1)
             pars['spectrum_slope'] = float(-5./3)
             pars['spectrum_k_cutoff'] = float(16)
-            pars['spectrum_coefficient'] = float(1)
+            pars['spectrum_coefficient'] = float(0.1)
+        if dns_type == 'NSVE_Stokes_particles':
+            pars['initial_field_amplitude'] = float(0.0)
+            pars['initial_particle_vel'] = float(0.05)
+            pars['drag_coefficient'] = float(0.1)
         return pars

     def prepare_launch(
             self,
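Note: the new NSVE_Stokes_particles branch above gives the Stokes-drag solver three defaults that can still be overridden on the command line. A standalone rendering of the same pattern (per-solver defaults collected in a dict), condensing the values visible in this hunk:

    # standalone sketch of the per-solver defaults added above
    def generate_extra_parameters(dns_type):
        pars = {}
        if dns_type == 'kraichnan_field':
            pars['field_random_seed'] = int(1)
            pars['spectrum_slope'] = float(-5./3)
            pars['spectrum_k_cutoff'] = float(16)
            pars['spectrum_coefficient'] = float(0.1)
        if dns_type == 'NSVE_Stokes_particles':
            pars['initial_field_amplitude'] = float(0.0)
            pars['initial_particle_vel'] = float(0.05)
            pars['drag_coefficient'] = float(0.1)
        return pars

    print(generate_extra_parameters('NSVE_Stokes_particles'))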
@@ -737,6 +753,7 @@ class DNS(_code):
         if self.dns_type in [
                 'NSVEparticles',
                 'NSVEcomplex_particles',
+                'NSVE_Stokes_particles',
                 'NSVEparticles_no_output',
                 'NSVEp_extra_sampling',
                 'static_field',
@@ -812,7 +829,15 @@ class DNS(_code):
         # hardcoded FFTW complex representation size
         field_size = 3*(opt.nx+2)*opt.ny*opt.nz*self.fluid_dtype.itemsize
         checkpoint_size = field_size
-        if self.dns_type in ['static_field', 'NSVEparticles', 'NSVEcomplex_particles', 'NSVEparticles_no_output', 'NSVEp_extra_sampling', 'static_field_with_ghost_collisions', 'kraichnan_field']:
+        if self.dns_type in ['kraichnan_field',
+                             'static_field',
+                             'static_field_with_ghost_collisions',
+                             'NSVEparticles',
+                             'NSVEcomplex_particles',
+                             'NSVE_Stokes_particles',
+                             'NSVEparticles_no_output',
+                             'NSVEp_extra_sampling']:
             rhs_size = self.parameters['tracers0_integration_steps']
             if type(opt.tracers0_integration_steps) != type(None):
                 rhs_size = opt.tracers0_integration_steps
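Note: the checkpoint size estimate above relies on FFTW's padded real-to-complex layout, which stores nx + 2 values along the fastest axis for a transform of size nx. A worked example with assumed values (nx = ny = nz = 32, single-precision fields):

    import numpy as np

    nx = ny = nz = 32
    itemsize = np.dtype(np.float32).itemsize          # 4 bytes, assuming single precision
    field_size = 3 * (nx + 2) * ny * nz * itemsize    # 3 components, padded along x
    print(field_size)                                 # 417792 bytes per vector field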
@@ -852,7 +877,9 @@ class DNS(_code):
         integration_steps = self.NSVEp_extra_parameters['tracers0_integration_steps']
         if 'tracers{0}_integration_steps'.format(species) in self.parameters.keys():
             integration_steps = self.parameters['tracers{0}_integration_steps'.format(species)]
-        if self.dns_type == 'NSVEcomplex_particles' and species == 0:
+        if self.dns_type in ['NSVEcomplex_particles', 'NSVE_Stokes_particles'] and species == 0:
             ncomponents = 6
         with h5py.File(self.get_checkpoint_0_fname(), 'a') as data_file:
             nn = self.parameters['nparticles']
@@ -884,12 +911,18 @@ class DNS(_code):
                     if nn > batch_size:
                         dset[cc*batch_size:(cc+1)*batch_size, :3] = get_random_phases(batch_size)
                         if dset.shape[1] == 6:
-                            dset[cc*batch_size:(cc+1)*batch_size, 3:] = get_random_versors(batch_size)
+                            if self.dns_type == 'NSVE_Stokes_particles':
+                                dset[cc*batch_size:(cc+1)*batch_size, 3:] = self.parameters['initial_particle_vel']*get_random_versors(batch_size)
+                            else:
+                                dset[cc*batch_size:(cc+1)*batch_size, 3:] = get_random_versors(batch_size)
                         nn -= batch_size
                     else:
                         dset[cc*batch_size:cc*batch_size+nn, :3] = get_random_phases(nn)
                         if dset.shape[1] == 6:
-                            dset[cc*batch_size:cc*batch_size+nn, 3:] = get_random_versors(nn)
+                            if self.dns_type == 'NSVE_Stokes_particles':
+                                dset[cc*batch_size:cc*batch_size+nn, 3:] = self.parameters['initial_particle_vel']*get_random_versors(nn)
+                            else:
+                                dset[cc*batch_size:cc*batch_size+nn, 3:] = get_random_versors(nn)
                         nn = 0
                     cc += 1
             except Exception as e:
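Note: get_random_phases and get_random_versors are helpers defined outside this hunk; for NSVE_Stokes_particles the versors are scaled by parameters['initial_particle_vel'], so columns 3: of the checkpoint dataset hold an initial particle velocity rather than an orientation. A plausible sketch of the versor helper, for illustration only (not the project's implementation):

    import numpy as np

    def get_random_versors(nbatch):
        # isotropically distributed unit vectors
        v = np.random.normal(size=(nbatch, 3))
        v /= np.linalg.norm(v, axis=1, keepdims=True)
        return v

    initial_particle_vel = 0.05   # default added in generate_extra_parameters above
    momentum_ic = initial_particle_vel * get_random_versors(1000)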
@@ -1036,6 +1069,8 @@ class DNS(_code):
                 if self.dns_type in ['NSVEcomplex_particles']:
                     particle_file.create_group('tracers0/orientation')
                     particle_file.create_group('tracers0/velocity_gradient')
+                if self.dns_type in ['NSVE_Stokes_particles']:
+                    particle_file.create_group('tracers0/momentum')
                 if self.dns_type in ['NSVEp_extra_sampling']:
                     particle_file.create_group('tracers0/velocity_gradient')
                     particle_file.create_group('tracers0/pressure')
@@ -1049,6 +1084,16 @@ class DNS(_code):
         # first, check if initial field exists
         need_field = False
         if self.check_current_vorticity_exists:
             need_field = True
         if self.dns_type in ['NSVE',
                              'NSVE_no_output',
                              'static_field',
                              'NSVEparticles',
                              'NSVEcomplex_particles',
                              'NSVE_Stokes_particles',
                              'NSVEparticles_no_output',
                              'NSVEp_extra_sampling']:
             if not os.path.exists(self.get_checkpoint_0_fname()):
                 need_field = True
             else:
@@ -1082,10 +1127,16 @@ class DNS(_code):
                         f,
                         'vorticity/complex/{0}'.format(0))
                 else:
-                    data = self.generate_vector_field(
-                            write_to_file = False,
-                            spectra_slope = 2.0,
-                            amplitude = 0.05)
+                    if self.dns_type == 'NSVE_Stokes_particles':
+                        data = self.generate_vector_field(
+                                write_to_file = False,
+                                spectra_slope = 2.0,
+                                amplitude = self.parameters['initial_field_amplitude'])
+                    else:
+                        data = self.generate_vector_field(
+                                write_to_file = False,
+                                spectra_slope = 2.0,
+                                amplitude = 0.05)
                 f['vorticity/complex/{0}'.format(0)] = data
                 f.close()
         if self.dns_type == 'kraichnan_field':
@@ -1100,6 +1151,7 @@ class DNS(_code):
                 'static_field_with_ghost_collisions',
                 'NSVEparticles',
                 'NSVEcomplex_particles',
+                'NSVE_Stokes_particles',
                 'NSVEparticles_no_output',
                 'NSVEp_extra_sampling']:
             self.generate_particle_data(opt = opt)

TurTLE/PP.py
@@ -145,6 +145,8 @@ class PP(_code):
             pars['filter_type'] = 'Gauss'
             pars['max_velocity_estimate'] = float(10)
             pars['histogram_bins'] = int(129)
+        elif dns_type == 'get_rfields':
+            pars['TrS2_on'] = int(0)
         return pars
     def get_data_file_name(self):
         return os.path.join(self.work_dir, self.simname + '.h5')
@@ -447,13 +449,17 @@ class PP(_code):
         parser_resize = subparsers.add_parser(
                 'resize',
                 help = 'get joint acceleration and velocity statistics')
+        parser_write_rpressure = subparsers.add_parser(
+                'write_rpressure',
+                help = 'write real pressure field to binary')
         for pp_type in ['resize',
                         'joint_acc_vel_stats',
                         'bandpass_stats',
                         'get_rfields',
                         'field_single_to_double',
-                        'native_binary_to_hdf5']:
+                        'native_binary_to_hdf5',
+                        'write_rpressure']:
             eval('self.simulation_parser_arguments(parser_' + pp_type + ')')
             eval('self.job_parser_arguments(parser_' + pp_type + ')')
             eval('self.parameters_to_parser_arguments(parser_' + pp_type + ')')
@@ -805,15 +811,21 @@ class PP(_code):
         with h5py.File(os.path.join(self.work_dir, self.simname + '_fields.h5'), 'a') as ff:
             ff.require_group('vorticity')
             ff.require_group('vorticity/complex')
-            checkpoint_file_list = glob.glob(self.simname + '_checkpoint_*.h5')
+            checkpoint_file_list = [self.simname + '_checkpoint_{0}.h5'.format(cp)
+                                    for cp in range(df['checkpoint'][()]+1)]
             for cpf_name in checkpoint_file_list:
-                cpf = h5py.File(cpf_name, 'r')
-                for iter_name in cpf['vorticity/complex'].keys():
-                    if iter_name not in ff['vorticity/complex'].keys():
-                        ff['vorticity/complex/' + iter_name] = h5py.ExternalLink(cpf_name, 'vorticity/complex/' + iter_name)
-                cpf.close()
+                if os.path.exists(cpf_name):
+                    cpf = h5py.File(cpf_name, 'r')
+                    if 'vorticity' not in cpf.keys():
+                        print('file ', cpf_name, ' does not have vorticity group')
+                        continue
+                    else:
+                        for iter_name in cpf['vorticity/complex'].keys():
+                            if iter_name not in ff['vorticity/complex'].keys():
+                                ff['vorticity/complex/' + iter_name] = h5py.ExternalLink(cpf_name, 'vorticity/complex/' + iter_name)
+                    cpf.close()
         return None

     def launch_jobs(
             self,
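Note: the rewritten loop above tolerates missing checkpoint files and skips files without a vorticity group, while still exposing every stored iteration through HDF5 external links. A minimal sketch of the linking pattern with hypothetical file names:

    import h5py

    with h5py.File('dns_fields.h5', 'a') as ff:
        ff.require_group('vorticity/complex')
        # an external link points at a dataset in another file, without copying data
        if '0' not in ff['vorticity/complex']:
            ff['vorticity/complex/0'] = h5py.ExternalLink(
                    'dns_checkpoint_0.h5', 'vorticity/complex/0')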

TurTLE/TEST.py
@@ -188,6 +188,7 @@ class TEST(_code):
         scal_rspace_stats = []
         if self.dns_type in ['Gauss_field_test']:
             vec_spectra_stats.append('velocity')
+            vec_spectra_stats.append('k*velocity')
             vec4_rspace_stats.append('velocity')
             tens_rspace_stats.append('velocity_gradient')
             scal_rspace_stats.append('velocity_divergence')
@@ -377,6 +378,7 @@ class TEST(_code):
             eval('self.simulation_parser_arguments(parser_' + parser + ')')
             eval('self.job_parser_arguments(parser_' + parser + ')')
             eval('self.parameters_to_parser_arguments(parser_' + parser + ')')
+            eval('self.parameters_to_parser_arguments(parser_' + parser + ', self.generate_extra_parameters(dns_type = \'' + parser + '\'))')
         return None

     def prepare_launch(
             self,

TurTLE/_code.py
@@ -4,18 +4,18 @@
 #                                                                    #
 # This file is part of bfps.                                         #
 #                                                                    #
-# TurTLE is free software: you can redistribute it and/or modify
+# TurTLE is free software: you can redistribute it and/or modify     #
 # it under the terms of the GNU General Public License as published  #
 # by the Free Software Foundation, either version 3 of the License,  #
 # or (at your option) any later version.                             #
 #                                                                    #
-# TurTLE is distributed in the hope that it will be useful,
+# TurTLE is distributed in the hope that it will be useful,          #
 # but WITHOUT ANY WARRANTY; without even the implied warranty of     #
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the      #
 # GNU General Public License for more details.                       #
 #                                                                    #
 # You should have received a copy of the GNU General Public License  #
-# along with TurTLE.  If not, see <http://www.gnu.org/licenses/>
+# along with TurTLE.  If not, see <http://www.gnu.org/licenses/>     #
 #                                                                    #
 # Contact: Cristian.Lalescu@ds.mpg.de                                #
 #                                                                    #
@@ -678,9 +678,9 @@ class _code(_base):
         script_file.write('#SBATCH -o ' + out_file + '\n')
         # set up environment
-        script_file.write('#SBATCH --get-user-env\n')
-        script_file.write('#SBATCH --partition={0}\n'.format(self.host_info['environment']))
+        if self.host_info['explicit_slurm_environment']:
+            script_file.write('#SBATCH --partition={0}\n'.format(self.host_info['environment']))
         if 'account' in self.host_info.keys():
             script_file.write('#SBATCH --account={0}\n'.format(self.host_info['account']))
@@ -717,14 +717,19 @@ class _code(_base):
             script_file.write('#SBATCH --mail-type=none\n')
             script_file.write('#SBATCH --time={0}:{1:0>2d}:00\n'.format(hours, minutes))
+            if 'extra_slurm_lines' in self.host_info.keys():
+                for line in self.host_info['extra_slurm_lines']:
+                    script_file.write(line + '\n')
             ## following cleans up environment for job.
+            ## put these in the 'extra_slurm_lines' list if you need them
             ## make sure that "~/.config/TurTLE/bashrc" exists and builds desired job environment
-            script_file.write('#SBATCH --export=NONE\n')
-            script_file.write('#SBATCH --get-user-env\n')
-            script_file.write('source ~/.config/TurTLE/bashrc\n')
+            #script_file.write('#SBATCH --export=NONE\n')
+            #script_file.write('#SBATCH --get-user-env\n')
+            #script_file.write('export OMP_PLACES=cores\n') # or threads, as appropriate
+            # also look up OMP_PROC_BIND and SLURM_HINT
+            #script_file.write('source ~/.config/TurTLE/bashrc\n')
             if nb_threads_per_process > 1:
                 script_file.write('export OMP_NUM_THREADS={0}\n'.format(nb_threads_per_process))
                 script_file.write('export OMP_PLACES=cores\n')
+            script_file.write('export OMP_NUM_THREADS=${SLURM_CPUS_PER_TASK}\n')
             script_file.write('echo "Start time is `date`"\n')
             script_file.write('cd ' + self.work_dir + '\n')
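Note: the new 'extra_slurm_lines' hook above lets site-specific SBATCH directives and environment setup come from host_info instead of being hardcoded; the previously unconditional lines are now kept only as commented-out suggestions. A sketch of how the hook is consumed (keys and values below are examples, not defaults):

    host_info = {
        'environment': 'general',
        'explicit_slurm_environment': True,
        'extra_slurm_lines': [
            '#SBATCH --export=NONE',
            '#SBATCH --get-user-env',
            'export OMP_PLACES=cores',
            'source ~/.config/TurTLE/bashrc',
            ],
        }

    with open('example_job.sh', 'w') as script_file:
        script_file.write('#!/bin/bash -l\n')
        if host_info['explicit_slurm_environment']:
            script_file.write('#SBATCH --partition={0}\n'.format(host_info['environment']))
        for line in host_info.get('extra_slurm_lines', []):
            script_file.write(line + '\n')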

TurTLE/test/test_Gaussian_field.py
@@ -5,7 +5,7 @@ from scipy import trapz
 from scipy.stats import norm
 from scipy.integrate import quad
 import h5py
-import sys
+import sys, os
 import time

 import TurTLE
@@ -17,24 +17,28 @@ except:
     plt = None

 def main():
-    c = TEST()
     # size of grid
-    n = 256
+    n = 1024
     slope = -5./3.
-    k_cutoff = 30.
+    k_cutoff = 64.
     func = lambda k, k_c = k_cutoff, s = slope : k**s*np.exp(-k/k_c)
-    total_energy = quad(func, 1, k_cutoff*4)[0]
+    total_energy = quad(func, 0.6, k_cutoff*8)[0]
     coeff = 1./total_energy
     bin_no = 100
     rseed = int(time.time())
-    c.launch(
+    simname = 'Gaussianity_test'
+    if not os.path.exists(simname + '.h5'):
+        c = TEST()
+        opt = c.launch(
             ['Gauss_field_test',
              '--nx', str(n),
              '--ny', str(n),
              '--nz', str(n),
-             '--simname', 'Gaussianity_test',
+             '--simname', simname,
              '--np', '4',
              '--environment', 'short',
              '--minutes', '60',
              '--ntpp', '1',
              '--wd', './',
              '--histogram_bins', str(bin_no),
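Note: the test normalizes the model spectrum k**slope * exp(-k/k_cutoff) so that its analytic integral over the resolved range equals one; the resulting coefficient is what gets passed as --spectrum_coefficient. The computation, isolated (values as in the updated test above):

    import numpy as np
    from scipy.integrate import quad

    slope = -5./3.
    k_cutoff = 64.
    spectrum = lambda k: k**slope * np.exp(-k / k_cutoff)
    total_energy = quad(spectrum, 0.6, 8 * k_cutoff)[0]
    coeff = 1. / total_energy   # passed to the launcher as '--spectrum_coefficient'
    print(coeff)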
@@ -44,7 +48,7 @@ def main():
              '--spectrum_coefficient', str(coeff),
              '--field_random_seed', str(rseed)] +
             sys.argv[1:])
-    plot_stuff(c.simname, total_energy = total_energy)
+    plot_stuff(simname, total_energy = total_energy)
     return None

 def plot_stuff(simname, total_energy = 1.):
@@ -82,6 +86,7 @@ def plot_stuff(simname, total_energy = 1.):
     f_vel = hist_vel / np.sum(hist_vel, axis = 0, keepdims = True).astype(float) / velbinsize
     print('Energy analytically: {}'.format(total_energy))
+    print(np.sum(energy*np.arange(len(energy))**2))
     print('Energy sum: {}'.format(np.sum(energy*df['kspace/dk'][()])))
     print('Moment sum: {}'.format(df['statistics/moments/velocity'][0, 2, 3]/2))
     print('Velocity variances: {}'.format(trapz(vel[:, None]**2*f_vel, vel[:, None], axis = 0)))
@@ -101,6 +106,17 @@ def plot_stuff(simname, total_energy = 1.):
             df['statistics/moments/velocity_divergence'][0, 2]))
     print('Gradient second moment is: {0}'.format(
             df['statistics/moments/velocity_gradient'][0, 2].mean()))
+    print('----------- k2-premultiplied spectrum -----------')
+    k2func = lambda k, k_c = k_cutoff, s = slope : k**(2 + s)*np.exp(-k/k_c)
+    k2sum_analytic = quad(k2func, 0, k_cutoff*20)[0]
+    print('Analytically: {}'.format(k2sum_analytic))
+    k2spec_trace = (
+            df['statistics/spectra/k*velocity_k*velocity'][..., 0, 0]
+            + df['statistics/spectra/k*velocity_k*velocity'][..., 1, 1]
+            + df['statistics/spectra/k*velocity_k*velocity'][..., 2, 2])
+    print('Energy sum: {}'.format(np.sum(k2spec_trace*df['kspace/dk'][()])/2./coeff))
     df.close()
     return None

TurTLE/test/test_particle_clouds.py
@@ -33,22 +33,22 @@ import sys

 import TurTLE
 from TurTLE import DNS

-def main():
+def basic_test():
     nclouds = 10
     nparticles_per_cloud = 1000
     nparticles = nclouds*nparticles_per_cloud
     niterations = 32
     c = DNS()
     c.dns_type = 'NSVEparticles'
     c.parameters['nparticles'] = nparticles
     c.parameters['tracers1_integration_steps'] = 4
     c.generate_tracer_state(rseed = 2, species = 1)
     del c.parameters['nparticles']
     del c.parameters['tracers1_integration_steps']
+    c.simname = 'basic_cloud_test'
+    f0 = h5py.File(
+            os.path.join(os.path.join(TurTLE.lib_dir, 'test'), 'B32p1e4_checkpoint_0.h5'),
+            'r')
     ic_file = h5py.File(c.get_checkpoint_0_fname(), 'a')
-    ic_file['tracers0/state/0'] = ic_file['tracers1/state/0'][...].reshape(nclouds, nparticles_per_cloud, 3)
-    ic_file['tracers0/rhs/0'] = ic_file['tracers1/rhs/0'][...].reshape(4, nclouds, nparticles_per_cloud, 3)
+    ic_file['tracers0/state/0'] = f0['tracers0/state/0'][...].reshape(nclouds, nparticles_per_cloud, 3)
+    ic_file['tracers0/rhs/0'] = f0['tracers0/rhs/0'][...].reshape(4, nclouds, nparticles_per_cloud, 3)
     ic_file.close()
     c.launch(
             ['NSVEparticles',
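Note: the cloud initial condition above reinterprets a flat list of 10 000 tracer positions as 10 clouds of 1000 particles each; the leading 4 in the rhs reshape matches tracers1_integration_steps = 4, presumably the number of stored right-hand-side slots per particle. The shape bookkeeping in isolation:

    import numpy as np

    nclouds, nparticles_per_cloud = 10, 1000
    state = np.zeros((nclouds * nparticles_per_cloud, 3))        # flat particle positions
    state_clouds = state.reshape(nclouds, nparticles_per_cloud, 3)
    rhs = np.zeros((4, nclouds * nparticles_per_cloud, 3))       # 4 stored rhs slots
    rhs_clouds = rhs.reshape(4, nclouds, nparticles_per_cloud, 3)
    print(state_clouds.shape, rhs_clouds.shape)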
@@ -57,12 +57,14 @@ def main():
              '--forcing_type', 'linear',
              '--src-wd', TurTLE.lib_dir + '/test',
              '--src-iteration', '0',
              '--simname', c.simname,
              '--np', '4',
              '--ntpp', '1',
              '--fftw_plan_rigor', 'FFTW_PATIENT',
              '--niter_todo', '{0}'.format(niterations),
              '--niter_out', '{0}'.format(niterations),
              '--niter_stat', '1',
              '--checkpoints_per_file', '{0}'.format(3),
              '--nparticles', '{0}'.format(nparticles),
              '--njobs', '2',
              '--wd', './'])
@@ -79,6 +81,7 @@ def main():
     x0 = f0['tracers0/state/{0}'.format(iteration)][...]
     x1 = f1