Commit 9de1ae87 authored by Theo Steininger's avatar Theo Steininger

Removed checks for h5py.

parent c0f44792
......@@ -1916,86 +1916,72 @@ class _slicing_distributor(distributor):
else:
return local_data.reshape(temp_local_shape)
if 'h5py' in gdi:
def save_data(self, data, alias, path=None, overwriteQ=True):
    """Write this process's local slab of *data* to HDF5 dataset *alias*.

    Parameters
    ----------
    data : array-like
        Local chunk; written to dset[self.local_start:self.local_end].
    alias : str
        Name of the dataset inside the HDF5 file.
    path : str, optional
        Target file path; defaults to *alias* when omitted.
    overwriteQ : bool
        If False and a dataset named *alias* already exists, raise.

    Raises
    ------
    RuntimeError
        When run under MPI with size > 1 but a non-parallel h5py build.
    ValueError
        When the dataset exists and overwriteQ is False.
    """
    comm = self.comm
    h5py_parallel = h5py.get_config().mpi
    if comm.size > 1 and not h5py_parallel:
        raise RuntimeError("ERROR: Programm is run with MPI " +
                           "size > 1 but non-parallel version of " +
                           "h5py is loaded.")
    # if no path and therefore no filename was given, use the alias
    # as filename
    use_path = alias if path is None else path
    # create the file-handle; parallel driver only with a real MPI module
    if h5py_parallel and gc['mpi_module'] == 'MPI':
        f = h5py.File(use_path, 'a', driver='mpio', comm=comm)
    else:
        f = h5py.File(use_path, 'a')
    # BUGFIX: the original leaked the file handle when the ValueError
    # below (or any later error) was raised; close it on every path.
    try:
        # check if a dataset with name == alias already exists
        try:
            f[alias]
            # if yes, and overwriteQ is set to False, raise an Error
            if overwriteQ is False:
                raise ValueError(about_cstring(
                    "ERROR: overwriteQ is False, but alias already " +
                    "in use!"))
            else:  # if yes, remove the existing dataset
                del f[alias]
        except KeyError:
            # alias not present yet — nothing to remove
            pass
        # create dataset spanning the full (global) shape
        dset = f.create_dataset(alias,
                                shape=self.global_shape,
                                dtype=self.dtype)
        # write only this process's slice of the data
        dset[self.local_start:self.local_end] = data
    finally:
        # close the file
        f.close()
def load_data(self, alias, path):
    """Load this process's local slab from HDF5 dataset *alias*.

    Parameters
    ----------
    alias : str
        Name of the dataset inside the HDF5 file.
    path : str or None
        File path; falls back to *alias* when None.

    Returns
    -------
    array-like
        The slice dset[self.local_start:self.local_end].

    Raises
    ------
    TypeError
        When the stored shape or dtype does not match this object.
    """
    comm = self.comm
    # parse the path
    file_path = path if (path is not None) else alias
    # create the file-handle
    if h5py.get_config().mpi and gc['mpi_module'] == 'MPI':
        f = h5py.File(file_path, 'r', driver='mpio', comm=comm)
    else:
        f = h5py.File(file_path, 'r')
    # BUGFIX: the original leaked the handle when a TypeError was
    # raised below; ensure the file is always closed.
    try:
        dset = f[alias]
        # check shape
        if dset.shape != self.global_shape:
            raise TypeError(about_cstring(
                "ERROR: The shape of the given dataset does not match " +
                "the distributed_data_object."))
        # check dtype
        if dset.dtype != self.dtype:
            raise TypeError(about_cstring(
                "ERROR: The datatype of the given dataset does not " +
                "match the one of the distributed_data_object."))
        # if everything seems to fit, load the local slab
        data = dset[self.local_start:self.local_end]
    finally:
        # close the file
        f.close()
    return data
# NOTE(review): this span is a web-rendered commit diff whose +/- markers
# were stripped; it shows a second copy of save_data with two lines of
# _data_to_hdf5 spliced into its middle, so it is not runnable as-is.
def save_data(self, data, alias, path=None, overwriteQ=True):
comm = self.comm
h5py_parallel = h5py.get_config().mpi
if comm.size > 1 and not h5py_parallel:
raise RuntimeError("ERROR: Programm is run with MPI " +
"size > 1 but non-parallel version of " +
"h5py is loaded.")
# if no path and therefore no filename was given, use the alias
# as filename
use_path = alias if path is None else path
# create the file-handle
if h5py_parallel and gc['mpi_module'] == 'MPI':
f = h5py.File(use_path, 'a', driver='mpio', comm=comm)
else:
f = h5py.File(use_path, 'a')
# check if dataset with name == alias already exists
try:
f[alias]
# if yes, and overwriteQ is set to False, raise an Error
if overwriteQ is False:
raise ValueError(about_cstring(
"ERROR: overwriteQ is False, but alias already " +
"in use!"))
else: # if yes, remove the existing dataset
del f[alias]
except(KeyError):
pass
# NOTE(review): the next two lines belong to _data_to_hdf5, not to
# save_data — diff interleaving; save_data continues below them.
def _data_to_hdf5(self, hdf5_dataset, data):
hdf5_dataset[self.local_start:self.local_end] = data
# create dataset
dset = f.create_dataset(alias,
shape=self.global_shape,
dtype=self.dtype)
# write the data
dset[self.local_start:self.local_end] = data
# close the file
f.close()
# NOTE(review): fallback branch paired with the "if 'h5py' in gdi:" guard
# above in the original file (diff residue); when h5py is absent these
# stubs fail loudly with ImportError instead of failing silently.
else:
def save_data(self, *args, **kwargs):
raise ImportError(about_cstring(
"ERROR: h5py is not available"))
def load_data(self, *args, **kwargs):
raise ImportError(about_cstring(
"ERROR: h5py is not available"))
def load_data(self, alias, path):
    """Load this process's local slab from HDF5 dataset *alias*.

    Parameters
    ----------
    alias : str
        Name of the dataset inside the HDF5 file.
    path : str or None
        File path; falls back to *alias* when None.

    Returns
    -------
    array-like
        The slice dset[self.local_start:self.local_end].

    Raises
    ------
    TypeError
        When the stored shape or dtype does not match this object.
    """
    comm = self.comm
    # parse the path
    file_path = path if (path is not None) else alias
    # create the file-handle
    if h5py.get_config().mpi and gc['mpi_module'] == 'MPI':
        f = h5py.File(file_path, 'r', driver='mpio', comm=comm)
    else:
        f = h5py.File(file_path, 'r')
    # BUGFIX: the original leaked the handle when a TypeError was
    # raised below; ensure the file is always closed.
    try:
        dset = f[alias]
        # check shape
        if dset.shape != self.global_shape:
            raise TypeError(about_cstring(
                "ERROR: The shape of the given dataset does not match " +
                "the distributed_data_object."))
        # check dtype
        if dset.dtype != self.dtype:
            raise TypeError(about_cstring(
                "ERROR: The datatype of the given dataset does not " +
                "match the one of the distributed_data_object."))
        # if everything seems to fit, load the local slab
        data = dset[self.local_start:self.local_end]
    finally:
        # close the file
        f.close()
    return data
def _data_to_hdf5(self, *args, **kwargs):
    """Placeholder used when h5py could not be imported; always raises."""
    message = about_cstring("ERROR: h5py is not available")
    raise ImportError(message)
def _data_to_hdf5(self, hdf5_dataset, data):
hdf5_dataset[self.local_start:self.local_end] = data
def get_iter(self, d2o):
    """Return the slicing-distributor iterator wrapping *d2o*."""
    iterator = d2o_slicing_iter(d2o)
    return iterator
......@@ -2284,88 +2270,74 @@ class _not_distributor(distributor):
a = obj.get_local_data(copy=False)
return np.searchsorted(a=a, v=v, side=side)
if 'h5py' in gdi:
def save_data(self, data, alias, path=None, overwriteQ=True):
    """Store *data* as HDF5 dataset *alias*; only rank 0 writes.

    Parameters
    ----------
    data : array-like
        Full (non-distributed) data; written to the whole dataset.
    alias : str
        Name of the dataset inside the HDF5 file.
    path : str, optional
        Target file path; defaults to *alias* when omitted.
    overwriteQ : bool
        If False and a dataset named *alias* already exists, raise.

    Raises
    ------
    RuntimeError
        When run under MPI with size > 1 but a non-parallel h5py build.
    ValueError
        When the dataset exists and overwriteQ is False.
    """
    comm = self.comm
    h5py_parallel = h5py.get_config().mpi
    if comm.size > 1 and not h5py_parallel:
        raise RuntimeError("ERROR: Programm is run with MPI " +
                           "size > 1 but non-parallel version of " +
                           "h5py is loaded.")
    # if no path and therefore no filename was given, use the alias
    # as filename
    use_path = alias if path is None else path
    # create the file-handle
    if h5py_parallel and gc['mpi_module'] == 'MPI':
        f = h5py.File(use_path, 'a', driver='mpio', comm=comm)
    else:
        f = h5py.File(use_path, 'a')
    # BUGFIX: the original leaked the file handle when the ValueError
    # below (or any later error) was raised; close it on every path.
    try:
        # check if a dataset with name == alias already exists
        try:
            f[alias]
            # if yes, and overwriteQ is set to False, raise an Error
            if overwriteQ is False:
                raise ValueError(about_cstring(
                    "ERROR: overwriteQ == False, but alias already " +
                    "in use!"))
            else:  # if yes, remove the existing dataset
                del f[alias]
        except KeyError:
            # alias not present yet — nothing to remove
            pass
        # create dataset
        dset = f.create_dataset(alias,
                                shape=self.global_shape,
                                dtype=self.dtype)
        # data is not distributed, so a single writer (rank 0) suffices
        if comm.rank == 0:
            dset[:] = data
    finally:
        # close the file
        f.close()
def load_data(self, alias, path):
    """Load the whole dataset *alias* (data is not distributed).

    Parameters
    ----------
    alias : str
        Name of the dataset inside the HDF5 file.
    path : str or None
        File path; falls back to *alias* when None.

    Returns
    -------
    array-like
        The complete dataset contents (dset[:]).

    Raises
    ------
    TypeError
        When the stored shape or dtype does not match this object.
    """
    comm = self.comm
    # parse the path
    file_path = path if (path is not None) else alias
    # create the file-handle
    if h5py.get_config().mpi and gc['mpi_module'] == 'MPI':
        f = h5py.File(file_path, 'r', driver='mpio', comm=comm)
    else:
        f = h5py.File(file_path, 'r')
    # BUGFIX: the original leaked the handle when a TypeError was
    # raised below; ensure the file is always closed.
    try:
        dset = f[alias]
        # check shape
        if dset.shape != self.global_shape:
            raise TypeError(about_cstring(
                "ERROR: The shape of the given dataset does not match " +
                "the distributed_data_object."))
        # check dtype
        if dset.dtype != self.dtype:
            raise TypeError(about_cstring(
                "ERROR: The datatype of the given dataset does not " +
                "match the distributed_data_object."))
        # if everything seems to fit, load the full data
        data = dset[:]
    finally:
        # close the file
        f.close()
    return data
def _data_to_hdf5(self, hdf5_dataset, data):
if self.comm.rank == 0:
hdf5_dataset[:] = data
# NOTE(review): start of the no-h5py fallback branch (commit-diff residue,
# +/- markers stripped); save_data becomes a stub that raises ImportError.
else:
def save_data(self, *args, **kwargs):
raise ImportError(about_cstring(
"ERROR: h5py is not available"))
# NOTE(review): this span is a web-rendered commit diff whose +/- markers
# were stripped; a load_data stub is spliced into the middle of this
# second copy of save_data, so the span is not runnable as-is.
def save_data(self, data, alias, path=None, overwriteQ=True):
comm = self.comm
h5py_parallel = h5py.get_config().mpi
if comm.size > 1 and not h5py_parallel:
raise RuntimeError("ERROR: Programm is run with MPI " +
"size > 1 but non-parallel version of " +
"h5py is loaded.")
# if no path and therefore no filename was given, use the alias
# as filename
use_path = alias if path is None else path
# create the file-handle
if h5py_parallel and gc['mpi_module'] == 'MPI':
f = h5py.File(use_path, 'a', driver='mpio', comm=comm)
else:
f = h5py.File(use_path, 'a')
# check if dataset with name == alias already exists
try:
f[alias]
# if yes, and overwriteQ is set to False, raise an Error
if overwriteQ is False:
raise ValueError(about_cstring(
"ERROR: overwriteQ == False, but alias already " +
"in use!"))
else: # if yes, remove the existing dataset
del f[alias]
except(KeyError):
pass
# NOTE(review): the next three lines are a load_data stub from the
# removed no-h5py branch — diff interleaving; save_data continues below.
def load_data(self, *args, **kwargs):
raise ImportError(about_cstring(
"ERROR: h5py is not available"))
# create dataset
dset = f.create_dataset(alias,
shape=self.global_shape,
dtype=self.dtype)
# write the data
if comm.rank == 0:
dset[:] = data
# close the file
f.close()
def load_data(self, alias, path):
    """Load the whole dataset *alias* (data is not distributed).

    Parameters
    ----------
    alias : str
        Name of the dataset inside the HDF5 file.
    path : str or None
        File path; falls back to *alias* when None.

    Returns
    -------
    array-like
        The complete dataset contents (dset[:]).

    Raises
    ------
    TypeError
        When the stored shape or dtype does not match this object.
    """
    comm = self.comm
    # parse the path
    file_path = path if (path is not None) else alias
    # create the file-handle
    if h5py.get_config().mpi and gc['mpi_module'] == 'MPI':
        f = h5py.File(file_path, 'r', driver='mpio', comm=comm)
    else:
        f = h5py.File(file_path, 'r')
    # BUGFIX: the original leaked the handle when a TypeError was
    # raised below; ensure the file is always closed.
    try:
        dset = f[alias]
        # check shape
        if dset.shape != self.global_shape:
            raise TypeError(about_cstring(
                "ERROR: The shape of the given dataset does not match " +
                "the distributed_data_object."))
        # check dtype
        if dset.dtype != self.dtype:
            raise TypeError(about_cstring(
                "ERROR: The datatype of the given dataset does not " +
                "match the distributed_data_object."))
        # if everything seems to fit, load the full data
        data = dset[:]
    finally:
        # close the file
        f.close()
    return data
def _data_to_hdf5(self, *args, **kwargs):
    """Placeholder used when h5py could not be imported; always raises."""
    message = about_cstring("ERROR: h5py is not available")
    raise ImportError(message)
def _data_to_hdf5(self, hdf5_dataset, data):
if self.comm.rank == 0:
hdf5_dataset[:] = data
def get_iter(self, d2o):
    """Return the not-distributor iterator wrapping *d2o*."""
    iterator = d2o_not_iter(d2o)
    return iterator
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.