Commit 9de1ae87 authored by Theo Steininger's avatar Theo Steininger

Removed checks for h5py.

parent c0f44792
...@@ -1916,86 +1916,72 @@ class _slicing_distributor(distributor): ...@@ -1916,86 +1916,72 @@ class _slicing_distributor(distributor):
else: else:
return local_data.reshape(temp_local_shape) return local_data.reshape(temp_local_shape)
def save_data(self, data, alias, path=None, overwriteQ=True):
    """Write this process' local slab of *data* to an HDF5 dataset.

    Parameters
    ----------
    data : array-like
        Local data chunk; written into the slab
        ``[self.local_start:self.local_end]`` of the global dataset.
    alias : str
        Name of the dataset inside the HDF5 file.
    path : str or None
        Target file path; when None, *alias* is used as the filename.
    overwriteQ : bool
        If False and a dataset named *alias* already exists, raise.

    Raises
    ------
    RuntimeError
        If running under MPI with more than one process while h5py was
        built without parallel (mpio) support.
    ValueError
        If the dataset already exists and overwriteQ is False.
    """
    comm = self.comm
    h5py_parallel = h5py.get_config().mpi
    if comm.size > 1 and not h5py_parallel:
        raise RuntimeError("ERROR: Programm is run with MPI " +
                           "size > 1 but non-parallel version of " +
                           "h5py is loaded.")
    # if no path and therefore no filename was given, use the alias
    # as filename
    use_path = alias if path is None else path

    # create the file-handle; use the parallel mpio driver only when
    # both h5py and the configured MPI module support it
    if h5py_parallel and gc['mpi_module'] == 'MPI':
        f = h5py.File(use_path, 'a', driver='mpio', comm=comm)
    else:
        f = h5py.File(use_path, 'a')
    try:
        # check if a dataset with name == alias already exists
        try:
            f[alias]
            # if yes, and overwriteQ is set to False, raise an Error
            if overwriteQ is False:
                raise ValueError(about_cstring(
                    "ERROR: overwriteQ is False, but alias already " +
                    "in use!"))
            else:  # if yes, remove the existing dataset
                del f[alias]
        except KeyError:
            pass

        # create the dataset with the global shape; each process only
        # fills its own slab
        dset = f.create_dataset(alias,
                                shape=self.global_shape,
                                dtype=self.dtype)
        # write the local slab of the data
        dset[self.local_start:self.local_end] = data
    finally:
        # close the file even if a check above raised
        f.close()

def load_data(self, alias, path):
    """Load this process' local slab of dataset *alias* from an HDF5 file.

    Parameters
    ----------
    alias : str
        Name of the dataset inside the HDF5 file.
    path : str or None
        File path; when None, *alias* is used as the filename.

    Returns
    -------
    The slab ``[self.local_start:self.local_end]`` of the stored dataset.

    Raises
    ------
    TypeError
        If the stored dataset's shape or dtype does not match this
        distributed_data_object.
    """
    comm = self.comm
    # parse the path
    file_path = path if (path is not None) else alias
    # create the file-handle (parallel mpio driver when available)
    if h5py.get_config().mpi and gc['mpi_module'] == 'MPI':
        f = h5py.File(file_path, 'r', driver='mpio', comm=comm)
    else:
        f = h5py.File(file_path, 'r')
    try:
        dset = f[alias]
        # check shape
        if dset.shape != self.global_shape:
            raise TypeError(about_cstring(
                "ERROR: The shape of the given dataset does not match " +
                "the distributed_data_object."))
        # check dtype
        if dset.dtype != self.dtype:
            raise TypeError(about_cstring(
                "ERROR: The datatype of the given dataset does not " +
                "match the one of the distributed_data_object."))
        # if everything seems to fit, load the local slab of the data
        data = dset[self.local_start:self.local_end]
    finally:
        # close the file even when a shape/dtype check raised
        f.close()
    return data

def _data_to_hdf5(self, hdf5_dataset, data):
    """Write the local slab of *data* into an already-open HDF5 dataset."""
    hdf5_dataset[self.local_start:self.local_end] = data
def get_iter(self, d2o):
    """Return a slicing-aware iterator over the given d2o."""
    return d2o_slicing_iter(d2o)
...@@ -2284,88 +2270,74 @@ class _not_distributor(distributor): ...@@ -2284,88 +2270,74 @@ class _not_distributor(distributor):
a = obj.get_local_data(copy=False) a = obj.get_local_data(copy=False)
return np.searchsorted(a=a, v=v, side=side) return np.searchsorted(a=a, v=v, side=side)
def save_data(self, data, alias, path=None, overwriteQ=True):
    """Write the (undistributed) *data* to an HDF5 dataset.

    Only rank 0 writes; the data is not distributed, so every process
    holds the full array.

    Parameters
    ----------
    data : array-like
        Full data array; written to the whole dataset by rank 0.
    alias : str
        Name of the dataset inside the HDF5 file.
    path : str or None
        Target file path; when None, *alias* is used as the filename.
    overwriteQ : bool
        If False and a dataset named *alias* already exists, raise.

    Raises
    ------
    RuntimeError
        If running under MPI with more than one process while h5py was
        built without parallel (mpio) support.
    ValueError
        If the dataset already exists and overwriteQ is False.
    """
    comm = self.comm
    h5py_parallel = h5py.get_config().mpi
    if comm.size > 1 and not h5py_parallel:
        raise RuntimeError("ERROR: Programm is run with MPI " +
                           "size > 1 but non-parallel version of " +
                           "h5py is loaded.")
    # if no path and therefore no filename was given, use the alias
    # as filename
    use_path = alias if path is None else path

    # create the file-handle; use the parallel mpio driver only when
    # both h5py and the configured MPI module support it
    if h5py_parallel and gc['mpi_module'] == 'MPI':
        f = h5py.File(use_path, 'a', driver='mpio', comm=comm)
    else:
        f = h5py.File(use_path, 'a')
    try:
        # check if a dataset with name == alias already exists
        try:
            f[alias]
            # if yes, and overwriteQ is set to False, raise an Error
            if overwriteQ is False:
                raise ValueError(about_cstring(
                    "ERROR: overwriteQ == False, but alias already " +
                    "in use!"))
            else:  # if yes, remove the existing dataset
                del f[alias]
        except KeyError:
            pass

        # create the dataset
        dset = f.create_dataset(alias,
                                shape=self.global_shape,
                                dtype=self.dtype)
        # write the data; only rank 0 writes since the data is not
        # distributed
        if comm.rank == 0:
            dset[:] = data
    finally:
        # close the file even if a check above raised
        f.close()

def load_data(self, alias, path):
    """Load the full dataset *alias* from an HDF5 file.

    Parameters
    ----------
    alias : str
        Name of the dataset inside the HDF5 file.
    path : str or None
        File path; when None, *alias* is used as the filename.

    Returns
    -------
    The complete stored dataset (the data is not distributed).

    Raises
    ------
    TypeError
        If the stored dataset's shape or dtype does not match this
        distributed_data_object.
    """
    comm = self.comm
    # parse the path
    file_path = path if (path is not None) else alias
    # create the file-handle (parallel mpio driver when available)
    if h5py.get_config().mpi and gc['mpi_module'] == 'MPI':
        f = h5py.File(file_path, 'r', driver='mpio', comm=comm)
    else:
        f = h5py.File(file_path, 'r')
    try:
        dset = f[alias]
        # check shape
        if dset.shape != self.global_shape:
            raise TypeError(about_cstring(
                "ERROR: The shape of the given dataset does not match " +
                "the distributed_data_object."))
        # check dtype
        if dset.dtype != self.dtype:
            raise TypeError(about_cstring(
                "ERROR: The datatype of the given dataset does not " +
                "match the distributed_data_object."))
        # if everything seems to fit, load the full data
        data = dset[:]
    finally:
        # close the file even when a shape/dtype check raised
        f.close()
    return data

def _data_to_hdf5(self, hdf5_dataset, data):
    """Write *data* into an already-open HDF5 dataset (rank 0 only)."""
    if self.comm.rank == 0:
        hdf5_dataset[:] = data
def get_iter(self, d2o):
    """Return a non-distributed iterator over the given d2o."""
    return d2o_not_iter(d2o)
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment