Commit 106eb43e authored by Himanen, Lauri (himanel1)

Now the trajectory reading takes into account the CP2K print settings.

parent 78901d61
Showing with 10358 additions and 80 deletions
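The changes below make the geometry-optimization parser honour the MOTION/PRINT/TRAJECTORY settings when it attaches coordinates from the external trajectory file: a step only receives positions if it matches the EACH/GEO_OPT stride, or if it is the last step and ADD_LAST is NUMERIC or SYMBOLIC. A minimal sketch of that selection rule (an illustrative helper, not part of the parser; the real logic lives in the onClose handlers in the diff below):

def steps_with_positions(n_steps, each, add_last):
    """Return the 0-based optimization steps that receive coordinates,
    mirroring the `(i_step + 1) % each == 0 or (last step and add_last)`
    condition used in the geometry-optimization parser below."""
    last_step = n_steps - 1
    return [i for i in range(n_steps)
            if (i + 1) % each == 0 or (i == last_step and add_last)]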
@@ -76,11 +76,11 @@ def iread(filename, file_format=None):
     try:
         io = ase.io.formats.get_ioformat(file_format)
     except ValueError:
-        logger.error("MDTraj could not read the file '{}' with format '{}'. The contents might be malformed or wrong format used.".format(filename, file_format))
+        logger.error("MDTraj could not read the file '{}' with format '{}'. If MDTraj is supposed to read this format, the contents might be malformed.".format(filename, file_format))
         return
     else:
         # Return the positions in a numpy array instead of an ASE Atoms object
-        generator = ase.io.iread(filename, index, file_format)
+        generator = ase.io.iread(filename, format=file_format)
         for atoms in generator:
             pos = atoms.positions
             yield pos
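For reference, a small standalone sketch of the corrected ase.io.iread call (assuming ASE is installed; the file name is illustrative, not taken from the repository):

import ase.io

# Lazily iterate over the frames of an XYZ trajectory and pull out the
# Cartesian positions as numpy arrays, as the generator above does.
for atoms in ase.io.iread("trajectory.xyz", format="xyz"):
    pos = atoms.positions  # (n_atoms, 3) array
    print(pos.shape)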
@@ -93,6 +93,12 @@ class CP2KInput(object):
         else:
             return (None, section)

+    def get_keyword(self, path, format_value=True):
+        if format_value:
+            return self.get_keyword_value_formatted(path)
+        else:
+            return self.get_keyword_value_raw(path)
+
     def get_keyword_value_formatted(self, path):
         """
         """
@@ -100,12 +106,12 @@ class CP2KInput(object):
         if keyword:
             return keyword.get_value_formatted()

-    def get_keyword_value(self, path):
+    def get_keyword_value_raw(self, path):
         """
         """
         keyword, section = self.get_keyword_and_section(path)
         if keyword:
-            return keyword.get_value()
+            return keyword.get_value_raw()

     def get_default_keyword(self, path):
         return self.get_section(path).default_keyword.value
@@ -181,7 +187,7 @@ class Keyword(InputObject):
         self.default_value = default_value
         self.default_name = default_name

-    def get_value(self):
+    def get_value_raw(self):
         """Returns the unformatted value of this keyword. This is exactly what
         was set by the used in the input as a string.
         """
@@ -327,12 +333,12 @@ class Section(object):
             value = keyword_object.get_value_formatted()
         return value

-    def get_keyword_value(self, name):
+    def get_keyword_value_raw(self, name):
         """Returns the keyword value as a raw string as specfied by the used.
         """
         keyword_object = self.get_keyword_object(name)
         if keyword_object is not None:
-            return keyword_object.get_value()
+            return keyword_object.get_value_raw()

     def get_subsection(self, name):
         subsection = self.sections.get(name)
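A brief usage sketch of the new convenience accessor (illustrative; input_tree stands for a parsed CP2KInput instance):

# By default get_keyword() returns the formatted value; format_value=False
# returns the raw string exactly as written in the CP2K input file.
add_last = input_tree.get_keyword("MOTION/PRINT/TRAJECTORY/ADD_LAST")
add_last_raw = input_tree.get_keyword("MOTION/PRINT/TRAJECTORY/ADD_LAST", format_value=False)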
@@ -46,7 +46,7 @@ def generate_object_tree(xml_file, for_metainfo=False):

 #===============================================================================
-def recursive_tree_generation(xml_element, for_metainfo=False, name_stack=[]):
+def recursive_tree_generation(xml_element, for_metainfo=False, name_stack=[], ignore=True):

     # Make new section object for the root
     section_name_element = xml_element.find("NAME")
@@ -59,17 +59,19 @@ def recursive_tree_generation(xml_element, for_metainfo=False, name_stack=[]):
     name_stack.append(section_name)

     # Ignore most sections that control the print settings
-    ignored = ["EACH", "PRINT"]
-    if section_name in ignored:
-        kept_print_settings = [
-            "CP2K_INPUT/FORCE_EVAL/PRINT",
-            "CP2K_INPUT/MOTION/PRINT",
-        ]
-        name = "/".join(name_stack)
-        if "/".join(name_stack) in kept_print_settings:
-            print "KEPT {}".format(name)
-        else:
-            return
+    if ignore:
+        ignored = ["EACH", "PRINT"]
+        if section_name in ignored:
+            kept_print_settings = [
+                "CP2K_INPUT/FORCE_EVAL/PRINT",
+                "CP2K_INPUT/MOTION/PRINT",
+            ]
+            name = "/".join(name_stack)
+            if "/".join(name_stack) in kept_print_settings:
+                print "KEPT {}".format(name)
+                ignore = False
+            else:
+                return

     if for_metainfo:
         # Descriptions
@@ -186,7 +188,7 @@ def recursive_tree_generation(xml_element, for_metainfo=False, name_stack=[]):
     # Sections
     for sub_section_element in xml_element.findall("SECTION"):
-        sub_section = recursive_tree_generation(sub_section_element, for_metainfo, name_stack[::1])
+        sub_section = recursive_tree_generation(sub_section_element, for_metainfo, name_stack[::1], ignore)
         if sub_section is not None:
             section.sections[sub_section.name].append(sub_section)
@@ -286,13 +288,13 @@ def generate_section_metainfo_json(child, parent, name_stack):
 if __name__ == "__main__":

     # xml to pickle
-    # xml_file = open("../versions/cp2k262/input_data/cp2k_input.xml", 'r')
-    # object_tree = CP2KInput(generate_object_tree(xml_file))
-    # file_name = "../versions/cp2k262/input_data/cp2k_input_tree.pickle"
-    # fh = open(file_name, "wb")
-    # pickle.dump(object_tree, fh, protocol=2)
+    xml_file = open("../versions/cp2k262/input_data/cp2k_input.xml", 'r')
+    object_tree = CP2KInput(generate_object_tree(xml_file))
+    file_name = "../versions/cp2k262/input_data/cp2k_input_tree.pickle"
+    fh = open(file_name, "wb")
+    pickle.dump(object_tree, fh, protocol=2)

     # Metainfo generation
-    xml_file = open("../versions/cp2k262/input_data/cp2k_input.xml", 'r')
-    object_tree = CP2KInput(generate_object_tree(xml_file, for_metainfo=True))
-    generate_input_metainfos(object_tree)
+    # xml_file = open("../versions/cp2k262/input_data/cp2k_input.xml", 'r')
+    # object_tree = CP2KInput(generate_object_tree(xml_file, for_metainfo=True))
+    # generate_input_metainfos(object_tree)
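The net effect of the new ignore flag is that the PRINT subtrees under FORCE_EVAL and MOTION are now kept in the generated input tree, while all other PRINT/EACH sections are still skipped. Roughly (an illustrative sketch, not the generator code):

KEPT_PRINT_SETTINGS = {"CP2K_INPUT/FORCE_EVAL/PRINT", "CP2K_INPUT/MOTION/PRINT"}

def keep_section(name_stack, ignore=True):
    """Return (keep_this_section, ignore_flag_for_children) for a path such as
    ["CP2K_INPUT", "MOTION", "PRINT"]."""
    if ignore and name_stack[-1] in ("EACH", "PRINT"):
        if "/".join(name_stack) in KEPT_PRINT_SETTINGS:
            return True, False   # whitelisted: keep it and stop filtering below
        return False, ignore     # any other print-control section is dropped
    return True, ignore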
@@ -249,7 +249,6 @@ class CommonMatcher(object):
         self.cache_service.push_value("number_of_atoms")
         self.cache_service.push_array_values("simulation_cell", unit="angstrom")
         self.cache_service.push_array_values("configuration_periodic_dimensions")
-        self.cache_service.push_array_values("atom_positions", unit="angstrom")
         self.cache_service.push_array_values("atom_labels")

     def onClose_section_single_configuration_calculation(self, backend, gIndex, section):
@@ -73,7 +73,7 @@ class CP2KGeoOptParser(MainHierarchicalParser):
                 endReStr=" Conv. in RMS gradients =",
                 name="geooptstep",
                 repeats=True,
-                sections=["section_single_configuration_calculation", "section_system"],
+                # sections=["section_single_configuration_calculation", "section_system"],
                 subMatchers=[
                     SM( "",
                         forwardMatch=True,
@@ -131,7 +131,7 @@ class CP2KGeoOptParser(MainHierarchicalParser):
                             SM( " RMS gradient =\s+(?P<x_cp2k_optimization_rms_gradient__bohr_1hartree>{})".format(self.cm.regex_f)),
                             SM( " Conv. in RMS gradients =\s+(?P<x_cp2k_optimization_rms_gradient_convergence>{})".format(self.cm.regex_word)),
                         ],
-                        adHoc=self.adHoc_step()
+                        # adHoc=self.adHoc_step()
                     ),
                 ]
             ),
@@ -140,11 +140,11 @@ class CP2KGeoOptParser(MainHierarchicalParser):
                 otherMetaInfo=["geometry_optimization_converged"]
             ),
             SM( " Reevaluating energy at the minimum",
-                sections=["x_cp2k_section_geometry_optimization_energy_reevaluation", "section_system"],
+                sections=["x_cp2k_section_geometry_optimization_energy_reevaluation"],
                 subMatchers=[
                     self.cm.quickstep_calculation(),
                 ],
-                adHoc=self.adHoc_step()
+                # adHoc=self.adHoc_step()
             ),
         ],
     )
@@ -182,9 +182,7 @@ class CP2KGeoOptParser(MainHierarchicalParser):
             self.cache_service["frame_sequence_potential_energy"].append(energy)

         # Push values from cache
-        self.cache_service.push_value("number_of_frames_in_sequence")
         self.cache_service.push_array_values("frame_sequence_potential_energy")
-        self.cache_service.push_array_values("frame_sequence_local_frames_ref")
         self.cache_service.push_value("geometry_optimization_method")
         self.backend.addValue("frame_sequence_to_sampling_ref", 0)
@@ -201,6 +199,35 @@ class CP2KGeoOptParser(MainHierarchicalParser):
             "geometry_optimization_threshold_force",
         )

+        # Push the information into single configuration and system
+        steps = section["x_cp2k_section_geometry_optimization_step"]
+        each = self.cache_service["each_geo_opt"]
+        add_last = False
+        add_last_setting = self.cache_service["traj_add_last"]
+        if add_last_setting == "NUMERIC" or add_last_setting == "SYMBOLIC":
+            add_last = True
+
+        # Push the trajectory
+        n_steps = len(steps) + 1
+        last_step = n_steps - 1
+        for i_step in range(n_steps):
+            singleId = backend.openSection("section_single_configuration_calculation")
+            systemId = backend.openSection("section_system")
+            if self.traj_iterator is not None:
+                if (i_step + 1) % each == 0 or (i_step == last_step and add_last):
+                    try:
+                        pos = next(self.traj_iterator)
+                    except StopIteration:
+                        logger.error("Could not get the next geometries from an external file. It seems that the number of optimization steps in the CP2K outpufile doesn't match the number of steps found in the external trajectory file.")
+                    else:
+                        backend.addArrayValues("atom_positions", pos, unit="angstrom")
+            backend.closeSection("section_system", systemId)
+            backend.closeSection("section_single_configuration_calculation", singleId)
+
+        self.cache_service.push_array_values("frame_sequence_local_frames_ref")
+        backend.addValue("number_of_frames_in_sequence", n_steps)
+
     def onClose_section_sampling_method(self, backend, gIndex, section):
         self.backend.addValue("sampling_method", "geometry_optimization")
@@ -256,20 +283,11 @@ class CP2KGeoOptParser(MainHierarchicalParser):
                 self.cache_service["geometry_optimization_method"] = "bfgs"
             return wrapper

-    def adHoc_step(self):
-        """Called when all the step information has been retrieved from the
-        output file. Here further information is gathered from external files.
-        """
-        def wrapper(parser):
-            self.cache_service["number_of_frames_in_sequence"] += 1
-
-            # Get the next position from the trajectory file
-            if self.traj_iterator is not None:
-                try:
-                    pos = next(self.traj_iterator)
-                except StopIteration:
-                    logger.error("Could not get the next geometries from an external file. It seems that the number of optimization steps in the CP2K outpufile doesn't match the number of steps found in the external trajectory file.")
-                else:
-                    self.cache_service["atom_positions"] = pos
-        return wrapper
+    # def adHoc_step(self):
+    #     """Called when all the step information has been retrieved from the
+    #     output file. Here further information is gathered from external files.
+    #     """
+    #     def wrapper(parser):
+    #         self.cache_service["number_of_frames_in_sequence"] += 1
+    #     return wrapper
No preview for this file type
@@ -116,9 +116,9 @@ class CP2KInputParser(BasicParser):
         if pbe.accessed:
             sp = pbe.get_section_parameter()
             if sp == "T":
-                parametrization = pbe.get_keyword_value_formatted("PARAMETRIZATION")
-                scale_x = pbe.get_keyword_value_formatted("SCALE_X")
-                scale_c = pbe.get_keyword_value_formatted("SCALE_C")
+                parametrization = pbe.get_keyword("PARAMETRIZATION")
+                scale_x = pbe.get_keyword("SCALE_X")
+                scale_c = pbe.get_keyword("SCALE_C")
                 if parametrization == "ORIG":
                     xc_list.append(XCFunctional("GGA_X_PBE", scale_x))
                     xc_list.append(XCFunctional("GGA_C_PBE", scale_c))
@@ -133,12 +133,11 @@ class CP2KInputParser(BasicParser):
         if tpss.accessed:
             sp = tpss.get_section_parameter()
             if sp == "T":
-                scale_x = tpss.get_keyword_value_formatted("SCALE_X")
-                scale_c = tpss.get_keyword_value_formatted("SCALE_C")
+                scale_x = tpss.get_keyword("SCALE_X")
+                scale_c = tpss.get_keyword("SCALE_C")
                 xc_list.append(XCFunctional("MGGA_X_TPSS", scale_x))
                 xc_list.append(XCFunctional("MGGA_C_TPSS", scale_c))
-

         # Sort the functionals alphabetically by name
         xc_list.sort(key=lambda x: x.name)
         xc_summary = ""
@@ -166,7 +165,7 @@ class CP2KInputParser(BasicParser):
         #=======================================================================
         # Cell periodicity
-        periodicity = self.input_tree.get_keyword_value_formatted("FORCE_EVAL/SUBSYS/CELL/PERIODIC")
+        periodicity = self.input_tree.get_keyword("FORCE_EVAL/SUBSYS/CELL/PERIODIC")
         if periodicity is not None:
             periodicity = periodicity.upper()
             periodicity_list = ("X" in periodicity, "Y" in periodicity, "Z" in periodicity)
@@ -179,12 +178,12 @@ class CP2KInputParser(BasicParser):
         self.setup_force_file_name()

         #=======================================================================
-        # Trajectory file name
+        # Trajectory file name and print settings
         self.setup_trajectory_file_name()

         #=======================================================================
         # Stress tensor calculation method
-        stress_tensor_method = self.input_tree.get_keyword_value_formatted("FORCE_EVAL/STRESS_TENSOR")
+        stress_tensor_method = self.input_tree.get_keyword("FORCE_EVAL/STRESS_TENSOR")
         if stress_tensor_method != "NONE":
             mapping = {
                 "NUMERICAL": "Numerical",
@@ -208,7 +207,7 @@ class CP2KInputParser(BasicParser):
             normalized_path = path
         # Path is relative, project name added
         else:
-            project_name = self.input_tree.get_keyword_value_formatted("GLOBAL/PROJECT_NAME")
+            project_name = self.input_tree.get_keyword("GLOBAL/PROJECT_NAME")
             if path:
                 normalized_path = "{}-{}".format(project_name, path)
             else:
@@ -218,7 +217,7 @@ class CP2KInputParser(BasicParser):
     def setup_force_file_name(self):
         """Setup the force file path.
         """
-        force_file = self.input_tree.get_keyword_value_formatted("FORCE_EVAL/PRINT/FORCES/FILENAME")
+        force_file = self.input_tree.get_keyword("FORCE_EVAL/PRINT/FORCES/FILENAME")
         extension = "xyz"
         if force_file is not None and force_file != "__STD_OUT__":
             normalized_path = self.normalize_x_cp2k_path(force_file)
@@ -228,12 +227,10 @@ class CP2KInputParser(BasicParser):
     def setup_trajectory_file_name(self):
         """Setup the trajectory file path.
         """
-        traj_format = self.input_tree.get_keyword_value_formatted("MOTION/PRINT/TRAJECTORY/FORMAT")
-        traj_filename = self.input_tree.get_keyword_value_formatted("MOTION/PRINT/TRAJECTORY/FILENAME")
-        geo_opt_each = self.input_tree.get_keyword_value_formatted("MOTION/PRINT/TRAJECTORY/EACH/GEO_OPT")
-        traj_add_last = self.input_tree.get_keyword_value_formatted("MOTION/PRINT/TRAJECTORY/ADD_LAST")
-        self.cache_service["each_geo_opt"] = geo_opt_each
-        self.cache_service["traj_add_last"] = traj_add_last
+        traj_format = self.input_tree.get_keyword("MOTION/PRINT/TRAJECTORY/FORMAT")
+        traj_filename = self.input_tree.get_keyword("MOTION/PRINT/TRAJECTORY/FILENAME")
+        self.cache_service["traj_add_last"] = self.input_tree.get_keyword("MOTION/PRINT/TRAJECTORY/ADD_LAST")
+        self.cache_service["each_geo_opt"] = self.input_tree.get_keyword("MOTION/PRINT/TRAJECTORY/EACH/GEO_OPT")
         if traj_filename is None:
             traj_filename = ""
         self.cache_service["trajectory_format"] = traj_format
@@ -85,5 +85,11 @@ class CP2KSinglePointParser(MainHierarchicalParser):
             section.add_latest_array_values("x_cp2k_stress_tensor", "stress_tensor")
             backend.closeSection("section_stress_tensor", gId)

+    def onClose_section_system(self, backend, gIndex, section):
+        """Stores the index of the section method. Should always be 0, but
+        let's get it dynamically just in case there's something wrong.
+        """
+        self.cache_service.push_array_values("atom_positions", unit="angstrom")
+
     #===========================================================================
     # adHoc functions
3
i = 2, E = -17.1941015290
O 12.2353220000 1.3766420000 10.8698800000
H 12.4618486015 2.2314871691 11.3335607388
H 11.9990227122 1.5776813026 10.0384213366
3
i = 4, E = -17.2097667733
O 12.2353220000 1.3766420000 10.8698800000
H 12.4962705528 2.2308411983 11.3355758433
H 11.9975151486 1.5746309898 10.0054430868
3
i = 6, E = -17.2097743229
O 12.2353220000 1.3766420000 10.8698800000
H 12.4958168364 2.2307249171 11.3354322532
H 11.9975556812 1.5748088251 10.0062793864
3
i = 7, E = -17.2097782066
O 12.2353220000 1.3766420000 10.8698800000
H 12.4958164689 2.2307248873 11.3354322515
H 11.9975558616 1.5748085240 10.0062792262
&GLOBAL
PROJECT H2O
RUN_TYPE GEO_OPT
PRINT_LEVEL MEDIUM
&END GLOBAL
&FORCE_EVAL
METHOD QS
STRESS_TENSOR ANALYTICAL
&SUBSYS
&CELL
ABC 12.4138 12.4138 12.4138
&END CELL
&COORD
O 12.235322 1.376642 10.869880
H 12.415139 2.233125 11.257611
H 11.922476 1.573799 9.986994
&END COORD
&KIND H
BASIS_SET DZVP-GTH-PADE
POTENTIAL GTH-PADE-q1
&END KIND
&KIND O
BASIS_SET DZVP-GTH-PADE
POTENTIAL GTH-PADE-q6
&END KIND
&END SUBSYS
&DFT
BASIS_SET_FILE_NAME ../../BASIS_SET
POTENTIAL_FILE_NAME ../../GTH_POTENTIALS
&QS
EPS_DEFAULT 1.0E-7
&END QS
&MGRID
CUTOFF 100
NGRIDS 4
REL_CUTOFF 30
&END MGRID
&SCF
SCF_GUESS ATOMIC
EPS_SCF 1.0E-05
MAX_SCF 200
&DIAGONALIZATION T
ALGORITHM STANDARD
&END DIAGONALIZATION
&MIXING T
ALPHA 0.5
METHOD PULAY_MIXING
NPULAY 5
&END MIXING
&PRINT
&RESTART OFF
&END RESTART
&END PRINT
&END SCF
&XC
&XC_FUNCTIONAL PADE
&END XC_FUNCTIONAL
&END XC
&END DFT
&PRINT
&FORCES ON
&END FORCES
&STRESS_TENSOR ON
&END STRESS_TENSOR
&END PRINT
&END FORCE_EVAL
&MOTION
&GEO_OPT
TYPE MINIMIZATION
MAX_DR 1.0E-03
MAX_FORCE 1.0E-03
RMS_DR 1.0E-03
RMS_FORCE 1.0E-03
MAX_ITER 200
OPTIMIZER CG
&CG
MAX_STEEP_STEPS 0
RESTART_LIMIT 9.0E-01
&END CG
&END GEO_OPT
&CONSTRAINT
&FIXED_ATOMS
COMPONENTS_TO_FIX XYZ
LIST 1
&END FIXED_ATOMS
&END CONSTRAINT
&PRINT
&TRAJECTORY
ADD_LAST NUMERIC
&EACH
GEO_OPT 2
&END EACH
&END TRAJECTORY
&END PRINT
&END MOTION
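With these print settings (EACH GEO_OPT 2 and ADD_LAST NUMERIC), CP2K writes coordinates for every second optimization step plus the final one, which is why the trajectory above contains exactly the frames i = 2, 4, 6 and 7. A quick check of that expectation (an illustrative script, not part of the test suite; it assumes the 7-step optimization used in this regression test):

each = 2
n_steps = 7                     # optimization steps in the geo_opt/each test
written = [i for i in range(1, n_steps + 1) if i % each == 0]
if written[-1] != n_steps:      # ADD_LAST NUMERIC also writes the final step
    written.append(n_steps)
print(written)                  # [2, 4, 6, 7]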
@@ -277,9 +277,9 @@ class TestEnergyForce(unittest.TestCase):
         # cls.results.print_summary()

     def test_energy_total_scf_iteration(self):
-        energy_total = self.results["energy_total_scf_iteration"]
+        result = self.results["energy_total_scf_iteration"]
         expected_result = convert_unit(np.array(-32.2320848878), "hartree")
-        self.assertTrue(np.array_equal(energy_total[0], expected_result))
+        self.assertTrue(np.array_equal(result[0], expected_result))

     def test_energy_change_scf_iteration(self):
         energy_change = self.results["energy_change_scf_iteration"]
@@ -476,7 +476,7 @@ class TestGeoOpt(unittest.TestCase):
     def test_frame_sequence_local_frames_ref(self):
         result = self.results["frame_sequence_local_frames_ref"]
-        expected_result = np.array([0, 1, 2, 3, 4, 5])
+        expected_result = np.array([0, 1, 2, 3, 4, 5, 6])
         self.assertTrue(np.array_equal(result, expected_result))

     def test_sampling_method(self):
@@ -611,23 +611,91 @@ class TestGeoOptOptimizers(unittest.TestCase):
         result = get_result("geo_opt/lbfgs", "geometry_optimization_method")
         self.assertEqual(result, "bfgs")

+#===============================================================================
+class TestGeoOptTrajectory(unittest.TestCase):
+
+    def test_each_and_add_last(self):
+        """Test that the EACH and ADD_LAST settings affect the parsing
+        correctly.
+        """
+        results = get_results("geo_opt/each")
+
+        single_conf = results["section_single_configuration_calculation"]
+        systems = results["section_system"]
+
+        i_conf = 0
+        for calc in single_conf.itervalues():
+            system_index = calc["single_configuration_calculation_to_system_ref"][0]
+            system = systems[system_index]
+            pos = system["atom_positions"]
+
+            if i_conf == 0 or i_conf == 2 or i_conf == 4:
+                self.assertEqual(pos, None)
+            else:
+                pos = system["atom_positions"][0]
+                if i_conf == 1:
+                    expected_pos = convert_unit(
+                        np.array([
+                            [12.2353220000, 1.3766420000, 10.8698800000],
+                            [12.4618486015, 2.2314871691, 11.3335607388],
+                            [11.9990227122, 1.5776813026, 10.0384213366],
+                        ]),
+                        "angstrom"
+                    )
+                    self.assertTrue(np.array_equal(pos, expected_pos))
+                if i_conf == 3:
+                    expected_pos = convert_unit(
+                        np.array([
+                            [12.2353220000, 1.3766420000, 10.8698800000],
+                            [12.4962705528, 2.2308411983, 11.3355758433],
+                            [11.9975151486, 1.5746309898, 10.0054430868],
+                        ]),
+                        "angstrom"
+                    )
+                    self.assertTrue(np.array_equal(pos, expected_pos))
+                if i_conf == 5:
+                    expected_pos = convert_unit(
+                        np.array([
+                            [12.2353220000, 1.3766420000, 10.8698800000],
+                            [12.4958168364, 2.2307249171, 11.3354322532],
+                            [11.9975556812, 1.5748088251, 10.0062793864],
+                        ]),
+                        "angstrom"
+                    )
+                    self.assertTrue(np.array_equal(pos, expected_pos))
+                if i_conf == 6:
+                    expected_pos = convert_unit(
+                        np.array([
+                            [12.2353220000, 1.3766420000, 10.8698800000],
+                            [12.4958164689, 2.2307248873, 11.3354322515],
+                            [11.9975558616, 1.5748085240, 10.0062792262],
+                        ]),
+                        "angstrom"
+                    )
+                    self.assertTrue(np.array_equal(pos, expected_pos))
+
+            i_conf += 1
+
 #===============================================================================
 if __name__ == '__main__':
     logger = logging.getLogger("cp2kparser")
     logger.setLevel(logging.ERROR)
     suites = []
-    # suites.append(unittest.TestLoader().loadTestsFromTestCase(TestErrors))
-    # suites.append(unittest.TestLoader().loadTestsFromTestCase(TestXCFunctional))
-    # suites.append(unittest.TestLoader().loadTestsFromTestCase(TestEnergyForce))
-    # suites.append(unittest.TestLoader().loadTestsFromTestCase(TestStressTensorMethods))
-    # suites.append(unittest.TestLoader().loadTestsFromTestCase(TestSelfInteractionCorrectionMethod))
-    # suites.append(unittest.TestLoader().loadTestsFromTestCase(TestConfigurationPeriodicDimensions))
-    # suites.append(unittest.TestLoader().loadTestsFromTestCase(TestSCFConvergence))
-    # suites.append(unittest.TestLoader().loadTestsFromTestCase(TestForceFiles))
-    # suites.append(unittest.TestLoader().loadTestsFromTestCase(TestPreprocessor))
+    suites.append(unittest.TestLoader().loadTestsFromTestCase(TestErrors))
+    suites.append(unittest.TestLoader().loadTestsFromTestCase(TestXCFunctional))
+    suites.append(unittest.TestLoader().loadTestsFromTestCase(TestEnergyForce))
+    suites.append(unittest.TestLoader().loadTestsFromTestCase(TestStressTensorMethods))
+    suites.append(unittest.TestLoader().loadTestsFromTestCase(TestSelfInteractionCorrectionMethod))
+    suites.append(unittest.TestLoader().loadTestsFromTestCase(TestConfigurationPeriodicDimensions))
+    suites.append(unittest.TestLoader().loadTestsFromTestCase(TestSCFConvergence))
+    suites.append(unittest.TestLoader().loadTestsFromTestCase(TestForceFiles))
+    suites.append(unittest.TestLoader().loadTestsFromTestCase(TestPreprocessor))
     suites.append(unittest.TestLoader().loadTestsFromTestCase(TestGeoOpt))
     suites.append(unittest.TestLoader().loadTestsFromTestCase(TestGeoOptTrajFormats))
     suites.append(unittest.TestLoader().loadTestsFromTestCase(TestGeoOptOptimizers))
+    suites.append(unittest.TestLoader().loadTestsFromTestCase(TestGeoOptTrajectory))
     alltests = unittest.TestSuite(suites)
     unittest.TextTestRunner(verbosity=0).run(alltests)