diff --git a/integrated-pipeline/src/main/scala/eu/nomad_lab/integrated_pipeline/io_integrations/WriteToHDF5ResultsProcessor.scala b/integrated-pipeline/src/main/scala/eu/nomad_lab/integrated_pipeline/io_integrations/WriteToHDF5ResultsProcessor.scala
index 8949117bf058c543a409c43ab04afc2558d01e19..d944e1b343d9a96911375f8e8f7b20ca36906e42 100644
--- a/integrated-pipeline/src/main/scala/eu/nomad_lab/integrated_pipeline/io_integrations/WriteToHDF5ResultsProcessor.scala
+++ b/integrated-pipeline/src/main/scala/eu/nomad_lab/integrated_pipeline/io_integrations/WriteToHDF5ResultsProcessor.scala
@@ -12,11 +12,11 @@ import eu.nomad_lab.parsers.{ H5Backend, ReindexBackend }
 class WriteToHDF5ResultsProcessor(outputLocation: Path, metaInfo: MetaInfoEnv) extends ParsingResultsProcessor {
   override def processFileParsingResult(result: FileParsingResult): Unit = {
     val id = result.task.calculationGid
+    val archiveGid = result.task.treeTask.archiveId
     val fileName = Paths.get(id + ".h5")
     val targetPath = outputLocation(result.treeTask).resolve(fileName)
     Files.createDirectories(outputLocation(result.treeTask))
-    //VERIFY: Is this path the appropriate one for single calculation HDFs? (Is there a standard?)
-    val h5file = H5File.create(targetPath, Paths.get("/", id, id))
+    val h5file = H5File.create(targetPath, Paths.get("/", archiveGid, id))
     val h5Backend = H5Backend(metaEnv = metaInfo, h5File = h5file, closeFileOnFinishedParsing = false)
     val backend = new ReindexBackend(h5Backend)
     try {
diff --git a/integrated-pipeline/src/test/scala/eu/nomad_lab/integrated_pipeline_end_to_end_tests/package.scala b/integrated-pipeline/src/test/scala/eu/nomad_lab/integrated_pipeline_end_to_end_tests/package.scala
index 11c3bc464ed5896e1ba61cfd04cedd9280d31475..a327cc68ea0ea43a29d5e416aaa0011cd5ee4ad2 100644
--- a/integrated-pipeline/src/test/scala/eu/nomad_lab/integrated_pipeline_end_to_end_tests/package.scala
+++ b/integrated-pipeline/src/test/scala/eu/nomad_lab/integrated_pipeline_end_to_end_tests/package.scala
@@ -94,6 +94,7 @@ package object integrated_pipeline_end_to_end_tests extends TestDataBuilders {
     sample.candidateCalculationsWithParsers.foreach { entry =>
       val task = aFileParsingTask().withTreeTask(treeTask).withRelativePath(entry._1).build()
       val id = task.calculationGid
+      val archiveId = treeTask.archiveId
       val fileName = s"$id.h5"
       val targetFolder = tmpResultsFolder.resolve(treeTask.prefixFolder).resolve(treeTask.archiveId)
       val location = targetFolder.resolve(fileName)
@@ -101,7 +102,8 @@ package object integrated_pipeline_end_to_end_tests extends TestDataBuilders {
         location.toFile.exists(),
         s"HDF5 file '$location' with parsing results does not exist"
       )
-      validateHDF5(targetFolder, id, metaInfo, checkSingleCalculationHDFContent(sample))
+      validateHDF5(targetFolder, archiveId, metaInfo, checkSingleCalculationHDFContent(sample),
+        Some(fileName))
     }
     succeed
   }
diff --git a/integrated-pipeline/src/test/scala/eu/nomad_lab/integrated_pipeline_tests/WriteToHDF5ResultsProcessorSpec.scala b/integrated-pipeline/src/test/scala/eu/nomad_lab/integrated_pipeline_tests/WriteToHDF5ResultsProcessorSpec.scala
index c27866a79ec19596cbde3393348df0fd9c9ec053..1eba02690432e9830bdf3d880c79f94331ea450b 100644
--- a/integrated-pipeline/src/test/scala/eu/nomad_lab/integrated_pipeline_tests/WriteToHDF5ResultsProcessorSpec.scala
+++ b/integrated-pipeline/src/test/scala/eu/nomad_lab/integrated_pipeline_tests/WriteToHDF5ResultsProcessorSpec.scala
@@ -30,12 +30,14 @@ class WriteToHDF5ResultsProcessorSpec extends WordSpec with TestDataBuilders wit
       val inputs = (1 to 3).map(x => FileParsingTask(sampleTree, Paths.get(s"file$x"), "dummyParser"))
       inputs.foreach(x => writer.processFileParsingResult(createSuccessfulFileParsingResult(x)))
       val targetFolder = writer.outputLocation(sampleTree)
+      val archiveId = sampleTree.archiveId
       inputs.foreach { entry =>
         val calcName = entry.calculationGid
         val filePath = targetFolder.resolve(s"$calcName.h5")
         assert(filePath.toFile.exists(), s"calculation output HDF5 '$filePath' does not exist")
         val mainFileUri = entry.treeTask.treeBasePath.resolve(entry.relativePath).toUri.toString
-        validateHDF5(targetFolder, calcName, metaData, validateHDFContent(mainFileUri))
+        validateHDF5(targetFolder, archiveId, metaData, validateHDFContent(mainFileUri),
+          Some(s"$calcName.h5"))
       }
     }
   }
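
Note: the definition of the validateHDF5 test helper is not part of the hunks above, so the following is only a minimal sketch of the shape implied by the updated call sites; the parameter names, the placeholder types, and the default value are assumptions, not the actual code.

    import java.nio.file.Path

    object ValidateHDF5SignatureSketch {
      // Placeholder stand-ins: the real MetaInfoEnv and the content-check callback
      // types live elsewhere in the eu.nomad_lab code base and are not shown here.
      type MetaInfoEnv = AnyRef
      type ContentCheck = AnyRef

      // Assumed shape after this change: the second argument is now the archive id
      // (the top-level HDF5 group, so calculations sit at /<archiveId>/<calculationGid>),
      // and the new optional file name selects a single-calculation file such as
      // "<calculationGid>.h5" inside targetFolder instead of an archive-level file.
      def validateHDF5(
        targetFolder: Path,
        archiveId: String,
        metaInfo: MetaInfoEnv,
        contentCheck: ContentCheck,
        fileName: Option[String] = None
      ): Unit = ??? // the real helper opens the file and applies contentCheck
    }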