Commit dff1bda7 authored by Daria M. Tomecka

adding scala wrapper

parent 3dbb28f1
package eu.nomad_lab.normalizers
import eu.{ nomad_lab => lab }
import eu.nomad_lab.DefaultPythonInterpreter
import org.{ json4s => jn }
import scala.collection.breakOut
import eu.nomad_lab.normalize.ExternalNormalizerGenerator
import eu.nomad_lab.meta
import eu.nomad_lab.query
// The imports below are assumed to mirror the sibling FhiAimsBasisNormalizer
// wrapper this file was adapted from; they provide the types used in
// stdInHandler below.
import eu.nomad_lab.resolve.{ Calculation, ResolvedRef, Section }
import eu.nomad_lab.h5.{ EmitJsonVisitor, H5EagerScanner, SectionH5 }
import eu.nomad_lab.parsers.ExternalParserWrapper
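/**
 * External normalizer that runs the Python structure-prototype classifier
 * (classify4me_prototypes.py) over the section_system data of a calculation.
 * The Scala side only streams the selected systems as JSON to the script's
 * standard input; the actual classification happens in Python.
 */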
object PrototypeNormalizer extends ExternalNormalizerGenerator(
name = "PrototypeNormalizer",
info = jn.JObject(
("name" -> jn.JString("PrototypeNormalizer")) ::
("normalizerId" -> jn.JString("PrototypeNormalizer" + lab.PrototypeVersionInfo.version)) ::
("versionInfo" -> jn.JObject(
("nomadCoreVersion" -> jn.JObject(lab.NomadCoreVersionInfo.toMap.map {
case (k, v) => k -> jn.JString(v.toString)
}(breakOut): List[(String, jn.JString)])) ::
(lab.PrototypeVersionInfo.toMap.map {
case (key, value) =>
(key -> jn.JString(value.toString))
}(breakOut): List[(String, jn.JString)])
)) :: Nil
),
context = "calcuation_context",
filter = query.CompiledQuery(query.QueryExpression("section_system"), meta.KnownMetaInfoEnvs.publicMeta),
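// External command: the configured Python interpreter runs the prototype
// classifier script, with the context URI and the archive path substituted
// at execution time.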
cmd = Seq(DefaultPythonInterpreter.pythonExe(), "${envDir}/normalizers/prototypes/normalizer/normalizer-prototypes/classify4me_prototypes.py",
"${contextUri}", "${archivePath}"),
resList = Seq(
"normalizer-prototypes/setup_paths.py",
"normalizer-prototypes/classify4me_prototypes.py",
"nomad_meta_info/public.nomadmetainfo.json",
"nomad_meta_info/common.nomadmetainfo.json",
"nomad_meta_info/meta_types.nomadmetainfo.json",
"nomad_meta_info/stats.nomadmetainfo.json"
) ++ DefaultPythonInterpreter.commonFiles(),
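// Maps the resource prefixes used in resList to the corresponding directories
// under ${envDir} (compare the script path given in cmd above).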
dirMap = Map(
"normalizer-prototypes" -> "normalizers/prototypes/normalizer/normalizer-prototypes",
"nomad_meta_info" -> "nomad-meta-info/meta_info/nomad_meta_info",
"python" -> "python-common/common/python/nomadcore"
) ++ DefaultPythonInterpreter.commonDirMapping(),
metaInfoEnv = lab.meta.KnownMetaInfoEnvs.all
) {
val trace: Boolean = false
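// Streams the input for the external Python process: a JSON array holding the
// first and (if present) the last section_system of the calculation is written
// to its stdin. With trace = true the payload is additionally echoed to the log.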
override def stdInHandler(context: ResolvedRef)(wrapper: ExternalParserWrapper)(pIn: java.io.OutputStream): Unit = {
// Always wrap the process stdin; `trace` only controls the extra logging in
// writeOut/flush below.
val out: java.io.Writer =
new java.io.BufferedWriter(new java.io.OutputStreamWriter(pIn))
val stringBuilder = new StringBuilder
def writeOut(s: String): Unit = {
out.write(s)
if (trace) stringBuilder ++= s
}
def flush(): Unit = {
out.flush()
if (trace) {
logger.info(stringBuilder.result())
stringBuilder.clear()
}
}
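// Open the JSON array that will hold the selected section_system objects.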
writeOut("[")
var isFirst = true
try {
context match {
case Calculation(archiveSet, c) =>
val sysTable = c.sectionTable(Seq("section_run", "section_system"))
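// Serializes one section_system to JSON by eagerly scanning the resolved
// section and emitting it through the JSON visitor.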
def outputSysSection(sysSection: SectionH5): Unit = {
if (!isFirst)
writeOut(",")
else
isFirst = false
//writeOut(s"""{
// | "context": ${JsonUtils.escapeString(m.toRef.toUriStr(archiveSet.objectKind))},
// | "section_system": """.stripMargin)
val visitor = new EmitJsonVisitor(
writeOut = writeOut
)
val scanner = new H5EagerScanner
scanner.scanResolvedRef(Section(archiveSet, sysSection), visitor)
//writeOut("}")
flush()
}
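// Only the first and the last section_system of the run are emitted;
// intermediate systems are skipped.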
val nSys = sysTable.lengthL
if (nSys > 0)
outputSysSection(sysTable(0))
if (nSys > 1)
outputSysSection(sysTable(nSys - 1))
writeOut("]")
flush()
case r =>
throw new Exception(s"FhiAimsBasisNormalizer expected a calculation as context, but got $r")
}
} finally {
out.close()
pIn.close()
wrapper.sendStatus = ExternalParserWrapper.SendStatus.Finished
}
}
}
package eu.nomad_lab.normalizers
import eu.nomad_lab.{ parsers, DefaultPythonInterpreter }
import org.scalacheck.Properties
import org.specs2.mutable.Specification
import org.{ json4s => jn }
object PrototypesNormalizerSpec extends Specification {
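// Placeholder: the commit adds no assertions yet. A minimal smoke test
// (hypothetical, assuming ExternalNormalizerGenerator exposes its `name`)
// might look like:
//
//   "PrototypeNormalizer" >> {
//     "is registered under its expected name" >> {
//       PrototypeNormalizer.name must_== "PrototypeNormalizer"
//     }
//   }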
}