#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Reads calculation data and classifies each structure by prototype, on the basis
of its space group and normalized Wyckoff positions, and adds the resulting
labels to the calculation:
 - prototype_label (with labels in the same format as in the read_prototypes
   function).
"""

from __future__ import absolute_import

__author__ = "Daria M. Tomecka and Fawzi Mohamed"
__copyright__ = "Copyright 2017, The NOMAD Project"
__maintainer__ = "Daria M. Tomecka"
__email__ = "tomeckadm@gmail.com;"
__date__ = "12/07/17"

import sys
import os
import os.path
import json
import logging
import functools
import math

import numpy as np
from ase.data import chemical_symbols

import setup_paths

# The prototype definitions now live in nomadcore.structure_types; they were
# previously read from
# /nomad-lab-base/analysis-tools/structural-similarity/python-modules/nomad_sim/structure_types.py
from nomadcore.structure_types import structure_types_by_spacegroup as str_types_by_spg
from nomadcore.parse_streamed_dicts import ParseStreamedDicts
from nomadcore.local_meta_info import loadJsonFile, InfoKindEl
from nomadcore.parser_backend import JsonParseEventsWriterBackend

import spglib



def get_normalized_wyckoff(atomic_number, wyckoff):
    """Returns a normalized Wyckoff sequence for the given atomic numbers and
    Wyckoff symbols.

    The species are renamed to X_0, X_1, ... (ordered by decreasing abundance,
    with ties broken by their distribution over the Wyckoff sites), so that the
    result depends only on the structure, not on the chemistry.
    """
    # count how many atoms of each species are present
    atomCount = {}
    for nr in atomic_number:
        atomCount[nr] = atomCount.get(nr, 0) + 1
    # for every Wyckoff letter, count the atoms of each species occupying it
    wycDict = {}
    for i, wk in enumerate(wyckoff):
        oldVal = wycDict.get(wk, {})
        nr = atomic_number[i]
        oldVal[nr] = oldVal.get(nr, 0) + 1
        wycDict[wk] = oldVal
    sortedWyc = list(wycDict.keys())
    sortedWyc.sort()

    def cmpp(a, b):
        # comparison helper: negative if a > b, i.e. sorts in descending order
        return ((a < b) - (a > b))

    def compareAtNr(at1, at2):
        # order species by total abundance first, then by their occupation of
        # the (alphabetically sorted) Wyckoff sites
        c = cmpp(atomCount[at1], atomCount[at2])
        if (c != 0):
            return c
        for wk in sortedWyc:
            p = wycDict[wk]
            c = cmpp(p.get(at1, 0), p.get(at2, 0))
            if c != 0:
                return c
        return 0

    sortedAt = list(atomCount.keys())
    sortedAt.sort(key=functools.cmp_to_key(compareAtNr))
    # rename the species to anonymous labels X_0, X_1, ... in that order
    standardAtomNames = {}
    for i, at in enumerate(sortedAt):
        standardAtomNames[at] = ("X_%d" % i)
    standardWyc = {}
    for wk, ats in wycDict.items():
        stdAts = {}
        for at, count in ats.items():
            stdAts[standardAtomNames[at]] = count
        standardWyc[wk] = stdAts
    if standardWyc:
        # divide all counts by their greatest common divisor so that the
        # result does not depend on the size of the (super)cell
        counts = [c for x in standardWyc.values() for c in x.values()]
        gcd = counts[0]
        for c in counts[1:]:
            gcd = math.gcd(gcd, c)
        if gcd != 1:
            for wk, d in standardWyc.items():
                for at, c in d.items():
                    d[at] = c // gcd
    return standardWyc
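
# Illustrative sketch (not executed; the values are made up for this example):
# two species with one atom each, Z=11 on Wyckoff site 'a' and Z=17 on site 'b',
#
#   get_normalized_wyckoff([11, 17], ['a', 'b'])
#   # -> {'a': {'X_0': 1}, 'b': {'X_1': 1}}
#
# A doubled cell ([11, 11, 17, 17] on ['a', 'a', 'b', 'b']) yields the same
# result, because the site counts are divided by their greatest common divisor.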



def get_structure_type(space_group, norm_wyckoff):
    """Returns the prototype information for the given space group and
    normalized Wyckoff positions, or None if no known prototype matches.
    """
    for type_description in str_types_by_spg.get(space_group, []):
        current_norm_wyckoffs = type_description.get("normalized_wyckoff_spg")
        if current_norm_wyckoffs and current_norm_wyckoffs == norm_wyckoff:
            return type_description
    return None
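
# Illustrative sketch (hypothetical values): with space_group 225 and a
# normalized Wyckoff dict that matches one of the table entries, this returns
# that entry, a dict with keys such as "Prototype", "Pearsons Symbol",
# "aflow_prototype_id" and "aflow_prototype_url"; otherwise it returns None.
#
#   proto = get_structure_type(225, {'a': {'X_0': 1}, 'b': {'X_1': 1}})
#   # -> matching entry from structure_types_by_spacegroup[225], or None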



def _structure_type_info(self):
    """Known structure type of an object exposing space_group and
    normalized_wyckoff attributes (helper meant to be bound as a method)."""
    return get_structure_type(
        self.space_group,
        self.normalized_wyckoff)

def toAtomNr(string):
    """Returns the atomic number for the given element label (0 if unknown)."""
    # normalize the label: at most three characters, capitalized like "Fe"
    baseStr = string[:3].title()
    # three-letter placeholder symbols of the super-heavy elements ("Uu...")
    if baseStr.startswith("Uu") and baseStr in chemical_symbols[1:]:
        return chemical_symbols.index(baseStr)
    # otherwise try a two-letter symbol first, then a one-letter symbol
    if baseStr[:2] in chemical_symbols[1:]:
        return chemical_symbols.index(baseStr[:2])
    elif baseStr[:1] in chemical_symbols[1:]:
        return chemical_symbols.index(baseStr[:1])
    else:
        return 0
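
# Illustrative examples: toAtomNr("Fe") and toAtomNr("Fe2") both return 26,
# toAtomNr("H") returns 1, and an unrecognized label returns 0.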

def dictToNarray(dictValue):
    """Converts a dictionary with 'flatData' and 'shape' entries into a numpy
    array of that shape."""
    v = dictValue['flatData']
    return np.reshape(np.asarray(v), dictValue['shape'])
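
# Illustrative example: dictToNarray({'flatData': [0.0] * 9, 'shape': [3, 3]})
# returns a 3x3 numpy array of zeros.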

def protoNormalizeWyckoff(protoDict):
    """Recalculates the normalized Wyckoff values for the given prototype
    dictionary."""
    cell = np.asarray(protoDict['lattice_vectors'])
    atomSpecies = [toAtomNr(at) for at in protoDict['atom_labels']]
    atomPos = np.asarray(protoDict['atom_positions'])
    symm = systemToSpg(cell, atomSpecies, atomPos)
    wyckoffs = symm.get("wyckoffs")
    norm_wyckoff = get_normalized_wyckoff(atomSpecies, wyckoffs)
    return norm_wyckoff

def updatePrototypesWyckoff(protos):
    """Adds a 'normalized_wyckoff_spg' entry to every prototype dictionary."""
    for sp, pts in protos.items():
        for protoDict in pts:
            try:
                wy = protoNormalizeWyckoff(protoDict)
                protoDict['normalized_wyckoff_spg'] = wy
            except Exception:
                logging.exception("Failed to compute normalized wyckoffs for %s", json.dumps(protoDict))
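
# A hypothetical minimal prototype entry (illustrative only; the real entries
# come from nomadcore.structure_types):
#
#   {'lattice_vectors': [[...], [...], [...]],
#    'atom_labels': ['Na', 'Cl', ...],
#    'atom_positions': [[...], ...],
#    'Prototype': 'NaCl', 'Pearsons Symbol': 'cF8',
#    'aflow_prototype_id': '<id>', 'aflow_prototype_url': '<url>'}
#
# updatePrototypesWyckoff adds a 'normalized_wyckoff_spg' entry to each of them.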

def systemToSpg(cell, atomSpecies, atomPos):
    """Uses spglib to calculate the symmetry of the given system (cell and
    Cartesian atom positions in the same units)."""
    acell = cell * 1.0e10  # convert m to Angstrom; use m instead of Angstrom?
    cellInv = np.linalg.inv(cell)
    # spglib expects fractional (scaled) positions
    symm = spglib.get_symmetry_dataset((acell, np.dot(atomPos, cellInv), atomSpecies),
                                       0.002, -1)
    return symm
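
# Illustrative sketch (assuming a simple-cubic test cell; not executed here):
#
#   cell = np.eye(3) * 3.0e-10            # 3 Angstrom cube, in metres
#   pos = np.zeros((1, 3))                # one atom at the origin (Cartesian)
#   symm = systemToSpg(cell, [11], pos)
#   symm["number"]    # -> 221 (Pm-3m)
#   symm["wyckoffs"]  # -> ['a']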

def classify_by_norm_wyckoff(sectionSystem):
    """Classifies a section_system dictionary; returns a tuple
    (prototype_label, aflow_prototype_id, aflow_prototype_url) or None."""
    try:
        conf = sectionSystem

        lab = conf.get("atom_labels", None)
        if lab is None:
            return None
        atomSpecies = [toAtomNr(l) for l in lab['flatData']]

        newCell = conf.get("simulation_cell")
        if newCell is None:
            return None
        cell = dictToNarray(newCell)

        atomPosDict = conf.get("atom_positions")
        if atomPosDict is None:
            return None
        atomPos = dictToNarray(atomPosDict)

        symm = systemToSpg(cell, atomSpecies, atomPos)
        if symm is None:
            return None
        wyckoffs = symm.get("wyckoffs")
        spg_nr = symm.get("number")

        # add the recalculated normalized Wyckoff positions to the prototype table
        updatePrototypesWyckoff(str_types_by_spg)

        norm_wyckoff = get_normalized_wyckoff(atomSpecies, wyckoffs)
        protoDict = get_structure_type(spg_nr, norm_wyckoff)

        aflow_prototype_id = "-"
        aflow_prototype_url = "-"
        if protoDict is None:
            proto = "%d-_" % spg_nr
        else:
            proto = '%d-%s-%s' % (spg_nr, protoDict.get("Prototype", "-"),
                                  protoDict.get("Pearsons Symbol", "-"))
            aflow_prototype_id = protoDict.get("aflow_prototype_id", "-")
            aflow_prototype_url = protoDict.get("aflow_prototype_url", "-")
        return proto, aflow_prototype_id, aflow_prototype_url
    except Exception:
        logging.exception("failure while classifying system %s", sectionSystem.get('uri'))

    return None
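
# Illustrative sketch of the expected input and output (hypothetical values):
#
#   section_system = {
#       'uri': '<section uri>',
#       'atom_labels': {'flatData': ['Na', 'Cl'], 'shape': [2]},
#       'atom_positions': {'flatData': [...], 'shape': [2, 3]},
#       'simulation_cell': {'flatData': [...], 'shape': [3, 3]},
#   }
#   classify_by_norm_wyckoff(section_system)
#   # -> e.g. ('225-NaCl-cF8', '<aflow id>', '<aflow url>') if a matching
#   #    prototype entry exists, ('225-_', '-', '-') if no prototype matches,
#   #    or None when data is missing or the classification fails.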


def main():
    metapath = '../../../../nomad-meta-info/meta_info/nomad_meta_info/' + \
        'common.nomadmetainfo.json'
    metaInfoPath = os.path.normpath(
        os.path.join(os.path.dirname(os.path.abspath(__file__)), metapath))

    metaInfoEnv, warns = loadJsonFile(filePath=metaInfoPath,
                                      dependencyLoader=None,
                                      extraArgsHandling=InfoKindEl.ADD_EXTRA_ARGS,
                                      uri=None)
    backend = JsonParseEventsWriterBackend(metaInfoEnv)

    # start the parsing session for the calculation context given on the command line
    calcContext = sys.argv[1]
    backend.startedParsingSession(
        calcContext,
        parserInfo={'name': 'PrototypesNormalizer', 'version': '1.0'})

    # read section_system dictionaries streamed on stdin, one JSON dict at a time
    dictReader = ParseStreamedDicts(sys.stdin)
    while True:
        sectSys = dictReader.readNextDict()
        if sectSys is None:
            break

        # pass through AFLOW identifiers that are already present in the input
        if "aflow_prototype_id" in sectSys:
            backend.addValue("prototype_aflow_id", sectSys["aflow_prototype_id"])
        if "aflow_prototype_url" in sectSys:
            backend.addValue("prototype_aflow_url", sectSys["aflow_prototype_url"])

        try:
            result = classify_by_norm_wyckoff(sectSys)
            if result:
                label, aflow_prototype_id, aflow_prototype_url = result
                backend.openContext(sectSys['uri'])
                backend.addValue("prototype_label", label)
                if aflow_prototype_id != "-":
                    backend.addValue("prototype_aflow_id", aflow_prototype_id)
                if aflow_prototype_url != "-":
                    backend.addValue("prototype_aflow_url", aflow_prototype_url)
                backend.closeContext(sectSys['uri'])
                sys.stdout.flush()
        except Exception:
            logging.exception("exception trying to calculate prototype for %s", sectSys)

    backend.finishedParsingSession("ParseSuccess", None)
    sys.stdout.flush()


if __name__ == '__main__':
    main()