Commit 63619fe1 authored by Carl Poelking's avatar Carl Poelking
Browse files

soapy.util module.

parent 9d3c59fe
install(FILES __init__.py kernel.py simspace.py pca.py dimred.py lagraph.py DESTINATION ${LOCAL_INSTALL_DIR}/soapy)
install(FILES __init__.py util.py kernel.py simspace.py pca.py dimred.py lagraph.py DESTINATION ${LOCAL_INSTALL_DIR}/soapy)
......@@ -28,7 +28,12 @@ def flg_compute_k_flg(S1, S2):
S2_inv = la.inv(S2)
S12_inv = 0.5*(S1_inv+S2_inv)
S12_inv_inv = la.inv(S12_inv)
return (la.det(S12_inv_inv))**0.5/(la.det(S1)*la.det(S2))**0.25
det_12 = la.det(S12_inv_inv)
det_1_2 = la.det(S1)*la.det(S2)
if det_12 < 0. or det_1_2 < 0.:
print "WARNING__%+1.7f<0__%+1.7f<0__" % (det_12, det_1_2)
#return (la.det(S12_inv_inv))**0.5/(la.det(S1)*la.det(S2))**0.25
return det_12**0.5/det_1_2**0.25
def compare_graphs_kernelized(L1, L2, K12, options, zero_eps=1e-10):
......@@ -63,6 +68,12 @@ def compare_graphs_kernelized(L1, L2, K12, options, zero_eps=1e-10):
return k_flg
def compare_graphs_laplacian_kernelized(g1, g2, options):
    """Compare two particle graphs, dispatching on the 'laplacian.hierarchical' option.

    Routes to the hierarchical comparison when requested, otherwise to the
    flat (single-level) graph comparison. Returns whatever the selected
    comparison function returns.
    """
    use_hierarchical = options['laplacian.hierarchical']
    compare_fct = compare_graphs_hierarchical if use_hierarchical else compare_graphs
    return compare_fct(g1, g2, options)
def compare_graphs(g1, g2, options):
# EXTRACT LAPLACIANS (L), FEATURE MATRICES (P), SHAPES
......@@ -228,12 +239,11 @@ class ParticleGraph(object):
def setupVertexFeatures(self, atoms, options):
n_atoms = len(atoms)
descriptor_type = options['laplacian.descriptor']
if descriptor_type == 'atom_type':
feature_map = {
1:0,
6:1,
7:2,
8:3}
options_descriptor = options['descriptor.%s' % descriptor_type]
if descriptor_type == 'atom_type':
feature_map = {}
feature_list = options_descriptor['type_map']
for idx, f in enumerate(feature_list): feature_map[f] = idx
dim = len(feature_map.keys())
P = np.zeros((n_atoms, dim))
for idx, atom in enumerate(atoms):
......@@ -246,13 +256,18 @@ class ParticleGraph(object):
structure = soap.tools.setup_structure_ase(self.label, atoms)
# Options
options_soap = soap.Options()
for item in options.iteritems():
options_soap.set(*item)
for item in options_descriptor.items():
key = item[0]
val = item[1]
if type(val) == list: continue # TODO Exclusions loaded separately, fix this
options_soap.set(key, val)
options_soap.excludeCenters(options_descriptor['exclude_centers'])
options_soap.excludeTargets(options_descriptor['exclude_targets'])
# Spectrum
spectrum = soap.Spectrum(structure, options_soap)
spectrum.compute()
spectrum.computePower()
if options['spectrum.gradients']:
if options_descriptor['spectrum.gradients']:
spectrum.computePowerGradients()
spectrum.computeGlobal()
# Adapt spectrum
......
import numpy as np
import multiprocessing as mp
import functools as fct
import json
def mp_compute_column_block(gi, gj_list, kfct):
    """
    Evaluates kfct for each pair (gi, gj), with gj from gj_list

    Pairs below the diagonal (gj.mp_idx < gi.mp_idx) are not evaluated;
    their entries are set to zero, as only the upper triangle is needed.

    See Also
    --------
    mp_compute_upper_triangle
    """
    return [0. if gj.mp_idx < gi.mp_idx else kfct(gi, gj) for gj in gj_list]
def mp_compute_upper_triangle(kfct, g_list, n_procs, n_blocks, log=None, **kwargs):
    """
    Compute kernel matrix computed from pairs of objects in object list

    Only the upper triangle is evaluated; lower-triangle entries of the
    returned matrix are left at zero.

    Parameters
    ----------
    kfct : function reference to be evaluated between g_list items
    g_list : list of items supplied to kfct(gi, gj, **kwargs)
    n_procs: number of processors requested
    n_blocks: number of column blocks onto which computation is split
    kwargs: keyword arguments supplied to kfct

    Returns
    -------
    kmat : (dim, dim) numpy array, dim = len(g_list)
    """
    dim = len(g_list)
    kmat = np.zeros((dim,dim))
    # Embed mp index in g-list objects (used by workers to skip lower-triangle pairs)
    for mp_idx, g in enumerate(g_list): g.mp_idx = mp_idx
    # Prime kernel function once: kwargs are invariant across column blocks
    # (was previously rebuilt inside the loop)
    kfct_primed = fct.partial(
        kfct,
        **kwargs)
    # Divide onto column blocks
    col_idcs = np.arange(len(g_list))
    col_div_list = np.array_split(col_idcs, n_blocks)
    # Single worker pool reused across blocks (was previously respawned per
    # block); try/finally guarantees the workers are reaped even on error
    pool = mp.Pool(processes=n_procs)
    try:
        for col_div in col_div_list:
            # array_split yields empty chunks when n_blocks > len(g_list)
            if len(col_div) == 0: continue
            # Column start, column end
            c0 = col_div[0]
            c1 = col_div[-1]+1
            if log: log << "Column block [%d:%d]" % (c0, c1) << log.endl
            gj_list = g_list[c0:c1]
            # Prime mp function with this block's column objects
            mp_compute_column_block_primed = fct.partial(
                mp_compute_column_block,
                gj_list=gj_list,
                kfct=kfct_primed)
            # Map rows onto workers, then write the column block into kmat
            kmat_column_block = np.array(pool.map(mp_compute_column_block_primed, g_list))
            kmat[:,c0:c1] = kmat_column_block
    finally:
        pool.close()
        pool.join()
    return kmat
def json_load_utf8(file_handle):
    """Parse JSON from an open file handle, converting unicode strings to utf-8.

    The object_hook byteifies every parsed dict on the fly; the outer call
    (with ignore_dicts=True) handles non-dict top-level values without
    re-processing dicts already converted by the hook.
    """
    parsed = json.load(file_handle, object_hook=_byteify)
    return _byteify(parsed, ignore_dicts=True)
def json_loads_utf8(json_text):
    """Parse JSON from a string, converting unicode strings to utf-8.

    The object_hook byteifies every parsed dict on the fly; the outer call
    (with ignore_dicts=True) handles non-dict top-level values without
    re-processing dicts already converted by the hook.
    """
    parsed = json.loads(json_text, object_hook=_byteify)
    return _byteify(parsed, ignore_dicts=True)
def _byteify(data, ignore_dicts=False):
    """Recursively convert unicode strings within data to utf-8 byte strings.

    Designed for use as a json object_hook (Python 2): the hook already
    byteifies every parsed dict, so callers pass ignore_dicts=True on the
    re-entrant call to avoid converting dicts twice.
    """
    # Unicode strings are encoded to utf-8 byte strings
    if isinstance(data, unicode):
        return data.encode('utf-8')
    # Lists: byteify each element
    if isinstance(data, list):
        return [_byteify(entry, ignore_dicts=True) for entry in data]
    # Dicts: byteify keys and values, unless already handled by the object hook
    if isinstance(data, dict) and not ignore_dicts:
        byteified = {}
        for key, value in data.iteritems():
            byteified[_byteify(key, ignore_dicts=True)] = _byteify(value, ignore_dicts=True)
        return byteified
    # Anything else (int, float, bool, None, ...) passes through unchanged
    return data
......@@ -17,7 +17,7 @@ def write_xyz(xyz_file, structure):
ofs.close()
return
def ase_load_all(folder, log=None):
def ase_load_all(folder, log=None, n=None):
cwd = os.getcwd()
os.chdir(folder)
config_listdir = sorted(os.listdir('./'))
......@@ -25,6 +25,7 @@ def ase_load_all(folder, log=None):
for config_file in config_listdir:
if os.path.isfile(config_file):
config_files.append(config_file)
if n: config_files = config_files[0:n]
ase_config_list = AseConfigList(config_files, log=log)
os.chdir(cwd)
return ase_config_list
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment