Commit e8b31758 authored by Carl Poelking

Automated partitioning of structures.

parent c1a1e4cd
......@@ -6,6 +6,7 @@ test "$is_csh" = 101 && goto CSH
export PYTHONPATH="${PYTHONPATH}:@CMAKE_INSTALL_PREFIX@"
export SOAP_ROOT="@CMAKE_INSTALL_PREFIX@"
export LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:@CMAKE_INSTALL_PREFIX@/soap"
return
# CSH/TCSH
CSH:
......
from _soapxx import *
from linalg import *
from . import soapy
from . import tools
install(FILES __init__.py elements.py util.py npthreads.py kernel.py simspace.py pca.py dimred.py lagraph.py lamatch.py math.py momo.py DESTINATION ${LOCAL_INSTALL_DIR}/soapy)
install(FILES __init__.py learn.py elements.py util.py npthreads.py kernel.py simspace.py pca.py dimred.py lagraph.py lamatch.py math.py momo.py DESTINATION ${LOCAL_INSTALL_DIR}/soapy)
......@@ -5,4 +5,5 @@ from pca import *
from util import *
from math import *
from elements import *
import learn
......@@ -162,21 +162,29 @@ class KernelAdaptorSpecificUnique(object):
self.types = types_global
self.S = len(types_global)
return
def adapt(self, spectrum):
IX = np.zeros((0.,0.), dtype='float64')
def adapt(self, spectrum, return_pos_matrix=False):
IX = np.zeros((0,0), dtype='float64') # feature matrix
dimX = -1
IR = np.zeros((0,0), dtype='float64') # position matrix
for atomic_i in spectrum:
Xi_unnorm, Xi_norm = self.adaptScalar(atomic_i)
Ri = atomic_i.getCenter().pos
dimX = Xi_norm.shape[0]
if not IX.any():
IX = np.copy(Xi_norm) # TODO Is this necessary?
IX.resize((1, dimX))
IR = np.copy(Ri)
IR.resize((1, 3))
else:
i = IX.shape[0]
IX.resize((i+1, dimX))
IX[-1,:] = Xi_norm
#print IX
return IX
IR.resize((i+1, 3))
IR[-1,:] = Ri
if return_pos_matrix:
return IX, IR
else:
return IX
def adaptScalar(self, atomic):
X = reduce_xnklab_atomic(atomic, self.types)
X_norm = X/np.dot(X,X)**0.5
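For orientation, a minimal usage sketch of the extended adapt() signature above; the adaptor and spectrum objects are assumed to be prepared elsewhere, and the variable names are illustrative rather than part of the commit:
# Illustrative sketch: 'adaptor' is a configured KernelAdaptorSpecificUnique, 'spectrum' a computed SOAP spectrum
IX = adaptor.adapt(spectrum)                              # default behaviour unchanged: feature matrix only
IX, IR = adaptor.adapt(spectrum, return_pos_matrix=True)  # new: also returns the (n_centers, 3) matrix of center positions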
......@@ -203,7 +211,6 @@ class KernelAdaptorSpecific(object):
i = IX.shape[0]
IX.resize((i+1, dimX))
IX[-1,:] = Xi_norm
#print IX
return IX
def adaptScalar(self, atomic):
xnklab_atomic = Xnklab(atomic, self.types)
......
......@@ -680,10 +680,7 @@ class ParticleGraph(object):
#print z, z_idx
P = np.array(ix)
dim = P.shape[1]
if options_soap.get('kernel.adaptor') in [ 'global-generic', 'global-specific' ]:
pass
else:
assert P.shape[0] == n_atoms
assert P.shape[0] == n_atoms
#print P.dot(P.T)
elif descriptor_type == 'npy_load':
folder = options_descriptor["folder"]
......
import numpy as np
def subsample_array(array, n_select, method='stride', stride_shift=0):
# Number of data items; selected, discarded items
if type(array) == list:
n_data = len(array)
else:
n_data = array.shape[0]
n_discard = n_data - n_select
assert n_discard >= 0 # Trying to select more data than available?
# Subsample
if method == 'random':
# Random access indices
idcs = np.arange(0, n_data)
np.random.shuffle(idcs)
# Create selections
if type(array) == list:
array_select = []
array_discard = []
for i in range(n_data):
if i < n_select:
array_select.append(array[idcs[i]])
else:
array_discard.append(array[idcs[i]])
else:
raise NotImplementedError("<subsample_data_array> Array type other than list")
elif method == 'stride':
# Construct index sets
idcs = np.arange(0, n_data)
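# Evenly spaced selection: the i-th picked index is floor(n_data/n_select * i)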
idcs_sel = [ int(float(idcs.shape[0])/n_select*i) for i in range(n_select) ]
idcs_sel = np.array(idcs_sel)
# Take into account periodic shift
if stride_shift != 0:
idcs_sel = np.sort((idcs_sel+stride_shift) % idcs.shape[0])
# Index complement
mask = np.zeros(idcs.shape[0], dtype=bool)
mask[idcs_sel] = True
idcs_disc = idcs[~mask]
# Create selections
if type(array) == list:
array_select = []
array_discard = []
for i in idcs_sel:
array_select.append(array[i])
for i in idcs_disc:
array_discard.append(array[i])
else:
raise NotImplementedError("<subsample_data_array> Array type other than list")
return array_select, array_discard
if __name__ == "__main__":
print "Data array"
a = range(20)
print a
print "Select with stride, shift=0"
a1, a2 = subsample_array(a, 5, 'stride', 0)
print a1, a2
print "Select with stride, shift=6"
a1, a2 = subsample_array(a, 5, 'stride', 6)
print a1, a2
print "Select random subsample"
a1, a2 = subsample_array(a, 5, 'random')
print a1, a2
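For reference, a small sanity check of the stride selection above, assuming this file is the learn.py added to the soapy install target earlier in this commit; the expected values follow directly from the index formula:
# Assumes the module installs as soap.soapy.learn (see the CMake change above)
from soap.soapy.learn import subsample_array
sel, disc = subsample_array(list(range(20)), 5, 'stride', 0)
assert sel == [0, 4, 8, 12, 16]         # i-th index = floor(20/5 * i)
sel, disc = subsample_array(list(range(20)), 5, 'stride', 6)
assert sel == [2, 6, 10, 14, 18]        # (index + 6) mod 20, then re-sorted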
install(FILES __init__.py loadwrite.py inverse.py extract.py DESTINATION ${LOCAL_INSTALL_DIR}/tools)
install(FILES __init__.py loadwrite.py inverse.py extract.py partition.py DESTINATION ${LOCAL_INSTALL_DIR}/tools)
import os
import numpy as np
import itertools
import partition
from .. import _soapxx as soap
from ..soapy import momo
from ..soapy import elements
try:
import ase
......@@ -10,6 +12,85 @@ try:
except ImportError:
print("Note: ase.io import failed. Install PYTHON-ASE to harvest full reader functionality.")
def structures_from_xyz(xyz_file, do_partition=True, add_fragment_com=True):
# Read xyz via ASE
ase_configs = ase.io.read(xyz_file, index=':')
return structures_from_ase(
ase_configs=ase_configs,
do_partition=do_partition,
add_fragment_com=add_fragment_com)
def structures_from_ase(ase_configs, do_partition=True, add_fragment_com=True, use_center_of_geom=False, log=None):
# NOTE Center of mass is computed without considering PBC => Requires unwrapped coordinates
# Create structures
structures = []
for config in ase_configs:
# System properties
label = config.info['label']
positions = config.get_positions()
types = config.get_chemical_symbols()
if log: log << log.back << "Reading '%s'" % label << log.flush
# Simulation cell
if config.pbc.all():
box = np.array([config.cell[0], config.cell[1], config.cell[2]])
elif not config.pbc.any():
box = np.zeros((3,3))
else:
raise NotImplementedError("<structures_from_xyz> Partial periodicity not implemented.")
# Partition
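# Topology format: list of (segment_type, n_segments, atoms_per_segment) tuples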
if do_partition:
frags, top = partition.PartitionStructure(types, positions)
positions = [ atm.xyz for atm in itertools.chain(*frags) ]
types = [ atm.e for atm in itertools.chain(*frags) ]
else:
top = [('SEG', 1, len(positions))]
# Check particle count consistent with top
atom_count = 0
for section in top:
atom_count += section[1]*section[2]
assert atom_count == len(positions) # Does topology match structure?
# Create segments, particles
structure = soap.Structure(label)
structure.box = box
atom_idx = 0
for section in top:
seg_type = section[0]
n_segs = section[1]
n_atoms = section[2]
for i in range(n_segs):
segment = structure.addSegment()
segment.name = seg_type
# Add particles, compute CoM
com = np.array([0.,0.,0.])
com_weight_total = 0
for j in range(n_atoms):
particle = structure.addParticle(segment)
particle.pos = positions[atom_idx]
particle.weight = 1.
particle.sigma = 0.5
particle.type = types[atom_idx]
# Compute CoMass/CoGeom
if use_center_of_geom:
com_weight = 1.
else:
com_weight = elements.periodic_table[particle.type].mass
com_weight_total += com_weight
com = com + com_weight*positions[atom_idx]
atom_idx += 1
com = com/com_weight_total
# Add CoM particle if requested
if add_fragment_com:
segment = structure.addSegment()
segment.name = "%s.COM" % seg_type
particle = structure.addParticle(segment)
particle.pos = com
particle.weight = 0.
particle.sigma = 0.5
particle.type = "COM"
structures.append(structure)
if log: log << log.endl
return structures
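A short usage sketch of the reader added above; the input file name and output pattern are placeholders rather than part of the commit:
# Illustrative only: 'molecules.xyz' is a placeholder input file
structures = structures_from_xyz('molecules.xyz', do_partition=True, add_fragment_com=True)
for struct in structures:
    write_xyz('%s.part.xyz' % struct.label, struct)  # re-export each partitioned structure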
def write_xyz(xyz_file, structure):
ofs = open(xyz_file, 'w')
ofs.write('%d\n%s\n' % (len(list(structure.particles)), structure.label))
......