...

Commits (4)
 ## likelihood module ### likelihood provides essential functions related to calculating likelihood ### simple likelihood provides the trivial likelihood calculation ### ensemble likelihood provides ensemble likelihood calculation
 ... ... @@ -41,11 +41,9 @@ class EnsembleLikelihood(Likelihood): def _process_simple_field(self, observable, measured_data, data_covariance): # https://en.wikipedia.org/wiki/Sherman%E2%80%93Morrison_formula#Generalization # B = A^{-1} + U U^dagger # A = data_covariance # B^{-1} c = (A_inv - # A_inv U (I_k + U^dagger A_inv U)^{-1} U^dagger A_inv) c # Theo's scheme of inverting matrices now modified by Jiaxin # use numpy.linalg.solve(A,b) to get inv(A)*b # thus safely avoid any possible difficulties brought by inverting matrices data_covariance = data_covariance.copy() k = observable.shape[0] n = observable.shape[1] ... ... @@ -54,82 +52,33 @@ class EnsembleLikelihood(Likelihood): obs_mean = observable.ensemble_mean().val.get_full_data() U = obs_val - obs_mean U *= np.sqrt(n) # OAS calculation modified by Jiaxin # empirical S S = np.vdot(U,U)/k # trace of S TrS = np.trace(S) # trace of S^2 TrS2 = np.trace(np.dot(S,S)) # compute quantities for OAS estimator mu = np.vdot(U, U)/k/n alpha = (np.einsum(U, [0, 1], U, [2, 1])**2).sum() alpha /= k**2 numerator = (1 - 2./n)*alpha + (mu*n)**2 denominator = (k + 1 - 2./n) * (alpha - ((mu*n)**2)/n) numerator = (1.-2./n)*TrS2 + TrS**2 denominator = (k+1.-2./n)*(TrS2-(TrS**2)/n) if denominator == 0: rho = 1 else: rho = np.min([1, numerator/denominator]) self.logger.debug("rho: %f = %f / %f" % (rho, numerator, denominator)) # rescale U half/half V = U * np.sqrt(1-rho) / np.sqrt(k) self.logger.info(('data_cov', np.mean(data_covariance), 'rho*mu', rho*mu, 'rho', rho, 'mu', mu, 'alpha', alpha)) B = data_covariance + rho*mu V_B = V/B # build middle-matrix (kxk) middle = (np.eye(k) + np.einsum(V.conjugate(), [0, 1], V_B, [2, 1])) middle = np.linalg.inv(middle) c = measured_data - obs_mean self.logger.debug("rho: %f = %f / %f" % (rho, numerator, denominator)) # total covariance AC = data_covariance + (1-rho)*S + np.eye(n)*rho*TrS/n # obs - mean(sim) dc = measured_data - obs_mean # If the data was incomplete, i.e. 
contains np.NANs, set those values # to zero. c = np.nan_to_num(c) # assuming that A == A^dagger, this can be shortened # a_c = A.inverse_times(c) # u_a_c = a_c.dot(U, spaces=1) # u_a_c = u_a_c.conjugate() # and: double conjugate shouldn't make a difference # u_a_c = c.conjugate().dot(a_u, spaces=1).conjugate() # Pure NIFTy is # u_a_c = c.dot(a_u, spaces=1) # u_a_c_val = u_a_c.val.get_full_data() V_B_c = np.einsum(c, [1], V_B, [0, 1]) first_summand_val = c/B second_summand_val = np.einsum(middle, [0, 1], V_B_c, [1]) second_summand_val = np.einsum(V_B, [0, 1], second_summand_val, [0]) # # second_summand_val *= -1 # second_summand = first_summand.copy_empty() # second_summand.val = second_summand_val result_1 = np.vdot(c, first_summand_val) result_2 = -np.vdot(c, second_summand_val) # compute regularizing determinant of the covariance # det(A + UV^T) = det(A) det(I + V^T A^-1 U) if self.use_determinant: log_det = np.sum(np.log(data_covariance + np.sum((obs_val-obs_mean)**2, axis=0)/k))/n else: log_det = 0. result = -0.5*(result_1 + result_2 + log_det) self.logger.info("Calculated (%s): -1/2(%g + %g + %g) = %g" % (self.observable_name, result_1, result_2, log_det, result)) # result_array[i] = result # total_result = result_array.mean() dc = np.nan_to_num(dc) # calculate likelihood (sign_AC, logdet_AC) = np.linalg.slogdet(AC*2.*np.pi) result = -0.5*(dc*np.linalg.solve(AC,dc)) - 0.5*sign_AC*logdet_AC return result
 ... ... @@ -37,7 +37,9 @@ class SimpleLikelihood(Likelihood): diff = data - obs_mean if self.data_covariance is not None: right = diff/self.data_covariance # modified by Jiaxin # it is not a good practice to directly invert matrices right = np.linalg.solve (self.data_covariance,diff) else: right = diff return -0.5 * np.vdot(diff, right)
 ... ... @@ -18,8 +18,8 @@ from imagine.magnetic_fields.magnetic_field import MagneticField class ConstantMagneticField(MagneticField): @property def parameter_list(self): parameter_list = ['b_x', 'b_y', 'b_z'] ... ...
 ... ... @@ -17,14 +17,11 @@ # and financially supported by the Studienstiftung des deutschen Volkes. import numpy as np from imagine.magnetic_fields.magnetic_field.magnetic_field_factory \ import MagneticFieldFactory from imagine.magnetic_fields.magnetic_field.magnetic_field_factory import MagneticFieldFactory from constant_magnetic_field import ConstantMagneticField class ConstantMagneticFieldFactory(MagneticFieldFactory): @property def magnetic_field_class(self): return ConstantMagneticField ... ...
 ... ... @@ -17,9 +17,7 @@ # and financially supported by the Studienstiftung des deutschen Volkes. from mpi4py import MPI import simplejson as json import numpy as np from nifty import Field, FieldArray, RGSpace ... ... @@ -29,13 +27,12 @@ comm = MPI.COMM_WORLD size = comm.size rank = comm.rank class MagneticField(Field): def __init__(self, parameters={}, domain=None, val=None, dtype=None, distribution_strategy=None, copy=False, random_seed=None): super(MagneticField, self).__init__( domain=domain, super(MagneticField, self).__init__(domain=domain, val=val, dtype=dtype, distribution_strategy=distribution_strategy, ... ... @@ -47,19 +44,17 @@ class MagneticField(Field): assert(isinstance(self.domain[2], FieldArray)) self._parameters = {} for p in self.descriptor_lookup: for p in self.descriptor_lookup:#{ self._parameters[p] = np.float(parameters[p]) casted_random_seed = distributed_data_object( global_shape=(self.shape[0],), #} casted_random_seed = distributed_data_object(global_shape=(self.shape[0],), dtype=np.int, distribution_strategy=distribution_strategy) if random_seed is None: if random_seed is None:#{ random_seed = np.random.randint(np.uint32(-1)/3, size=self.shape[0]) random_seed = comm.bcast(random_seed, root=0) #} casted_random_seed[:] = random_seed self.random_seed = casted_random_seed ... ... @@ -72,9 +67,9 @@ class MagneticField(Field): return self._parameters def set_val(self, new_val=None, copy=False): if new_val is not None: raise RuntimeError("Setting the field values explicitly is not " "supported by MagneticField.") if new_val is not None:#{ raise RuntimeError("Setting the field values explicitly is not supported by MagneticField.") #} self._val = self._create_field() def _to_hdf5(self, hdf5_group): ... ...
 ... ... @@ -17,16 +17,11 @@ # and financially supported by the Studienstiftung des deutschen Volkes. import numpy as np from keepers import Loggable from imagine.carrier_mapper import unity_mapper from nifty import FieldArray, RGSpace from magnetic_field import MagneticField class MagneticFieldFactory(Loggable, object): def __init__(self, box_dimensions, resolution): ... ... @@ -34,9 +29,7 @@ class MagneticFieldFactory(Loggable, object): self.box_dimensions = box_dimensions self.resolution = resolution self._parameter_defaults = self._initial_parameter_defaults self._variable_to_parameter_mappings = \ self._initial_variable_to_parameter_mappings self._variable_to_parameter_mappings = self._initial_variable_to_parameter_mappings distances = np.array(self.box_dimensions) / np.array(self.resolution) self._grid_space = RGSpace(shape=self.resolution, distances=distances) ... ... @@ -44,9 +37,9 @@ class MagneticFieldFactory(Loggable, object): self._ensemble_cache = {} def _get_ensemble(self, ensemble_size): if ensemble_size not in self._ensemble_cache: self._ensemble_cache[ensemble_size] = \ FieldArray(shape=(ensemble_size,)) if ensemble_size not in self._ensemble_cache:#{ self._ensemble_cache[ensemble_size] = FieldArray(shape=(ensemble_size,)) #} return self._ensemble_cache[ensemble_size] @property ... ... @@ -56,8 +49,9 @@ class MagneticFieldFactory(Loggable, object): @box_dimensions.setter def box_dimensions(self, box_dimensions): dim = tuple(np.array(box_dimensions, dtype=np.float)) if len(dim) != 3: if len(dim) != 3:#{ raise ValueError("Input of box_dimensions must have length three.") #} self._box_dimensions = dim @property ... ... @@ -67,8 +61,9 @@ class MagneticFieldFactory(Loggable, object): @resolution.setter def resolution(self, resolution): resolution = tuple(np.array(resolution, dtype=np.int)) if len(resolution) != 3: if len(resolution) != 3:#{ raise ValueError("Input for resolution must have length three.") #} self._resolution = resolution @property ... 
... @@ -104,10 +99,11 @@ class MagneticFieldFactory(Loggable, object): @property def variable_defaults(self): variable_defaults = {} for parameter in self.parameter_defaults: for parameter in self.parameter_defaults:#{ low, high = self.variable_to_parameter_mappings[parameter] default = self.parameter_defaults[parameter] variable_defaults[parameter] = (default - low)/(high - low) #} return variable_defaults @property ... ... @@ -122,17 +118,17 @@ class MagneticFieldFactory(Loggable, object): value: [min, max] """ for k, v in new_mapping.items(): if k in self._variable_to_parameter_mappings: if k in self._variable_to_parameter_mappings:#{ key = str(k) value = [np.float(v[0]), np.float(v[1])] self._variable_to_parameter_mappings.update({key: value}) self.logger.debug("Updated variable_to_parameter_mapping %s " "to %s" % (key, str(value))) self.logger.debug("Updated variable_to_parameter_mapping %s to %s" % (key, str(value))) #} def _map_variables_to_parameters(self, variables): parameter_dict = {} for variable_name in variables: if variable_name in self.variable_to_parameter_mappings: if variable_name in self.variable_to_parameter_mappings:#{ mapping = self.variable_to_parameter_mappings[variable_name] mapped_variable = unity_mapper(variables[variable_name], a=mapping[0], ... ... @@ -141,8 +137,10 @@ class MagneticFieldFactory(Loggable, object): # a=mapping[0], # m=mapping[1], # b=mapping[2]) else: #} else:#{ mapped_variable = np.float(variables[variable_name]) #} parameter_dict[variable_name] = mapped_variable return parameter_dict ... ... 
@@ -150,16 +148,12 @@ class MagneticFieldFactory(Loggable, object): mapped_variables = self._map_variables_to_parameters(variables) work_parameters = self.parameter_defaults.copy() work_parameters.update(mapped_variables) domain = (self._get_ensemble(ensemble_size), self._grid_space, self._vector) result_magnetic_field = self.magnetic_field_class( domain=domain, result_magnetic_field = self.magnetic_field_class(domain=domain, parameters=work_parameters, distribution_strategy='equal', random_seed=random_seed) self.logger.debug("Generated magnetic field with work-parameters %s" % work_parameters) self.logger.debug("Generated magnetic field with work-parameters %s" % work_parameters) return result_magnetic_field
 ... ... @@ -18,19 +18,20 @@ from imagine.magnetic_fields.magnetic_field import MagneticField class WMAP3yrMagneticField(MagneticField): @property def descriptor_lookup(self): lookup = \ {'b0': ['./MagneticField/Regular/WMAP/b0', 'value'], lookup = {'b0': ['./MagneticField/Regular/WMAP/b0', 'value'], 'psi0': ['./MagneticField/Regular/WMAP/psi0', 'value'], 'psi1': ['./MagneticField/Regular/WMAP/psi1', 'value'], 'chi0': ['./MagneticField/Regular/WMAP/chi0', 'value'], 'random_rms': ['./MagneticField/Random/Global/rms', 'value'], 'random_rho': ['./MagneticField/Random/Global/rho', 'value'], 'random_a0': ['./MagneticField/Random/Global/a0', 'value'] 'random_rms': ['./MagneticField/Random/Global/ES/rms', 'value'], 'random_rho': ['./MagneticField/Random/Global/ES/rho', 'value'], 'random_a0': ['./MagneticField/Random/Global/ES/a0', 'value'], 'random_k0': ['./MagneticField/Random/Global/ES/k0', 'value'], 'random_r0': ['./MagneticField/Random/Global/ES/r0', 'value'], 'random_z0': ['./MagneticField/Random/Global/ES/z0', 'value'] } return lookup ... ...
 ... ... @@ -16,13 +16,11 @@ # IMAGINE is being developed at the Max-Planck-Institut fuer Astrophysik # and financially supported by the Studienstiftung des deutschen Volkes. from imagine.magnetic_fields.magnetic_field.magnetic_field_factory \ import MagneticFieldFactory from imagine.magnetic_fields.magnetic_field.magnetic_field_factory import MagneticFieldFactory from wmap3yr_magnetic_field import WMAP3yrMagneticField class WMAP3yrMagneticFieldFactory(MagneticFieldFactory): @property def magnetic_field_class(self): return WMAP3yrMagneticField ... ... @@ -33,9 +31,13 @@ class WMAP3yrMagneticFieldFactory(MagneticFieldFactory): 'psi0': 27.0, 'psi1': 0.9, 'chi0': 25.0, # using ES random field generator in hammurabiX 'random_rms': 2.0, 'random_rho': 0.5, 'random_a0': 1.7} 'random_a0': 1.7, 'random_k0': 0.5, 'random_r0': 8.0, 'random_z0': 1.0} return defaults @property ... ... @@ -43,13 +45,15 @@ class WMAP3yrMagneticFieldFactory(MagneticFieldFactory): return self._generate_variable_to_parameter_mapping_defaults(n=3) def _generate_variable_to_parameter_mapping_defaults(self, n): defaults = { 'b0': self._positive_interval(6.0, 1.9, n), # b0 astro-ph/0603450 defaults = {'b0': self._positive_interval(6.0, 1.9, n), # b0 astro-ph/0603450 'psi0': self._positive_interval(27.0, 7.0, n), # psi0 astro-ph/0603450 'psi1': self._positive_interval(0.9, 5.0, n), # psi1 astro-ph/0603450 'chi0': self._positive_interval(25, 8.0, n), # xsi0 astro-ph/0603450 'random_rms': self._positive_interval(2.0, 0.6, n), 'random_rho': self._positive_interval(0.5, 0.166, n), 'random_a0': self._positive_interval(1.7, 0.5, n) 'random_a0': self._positive_interval(1.7, 0.5, n), 'random_k0': self._positive_interval(0.5, 0.15, n), 'random_r0': self._positive_interval(8.0, 2.0, n), 'random_z0': self._positive_interval(1.0, 0.3, n) } return defaults
 ... ... @@ -18,18 +18,15 @@ from nifty import Field, FieldArray class Observable(Field): def __init__(self, domain=None, val=None, dtype=None, distribution_strategy=None, copy=False): super(Observable, self).__init__( domain=domain, super(Observable, self).__init__(domain=domain, val=val, dtype=dtype, distribution_strategy=distribution_strategy, copy=copy) assert(len(self.domain) == 2) assert(isinstance(self.domain[0], FieldArray)) ... ... @@ -42,8 +39,8 @@ class Observable(Field): return self._ensemble_mean def _to_hdf5(self, hdf5_group): if hasattr(self, '_ensemble_mean'): return_dict = {'ensemble_mean': self._ensemble_mean} if hasattr(self, "_ensemble_mean"): return_dict = {"ensemble_mean": self._ensemble_mean} else: return_dict = {} return_dict.update( ... ... @@ -55,7 +52,7 @@ class Observable(Field): new_field = super(Observable, cls)._from_hdf5(hdf5_group=hdf5_group, repository=repository) try: observable_mean = repository.get('ensemble_mean', hdf5_group) observable_mean = repository.get("ensemble_mean", hdf5_group) new_field._observable_mean = observable_mean except(KeyError): pass ... ...
This diff is collapsed.
This diff is collapsed.
 ... ... @@ -19,13 +19,14 @@ from .magnetic_field_model import MagneticFieldModel from imagine.magnetic_fields.wmap3yr_magnetic_field import WMAP3yrMagneticField class WMAP3yrMixin(MagneticFieldModel): def update_parameter_xml(self, root): custom_parameters = [ ['./MagneticField/Regular', 'type', 'WMAP'], custom_parameters = [['./MagneticField/Regular', 'type', 'WMAP'], ['./MagneticField/Regular','cue','1'] ['./MagneticField/Random', 'cue', '1'], ['./MagneticField/Random', 'type', 'Global']] ['./MagneticField/Random', 'type', 'Global'], ['./MagneticField/Random/Global','type','ES']] for parameter in custom_parameters: root.find(parameter[0]).set(parameter[1], str(parameter[2])) ... ...
 ... ... @@ -17,11 +17,10 @@ # and financially supported by the Studienstiftung des deutschen Volkes. import xml.etree.ElementTree as et from .observable_mixin import ObservableMixin class DMMixin(ObservableMixin): @property def obs_name(self): return 'dm' ... ... @@ -31,7 +30,6 @@ class DMMixin(ObservableMixin): return ['dm'] def update_parameter_xml(self, root): element = et.Element('DM', {'cue': '1', 'filename': self.obs_name+'.fits'}) element = et.Element('DM', {'cue': '1', 'filename': self.obs_name+'.fits'}) output = root.find('Obsout') output.append(element)
 ... ... @@ -17,11 +17,10 @@ # and financially supported by the Studienstiftung des deutschen Volkes. import xml.etree.ElementTree as et from .observable_mixin import ObservableMixin class FDMixin(ObservableMixin): @property def obs_name(self): return 'fd' ... ... @@ -31,7 +30,6 @@ class FDMixin(ObservableMixin): return ['fd'] def update_parameter_xml(self, root): element = et.Element('Faraday', {'cue': '1', 'filename': self.obs_name+'.fits'}) element = et.Element('Faraday', {'cue': '1', 'filename': self.obs_name+'.fits'}) output = root.find('Obsout') output.append(element)
 ... ... @@ -18,13 +18,12 @@ import abc import os import healpy from keepers import Loggable class ObservableMixin(Loggable, object): @abc.abstractproperty def obs_name(self): raise NotImplementedError ... ... @@ -42,10 +41,10 @@ class ObservableMixin(Loggable, object): map_list = self._read_fits_file(path=working_directory, name=self.obs_name + '.fits', nside=nside) for i, map_component in enumerate(map_list): for i, map_component in enumerate(map_list):#{ temp_obs = observable_dict[self.component_names[i]] temp_obs.val.data[local_ensemble_index] = map_component #} def _read_fits_file(self, path, name, nside): map_path = os.path.join(path, name) ... ... @@ -53,8 +52,7 @@ class ObservableMixin(Loggable, object): i = 0 while True: try: loaded_map = healpy.read_map(map_path, verbose=False, field=i) loaded_map = healpy.read_map(map_path, verbose=False, field=i) # loaded_map = healpy.ud_grade(loaded_map, nside_out=nside) result_list += [loaded_map] i += 1 ... ...
 ... ... @@ -17,11 +17,10 @@ # and financially supported by the Studienstiftung des deutschen Volkes. import xml.etree.ElementTree as et from .observable_mixin import ObservableMixin class SyncMixin(ObservableMixin): def __init__(self, frequency='23'): self.frequency = str(frequency) ... ...
This diff is collapsed.