Commit 4e97ffbe by Philipp Frank

### Helper for converting a single-spectrum fluctuation level to multi-spectra

parent 3d3e41dc
 ... @@ -70,6 +70,19 @@ def testAmplitudesConsistency(seed, sspace): ... @@ -70,6 +70,19 @@ def testAmplitudesConsistency(seed, sspace): print("Expected total fluct. Std: " + str(tot_flm)) print("Expected total fluct. Std: " + str(tot_flm)) print("Estimated total fluct. Std: " + str(fluct_total)) print("Estimated total fluct. Std: " + str(fluct_total)) fa = ift.CorrelatedFieldMaker() fa.add_fluctuations(target1, intergated_fluct_std1, 1., 3.1, 1., .5, .1, -4, 1., 'freq') m = 3. x = fa.moment_slice_to_average(m) fa.add_fluctuations(target0, x, 1.5, 1.1, 2., 2.1, .5, -2, 1., 'spatial', 0) op = fa.finalize(offset_std, .1, '') em, estd = fa.stats(fa.slice_fluctuation(0),samples) print("Forced slice fluct. space Std: "+str(m)) print("Expected slice fluct. Std: " + str(em)) np.testing.assert_allclose(m, em, rtol=0.5) # Move to tests # Move to tests ... ...
class _lognormal_moment_matching(Operator):
    """Operator producing a lognormal variate whose linear-space mean and
    standard deviation match the requested ``mean`` and ``sig``.

    It draws a standard-normal latent variable under ``key``, shifts and
    scales it to the matching log-space parameters, and exponentiates the
    result. The requested moments remain queryable via the ``mean`` and
    ``std`` properties.
    """

    def __init__(self, mean, sig, key):
        key = str(key)
        # Convert the requested linear-space moments into the parameters of
        # the underlying Gaussian in log-space.
        logmean, logsig = _lognormal_moments(mean, sig)
        # Keep the requested moments so downstream code (e.g. the
        # slice-to-average helper) can read them back.
        self._mean = mean
        self._sig = sig
        # Gaussian in log-space, exponentiated back to linear space.
        op = _normal(logmean, logsig, key).exp()
        self._domain = op.domain
        self._target = op.target
        # Delegate application directly to the wrapped operator.
        # NOTE(review): assigning a bound method over the class-level
        # `apply` is unusual for an Operator subclass — confirm this is the
        # intended delegation pattern in this codebase.
        self.apply = op.apply

    @property
    def mean(self):
        # The requested linear-space mean (not re-estimated from samples).
        return self._mean

    @property
    def std(self):
        # The requested linear-space standard deviation.
        return self._sig
def moment_slice_to_average(self, fluctuations_slice_mean, nsamples=1000):
    """Convert a desired per-slice fluctuation level into the total
    fluctuation value to pass to ``add_fluctuations``.

    The correction factor is estimated by Monte-Carlo sampling the
    lognormal fluctuation priors of all amplitudes registered so far in
    ``self._a`` and averaging ``sqrt(prod(fluct**2 + 1))`` over the draws.

    Parameters
    ----------
    fluctuations_slice_mean : float
        Desired mean fluctuation amplitude of a single slice; must be
        strictly positive.
    nsamples : int, optional
        Number of Monte-Carlo samples used for the estimate (default 1000).

    Returns
    -------
    float
        ``fluctuations_slice_mean`` divided by the estimated correction
        factor; with no amplitudes registered, the input value itself.

    Raises
    ------
    ValueError
        If ``fluctuations_slice_mean`` is not strictly positive.
    """
    fluctuations_slice_mean = float(fluctuations_slice_mean)
    # Explicit exception instead of `assert`: asserts are stripped when
    # Python runs with -O, silently disabling the validation.
    if fluctuations_slice_mean <= 0:
        raise ValueError("fluctuations_slice_mean must be positive")
    scm = 1.
    for a in self._a:
        m, std = a.fluctuation_amplitude.mean, a.fluctuation_amplitude.std
        # Recover the log-space parameters of this amplitude's lognormal
        # fluctuation prior.
        mu, sig = _lognormal_moments(m, std)
        # Draw lognormal fluctuation samples and accumulate the product of
        # (fluct**2 + 1) across all registered amplitudes, sample-wise.
        flm = np.exp(mu + sig*np.random.normal(size=nsamples))
        scm *= flm**2 + 1.
    # Average over the Monte-Carlo draws to get the scalar correction.
    scm = np.mean(np.sqrt(scm))
    return fluctuations_slice_mean/scm
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!