FROM ubuntu:latest
RUN apt-get update
RUN apt-get install -y build-essential python python-pip python-dev git gfortran autoconf gsl-bin libgsl-dev wget unzip
# may need python-tk
RUN apt-get install -y build-essential python python-pip python-dev git gfortran autoconf gsl-bin libgsl-dev wget unzip vim cmake
RUN pip install numpy scipy cython astropy ipython==5.3.0
RUN mkdir /home/Downloads
......@@ -15,18 +15,11 @@ RUN ./configure --prefix=/usr/local/ && make && make install
WORKDIR ..
#FFTW
RUN wget http://www.fftw.org/fftw-3.3.5.tar.gz && tar xzf fftw-3.3.5.tar.gz
WORKDIR fftw-3.3.5
RUN wget http://www.fftw.org/fftw-3.3.8.tar.gz && tar xzf fftw-3.3.8.tar.gz
WORKDIR fftw-3.3.8
RUN ./configure --enable-threads --enable-openmp --enable-shared --prefix=/usr/local/ && make && make install
WORKDIR ..
#GSL
RUN wget http://nl.mirror.babylon.network/gnu/gsl/gsl-2.3.tar.gz && tar xzf gsl-2.3.tar.gz
WORKDIR gsl-2.3
RUN ./configure --enable-shared --prefix=/usr/local/ && make && make install
WORKDIR ..
ENV LD_LIBRARY_PATH=/usr/local/lib
#HEALPIX
RUN wget http://downloads.sourceforge.net/project/healpix/Healpix_3.31/Healpix_3.31_2016Aug26.tar.gz && tar xzf Healpix_3.31_2016Aug26.tar.gz
WORKDIR Healpix_3.31
......@@ -37,9 +30,8 @@ RUN echo '4\n\
y\n\
0\n'\
> hlpx_config
RUN ./configure < healpy_config && make
RUN ./configure -L < hlpx_config && make
WORKDIR ..
#RUN [ -r /root/.healpix/3_31_Linux/config ] && . /root/.healpix/3_31_Linux/config
ENV HEALPIX_TARGET optimized_gcc
ENV HEALPIX /home/Downloads/Healpix_3.31
......@@ -47,15 +39,7 @@ ENV HEALPIX /home/Downloads/Healpix_3.31
RUN pip install healpy
#(Py)MultiNest
RUN apt-get update && apt-get install -y libblas3 libblas-dev \
liblapack3 liblapack-dev \
libatlas3-base libatlas-dev \
cmake \
build-essential \
git \
gfortran\
python-tk
#RUN apt-get install -y libopenmpi-dev openmpi-bin openmpi-doc
RUN apt-get install -y libblas3 libblas-dev liblapack3 liblapack-dev libatlas-base-dev libatlas3-base
RUN pip install numpy scipy matplotlib progressbar ipython==5.3.0
RUN git clone https://github.com/JohannesBuchner/MultiNest.git
......@@ -73,7 +57,7 @@ RUN apt-get install -y libopenmpi-dev openmpi-bin openmpi-doc
RUN pip install mpi4py
#hdf5
RUN apt-get install -y libhdf5-10 libhdf5-dev libhdf5-openmpi-10 libhdf5-openmpi-dev hdf5-tools
RUN apt-get install -y libhdf5-100 libhdf5-dev libhdf5-openmpi-100 libhdf5-openmpi-dev hdf5-tools
#h5py
RUN wget https://api.github.com/repos/h5py/h5py/tags -O - | grep tarball_url | grep -v rc | head -n 1 | cut -d '"' -f 4 | wget -i - -O h5py.tar.gz
......@@ -87,13 +71,8 @@ RUN python setup.py build
RUN python setup.py install
WORKDIR ..
#hampy
RUN pip install jupyter pandas
ARG CACHE_DATE=2017-10-31
#NIFTy
RUN git clone https://gitlab.mpcdf.mpg.de/ift/NIFTy.git -b master
RUN git clone https://gitlab.mpcdf.mpg.de/ift/NIFTy.git
WORKDIR NIFTy
RUN python setup.py install
WORKDIR ..
......@@ -101,12 +80,15 @@ WORKDIR ..
#Hammurabi
RUN git clone https://bitbucket.org/hammurabicode/hamx
WORKDIR hamx
RUN make -f install/Makefile
ENV HAMMURABI=/home/Downloads/hamx/bin/hamx
#RUN mkdir build && cd build && cmake .. && make install
#ENV HAMMURABI=/home/Downloads/hamx/bin/hamx
WORKDIR ..
#for pyhamx
RUN pip install jupyter pandas matplotlib
#IMAGINE
RUN git clone https://gitlab.mpcdf.mpg.de/ift/IMAGINE.git -b master
RUN git clone https://gitlab.mpcdf.mpg.de/ift/IMAGINE.git -b jxw
WORKDIR IMAGINE
RUN python setup.py install
WORKDIR ..
......
## likelihood module
### likelihood
provides essential functions related to likelihood calculation
### simple likelihood
provides the trivial likelihood calculation
### ensemble likelihood
provides ensemble likelihood calculation
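For orientation, the following is a minimal sketch (not the module itself) of the Gaussian log-likelihood that both variants ultimately evaluate, assuming a measured data vector d, the ensemble mean of the simulated observable mu, and a full covariance matrix C; all names here are illustrative only.

import numpy as np

def gaussian_log_likelihood(d, mu, C):
    # residual between measurement and simulated ensemble mean
    dc = np.nan_to_num(d - mu)
    # log(det(2*pi*C)) computed stably via slogdet
    sign, logdet = np.linalg.slogdet(2. * np.pi * C)
    # solve C x = dc instead of forming C^{-1} explicitly
    chi2 = np.vdot(dc, np.linalg.solve(C, dc))
    return -0.5 * (chi2 + logdet)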
......@@ -41,11 +41,9 @@ class EnsembleLikelihood(Likelihood):
def _process_simple_field(self, observable, measured_data,
data_covariance):
# https://en.wikipedia.org/wiki/Sherman%E2%80%93Morrison_formula#Generalization
# B = A^{-1} + U U^dagger
# A = data_covariance
# B^{-1} c = (A_inv -
# A_inv U (I_k + U^dagger A_inv U)^{-1} U^dagger A_inv) c
# Theo's matrix-inversion scheme, now modified by Jiaxin:
# use numpy.linalg.solve(A, b) to get inv(A)*b,
# thus safely avoiding the numerical difficulties of explicit matrix inversion
data_covariance = data_covariance.copy()
k = observable.shape[0]
n = observable.shape[1]
......@@ -54,82 +52,33 @@ class EnsembleLikelihood(Likelihood):
obs_mean = observable.ensemble_mean().val.get_full_data()
U = obs_val - obs_mean
U *= np.sqrt(n)
# OAS calculation modified by Jiaxin
# empirical covariance S (n x n), built from the (k, n) ensemble deviations U
S = np.dot(U.conjugate().T, U)/k
# trace of S
TrS = np.trace(S)
# trace of S^2
TrS2 = np.trace(np.dot(S,S))
# compute quantities for OAS estimator
mu = np.vdot(U, U)/k/n
alpha = (np.einsum(U, [0, 1], U, [2, 1])**2).sum()
alpha /= k**2
numerator = (1 - 2./n)*alpha + (mu*n)**2
denominator = (k + 1 - 2./n) * (alpha - ((mu*n)**2)/n)
numerator = (1.-2./n)*TrS2 + TrS**2
denominator = (k+1.-2./n)*(TrS2-(TrS**2)/n)
if denominator == 0:
rho = 1
else:
rho = np.min([1, numerator/denominator])
self.logger.debug("rho: %f = %f / %f" % (rho, numerator, denominator))
# rescale U half/half
V = U * np.sqrt(1-rho) / np.sqrt(k)
self.logger.info(('data_cov', np.mean(data_covariance),
'rho*mu', rho*mu,
'rho', rho,
'mu', mu,
'alpha', alpha))
B = data_covariance + rho*mu
V_B = V/B
# build middle-matrix (kxk)
middle = (np.eye(k) +
np.einsum(V.conjugate(), [0, 1],
V_B, [2, 1]))
middle = np.linalg.inv(middle)
c = measured_data - obs_mean
self.logger.debug("rho: %f = %f / %f" % (rho, numerator, denominator))
# total covariance
AC = data_covariance + (1-rho)*S + np.eye(n)*rho*TrS/n
# obs - mean(sim)
dc = measured_data - obs_mean
# If the data was incomplete, i.e. contains np.NANs, set those values
# to zero.
c = np.nan_to_num(c)
# assuming that A == A^dagger, this can be shortend
# a_c = A.inverse_times(c)
# u_a_c = a_c.dot(U, spaces=1)
# u_a_c = u_a_c.conjugate()
# and: double conjugate shouldn't make a difference
# u_a_c = c.conjugate().dot(a_u, spaces=1).conjugate()
# Pure NIFTy is
# u_a_c = c.dot(a_u, spaces=1)
# u_a_c_val = u_a_c.val.get_full_data()
V_B_c = np.einsum(c, [1], V_B, [0, 1])
first_summand_val = c/B
second_summand_val = np.einsum(middle, [0, 1], V_B_c, [1])
second_summand_val = np.einsum(V_B, [0, 1],
second_summand_val, [0])
# # second_summand_val *= -1
# second_summand = first_summand.copy_empty()
# second_summand.val = second_summand_val
result_1 = np.vdot(c, first_summand_val)
result_2 = -np.vdot(c, second_summand_val)
# compute regularizing determinant of the covariance
# det(A + UV^T) = det(A) det(I + V^T A^-1 U)
if self.use_determinant:
log_det = np.sum(np.log(data_covariance +
np.sum((obs_val-obs_mean)**2, axis=0)/k))/n
else:
log_det = 0.
result = -0.5*(result_1 + result_2 + log_det)
self.logger.info("Calculated (%s): -1/2(%g + %g + %g) = %g" %
(self.observable_name,
result_1, result_2, log_det, result))
# result_array[i] = result
# total_result = result_array.mean()
dc = np.nan_to_num(dc)
# calculate the Gaussian log-likelihood:
# -0.5 * dc^T AC^{-1} dc - 0.5 * log(det(2*pi*AC))
(sign_AC, logdet_AC) = np.linalg.slogdet(AC*2.*np.pi)
result = -0.5*np.vdot(dc, np.linalg.solve(AC, dc)) - 0.5*sign_AC*logdet_AC
return result
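The shrinkage factor rho computed above follows the OAS (Oracle Approximating Shrinkage) prescription; the method then adds the shrunk ensemble covariance to data_covariance to obtain the total AC. A self-contained sketch of just the estimator, assuming an ensemble matrix U of shape (k, n) whose rows are deviations from the ensemble mean (names illustrative):

import numpy as np

def oas_covariance(U):
    # U: (k, n) matrix of ensemble deviations from the mean
    k, n = U.shape
    S = np.dot(U.conjugate().T, U) / k          # empirical covariance (n x n)
    TrS = np.trace(S)
    TrS2 = np.trace(np.dot(S, S))
    numerator = (1. - 2. / n) * TrS2 + TrS ** 2
    denominator = (k + 1. - 2. / n) * (TrS2 - TrS ** 2 / n)
    rho = 1. if denominator == 0 else min(1., numerator / denominator)
    # shrink the empirical covariance towards the scaled identity
    return (1. - rho) * S + np.eye(n) * rho * TrS / n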
......@@ -37,7 +37,9 @@ class SimpleLikelihood(Likelihood):
diff = data - obs_mean
if self.data_covariance is not None:
right = diff/self.data_covariance
# modified by Jiaxin:
# directly inverting matrices is bad practice; solve the linear system instead
right = np.linalg.solve(self.data_covariance, diff)
else:
right = diff
return -0.5 * np.vdot(diff, right)
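The preference for np.linalg.solve over an explicit inverse can be checked with a short comparison; the two are equivalent in exact arithmetic, but the solve avoids forming C^{-1} and is generally cheaper and better conditioned. A sketch with an arbitrary test matrix:

import numpy as np

rng = np.random.RandomState(0)
A = rng.rand(100, 100)
C = np.dot(A, A.T) + 100. * np.eye(100)   # a well-conditioned covariance-like matrix
d = rng.rand(100)

x_inv = np.dot(np.linalg.inv(C), d)       # explicit inverse (discouraged)
x_solve = np.linalg.solve(C, d)           # factorization-based solve (preferred)
print(np.allclose(x_inv, x_solve))        # True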
......@@ -18,8 +18,8 @@
from imagine.magnetic_fields.magnetic_field import MagneticField
class ConstantMagneticField(MagneticField):
@property
def parameter_list(self):
parameter_list = ['b_x', 'b_y', 'b_z']
......
......@@ -17,14 +17,11 @@
# and financially supported by the Studienstiftung des deutschen Volkes.
import numpy as np
from imagine.magnetic_fields.magnetic_field.magnetic_field_factory \
import MagneticFieldFactory
from imagine.magnetic_fields.magnetic_field.magnetic_field_factory import MagneticFieldFactory
from constant_magnetic_field import ConstantMagneticField
class ConstantMagneticFieldFactory(MagneticFieldFactory):
@property
def magnetic_field_class(self):
return ConstantMagneticField
......
......@@ -17,9 +17,7 @@
# and financially supported by the Studienstiftung des deutschen Volkes.
from mpi4py import MPI
import simplejson as json
import numpy as np
from nifty import Field, FieldArray, RGSpace
......@@ -29,13 +27,12 @@ comm = MPI.COMM_WORLD
size = comm.size
rank = comm.rank
class MagneticField(Field):
def __init__(self, parameters={}, domain=None, val=None, dtype=None,
distribution_strategy=None, copy=False, random_seed=None):
super(MagneticField, self).__init__(
domain=domain,
super(MagneticField, self).__init__(domain=domain,
val=val,
dtype=dtype,
distribution_strategy=distribution_strategy,
......@@ -47,19 +44,17 @@ class MagneticField(Field):
assert(isinstance(self.domain[2], FieldArray))
self._parameters = {}
for p in self.descriptor_lookup:
for p in self.descriptor_lookup:#{
self._parameters[p] = np.float(parameters[p])
casted_random_seed = distributed_data_object(
global_shape=(self.shape[0],),
#}
casted_random_seed = distributed_data_object(global_shape=(self.shape[0],),
dtype=np.int,
distribution_strategy=distribution_strategy)
if random_seed is None:
if random_seed is None:#{
random_seed = np.random.randint(np.uint32(-1)/3,
size=self.shape[0])
random_seed = comm.bcast(random_seed, root=0)
#}
casted_random_seed[:] = random_seed
self.random_seed = casted_random_seed
......@@ -72,9 +67,9 @@ class MagneticField(Field):
return self._parameters
def set_val(self, new_val=None, copy=False):
if new_val is not None:
raise RuntimeError("Setting the field values explicitly is not "
"supported by MagneticField.")
if new_val is not None:#{
raise RuntimeError("Setting the field values explicitly is not supported by MagneticField.")
#}
self._val = self._create_field()
def _to_hdf5(self, hdf5_group):
......
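The seed handling in __init__ above follows a common MPI pattern: one random seed is drawn per ensemble member and then broadcast from rank 0, so every task ends up generating the same field realizations. A reduced sketch of just that pattern, outside the NIFTy machinery and with illustrative names:

import numpy as np
from mpi4py import MPI

comm = MPI.COMM_WORLD

def shared_random_seeds(ensemble_size):
    seeds = None
    if comm.rank == 0:
        # one independent seed per ensemble member, drawn only on the root task
        seeds = np.random.randint(np.uint32(-1) // 3, size=ensemble_size)
    # every rank receives the identical seed array
    return comm.bcast(seeds, root=0)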
......@@ -17,16 +17,11 @@
# and financially supported by the Studienstiftung des deutschen Volkes.
import numpy as np
from keepers import Loggable
from imagine.carrier_mapper import unity_mapper
from nifty import FieldArray, RGSpace
from magnetic_field import MagneticField
class MagneticFieldFactory(Loggable, object):
def __init__(self, box_dimensions, resolution):
......@@ -34,9 +29,7 @@ class MagneticFieldFactory(Loggable, object):
self.box_dimensions = box_dimensions
self.resolution = resolution
self._parameter_defaults = self._initial_parameter_defaults
self._variable_to_parameter_mappings = \
self._initial_variable_to_parameter_mappings
self._variable_to_parameter_mappings = self._initial_variable_to_parameter_mappings
distances = np.array(self.box_dimensions) / np.array(self.resolution)
self._grid_space = RGSpace(shape=self.resolution,
distances=distances)
......@@ -44,9 +37,9 @@ class MagneticFieldFactory(Loggable, object):
self._ensemble_cache = {}
def _get_ensemble(self, ensemble_size):
if ensemble_size not in self._ensemble_cache:
self._ensemble_cache[ensemble_size] = \
FieldArray(shape=(ensemble_size,))
if ensemble_size not in self._ensemble_cache:#{
self._ensemble_cache[ensemble_size] = FieldArray(shape=(ensemble_size,))
#}
return self._ensemble_cache[ensemble_size]
@property
......@@ -56,8 +49,9 @@ class MagneticFieldFactory(Loggable, object):
@box_dimensions.setter
def box_dimensions(self, box_dimensions):
dim = tuple(np.array(box_dimensions, dtype=np.float))
if len(dim) != 3:
if len(dim) != 3:#{
raise ValueError("Input of box_dimensions must have length three.")
#}
self._box_dimensions = dim
@property
......@@ -67,8 +61,9 @@ class MagneticFieldFactory(Loggable, object):
@resolution.setter
def resolution(self, resolution):
resolution = tuple(np.array(resolution, dtype=np.int))
if len(resolution) != 3:
if len(resolution) != 3:#{
raise ValueError("Input for resolution must have length three.")
#}
self._resolution = resolution
@property
......@@ -104,10 +99,11 @@ class MagneticFieldFactory(Loggable, object):
@property
def variable_defaults(self):
variable_defaults = {}
for parameter in self.parameter_defaults:
for parameter in self.parameter_defaults:#{
low, high = self.variable_to_parameter_mappings[parameter]
default = self.parameter_defaults[parameter]
variable_defaults[parameter] = (default - low)/(high - low)
#}
return variable_defaults
@property
......@@ -122,17 +118,17 @@ class MagneticFieldFactory(Loggable, object):
value: [min, max]
"""
for k, v in new_mapping.items():
if k in self._variable_to_parameter_mappings:
if k in self._variable_to_parameter_mappings:#{
key = str(k)
value = [np.float(v[0]), np.float(v[1])]
self._variable_to_parameter_mappings.update({key: value})
self.logger.debug("Updated variable_to_parameter_mapping %s "
"to %s" % (key, str(value)))
self.logger.debug("Updated variable_to_parameter_mapping %s to %s" % (key, str(value)))
#}
def _map_variables_to_parameters(self, variables):
parameter_dict = {}
for variable_name in variables:
if variable_name in self.variable_to_parameter_mappings:
if variable_name in self.variable_to_parameter_mappings:#{
mapping = self.variable_to_parameter_mappings[variable_name]
mapped_variable = unity_mapper(variables[variable_name],
a=mapping[0],
......@@ -141,8 +137,10 @@ class MagneticFieldFactory(Loggable, object):
# a=mapping[0],
# m=mapping[1],
# b=mapping[2])
else:
#}
else:#{
mapped_variable = np.float(variables[variable_name])
#}
parameter_dict[variable_name] = mapped_variable
return parameter_dict
......@@ -150,16 +148,12 @@ class MagneticFieldFactory(Loggable, object):
mapped_variables = self._map_variables_to_parameters(variables)
work_parameters = self.parameter_defaults.copy()
work_parameters.update(mapped_variables)
domain = (self._get_ensemble(ensemble_size),
self._grid_space,
self._vector)
result_magnetic_field = self.magnetic_field_class(
domain=domain,
result_magnetic_field = self.magnetic_field_class(domain=domain,
parameters=work_parameters,
distribution_strategy='equal',
random_seed=random_seed)
self.logger.debug("Generated magnetic field with work-parameters %s" %
work_parameters)
self.logger.debug("Generated magnetic field with work-parameters %s" % work_parameters)
return result_magnetic_field
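The factory maps unit-interval sampling variables onto physical parameter ranges via unity_mapper, whose definition is not part of this diff; the (default - low)/(high - low) inverse used in variable_defaults suggests a plain clipped linear map. A hedged sketch of that assumed behaviour:

def unity_mapper(x, a, b):
    # assumed behaviour: clip the sampler variable to [0, 1] and
    # map it linearly onto the physical interval [a, b]
    x = min(max(float(x), 0.), 1.)
    return a + (b - a) * x

# inverse relation used by variable_defaults above:
# variable = (parameter_default - a) / (b - a)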
......@@ -18,19 +18,20 @@
from imagine.magnetic_fields.magnetic_field import MagneticField
class WMAP3yrMagneticField(MagneticField):
@property
def descriptor_lookup(self):
lookup = \
{'b0': ['./MagneticField/Regular/WMAP/b0', 'value'],
lookup = {'b0': ['./MagneticField/Regular/WMAP/b0', 'value'],
'psi0': ['./MagneticField/Regular/WMAP/psi0', 'value'],
'psi1': ['./MagneticField/Regular/WMAP/psi1', 'value'],
'chi0': ['./MagneticField/Regular/WMAP/chi0', 'value'],
'random_rms': ['./MagneticField/Random/Global/rms', 'value'],
'random_rho': ['./MagneticField/Random/Global/rho', 'value'],
'random_a0': ['./MagneticField/Random/Global/a0', 'value']
'random_rms': ['./MagneticField/Random/Global/ES/rms', 'value'],
'random_rho': ['./MagneticField/Random/Global/ES/rho', 'value'],
'random_a0': ['./MagneticField/Random/Global/ES/a0', 'value'],
'random_k0': ['./MagneticField/Random/Global/ES/k0', 'value'],
'random_r0': ['./MagneticField/Random/Global/ES/r0', 'value'],
'random_z0': ['./MagneticField/Random/Global/ES/z0', 'value']
}
return lookup
......
......@@ -16,13 +16,11 @@
# IMAGINE is being developed at the Max-Planck-Institut fuer Astrophysik
# and financially supported by the Studienstiftung des deutschen Volkes.
from imagine.magnetic_fields.magnetic_field.magnetic_field_factory \
import MagneticFieldFactory
from imagine.magnetic_fields.magnetic_field.magnetic_field_factory import MagneticFieldFactory
from wmap3yr_magnetic_field import WMAP3yrMagneticField
class WMAP3yrMagneticFieldFactory(MagneticFieldFactory):
@property
def magnetic_field_class(self):
return WMAP3yrMagneticField
......@@ -33,9 +31,13 @@ class WMAP3yrMagneticFieldFactory(MagneticFieldFactory):
'psi0': 27.0,
'psi1': 0.9,
'chi0': 25.0,
# using ES random field generator in hammurabiX
'random_rms': 2.0,
'random_rho': 0.5,
'random_a0': 1.7}
'random_a0': 1.7,
'random_k0': 0.5,
'random_r0': 8.0,
'random_z0': 1.0}
return defaults
@property
......@@ -43,13 +45,15 @@ class WMAP3yrMagneticFieldFactory(MagneticFieldFactory):
return self._generate_variable_to_parameter_mapping_defaults(n=3)
def _generate_variable_to_parameter_mapping_defaults(self, n):
defaults = {
'b0': self._positive_interval(6.0, 1.9, n), # b0 astro-ph/0603450
defaults = {'b0': self._positive_interval(6.0, 1.9, n), # b0 astro-ph/0603450
'psi0': self._positive_interval(27.0, 7.0, n), # psi0 astro-ph/0603450
'psi1': self._positive_interval(0.9, 5.0, n), # psi1 astro-ph/0603450
'chi0': self._positive_interval(25, 8.0, n), # xsi0 astro-ph/0603450
'random_rms': self._positive_interval(2.0, 0.6, n),
'random_rho': self._positive_interval(0.5, 0.166, n),
'random_a0': self._positive_interval(1.7, 0.5, n)
'random_a0': self._positive_interval(1.7, 0.5, n),
'random_k0': self._positive_interval(0.5, 0.15, n),
'random_r0': self._positive_interval(8.0, 2.0, n),
'random_z0': self._positive_interval(1.0, 0.3, n)
}
return defaults
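_positive_interval is likewise not shown in this diff; judging from the paper values it wraps (e.g. b0 = 6.0 with an uncertainty of 1.9 from astro-ph/0603450) and the n=3 default, it presumably returns a symmetric n-sigma range floored at zero. A purely illustrative guess at its shape:

def _positive_interval(mean, sigma, n):
    # assumed helper: [mean - n*sigma, mean + n*sigma], clipped to positive values
    return [max(0., mean - n * sigma), mean + n * sigma]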
......@@ -18,18 +18,15 @@
from nifty import Field, FieldArray
class Observable(Field):
def __init__(self, domain=None, val=None, dtype=None,
distribution_strategy=None, copy=False):
super(Observable, self).__init__(
domain=domain,
super(Observable, self).__init__(domain=domain,
val=val,
dtype=dtype,
distribution_strategy=distribution_strategy,
copy=copy)
assert(len(self.domain) == 2)
assert(isinstance(self.domain[0], FieldArray))
......@@ -42,8 +39,8 @@ class Observable(Field):
return self._ensemble_mean
def _to_hdf5(self, hdf5_group):
if hasattr(self, '_ensemble_mean'):
return_dict = {'ensemble_mean': self._ensemble_mean}
if hasattr(self, "_ensemble_mean"):
return_dict = {"ensemble_mean": self._ensemble_mean}
else:
return_dict = {}
return_dict.update(
......@@ -55,7 +52,7 @@ class Observable(Field):
new_field = super(Observable, cls)._from_hdf5(hdf5_group=hdf5_group,
repository=repository)
try:
observable_mean = repository.get('ensemble_mean', hdf5_group)
observable_mean = repository.get("ensemble_mean", hdf5_group)
new_field._observable_mean = observable_mean
except(KeyError):
pass
......
......@@ -19,13 +19,14 @@
from .magnetic_field_model import MagneticFieldModel
from imagine.magnetic_fields.wmap3yr_magnetic_field import WMAP3yrMagneticField
class WMAP3yrMixin(MagneticFieldModel):
def update_parameter_xml(self, root):
custom_parameters = [
['./MagneticField/Regular', 'type', 'WMAP'],
custom_parameters = [['./MagneticField/Regular', 'type', 'WMAP'],
['./MagneticField/Regular','cue','1'],
['./MagneticField/Random', 'cue', '1'],
['./MagneticField/Random', 'type', 'Global']]
['./MagneticField/Random', 'type', 'Global'],
['./MagneticField/Random/Global','type','ES']]
for parameter in custom_parameters:
root.find(parameter[0]).set(parameter[1], str(parameter[2]))
......
......@@ -17,11 +17,10 @@
# and financially supported by the Studienstiftung des deutschen Volkes.
import xml.etree.ElementTree as et
from .observable_mixin import ObservableMixin
class DMMixin(ObservableMixin):
@property
def obs_name(self):
return 'dm'
......@@ -31,7 +30,6 @@ class DMMixin(ObservableMixin):
return ['dm']
def update_parameter_xml(self, root):
element = et.Element('DM', {'cue': '1',
'filename': self.obs_name+'.fits'})
element = et.Element('DM', {'cue': '1', 'filename': self.obs_name+'.fits'})
output = root.find('Obsout')
output.append(element)
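Both mixins manipulate hammurabi's XML parameter tree through the standard library; below is a compact, self-contained illustration of the two operations used throughout (setting an attribute on an existing node, as in update_parameter_xml, and appending an observable output element, as in DMMixin/FDMixin), run against a toy XML snippet rather than the real parameter file:

import xml.etree.ElementTree as et

# toy stand-in for hammurabi's parameter file
root = et.fromstring("<root><Obsout/><MagneticField><Regular/></MagneticField></root>")

# set an attribute on an existing node
root.find('./MagneticField/Regular').set('type', 'WMAP')

# append a new observable output element
element = et.Element('DM', {'cue': '1', 'filename': 'dm.fits'})
root.find('Obsout').append(element)

print(et.tostring(root))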
......@@ -17,11 +17,10 @@
# and financially supported by the Studienstiftung des deutschen Volkes.
import xml.etree.ElementTree as et
from .observable_mixin import ObservableMixin
class FDMixin(ObservableMixin):
@property
def obs_name(self):
return 'fd'
......@@ -31,7 +30,6 @@ class FDMixin(ObservableMixin):
return ['fd']
def update_parameter_xml(self, root):
element = et.Element('Faraday', {'cue': '1',
'filename': self.obs_name+'.fits'})
element = et.Element('Faraday', {'cue': '1', 'filename': self.obs_name+'.fits'})
output = root.find('Obsout')
output.append(element)
......@@ -18,13 +18,12 @@
import abc
import os
import healpy
from keepers import Loggable
class ObservableMixin(Loggable, object):
@abc.abstractproperty
def obs_name(self):
raise NotImplementedError
......@@ -42,10 +41,10 @@ class ObservableMixin(Loggable, object):
map_list = self._read_fits_file(path=working_directory,
name=self.obs_name + '.fits',
nside=nside)
for i, map_component in enumerate(map_list):
for i, map_component in enumerate(map_list):#{
temp_obs = observable_dict[self.component_names[i]]
temp_obs.val.data[local_ensemble_index] = map_component
#}
def _read_fits_file(self, path, name, nside):
map_path = os.path.join(path, name)
......@@ -53,8 +52,7 @@ class ObservableMixin(Loggable, object):
i = 0
while True:
try:
loaded_map = healpy.read_map(map_path, verbose=False,
field=i)
loaded_map = healpy.read_map(map_path, verbose=False, field=i)
# loaded_map = healpy.ud_grade(loaded_map, nside_out=nside)
result_list += [loaded_map]
i += 1
......
......@@ -17,11 +17,10 @@
# and financially supported by the Studienstiftung des deutschen Volkes.
import xml.etree.ElementTree as et
from .observable_mixin import ObservableMixin
class SyncMixin(ObservableMixin):
def __init__(self, frequency='23'):
self.frequency = str(frequency)
......