ift / NIFTy · Commits · 2fedb50b

Commit 2fedb50b
authored Aug 03, 2018 by Martin Reinecke

    fixes and convenience functions

parent 55564314
Changes: 7 files
demos/polynomial_fit.py

@@ -53,7 +53,7 @@ class PolynomialResponse(ift.LinearOperator):
     def apply(self, x, mode):
         self._check_input(x, mode)
-        val = x.to_global_data()
+        val = x.to_global_data_rw()
         if mode == self.TIMES:
             # FIXME Use polynomial() here
             out = self._mat.dot(val)

@@ -132,6 +132,7 @@ plt.close()
 # Print parameters
 mean = sc.mean.to_global_data()
 sigma = np.sqrt(sc.var.to_global_data())
-for ii in range(len(mean)):
-    print('Coefficient x**{}: {:.2E} +/- {:.2E}'.format(ii, mean[ii], sigma[ii]))
+if ift.dobj.master:
+    for ii in range(len(mean)):
+        print('Coefficient x**{}: {:.2E} +/- {:.2E}'.format(ii, mean[ii], sigma[ii]))
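The second hunk also changes the reporting so that, when the demo runs under MPI, the coefficients are printed once on the master task rather than once per process. A minimal sketch of that pattern (not part of the commit; it assumes `nifty5` is imported as `ift` and that `sc` is the statistics object used in the demo):

    import numpy as np
    import nifty5 as ift

    def report_coefficients(sc):
        # every task computes the gathered statistics ...
        mean = sc.mean.to_global_data()
        sigma = np.sqrt(sc.var.to_global_data())
        # ... but only the master task prints them, so an MPI run does not
        # repeat the output once per process
        if ift.dobj.master:
            for ii in range(len(mean)):
                print('Coefficient x**{}: {:.2E} +/- {:.2E}'
                      .format(ii, mean[ii], sigma[ii]))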
nifty5/data_objects/distributed_do.py

@@ -33,7 +33,7 @@ __all__ = ["ntask", "rank", "master", "local_shape", "data_object", "full",
            "np_allreduce_min", "np_allreduce_max", "distaxis", "from_local_data",
            "from_global_data", "to_global_data", "redistribute", "default_distaxis",
-           "is_numpy", "lock", "locked", "uniform_full", "transpose"]
+           "is_numpy", "lock", "locked", "uniform_full", "transpose",
+           "to_global_data_rw"]

 _comm = MPI.COMM_WORLD
 ntask = _comm.Get_size()

@@ -403,6 +403,13 @@ def to_global_data(arr):
     return tmp._data


+def to_global_data_rw(arr):
+    if arr._distaxis == -1:
+        return arr._data.copy()
+    tmp = redistribute(arr, dist=-1)
+    return tmp._data
+
+
 def redistribute(arr, dist=None, nodist=None):
     if dist is not None:
         if nodist is not None:
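The new MPI-aware `to_global_data_rw` distinguishes two cases: if the data object is not distributed (`_distaxis == -1`), a plain copy of the local buffer is returned; otherwise `redistribute(arr, dist=-1)` already materialises a fresh, fully gathered array, so its `_data` can be handed out without a further copy. For callers this replaces the earlier `to_global_data().copy()` idiom and saves one copy whenever the data really is distributed. A hedged caller-side sketch (not part of the commit; assumes `nifty5` imports as `ift`):

    import numpy as np
    import nifty5 as ift

    fld = ift.Field.from_global_data(ift.RGSpace(16), np.zeros(16))

    buf_old = fld.to_global_data().copy()   # previous idiom: gather, then copy again
    buf_new = fld.to_global_data_rw()       # new call: one private, writable array
    buf_new[0] = 1.                         # safe, fld itself is untouched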
nifty5/data_objects/numpy_do.py

@@ -32,7 +32,7 @@ __all__ = ["ntask", "rank", "master", "local_shape", "data_object", "full",
            "np_allreduce_min", "np_allreduce_max", "distaxis", "from_local_data",
            "from_global_data", "to_global_data", "redistribute", "default_distaxis",
-           "is_numpy", "lock", "locked", "uniform_full"]
+           "is_numpy", "lock", "locked", "uniform_full", "to_global_data_rw"]

 ntask = 1
 rank = 0

@@ -106,6 +106,10 @@ def to_global_data(arr):
     return arr


+def to_global_data_rw(arr):
+    return arr.copy()
+
+
 def redistribute(arr, dist=None, nodist=None):
     return arr
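For the scalar (non-MPI) backend the new function is simply `arr.copy()`, which keeps the two backends behaviourally consistent: the read-only accessor returns the stored array itself, while the `_rw` variant never aliases it. A small consistency sketch (not part of the commit; it assumes the backend module can be imported directly from the path shown above):

    import numpy as np
    from nifty5.data_objects import numpy_do

    a = np.zeros(4)
    assert numpy_do.to_global_data(a) is a   # read-only access: the array itself
    b = numpy_do.to_global_data_rw(a)        # writable access: an independent copy
    b[0] = 1.
    assert a[0] == 0.                        # the original data is unchanged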
nifty5/domains/log_rg_space.py

@@ -94,7 +94,7 @@ class LogRGSpace(StructuredDomain):
     def get_expk_length_array(self):
         # FIXME This is a hack! Only for plotting. Seems not to be the final version.
-        out = exp(self.get_k_length_array()).to_global_data().copy()
+        out = exp(self.get_k_length_array()).to_global_data_rw()
         out[1:] = out[:-1]
         out[0] = 0
         return Field.from_global_data(self, out)
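Here the writable copy is exactly what is needed: the array is edited in place (the k-length values are shifted up by one bin and the first entry is reset to zero) before it is wrapped into a new Field. A plain NumPy illustration of that shift (not part of the commit):

    import numpy as np

    out = np.array([1., 2., 3., 4.])
    out[1:] = out[:-1]   # shift every value one slot to the right
    out[0] = 0.          # reset the first entry
    print(out)           # [0. 1. 2. 3.]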
nifty5/field.py

@@ -124,14 +124,19 @@ class Field(object):
         -------
         numpy.ndarray : array containing all field entries.
             Its shape is identical to `self.shape`.
+
+        Notes
+        -----
+        Do not write to the returned array! Depending on whether MPI is
+        active or not, this may or may not change the field's data content.
         """
         return dobj.to_global_data(self._val)

+    def to_global_data_rw(self):
+        """Returns a modifiable array containing the full data of the field.
+
+        Returns
+        -------
+        numpy.ndarray : array containing all field entries, which can be
+            modified. Its shape is identical to `self.shape`.
+        """
+        return dobj.to_global_data_rw(self._val)
+
     @property
     def local_data(self):
         """numpy.ndarray : locally residing field data

@@ -154,10 +159,6 @@ class Field(object):
         -------
         Field
             Field living on `new_domain`, but with the same data as `self`.
-
-        Notes
-        -----
-        No copy is made. If needed, use an additional copy() invocation.
         """
         return Field(DomainTuple.make(new_domain), self._val)
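The docstrings spell out the contract: `to_global_data()` may hand back the field's own storage (depending on whether MPI is active), so it must be treated as read-only, whereas `to_global_data_rw()` always returns a private array that may be edited and, if desired, wrapped into a new field. A short usage sketch (not part of the commit; assumes `nifty5` imports as `ift`):

    import numpy as np
    import nifty5 as ift

    space = ift.RGSpace(8)
    f = ift.Field.from_global_data(space, np.arange(8, dtype=np.float64))

    ro = f.to_global_data()      # read-only: do not write to this array
    rw = f.to_global_data_rw()   # private, writable copy
    rw *= 2.                     # safe: f keeps its original values
    g = ift.Field.from_global_data(space, rw)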
test/test_minimization/test_minimizers.py

@@ -86,11 +86,11 @@ class Test_Minimizers(unittest.TestCase):
            @property
            def value(self):
-                return rosen(self._position.to_global_data().copy())
+                return rosen(self._position.to_global_data_rw())

            @property
            def gradient(self):
-                inp = self._position.to_global_data().copy()
+                inp = self._position.to_global_data_rw()
                 out = ift.Field.from_global_data(space, rosen_der(inp))
                 return out

@@ -98,7 +98,7 @@ class Test_Minimizers(unittest.TestCase):
            def metric(self):
                class RBCurv(ift.EndomorphicOperator):
                    def __init__(self, loc):
-                        self._loc = loc.to_global_data().copy()
+                        self._loc = loc.to_global_data_rw()

                    @property
                    def domain(self):

@@ -110,7 +110,7 @@ class Test_Minimizers(unittest.TestCase):
                    def apply(self, x, mode):
                        self._check_input(x, mode)
-                        inp = x.to_global_data().copy()
+                        inp = x.to_global_data_rw()
                         out = ift.Field.from_global_data(
                             space, rosen_hess_prod(self._loc.copy(), inp))
                         return out
test/test_models/test_model_gradients.py

@@ -75,12 +75,13 @@ class Model_Tests(unittest.TestCase):
             ift.FieldAdapter(dom, "s1")*ift.FieldAdapter(dom, "s2"))
         pos = ift.from_random("normal", dom)
         ift.extra.check_value_gradient_consistency(model, pos)
-        model = lambda inp: ift.ScalingOperator(2.456, space)(
-            inp["s1"]*inp["s2"]).positive_tanh()
+        model = ift.positive_tanh(ift.ScalingOperator(2.456, space).chain(
+            ift.FieldAdapter(dom, "s1")*ift.FieldAdapter(dom, "s2")))
         pos = ift.from_random("normal", dom)
         ift.extra.check_value_gradient_consistency(model, pos)
         if isinstance(space, ift.RGSpace):
-            model = lambda inp: ift.FFTOperator(space)(inp["s1"]*inp["s2"])
+            model = ift.FFTOperator(space).chain(
+                ift.FieldAdapter(dom, "s1")*ift.FieldAdapter(dom, "s2"))
             pos = ift.from_random("normal", dom)
             ift.extra.check_value_gradient_consistency(model, pos)