Skip to content
GitLab
Projects
Groups
Snippets
Help
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in
Toggle navigation
N
NIFTy
Project overview
Project overview
Details
Activity
Releases
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Issues
13
Issues
13
List
Boards
Labels
Service Desk
Milestones
Merge Requests
8
Merge Requests
8
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Operations
Operations
Incidents
Environments
Packages & Registries
Packages & Registries
Container Registry
Analytics
Analytics
CI / CD
Repository
Value Stream
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
ift
NIFTy
Commits
73c8b1a9
Commit
73c8b1a9
authored
Apr 08, 2020
by
Rouven Lemmerz
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
Added test
parent
78a5e223
Pipeline
#72488
passed with stages
in 15 minutes and 17 seconds
Changes
3
Pipelines
1
Hide whitespace changes
Inline
Side-by-side
Showing
3 changed files
with
46 additions
and
1 deletion
+46
-1
nifty6/__init__.py
nifty6/__init__.py
+2
-0
nifty6/operator_tree_optimiser.py
nifty6/operator_tree_optimiser.py
+2
-1
test/test_operator_tree_optimiser.py
test/test_operator_tree_optimiser.py
+42
-0
No files found.
nifty6/__init__.py
View file @
73c8b1a9
...
...
@@ -96,5 +96,7 @@ from .linearization import Linearization
from
.operator_spectrum
import
operator_spectrum
from
.operator_tree_optimiser
import
optimise_operator
# We deliberately don't set __all__ here, because we don't want people to do a
# "from nifty6 import *"; that would swamp the global namespace.
nifty6/operator_tree_optimiser.py
View file @
73c8b1a9
...
...
@@ -270,6 +270,8 @@ def optimise_operator(op):
"""
Merges redundant operations in the tree structure of an operator.
For example it is ensured that for ``(f@x + x)`` ``x`` is only computed once.
Currently works only on ``_OpChain``, ``_OpSum`` and ``_OpProd`` and does not optimise their linear pendants
``ChainOp`` and ``SumOperator``.
Parameters
----------
...
...
@@ -289,7 +291,6 @@ def optimise_operator(op):
>>> op = (uni1 + uni2)*(uni1 + uni2)
is replaced by something comparable to
>>> from nifty6 import UniformOperator, DomainTuple
>>> uni = UniformOperator(DomainTuple.scalar_domain())
>>> uni_add = uni + uni
>>> op = uni_add * uni_add
...
...
test/test_operator_tree_optimiser.py
0 → 100644
View file @
73c8b1a9
from
numpy.testing
import
assert_
,
assert_allclose
import
numpy
as
np
from
copy
import
deepcopy
import
nifty6
as
ift
class CountingOp(ift.Operator):
    """Identity operator that records how many times it has been applied."""
    # FIXME: Not a LinearOperator since ChainOps not supported yet

    def __init__(self, domain):
        dom = ift.sugar.makeDomain(domain)
        self._domain = dom
        self._target = dom
        self._count = 0

    def apply(self, x):
        # Bump the counter on every application; the input passes through
        # unchanged, so wrapping an operator with this does not alter results.
        self._count += 1
        return x

    @property
    def count(self):
        """Number of times ``apply`` has been called so far."""
        return self._count
def test_operator_tree_optimiser():
    """Check that the operator tree optimiser merges redundant sub-trees.

    Builds an operator expression in which identical sub-expressions occur
    several times, optimises it, and verifies that

    * the optimised operator produces the same numerical result as the
      original (compared against a deep copy taken before optimisation), and
    * each counting operator is applied the minimal number of times.
    """
    dom = ift.RGSpace(10, harmonic=True)
    cop1 = CountingOp(dom)
    op1 = (ift.UniformOperator(dom, -1, 2) @ cop1).ducktape('a')
    cop2 = CountingOp(dom)
    op2 = ift.FieldZeroPadder(dom, (11,)) @ cop2
    cop3 = CountingOp(op2.target)
    op3 = ift.ScalingOperator(op2.target, 3) @ cop3
    cop4 = CountingOp(op2.target)
    op4 = ift.ScalingOperator(op2.target, 1.5) @ cop4
    op1 = op1 * op1
    # test layering in between two levels
    op = op3 @ op2 @ op1 + op2 @ op1 + op3 @ op2 @ op1 + op2 @ op1
    op = op + op
    op = op4 @ (op4 @ op + op4 @ op)
    fld = ift.from_random('normal', op.domain, np.float64)
    # Deep-copy *before* optimisation so op_orig keeps independent counting
    # operators and serves as the unoptimised reference.
    op_orig = deepcopy(op)
    op = ift.operator_tree_optimiser._optimise_operator(op)
    # Optimisation must not change the numerical result.
    assert_allclose(op(fld).val, op_orig(fld).val,
                    rtol=np.finfo(np.float64).eps)
    # After optimisation, cop1..cop3 should each have been applied exactly
    # once, and cop4 exactly twice, making this product equal to 1.
    assert_(1 == ((cop4.count - 1)*cop3.count*cop2.count*cop1.count))
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment