ift / NIFTy · Commits · 65532b56

Commit 65532b56, authored Nov 13, 2017 by Martin Reinecke

    cleanups

Parent: cbe067d1
Pipeline #21524 passed in 4 minutes and 17 seconds
Changes: 4 · Pipelines: 1
nifty/data_objects/distributed_do.py

@@ -4,7 +4,7 @@ from mpi4py import MPI

 __all__ = ["ntask", "rank", "master", "local_shape", "data_object", "full",
            "empty", "zeros", "ones", "empty_like", "vdot", "abs", "exp",
-           "log", "sqrt", "bincount", "from_object", "from_random",
+           "log", "sqrt", "from_object", "from_random",
            "local_data", "ibegin", "np_allreduce_sum", "distaxis",
            "from_local_data", "from_global_data", "to_global_data",
            "redistribute", "default_distaxis"]

@@ -290,13 +290,6 @@ def sqrt(a, out=None):
     return _math_helper(a, np.sqrt, out)


-def bincount(x, weights=None, minlength=None):
-    if weights is not None:
-        weights = weights._data
-    res = np.bincount(x._data, weights, minlength)
-    return data_object(res)
-
-
 def from_object(object, dtype=None, copy=True):
     return data_object(object._shape,
                        np.array(object._data, dtype=dtype, copy=copy),
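With the dobj-level bincount wrapper removed, callers bin their local data with np.bincount and reduce the partial results across MPI tasks themselves, which is the pattern power_space.py uses further down in this commit. Below is a minimal, hedged sketch of that pattern; local_pindex and local_weights are made-up stand-ins for the local slab of a distributed data_object, not names from the repository.

# Hedged sketch: per-task binning followed by a global sum, the pattern
# that replaces the removed dobj.bincount wrapper. Assumes mpi4py is
# available; the local arrays below are hypothetical.
import numpy as np
from mpi4py import MPI

comm = MPI.COMM_WORLD
nbin = 8
local_pindex = np.random.randint(0, nbin, size=100)   # bin index of each local sample
local_weights = np.random.uniform(size=100)           # weight of each local sample

# Each task bins only the data it owns ...
local_counts = np.bincount(local_pindex, weights=local_weights,
                           minlength=nbin).astype(np.float64)

# ... and a single allreduce yields the global per-bin sums on every task.
global_counts = np.empty_like(local_counts)
comm.Allreduce(local_counts, global_counts, op=MPI.SUM)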
nifty/data_objects/numpy_do.py

@@ -3,12 +3,12 @@

 import numpy as np
 from numpy import ndarray as data_object
 from numpy import full, empty, empty_like, sqrt, ones, zeros, vdot, abs, \
-                  bincount, exp, log
+                  exp, log
 from .random import Random

 __all__ = ["ntask", "rank", "master", "local_shape", "data_object", "full",
            "empty", "zeros", "ones", "empty_like", "vdot", "abs", "exp",
-           "log", "sqrt", "bincount", "from_object", "from_random",
+           "log", "sqrt", "from_object", "from_random",
            "local_data", "ibegin", "np_allreduce_sum", "distaxis",
            "from_local_data", "from_global_data", "to_global_data",
            "redistribute", "default_distaxis"]
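numpy_do.py exports the same __all__ as distributed_do.py on purpose: the rest of the code base is written against a single dobj interface and gets either plain numpy arrays or MPI-distributed ones underneath. The selection logic itself is not part of this diff; the following is only a sketch of how such a switch could look, and the try/except import policy is an assumption rather than NIFTy's actual code.

# Hedged sketch of a backend selector; the module paths follow this commit,
# but the selection logic is an assumption, not code from the repository.
try:
    from mpi4py import MPI
    if MPI.COMM_WORLD.Get_size() > 1:
        from nifty.data_objects.distributed_do import *   # MPI-distributed arrays
    else:
        from nifty.data_objects.numpy_do import *         # plain numpy arrays
except ImportError:
    from nifty.data_objects.numpy_do import *             # MPI not available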
nifty/operators/fft_operator_support.py

@@ -66,12 +66,12 @@ class RGRGTransformation(Transformation):
             tmpax = (dobj.distaxis(x.val),)
             tmp = dobj.redistribute(x.val, nodist=tmpax)
             ldat = dobj.local_data(tmp)
-            ldat = fftn(ldat, axes=tmpax)
-            if len(axes) == 1:  # we are done
-                ldat = ldat.real+ldat.imag
+            if len(axes) == 1:  # only one transform needed
+                ldat = hartley(ldat, axes=tmpax)
                 tmp = dobj.from_local_data(tmp.shape, ldat, distaxis=dobj.distaxis(tmp))
                 tmp = dobj.redistribute(tmp, dist=tmpax[0])
-            else:
+            else:  # two separate transforms
+                ldat = fftn(ldat, axes=tmpax)
                 tmp = dobj.from_local_data(tmp.shape, ldat, distaxis=dobj.distaxis(tmp))
                 tmp = dobj.redistribute(tmp, dist=tmpax[0])
                 tmpax = tuple(i for i in axes if i not in tmpax)
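The deleted lines show how the Hartley transform was previously assembled by hand from a full complex FFT (real part plus imaginary part); the single-axis branch now delegates to a hartley() helper instead. Here is a small self-contained sketch of that construction, assuming the helper follows the same real-plus-imaginary convention as the removed code; any extra scaling or threading in NIFTy's own helper is not visible in this diff.

# Hedged sketch: a hartley() assembled the same way the removed lines
# combined the FFT output (ldat.real + ldat.imag after fftn).
import numpy as np

def hartley(a, axes=None):
    tmp = np.fft.fftn(a, axes=axes)
    return tmp.real + tmp.imag

x = np.random.uniform(size=(16, 16))
y = hartley(x, axes=(0,))
# Like the textbook discrete Hartley transform, this construction is its
# own inverse up to a factor of the transform length.
assert np.allclose(hartley(y, axes=(0,)), x.shape[0] * x)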
nifty/spaces/power_space.py

@@ -150,13 +150,12 @@ class PowerSpace(Space):
                                      minlength=nbin)
             temp_rho = dobj.np_allreduce_sum(temp_rho)
             assert not (temp_rho == 0).any(), "empty bins detected"
+            # The explicit conversion to float64 is necessary because bincount
+            # sometimes returns its result as an integer array, even when
+            # floating-point weights are present ...
             temp_k_lengths = np.bincount(dobj.local_data(temp_pindex).ravel(),
                                          weights=dobj.local_data(k_length_array.val).ravel(),
-                                         minlength=nbin)
-            # This conversion is necessary because bincount sometimes returns
-            # its result as an integer array, even when floating-point weights
-            # are present ...
-            temp_k_lengths = temp_k_lengths.astype(np.float64)
+                                         minlength=nbin).astype(np.float64)
             temp_k_lengths = dobj.np_allreduce_sum(temp_k_lengths)/temp_rho
             temp_dvol = temp_rho*pdvol
             self._powerIndexCache[key] = (binbounds,
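For reference, the same binning-and-averaging sequence in plain numpy on a single task (the dobj.np_allreduce_sum calls above only add the cross-task summation); the toy arrays below are made up for illustration.

# Hedged sketch of the per-bin averaging done above, single task only:
# bin the k-lengths, force float64 as the comment in the diff warns,
# and divide by the occupation numbers to get the mean k-length per bin.
import numpy as np

nbin = 4
pindex = np.array([0, 0, 1, 2, 2, 2, 3])        # bin index of every grid point
k_lengths = np.array([0.0, 0.1, 1.0, 2.0, 2.1, 1.9, 3.0])

rho = np.bincount(pindex, minlength=nbin)       # number of points per bin
assert not (rho == 0).any(), "empty bins detected"
binned = np.bincount(pindex, weights=k_lengths,
                     minlength=nbin).astype(np.float64)
mean_k = binned / rho                           # average k-length per bin
print(mean_k)                                   # -> [0.05, 1.0, 2.0, 3.0]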