Commit 848bc730
authored Nov 10, 2017 by Holger Niemann
add delay into daily upload, fix of upload for <4gb files

parent 9b7cda18
Showing 3 changed files with 19 additions and 11 deletions:

  Upload_Day_IRdata.py                               +12 -4
  __pycache__/uploadingversionIRdata.cpython-35.pyc  +0 -0
  uploadingversionIRdata.py                          +7 -7
Upload_Day_IRdata.py  +12 -4
@@ -11,13 +11,15 @@ import h5py as h5
 import numpy as np
 import datetime
 import sys
+from time import sleep
 #from getfoldernames import Searchdatalocation as gfn
 #%% set global parameter
 is_local_test = True # local test on one IRcam PC (not central PC)
 active_PC = [0] # selection of the following camera PCs to loop over
+delayupload = True
+startuploadtime = "20:00:00"
 reupload = False
 reason = ""
 retry = 2
@@ -35,8 +37,7 @@ nuc_parlog=True#False
 nuc = True
 metastream = True
-dates = [[2017,10,11],[2017,10,12],[2017,10,17],[2017,10,18]]
-#date = [2017,9,26]
+dates = [[2017,11,9]]
 ports = ['AEF10','AEF11',
          'AEF20','AEF21',
@@ -73,6 +74,13 @@ class Tee(object):
     def flush(self):
         for f in self.files:
             f.flush()
+if delayupload:
+    now = datetime.datetime.now()
+    start = datetime.datetime(now.year,now.month,now.day,int(startuploadtime.split(":")[0]),int(startuploadtime.split(":")[1]),int(startuploadtime.split(":")[2]))
+    sleeptime = (start-now).total_seconds() #seconds
+    if sleeptime < 0:
+        sleeptime = 1
+    sleep(sleeptime)
 original = sys.stdout
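The block added above sleeps until the configured wall-clock start time; note that when the script is launched after startuploadtime, sleeptime is negative and is clamped to 1, so the upload begins almost immediately rather than waiting a day. A minimal standalone sketch of that logic (seconds_until is a hypothetical helper, not part of the repository):

# Minimal sketch of the delayed-start logic above; seconds_until() is
# a hypothetical helper, not part of this commit.
import datetime
from time import sleep

def seconds_until(timestr):
    """Seconds from now until timestr ("HH:MM:SS") today; 1 if already past."""
    h, m, s = (int(x) for x in timestr.split(":"))
    now = datetime.datetime.now()
    start = now.replace(hour=h, minute=m, second=s, microsecond=0)
    wait = (start - now).total_seconds()
    return wait if wait > 0 else 1  # same clamp as in the commit

sleep(seconds_until("20:00:00"))  # then start the daily upload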
__pycache__/uploadingversionIRdata.cpython-35.pyc  +0 -0 (new file, mode 100644)

File added
uploadingversionIRdata.py  +7 -7
@@ -11,7 +11,7 @@ import urllib
 import json
 #from getfoldernames import Searchdatalocation as gfn
 import numpy as np
-from binfilesreadingv2 import BinVideoReader as bvreader
+#from binfilesreadingv2 import BinVideoReader as bvreader
 import os
 import datetime as dt
@@ -814,7 +814,7 @@ def writeH5_from_File(stream,filename,key,dimof,idx=None):
     """
 #    stream = path.stream
 #    dtype = str(data.dtype)
-    filesize = os.stat(filename).stsize
+    filesize = os.stat(filename).st_size
     f5 = h5reader.File(filename,'r')
     if filesize < 4000000000:
         tmpfile = "archive_" + stream + '_' + str(dimof[0])
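The one-character change above fixes a real crash: os.stat() results expose st_size, so the old .stsize access raised AttributeError. A quick illustration (threshold as in the code, roughly 4 GB):

import os

filesize = os.stat(__file__).st_size   # bytes; .stsize would raise AttributeError
fits_single_file = filesize < 4000000000  # ~4 GB threshold used above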
@@ -835,9 +835,9 @@ def writeH5_from_File(stream,filename,key,dimof,idx=None):
     times = []
     limits = [0]
     shapi = f5[key].shape
-    intervall = int(np.shape(dimof)/nrfiles)
-    for i in range(nrfiles-1):
-        limits.append(intervall*i)
+    intervall = int(np.shape(dimof)[0]/nrfiles)
+    for i in range(0,nrfiles-1):
+        limits.append(intervall*(i+1))
         times.append(dimof[limits[i]:limits[i+1]])
     limits.append(np.shape(dimof)[0])
     times.append(dimof[limits[nrfiles-1]:limits[nrfiles]])
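Two fixes land in this hunk. First, np.shape(dimof) returns a tuple, which cannot be divided by an integer, so the [0] index is needed to get the number of timestamps. Second, the loop previously appended intervall*i starting at i=0, duplicating the leading 0 and producing an empty first chunk plus an oversized last one; intervall*(i+1) yields even boundaries. A small worked example with hypothetical numbers (10 timestamps split across 3 files):

import numpy as np

dimof = np.arange(10)   # stand-in for the timestamp vector
nrfiles = 3
intervall = int(np.shape(dimof)[0] / nrfiles)   # 3

limits = [0]
for i in range(0, nrfiles - 1):
    limits.append(intervall * (i + 1))   # fixed: [0, 3, 6]
limits.append(np.shape(dimof)[0])        # -> [0, 3, 6, 10]
# the old limits.append(intervall*i) gave [0, 0, 3, 10]:
# an empty first chunk and a 7-frame last one
times = [dimof[limits[i]:limits[i + 1]] for i in range(nrfiles)]
print([len(t) for t in times])  # [3, 3, 4]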
@@ -847,8 +847,8 @@ def writeH5_from_File(stream,filename,key,dimof,idx=None):
         tmpfile += ".h5"
         with h5reader.File(tmpfile,'w') as f:
             g = f.create_group('data') # requires [row,col,time]
-            g.create_dataset('timestamps', data=list(times[i]), dtype='uint64', compression="gzip")
-            dset = g.create_dataset(stream, shape=(shapi[0],shapi[1],limits[i+1]-limits[i]), dtype='uint16', chunks=(shapi[0],shapi[1],1), compression='gzip')
+            g.create_dataset('timestamps', data=list(times[i]), dtype='uint64')#,compression="gzip")
+            dset = g.create_dataset(stream, shape=(shapi[0],shapi[1],limits[i+1]-limits[i]), dtype='uint16', chunks=(shapi[0],shapi[1],1))#,compression='gzip')
             for n in range(limits[i+1]-limits[i]):
                 dset[:,:,n] = f5[key][:,:,limits[i]+n]
         tmpfiles.append(tmpfile)
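This hunk drops gzip compression from both the timestamp and the frame datasets (commented out rather than deleted), presumably trading archive size for write and upload speed. A minimal self-contained sketch of the same chunk-wise frame copy, with synthetic data and illustrative file names:

import h5py
import numpy as np

# synthetic (row, col, time) source, standing in for the real .h5 files
with h5py.File("source.h5", "w") as src:
    src.create_dataset("frames", data=np.zeros((4, 4, 10), dtype="uint16"))

lo, hi = 2, 7  # stand-ins for limits[i], limits[i+1]
with h5py.File("source.h5", "r") as src, h5py.File("archive_tmp.h5", "w") as f:
    g = f.create_group("data")
    # one frame per chunk; add compression="gzip" to restore the old behaviour
    dset = g.create_dataset("frames", shape=(4, 4, hi - lo),
                            dtype="uint16", chunks=(4, 4, 1))
    for n in range(hi - lo):
        dset[:, :, n] = src["frames"][:, :, lo + n]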