Holger Niemann / IR_data_access / Commits / 678e8197

Commit 678e8197, authored Jul 17, 2018 by Holger Niemann
    bugfix in download, update upload for OP1.2b
parent 422bfb4d
Changes: 5 files
IR_image_tools.py
@@ -63,7 +63,7 @@ def get_OP_by_time(time_ns=None, shot_no=None, program_str=None):
        return None

-def bestimmtheitsmaß_general(data,fit):
+def bestimmtheitsmass_general(data,fit):
    R=0
    if len(fit)==len(data):
        mittel=np.sum(data)/len(data)
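Side note: "Bestimmtheitsmass" is the German term for the coefficient of determination R². The full function body is not part of this hunk, so the following is only a minimal standalone sketch of that quantity under the usual definition R² = 1 - SS_res/SS_tot, starting from the mean ("mittel") computed above:

    import numpy as np

    def r_squared_sketch(data, fit):
        # hypothetical helper, not the repository's implementation
        data=np.asarray(data,dtype=float)
        fit=np.asarray(fit,dtype=float)
        mittel=np.sum(data)/len(data)     # mean of the measured data, as in the hunk above
        ss_res=np.sum((data-fit)**2)      # residual sum of squares
        ss_tot=np.sum((data-mittel)**2)   # total sum of squares
        return 1.0-ss_res/ss_tot          # R^2; 1.0 means a perfect fit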
@@ -113,7 +113,7 @@ def check_coldframe(coldframe,references=None,threshold=0.5,plot_it=False):
        for i_dat in range(len(datasets)):
            dat=datasets[i_dat]
            reference=references[i_dat]
-            bestimmtheit.append(bestimmtheitsmaß_general(dat,reference))
+            bestimmtheit.append(bestimmtheitsmass_general(dat,reference))
            if plot_it:
                plt.plot(dat,label='data')
                plt.plot(reference,label='reference')
@@ -168,8 +168,8 @@ def read_bad_pixels_from_file(port, shot_no=None, program=None):
    port_name='AEF{0}'.format(port)
    bad_pixel_file='badpixel_{0}.bpx'.format(portcamdict[OP][port_name][6:])
-    data=np.genfromtxt(IRCAMBadPixels_path+bad_pixel_file,dtype=int)
+    try:
+        data=np.genfromtxt(IRCAMBadPixels_path+bad_pixel_file,dtype=int)
+        bad_pixle_list=list(zip(data[:,1],data[:,0]))
+    except:
+        bad_pixle_list=[]
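Note that the zip call swaps the two file columns, so the second column of the .bpx file becomes the first element of each tuple. A tiny illustration with made-up values (the real .bpx layout is an assumption here):

    import numpy as np

    data=np.array([[10,3],
                   [42,7]])                        # hypothetical file content: column 0, column 1
    bad_pixle_list=list(zip(data[:,1],data[:,0]))  # -> [(3, 10), (7, 42)], i.e. (col1, col0) pairs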
@@ -220,8 +220,10 @@ def correct_images(images,badpixels):
    if type(badpixels)!=int:
        if type(images)==list:
            # return corrected images also as list of 2D arrays
-            images=restore_bad_pixels(images,np.invert(badpixels==1))#.astype(np.float32)
-            images=list(images)
+#            images = restore_bad_pixels(images, np.invert(badpixels==1))#.astype(np.float32)
+#            images = list(images)
+            for i in range(len(images)):
+                images[i]=restore_bad_pixels(images[i],np.invert(badpixels==1))
        else:
            # keep shape
            images=restore_bad_pixels(images,np.invert(badpixels==1)).astype(np.float32)
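restore_bad_pixels itself is not shown in this diff; as a rough idea of what such a routine typically does, here is a generic sketch that replaces flagged pixels by a local median (an assumption, not the repository's algorithm):

    import numpy as np
    from scipy.ndimage import median_filter

    def restore_bad_pixels_sketch(image, good_mask):
        # good_mask is True for valid pixels; the call above passes np.invert(badpixels==1)
        filtered=median_filter(image,size=3)   # neighbourhood estimate for every pixel
        out=image.astype(np.float32).copy()
        out[~good_mask]=filtered[~good_mask]   # overwrite only the flagged pixels
        return out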
Upload_Day_IRdata.py
@@ -12,21 +12,22 @@ import numpy as np
import datetime
import sys
from time import sleep
from os.path import join
#from getfoldernames import Searchdatalocation as gfn

#%% set global parameter
-is_local_test=True # local test on one IRcam PC (not central PC)
+### default old settings, will be overrided by the loaded config file!!! ###
+local=True # local test on one IRcam PC (not central PC)
active_PC=[0] # selection of the following camera PCs to loop over
delayupload=True
startuploadtime="20:00:00"
reupload=False
reason=""
retry=2
port="AEF10"
singleupload=False#True#False
singlelist=[]
raw_parlog=True
raw_data=True#False#True
raw1=True#False#True#False#True
@@ -36,8 +37,10 @@ background=True#False#True
nuc_parlog=True#False
nuc=True
metastream=True
dates=[[2017,11,9]]
temperature=False
only_upload_lattest=False
use_today=False
ports=['AEF10','AEF11','AEF20','AEF21',
@@ -59,9 +62,35 @@ pc_list = ['Pc-e4-qrt-7', 'Pc-e4-qrt-2',
           'Pc-dia-ws-16','Pc-e4-qrt-6','Pc-dia-ws-17',
           'Pc-dia-ws-18','Pc-e4-qrt-5','Pc-e4-qrt-1']

try:
    cFile=open("upload_config")
    for line in cFile:
        if line[0:3]!="###":
            vars()[line.split("=")[0]]=eval(line.split("=")[1].split("\n")[0])
    cFile.close()
    active_PC=[ports.index(port)]
    Tup[ports.index(port)]=temperature
    if use_today:
        dummy=datetime.datetime.now()
        dates=[[dummy.year,dummy.month,dummy.day]]
except Exception as E:
    raise Exception("Config file was not loaded properly",E)

toff=0 #delay time of the programm for the raw stuff in ms

def findthelatestfilename(Rawdatapath):
    '''
    find the latest modified file name for easy upload.
    @Rawdatapath: the path to search from, e.g: 'R:\\2018.06.07'.
    return the latest modified file name in string.
    '''
    filelist=os.listdir(Rawdatapath)
    timearray=[os.path.getmtime(join(Rawdatapath,i)) for i in filelist]
    maxtimestampindex=np.argmax(timearray)
    latestfilename=filelist[maxtimestampindex]
    return latestfilename

#create upload log file:
class Tee(object):
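The loader above pushes every non-comment line of upload_config into the module namespace via vars() and eval. A quick sketch of what happens to a single line, together with a slightly safer ast.literal_eval variant (the variant is a suggestion, not part of the commit):

    import ast

    line='retry=2\n'                                        # one hypothetical config line
    key=line.split("=")[0]                                  # 'retry'
    value=eval(line.split("=")[1].split("\n")[0])           # 2, exactly as the script evaluates it
    safer=ast.literal_eval(line.split("=",1)[1].strip())    # same result without executing arbitrary code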
@@ -107,7 +136,7 @@ for date in dates:
    calibdatapath=rawdatapath
    tempdatapath="\\\\{0:s}\\t\\{1[0]:4d}.{1[1]:02d}.{1[2]:02d}"
-    if is_local_test:
+    if local:
        rawdatapath="R:\\{1[0]:4d}.{1[1]:02d}.{1[2]:02d}"
        calibdatapath=rawdatapath
        tempdatapath="T:\\{1[0]:4d}.{1[1]:02d}.{1[2]:02d}"
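For reference, the path templates take the PC name as argument 0 and the [year, month, day] list as argument 1; a quick check of what they expand to (values made up):

    tempdatapath="\\\\{0:s}\\t\\{1[0]:4d}.{1[1]:02d}.{1[2]:02d}"
    print(tempdatapath.format("Pc-e4-qrt-7",[2017,11,9]))   # \\Pc-e4-qrt-7\t\2017.11.09

    rawdatapath="R:\\{1[0]:4d}.{1[1]:02d}.{1[2]:02d}"
    print(rawdatapath.format("Pc-e4-qrt-7",[2017,11,9]))    # R:\2017.11.09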
@@ -118,7 +147,9 @@ for date in dates:
        raw_data=False
    for i in active_PC:
        if only_upload_lattest:
            single_upload=True
            singlelist=[findthelatestfilename(rawdatapath.format(pc_list[i],date))]
        port=int(ports[i].split('AEF')[1])
        path=rawdatapath.format(pc_list[i],date)
        print(datetime.datetime.now(),"Starting Upload for "+ports[i],date)
@@ -152,20 +183,24 @@ for date in dates:
    if len(filelist)>0: # check for empty folder!
        for file in filelist:
            dummy=file.split("_")
-            if dummy[1]=='background':
-                blist[dummy[7]]=file
-            elif dummy[1]=='nuc':
-                if dummy[2]=='cold':
-                    clist[dummy[8]]=file
-                elif dummy[2]=='gain':
-                    glist[dummy[8]]=file
-                elif dummy[2]=='offset':
-                    olist[dummy[8]]=file
-            elif dummy[1]=='raw':
-                rlist[dummy[3]]=file
-            else:
-                print("unexpected file in the folder!")
-                foutf.write(datetime.datetime.now(),"unexpected file in the folder",prog_time,"\n")
+            if date[0]>=2018:
+                if dummy[4]=='raw' or dummy[4]=='cal':
+                    rlist[dummy[1]]=file
+            else:
+                if dummy[1]=='background':
+                    blist[dummy[7]]=file
+                elif dummy[1]=='nuc':
+                    if dummy[2]=='cold':
+                        clist[dummy[8]]=file
+                    elif dummy[2]=='gain':
+                        glist[dummy[8]]=file
+                    elif dummy[2]=='offset':
+                        olist[dummy[8]]=file
+                elif dummy[1]=='raw':
+                    rlist[dummy[3]]=file
+                else:
+                    print("unexpected file in the folder!")
+                    foutf.write(datetime.datetime.now(),"unexpected file in the folder",prog_time,"\n")
        print(datetime.datetime.now()," Start NUC upload")
        # upload of NUC files, background files
        if cameras[i]=="IRCAM":
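The new 2018 branch keys the raw-file dictionary on dummy[1] and tests the field at index 4, while the pre-2018 branch reads indices 1, 2, 3, 7 and 8. Since the actual naming scheme is not visible in this diff, the following split is purely hypothetical and only shows which fields the indices refer to:

    file="AEF10_20180717.010_ircam_raw_cal_01.h5"   # invented example name
    dummy=file.split("_")
    # dummy[1] -> '20180717.010' (used as the rlist key in the 2018 branch)
    # dummy[4] -> 'cal'          (the type field tested against 'raw'/'cal')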
@@ -281,7 +316,8 @@ for date in dates:
                         'pos_left':int(np.asarray(attri1['ROI_x'])[0]),
                         'pos_top':int(np.asarray(attri1['ROI_y'])[0]),
                         'bitdepth':int(np.asarray(attri1['bitdepth'])[0])
                         }
                        }
            # tmeta1={'divertortemperature_K':np.asarray(attri1['divertor_temp_K'])[0],
            #         'timestamp':np.asarray(attri1['timestamp'])[0]}
            time1=np.asarray(hf1['timestamps'])
@@ -298,8 +334,8 @@ for date in dates:
                         'pos_left':int(np.asarray(attri2['ROI_x'])[0]),
                         'pos_top':int(np.asarray(attri2['ROI_y'])[0]),
                         'bitdepth':int(np.asarray(attri2['bitdepth'])[0])
                         }
                        }
            # tmeta2={'divertortemperature_K':np.asarray(attri2['divertor_temp_K'])[0],
            #         'timestamp':np.asarray(attri2['timestamp'])[0]}
            time2=np.asarray(hf2['timestamps'])
@@ -311,36 +347,100 @@ for date in dates:
            metadata1['cameraname']='IRCam_Caleo768kL'
            metadata2['cameraname']='IRCam_Caleo768kL'
            metadata1['purpose']='NUC+background'
            metadata2['purpose']='experiment'
            #['ROI_h', 'ROI_w', 'ROI_x', 'ROI_y', 'Size',
            #'bitdepth', 'camera_temp_C', 'divertor_temp_K', 'frame_number',
            #'frame_rate', 'integration_time_us', 'sensor_temp_C', 'timestamp']
        else:
-            metadata1['cameraname']='INFRATEC_IMAGEIR9300'
-            metadata1['filter']=int(np.asarray(attri1['filter'])[0])
-            #metadata1['multi_exposure']=np.asarray(attri1['multi_exposure'])[0]
-            metadata2['cameraname']='INFRATEC_IMAGEIR9300'
-            metadata2['filter']=int(np.asarray(attri2['filter'])[0])
+            metadata1['cameraname']='INFRATEC_IMAGEIR9300'
+            metadata2['cameraname']='INFRATEC_IMAGEIR9300'
+            metadata1['purpose']='background'
+            metadata2['purpose']='experiment'
+            #metadata2['multi_exposure']=np.asarray(attri2['multi_exposure'])[0]
+            #metadata1['multi_exposure']=np.asarray(attri1['multi_exposure'])[0]
        if date[0]>=2018:
            metadata1['software_version']=hf1['images'].attrs['software_version'].decode("UTF-8")
            metadata1['filter']=int(np.asarray(attri1['filter'])[0])
            metadata1['view_flip_h']=bool(np.asarray(attri1['view_flip_h'])[0])
            metadata1['view_flip_v']=bool(np.asarray(attri1['view_flip_v'])[0])
            metadata2['software_version']=hf2['images'].attrs['software_version'].decode("UTF-8")
            metadata2['filter']=int(np.asarray(attri2['filter'])[0])
            metadata2['view_flip_h']=bool(np.asarray(attri2['view_flip_h'])[0])
            metadata2['view_flip_v']=bool(np.asarray(attri2['view_flip_v'])[0])
            metadata1['cameraname']=hf1['images'].attrs['camera'].decode("UTF-8")
            metadata2['cameraname']=hf2['images'].attrs['camera'].decode("UTF-8")
        framenumber1=np.asarray(attri1['frame_number'])
        framenumber2=np.asarray(attri2['frame_number'])
        try:
            ftype1=np.asarray(attri1['frame_type'])
            frametype1=[]
            for ele in ftype1:
                if ele=="nuc":
                    frametype1.append(0)
                elif ele=="background":
                    frametype1.append(1)
                elif ele=="discharge":
                    frametype1.append(2)
                else:
                    frametype1.append(-1)
        except:
            frametype1=np.zeros((len(framenumber1)))-1
        try:
            ftype2=np.asarray(attri2['frame_type'])
            frametype2=[]
            for ele in ftype2:
                if ele=="nuc":
                    frametype2.append(0)
                elif ele=="background":
                    frametype2.append(1)
                elif ele=="discharge":
                    frametype2.append(2)
                else:
                    frametype2.append(-1)
        except:
            frametype2=np.zeros((len(framenumber2)))-1
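The string-to-integer coding above (0 nuc/cold, 1 background, 2 discharge, -1 unknown, as also documented a few lines below in comment_meta1) could be written more compactly with a dictionary lookup; a small equivalent sketch, not part of the commit:

    FRAME_TYPE_CODES={"nuc":0,"background":1,"discharge":2}

    def encode_frame_types(ftype):
        # unknown labels fall back to -1, matching the else branch above
        return [FRAME_TYPE_CODES.get(ele,-1) for ele in ftype]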
            channel_names=["sensortemperature","cameratemperature","framerate","exposuretime","framenumber","frametype"]
            channel_units=["oC","oC",'Hz','us','none','none']
            channel_values1=[sensor1,chip1,framerate1,expo1,framenumber1,frametype1]
            channel_values2=[sensor2,chip2,framerate2,expo2,framenumber2,frametype2]
            comment_meta1="frametype: 0 cold, 1 background, 2 discharge, -1 unknown"
            comment_meta2="frametype: 0 cold, 1 background, 2 discharge, -1 unknown"
            triggered=np.asarray(attri2['triggered'])
            trig_index=np.where(triggered==1)
            if len(trig_index[0])>0:
                trig_index=min(trig_index[0])
                toff=(time2[0]-time2[trig_index])/1e6
            else:
                print("triggered frame was not found! Assumeing that frame 0 is the t1 frame")
                toff=0
            print(datetime.datetime.now()," regenerate timevectors")
            time1=IRup.regenerate_timevector(time1,0,t2,0)
            time2=IRup.regenerate_timevector(time2,1,t2,toff)
        else:
            channel_names=["sensortemperature","cameratemperature","framerate","exposuretime"]
            channel_units=["oC","oC",'Hz','us']
            channel_values1=[sensor1,chip1,framerate1,expo1]
            channel_values2=[sensor2,chip2,framerate2,expo2]
            comment_meta1=""
            comment_meta2=""
            print(datetime.datetime.now()," regenerate timevectors")
            time1=IRup.regenerate_timevector(time1,0,t2,0)
            time2=IRup.regenerate_timevector(time2,[1,6],t2,0)
        hf1.close()
        del attri1
        hf2.close()
        del attri2
        print(datetime.datetime.now()," regenerate timevectors")
        time1=IRup.regenerate_timevector(time1,0,t2,0)
        time2=IRup.regenerate_timevector(time2,[1,6],t2,0)
        del attri2
        print(datetime.datetime.now()," Upload Meta Stream(s)")
        if metastream:
            try:
-                IRup.upload_meta_stream(cameras[i],port,np.asarray(time1),sensor1,chip1,framerate1,expo1,newversion=newversion,reason=reason)
+                IRup.upload_meta_stream_V2(cameras[i],port,np.asarray(time1),channel_names,channel_units,channel_values1,newversion=newversion,reason=reason)
            except Exception as E:
                print(E)
                foutf.write(datetime.datetime.now(),"metastream 1 upload failed",E,"\n")
            del sensor1,chip1,framerate1,expo1
            try:
-                IRup.upload_meta_stream(cameras[i],port,np.asarray(time2),sensor2,chip2,framerate2,expo2)
+                IRup.upload_meta_stream_V2(cameras[i],port,np.asarray(time2),channel_names,channel_units,channel_values2,newversion=newversion,reason=reason)
            except Exception as E:
                print(E)
                foutf.write(datetime.datetime.now(),"metastream 2 upload failed",E,"\n")
@@ -389,7 +489,7 @@ for date in dates:
                print("raw upload deactivated")
                foutf.write(datetime.datetime.now(),"raw upload deactivated","\n")
        else: #folder is empty
            print("no files in",prog_time)
            reupload=False
    except Exception as E:
        print("Cannot Upload Data for folder "+str(prog_time))
downloadversionIRdata.py
@@ -165,8 +165,8 @@ def download_LUT(port,time,exposure=0,emissivity=0,camera_filter=0,version=0, ve
        raise Exception
    if version==0:
        version=get_latest_version(stream+"DATASTREAM")
    if verbose>0:
        print("LUT V"+str(version)+" is used")
    #time=int(fu.TimeToNs([2017,9,26],[8,0,0,0]))
    LUTpar=read_restdb_old(larchivepath+"PARLOG/V"+str(version)+"/_signal.json?from="+str(time-10)+"&upto="+str(time+20))
    if LUTpar[0]:
@@ -1528,7 +1528,11 @@ def make_FOV_mask(port):
    y0=points[1]
    x0=points[0]
    r1=points[2]
-    da,time,back=download_background_by_program(port,"20171109.045",50)
+    if port==50:
+        exposure=50
+    else:
+        exposure=9
+    da,time,back=download_background_by_program(port,"20171109.045",exposure)
#    fig = plt.figure()
#    plt.imshow(back,vmin=np.average(back)-200,vmax=np.average(back)+500)
#    inner_circle = mlt.patches.Circle((x0,y0), r1,color = 'r', fill = False)
@@ -1611,13 +1615,16 @@ def estimate_offset(port,program,plot_it=False):
if __name__=='__main__':
    print("local function calling")
-    port=11
+    port=20
#    prog="20171108.015"
    prog="20171109.021"
#    prog="20171025.030"
#    prog="20171012.035"
    prog="20171005.024"
    status,time,images,valid=get_temp_from_raw_by_program(port,prog,time_s=[0,1],emi=0.82,T_version=2,version=0,threads=1,give_ERROR=False,use_firstframe_as_background=False)
    prog="20171109.056"
    status,time,images,valid=get_temp_from_raw_by_program(port,prog,time_s=[0,6],emi=0.82,T_version=2,version=0,threads=4,give_ERROR=False,use_firstframe_as_background=False)
#    status,time,images=download_raw_images_by_program_via_png(port,prog,time_s=6,threads=4,verbose=10)
#    print(len(images))
#    import h5py as h5
#    szene=h5.File("X:\\E4 Diagnostics\\QIR\Calibrations\\scene_models\\AEF"+str(port)+"\\AEF"+str(port)+"_scene_model.h5")
#    Xloc=np.asarray(szene['x'])
@@ -1634,5 +1641,4 @@ if __name__=='__main__':
plt
.
imshow
(
images
[
-
1
],
vmin
=
300
,
vmax
=
600
,
cmap
=
exJet
)
cb
=
plt
.
colorbar
()
cb
.
set_label
(
"temperature K"
,
rotation
=
270
,
labelpad
=
26
,
fontsize
=
26
)
cb
.
ax
.
tick_params
(
labelsize
=
26
)
cb
.
ax
.
tick_params
(
labelsize
=
26
)
\ No newline at end of file
upload_config (new file, 0 → 100644)
### specifications ###
port="AEF10"
local=True
delayupload=True
starttime="20:00:00"
single_upload=False
use_today=False
upload_only_lattest=False
dates=[[2017,11,9]]
singlelist=[]
### settings ###
delete_Files=False
rawdata=True
raw1=True
raw2=True
raw_parlog=True
back_parlog=False
background=False
nuc_parlog=False
nuc=False
metastream=True
temperature=False
retry=2
archivedb=False
### versioning ###
reupload=False
reason=""
uploadingversionIRdata.py
@@ -285,7 +285,11 @@ def TimeToNs(date,time):
### START of the UPLOAD ROUTINES ###
####################################
def upload_meta_stream(camera,portnum,time,sensortemp,cameratemp,framerate,exposuretime,comment="",newversion=False,reason=""):
-    group="QRT_"+camera#+"_TEST"
+    """
+    old: sensortemp,cameratemp,framerate,exposuretime
+    new: channels with names, units and values
+    """
+    group="QRT_IRCAM"#+camera#+"_TEST"
    datastream_name="AEF"+str(portnum)+"_meta"
    url="http://archive-webapi.ipp-hgw.mpg.de/Test/raw/"+project+"/"+group+"/"+datastream_name
    if newversion:
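The datastream URL is plain string concatenation; for port 10 it would look like the line below. The value of the module-level project variable is not shown in this diff, so the "W7X" here is only a placeholder:

    project="W7X"                                   # assumption: real value not visible in this commit
    group="QRT_IRCAM"
    datastream_name="AEF"+str(10)+"_meta"
    url="http://archive-webapi.ipp-hgw.mpg.de/Test/raw/"+project+"/"+group+"/"+datastream_name
    # -> http://archive-webapi.ipp-hgw.mpg.de/Test/raw/W7X/QRT_IRCAM/AEF10_meta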
@@ -320,6 +324,45 @@ def upload_meta_stream(camera,portnum,time,sensortemp,cameratemp,framerate,expos
"dimensions"
:
time
.
tolist
()
}
upload_fulldataset
(
url
,
parobj
,
dataobj
,
version
)
def
upload_meta_stream_V2
(
camera
,
portnum
,
time
,
channel_names
,
channel_units
,
channel_values
,
comment
=
""
,
newversion
=
False
,
reason
=
""
):
"""
old: sensortemp,cameratemp,framerate,exposuretime
new: channels with names, units and values
"""
if
len
(
time
)
==
len
(
channel_values
[
0
])
and
len
(
channel_names
)
==
len
(
channel_units
):
group
=
"QRT_IRCAM"
#+camera#+"_TEST"
datastream_name
=
"AEF"
+
str
(
portnum
)
+
"_meta"
url
=
"http://archive-webapi.ipp-hgw.mpg.de/Test/raw/"
+
project
+
"/"
+
group
+
"/"
+
datastream_name
if
newversion
:
archive_create_stream_version
(
url
,
reason
,
False
,
"n/a"
,
"n/a"
)
ver
=
get_latest_version
(
"Test/raw/"
+
project
+
"/"
+
group
+
"/"
+
datastream_name
+
"_PARLOG"
)
version
=
ver
else
:
ver
=
get_latest_version
(
"Test/raw/"
+
project
+
"/"
+
group
+
"/"
+
datastream_name
+
"_PARLOG"
)
if
ver
==
None
:
reason
=
"Versioning of Data"
archive_create_stream_version
(
url
,
reason
,
False
,
"n/a"
,
"n/a"
)
version
=
1
else
:
version
=
ver
chanDesc
=
{}
for
i
in
range
(
len
(
channel_names
)):
chanDesc
[
"["
+
str
(
i
)
+
"]"
]
=
{
'name'
:
channel_names
[
i
],
'physicalQuantity'
:{
'type'
:
channel_units
[
i
]},
'active'
:
1
}
parobj
=
{
"label"
:
"parms"
,
"description"
:
"metadata informations for "
+
camera
+
" in AEF"
+
str
(
portnum
),
"values"
:[{
"chanDescs"
:
chanDesc
,
'comment'
:
comment
}],
"dimensions"
:[
time
[
0
],
time
[
-
1
]]
}
channels
=
[]
for
vals
in
channel_values
:
channels
.
append
(
vals
)
channels
=
np
.
asarray
(
channels
)
dataobj
=
{
"datatype"
:
"float"
,
"values"
:
channels
.
tolist
(),
"dimensions"
:
time
.
tolist
()
}
upload_fulldataset
(
url
,
parobj
,
dataobj
,
version
)
def
upload_raw_data
(
camera
,
portnum
,
time
,
images
,
metadata
=
{},
comment
=
""
,
newversion
=
False
,
reason
=
""
):
"""
...
...
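To make the new parlog layout concrete: for the six channels used by the day-upload script, the chanDesc dictionary built inside upload_meta_stream_V2 comes out as follows (first entry shown in the comment):

    channel_names=["sensortemperature","cameratemperature","framerate","exposuretime","framenumber","frametype"]
    channel_units=["oC","oC",'Hz','us','none','none']
    chanDesc={}
    for i in range(len(channel_names)):
        chanDesc["["+str(i)+"]"]={'name':channel_names[i],'physicalQuantity':{'type':channel_units[i]},'active':1}
    # chanDesc["[0]"] == {'name': 'sensortemperature', 'physicalQuantity': {'type': 'oC'}, 'active': 1}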
@@ -332,7 +375,7 @@ def upload_raw_data(camera,portnum,time,images,metadata={},comment="",newversion
    (bool) newversion: defines whether a new version is needed
    (str) reason: reason for the new version
    """
-    group="QRT_"+camera#+"_TEST" # test part will be removed in final version
+    group="QRT_IRCAM"#+camera#+"_TEST" # test part will be removed in final version
    datastream_name="AEF"+str(portnum)+"_raw"#_TEST_001"
    chanDesc=[{'name':'raw','physicalQuantity':{'type':'none'},'active':1}]
    #adjusted to metadataforcurrentvideo
@@ -384,6 +427,7 @@ def regenerate_timevector(time,reftrig=1,tref=0,toff=0):
    else:
        print("Program for this reference timestamp not found, cannot generate new timevector")
        return np.asarray(time)

def upload_raw_data_from_H5(camera,portnum,time,h5file,metadata={},comment="",parlog=True,rawdata=True,newversion=False,reason=""):
    """
@@ -396,7 +440,7 @@ def upload_raw_data_from_H5(camera,portnum,time,h5file,metadata={},comment="",pa
    (str) reason: reason for the new version
    """
    #camera="IRCAM"
-    group="QRT_"+camera#+"_TEST" # test part will be removed in final version
+    group="QRT_IRCAM"#+camera#+"_TEST" # test part will be removed in final version
    datastream_name="AEF"+str(portnum)+"_raw"#_TEST7"
    #not tested#
    chanDesc=[{'name':'raw','physicalQuantity':{'type':'none'},'active':1}]
@@ -460,7 +504,7 @@ def upload_temperaure_data(camera,portnum,images,time,metadata={},comment="",new
    upload_images(camera,portnum,images,time,"temperature","K",parlog,newversion=False,reason="")

def upload_Background_frame(camera,portnum,time,texp_filter,background,parlog=True,data=True,newversion=False,reason=""):
-    group="QRT_"+camera # test part will be removed in final version
+    group="QRT_IRCAM"#+camera # test part will be removed in final version
#    group="QRT_IRCAM_TEST" # test part will be removed in final version
    name=["background"]
    datastream_name="AEF"+str(portnum)+"_background" # test part will be removed in final version
@@ -517,7 +561,7 @@ def upload_NUC_ref(camera,portnum,time,texp_filter,gain,offset,cold,bad,gain_e=[
    (str) reason: reason for the new version
    """
    #camera="IRCAM" #will be removed in final version
-    group="QRT_"+camera#+"_TEST" # test part will be removed in final version
+    group="QRT_IRCAM"#+camera#+"_TEST" # test part will be removed in final version
    name=["NUC"]
    datastream_name="AEF"+str(portnum)+"_NUC" # test part will be removed in final version
    if np.shape(gain)==np.shape(bad):
@@ -588,7 +632,7 @@ def upload_LUT_tables(LUTs,LUTref,camera,portnum,time,newversion=False,reason=""
    (str) reason: reason for the new version
    """
    #camera="IRCAM" #will be removed in final version
-    group="QRT_"+camera#+"_TEST" # test part will be removed in final version
+    group="QRT_IRCAM"#+camera#+"_TEST" # test part will be removed in final version
    name=["LUT"]
    datastream_name="AEF"+str(portnum)+"_LUT"
    #test of dataformat
@@ -649,21 +693,21 @@ def upload_hot_cold_reference(port,path):
    """
    """
-    if port==50:
-        camera="INFRATEC"
-    else:
-        camera="IRCAM"
-    group="QRT_"+camera#+"_TEST" # test part will be removed in final version
+#    if port==50:
+#        camera="INFRATEC"
+#    else:
+    camera="IRCAM"
+    group="QRT_IRCAM"#+camera#+"_TEST" # test part will be removed in final version
    name=["raw"]
    datastream_name="AEF"+str(port)+"_raw"
    print("not ready")

def upload_scene_model(port,time_ns,path="\\\\sv-e4-fs-1\\E4-Mitarbeiter\\E4 Diagnostics\\QIR\\Calibrations\\scene_models",newversion=False,reason=""):
-    if port==50:
-        camera="INFRATEC"
-    else:
-        camera="IRCAM"
-    group="QRT_"+camera#+"_TEST" # test part will be removed in final version
+#    if port==50:
+#        camera="INFRATEC"
+#    else:
+    camera="IRCAM"
+    group="QRT_IRCAM"#+camera#+"_TEST" # test part will be removed in final version
    name=["scene_model"]
    datastream_name="AEF"+str(port)+"_scene_model"
    url="http://archive-webapi.ipp-hgw.mpg.de/Test/raw/"+project+"/"+group+"/"+datastream_name
@@ -700,6 +744,27 @@ def upload_scene_model(port,time_ns,path="\\\\sv-e4-fs-1\\E4-Mitarbeiter\\E4 Dia
            }
    upload_Parlog(url,parobj,version=version)
    upload_images(camera,port,framelist,time_ns_list,"scene_model",version)

#def upload_NUC_ref():
#    """
#
#    """
#    print("not ready")

def download_LUT_tables(camera,port,time,texp=0,emissivity=0,Filter=0):
    """
    (str) camera: string of cameraname "IRCAM" or "INFRATEC", is used for the path
    (numpy.array) time: time vector in uint64 ns of length t
    (bool) newversion: defines whether a new version is needed
    (int) portnum: portnumber as int, example: 10 or 11 etc.
    in case of texp=0 and emissivity=0 it will give back all LUTs
    """
    print("not ready")
    return 0

def upload_Parlog(url,parobj,versionnumber=0):
#    print('>upload_Parlog: json of parlog')
@@ -734,24 +799,24 @@ def upload_fulldataset(url,parlog,data,versionnumber,bparlog=True,bdata=True):
### from here on subfunctions
-def upload_images(camera,portnum,images,time,typ,version):
+def upload_images(camera,portnum,images,time,typ,newversion=False,reason=""):
    """
    upload_images(camera,portnum,images,time,metadata={},newversion=False,reason="")
    """
    #camera="IRCAM" #will be removed in final version
-    group="QRT_"+camera#+"_TEST" # test part will be removed in final version
+    group="QRT_IRCAM"#+camera#+"_TEST" # test part will be removed in final version
#    names=[typ]
    datastream_name="AEF"+str(portnum)+"_"+typ#+"_TEST001"
    #url="http://archive-webapi.ipp-hgw.mpg.de/Test/raw/"+project+"/"+group+"/"+datastream_name
-#    if newversion:
-#        print('no version control of HDF5 files yet!')
+    if newversion:
+        print('no version control of HDF5 files yet!')
#        archive_create_stream_version(url, reason, False, "n/a", "n/a" )
    try:
#        archive.write_signals(images,time,group,project,names, datastream_name,units,metadata,2,True)
        tmp=writeH5(datastream_name,images,time)
-        uploadH5_Versioning(group,datastream_name,tmp,version,True)
+        uploadH5(group,datastream_name,tmp,True)
    except Exception as E:
        print("Upload Failed")
        print(E)
@@ -857,6 +922,8 @@ def writeH5_from_File(stream,filename,key,dimof,idx=None):
    """
#    stream = path.stream
#    dtype = str(data.dtype)
+    import struct
+    get_P=8*struct.calcsize("P")
    filesize=os.stat(filename).st_size
    f5=h5reader.File(filename,'r')
    if filesize<4000000000:
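struct.calcsize("P") is the byte size of a C pointer, so get_P evaluates to 32 on a 32-bit Python and 64 on a 64-bit build; the chunked write path below is selected when get_P < 64. A standalone check:

    import struct
    import sys

    get_P=8*struct.calcsize("P")        # 32 or 64, depending on the interpreter build
    print(get_P,sys.maxsize>2**32)      # both indicators agree: 64 pairs with True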
@@ -892,8 +959,13 @@ def writeH5_from_File(stream,filename,key,dimof,idx=None):
            g=f.create_group('data')
            # requires [row,col,time]
            g.create_dataset('timestamps',data=list(times[i]),dtype='uint64')#,compression="gzip")
            dset=g.create_dataset(stream,shape=(shapi[0],shapi[1],limits[i+1]-limits[i]),dtype='uint16',chunks=(shapi[0],shapi[1],1))#,compression='gzip')
            for n in range(limits[i+1]-limits[i]):
                dset[:,:,n]=f5[key][:,:,limits[i]+n]
    if get_P<64: #check python version of 32 or 64 bit to decide which method should be used