# -*- coding: utf-8 -*-
"""
Created on Thu Aug 24 11:24:52 2017
@author: aali
"""
### code follows ###
import urllib.request
import urllib.error
import json
#from getfoldernames import Searchdatalocation as gfn
import numpy as np
#from binfilesreadingv2 import BinVideoReader as bvreader
import os
import datetime as dt
try:
    import h5py as h5reader
except ImportError:
    print('WARNING: "h5py" package not found.\nImage upload will not be available')
"""
This is the json version of converting the local time (ISO-8601) to W7-X time
http://archive-webapi.ipp-hgw.mpg.de/w7xtime.json
"""
"""
Create versioned archive stream
Inputs:
:param url: give complete URL without _PARLOG or _DATASTREAM
:param reason: give string explaining version bump
:param parlog_only: create only PARLOG, no DATASTREAM
:param code_release: code release
:param analysis_environment: analysis environment
Output:
return status of the upload
Sample call
archive_create_stream_version("Test/raw/W7X/ADB_Massenspektrometer/VersioningTest", "testwise version bump", parlog_only=False)
archive_create_stream_version("Test/raw/W7X/QSR_IRCam/AEF10/VersioningTest", "testwise version bump", parlog_only=False)
"""
def archive_create_stream_version(url, reason, parlog_only = False, code_release = "n/a", analysis_environment = "n/a"):
    #by Georg Schlisio, edited by Holger Niemann
# check sanity
assert isinstance(reason, str)
assert reason != ""
# refine url format
    if url.find('archive-webapi.ipp-hgw.mpg.de/') == -1:
        url = 'http://archive-webapi.ipp-hgw.mpg.de/' + url
url = url.replace(" ","%20")
assert len(url.replace('archive-webapi.ipp-hgw.mpg.de/', "").split("/")) == 7
assert url.find("_DATASTREAM") == -1
assert url.find("_PARLOG") == -1
# create version object
vobject = {"versionInfo":[
{"reason": reason,
"producer": "holn", # TODO
"code_release": code_release,
"analysis_environment": analysis_environment
}]
}
parlog_request = urllib.request.Request( url + "_PARLOG" + "/_versions.json", data = json.dumps(vobject).encode("utf-8"),
headers = { "Content-type" : "application/json" } )
try:
parlog_response = urllib.request.urlopen(parlog_request)
except urllib.error.HTTPError as ex:
msg = ex.read()
raise RuntimeError(msg)
if not parlog_only:
datastream_request = urllib.request.Request( url + "_DATASTREAM" + "/_versions.json",
data=json.dumps(vobject).encode("utf-8"), headers={ "Content-type":"application/json" } )
try:
datastream_response = urllib.request.urlopen(datastream_request)
except urllib.error.HTTPError as ex:
msg = ex.read()
raise RuntimeError(msg)
        return (parlog_response.code == 201, datastream_response.code == 201)
return (parlog_response.code == 201)
def get_latest_version(stream):
"""Find out latest version of given stream
:param stream url stream of interest
:return int of version number of None if non-versioned or non-existing stream
author: Georg Schlisio"""
request = urllib.request.Request("http://archive-webapi.ipp-hgw.mpg.de/" + stream + "/_versions.json", headers={"Accept": "application/json"})
try:
response = urllib.request.urlopen(request)
d = json.loads(response.read().decode('utf-8'))
except urllib.error.HTTPError as ex:
msg = ex.read()
raise RuntimeError(msg)
# detect unversioned or non-existing stream
if d["versionInfo"] == []:
return None
versions = []
for i in d['versionInfo']:
versions.append(i['number'])
return max(versions)
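# Minimal usage sketch (hypothetical stream path), following the sample-call style above:
# check the current PARLOG version before an upload and create one if the stream is unversioned.
# ver = get_latest_version("Test/raw/W7X/QRT_IRCAM/AEF10_raw_PARLOG")
# if ver is None:
#     archive_create_stream_version("Test/raw/W7X/QRT_IRCAM/AEF10_raw", "Versioning of Data")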
def sendparlogjsonfileonly(url, jsondata, nameofdatastream):
if url.find('http://archive-webapi.ipp-hgw.mpg.de/') == -1:
url = 'http://archive-webapi.ipp-hgw.mpg.de/' + url
url = url.replace(" ","%20")
assert len(url.replace('http://archive-webapi.ipp-hgw.mpg.de/', "").split("/")) == 5
assert url.find("_DATASTREAM") == -1
assert url.find("_PARLOG") == -1
    request = urllib.request.Request( url + nameofdatastream + "_DATASTREAM", data = json.dumps(jsondata).encode("utf-8"),
                                      headers = { "Content-type" : "application/json" } )
    try:
        response = urllib.request.urlopen(request)
    except urllib.error.HTTPError as ex:
        msg = ex.read()
        raise RuntimeError(msg)
    return (response.code == 201)
#data['values'][0]['badpixelmap'] = np.swapaxes( data['values'][0]['badpixelmap'] , 0 ,1 )
#resp = sendparlogjsonfileonly("Test/raw/W7X/QRT_IRCAM_TEST/" , data , "LUT_test_adnanv11")
"""
---------------------------------------------------------------------------------------------------------------------------
from here on, the upload routines from holn
---------------------------------------------------------------------------------------------------------------------------
"""
#import archive
project="W7X"
###definition of interactions with the codac archive to get times and other needed functions
import datetime
#import os
from requests import post
def get_program_now(tol=60):
"""
get_program_now delivers an array [status,dict]
"""
program_url = 'http://archive-webapi.ipp-hgw.mpg.de/programs.json'
try:
res = urllib.request.urlopen(program_url)
# prog_list = json.loads(res.read().decode('utf-8'))
prog_raw=res.read()
res.close()
except urllib.error.URLError as e:
        print('!get_program_now: error opening URL, falling back to time window now +- '+str(tol)+'s')
print(e)
# return False,0
now=datetime.datetime.utcnow()
ns=TimeToNs([now.year,now.month,now.day],[now.hour,now.minute,now.second,now.microsecond])
return read_program(ns)
else:
prog_string=prog_raw.decode(encoding='UTF-8')
prog_list = json.loads(prog_string)
pl=prog_list['programs'][0]
return True, pl
def match_time_vectors(timevector,T0,T1,T6,ref_T=1,raw=False):
"""
match_time_vectors(timevector,t0,t1,t6,ref='t1')
(np.array/array) timevector: array or list of the timestamps for each frame, in µs
(integer) t0: trigger t0 in ns
(integer) t1: trigger t1 in ns
(integer) t6: trigger t6 in ns
(int) ref: says which trigger is the reference, so where the timevector starts
(bool) raw: is this a raw timevector with cold and background images included?
"""
ctimev=[]
tstart=0
if ref_T==0:
tstart=T0
elif ref_T==1:
tstart=T1
    elif ref_T==6:
        tstart=T6
    else:
        print("reference trigger is not one of 0, 1 or 6; stopping matching")
        exit()
if raw:
print("not implemented")
# for t in range(1,len(timevector)):
# if
else:
ctimev.append(tstart)
for t in range(1,len(timevector)):
ctimev.append(tstart+(timevector[t]-timevector[0]))
return ctimev
def match_time_vectors_lowlevel(timevector,tstart,tend=None):
"""
match_time_vectors(timevector,t0,t1,t6,ref='t1')
(np.array/array) timevector: array or list of the timestamps for each frame, in ns
(integer) tstart: starttime in ns
(integer) tend: endtime in ns
"""
if tend is None:
ctimev=np.subtract(timevector,timevector[0])+tstart
    else:
        # rescale so that the corrected vector spans [tstart, tend]
        r=(tend-tstart)/(timevector[-1]-timevector[0])
        ctimev=np.subtract(timevector,timevector[0])*r+tstart
return ctimev
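# Sample calls (hypothetical numbers): a pure shift keeps the frame spacing, while a
# given tend additionally rescales the vector onto [tstart, tend]:
# match_time_vectors_lowlevel([0, 10, 20], 1000)            -> [1000, 1010, 1020]
# match_time_vectors_lowlevel([0, 10, 20], 1000, tend=1040) -> [1000, 1020, 1040]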
def read_program(timestampstart,timestamp_end=0,tol=60):
"""
read_program()
"""
program_url = 'http://archive-webapi.ipp-hgw.mpg.de/programs.json?from'
if timestamp_end!=0:
jsonurl=program_url+"="+str(int(timestampstart-tol*1e9))+"&upto="+str(int(timestamp_end+tol*1e9))
# print(program_url+"="+str(int(timestampstart-tol*1e9))+"&upto="+str(int(timestamp_end+tol*1e9)))
else:
jsonurl=program_url+"="+str(int(timestampstart-tol*1e9))+"&upto="+str(int(timestampstart+tol*1e9))
# print(program_url+"="+str(int(timestampstart-tol*1e9))+"&upto="+str(int(timestampstart+tol*1e9)))
try:
res = urllib.request.urlopen(jsonurl)
# prog_list = json.loads(res.read().decode('utf-8'))
prog_raw=res.read()
res.close()
except urllib.error.URLError as e:
print('!get_program: Error opening URL')
print(e)
return False,0
else:
prog_string=prog_raw.decode(encoding='UTF-8')
prog_list = json.loads(prog_string)
pl=prog_list['programs'][0]
return True, pl
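# Sample call (hypothetical timestamp): fetch the program covering a timestamp and
# read its t1 trigger, as done in regenerate_timevector below.
# ok, prog = read_program(int(tref_ns), tol=70)
# if ok:
#     t1 = prog['trigger']['1'][0]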
def get_last_T0(ns=True):
"""
get_last_T0(ns=True)
(bool) ns True or False, whether ns timestamp should be returned, if false a datetime will be returned
"""
url="http://sv-coda-wsvc-3/last_trigger"
try:
res=urllib.request.urlopen(url)
except urllib.error.HTTPError as ex:
msg = ex.read()
raise RuntimeError(msg)
t=int(res.read())
if ns:
return t
else:
return datetime.datetime.utcfromtimestamp(t/1e9)
def TimeToNs(date,time):
"""
TimeToNs(date,time)
(list) date [year,month,day]
(list) time [hours,minutes,seconds,microseconds]
"""
date_time=datetime.datetime(date[0],date[1],date[2],time[0],time[1],time[2],time[3])
div=date_time-datetime.datetime(1970,1,1,0,0,0)
# nsdate=calendar.timegm(datetime.datetime.timetuple(date_time))*1e9#drops the ms,µs!!
nsdate=div.total_seconds()*1e9
return nsdate
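# Sample call: nanoseconds since 1970-01-01 for 2017-08-24 11:24:52 UTC
# (note the value is returned as float, so sub-microsecond precision is limited).
# TimeToNs([2017, 8, 24], [11, 24, 52, 0])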
####################################
### START of the UPLOAD ROUTINES ###
####################################
def upload_meta_stream(camera,portnum,time,sensortemp,cameratemp,framerate,exposuretime,comment="",newversion=False,reason=""):
    """
    upload sensor temperature, camera temperature, framerate and exposure time as the camera meta datastream
    """
group="QRT_"+camera#+"_TEST"
datastream_name="AEF"+str(portnum)+"_meta"
url="http://archive-webapi.ipp-hgw.mpg.de/Test/raw/"+project+"/"+group+"/"+datastream_name
if newversion:
archive_create_stream_version(url, reason, False, "n/a", "n/a" )
ver=get_latest_version("Test/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG")
version=ver
else:
ver=get_latest_version("Test/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG")
        if ver is None:
reason="Versioning of Data"
archive_create_stream_version(url, reason, False, "n/a", "n/a" )
version=1
else:
version=ver
chanDesc={"[0]":{'name':'sensortemperature','physicalQuantity':{'type':'oC'},'active':1},
"[1]":{'name':'cameratemperature','physicalQuantity':{'type':'oC'},'active':1},
"[2]":{'name':'framerate','physicalQuantity':{'type':'none'},'active':1},
"[3]":{'name':'exposuretime','physicalQuantity':{'type':'us'},'active':1}
}
parobj={"label":"parms",
"description":"metadata informations for "+camera+" in AEF"+str(portnum),
"values":[{"chanDescs":chanDesc}],
"dimensions":[time[0],time[-1]]
}
channels=[sensortemp,
cameratemp,
framerate,
exposuretime]
channels=np.asarray(channels)
dataobj={"datatype":"float",
"values":channels.tolist(),
"dimensions":time.tolist()
}
upload_fulldataset(url,parobj,dataobj,version)
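# Sample call (hypothetical values): upload housekeeping data for an IRCAM camera in
# AEF10; 'time' is a uint64 ns numpy array and the four channel vectors have the same length.
# upload_meta_stream("IRCAM", 10, time, sensortemp, cameratemp, framerate, exposuretime)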
def upload_raw_data(camera,portnum,time,images,metadata={},comment="",newversion=False,reason=""):
"""
upload_raw_data(camera,portnum,images,time,metadata={},newversion=False,reason="")
(str) camera: string of cameraname "IRCAM" or "INFRATEC", is used for the path
(int) portnum: portnumber as int, example: 10 or 11 etc.
(numpy.array) images: t x w x h numpy array
(numpy.array) time: time vector in uint64 ns of length t
(dict) metadata: the metadata of the file, should contain texp, filter in case of INFRATEC, camera_modell, serial_number,framerate, if croped: pos_left, pos_right, multi_exp, sensor_temperature in K, bitdepth
(bool) newversion: defines whether a new version is needed
(str) reason: reason for the new version
"""
group="QRT_"+camera#+"_TEST" # test part will be removed in final version
datastream_name="AEF"+str(portnum)+"_raw"#_TEST_001"
chanDesc=[{'name':'raw','physicalQuantity':{'type':'none'},'active':1}]
#adjusted to metadataforcurrentvideo
print("convert image data")
images=np.asarray(images,dtype=np.int16)
# parlog={'chanDescs':chanDesc,'meta-data':header,'comment':comment}
parobj={"label":"parms",
"description":"raw data for "+camera+" in AEF"+str(portnum),
"values":[{"chanDescs":chanDesc,'meta-data':metadata,'comment':comment}],
"dimensions":[time.tolist()[0],time.tolist()[-1]]
}
url="http://archive-webapi.ipp-hgw.mpg.de/Test/raw/"+project+"/"+group+"/"+datastream_name
if newversion:
archive_create_stream_version(url, reason, False, "n/a", "n/a" )
ver=get_latest_version("Test/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG")
version=ver
else:
ver=get_latest_version("Test/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG")
        if ver is None:
reason="Versioning of Data"
archive_create_stream_version(url, reason, False, "n/a", "n/a" )
version=1
else:
version=ver
print("writing parlog")
upload_Parlog(url,parobj,version)
upload_images(camera,portnum,images,time,"raw",newversion,reason)
def regenerate_timevector(time,reftrig=1,tref=0,toff=0):
"""
regenerate_timevector(time,reftrig,tref,toff)
(np.array) time: timevector of length t, 1d, in ns
(int) reftrig: number of the trigger which is used to refere the newtime to
(tref) tref: reference time for searching t0,t1,t6, for =0 it uses the first value in time
(toff toff: offset of the timevector in ms
"""
if tref==0:
tref=time[0]
prog=read_program(int(tref),int(tref+time[-1]-time[0]),70)
if prog[0]:
if np.size(reftrig)==1: #shift of timevector
tref=prog[1].get('trigger').get(str(np.squeeze(reftrig)))[0]
ntime=np.asarray(match_time_vectors_lowlevel(time,tref+toff*1e6))
else: #shift and rescaling
tref=prog[1].get('trigger').get(str(reftrig[0]))[0]
tref2=prog[1].get('trigger').get(str(reftrig[1]))[0]
ntime=np.asarray(match_time_vectors_lowlevel(time,tref+toff*1e6,tref2))
return ntime
else:
print("Program for this reference timestamp not found, cannot generate new timevector")
return np.asarray(time)
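# Sample calls (hypothetical values): shift a timevector to trigger t1 with a 2 ms
# offset, or shift and rescale it between triggers t1 and t6.
# ntime = regenerate_timevector(time, reftrig=1, toff=2)
# ntime = regenerate_timevector(time, reftrig=[1, 6])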
def upload_raw_data_from_H5(camera,portnum,time,h5file,metadata={},comment="",parlog=True,rawdata=True,newversion=False,reason=""):
"""
upload_raw_data(camera,portnum,images,time,metadata={},newversion=False,reason="")
(str) camera: string of cameraname "IRCAM" or "INFRATEC", is used for the path
(int) portnum: portnumber as int, example: 10 or 11 etc.
(np.array) time: timevector of length t in ns
(dict) metadata: the metadata of the file, should contain texp, filter in case of INFRATEC, camera_modell, serial_number,framerate, if croped: pos_left, pos_right, multi_exp, sensor_temperature in K, bitdepth
(bool) newversion: defines whether a new version is needed
(str) reason: reason for the new version
"""
#camera="IRCAM"
group="QRT_"+camera#+"_TEST" # test part will be removed in final version
datastream_name="AEF"+str(portnum)+"_raw"#_TEST7"
#not tested#
chanDesc=[{'name':'raw','physicalQuantity':{'type':'none'},'active':1}]
#adjusted to metadataforcurrentvideo
url="http://archive-webapi.ipp-hgw.mpg.de/Test/raw/"+project+"/"+group+"/"+datastream_name
if newversion:
archive_create_stream_version(url, reason, False, "n/a", "n/a" )
ver=get_latest_version("Test/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG")
version=ver
else:
ver=get_latest_version("Test/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG")
        if ver is None:
reason="Versioning of Data"
archive_create_stream_version(url, reason, False, "n/a", "n/a" )
version=1
else:
version=ver
parobj={"label":"parms",
"description":"raw data for "+camera+" in AEF"+str(portnum),
"values":[{"chanDescs":chanDesc,'meta-data':metadata,'comment':comment}],
"dimensions":[int(time[0]),int(time[-1])]
}
if parlog:
print("writing parlog")
upload_Parlog(url,parobj,version)
if rawdata:
print("writing done, creating upload H5file")
tmp=writeH5_from_File(datastream_name,h5file,'images',time)
print("uploading H5file")
        if type(tmp) is list:
            n=1
            for ele in tmp:
                print("uploading part "+str(n)+" of "+str(len(tmp)))
                n+=1
                result=uploadH5_Versioning(group,datastream_name,ele,version,True)
                if result[0]:
                    print("upload of part "+str(n-1)+" of "+str(len(tmp))+" was successful")
                else:
                    print("upload of part "+str(n-1)+" of "+str(len(tmp))+" failed")
            return result
        else:
            return uploadH5_Versioning(group,datastream_name,tmp,version,True)
def upload_temperaure_data(camera,portnum,images,time,metadata={},comment="",newversion=False,reason=""):
    """
    upload_temperaure_data(camera,portnum,images,time,metadata={},comment="",newversion=False,reason="")
    (str) camera: camera name, "IRCAM" or "INFRATEC", used for the path
    (int) portnum: port number as int, example: 10 or 11 etc.
    (numpy.array) images: t x w x h numpy array
    (numpy.array) time: time vector in uint64 ns of length t
    (dict) metadata: metadata of the file; should contain texp, filter (for INFRATEC), camera_model, serial_number, framerate, and if cropped: pos_left, pos_right, multi_exp, sensor_temperature in K, bitdepth as integer, LUT_V as integer, LUT_timestamp in ns, NUC_timestamp in ns, Divertor_temperature in K, Divertor_TC_path as str, Divertor_TC_time in ns
    (bool) newversion: defines whether a new version is needed
    (str) reason: reason for the new version
    """
    #not tested#
    print("convert image data")
    images=np.asarray(images,dtype=np.int16)
    group="QRT_"+camera
    datastream_name="AEF"+str(portnum)+"_temperature"
    url="http://archive-webapi.ipp-hgw.mpg.de/Test/raw/"+project+"/"+group+"/"+datastream_name
    if newversion:
        archive_create_stream_version(url, reason, False, "n/a", "n/a" )
        version=get_latest_version("Test/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG")
    else:
        version=get_latest_version("Test/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG")
        if version is None:
            archive_create_stream_version(url, "Versioning of Data", False, "n/a", "n/a" )
            version=1
    chanDesc=[{'name':'temperature','physicalQuantity':{'type':'K'},'active':1}]
    parobj={"label":"parms",
            "description":"temperature data for "+camera+" in AEF"+str(portnum),
            "values":[{"chanDescs":chanDesc,'meta-data':metadata,'comment':comment}],
            "dimensions":[int(time[0]),int(time[-1])]
            }
    upload_Parlog(url,parobj,version)
    upload_images(camera,portnum,images,time,"temperature",version)
def upload_Background_frame(camera,portnum,time,texp_filter,background,parlog=True,data=True,newversion=False,reason=""):
    """
    upload a single background frame for the given exposure time (and filter for INFRATEC)
    (list) texp_filter: one or two elements, texp or texp,filter for INFRATEC
    """
group="QRT_"+camera # test part will be removed in final version
# group="QRT_IRCAM_TEST" # test part will be removed in final version
name=["background"]
datastream_name="AEF"+str(portnum)+"_background" # test part will be removed in final version
structure={
"Texp":texp_filter[0]
}
if camera=="INFRATEC":
structure['filter']=texp_filter[1]
url="http://archive-webapi.ipp-hgw.mpg.de/Test/raw/"+project+"/"+group+"/"+datastream_name
if newversion:
archive_create_stream_version(url, reason, False, "n/a", "n/a" )
ver=get_latest_version("Test/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG")
version=ver
print("Version for upload is "+str(version))
else:
ver=get_latest_version("Test/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG")
        if ver is None:
reason="Versioning of Data"
archive_create_stream_version(url, reason, False, "n/a", "n/a" )
version=1
else:
version=ver
print("Version for upload is "+str(version))
chanDesc={"[0]":{'name':name[0],'physicalQuantity':{'type':'none'},'active':1}}
fullframe=np.array([background])
parobj={"label":"parms",
"description":"background for "+camera+" in AEF"+str(portnum),
"values":[{"chanDescs":chanDesc,"structure":structure}],
"dimensions":[time[0],int(time[0]+np.int64(texp_filter[0]*1E3))]
}
dataobj={"datatype":"float",
"values":fullframe.tolist(),
"dimensions":time
}
upload_fulldataset(url,parobj,dataobj,version,parlog,data)
def upload_NUC_ref(camera,portnum,time,texp_filter,gain,offset,cold,bad,gain_e=[],offset_e=[],parlog=True,data=True,newversion=False,reason=""):
"""
upload_NUC_ref(gain,offset,hot,cold,bad,gain_e,offset_e,camera,portnum,time,newversion=False)
(str) camera: string of cameraname "IRCAM" or "INFRATEC", is used for the path
(int) portnum: portnumber as int, example: 10 or 11 etc.
(numpy.array) time: time vector in uint64 ns of length t
(numpy.array) texp_filter: a list with one or two elements texp or texp,filter for INFRATEC
(numpy.array) gain: w x h array, image of full size
(numpy.array) offset: w x h array, image of full size
(numpy.array) cold: w x h array, image of full size
(numpy.array) bad: w x h array or n x 2 array, image of full size or a pixelist
(numpy.array) gain_e: w x h array, image of full size
(numpy.array) offset_e: w x h array, image of full size
(bool) newversion: defines whether a new version is needed
(str) reason: reason for the new version
"""
#camera="IRCAM" #will be removed in final version
group="QRT_"+camera#+"_TEST" # test part will be removed in final version
name=["NUC"]
datastream_name="AEF"+str(portnum)+"_NUC" # test part will be removed in final version
if np.shape(gain)==np.shape(bad):
badframe=bad
else:
badframe=np.zeros(np.shape(gain))
if len(bad)>0:
if np.shape(bad)[1]==2:
for i in range(np.shape(bad)[0]):
badframe[bad[i][1]][bad[i][0]]=1
else:
print("badpixel map or list does not fit the requirements")
else:
print("bad pixel list empty, no bad pixels stored!")
    if gain_e is None or len(gain_e)==0:
        gain_e=np.zeros(np.shape(gain))
    if offset_e is None or len(offset_e)==0:
        offset_e=np.zeros(np.shape(offset))
fullframe=np.array([np.vstack((gain,offset,cold,badframe,gain_e,offset_e))])
structure={
"gain_index":0,
"offset_index":np.shape(gain)[0],
"cold_index":np.shape(gain)[0]+np.shape(offset)[0],
"badpixels_index":np.shape(gain)[0]+np.shape(offset)[0]+np.shape(cold)[0],
"gain_error_index":np.shape(gain)[0]+np.shape(offset)[0]+np.shape(cold)[0]+np.shape(bad)[0],
"offset_error_index":np.shape(gain)[0]+np.shape(offset)[0]+np.shape(cold)[0]+np.shape(bad)[0]+np.shape(gain_e)[0],
"Texp":texp_filter[0]
}
if camera=="INFRATEC":
structure['filter']=texp_filter[1]
url="http://archive-webapi.ipp-hgw.mpg.de/Test/raw/"+project+"/"+group+"/"+datastream_name
if newversion:
archive_create_stream_version(url, reason, False, "n/a", "n/a" )
ver=get_latest_version("Test/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG")
version=ver
print("Version for upload is "+str(version))
else:
ver=get_latest_version("Test/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG")
        if ver is None:
reason="Versioning of Data"
archive_create_stream_version(url, reason, False, "n/a", "n/a" )
version=1
else:
version=ver
print("Version for upload is "+str(version))
chanDesc={"[0]":{'name':name[0],'physicalQuantity':{'type':'none'},'active':1}}
parobj={"label":"parms",
"description":"calib for "+camera+" in AEF"+str(portnum),
"values":[{"chanDescs":chanDesc,"structure":structure}],
"dimensions":[time[0],int(time[0]+np.int64(texp_filter[0]*1E3))]
}
dataobj={"datatype":"float",
"values":fullframe.tolist(),
"dimensions":time
}
upload_fulldataset(url,parobj,dataobj,version,parlog,data)
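# Note on the stored layout (see "structure" above): gain, offset, cold, badpixels,
# gain_error and offset_error are stacked vertically into one frame, and "structure"
# holds the first row of each block, so a reader can slice them back out, e.g.
# (hypothetical variable names):
# gain = stacked_frame[structure["gain_index"]:structure["offset_index"], :]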
def upload_LUT_tables(LUTs,LUTref,camera,portnum,time,newversion=False,reason="",extra_par=None):
"""
upload_LUT_tables(LUTs,LUTref,camera,portnum,time,newversion=False,reason="")
(numpy.array) LUTs: t x n x 3 array DL,T,T_er, in general t=1, n= number of look-up-tables
(numpy.array) LUTref: t x n x 2 texp, emissivity or t x n x 3 filter,texp, emissivity for INFRATEC, in general t=1, n= number of LUTs
(str) camera: string of cameraname "IRCAM" or "INFRATEC", is used for the path
(int) portnum: portnumber as int, example: 10 or 11 etc.
(numpy.array) time: time vector in uint64 ns of length t
(bool) newversion: defines whether a new version is needed
(str) reason: reason for the new version
"""
#camera="IRCAM" #will be removed in final version
group="QRT_"+camera#+"_TEST" # test part will be removed in final version
name=["LUT"]
datastream_name="AEF"+str(portnum)+"_LUT"
#test of dataformat
    if np.shape(LUTs)[0]!=len(time):
        print("mismatch between time vector and LUT timestamps")
        exit()
data=[]
structure={}
# checking whether DL is always the same?? needed?
for t in range(len(time)):
data.append([])
        tLUT=LUTs[t]
data[t].append(tLUT[0][0])
for i in range(np.shape(tLUT)[0]):
data[t].append(tLUT[i][1])
data[t].append(tLUT[i][2])
data=np.array(data)
if len(time)==1:
for n in range(np.shape(LUTref[0])[0]):
if camera=="INFRATEC":
structure["Filter_"+str(LUTref[0][n][0])+"_Texp_"+str(LUTref[0][n][1])+"us_e_"+str(LUTref[0][n][2])]=2*n+1
else:
structure["Texp_"+str(LUTref[0][n][0])+"us_e_"+str(LUTref[0][n][1])]=2*n+1
else:
print("multiple timestamps for LUTs are not jet implemented")
exit()
if type(extra_par)==dict:
structure.update(extra_par)
url="http://archive-webapi.ipp-hgw.mpg.de/Test/raw/"+project+"/"+group+"/"+datastream_name
if newversion:
archive_create_stream_version(url, reason, False, "n/a", "n/a" )
ver=get_latest_version("Test/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG")
version=ver
else:
ver=get_latest_version("Test/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG")
        if ver is None:
reason="Versioning of Data"
archive_create_stream_version(url, reason, False, "n/a", "n/a" )
version=1
else:
version=ver
    # using a self-made upload routine here so that the LUT tables do not appear as images
chanDesc={"[0]":{'name':name[0],'physicalQuantity':{'type':'none'},'active':1}}
parobj={"label":"parms",
"description":"LUT for "+camera+" in AEF"+str(portnum),
"values":[{"chanDescs":chanDesc,"structure":structure}],
"dimensions":[time[0],time[0]+10]
}
dataobj={"datatype":"float",
"values":data.tolist(),
"dimensions":time
}
upload_fulldataset(url,parobj,dataobj,version)
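# Note on the stored layout (single timestamp): each data row is
# [DL, T_0, T_err_0, T_1, T_err_1, ...], and a structure key such as
# "Texp_50us_e_0.9" (hypothetical values) points at the temperature column 2*n+1
# of the n-th look-up table.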
def upload_hot_cold_reference(port,path):
"""
"""
if port==50:
camera="INFRATEC"
else:
camera="IRCAM"
group="QRT_"+camera#+"_TEST" # test part will be removed in final version
name=["raw"]
datastream_name="AEF"+str(port)+"_raw"
print("not ready")
def upload_scene_model(port,time_ns,path="\\\\sv-e4-fs-1\\E4-Mitarbeiter\\E4 Diagnostics\\QIR\\Calibrations\\scene_models",newversion=False,reason=""):
if port==50:
camera="INFRATEC"
else:
camera="IRCAM"
group="QRT_"+camera#+"_TEST" # test part will be removed in final version
name=["scene_model"]
datastream_name="AEF"+str(port)+"_scene_model"
url="http://archive-webapi.ipp-hgw.mpg.de/Test/raw/"+project+"/"+group+"/"+datastream_name
Fullpath=os.path.join(path,"AEF"+str(port),"AEF"+str(port)+"_scene_model.h5")
File=h5reader.File(Fullpath,'r')
keylist=list(File.keys())
time_ns_list=[]
framelist=[]
ii=0
if newversion:
archive_create_stream_version(url, reason, False, "n/a", "n/a" )
ver=get_latest_version("Test/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG")
version=ver
else:
ver=get_latest_version("Test/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG")
        if ver is None:
reason="Versioning of Data"
archive_create_stream_version(url, reason, False, "n/a", "n/a" )
version=1
else:
version=ver
structure={}
    for key in keylist:
        framelist.append(np.array(File[key]))
        time_ns_list.append(time_ns+20*ii)
        structure[key]=time_ns+20*ii
        ii+=1
chanDesc={"[0]":{'name':name[0],'physicalQuantity':{'type':'none'},'active':1}}
parobj={"label":"parms",
"description":"scene model for AEF"+str(port),
"values":[{"chanDescs":chanDesc,"structure":structure}],
"dimensions":[time_ns_list[0],time_ns_list[-1]]
}
upload_Parlog(url,parobj,version=version)
upload_images(camera,port,framelist,time_ns_list,"scene_model",version)
def upload_Parlog(url,parobj,versionnumber=0):
# print('>upload_Parlog: json of parlog')
# print(json.dumps(parobj).encode("utf-8"))
if versionnumber==0:
parlog_request = urllib.request.Request( url + "_PARLOG", data = json.dumps(parobj).encode("utf-8"),headers = { "Content-type" : "application/json" } )
else:
parlog_request = urllib.request.Request( url + "_PARLOG/V"+str(versionnumber), data = json.dumps(parobj).encode("utf-8"),headers = { "Content-type" : "application/json" } )
try:
parlog_response = urllib.request.urlopen(parlog_request)
except urllib.error.HTTPError as ex:
msg = ex.read()
raise RuntimeError(msg)
def upload_data(url,dataobj,versionnumber=0):
if versionnumber==0:
datastream_request = urllib.request.Request( url + "_DATASTREAM",data=json.dumps(dataobj).encode("utf-8"), headers={ "Content-type":"application/json" } )
else:
datastream_request = urllib.request.Request( url + "_DATASTREAM/V"+str(versionnumber),data=json.dumps(dataobj).encode("utf-8"), headers={ "Content-type":"application/json" } )
try:
datastream_response = urllib.request.urlopen(datastream_request)
except urllib.error.HTTPError as ex:
msg = ex.read()
raise RuntimeError(msg)
def upload_fulldataset(url,parlog,data,versionnumber,bparlog=True,bdata=True):
if bparlog:
upload_Parlog(url,parlog,versionnumber)
if bdata:
upload_data(url,data,versionnumber)
### from here on subfunctions
def upload_images(camera,portnum,images,time,typ,version):
"""
upload_images(camera,portnum,images,time,metadata={},newversion=False,reason="")
"""
#camera="IRCAM" #will be removed in final version
group="QRT_"+camera#+"_TEST" # test part will be removed in final version
# names=[typ]
datastream_name="AEF"+str(portnum)+"_"+typ#+"_TEST001"
#url="http://archive-webapi.ipp-hgw.mpg.de/Test/raw/"+project+"/"+group+"/"+datastream_name
# if newversion:
# print('no version control of HDF5 files yet!')
# archive_create_stream_version(url, reason, False, "n/a", "n/a" )
try:
# archive.write_signals(images,time,group,project,names, datastream_name,units,metadata,2,True)
tmp=writeH5(datastream_name,images,time)
uploadH5_Versioning(group,datastream_name,tmp,version,True)
except Exception as E:
print("Upload Failed")
print(E)
def uploadH5(group,stream, h5file, delete=False, timeout=None):
# path=Path, h5file=h5-file
base="http://archive-webapi/Test/raw/W7X/"
try:
headers = {'Content-Type': 'application/x-hdf'}
link = base+group+'?dataPath=/data/'+stream+'&timePath=/data/timestamps'
f = open(h5file, 'rb')
try:
result = post(link, headers=headers, data=f, timeout=timeout)
finally:
f.close()
    finally:
        if delete:
            try:
                os.remove(h5file)
            except KeyboardInterrupt as ki: raise ki
            except Exception:
                print('could not delete file "%s"' % h5file)
if not result.ok:
print(result.reason)
print(result.text)
else:
print("successfully uploaded h5file")
# result = _sup.requeststr(result)
# _sup.debug(result,3)
# return result
def uploadH5_Versioning(group,stream, h5file,version, delete=False, timeout=None):
# path=Path, h5file=h5-file
base="http://archive-webapi/Test/raw/W7X/" #"http://sv-coda-wsvc-3/Test/raw/W7X/"#
#http://sv-coda-wsvc-3/Test/raw/W7X/QRT_IRCAM_TEST?dataPath=/data/AEF100_RAW_TEST2&timePath=/data/timestamps&versionNumber=1
try:
headers = {'Content-Type': 'application/x-hdf'}
link = base+group+'?dataPath=/data/'+stream+'&timePath=/data/timestamps&versionNumber='+str(version)
f = open(h5file, 'rb')
try:
result = post(link, headers=headers, data=f, timeout=timeout)
finally:
f.close()
    finally:
        if delete:
            try:
                os.remove(h5file)
            except KeyboardInterrupt as ki: raise ki
            except Exception:
                print('could not delete file "%s"' % h5file)
if not result.ok:
print(result.reason)
print(result.text)
return [False,result.reason,result.text]
else:
print("successfully uploaded h5file")
return [True]
def writeH5(stream,data,dimof,idx=None):
"""
path=Path, data=numpy.array, dimof=list of long
data.shape = (width,height,time) or (width,height) if len(dim)==1
"""
# stream = path.stream
dtype = str(data.dtype)
tmpfile = "archive_"+stream+'_'+str(dimof[0])
if idx: tmpfile += '_%d'%(idx,)
tmpfile += ".h5"
if data.ndim<3:
data = data.reshape(list(data.shape)+[1])
data = data.swapaxes(0,1) # (width,height,time) -> (row,col,time)
with h5reader.File(tmpfile, 'w') as f:
g = f.create_group('data') # requires [row,col,time]
g.create_dataset('timestamps', data=list(dimof), dtype='uint64',compression="gzip")
g.create_dataset(stream, data=data.tolist(), dtype=dtype,compression="gzip")
return tmpfile
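# Sample call (hypothetical data and timestamps): pack three 320x256 frames plus
# timestamps into the archive HDF5 layout (/data/<stream> and /data/timestamps);
# the temporary file is removed again by uploadH5*(..., delete=True).
# tmp = writeH5("AEF10_raw", np.zeros((320, 256, 3), dtype=np.uint16), [t0, t0+20, t0+40])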
#def writeH5_V2(stream,dataname,data,dimof,idx=None):
# """
# path=Path, data=numpy.array, dimof=list of long
# data.shape = (width,height,time) or (width,height) if len(dim)==1
# """
## stream = path.stream
# dtype = str(data.dtype)
# tmpfile = "archive_"+stream+'_'+str(dimof[0])
# if idx: tmpfile += '_%d'%(idx,)
# tmpfile += ".h5"
# if data.ndim<3:
# data = data.reshape(list(data.shape)+[1])
# data = data.swapaxes(0,1) # (width,height,time) -> (row,col,time)
# with h5reader.File(tmpfile, 'w') as f:
# g = f.create_group('data') # requires [row,col,time]
# g.create_dataset('timestamps', data=list(dimof), dtype='uint64',compression="gzip")
# g.create_dataset(dataname, data=data.tolist(), dtype=dtype,compression="gzip")
# return tmpfile
def writeH5_from_File(stream,filename,key,dimof,idx=None):
"""
path=Path, data=numpy.array, dimof=list of long
data.shape = (width,height,time) or (width,height) if len(dim)==1
"""
# stream = path.stream
# dtype = str(data.dtype)
filesize=os.stat(filename).st_size
f5=h5reader.File(filename,'r')
if filesize<4000000000:
tmpfile = "archive_"+stream+'_'+str(dimof[0])
if idx: tmpfile += '_%d'%(idx,)
tmpfile += ".h5"
with h5reader.File(tmpfile, 'w') as f:
g = f.create_group('data') # requires [row,col,time]
g.create_dataset('timestamps', data=list(dimof), dtype='uint64',compression="gzip")
f5.copy(key,f['/data'],name=stream)
f.close()
# g.create_dataset(stream, data=data.tolist(), dtype=dtype,compression="gzip")
f5.close()
return tmpfile
else:
nrfiles=int(np.ceil(filesize/3500000000))
print("Warning! File for upload is larger than 4 GB ("+str(round(filesize/1000000000,2))+"), split it into "+str(nrfiles)+" files for uploading to smaller fractions. This will take more time")
tmpfiles=[]
times=[]
limits=[0]
shapi=f5[key].shape
intervall=int(np.shape(dimof)[0]/nrfiles)
for i in range(0,nrfiles-1):
limits.append(intervall*(i+1))
times.append(dimof[limits[i]:limits[i+1]])
limits.append(np.shape(dimof)[0])
times.append(dimof[limits[nrfiles-1]:limits[nrfiles]])
for i in range(nrfiles):
tmpfile = "archive_"+stream+'_'+str(dimof[0])+"_part"+str(i+1)
if idx: tmpfile += '_%d'%(idx,)
tmpfile += ".h5"
with h5reader.File(tmpfile, 'w') as f:
g = f.create_group('data') # requires [row,col,time]
g.create_dataset('timestamps', data=list(times[i]), dtype='uint64')#,compression="gzip")
dset=g.create_dataset(stream,shape=(shapi[0],shapi[1],limits[i+1]-limits[i]),dtype='uint16',chunks=(shapi[0],shapi[1],1))#,compression='gzip')
for n in range(limits[i+1]-limits[i]):
dset[:,:,n]=f5[key][:,:,limits[i]+n]
tmpfiles.append(tmpfile)
return tmpfiles
#def writeH5_from_2_Files(stream,filename1,filename2,key,dimof,idx=None):
# """
# path=Path, data=numpy.array, dimof=list of long
# data.shape = (width,height,time) or (width,height) if len(dim)==1
# """
## stream = path.stream
## dtype = str(data.dtype)
# f5=h5reader.File(filename1,'r')
# f5_2=h5reader.File(filename2,'r')
# tmpfile = "archive_"+stream+'_'+str(dimof[0])
# if idx: tmpfile += '_%d'%(idx,)
# tmpfile += ".h5"
## if data.ndim<3:
## data = data.reshape(list(data.shape)+[1])
## data = data.swapaxes(0,1) # (width,height,time) -> (row,col,time)
# with h5reader.File(tmpfile, 'w') as f:
# g = f.create_group('data') # requires [row,col,time]
# g.create_dataset('timestamps', data=list(dimof), dtype='uint64',compression="gzip")
# f5.copy(key,f['/data'],name=stream)
## g.create_dataset(stream, data=data.tolist(), dtype=dtype,compression="gzip")
# return tmpfile
####################################
### END of the UPLOAD ROUTINES ###
####################################
"""
from read_restdb.py by Henning Thomsen copied read_restdb
"""
def read_restdb(request_url):
"""
Reads JSON data from W7-X REST API
Returns:
valid: access ok
t: numpy-array of time
signal: numpy-array of requested signals
    To get NaN and Infinity as their Python counterparts (instead of 'null'),
    append the string '&allowNaN=true' to the request.
"""
try:
res = urllib.request.urlopen(request_url)
signal_list = json.loads(res.read().decode('utf-8'))
res.close()
except urllib.error.URLError as e:
return False, [0], [-1]
else:
try:
signal0=np.array(signal_list['values'])
t=np.array(signal_list['dimensions'])
except ValueError as e:
print(signal_list['status'])
print(e)
return False, [0], [-2]
else:
return True, np.squeeze(t), np.squeeze(np.double(signal0))
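# Sample call (hypothetical, fill in a real datastream URL): read a signal from the
# REST API; 'valid' is False if the URL could not be opened or the reply not decoded.
# valid, t, sig = read_restdb("http://archive-webapi.ipp-hgw.mpg.de/ArchiveDB/..."
#                             "/_signal.json?from=<ns>&upto=<ns>&allowNaN=true")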