diff --git a/CHANGELOG b/CHANGELOG
index 0809d5bbfae687124175f17d75d60c3a61878ece..ad866953ecbe4a7494ab9617b75be625b0d26a7b 100644
--- a/CHANGELOG
+++ b/CHANGELOG
@@ -1,5 +1,9 @@
 <<<<<<< HEAD
-
+26.03.2019: Update to V3.2.0:
+	- scene model download possible
+	- scene model reference download possible for heat flux data
+	- downsampled temperature image stream possible
+	- extraction of temperature profiles possible
 27/28/29.11.2018: update to V3
 	- add file upload_heatflux.py
 	- add plot_heatflux_example.py
@@ -44,6 +48,7 @@ bugfixes in downloadversionIRdata:
 	- fixed: wrong time intervall for TC divertor data
 
 Versions:
+V3.2.0: download of scene models possible, temperature profiles can be extracted, downsampled temperature images available
 V3.1.0: add of divertor loads in the upload and the download
 V3.0.2: change the upload of heat flux from float64 to float32. extract profile accepts now also timewindows.
 V3.0.1: extension of the heat flux upload (scene model reference can be uploaded)
diff --git a/IR_image_tools.py b/IR_image_tools.py
index 949be0484e317785c1c140239ec33cf5d0ae34cb..22e3fe729c279d9c67bbab2a6a0c528fa57afa39 100644
--- a/IR_image_tools.py
+++ b/IR_image_tools.py
@@ -70,6 +70,18 @@ def bestimmtheitsmass_general(data,fit):
     else:
         print("bestimmtheitsmass_general: Arrays must have same dimensions")
     return R
+
+def bestimmtheitsmass_linear(data,fit,debugmode=False):
+    R2=0
+    if len(fit)==len(data):
+        mittel_D=np.mean(data)#np.sum(data)/len(data)
+        mittel_F=np.mean(fit)
+        R2=quad_abweich_mittel(fit,mittel_D)/quad_abweich_mittel(data,mittel_D)
+        if debugmode:
+            print(mittel_D,mittel_F,quad_abweich_mittel(fit,mittel_D),quad_abweich_mittel(data,mittel_D),R2)
+    else:
+        print("bestimmtheitsmass_linear: Arrays must have same dimensions")
+    return R2
 
 def quad_abweich_mittel(data,mittel):
     R=0
diff --git a/ToDO.txt b/ToDO.txt
index fba1f1f9dd32f13909e7ff20b4930bebe9548209..ccd2421383ddca6edd9d3b785efa1c2fb28a18a3 100644
--- a/ToDO.txt
+++ b/ToDO.txt
@@ -6,14 +6,3 @@ download:
 - implement download of the stored temperature data (After the upload)
 - implement download of the stored heat flux data --> done in V3.0.0
 - implement download of FLIR data --> Done in V2.8.0, in testing phase
-
-upload:
-- upload the new nucing files for AEF50 (the workaround)
-- upload remaining data from failed uploads
-- upload scene models
-- upload temperature
-- upload heat flux data (started)
-- upload FLIR data (partly done)
-- upload FLIR LUT (partly done)
-
-
diff --git a/Upload_Day_IRdata.py b/Upload_Day_IRdata.py
deleted file mode 100644
index bdca4980f967607cf9dbfa1121692e595d9ec5c6..0000000000000000000000000000000000000000
--- a/Upload_Day_IRdata.py
+++ /dev/null
@@ -1,658 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Tue Sep 26 16:01:06 2017
-Version: 2.9.4
-@author: holn
-"""
-
-import uploadingversionIRdata as IRup
-import os
-import h5py as h5
-import numpy as np
-import datetime
-import sys
-from time import sleep
-from os.path import join
-#from getfoldernames import Searchdatalocation as gfn
-
-#%% set global parameter
-
-### default old settings, will be overrided by the loaded config file!!!
### -local= True # local test on one IRcam PC (not central PC) -active_PC = [0] # selection of the following camera PCs to loop over -delayupload=True -startuploadtime="20:00:00" -reupload=False -reason="" -retry=2 -port="AEF10" -singleupload=False#True#False -singlelist=[] -raw_parlog=True -raw_data=True#False#True -raw1=True#False#True#False#True -raw2=True -back_parlog=True#False#True -background=True#False#True -nuc_parlog=True#False -nuc=True -metastream=True -dates=[[2017,11,9]] -temperature=False -only_upload_lattest=False -use_today=False -delete_Files=False -logpath=""#"C:\\QIR\\Upload\\" -temppath=""#"C:\\QIR\\Upload\\" - -ports = ['AEF10', 'AEF11', - 'AEF20', 'AEF21', - 'AEF30', 'AEF31', - 'AEF40', 'AEF41', - 'AEF50', 'AEF51','AEK51'] -cameras = ['IRCAM', 'IRCAM', - 'IRCAM', 'IRCAM', - 'IRCAM', 'IRCAM', - 'IRCAM', 'IRCAM', - 'INFRATEC', 'IRCAM','FLIR'] -Tup = [ False, True, - True, True, - True, False, - False, True, - False, True] -pc_list = ['Pc-e4-qrt-7', 'Pc-e4-qrt-2', - 'Pc-e4-qrt-3', 'Pc-e4-qrt-4', - 'Pc-dia-ws-16', 'Pc-e4-qrt-6', - 'Pc-dia-ws-17', 'Pc-dia-ws-18', - 'Pc-e4-qrt-5', 'Pc-e4-qrt-1','PC-HC-MESS-2'] -try: - cFile=open("upload_config") - for line in cFile: - if line[0:3]!="###": - vars()[line.split("=")[0]]=eval(line.split("=")[1].split("\n")[0]) - cFile.close() - if use_today: - dummy=datetime.datetime.now() - dates=[[dummy.year,dummy.month,dummy.day]] -except Exception as E: - raise Exception("Config file was not loaded properly",E) - - -def findthelatestfilename(Rawdatapath): - ''' - find the latest modified file name for easy upload. - @Rawdatapath: the path to search from, e.g: 'R:\\2018.06.07'. - - return the latest modified file name in string. - ''' - filelist = os.listdir(Rawdatapath) - timearray = [os.path.getmtime(join(Rawdatapath, i)) for i in filelist] - maxtimestampindex = np.argmax(timearray) - latestfilename = filelist[maxtimestampindex] - return latestfilename - -#create upload log file: - -class Tee(object): - def __init__(self, *files): - self.files = files - def write(self, obj): - for f in self.files: - f.write(obj) - f.flush() # If you want the output to be visible immediately - def flush(self) : - for f in self.files: - f.flush() -if delayupload: - now=datetime.datetime.now() - start=datetime.datetime(now.year,now.month,now.day,int(startuploadtime.split(":")[0]),int(startuploadtime.split(":")[1]),int(startuploadtime.split(":")[2])) - sleeptime=(start-now).total_seconds() #seconds - if sleeptime<0: - sleeptime=1 - sleep(sleeptime) - -original = sys.stdout - -nrprograms=0 -fully_uploaded=0 -sum_meta1=0 -sum_meta2=0 -sum_raw1=0 -sum_raw2=0 -start=datetime.datetime.now() -fout=open(logpath+"upload_log_"+str(start.date())+"_"+str(start.hour)+"_"+str(start.minute)+"_"+str(start.second)+".txt",'w') -#foutf=open("upload_log_failes_"+str(start.date())+"_"+str(start.hour)+"_"+str(start.minute)+"_"+str(start.second)+".txt",'w') -sys.stdout = Tee(sys.stdout, fout) -#sys.stdout = fout -f_success=open(logpath+"upload_log_successlist_"+str(start.date())+"_"+str(start.hour)+"_"+str(start.minute)+"_"+str(start.second)+".txt",'w') -try: - try: - sysport=os.environ['qir_port'] - except Exception as E: - sysport=port - print("unable to read the port from the system, using config port") - raise Exception(E) - toff = 0 #delay time of the programm for the raw stuff in ms - port=sysport - active_PC=[ports.index(port)] - Tup[ports.index(port)]=temperature - #%% decide data path format - for date in dates: - - if date[0]==2017 and date[1]<=9 and date[2]<=25: - # old 
file and folder names - rawdatapath = "\\\\{0:s}\\r\\{1[0]:4d}-{1[1]:02d}-{1[2]:02d}" - calibdatapath = "\\\\{0:s}\\r\\Calibration\\{1[0]:4d}-{1[1]:02d}-{1[2]:02d}" - tempdatapath = "\\\\{0:s}\\t\\{1[0]:4d}-{1[1]:02d}-{1[2]:02d}" - """ - have to be finished!! - """ - else: - # recent file and folder naming convention - rawdatapath = "\\\\{0:s}\\r\\{1[0]:4d}.{1[1]:02d}.{1[2]:02d}" - calibdatapath = rawdatapath - tempdatapath = "\\\\{0:s}\\t\\{1[0]:4d}.{1[1]:02d}.{1[2]:02d}" - - if local: - rawdatapath = "R:\\{1[0]:4d}.{1[1]:02d}.{1[2]:02d}" - calibdatapath = rawdatapath - tempdatapath = "T:\\{1[0]:4d}.{1[1]:02d}.{1[2]:02d}" - - #%% loop over different IRcamera PC and upload data - if not raw1 and not raw2: - raw_parlog=False - raw_data=False - - for i in active_PC: - if only_upload_lattest: - single_upload=True - singlelist = [findthelatestfilename(rawdatapath.format(pc_list[i],date))] - port = int(ports[i].split('AEF')[1]) - path = rawdatapath.format(pc_list[i],date) - print(datetime.datetime.now(),"Starting Upload for "+ports[i],date) - f_success.write(ports[i]+" \n") - f_success.flush() - f_success.write("program folder meta1 meta2 raw1 raw2 \n") - f_success.flush() - # foutf.write(datetime.datetime.now(),"Starting Upload for "+ports[i],date,"\n") - # find all programs of this date on this PC - programtimes = [] - if singleupload: - programtimes=singlelist - else: - for o in os.listdir(path): - if os.path.isdir(os.path.join(path,o)): - if len(o.split("_"))>1 and not date[0]>=2018: - print("unsupported folderstructure at the moment!!") - else: - programtimes.append(o) - - # # loop over all programs and upload data - for prog_time in programtimes: - nrprograms+=1 - # loop over one specific program for test and upload data - print("start upload of files for "+str(prog_time)+" for date: ",date) - # foutf.write("start upload of files for "+str(prog_time)+" for date: ",date,"\n") - try: - path = os.path.join(rawdatapath.format(pc_list[i],date), prog_time) - # sort files into background, cold image, offset, gain and raw data - filelist = os.listdir(path) - blist = {} # background dic - clist = {} # cold file dic - olist = {} # offset file dic - glist = {} # gain file dic - rlist = {} # raw file dic - if len(filelist)>0:# check for empty folder! 
- for file in filelist: - dummy = file.split("_") - if date[0]>=2018: - if dummy[4]=='raw' or dummy[4]=='cal': - rlist[dummy[1]]=file - else: - if dummy[1]=='background': - blist[dummy[7]] = file - elif dummy[1]=='nuc': - if dummy[2]=='cold': - clist[dummy[8]] = file - elif dummy[2]=='gain': - glist[dummy[8]] = file - elif dummy[2]=='offset': - olist[dummy[8]] = file - elif dummy[1]=='raw': - rlist[dummy[3]] = file - else: - print("unexpected file in the folder!") - # foutf.write(datetime.datetime.now(),"unexpected file in the folder",prog_time,"\n") - print(datetime.datetime.now()," Start NUC upload") - # upload of NUC files, background files - if cameras[i]=="IRCAM": - newversion=reupload - if background or nuc: - for t_exp in range(1,10): - try: - print(datetime.datetime.now()," prepare upload of nuc data set for "+str(t_exp)+"us") - fb = h5.File(os.path.join(path, blist[str(t_exp)+"us"]),'r') - background_image = np.asarray(fb[list(fb.keys())[0]]) - fb.close() - fg = h5.File(os.path.join(path, glist[str(t_exp)+"us"]),'r') - gain_image = np.asarray(fg[list(fg.keys())[0]]) - fg.close() - fo = h5.File(os.path.join(path, olist[str(t_exp)+"us"]),'r') - offset_image = np.asarray(fo[list(fo.keys())[0]]) - fo.close() - fc = h5.File(os.path.join(path, clist[str(t_exp)+"us"]),'r') - cold_image = np.asarray(fc[list(fc.keys())[0]]) - fc.close() - bad = [] - ftime = blist[str(t_exp)+"us"].split("_")[3].split(".") - ftime = list(map(int, ftime)) - ftime[3] = ftime[3]*1000 #µs not ms! - time_stamp = int(IRup.TimeToNs(date,ftime)) - #time_stamp = time_stamp #+ 6 * 6E10 - if background: - try: - IRup.upload_Background_frame(cameras[i], port, [time_stamp], [t_exp], - background_image, newversion=newversion,reason=reason) - except Exception as E: - print("upload of background files for "+str(t_exp)+"us failed") - print(E) - # foutf.write(datetime.datetime.now(),"upload of background files for "+str(t_exp)+"us failed",E,"\n") - else: - print("background upload deactivated") - # foutf.write(datetime.datetime.now(),"background upload deactivated","\n") - if nuc: - try: - IRup.upload_NUC_ref(cameras[i], port, [time_stamp], [t_exp], - gain_image, offset_image, cold_image, bad,parlog=nuc_parlog,data=nuc,newversion=newversion,reason=reason) - except Exception as E: - print("upload of NUC files for "+str(t_exp)+"us failed") - print(E) - # foutf.write(datetime.datetime.now(),"upload of NUC files for "+str(t_exp)+"us failed",E,"\n") - else: - print("nuc upload deactivated") - # foutf.write(datetime.datetime.now(),"nuc upload deactivated","\n") - newversion=False#otherwise 6 new versions! - except Exception as E: - print(E) - # foutf.write(datetime.datetime.now(),"error in reading",E,"\n") - else: - print("background and NUC upload deactivated") - # foutf.write(datetime.datetime.now(),"background and NUC upload deactivated","\n") - # -->| tested --> works for IRCAM - else: #case for the Infratec files - newversion=reupload - if background: - fkeys=list(blist.keys()) - print(datetime.datetime.now()," prepare upload of background data set for "+str(fkeys[0])) - filename=blist[fkeys[0]] - fb = h5.File(os.path.join(path, filename),'r') - background_image = np.asarray(fb[list(fb.keys())[0]]) - ftime = filename.split("_")[3].split(".") - ftime = list(map(int, ftime)) - ftime[3] = ftime[3]*1000 #µs not ms! 
- time_stamp = IRup.TimeToNs(date,ftime) - - t_exp=int(filename.split("_")[7].split("us")[0]) - cfilter=int(filename.split("_")[5]) - try: - IRup.upload_Background_frame(cameras[i], port, [time_stamp], [t_exp,cfilter], - background_image, newversion=newversion,reason=reason) - except Exception as E: - print("upload of background file for "+str(t_exp)+"us failed") - print(E) - # foutf.write(datetime.datetime.now(),"upload of background file for "+str(t_exp)+"us failed",E,"\n") - else: - print("background upload deactivated") - # foutf.write(datetime.datetime.now(),"background upload deactivated","\n") - - newversion=reupload - #%% ###upload now raw files### - fkeys=list(rlist.keys()) - ftime1=fkeys[0].split(".") - ftime1 = list(map(int, ftime1)) - ftime1[3]=ftime1[3]*1000#µs not ms! - ftime2=fkeys[1].split(".") - ftime2 = list(map(int, ftime2)) - ftime2[3]=ftime2[3]*1000#µs not ms! - t1=int(IRup.TimeToNs(date,ftime1)) - t2=int(IRup.TimeToNs(date,ftime2)) - if t1>t2: - f1=1 - f2=0 - dummy=t2 - t2=t1 - t1=dummy - else: - f1=0 - f2=1 - # bla='09.34.44.100000'#'09.29.23.172000' - # bla=bla.split(".") - # bla = list(map(int, bla)) - # t2=int(IRup.TimeToNs(date,bla)) - hf1=h5.File(path+"\\"+rlist[fkeys[f1]],'r') - attri1=hf1['dynamic_attributes'] - metadata1={ - 'serialnumer': rlist[fkeys[f1]].split("_")[-1].split(".")[0], - 'width':int(np.asarray(attri1['ROI_w'])[0]), - 'heigth':int(np.asarray(attri1['ROI_h'])[0]), - 'pos_left':int(np.asarray(attri1['ROI_x'])[0]), - 'pos_top':int(np.asarray(attri1['ROI_y'])[0]), - 'bitdepth': int(np.asarray(attri1['bitdepth'])[0]) - } - - # tmeta1={'divertortemperature_K':np.asarray(attri1['divertor_temp_K'])[0], - # 'timestamp':np.asarray(attri1['timestamp'])[0]} - time1=np.asarray(hf1['timestamps']) - sensor1=np.asarray(attri1['sensor_temp_C']) - chip1=np.asarray(attri1['camera_temp_C']) - framerate1=np.asarray(attri1['frame_rate']) - expo1=np.asarray(attri1['integration_time_us']) - hf2=h5.File(path+"\\"+rlist[fkeys[f2]],'r') - attri2=hf2['dynamic_attributes'] - metadata2={ - 'serialnumer': rlist[fkeys[f1]].split("_")[-1].split(".")[0], - 'width':int(np.asarray(attri2['ROI_w'])[0]), - 'heigth':int(np.asarray(attri2['ROI_h'])[0]), - 'pos_left':int(np.asarray(attri2['ROI_x'])[0]), - 'pos_top':int(np.asarray(attri2['ROI_y'])[0]), - 'bitdepth': int(np.asarray(attri2['bitdepth'])[0]) - } - - # tmeta2={'divertortemperature_K':np.asarray(attri2['divertor_temp_K'])[0], - # 'timestamp':np.asarray(attri2['timestamp'])[0]} - time2=np.asarray(hf2['timestamps']) - sensor2=np.asarray(attri2['sensor_temp_C']) - chip2=np.asarray(attri2['camera_temp_C']) - framerate2=np.asarray(attri2['frame_rate']) - expo2=np.asarray(attri2['integration_time_us']) - if cameras[i]=="IRCAM": - metadata1['cameraname']= 'IRCam_Caleo768kL' - metadata2['cameraname']= 'IRCam_Caleo768kL' - metadata1['purpose']='NUC+background' - metadata2['purpose']='experiment' - #['ROI_h', 'ROI_w', 'ROI_x', 'ROI_y', 'Size', - #'bitdepth', 'camera_temp_C', 'divertor_temp_K', 'frame_number', - #'frame_rate', 'integration_time_us', 'sensor_temp_C', 'timestamp'] - elif cameras[i]=="INFRATEC": - metadata1['cameraname']= 'INFRATEC_IMAGEIR9300' - metadata2['cameraname']= 'INFRATEC_IMAGEIR9300' - metadata1['purpose']='background' - metadata2['purpose']='experiment' - #metadata2['multi_exposure']=np.asarray(attri2['multi_exposure'])[0] - #metadata1['multi_exposure']=np.asarray(attri1['multi_exposure'])[0] - elif cameras[i]=="FLIR": - metadata1['cameraname']= 'FLIR_SC8303_00037' - metadata2['cameraname']= 
'FLIR_SC8303_00037' - metadata1['purpose']='background' - metadata2['purpose']='experiment' - else: - raise Exception("unknown camera") - if date[0]>=2018: - metadata1['software_version']=hf1['images'].attrs['software_version'].decode("UTF-8") - metadata1['filter']=int(np.asarray(attri1['filter'])[0]) - metadata1['view_flip_h']=bool(np.asarray(attri1['view_flip_h'])[0]) - metadata1['view_flip_v']=bool(np.asarray(attri1['view_flip_v'])[0]) - metadata2['software_version']=hf2['images'].attrs['software_version'].decode("UTF-8") - metadata2['filter']=int(np.asarray(attri2['filter'])[0]) - metadata2['view_flip_h']=bool(np.asarray(attri2['view_flip_h'])[0]) - metadata2['view_flip_v']=bool(np.asarray(attri2['view_flip_v'])[0]) - metadata1['cameraname']= hf1['images'].attrs['camera'].decode("UTF-8") - metadata2['cameraname']= hf2['images'].attrs['camera'].decode("UTF-8") - framenumber1=np.asarray(attri1['frame_number']) - framenumber2=np.asarray(attri2['frame_number']) - try: - frametype1=np.asarray(attri1['frame_type']) - # frametype1=[] - # for ele in ftype1: - # if ele == "nuc": - # frametype1.append(0) - # elif ele == "background": - # frametype1.append(1) - # elif ele == "discharge": - # frametype1.append(2) - # else: - # frametype1.append(-1) - except: - frametype1=np.zeros((len(framenumber1)))-1 - try: - frametype2=np.asarray(attri2['frame_type']) - # frametype2=[] - # for ele in ftype2: - # if ele == "nuc": - # frametype2.append(0) - # elif ele == "background": - # frametype2.append(1) - # elif ele == "discharge": - # frametype2.append(2) - # else: - # frametype2.append(-1) - except: - frametype2=np.zeros((len(framenumber2)))-1 - wrong1,correction_list1=IRup.check_timevector_framenumbers(time1,framenumber1) - wrong2,correction_list2=IRup.check_timevector_framenumbers(time2,framenumber2) - time1_0=time1.copy() - time2_0=time2.copy() - if cameras[i]=="FLIR": - wrong3,correction_list3=IRup.check_extra_frames_FLIR(time2,framenumber2) - wrong2=wrong2 or wrong3 - correction_list2=correction_list3+correction_list2 - if wrong1: - sensor1=np.delete(sensor1,correction_list1) - chip1=np.delete(chip1,correction_list1) - framerate1=np.delete(framerate1,correction_list1) - frametype1=np.delete(frametype1,correction_list1) - expo1=np.delete(expo1,correction_list1) - time1=np.delete(time1,correction_list1) - if wrong2: - sensor2=np.delete(sensor2,correction_list2) - chip2=np.delete(chip2,correction_list2) - framerate2=np.delete(framerate2,correction_list2) - frametype2=np.delete(frametype2,correction_list2) - expo2=np.delete(expo2,correction_list2) - time2=np.delete(time2,correction_list1) - channel_names=["sensortemperature","cameratemperature","framerate","exposuretime","framenumber","frametype"] - channel_units=["oC","oC",'Hz','us','none','none'] - channel_values1=[sensor1, chip1,framerate1,expo1,framenumber1,frametype1] - channel_values2=[sensor2, chip2,framerate2,expo2,framenumber2,frametype2] - comment_meta1="frametype: 0 cold, 1 background, 2 discharge, -1 unknown" - comment_meta2="frametype: 0 cold, 1 background, 2 discharge, -1 unknown" - triggered=np.asarray(attri2['triggered']) - trig_index=np.where(triggered==1) - if len(trig_index[0])>0: - trig_index=min(trig_index[0]) - toff=(time2[0]-time2[trig_index])/1e6 - else: - print("triggered frame was not found! 
Assumeing that frame 0 is the t1 frame") - toff=0 - print(datetime.datetime.now()," regenerate timevectors") - time1=IRup.regenerate_timevector(time1,0,t2,0) - time2,Program=IRup.regenerate_timevector(time2,1,t2,toff,True) - time1_0=IRup.regenerate_timevector(time1_0,0,t2,0) - time2_0=IRup.regenerate_timevector(time2_0,1,t2,toff) - else: - correction_list1=[] - correction_list2=[] - channel_names=["sensortemperature","cameratemperature","framerate","exposuretime"] - channel_units=["oC","oC",'Hz','us'] - channel_values1=[sensor1, chip1,framerate1,expo1] - channel_values2=[sensor2, chip2,framerate2,expo2] - comment_meta1="" - comment_meta2="" - print(datetime.datetime.now()," regenerate timevectors") - time1=IRup.regenerate_timevector(time1,0,t2,0) - time2,Program=IRup.regenerate_timevector(time2,[1,6],t2,0,True) - time1_0=time1 - time2_0=time2 - hf1.close() - del attri1 - hf2.close() - del attri2 - metares1=0 - metares2=0 - rawres1=0 - rawres2=0 - print(datetime.datetime.now()," Upload Meta Stream(s)") - if metastream: - try: - meta1up=IRup.upload_meta_stream_V2(cameras[i],port,np.asarray(time1),channel_names,channel_units,channel_values1,newversion=newversion,reason=reason, Return=True) - if meta1up: - metares1=1 - else: - metares1=0 - except Exception as E: - print(E) - metares1=0 - # foutf.write(datetime.datetime.now(),"metastream 1 upload failed",E,"\n") - del sensor1, chip1,framerate1,expo1 - try: - meta2up=IRup.upload_meta_stream_V2(cameras[i],port,np.asarray(time2),channel_names,channel_units,channel_values2,newversion=newversion,reason=reason, Return=True) - if meta2up: - metares2=1 - else: - metares2=0 - except Exception as E: - print(E) - metares2=0 - # foutf.write(datetime.datetime.now(),"metastream 2 upload failed",E,"\n") - del sensor2,chip2,framerate2,expo2 - else: - print("metastream upload deactivated") - metares1=0 - metares2=0 - # foutf.write(datetime.datetime.now(),"metastream upload deactivated","\n") - if raw_parlog or raw_data: - print(datetime.datetime.now()," Start Raw Data Upload for "+str(prog_time)) - if raw1: - success=False - trys=0 - temp_raw=raw_parlog - while (trys<retry and not success): - try: - response,pari=IRup.upload_raw_data_from_H5(cameras[i],port,np.asarray(time1_0,dtype=np.int64),path+"\\"+rlist[fkeys[f1]],metadata1,parlog=temp_raw,rawdata=raw_data,newversion=newversion,reason=reason,del_list=correction_list1,temppath=temppath)#upload cold and backgrounds - if response[0]: - success=True - rawres1=1 - else: - print(datetime.datetime.now(),"raw 1 upload failed",response[1],response[2]) - rawres1=0 - temp_raw=not pari - trys+=1 - # foutf.write(datetime.datetime.now(),"raw 1 upload failed",response[1],response[2],"\n") - except Exception as E: - trys+=1 - print("Upload for rawfile 1 failed!, try:"+str(trys),E) - rawres1=0 - # foutf.write(datetime.datetime.now(),"Upload for rawfile 1 failed!, try:"+str(trys),E,"\n") - - else: - rawres1=0 - if raw2: - success=False - trys=0 - temp_raw=raw_parlog - while (trys<retry and not success): - print(datetime.datetime.now()," Upload second file") - try: - response,pari=IRup.upload_raw_data_from_H5(cameras[i],port,np.asarray(time2_0,dtype=np.int64),path+"\\"+rlist[fkeys[f2]],metadata2,parlog=temp_raw,rawdata=raw_data,del_list=correction_list2,temppath=temppath)#upload raw images from plasma - if response[0]: - success=True - rawres2=1 - else: - print(datetime.datetime.now(),"raw 2 upload failed",response[1],response[2]) - rawres2=0 - temp_raw=not pari - trys+=1 - # foutf.write(datetime.datetime.now(),"raw 2 upload 
failed",response[1],response[2],"\n") - except Exception as E: - trys+=1 - print("Upload for rawfile 2 failed!, try"+str(trys),E) - rawres2=0 - # foutf.write(datetime.datetime.now(),"Upload for rawfile 2 failed!, try:"+str(trys),E,"\n") - else: - rawres2=0 - print(datetime.datetime.now()," Upload finished") - else: - print("raw upload deactivated") - rawres1=0 - rawres2=0 - f_success.write(Program+" "+str(prog_time)+" "+str(metares1)+" "+str(metares2)+" "+str(rawres1)+" "+str(rawres2)+" \n") - f_success.flush() - sum_meta1+=metares1 - sum_meta2+=metares2 - sum_raw1+=rawres1 - sum_raw2+=rawres2 - if metares1==1 and metares2==metares1 and rawres1==rawres2 and rawres1==metares1: - fully_uploaded+=1 - if delete_Files: - print("Upload of the 4 parts was successfull, deleting now the files in "+str(prog_time)) - try: - os.remove(path+"\\"+rlist[fkeys[f1]]) - except KeyboardInterrupt as ki: raise ki - except: - print('could not delete file "%s"' % path+"\\"+rlist[fkeys[f1]]) - pass - try: - os.remove(path+"\\"+rlist[fkeys[f2]]) - except KeyboardInterrupt as ki: raise ki - except: - print('could not delete file "%s"' % path+"\\"+rlist[fkeys[f2]]) - pass - try: - print("deleting now the folder") - os.rmdir(path) - except Exception as E: - print("could not delete the folder",str(prog_time),E) - # foutf.write(datetime.datetime.now(),"raw upload deactivated","\n") - else:#folder is empty - print("no files in",prog_time) - reupload=False - except Exception as E: - print("Cannot Upload Data for folder "+str(prog_time)) - print(E) - # foutf.write(datetime.datetime.now(),"Cannot Upload Data for folder "+str(prog_time),"\n") - ###upload the temp files if allowed### - """ - if Tup[i]: - path=tempdatapath.format(pc_list[i],date)+"\\"+prog_time - filelist=os.listdir(path) - #assumption that there are only temp files - fkeys=list(rlist.keys()) - ftime1=fkeys[0].split(".") - for e in range(len(ftime1)): - ftime1[e]=int(ftime1[e]) - ftime1[3]=ftime1[3]*1000#µs not ms! - ftime2=fkeys.split(".") - for e in range(len(ftime2)): - ftime2[e]=int(ftime2[e]) - ftime2[3]=ftime2[3]*1000#µs not ms! - t1=IRup.TimeToNs(date,ftime1) - t2=IRup.TimeToNs(date,ftime2) - if t1>t2: - f1=1 - f2=0 - dummy=t2 - t2=t1 - t1=dummy - else: - f1=0 - f2=1 - IRup.upload_temperaure_data(cameras[i],port,images1,time1,tmeta1) - del images1, time1, tmeta1 - IRup.upload_temperaure_data(cameras[i],port,images2,time2,tmeta2) - del images2, time2, tmeta2 - #""" - ##[o for o in os.listdir(path) if os.path.isdir(os.path.join(path,o)) and (o[0]=='2')] - ##"{:02d}".format(zahl) -except Exception as E: - print("Upload failed! 
:",E) -print("script is done, the upload is to "+str(round(fully_uploaded/nrprograms*100,2))+"% completed; "+str(fully_uploaded)+" of "+str(nrprograms)+" folders are uploaded") -if fully_uploaded<nrprograms: - print("Calibration Meta Datastreams: "+str(sum_meta1)+" of "+str(nrprograms)) - print("Plasma Meta Datastreams: "+str(sum_meta2)+" of "+str(nrprograms)) - print("Calibration raw Datastreams: "+str(sum_raw1)+" of "+str(nrprograms)) - print("plasma raw Datastreams: "+str(sum_raw2)+" of "+str(nrprograms)) -fout.close() -f_success.close() -#foutf.close() -sys.stdout = original -char=input("script is done, press the enter button to close the program.") \ No newline at end of file diff --git a/downloadversionIRdata.py b/downloadversionIRdata.py index 29a970b9b61564cdaff8241f47e84b92055f0667..d2dd46c80cd6ec515083f06939ac517b8ff1fb3a 100644 --- a/downloadversionIRdata.py +++ b/downloadversionIRdata.py @@ -4,7 +4,7 @@ Created on Wed Oct 25 15:51:46 2017 updated on Tue Aug 21 10:20:00 2018 last update on Fr Nov 23 15:37:00 2018 -Version: 3.1.0 +Version: 3.2.1 (Numbering: #of big changes(OP1.2a download V1, OP1.2b download V2, heatflux V3) . #of updates to add functionalities . #number of updates for bug fixes ) @author: holn """ @@ -44,40 +44,6 @@ except: config_path="" -# These two functions seem unused and will be deleted. -# They are redundant to -# IR_tools.get_OP_by_time(time_ns=None, shot_no=None, program_str=None) -# which should be used instead. -# -#def get_OP_by_program(program): -# try: -# bla=int(program.split(".")[0]) -# time_ns=int(TimeToNs([bla // 10000,bla % 10000 // 100,bla % 10000 % 100],[10,0,0,0])) -# return get_OP_by_time(time_ns) -# except Exception as E: -# print(E) -# return None -# -#def get_OP_by_time(time_ns): -# dateOP=datetime.datetime.utcfromtimestamp(time_ns/1e9) -# if dateOP.year==2017: -# if dateOP.month>8 and dateOP.month<12: -# return "OP1.2a" -# elif dateOP.month==8 and dateOP.day>=28: -# return "OP1.2a" -# elif dateOP.month==12 and dateOP.day<8: -# return "OP1.2a" -# else: -# return None -# elif dateOP.year==2018: -# return "OP1.2b" -# elif dateOP.year<=2016 and dateOP.year>=2015: -# if (dateOP.year==2016 and dateOP.month<=3) or (dateOP.year==2015 and dateOP.month==12): -# return "OP1.1" -# else: -# return None - - def get_latest_version(stream,project="W7X",Test=False,t_from=None,t_to=None,program=None): """Find out latest version of given stream :param stream url stream of interest @@ -96,7 +62,7 @@ def get_latest_version(stream,project="W7X",Test=False,t_from=None,t_to=None,pro if program is not None: prog=get_program_from_PID(program) if prog[0]: - t_from=prog[1]['trigger']['1'][0] + t_from=prog[1]['trigger']['0'][0] t_to=prog[1]['trigger']['6'][0] elif t_from is not None and t_to==None: t_to=int(t_from+100e9) @@ -134,7 +100,7 @@ def read_program(timestamp_start,timestamp_end=0,tol=60): read_program() """ program_url = 'http://archive-webapi.ipp-hgw.mpg.de/programs.json?from' - if timestamp_end!=0: + if timestamp_end==0: timestamp_end = timestamp_start jsonurl = '{0}={1}&upto={2}'.format(program_url, int(timestamp_start-tol*1e9), @@ -427,13 +393,25 @@ def download_calibration_raw_files_by_time(port,t_exp,starttime,endtime,frametyp raise Exception("download_calibration_raw_files_by_time: requested Frametype unknown and not implemented!") else: print("download_calibration_raw_files_by_time: Error! 
frametype not found") - return False,[0],[0] + frametimes=time_t[expinds] + diftime=[frametimes[i]-frametimes[i-1] for i in range(1,len(frametimes))] + turnpoint=np.where(np.asarray(diftime)>11e6)[0][0] + if frametype==0: + ref_t=[np.min(frametimes[0:turnpoint+1]),np.max(frametimes[0:turnpoint+1])] + print((ref_t[1]-ref_t[0])/1e9) + elif frametype==1: +# print(len(frametimes[turnpoint+1:]),len(frametimes[0:turnpoint+1]),len(frametimes)) + ref_t=[np.min(frametimes[turnpoint+1:]),np.max(frametimes[turnpoint+1:])] + print((ref_t[1]-ref_t[0])/1e9) + else: + raise Exception("download_calibration_raw_files_by_time: requested Frametype unknown and not implemented!") +# return False,[0],[0] t1date=datetime.datetime.utcfromtimestamp((endtime-100)/1e9) t1date=t1date.isoformat() t0date=datetime.datetime.utcfromtimestamp((starttime-15e6)/1e9) t0date=t0date.isoformat() if version==0: - version=get_latest_version(portpathdict[OP]["AEF"+str(port)]+"raw_DATASTREAM") + version=get_latest_version(portpathdict[OP]["AEF"+str(port)]+"raw_DATASTREAM",t_from=starttime,t_to=endtime) larchivepath=archivepath+"W7X/"+portpathdict[OP]["AEF"+str(port)]+"raw_DATASTREAM/V"+str(version)+"/0/raw" timest0=AKF_2.get_time_intervals(larchivepath,t0date.replace("T"," "),t1date.replace("T"," ")) t_offset=timest0[-1][0]-time_t[0] @@ -454,7 +432,7 @@ def download_hot_cold_reference_by_times(port,exposure,starttime=150390720000000 larchivepath=archivepath+"W7X/"+portpathdict[OP]["AEF"+str(port)]+"raw_" # NUC_parlog=AKF_1.read_restdb_old(archivepath+"PARLOG/V"+str(version)+"/_signal.json?from="+str(starttime)+"&upto="+str(stoptime)) if version==0: - version=get_latest_version(portpathdict[OP]["AEF"+str(port)]+"raw_DATASTREAM") + version=get_latest_version(portpathdict[OP]["AEF"+str(port)]+"raw_DATASTREAM",t_from=starttime) try: res = urllib.request.urlopen(larchivepath+"PARLOG/V"+str(version)+"/_signal.json?from="+str(starttime)+"&upto="+str(int(starttime+1e9))) signal_list = json.loads(res.read().decode('utf-8')) @@ -551,7 +529,9 @@ def get_NUCed_background_by_times(port,t0,t1,t_exp,cfilter,gain,offset,version=0 OP1.2b function """ exist,btime,backgroundframes=download_calibration_raw_files_by_time(port,t_exp,t0,t1,frametype=1,version=version,verbose=verbose-1) - camera=portpathdict["OP1.2b"]["AEF"+str(port)] + camera=portcamdict["OP1.2b"]["AEF"+str(port)] + if verbose>50: + print("camera is",camera) background=np.zeros(np.shape(backgroundframes[0]),dtype=np.uint64) for frame in backgroundframes: background+=frame @@ -600,7 +580,7 @@ def download_raw_images_by_times(port,starttime,stoptime,version=0,intervalSize= """ OP=IR_tools.get_OP_by_time(time_ns=starttime) if version==0: - version=get_latest_version(portpathdict[OP]["AEF"+str(port)]+"raw_DATASTREAM") + version=get_latest_version(portpathdict[OP]["AEF"+str(port)]+"raw_DATASTREAM",t_from=starttime,t_to=stoptime) larchivepath=archivepath+"W7X/"+portpathdict[OP]["AEF"+str(port)]+"raw_DATASTREAM/V"+str(version)+"/0/raw" return download_images_by_times(larchivepath,starttime=starttime,stoptime=stoptime,version=version,intervalSize=intervalSize,verbose=verbose-1) @@ -644,7 +624,9 @@ def download_images_by_times(larchivepath,starttime,stoptime,version=0,intervalS return success,time,allimages else: try: - res = urllib.request.urlopen(larchivepath+"/_signal.json?from="+str(starttime-10)+"&upto="+str(stoptime)) + if verbose>100: + print(larchivepath+"/_signal.json?from="+str(starttime-10)+"&upto="+str(stoptime)) + res = 
urllib.request.urlopen(larchivepath+"/_signal.json?from="+str(starttime-10)+"&upto="+str(stoptime))
            signal_list = json.loads(res.read().decode('utf-8'))
            res.close()
            images=[np.array(ele, dtype=typo) for ele in signal_list['values']]
@@ -672,14 +654,14 @@ if fastDL:
             stoptime=prog[1]['trigger']['6'][0]
             OP=IR_tools.get_OP_by_time(time_ns=starttime)
             if version==0:
-                version=get_latest_version(portpathdict[OP]["AEF"+str(port)]+"raw_DATASTREAM")
+                version=get_latest_version(portpathdict[OP]["AEF"+str(port)]+"raw_DATASTREAM",t_from=starttime,t_to=stoptime)
             larchivepath=archivepath+"W7X/"+portpathdict[OP]["AEF"+str(port)]+"raw_DATASTREAM/V"+str(version)+"/0/raw"
             return download_images_by_time_via_png(larchivepath,starttime,stoptime,
                                                    time_window=time_window,threads=threads,
                                                    version=version,verbose=verbose-1)
 
     def download_images_by_time_via_png(larchivepath,starttime,stoptime,time_window=0,
-                                        threads=1,version=0,verbose=0):
+                                        threads=1,version=0,verbose=0,framerate='max'):
         """
        """
         success=True
@@ -705,9 +687,23 @@
         try:
             times=AKF_2.get_time_intervals(larchivepath,stdate.replace("T"," "),enddate.replace("T"," "))#
         except Exception as E:
-            print('download_images_by_time_via_png: Error loading times from ',larchivepath)
+            print('download_images_by_time_via_png: Error loading times from ',larchivepath,stdate.replace("T"," "),enddate.replace("T"," "))
             raise Warning(E)
-            return False,[0],[0]
+            return False,[0],[0]
+
+        if framerate!='max':
+            if verbose>5:
+                print("download_images_by_time_via_png: downsampling active; number of original frames:",len(times))
+            if type(framerate)==float or type(framerate)==int:
+                realrate=1e9/((times[0]-times[-1])[0]/len(times))
+                if framerate<realrate:
+                    steps=int(round(realrate/framerate,0))
+                    dummy=[]
+                    for i in range(0,len(times),steps):
+                        dummy.append(times[i])
+                    times=dummy
+                    if verbose>5:
+                        print("download_images_by_time_via_png: downsampling active; number of downsampled frames:",len(times),steps)
         time=[]
         images=[]
         lnt=len(times)
@@ -826,7 +822,7 @@
     class NUC_raw_thread(threading.Thread):
         def __init__ (self,larchivepath,times,resultlist,threadID,version=0,background=0,
                       LUT=[[],[],[]],refT=28.5,gain=0,offset=0,gain_error=0,offset_error=0,
-                      give_ERROR=False,FOV=0,badpixels=[]):
+                      give_ERROR=False,FOV=0,badpixels=[],verbose=0):
             threading.Thread.__init__(self)
             self.threadID = threadID
             self.times = times
@@ -843,6 +839,7 @@
             self.give_ERROR=give_ERROR
             self.FOV=FOV
             self.badpixels=badpixels
+            self.verbose=verbose
         def run(self):
             images=[]
             time=[]
@@ -869,37 +866,47 @@
                 images=IR_tools.correct_images(images,self.badpixels,verbose=self.verbose-1)
             self.resultlist.append([self.threadID,successes,time,images])
 
-def download_raw_FLIR_images_via_png(t1,t6,time_window=0,version=0,threads=1,verbose=0):
+def download_raw_FLIR_images_via_png(t1,t6,time_window=0,version=0,threads=1,verbose=0,Test=True):
     """
     """
-    larchivepath=archivepath+"W7X/QSR07_FLIR/AEK51_raw"
+    if Test:
+        larchivepath="http://archive-webapi.ipp-hgw.mpg.de/Test/raw/"+"W7X/QSR07_FLIR/AEK51_raw"
+    else:
+        larchivepath=archivepath+"W7X/QSR07_FLIR/AEK51_raw"
     if type(time_window)==list:
         tstart=time_window[0]
         tstop=time_window[1]
         if tstop<tstart:
             raise Exception("download_raw_FLIR_images_via_png: endtime before starttime")
-        stdate=datetime.datetime.utcfromtimestamp((t1-100)/1e9+tstart)
-        stdate=stdate.isoformat()
-        enddate=datetime.datetime.utcfromtimestamp((t1)/1e9+tstop)
-        enddate=enddate.isoformat()
+        starttime=int(t1-100+tstart*1e9)
+        endtime=int(t1+tstop*1e9)
+#        stdate=datetime.datetime.utcfromtimestamp((t1-100)/1e9+tstart)
+#        stdate=stdate.isoformat()
+#        enddate=datetime.datetime.utcfromtimestamp((t1)/1e9+tstop)
+#        enddate=enddate.isoformat()
     else:
-        stdate=datetime.datetime.utcfromtimestamp((t1-100)/1e9)
-        stdate=stdate.isoformat()
+        starttime=int(t1-100)
         if time_window==0:
-            enddate=datetime.datetime.utcfromtimestamp(t6/1e9)
-            enddate=enddate.isoformat()
+            endtime=t6
         else:
-            enddate=datetime.datetime.utcfromtimestamp((t1)/1e9+time_window)
-            enddate=enddate.isoformat()
+            endtime=int((t1)+time_window*1e9)
+#        stdate=datetime.datetime.utcfromtimestamp((t1-100)/1e9)
+#        stdate=stdate.isoformat()
+#        if time_window==0:
+#            enddate=datetime.datetime.utcfromtimestamp(t6/1e9)
+#            enddate=enddate.isoformat()
+#        else:
+#            enddate=datetime.datetime.utcfromtimestamp((t1)/1e9+time_window)
+#            enddate=enddate.isoformat()
     #"2017-11-15 08:00:00"
-    typ1_version=get_latest_version("QSR07_FLIR/AEK51_raw_DATASTREAM")
+    typ1_version=get_latest_version("QSR07_FLIR/AEK51_raw_DATASTREAM",Test=Test,t_from=starttime)
     if typ1_version==None:
         mode=1
     else:
         mode=2
     if mode==1:
         ### okay the data is stored probably as different sets (so recorded with FLIR software)
-        timess,versions=get_FLIR_Pset_times(stdate,enddate,version)
+        timess,versions=get_FLIR_Pset_times(starttime,endtime,version,Test=Test)#stdate,enddate,version,Test=Test)
         if verbose>0:
             print("download_raw_FLIR_images_via_png: found {0} frames in the different sets".format([np.shape(x) for x in timess]))
         ### we not know the times for all presets for the program, now we have to download them, if times is not empty
@@ -926,16 +933,19 @@
     return exist,time,frames,mode,tsets,versions
 
-def get_FLIR_Pset_times(stdate,enddate,version=0,verbose=0):
+def get_FLIR_Pset_times(stdate,enddate,version=0,verbose=0,Test=True):
     """
     """
-    larchivepath=archivepath+"W7X/QSR07_FLIR/AEK51_raw"
+    if Test:
+        larchivepath="http://archive-webapi.ipp-hgw.mpg.de/Test/raw/"+"W7X/QSR07_FLIR/AEK51_raw"
+    else:
+        larchivepath=archivepath+"W7X/QSR07_FLIR/AEK51_raw"
     if version==0:
         versions=[]
-        versions.append(get_latest_version("QSR07_FLIR/AEK51_raw_PS0_DATASTREAM"))
-        versions.append(get_latest_version("QSR07_FLIR/AEK51_raw_PS1_DATASTREAM"))
-        versions.append(get_latest_version("QSR07_FLIR/AEK51_raw_PS2_DATASTREAM"))
-        versions.append(get_latest_version("QSR07_FLIR/AEK51_raw_PS3_DATASTREAM"))
+        versions.append(get_latest_version("QSR07_FLIR/AEK51_raw_PS0_DATASTREAM",Test=Test,t_from=stdate))
+        versions.append(get_latest_version("QSR07_FLIR/AEK51_raw_PS1_DATASTREAM",Test=Test,t_from=stdate))
+        versions.append(get_latest_version("QSR07_FLIR/AEK51_raw_PS2_DATASTREAM",Test=Test,t_from=stdate))
+        versions.append(get_latest_version("QSR07_FLIR/AEK51_raw_PS3_DATASTREAM",Test=Test,t_from=stdate))
     else:
         versions=[version,version,version,version]
     timess=[]
@@ -945,21 +955,27 @@
         counter=-1
         for version0 in versions:
             counter+=1
-            if version0>0:
-                found=False
-                while (version0>0 and not found):
-                    try:
-                        url = '{0}_PS{1}_DATASTREAM/V{2}/0/raw'.format(larchivepath,counter,version0)
-                        times_0=AKF_2.get_time_intervals(url,stdate.replace("T"," "),enddate.replace("T"," "))#
-                        found=True
-                    except Exception as E:
-                        print('get_FLIR_Pset_times: Error querrying {0}\n{1}'.format(url,E))
-                        found=False
-                        times_0=[]
-                        version0=version0-1
-                timess.append(times_0)
-            else:
+            print(versions)
+            try:
+                if version0>0:
+                    found=False
+                    while (version0>0 and not found):
+                        try:
+                            url = '{0}_PS{1}_DATASTREAM/V{2}/0/raw'.format(larchivepath,counter,version0)
+                            times_0=AKF_2.get_time_intervals(url,stdate,enddate)#stdate.replace("T"," "),enddate.replace("T"," "))#
+                            found=True
+                        except Exception as E:
+                            print('get_FLIR_Pset_times: Error querying {0}\n{1}'.format(url,E))
+                            found=False
+                            times_0=[]
+                            version0=version0-1
+                    timess.append(times_0)
+                else:
+                    timess.append([])
+            except:
+                timess.append([])
+                if verbose>0:
+                    print("get_FLIR_Pset_times: no version for PS"+str(counter))
     else:
         counter=-1
         for version0 in versions:
@@ -1078,7 +1094,7 @@
     """
     """
     try:
-        url='{0}/_signal.png?from={1}&upto='.format(larchivepath,starttime-10,stoptime)
+        url='{0}/_signal.png?from={1}&upto={2}'.format(larchivepath,starttime-10,stoptime)
         res = urllib.request.urlopen(url)
         img = Image.open(BytesIO(res.read()))
         res.close()
@@ -1612,7 +1628,7 @@ def get_temp_from_raw_by_program_V2(portnr,program,time_window=0,emi=0.8,version
 def get_temp_from_raw_by_program(portnr,program,time_window=0,emi=0.8,T_version=2,
                                  version=0,threads=1,give_ERROR=False,
                                  use_firstframe_as_background=False,back_emissivity=0.8,
-                                 verbose=0):
+                                 verbose=0,Test=False,framerate='max'):
     """
     """
     FLIR=False
@@ -1644,7 +1660,7 @@
         raise Exception("get_temp_from_raw_by_program: the given port is neither a number or a valid String!")
     else:
         if FLIR:
-            exist,time,frames,mode,tsets,versions=download_raw_FLIR_images_via_png(t1=t1,t6=t6,time_window=time_window,version=version,threads=threads,verbose=verbose-1)
+            exist,time,frames,mode,tsets,versions=download_raw_FLIR_images_via_png(t1=t1,t6=t6,time_window=time_window,version=version,threads=threads,verbose=verbose-1,Test=Test)
             if exist:
                 if mode==1:
                     Psets=IR_tools.check_dublicates(tsets)
@@ -1656,7 +1672,10 @@
                     single=False
                 if single:
                     PS=Psets[0]
-                    parlog=read_restdb_old(archivepath+"W7X/QSR07_FLIR/AEK51_raw_PS"+str(PS)+"_PARLOG/V"+str(versions[PS])+"/_signal.json?from="+str(t1-10)+"&upto="+str(t6+20))
+                    if Test:
+                        parlog=read_restdb_old("http://archive-webapi.ipp-hgw.mpg.de/Test/raw/"+"W7X/QSR07_FLIR/AEK51_raw_PS"+str(PS)+"_PARLOG/V"+str(versions[PS])+"/_signal.json?from="+str(t1-10)+"&upto="+str(t6+20))
+                    else:
+                        parlog=read_restdb_old(archivepath+"W7X/QSR07_FLIR/AEK51_raw_PS"+str(PS)+"_PARLOG/V"+str(versions[PS])+"/_signal.json?from="+str(t1-10)+"&upto="+str(t6+20))
                     tsets=[0 for i in range(len(frames))]
                     if parlog[0]:
                         pari=parlog[2][0]['meta-data']
@@ -1677,8 +1696,11 @@
                             Temp_Co.append([])
                 else:
                     for PS in range(4):
-                        if PS in Psets:
-                            parlog=read_restdb_old(archivepath+"W7X/QSR07_FLIR/AEK51_raw_PS"+str(PS)+"_PARLOG/V"+str(versions[PS])+"/_signal.json?from="+str(t1-10)+"&upto="+str(t6+20))
+                        if PS in Psets:
+                            if Test:
+                                parlog=read_restdb_old("http://archive-webapi.ipp-hgw.mpg.de/Test/raw/"+"W7X/QSR07_FLIR/AEK51_raw_PS"+str(PS)+"_PARLOG/V"+str(versions[PS])+"/_signal.json?from="+str(t1-10)+"&upto="+str(t6+20))
+                            else:
+                                parlog=read_restdb_old(archivepath+"W7X/QSR07_FLIR/AEK51_raw_PS"+str(PS)+"_PARLOG/V"+str(versions[PS])+"/_signal.json?from="+str(t1-10)+"&upto="+str(t6+20))
                             if parlog[0]:
                                 pari=parlog[2][0]['meta-data']
                                 c1=[float(pari['Coeff0_'+str(PS)].split("\n")[0]),float(pari['Coeff1_'+str(PS)].split("\n")[0])]
@@ -1724,12 +1746,12 @@ def get_temp_from_raw_by_program(portnr,program,time_window=0,emi=0.8,T_version=
             return False,[0],[0],False
     else:
         if version==0:
-            version=get_latest_version(portpathdict[OP]["AEF"+str(portnr)]+"raw_DATASTREAM")
+            version=get_latest_version(portpathdict[OP]["AEF"+str(portnr)]+"raw_DATASTREAM",program=program)
         larchivepath=archivepath+"W7X/"+portpathdict[OP]["AEF"+str(portnr)]+"raw_DATASTREAM/V"+str(version)+"/0/raw"
         if verbose>0:
             print(datetime.datetime.now(),"get_temp_from_raw_by_program: raw download start")
         if fastDL:
-            exist,time,frames=download_images_by_time_via_png(larchivepath,starttime=t1,stoptime=t6,time_window=time_window,version=version,threads=threads,verbose=verbose-1)
+            exist,time,frames=download_images_by_time_via_png(larchivepath,starttime=t1,stoptime=t6,time_window=time_window,version=version,threads=threads,verbose=verbose-1,framerate=framerate)
         else:
             exist,time,frames=download_raw_images_by_program(portnr,program,time_window,version,verbose=verbose-1)
         if exist:
@@ -2009,7 +2031,7 @@
             if verbose>0:
                 print("get_nuced_raw_by_program_fullthreads: Start Thread ",i+1)
 #            p = multiprocessing.Process(target=NUC_raw_thread, args=(portnr,tim[intervalls[i]:intervalls[i+1]],out_q,i,version,background,LUT,refT,gain,offset,gain_error,offset_error,give_ERROR,FOV,badpixels,))
-            p = NUC_raw_thread(larchivepath,tim[intervalls[i]:intervalls[i+1]],resultdict,i,version,background,LUT,refT,gain,offset,gain_error,offset_error,give_ERROR,FOV,badpixels)
+            p = NUC_raw_thread(larchivepath,tim[intervalls[i]:intervalls[i+1]],resultdict,i,version,background,LUT,refT,gain,offset,gain_error,offset_error,give_ERROR,FOV,badpixels,verbose=verbose-1)
             jobs.append(p)
             p.start()
         for p in jobs:
@@ -2605,7 +2627,68 @@ def download_heatflux_mapping_reference(timepoint=None,version=0,testmode=True,v
     mappings['Finger_ID']=(frames[5],"legend: first three digits are fingernumber,starting @0, last two are the profile number")
     mappings['Target']=(frames[6],{1:"TM1-4h",2:"TM5-6h",3:"TM7-9h",4:"TM1-3v"})
     return exist,mappings
-    
+
+def download_heatflux_scene_model_reference(port,timepoint=None,program=None,version=0,testmode=True,verbose=0):
+    """
+    return exist (boolean) and a dictionary with the scene model reference mapping information
+    """
+    if type(port)==int or type(port)==float:
+        portnam="AEF"+str(port)
+    elif type(port)==str:
+        portnam=port
+    if timepoint==None and program==None:
+        timepoint=int(TimeToNs([2017,8,28],[8,0,0,0]))
+    elif timepoint==None:
+        prog=get_program_from_PID(program)
+        if prog[0]:
+            timepoint=prog[1]['trigger']['1'][0]
+        else:
+            raise Exception("download_heatflux_scene_model_reference: Error! Program "+program+" not found!")
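+    # Minimal usage sketch (port and program values are only illustrative,
+    # compare the commented example in plot_heatflux_example.py):
+    #   exist,scene=download_heatflux_scene_model_reference(20,program="20181004.012")
+    #   if exist: X,Y=scene['Pixel_X'],scene['Pixel_Y']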
Program "+program+" not found!") + if testmode: + base="http://archive-webapi.ipp-hgw.mpg.de/Test/raw/" + else: + base=archivepath +# OP=IR_tools.get_OP_by_time(time_ns=timepoint) + if version==0: + version=get_latest_version("QRT_IRCAM/"+str(portnam)+"_SceneModel_reference_PARLOG",project=project_ana,Test=testmode,t_from=timepoint) + larchivepath=base+project_ana+"/"+"QRT_IRCAM/"+str(portnam)+"_SceneModel_reference_DATASTREAM/V"+str(version)+"/0/scene%20model%20reference" + larchivepath_par=base+project_ana+"/QRT_IRCAM/"+str(portnam)+"_SceneModel_reference_PARLOG/V"+str(version)+"/parms/header/Pixel_X" + ### test for the testsample### +#============================================================================== +# if version==0: +# version=get_latest_version("QRT_IRCAM_Test/"+str(portnam)+"_SceneModel_reference_PARLOG",project=project_ana,Test=testmode) +# larchivepath=base+project_ana+"/QRT_IRCAM_Test/"+str(portnam)+"_SceneModel_reference_DATASTREAM/V"+str(version)+"/0/scene%20model%20reference" +# larchivepath_par=base+project_ana+"/QRT_IRCAM_Test/"+str(portnam)+"_SceneModel_reference_PARLOG/V"+str(version)+"/parms/header/Pixel_X" +#============================================================================== + ### end of testsample ### + try: + res = urllib.request.urlopen(larchivepath_par+"/_signal.json?from="+str(timepoint)+"&upto="+str(timepoint)) + signal_list = json.loads(res.read().decode('utf-8')) + res.close() + goon=True + except urllib.error.URLError as e: + print('download_heatflux_scene_model_reference: Error! ',e,larchivepath_par+"/_signal.json?from="+str(timepoint)+"&upto="+str(timepoint)) + goon=False + if goon: + timepoint=signal_list['values'][0] + exist,time,frames=download_images_by_times(larchivepath,starttime=timepoint-10,stoptime=int(timepoint+1e8),version=version,verbose=verbose-1) + mappings={} + if exist: + mappings['Pixel_X']=frames[0] + mappings['Pixel_Y']=frames[1] + mappings['CAD_X']=frames[2] + mappings['CAD_Y']=frames[3] + mappings['CAD_Z']=frames[4] + mappings['PFC']=(frames[5],"legend: https://wikis.ipp-hgw.mpg.de/W7X/images/9/9c/PFC2.pdf") + + return exist,mappings + else: + return False,{} + def give_finger_ID(profile,finger=None): """ profile: string, integer or float, string: "TM3h_5_5" or "1605" or "16.05" or "5", similar for float and integer. single number only if the finger is given! @@ -2812,8 +2891,69 @@ def extract_heatflux_profile_from_DL(time,images,profile,finger=None,time_window print("extract_heatflux_profile_from_DL: mapping was not found, cannot extract the profile") return False,0,0,0 +def extract_temperature_profile_from_DL(port,time,images,profile,finger=None,time_window=1, + inputchecked=False,verbose=0,reference_time=0): + """ + return exist,time,s in m, heat flux in W/m2 + time_point default (None) returns entire shot. 
+    """
+    goon=True
+    if time_window==None:
+        Tid_start=0
+        Tid_end=len(time)
+    elif type(time_window)==int or type(time_window)==float:
+        try:
+            Tid_start=IR_tools.find_nearest(time,time_window)
+            Tid_end=Tid_start+1
+        except Exception as E:
+            goon=False
+    elif type(time_window)==list or type(time_window)==np.ndarray and len(time_window)>0:
+        try:
+            Tid_start=IR_tools.find_nearest(time,time_window[0])
+        except Exception as E:
+            goon=False
+        if len(time_window)>1:
+            try:
+                Tid_end=IR_tools.find_nearest(time,time_window[1])
+            except Exception as E:
+                goon=False
+        else:
+            Tid_end=Tid_start+1
+    else:
+        goon=False
+        E="unknown Case"
+    if not goon:
+        if verbose>0:
+            print('extract_temperature_profile_from_DL: ',E)
+        return False,0,0,0
+    else:
+        if not inputchecked:
+            checkprof=give_finger_ID(profile,finger)
+            if checkprof[0]:
+                fingerID=checkprof[1]
+            else:
+                return False,0,0,0
+        else:
+            fingerID=finger*100+profile
+        ### okay, the fingerID is fine and the data is given; now get the mapping
+        exist,mapping=download_heatflux_mapping_reference()
+        if exist:
+            Fingermap=np.asarray(mapping['Finger_ID'][0],dtype=np.int)
+            locs=np.where(Fingermap==fingerID)
+            S=mapping['s'][locs]
+            exist_s,scene=download_heatflux_scene_model_reference(port,timepoint=reference_time,verbose=verbose-1)
+            if exist_s:
+                X=scene['Pixel_X'][locs]
+                Y=scene['Pixel_Y'][locs]
+                q=np.asarray([images[i][np.asarray(Y,dtype=np.int),np.asarray(X,dtype=np.int)] for i in range(Tid_start,Tid_end)])
+                return True,time[Tid_start:Tid_end],S,q
+            else:
+                if verbose>0:
+                    print("extract_temperature_profile_from_DL: mapping was not found, cannot extract the profile")
+                return False,0,0,0
+
 
-def download_divertor_load(port,targetmodule=None,program=None,tstart=None,tend=None,version=0,verbose=0,testmode=True):
+def download_divertor_load(port,targetmodule=None,program=None,tstart=None,tend=None,version=0,verbose=0,testmode=True,request=True):
     """
     """
     TMchadict={
@@ -2924,7 +3073,77 @@
             time=0
             load=-1
             error=-1
+    if exist==False:
+        now=datetime.datetime.now()
+        if verbose>0:
+            print(now,"download_divertor_load: divertor_load data is not available, creating request")
+        if request:
+            f=open(heatflux_requestlist_path+str(now.year)+str(now.month)+"_load_requests.txt",'a')
+            try:
+                programid=AKF_2.get_program_id(tstart)
+            except Exception as E:
+                if verbose>0:
+                    print('download_divertor_load: Error! ',E)
+                programid=str(tstart)
+            f.write(programid+"\t"+str(port)+"\n")
+            f.close()
     return exist,time,load,error
+
+def download_scene_model(port,program=None,timepoint=None,version=0,verbose=0):
+    if type(port)==int or type(port)==float:
+        portnam="AEF"+str(port)
+    elif type(port)==str:
+        portnam=port
+    if program==None and timepoint==None:
+        dati=datetime.datetime.now()
+        timepoint=int(TimeToNs([dati.year,dati.month,dati.day],[dati.hour,dati.minute,dati.second,dati.microsecond]))
+    elif timepoint==None:
+        prog=get_program_from_PID(program)
+        if prog[0]:
+            timepoint=prog[1]['trigger']['1'][0]
+        else:
+            raise Exception("download_scene_model: Error! Program "+program+" not found!")
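+    # Minimal usage sketch (port number illustrative, compare the commented
+    # "scene model test" example at the end of this file):
+    #   exist,model=download_scene_model(10)
+    #   if exist: distance_map=model["distance"]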
Program "+program+" not found!") + base=archivepath + if version==0: + version=get_latest_version("QRT_IRCAM/"+str(portnam)+"_scene_model_PARLOG",project=project,Test=False,t_from=timepoint) + larchivepath=base+project+"/"+"QRT_IRCAM/"+str(portnam)+"_scene_model_DATASTREAM/V"+str(version)+"/0/"+str(portnam)+"_scene_model" + larchivepath_par=base+project+"/QRT_IRCAM/"+str(portnam)+"_scene_model_PARLOG/V"+str(version) + + try: + res = urllib.request.urlopen(larchivepath_par+"/_signal.json?from="+str(timepoint)+"&upto="+str(timepoint)) + signal_list = json.loads(res.read().decode('utf-8')) + res.close() + goon=True + except urllib.error.URLError as e: + print('download_heatflux_scene_model_reference: Error! ',e) + goon=False + if goon: + timepoint=signal_list['values'][0]['meta-data']['timestamp'] + height=signal_list['values'][0]['meta-data']['height'] + exist,time,frames=download_images_by_times(larchivepath,starttime=timepoint-10,stoptime=int(timepoint+1e8),version=version,verbose=verbose-1) + stacked_image = np.array(frames[0], dtype='float64') + channels = np.array(np.vsplit(stacked_image, stacked_image.shape[0]/height)) + scene_model = { + "CAD": channels[0], + "FOV": channels[1], + "PFC": channels[2], + "angle": channels[3], + "distance": channels[4], + "emissivity": channels[5], + "phi": channels[6], + "theta": channels[7], + "x": channels[8], + "y": channels[9], + "z": channels[10] + } + return exist,scene_model + else: + return False,[0] #%% general download functions """ @@ -3036,14 +3242,16 @@ end of the copied functions #%% dummy run of the script if __name__=='__main__': - print("local function calling") + print("local function calling") + #%% temperature download and plotting example -# port=51#"AEF51" -# prog="20180925.017" -# status,time,images,valid=get_temp_from_raw_by_program(port,prog,time_window=[0,0.15],emi=0.82,T_version=2,version=0,threads=4,give_ERROR=False,use_firstframe_as_background=False,verbose=5) +# port=50#"AEF50"#"AEF51" +# prog="20180920.042"#"20171109.021"#"20181010.036" +# status,time,images,valid=get_temp_from_raw_by_program(port,prog,time_window=[0,2],emi=0.82,T_version=2,version=0,threads=1,give_ERROR=False,use_firstframe_as_background=False,verbose=50,framerate=1) +# success,t,s,profile=extract_temperature_profile_from_DL(port,np.asarray(time-time[0])/1e9,images,profile="TM3h_5_5",verbose=10,reference_time=time[0]) # if status: # plt.figure() -# plt.imshow(images[-1],vmin=330,vmax=1000,cmap=exJet) +# plt.imshow(images[0],vmin=330,vmax=1000,cmap=exJet) # cb=plt.colorbar() # cb.set_label("temperature in K",rotation=270,labelpad=20,fontsize=20) # cb.ax.tick_params(labelsize=20) @@ -3060,9 +3268,12 @@ if __name__=='__main__': # status,mapping=download_heatflux_mapping_reference(verbose=4) # test=get_heatflux_profile(20,1605,timepoint=1,program="20171109.008",verbose=4) #%% loads test - port=21 - prog="20171114.052" - status,time,load,error=download_divertor_load(port,targetmodule=['all','tm1h'],program=prog,verbose=5,debug=True) +# port=21 +# prog="20171114.052" +# status,time,load,error=download_divertor_load(port,targetmodule=['all','tm1h'],program=prog,verbose=5,debug=True) +#%% scene model test +# port=10 +# exist,model=download_scene_model(port) #%% HDF5 writing test # bla=get_temp_from_raw_by_program_fullthreads(51,prog,time_window=[0,4],threads=4) # bla2=get_nuced_raw_by_program_fullthreads(51,prog,time_window=[0,4],threads=4) diff --git a/plot_heatflux_example.py b/plot_heatflux_example.py index 
diff --git a/plot_heatflux_example.py b/plot_heatflux_example.py index bc6f25ed6679722cdcd22ab0791b927bf4e0c381..edbef8380cc03e745ab1fad6d2042f5b297e9cfc 100644 --- a/plot_heatflux_example.py +++ b/plot_heatflux_example.py @@ -1,58 +1,65 @@ # -*- coding: utf-8 -*- """ Created on Thu Nov 29 17:41:40 2018 -V3.0.2 +V3.2.0 @author: holn """ import numpy as np -import downloadversionIRdata as IR +import IR_data_access.downloadversionIRdata as IR import matplotlib.pyplot as plt import datetime + +port=20 +program="20181004.012" + if __name__=='__main__': #%% loading data print(datetime.datetime.now(),"start") - status,times,images=IR.download_heatflux_by_program(20,"20171109.008",timewindow=0.9,version=1,threads=1,verbose=5) + status,times,images=IR.download_heatflux_by_program(port,program,time_window=0.9,version=1,threads=1,verbose=5) print('done') #%% plotting data - status2,time,s,q=IR.extract_heatflux_profile_from_DL((np.asarray(times)-times[0])/1e9,images,profile="TM3h_5_5",timewindow=0.5,verbose=5) - plt.figure() - plt.imshow(images[-1]/1e6) - plt.clim([0,4]) - plt.tick_params(axis='both', # changes apply to the x-axis - which='both', # both major and minor ticks are affected - bottom='off', # ticks along the bottom edge are off - top='off', # ticks along the top edge are off - left='off', - right='off', - labelleft='off', - labelbottom='off') # labels along the bottom edge are off - c_ax = plt.colorbar() - c_ax.set_label('q [MW/m2]') - plt.savefig('20171109.008 - '+str(round((np.asarray(times[-1])-times[0])/1e9,2))+' - AEF20IR - in MWm2.png', dpi=300, bbox_inches='tight') - if len(np.shape(q))==1 or np.shape(q)[0]==1: - if np.shape(q)[0]==1: - q=q[0] + if status: + status2,time,s,q=IR.extract_heatflux_profile_from_DL((np.asarray(times)-times[0])/1e9,images,profile="TM3h_5_5",time_window=0.5,verbose=5) plt.figure() - plt.plot(s,q/1e6,'ko-') - plt.xlabel("s [m]",fontsize=26) - plt.ylabel("q [MW/m2]",fontsize=26) - plt.tick_params(labelsize=26) - plt.title("20171109.008 heat flux profile for TM3, finger 5, profile 5 @1s",fontsize=26) - plt.grid() - plt.ylim(bottom=0) - plt.show() - elif len(np.shape(q))==2: - plt.figure() - X,Y=np.meshgrid(time,s) - plt.pcolor(X,Y,q.swapaxes(0,1)/1e6,vmin=0) - plt.xlabel("time [s]",fontsize=26) - plt.ylabel("s [m]",fontsize=26) - plt.tick_params(labelsize=26) - cb=plt.colorbar() - cb.set_label('heatflux in MW/m²', rotation=270, labelpad=15,fontsize=26) - cb.ax.tick_params(labelsize=26) - plt.title("20171109.008 heat flux profile for TM3, finger 5, profile 5",fontsize=26) - plt.show() + plt.imshow(images[-1]/1e6) + plt.clim([0,4]) + plt.tick_params(axis='both', # changes apply to the x-axis + which='both', # both major and minor ticks are affected + bottom='off', # ticks along the bottom edge are off + top='off', # ticks along the top edge are off + left='off', + right='off', + labelleft='off', + labelbottom='off') # labels along the bottom edge are off + c_ax = plt.colorbar() + c_ax.set_label('q [MW/m2]') + plt.savefig(program+' - '+str(round((np.asarray(times[-1])-times[0])/1e9,2))+' - AEF'+str(port)+'IR - in MWm2.png', dpi=300, bbox_inches='tight') + if len(np.shape(q))==1 or np.shape(q)[0]==1: + if np.shape(q)[0]==1: + q=q[0] + plt.figure() + plt.plot(s,q/1e6,'ko-') + plt.xlabel("s [m]",fontsize=26) + plt.ylabel("q [MW/m2]",fontsize=26) + plt.tick_params(labelsize=26) + plt.title(program+" heat flux profile for TM3, finger 5, profile 5 @0.5s",fontsize=26) + plt.grid() + plt.ylim(bottom=0) + plt.show() + elif len(np.shape(q))==2: + plt.figure() + X,Y=np.meshgrid(time,s) + plt.pcolor(X,Y,q.swapaxes(0,1)/1e6,vmin=0) + plt.xlabel("time [s]",fontsize=26) + plt.ylabel("s [m]",fontsize=26) + plt.tick_params(labelsize=26) + cb=plt.colorbar() + cb.set_label('heatflux in MW/m²', rotation=270, labelpad=15,fontsize=26) + cb.ax.tick_params(labelsize=26) + plt.title(program+" heat flux profile for TM3, finger 5, profile 5",fontsize=26) + plt.show() +# status3,mapping=IR.download_heatflux_scene_model_reference(port,program=program,verbose=0) +# bild=np.zeros((768,1024)) \ No newline at end of file diff --git a/upload_heatflux.py b/upload_heatflux.py deleted file mode 100644 index 8a13e7e87e9567f07cb1dd6289456e85919dafa5..0000000000000000000000000000000000000000 --- a/upload_heatflux.py +++ /dev/null @@ -1,272 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Created on Fri Sep 21 09:32:29 2018 -V3.0.2 -@author: holn -""" - -import numpy as np -import scipy.io as sio -#import matplotlib.pyplot as plt -import h5py as h5 -import uploadingversionIRdata as IRup -import downloadversionIRdata as IR -import datetime -#125,190 vertical, horizontal *3 mm - - - -port=10 -program="20171109.008" -#programlist=["20171011.009"] -programlist=["20180814.024","20181016.016"]#["20171108.018","20171109.040","20171109.045"] -path="F:\\tmp\\upload_it\\"#"F:\\holn\\Documents\\Doktorarbeit\\RESULTS\\OP1.2a\\Fulldivertor_res\\EJM\\H\\" -outpfad="F:\\holn\\Documents\\Doktorarbeit\\DATEN\\Mapping\\LinesforStacks\\" -scenepath="" - -project="W7XAnalysis" -group="QRT_IRCAM" -stream="AEF"+str(port)+"_heatflux" -comment="Test upload" -header={'code_version':4,'alpha_corrected':1} -stream2="Mapping_reference" -upload_Mapping=False -upload_scene_reference=False -stream_scene="AEF"+str(port)+"_SceneModel_reference" -scene_reference_time=0#ns timestamp goes here! - -for program in programlist: - - File=sio.loadmat(path+program+"_AEF"+str(port)+"_heatflux_V4.mat") - time=np.asarray(File['time'][0]) - tilelist=np.asarray(File['tiles']) - ### camera, program dependent things### - heatpics=np.zeros((125+190+10,108*12+4,len(time)),dtype=np.float32)# define the pictures for the heat flux - alphapic=np.zeros((125+190+10,108*12+4,1),dtype=np.float32) # define the picture for the alpha values - alphapic.fill(np.nan) - heatpics.fill(np.nan) - if upload_Mapping: - ### mapping dependent things ### - Xpic=alphapic.copy() #X-Coordiate in the Mapping - Ypic=alphapic.copy() #Y-Coordiate in the Mapping - FXpic=alphapic.copy() #X-Coordiate in the Fingersystem in the Mapping - FYpic=alphapic.copy() #Y-Coordiate in the Fingersystem in the Mapping - profpic=alphapic.copy() #profiles or s coordiante in the Mapping - # profnpic=alphapic.copy() - tilepic=alphapic.copy() #target indentifier, 1: low iota, 2: middle part, 3: high iota, 4: vertical - fingerpic=alphapic.copy() #finger ID from the mapping, last two digits are the profile number, the 3 to 6.
digit are the finger id (100*i+j; i:0-136; j:0-16) - if upload_scene_reference: - ### Camera ### - PXpic=alphapic.copy() #orignal Pixel X - PYpic=alphapic.copy() #orignal Pixel Y - Xpic3D=alphapic.copy() #CAD 3D X from Scene Model - Ypic3D=alphapic.copy() #CAD 3D Y from Scene Model - Zpic3D=alphapic.copy() #CAD 3D Z from Scene Model - CID=alphapic.copy() #Component ID from the Scene Model - szene=h5.File(scenepath+"AEF"+str(port)+"_scene_model_"+str(scene_reference_time)+".h5",'r') - Xloc=np.asarray(szene['x']) - Yloc=np.asarray(szene['y']) - Zloc=np.asarray(szene['z']) - #CAD=np.asarray(szene['CAD']) - PFC=np.asarray(szene['PFC']) - - linedict={'h_l':30,'h_m':54,'h_h':24,'v':29} - numstacks={ - 'h_l':{0:13,1:14,2:14,3:14,4:14,5:14,6:14,7:14,8:14,9:14,10:15,11:15,12:14,13:14,14:14,15:13,16:13,17:13,18:13,19:13,20:13,21:13,22:13,23:13,24:13,25:13,26:13,27:13,28:13,29:13}, - 'h_m':{0:14,1:15,2:14,3:15,4:14,5:15,6:14,7:15,8:14,9:15,10:14,11:15,12:14,13:15,14:14,15:15,16:14,17:15,18:14,19:15,20:14,21:15,22:14,23:15,24:14,25:15,26:14,27:15,28:14,29:15,30:14,31:15,32:14,33:15,34:14,35:15,36:14,37:15,38:14,39:15,40:14,41:15,42:14,43:15,44:14,45:15,46:14,47:15,48:15,49:14,50:14,51:14,52:14,53:15}, - 'h_h':{0:16,1:15,2:15,3:15,4:15,5:15,6:15,7:15,8:15,9:15,10:15,11:15,12:15,13:15,14:15,15:15,16:15,17:15,18:15,19:15,20:15,21:15,22:15,23:15}, - 'v':{0:15,1:15,2:15,3:15,4:15,5:15,6:15,7:15,8:15,9:15,10:15,11:15,12:15,13:15,14:15,15:15,16:15,17:15,18:15,19:15,20:15,21:15,22:15,23:15,24:15,25:15,26:15,27:15,28:15} - } - - def splitStacktoLines(locationall, rowcountarray): - locations = [] - for i in range(len(rowcountarray) - 1): - locations.append(np.array(locationall[rowcountarray[i]:rowcountarray[i+1]])) - locations = np.array(locations) - return locations - ii=0 - V_index=0 - xoff=0 - maxyoff=0 - tn=0 - for tile in tilelist:#['h_l','h_m','h_h','v']: - tn+=1 - tile=tile.replace(" ","") - linn=linedict[tile] - if tile!='v': - V_index+=linn - yoff=130 - else: - yoff=1 - xoff=1 - for i in range(linn): - if ii<30: - iko=ii - elif ii<84: - iko=ii-30 - elif ii<108: - iko=ii-84 - else: - iko=ii-108 - if ii>=78 and ii<108: - tn=3 - fingerfile=str(port//10) - if port % 10 ==0: - fingerfile=fingerfile+"l"+tile+"_" - else: - fingerfile=fingerfile+"u"+tile+"_" - if port==20 and int(program.split(".")[0])<20171020: - linefile=fingerfile+'{0:02d}'.format(iko)+"2Dlines_biliniar_before_20171020.h5" - else: - linefile=fingerfile+'{0:02d}'.format(iko)+"2Dlines_biliniar.h5" - linf=h5.File(outpfad+linefile,'r') - linid=list(linf['LineNos']) - lines=linf['locationall2D']# - linlocid=linf['rowcountarray'] - linrefpoi=linf['refPoi'] - linweipoi=linf['weightPoi'] - if port==50: - shapi=[1024, 1280] - else: - shapi=[768, 1024] - locations=splitStacktoLines(lines,linlocid) - locpoi=splitStacktoLines(linrefpoi,linlocid) - locwei=splitStacktoLines(linweipoi,linlocid) - linf.close() - xoff+=1 - if tile=='h_m' and iko<48 and iko%2==1: - shift=True - xoff=xoff-numstacks[tile][i-1]-1 - maxyoff+=1 - else: - shift=False - maxyoff=0 - for j in range(numstacks[tile][i]): - j2=linid.index(j) - if shift: - yloff=maxyoff - else: - yloff=0 - lprof=np.array(File['profiles_'+str(ii)+"_"+str(j2)][0]) - lalpha=np.array(File['alpha_'+str(ii)+"_"+str(j2)][0]) - lheat=np.array(File['heat_'+str(ii)+"_"+str(j2)]) - lXY=np.array(File['plot_profiles_'+str(ii)+"_"+str(j2)]) - xf=np.array(np.asarray(locpoi[j2])%shapi[1],dtype=np.int16) - yf=np.array(np.asarray(locpoi[j2])//shapi[1],dtype=np.int16) - PX=np.asarray([np.dot(xf[o],locwei[j2][o]) for o in 
range(len(xf))]) - PY=np.asarray([np.dot(yf[o],locwei[j2][o]) for o in range(len(yf))]) - loco=locations[j2] - profilelength=int(np.max(lprof)/3e-3)+1 - for k in range(profilelength): - pos=k*3e-3#3.143e-3 - pid=IR.IR_tools.find_nearest(lprof,pos) - if abs(pos-lprof[pid])<2e-3: - alphapic[yoff+yloff,xoff]=lalpha[pid] - heatpics[yoff+yloff,xoff,:]=lheat[:,pid] - if upload_Mapping: - Xpic[yoff+yloff,xoff]=lXY[pid,0] - Ypic[yoff+yloff,xoff]=lXY[pid,1] - FXpic[yoff+yloff,xoff]=loco[pid,0] - FYpic[yoff+yloff,xoff]=loco[pid,1] - profpic[yoff+yloff,xoff]=lprof[pid] - # profnpic[yoff+yloff,xoff]=j - tilepic[yoff+yloff,xoff]=tn - fingerpic[yoff+yloff,xoff]=ii*100+j - if upload_scene_reference: - PXpic[yoff+yloff,xoff]=int(round(PX[pid],0)) - PYpic[yoff+yloff,xoff]=int(round(PY[pid],0)) - Xpic3D[yoff+yloff,xoff]=Xloc[int(round(PY[pid],0)),int(round(PX[pid],0))] - Ypic3D[yoff+yloff,xoff]=Yloc[int(round(PY[pid],0)),int(round(PX[pid],0))] - Zpic3D[yoff+yloff,xoff]=Zloc[int(round(PY[pid],0)),int(round(PX[pid],0))] - CID[yoff+yloff,xoff]=PFC[int(round(PY[pid],0)),int(round(PX[pid],0))] - yloff+=1 - xoff+=1 - if yloff>maxyoff and not shift: - maxyoff=yloff - ii+=1 - del lalpha,lheat,lXY,loco,lprof,tn,File - #plt.figure();plt.imshow(alphapic[:,:,0]) - #plt.figure();plt.imshow(tilepic[:,:,0]) - #plt.figure();plt.imshow(fingerpic[:,:,0]) - #plt.figure();plt.imshow(heatpics[:,:,50]) - #testupload #### - - archivepath="http://archive-webapi.ipp-hgw.mpg.de/Test/raw/" - prog=IR.get_program_from_PID(program) - if prog[0]: - t1=prog[1]['trigger']['1'][0] - time_ns=[int(t1-1e9)] - for t3 in time: - time_ns.append(int(t1+t3*1e9)) - images=np.append(alphapic,heatpics,axis=2) - chanDesc=[{'name':'heatflux','physicalQuantity':{'type':'W/m^2'},'active':1}] - parobj={"label":"parms", - "description":"heat flux for IR camera in AEF"+str(port), - "values":[{"chanDescs":chanDesc,'header':header,'comment':comment}], - "dimensions":[int(time_ns[0]),int(time_ns[-1])]} - - url=archivepath+project+"/"+group+"/AEF"+str(port)+"_heatflux" - # url=IR.archivepath+project+"/"+group+"/"+stream - ver=IR.get_latest_version(group+"/"+stream+"_PARLOG",project=project,Test=True) - if ver==None: - reason="Versioning of Data" - IRup.archive_create_stream_version(url, reason, False, "n/a", "n/a" ) - print(datetime.datetime.now(),"starting upload") -# IRup.upload_Parlog(url,parobj,versionnumber=1) - print(datetime.datetime.now(),"parlog written") - IRup.upload_images(images,time=time_ns,group=group,stream=stream,project=project,version=1,swapping=False,Test=True) - print(datetime.datetime.now(),"data uploaded") - del images,alphapic,heatpics,time_ns - if upload_Mapping: - ### upload reference stream ###once! 
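# Note on the block below: the Mapping_reference stream stores seven
# full-size channel images as consecutive frames spaced 10 ns apart,
# starting at timet1. header2 in the PARLOG records which timestamp
# carries which channel: profile (s coordinate), MX/MY (machine
# coordinates), FX/FY (finger coordinates), finger_profile
# (100*finger_id + profile number) and targetgroup (tile type:
# 1 low iota, 2 middle part, 3 high iota, 4 vertical), so a reader can
# recover a named channel by matching these timestamps against the frames.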
- time_ns2=[] - timet1=int(IR.TimeToNs([2017,9,26],[8,0,0,0])) - for t in range(7): - time_ns2.append(int(timet1+10*t)) - images2=np.append(profpic,Xpic,axis=2) - images2=np.append(images2,Ypic,axis=2) - images2=np.append(images2,FXpic,axis=2) - images2=np.append(images2,FYpic,axis=2) - images2=np.append(images2,fingerpic,axis=2) # images2=np.append(images2,profnpic,axis=2) - images2=np.append(images2,tilepic,axis=2) - header2={'profile':int(timet1),'MX':int(timet1+10),'MY':int(timet1+20),'FX':int(timet1+30),'FY':int(timet1+40),'finger_profile':int(timet1+50),'targetgroup':int(timet1+60)} - chanDesc=[{'name':'reference','physicalQuantity':{'type':'none'},'active':1}] - parobj={"label":"parms", - "description":"reference data for the heat flux", - "values":[{"chanDescs":chanDesc,'header':header2,'comment':comment}], - "dimensions":[int(time_ns2[0]),int(time_ns2[-1])] - } - # url=IR.archivepath+project+"/"+group+"/"+stream2 - url=archivepath+project+"/"+group+"/"+stream2 - ver=IR.get_latest_version(group+"/"+stream2+"_PARLOG",project=project,Test=True) - if ver==None: - reason="Versioning of Data" - IRup.archive_create_stream_version(url, reason, False, "n/a", "n/a" ) - IRup.upload_Parlog(url,parobj,versionnumber=1) - IRup.upload_images(images2,time=time_ns2,group=group,stream=stream2,project=project,version=1,swapping=False,Test=True) - if upload_scene_reference: - ### upload scene model camera refrence stream (once per scene model) - time_ns_scene=[] - for t in range(6): - time_ns_scene.append(int(scene_reference_time+10*t)) - images3=np.append(PXpic,PYpic,axis=2) - images3=np.append(images3,Xpic3D,axis=2) - images3=np.append(images3,Ypic3D,axis=2) - images3=np.append(images3,Zpic3D,axis=2) - images3=np.append(images3,CID,axis=2) - header3={'Pixel_X':int(scene_reference_time),'Pixel_Y':int(scene_reference_time+10),'CAD_X':int(scene_reference_time+20),'CAD_Y':int(scene_reference_time+30),'CAD_Z':int(scene_reference_time+40),'Component_ID':int(timet1+50)} - chanDesc=[{'name':'scene model reference','physicalQuantity':{'type':'none'},'active':1}] - parobj={"label":"parms", - "description":"scene model reference data for the heat flux", - "values":[{"chanDescs":chanDesc,'header':header3,'comment':comment}], - "dimensions":[int(time_ns_scene[0]),int(time_ns_scene[-1])] - } - # url=IR.archivepath+project+"/"+group+"/"+stream2 - url=archivepath+project+"/"+group+"/"+stream_scene - ver=IR.get_latest_version(group+"/"+stream_scene+"_PARLOG",project=project,Test=True) - if ver==None: - reason="Versioning of Data" - IRup.archive_create_stream_version(url, reason, False, "n/a", "n/a" ) - IRup.upload_Parlog(url,parobj,versionnumber=1) - IRup.upload_images(images3,time=time_ns_scene,group=group,stream=stream_scene,project=project,version=1,swapping=False,Test=True) - diff --git a/upload_heatflux_folder.py b/upload_heatflux_folder.py deleted file mode 100644 index 4f4af495e7451327fcbbf34c3fd4ad93a1b2ca17..0000000000000000000000000000000000000000 --- a/upload_heatflux_folder.py +++ /dev/null @@ -1,237 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Created on Fri Sep 21 09:32:29 2018 -V3.0.2 -@author: holn -""" - -import numpy as np -import scipy.io as sio -#import matplotlib.pyplot as plt -import h5py as h5 -import uploadingversionIRdata as IRup -import downloadversionIRdata as IR -import datetime -import os -import sys -#125,190 vertical, horizontal *3 mm - - - -exception_ports=[] -path="F:\\holn\\Documents\\Doktorarbeit\\RESULTS\\OP1.2b\\Fulldivertor_res\\DBM\\H\\" 
-path="F:\\holn\\Documents\\Doktorarbeit\\Python-Programme\\THEODOR\\Divertor\\data\\" -path="F:\\tmp\\upload_it\\" -outpfad="F:\\holn\\Documents\\Doktorarbeit\\DATEN\\Mapping\\LinesforStacks\\" -archivepath="http://archive-webapi.ipp-hgw.mpg.de/Test/raw/" -project="W7XAnalysis" -group="QRT_IRCAM" - -comment="Test upload" -header={'code_version':4,'alpha_corrected':1} -stream2="Mapping_reference" -upload_Mapping=False - -class Tee(object): - def __init__(self, *files): - self.files = files - def write(self, obj): - for f in self.files: - f.write(obj) - f.flush() # If you want the output to be visible immediately - def flush(self) : - for f in self.files: - f.flush() - -original = sys.stdout -start=datetime.datetime.now() -fout=open("upload_log_heatflux_"+str(start.date())+"_"+str(start.hour)+"_"+str(start.minute)+"_"+str(start.second)+".txt",'w') -#foutf=open("upload_log_failes_"+str(start.date())+"_"+str(start.hour)+"_"+str(start.minute)+"_"+str(start.second)+".txt",'w') -sys.stdout = Tee(sys.stdout, fout) - -filelist=os.listdir(path) - -for file in filelist: - if "heatflux_V4.mat" in file: - program=file.split("_")[0] - port=int(file.split("_")[1].split("AEF")[1]) - prog=IR.get_program_from_PID(program) - if prog[0]: - t1=prog[1]['trigger']['1'][0] - time_ns=[int(t1-1e9)] - try: -# import archivedb as AKF - exist,tt,fra=IR.download_heatflux_by_times(port,t1,int(t1+100*1e6),version=1,request=False) - except: - exist=False - if exist: - print(datetime.datetime.now(),"Data already uploaded for AEF"+str(port)+" , "+program) - elif port not in exception_ports: - stream="AEF"+str(port)+"_heatflux" - print(datetime.datetime.now(),"loading "+program+"_AEF"+str(port)+"_heatflux_V4.mat") - File=sio.loadmat(path+program+"_AEF"+str(port)+"_heatflux_V4.mat") - time=np.asarray(File['time'][0]) - tilelist=np.asarray(File['tiles']) - heatpics=np.zeros((125+190+10,108*12+4,len(time)),dtype=np.float32) - alphapic=np.zeros((125+190+10,108*12+4,1),dtype=np.float32) - alphapic.fill(np.nan) - heatpics.fill(np.nan) - Xpic=alphapic.copy() - Ypic=alphapic.copy() - FXpic=alphapic.copy() - FYpic=alphapic.copy() - profpic=alphapic.copy() - profnpic=alphapic.copy() - tilepic=alphapic.copy() - fingerpic=alphapic.copy() - - linedict={'h_l':30,'h_m':54,'h_h':24,'v':29} - numstacks={ - 'h_l':{0:13,1:14,2:14,3:14,4:14,5:14,6:14,7:14,8:14,9:14,10:15,11:15,12:14,13:14,14:14,15:13,16:13,17:13,18:13,19:13,20:13,21:13,22:13,23:13,24:13,25:13,26:13,27:13,28:13,29:13}, - 'h_m':{0:14,1:15,2:14,3:15,4:14,5:15,6:14,7:15,8:14,9:15,10:14,11:15,12:14,13:15,14:14,15:15,16:14,17:15,18:14,19:15,20:14,21:15,22:14,23:15,24:14,25:15,26:14,27:15,28:14,29:15,30:14,31:15,32:14,33:15,34:14,35:15,36:14,37:15,38:14,39:15,40:14,41:15,42:14,43:15,44:14,45:15,46:14,47:15,48:15,49:14,50:14,51:14,52:14,53:15}, - 'h_h':{0:16,1:15,2:15,3:15,4:15,5:15,6:15,7:15,8:15,9:15,10:15,11:15,12:15,13:15,14:15,15:15,16:15,17:15,18:15,19:15,20:15,21:15,22:15,23:15}, - 'v':{0:15,1:15,2:15,3:15,4:15,5:15,6:15,7:15,8:15,9:15,10:15,11:15,12:15,13:15,14:15,15:15,16:15,17:15,18:15,19:15,20:15,21:15,22:15,23:15,24:15,25:15,26:15,27:15,28:15} - } - - def splitStacktoLines(locationall, rowcountarray): - locations = [] - for i in range(len(rowcountarray) - 1): - locations.append(np.array(locationall[rowcountarray[i]:rowcountarray[i+1]])) - locations = np.array(locations) - return locations - ii=0 - V_index=0 - xoff=0 - maxyoff=0 - tn=0 - for tile in tilelist:#['h_l','h_m','h_h','v']: - tn+=1 - tile=tile.replace(" ","") - linn=linedict[tile] - if tile!='v': - V_index+=linn - 
yoff=130 - else: - yoff=1 - xoff=1 - for i in range(linn): - if ii<30: - iko=ii - elif ii<84: - iko=ii-30 - elif ii<108: - iko=ii-84 - else: - iko=ii-108 - if ii>=78 and ii<108: - tn=3 - fingerfile=str(port//10) - if port % 10 ==0: - fingerfile=fingerfile+"l"+tile+"_" - else: - fingerfile=fingerfile+"u"+tile+"_" - if port==20 and int(program.split(".")[0])<20171020: - linefile=fingerfile+'{0:02d}'.format(iko)+"2Dlines_biliniar_before_20171020.h5" - else: - linefile=fingerfile+'{0:02d}'.format(iko)+"2Dlines_biliniar.h5" - linf=h5.File(outpfad+linefile,'r') - linid=list(linf['LineNos']) - lines=linf['locationall2D']# - linlocid=linf['rowcountarray'] - locations=splitStacktoLines(lines,linlocid) - linf.close() - xoff+=1 - if tile=='h_m' and iko<48 and iko%2==1: - shift=True - xoff=xoff-numstacks[tile][i-1]-1 - maxyoff+=1 - else: - shift=False - maxyoff=0 - for j in range(numstacks[tile][i]): - j2=linid.index(j) - if shift: - yloff=maxyoff - else: - yloff=0 - lprof=np.array(File['profiles_'+str(ii)+"_"+str(j2)][0]) - lalpha=np.array(File['alpha_'+str(ii)+"_"+str(j2)][0]) - lheat=np.array(File['heat_'+str(ii)+"_"+str(j2)]) - lXY=np.array(File['plot_profiles_'+str(ii)+"_"+str(j2)]) - loco=locations[j2] - profilelength=int(np.max(lprof)/3e-3)+1 - for k in range(profilelength): - pos=k*3e-3#3.143e-3 - pid=IR.IR_tools.find_nearest(lprof,pos) - if abs(pos-lprof[pid])<2e-3: - alphapic[yoff+yloff,xoff]=lalpha[pid] - heatpics[yoff+yloff,xoff,:]=lheat[:,pid] - Xpic[yoff+yloff,xoff]=lXY[pid,0] - Ypic[yoff+yloff,xoff]=lXY[pid,1] - FXpic[yoff+yloff,xoff]=loco[pid,0] - FYpic[yoff+yloff,xoff]=loco[pid,1] - profpic[yoff+yloff,xoff]=lprof[pid] - # profnpic[yoff+yloff,xoff]=j - tilepic[yoff+yloff,xoff]=tn - fingerpic[yoff+yloff,xoff]=ii*100+j - yloff+=1 - xoff+=1 - if yloff>maxyoff and not shift: - maxyoff=yloff - ii+=1 - del lalpha,lheat,lXY,loco,lprof,tn,File - #plt.figure();plt.imshow(alphapic[:,:,0]) - #plt.figure();plt.imshow(tilepic[:,:,0]) - #plt.figure();plt.imshow(fingerpic[:,:,0]) - #plt.figure();plt.imshow(heatpics[:,:,50]) - #testupload #### - print(datetime.datetime.now(),"loading and preparation done, finding program and starting upload") - for t3 in time: - time_ns.append(int(t1+t3*1e9)) - images=np.append(alphapic,heatpics,axis=2) - chanDesc=[{'name':'heatflux','physicalQuantity':{'type':'W/m^2'},'active':1}] - parobj={"label":"parms", - "description":"heat flux for IR camera in AEF"+str(port), - "values":[{"chanDescs":chanDesc,'header':header,'comment':comment}], - "dimensions":[int(time_ns[0]),int(time_ns[-1])]} - url=archivepath+project+"/"+group+"/AEF"+str(port)+"_heatflux" - # url=IR.archivepath+project+"/"+group+"/"+stream - ver=IR.get_latest_version(group+"/"+stream+"_PARLOG",project=project,Test=True) - if ver==None: - reason="Versioning of Data" - IRup.archive_create_stream_version(url, reason, False, "n/a", "n/a" ) - print(datetime.datetime.now(),"starting upload") - IRup.upload_Parlog(url,parobj,versionnumber=1) - print(datetime.datetime.now(),"parlog written") - IRup.upload_images(images,time=time_ns,group=group,stream=stream,project=project,version=1,swapping=False,Test=True) - print(datetime.datetime.now(),"data uploaded") - del images,alphapic,heatpics,time_ns - if upload_Mapping: - ### upload reference stream ###once! 
- time_ns2=[] - timet1=int(IR.TimeToNs([2017,9,26],[8,0,0,0])) - for t in range(7): - time_ns2.append(int(timet1+10*t)) - images2=np.append(profpic,Xpic,axis=2) - images2=np.append(images2,Ypic,axis=2) - images2=np.append(images2,FXpic,axis=2) - images2=np.append(images2,FYpic,axis=2) - images2=np.append(images2,fingerpic,axis=2) # images2=np.append(images2,profnpic,axis=2) - images2=np.append(images2,tilepic,axis=2) - header2={'profile':int(timet1),'MX':int(timet1+10),'MY':int(timet1+20),'FX':int(timet1+30),'FY':int(timet1+40),'finger_profile':int(timet1+50),'targetgroup':int(timet1+60)} - chanDesc=[{'name':'reference','physicalQuantity':{'type':'none'},'active':1}] - parobj={"label":"parms", - "description":"reference data for the heat flux", - "values":[{"chanDescs":chanDesc,'header':header2,'comment':comment}], - "dimensions":[int(time_ns2[0]),int(time_ns2[-1])] - } - # url=IR.archivepath+project+"/"+group+"/"+stream2 - url=archivepath+project+"/"+group+"/"+stream2 - ver=IR.get_latest_version(group+"/"+stream2+"_PARLOG",project=project,Test=True) - if ver==None: - reason="Versioning of Data" - IRup.archive_create_stream_version(url, reason, False, "n/a", "n/a" ) - IRup.upload_Parlog(url,parobj,versionnumber=1) - IRup.upload_images(images2,time=time_ns2,group=group,stream=stream2,project=project,version=1,swapping=False,Test=True) -fout.close() -sys.stdout = original diff --git a/uploadingversionIRdata.py b/uploadingversionIRdata.py deleted file mode 100644 index bf41b1ad0f2fd83ad2965f06f586425db3da62d7..0000000000000000000000000000000000000000 --- a/uploadingversionIRdata.py +++ /dev/null @@ -1,1316 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Created on Thu Aug 24 11:24:52 2017 -Version: 3.1.0 -@author: holn -""" - -### code follows ### - -import urllib -import json -import numpy as np -import os -#import datetime as dt -from IR_config_constants import archivepath,portcamdict -from IR_image_tools import get_OP_by_time - -try: - import h5py as h5reader -except: - print('WARNING: "h5py" package not found.\nImage upload will not be available') - -""" -This is the json version of converting the local time (ISO-8601) to W7-X time -http://archive-webapi.ipp-hgw.mpg.de/w7xtime.json -""" - - - - -""" - Create versioned archive stream - Inputs: - :param url: give complete URL without _PARLOG or _DATASTREAM - :param reason: give string explaining version bump - :param parlog_only: create only PARLOG, no DATASTREAM - :param code_release: code release - :param analysis_environment: analysis environment - - Output: - return status of the upload - - Sample call - archive_create_stream_version("Test/raw/W7X/ADB_Massenspektrometer/VersioningTest", "testwise version bump", parlog_only=False) - archive_create_stream_version("Test/raw/W7X/QSR_IRCam/AEF10/VersioningTest", "testwise version bump", parlog_only=False) - -""" - -def archive_create_stream_version(url, reason, parlog_only = False, code_release = "n/a", analysis_environment = "n/a"): - #by Georg Schlisio, edit bei Holger Niemann - - - # check sanity - assert isinstance(reason, str) - assert reason != "" - - # refine url format - if url.find('archive-webapi.ipp-hgw.mpg.de/') == -1: - url = 'archive-webapi.ipp-hgw.mpg.de/' + url - url = url.replace(" ","%20") - assert len(url.replace('archive-webapi.ipp-hgw.mpg.de/', "").split("/")) == 7 - assert url.find("_DATASTREAM") == -1 - assert url.find("_PARLOG") == -1 - - # create version object - vobject = {"versionInfo":[ - {"reason": reason, - "producer": "holn", # TODO - "code_release": code_release, - 
"analysis_environment": analysis_environment - }] - } - - parlog_request = urllib.request.Request( url + "_PARLOG" + "/_versions.json", data = json.dumps(vobject).encode("utf-8"), - headers = { "Content-type" : "application/json" } ) - - try: - parlog_response = urllib.request.urlopen(parlog_request) - except urllib.error.HTTPError as ex: - msg = ex.read() - raise RuntimeError(msg) - - if not parlog_only: - datastream_request = urllib.request.Request( url + "_DATASTREAM" + "/_versions.json", - data=json.dumps(vobject).encode("utf-8"), headers={ "Content-type":"application/json" } ) - try: - datastream_response = urllib.request.urlopen(datastream_request) - except urllib.error.HTTPError as ex: - msg = ex.read() - raise RuntimeError(msg) - - return (parlog_response.code == 201, datastream_response == 201) - return (parlog_response.code == 201) - - -def get_latest_version(stream): - """Find out latest version of given stream - :param stream url stream of interest - :return int of version number of None if non-versioned or non-existing stream - author: Georg Schlisio""" - - request = urllib.request.Request("http://archive-webapi.ipp-hgw.mpg.de/" + stream + "/_versions.json", headers={"Accept": "application/json"}) - try: - response = urllib.request.urlopen(request) - d = json.loads(response.read().decode('utf-8')) - except urllib.error.HTTPError as ex: - msg = ex.read() - raise RuntimeError(msg) - # detect unversioned or non-existing stream - if d["versionInfo"] == []: - return None - versions = [] - for i in d['versionInfo']: - versions.append(i['number']) - return max(versions) - - -def sendparlogjsonfileonly(url , jsondata , nameofdatastream ) : - if url.find('http://archive-webapi.ipp-hgw.mpg.de/') == -1: - url = 'http://archive-webapi.ipp-hgw.mpg.de/' + url - url = url.replace(" ","%20") - assert len(url.replace('http://archive-webapi.ipp-hgw.mpg.de/', "").split("/")) == 5 - print(len(url.replace('http://archive-webapi.ipp-hgw.mpg.de/', "").split("/"))) - assert url.find("_DATASTREAM") == -1 - assert url.find("_PARLOG") == -1 - - parlog_request = urllib.request.Request( url + nameofdatastream + "_DATASTREAM" , data = json.dumps(jsondata).encode("utf-8"), - headers = { "Content-type" : "application/json" } ) - - try: - parlog_response = urllib.request.urlopen(parlog_request) - except urllib.error.HTTPError as ex: - msg = ex.read() - raise RuntimeError(msg) - - return (parlog_response.code == 201) - - - -#data['values'][0]['badpixelmap'] = np.swapaxes( data['values'][0]['badpixelmap'] , 0 ,1 ) -#resp = sendparlogjsonfileonly("Test/raw/W7X/QRT_IRCAM_TEST/" , data , "LUT_test_adnanv11") - -""" ---------------------------------------------------------------------------------------------------------------------------- -from here on the uploadroutines from holn ---------------------------------------------------------------------------------------------------------------------------- -""" - - -###definition of interactions with the codac archive to get times and other needed functions -import datetime -#import os -from requests import post - -def get_program_now(tol=60): - """ - get_program_now delivers an array [status,dict] - """ - program_url = 'http://archive-webapi.ipp-hgw.mpg.de/programs.json' - try: - res = urllib.request.urlopen(program_url) -# prog_list = json.loads(res.read().decode('utf-8')) - prog_raw=res.read() - res.close() - except urllib.error.URLError as e: - print('!get_program_now: Error opening URL, try: timewindow from now +- '+str(tol)+'s') - print(e) -# return False,0 - 
now=datetime.datetime.utcnow() - ns=TimeToNs([now.year,now.month,now.day],[now.hour,now.minute,now.second,now.microsecond]) - return read_program(ns) - else: - prog_string=prog_raw.decode(encoding='UTF-8') - prog_list = json.loads(prog_string) - pl=prog_list['programs'][0] - return True, pl - -def match_time_vectors(timevector,T0,T1,T6,ref_T=1,raw=False): - """ - match_time_vectors(timevector,t0,t1,t6,ref='t1') - (np.array/array) timevector: array or list of the timestamps for each frame, in µs - (integer) t0: trigger t0 in ns - (integer) t1: trigger t1 in ns - (integer) t6: trigger t6 in ns - (int) ref: says which trigger is the reference, so where the timevector starts - (bool) raw: is this a raw timevector with cold and background images included? - """ - ctimev=[] - tstart=0 - if ref_T==0: - tstart=T0 - elif ref_T==1: - tstart=T1 - elif ref_T==6: - tstart==T6 - else: - print("reference Trigger outside the expected format, wrong reference, stopping matching") - exit() - if raw: - print("not implemented") -# for t in range(1,len(timevector)): -# if - else: - ctimev.append(tstart) - for t in range(1,len(timevector)): - ctimev.append(tstart+(timevector[t]-timevector[0])) - return ctimev - - -def match_time_vectors_lowlevel(timevector,tstart,tend=None): - """ - match_time_vectors(timevector,t0,t1,t6,ref='t1') - (np.array/array) timevector: array or list of the timestamps for each frame, in ns - (integer) tstart: starttime in ns - (integer) tend: endtime in ns - """ - if tend is None: - ctimev=np.subtract(timevector,timevector[0])+tstart - else: - r=(timevector[-1]-timevector[0])/(tend-tstart) - ctimev=np.subtract(timevector,timevector[0])*r+tstart - return ctimev - -def read_program(timestampstart,timestamp_end=0,tol=60): - """ - read_program() - """ - program_url = 'http://archive-webapi.ipp-hgw.mpg.de/programs.json?from' - if timestamp_end!=0: - jsonurl=program_url+"="+str(int(timestampstart-tol*1e9))+"&upto="+str(int(timestamp_end+tol*1e9)) -# print(program_url+"="+str(int(timestampstart-tol*1e9))+"&upto="+str(int(timestamp_end+tol*1e9))) - else: - jsonurl=program_url+"="+str(int(timestampstart-tol*1e9))+"&upto="+str(int(timestampstart+tol*1e9)) -# print(program_url+"="+str(int(timestampstart-tol*1e9))+"&upto="+str(int(timestampstart+tol*1e9))) - try: - res = urllib.request.urlopen(jsonurl) -# prog_list = json.loads(res.read().decode('utf-8')) - prog_raw=res.read() - res.close() - except urllib.error.URLError as e: - print('!get_program: Error opening URL') - print(e) - return False,0 - else: - prog_string=prog_raw.decode(encoding='UTF-8') - prog_list = json.loads(prog_string) - pl=prog_list['programs'][0] - return True, pl - - - -def get_last_T0(ns=True): - """ - get_last_T0(ns=True) - (bool) ns True or False, whether ns timestamp should be returned, if false a datetime will be returned - """ - url="http://sv-coda-wsvc-3/last_trigger" - try: - res=urllib.request.urlopen(url) - except urllib.error.HTTPError as ex: - msg = ex.read() - raise RuntimeError(msg) - t=int(res.read()) - if ns: - return t - else: - return datetime.datetime.utcfromtimestamp(t/1e9) - -def TimeToNs(date,time): - """ - TimeToNs(date,time) - (list) date [year,month,day] - (list) time [hours,minutes,seconds,microseconds] - """ - date_time=datetime.datetime(date[0],date[1],date[2],time[0],time[1],time[2],time[3]) - div=date_time-datetime.datetime(1970,1,1,0,0,0) -# nsdate=calendar.timegm(datetime.datetime.timetuple(date_time))*1e9#drops the ms,µs!! 
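# Note: unlike the calendar.timegm variant commented out above, the
# conversion below keeps the full microsecond resolution and returns
# nanoseconds since the Unix epoch as a float, which is why callers
# typically wrap it in int(); e.g. TimeToNs([2017,9,26],[8,0,0,0])
# yields 1506412800000000000, i.e. 2017-09-26 08:00:00 UTC.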
- nsdate=div.total_seconds()*1e9 - return nsdate - -#################################### -### START of the UPLOAD ROUTINES ### -#################################### -def upload_meta_stream(camera,portnum,time,sensortemp,cameratemp,framerate,exposuretime,comment="",newversion=False,reason=""): - """ - old: sensortemp,cameratemp,framerate,exposuretime - new: channels with names, units and values - """ - project="W7X" - group="QRT_IRCAM"#+camera#+"_TEST" - datastream_name="AEF"+str(portnum)+"_meta" - url=archivepath+project+"/"+group+"/"+datastream_name - if "Test" in url: - database="Test" - else: - database="ArchiveDB" - if newversion: - archive_create_stream_version(url, reason, False, "n/a", "n/a" ) - ver=get_latest_version(database+"/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG") - version=ver - else: - ver=get_latest_version(database+"/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG") - if ver==None: - reason="Versioning of Data" - archive_create_stream_version(url, reason, False, "n/a", "n/a" ) - version=1 - else: - version=ver - chanDesc={"[0]":{'name':'sensortemperature','physicalQuantity':{'type':'oC'},'active':1}, - "[1]":{'name':'cameratemperature','physicalQuantity':{'type':'oC'},'active':1}, - "[2]":{'name':'framerate','physicalQuantity':{'type':'none'},'active':1}, - "[3]":{'name':'exposuretime','physicalQuantity':{'type':'us'},'active':1} - } - parobj={"label":"parms", - "description":"metadata informations for "+camera+" in AEF"+str(portnum), - "values":[{"chanDescs":chanDesc}], - "dimensions":[time[0],time[-1]] - } - channels=[sensortemp, - cameratemp, - framerate, - exposuretime] - channels=np.asarray(channels) - dataobj={"datatype":"float", - "values":channels.tolist(), - "dimensions":time.tolist() - } - upload_fulldataset(url,parobj,dataobj,version) - -def upload_meta_stream_V2(camera,portnum,time,channel_names,channel_units,channel_values,comment="",newversion=False,reason="",Return=False): - """ - old: sensortemp,cameratemp,framerate,exposuretime - new: channels with names, units and values - """ - project="W7X" - if len(time)==len(channel_values[0]) and len(channel_names)==len(channel_units): - group="QRT_IRCAM"#+camera#+"_TEST" - datastream_name="AEF"+str(portnum)+"_meta" - url=archivepath+project+"/"+group+"/"+datastream_name - if "Test" in url: - database="Test" - else: - database="ArchiveDB" - if newversion: - archive_create_stream_version(url, reason, False, "n/a", "n/a" ) - ver=get_latest_version(database+"/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG") - version=ver - else: - ver=get_latest_version(database+"/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG") - if ver==None: - reason="Versioning of Data" - archive_create_stream_version(url, reason, False, "n/a", "n/a" ) - version=1 - else: - version=ver - chanDesc={} - for i in range(len(channel_names)): - chanDesc["["+str(i)+"]"]={'name':channel_names[i],'physicalQuantity':{'type':channel_units[i]},'active':1} - parobj={"label":"parms", - "description":"metadata informations for "+camera+" in AEF"+str(portnum), - "values":[{"chanDescs":chanDesc,'comment':comment}], - "dimensions":[time[0],time[-1]] - } - channels=[] - for vals in channel_values: - channels.append(vals) - channels=np.asarray(channels) - dataobj={"datatype":"float", - "values":channels.tolist(), - "dimensions":time.tolist() - } - if Return: return upload_fulldataset(url,parobj,dataobj,version,Return=Return) - else: upload_fulldataset(url,parobj,dataobj,version) - else: - print("Length of timevector and length of channel 
values does not match or the number of channel names does not match the number of units") - if Return: - return False - -def upload_raw_data(camera,portnum,time,images,metadata={},comment="",newversion=False,reason=""): - """ - upload_raw_data(camera,portnum,images,time,metadata={},newversion=False,reason="") - (str) camera: string of cameraname "IRCAM" or "INFRATEC", is used for the path - (int) portnum: portnumber as int, example: 10 or 11 etc. - (numpy.array) images: t x w x h numpy array - (numpy.array) time: time vector in uint64 ns of length t - (dict) metadata: the metadata of the file, should contain texp, filter in case of INFRATEC, camera_modell, serial_number,framerate, if croped: pos_left, pos_right, multi_exp, sensor_temperature in K, bitdepth - (bool) newversion: defines whether a new version is needed - (str) reason: reason for the new version - """ - project="W7X" - group="QRT_IRCAM"#+camera#+"_TEST" # test part will be removed in final version - datastream_name="AEF"+str(portnum)+"_raw"#_TEST_001" - chanDesc=[{'name':'raw','physicalQuantity':{'type':'none'},'active':1}] - #adjusted to metadataforcurrentvideo - print("convert image data") - images=np.asarray(images,dtype=np.int16) -# parlog={'chanDescs':chanDesc,'meta-data':header,'comment':comment} - parobj={"label":"parms", - "description":"raw data for "+camera+" in AEF"+str(portnum), - "values":[{"chanDescs":chanDesc,'meta-data':metadata,'comment':comment}], - "dimensions":[time.tolist()[0],time.tolist()[-1]] - } - url=archivepath+project+"/"+group+"/"+datastream_name - if "Test" in url: - database="Test" - else: - database="ArchiveDB" - if newversion: - archive_create_stream_version(url, reason, False, "n/a", "n/a" ) - ver=get_latest_version(database+"/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG") - version=ver - else: - ver=get_latest_version(database+"/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG") - if ver==None: - reason="Versioning of Data" - archive_create_stream_version(url, reason, False, "n/a", "n/a" ) - version=1 - else: - version=ver - print("writing parlog") - upload_Parlog(url,parobj,version) - #camera,portnum,,"raw" - upload_images(images,time,group,datastream_name,newversion,reason) - -def regenerate_timevector(time,reftrig=1,tref=0,toff=0,give_program=False): - """ - regenerate_timevector(time,reftrig,tref,toff) - (np.array) time: timevector of length t, 1d, in ns - (int) reftrig: number of the trigger which is used to refere the newtime to - (tref) tref: reference time for searching t0,t1,t6, for =0 it uses the first value in time - (toff toff: offset of the timevector in ms - """ - if tref==0: - tref=time[0] - prog=read_program(int(tref),int(tref+time[-1]-time[0]),70) - if prog[0]: - if np.size(reftrig)==1: #shift of timevector - tref=prog[1].get('trigger').get(str(np.squeeze(reftrig)))[0] - ntime=np.asarray(match_time_vectors_lowlevel(time,tref+toff*1e6)) - else: #shift and rescaling - tref=prog[1].get('trigger').get(str(reftrig[0]))[0] - tref2=prog[1].get('trigger').get(str(reftrig[1]))[0] - ntime=np.asarray(match_time_vectors_lowlevel(time,tref+toff*1e6,tref2)) - if give_program: - return ntime,prog[1]['id'] - else: - return ntime - else: - print("Program for this reference timestamp not found, cannot generate new timevector") - if give_program: - return np.asarray(time),"20000000.000" - - -def upload_raw_data_from_H5(camera,portnum,time,h5file,metadata={},comment="",parlog=True,rawdata=True,newversion=False,reason="",del_list=[],temppath=""): - """ - 
upload_raw_data(camera,portnum,images,time,metadata={},newversion=False,reason="") - (str) camera: string of cameraname "IRCAM" or "INFRATEC", is used for the path - (int) portnum: portnumber as int, example: 10 or 11 etc. - (np.array) time: timevector of length t in ns - (dict) metadata: the metadata of the file, should contain texp, filter in case of INFRATEC, camera_modell, serial_number,framerate, if croped: pos_left, pos_right, multi_exp, sensor_temperature in K, bitdepth - (bool) newversion: defines whether a new version is needed - (str) reason: reason for the new version - """ - #camera="IRCAM" - project="W7X" - group="QRT_IRCAM"#+camera#+"_TEST" # test part will be removed in final version - datastream_name="AEF"+str(portnum)+"_raw"#_TEST7" - #not tested# - chanDesc=[{'name':'raw','physicalQuantity':{'type':'none'},'active':1}] - #adjusted to metadataforcurrentvideo - url=archivepath+project+"/"+group+"/"+datastream_name - if "Test" in url: - database="Test" - else: - database="ArchiveDB" - if newversion: - archive_create_stream_version(url, reason, False, "n/a", "n/a" ) - ver=get_latest_version(database+"/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG") - version=ver - else: - ver=get_latest_version(database+"/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG") - if ver==None: - reason="Versioning of Data" - archive_create_stream_version(url, reason, False, "n/a", "n/a" ) - version=1 - else: - version=ver - parobj={"label":"parms", - "description":"raw data for "+camera+" in AEF"+str(portnum), - "values":[{"chanDescs":chanDesc,'meta-data':metadata,'comment':comment}], - "dimensions":[int(time[0]),int(time[-1])] - } - if parlog: - print("writing parlog") - parlog_success=upload_Parlog(url,parobj,version,Return=False) - else: - parlog_success=True - if rawdata: - print("writing done, creating upload H5file") - tmp=writeH5_from_File(datastream_name,h5file,'images',time,double_list=del_list,path=temppath) - print("uploading H5file") - if type(tmp) is list: - n=1 - for ele in tmp: - success=True - print("uploading part "+str(n)+" of "+str(len(tmp))) - n+=1 - bla= uploadH5_Versioning(group,datastream_name,ele,version,True,project=project) - if bla[0]: - print("upload of part "+str(n-1)+" of "+str(len(tmp))+" was successfull") - else: - success=False - print("upload of part "+str(n-1)+" of "+str(len(tmp))+" failed") - return [success],parlog_success - else: - return uploadH5_Versioning(group,datastream_name,tmp,version,True,project=project),parlog_success - -def upload_temperaure_data(camera,portnum,images,time,metadata={},comment="",newversion=False,reason=""): - """ - upload_temperaure_data(camera,portnum,images,time,metadata={},newversion=False,reason="") - (str) camera: string of cameraname "IRCAM" or "INFRATEC", is used for the path - (int) portnum: portnumber as int, example: 10 or 11 etc. 
- (numpy.array) images: t x w x h numpy array - (numpy.array) time: time vector in uint64 ns of length t - (numpy.array) texp_filter: a list with one or two elements texp or texp,filter for INFRATEC - (dict) metadata: the metadata of the file, should contain texp, filter in case of INFRATEC, camera_modell, serial_number,framerate, if croped: pos_left, pos_right, multi_exp, sensor_temperature in K, bitdepth as integer,LUT_V as integer,LUT_timestamp in ns,NUC_timestamp in ns,Divertor_temperature in K,Divertor_TC_path as str,Divertor_TC_time in ns - (bool) newversion: defines whether a new version is needed - (str) reason: reason for the new version - """ - #not tested# - print("convert image data") - group="QRT_IRCAM" - datastream_name="AEF"+str(portnum)+"_temperature" - images=np.asarray(images,dtype=np.int16) - chanDesc=[{'name':'temperature','physicalQuantity':{'type':'K'},'active':1}] - parlog={'chanDescs':chanDesc,'meta-data':metadata,'comment':comment} -# upload_images(camera,portnum,images,time,"temperature","K",parlog,newversion=False,reason="") - url=archivepath+"W7X/"+group+"/"+datastream_name - upload_Parlog(url,parlog) - upload_images(images,time,group,datastream_name,newversion,reason) - - -def upload_Background_frame(camera,portnum,time,texp_filter,background,parlog=True,data=True,newversion=False,reason=""): - group="QRT_IRCAM"#+camera # test part will be removed in final version -# group="QRT_IRCAM_TEST" # test part will be removed in final version - project="W7X" - name=["background"] - datastream_name="AEF"+str(portnum)+"_background" # test part will be removed in final version - structure={ - "Texp":texp_filter[0] - } - if camera=="INFRATEC": - structure['filter']=texp_filter[1] - url=archivepath+project+"/"+group+"/"+datastream_name - if "Test" in url: - database="Test" - else: - database="ArchiveDB" - if newversion: - archive_create_stream_version(url, reason, False, "n/a", "n/a" ) - ver=get_latest_version(database+"/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG") - version=ver - else: - ver=get_latest_version(database+"/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG") - if ver==None: - reason="Versioning of Data" - archive_create_stream_version(url, reason, False, "n/a", "n/a" ) - version=1 - else: - version=ver - print("Version for upload is "+str(version)) - chanDesc={"[0]":{'name':name[0],'physicalQuantity':{'type':'none'},'active':1}} - fullframe=np.array([background]) - parobj={"label":"parms", - "description":"background for "+camera+" in AEF"+str(portnum), - "values":[{"chanDescs":chanDesc,"structure":structure}], - "dimensions":[time[0],int(time[0]+np.int64(texp_filter[0]*1E3))] - } - dataobj={"datatype":"float", - "values":fullframe.tolist(), - "dimensions":time - } - - upload_fulldataset(url,parobj,dataobj,version,parlog,data) - - - -def upload_NUC_ref(camera,portnum,time,texp_filter,gain,offset,cold,bad,gain_e=[],offset_e=[],parlog=True,data=True,newversion=False,reason=""): - """ - upload_NUC_ref(gain,offset,hot,cold,bad,gain_e,offset_e,camera,portnum,time,newversion=False) - (str) camera: string of cameraname "IRCAM" or "INFRATEC", is used for the path - (int) portnum: portnumber as int, example: 10 or 11 etc. 
- (numpy.array) time: time vector in uint64 ns of length t - (numpy.array) texp_filter: a list with one or two elements texp or texp,filter for INFRATEC - (numpy.array) gain: w x h array, image of full size - (numpy.array) offset: w x h array, image of full size - (numpy.array) cold: w x h array, image of full size - (numpy.array) bad: w x h array or n x 2 array, image of full size or a pixelist - (numpy.array) gain_e: w x h array, image of full size - (numpy.array) offset_e: w x h array, image of full size - (bool) newversion: defines whether a new version is needed - (str) reason: reason for the new version - """ - #camera="IRCAM" #will be removed in final version - group="QRT_IRCAM"#+camera#+"_TEST" # test part will be removed in final version - name=["NUC"] - project="W7X" - datastream_name="AEF"+str(portnum)+"_NUC" # test part will be removed in final version - if np.shape(gain)==np.shape(bad): - badframe=bad - else: - badframe=np.zeros(np.shape(gain)) - if len(bad)>0: - if np.shape(bad)[1]==2: - for i in range(np.shape(bad)[0]): - badframe[bad[i][1]][bad[i][0]]=1 - else: - print("badpixel map or list does not fit the requirements") - else: - print("bad pixel list empty, no bad pixels stored!") - if gain_e==[] or gain_e==None: - gain_e=np.zeros(np.shape(gain)) - if offset_e==[] or offset_e==None: - offset_e=np.zeros(np.shape(offset)) - fullframe=np.array([np.vstack((gain,offset,cold,badframe,gain_e,offset_e))]) - structure={ - "gain_index":0, - "offset_index":np.shape(gain)[0], - "cold_index":np.shape(gain)[0]+np.shape(offset)[0], - "badpixels_index":np.shape(gain)[0]+np.shape(offset)[0]+np.shape(cold)[0], - "gain_error_index":np.shape(gain)[0]+np.shape(offset)[0]+np.shape(cold)[0]+np.shape(bad)[0], - "offset_error_index":np.shape(gain)[0]+np.shape(offset)[0]+np.shape(cold)[0]+np.shape(bad)[0]+np.shape(gain_e)[0], - "Texp":texp_filter[0] - } - if camera=="INFRATEC": - structure['filter']=texp_filter[1] - url=archivepath+project+"/"+group+"/"+datastream_name - if "Test" in url: - database="Test" - else: - database="ArchiveDB" - if newversion: - archive_create_stream_version(url, reason, False, "n/a", "n/a" ) - ver=get_latest_version(database+"/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG") - version=ver - else: - ver=get_latest_version(database+"/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG") - if ver==None: - reason="Versioning of Data" - archive_create_stream_version(url, reason, False, "n/a", "n/a" ) - version=1 - else: - version=ver - print("Version for upload is "+str(version)) - chanDesc={"[0]":{'name':name[0],'physicalQuantity':{'type':'none'},'active':1}} - parobj={"label":"parms", - "description":"calib for "+camera+" in AEF"+str(portnum), - "values":[{"chanDescs":chanDesc,"structure":structure}], - "dimensions":[time[0],int(time[0]+np.int64(texp_filter[0]*1E3))] - } - dataobj={"datatype":"float", - "values":fullframe.tolist(), - "dimensions":time - } - - upload_fulldataset(url,parobj,dataobj,version,parlog,data) - -def upload_LUT_tables(LUTs,LUTref,camera,portnum,time,group="QRT_IRCAM",newversion=False,reason="",extra_par=None): - """ - upload_LUT_tables(LUTs,LUTref,camera,portnum,time,newversion=False,reason="") - (numpy.array) LUTs: t x n x 3 array DL,T,T_er, in general t=1, n= number of look-up-tables - (numpy.array) LUTref: t x n x 2 texp, emissivity or t x n x 3 filter,texp, emissivity for INFRATEC, in general t=1, n= number of LUTs - (str) camera: string of cameraname "IRCAM" or "INFRATEC", is used for the path - (int) portnum: portnumber as int, 
example: 10 or 11 etc. - (numpy.array) time: time vector in uint64 ns of length t - (bool) newversion: defines whether a new version is needed - (str) reason: reason for the new version - """ - #camera="IRCAM" #will be removed in final version -# group="QRT_IRCAM"#+camera#+"_TEST" # test part will be removed in final version - name=["LUT"] - project="W7X" - if type(portnum)==int: - datastream_name="AEF"+str(portnum)+"_LUT" - descri="LUT for "+camera+" in AEF"+str(portnum) - elif type(portnum)==str: - datastream_name=portnum+"_LUT" - descri="LUT for "+camera+" in "+portnum - #test of dataformat - if np.shape(LUTs)[0]!=len(time): - print("missmatch in between time vector and LUTs timestamps") - exit() - data=[] - structure={} - # checking whether DL is always the same?? needed? - for t in range(len(time)): - data.append([]) - tLUT=LUTs[0] - data[t].append(tLUT[0][0]) - for i in range(np.shape(tLUT)[0]): - data[t].append(tLUT[i][1]) - data[t].append(tLUT[i][2]) - data=np.array(data) - if len(time)==1: - for n in range(np.shape(LUTref[0])[0]): - if camera=="INFRATEC": - structure["Filter_"+str(LUTref[0][n][0])+"_Texp_"+str(LUTref[0][n][1])+"us_e_"+str(LUTref[0][n][2])]=2*n+1 - elif camera=="FLIR": - structure["Texp_"+str(LUTref[0][n][0])+"us_e_"+str(LUTref[0][n][1])]=2*n+1 - else: - structure["Texp_"+str(LUTref[0][n][0])+"us_e_"+str(LUTref[0][n][1])]=2*n+1 - else: - print("multiple timestamps for LUTs are not jet implemented") - exit() - if type(extra_par)==dict: - structure.update(extra_par) - url=archivepath+project+"/"+group+"/"+datastream_name - if "Test" in url: - database="Test" - else: - database="ArchiveDB" - if newversion: - archive_create_stream_version(url, reason, False, "n/a", "n/a" ) - ver=get_latest_version(database+"/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG") - version=ver - else: - ver=get_latest_version(database+"/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG") - if ver==None: - reason="Versioning of Data" - archive_create_stream_version(url, reason, False, "n/a", "n/a" ) - version=1 - else: - version=ver - #using here selfmade uploadroutine to avoid that LUTs table appear as images - chanDesc={"[0]":{'name':name[0],'physicalQuantity':{'type':'none'},'active':1}} - parobj={"label":"parms", - "description":descri, - "values":[{"chanDescs":chanDesc,"structure":structure}], - "dimensions":[time[0],time[0]+10] - } - - dataobj={"datatype":"float", - "values":data.tolist(), - "dimensions":time - } - - upload_fulldataset(url,parobj,dataobj,version) - -def upload_hot_cold_reference(port,path,time=1503907200000000000,newversion=False,reason=""): - """ - uploads the reference hot and cold images - """ - camera="IRCAM" - group="QRT_IRCAM"#+camera#+"_TEST" # test part will be removed in final version - name=["raw"] - project="W7X" - ### get the files in folder, sort it, check whether port in filename fit the expected setting - OP=get_OP_by_time(time_ns=time) - camera=portcamdict[OP]["AEF"+str(port)] - files=os.listdir(path) - testfile=files[0] - Fragments=testfile.split(".")[0].split("_") - for ele in camera.split("_"): - if not ele in Fragments: - raise Exception("filenames does not fit the expected camera, "+camera+", missing part is "+ele) - serialnumber=camera.split("_")[2] - cameramodell=camera.split("_")[0]+" "+camera.split("_")[1] - time_ns_list=[] - framelist=[] - structurenames=[] - for i in range(len(files)): - filep=files[i].split(".")[0].split("_") - if len(Fragments)==len(filep): - if serialnumber in filep: - camfilter=filep[filep.index('filter')+1] - if 
"cold" in filep: - frame_type="cold" - elif "hot" in filep: - frame_type="hot" - else: - raise Exception("cannot indentify the type") - texp=filep[filep.index('texp')+1] - H5File=h5reader.File(os.path.join(path,files[i]),'r') - if len(list(H5File.keys()))==1: - key=list(H5File.keys())[0] - time_ns_list.append(int(time+i*10e6)) - framelist.append(np.asarray(H5File[key])) - structurenames.append(frame_type+"_"+texp) - H5File.close() - else: - H5File.close() - raise Warning("unexpected HDF5 structure, to much content") - else: - raise Warning("serialnumber not found in the file, file will be ignored") - else: - raise Warning("different file format detected! (differnt from the first one. it will be ignored") - ### - datastream_name="AEF"+str(port)+"_raw" - url=archivepath+project+"/"+group+"/"+datastream_name - if "Test" in url: - database="Test" - else: - database="ArchiveDB" - if newversion: - archive_create_stream_version(url, reason, False, "n/a", "n/a" ) - ver=get_latest_version(database+"/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG") - version=ver - else: - ver=get_latest_version(database+"/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG") - if ver==None: - reason="Versioning of Data" - archive_create_stream_version(url, reason, False, "n/a", "n/a" ) - version=1 - else: - version=ver - - structure={ - 'serialnumber':serialnumber, - 'cameraname':cameramodell, - 'filter':camfilter - } - for i in range(len(time_ns_list)): - structure[structurenames[i]]=time_ns_list[i] - # structure in the parlog, should contain something like cold_1us etc. with given timestamp inside - chanDesc={"[0]":{'name':name[0],'physicalQuantity':{'type':'none'},'active':1}} - parobj={"label":"parms", - "description":"reference data for AEF"+str(port), - "values":[{"chanDescs":chanDesc,"structure":structure}], - "dimensions":[time_ns_list[0],time_ns_list[-1]] - } - dataobj={"datatype":"integer", - "values":np.array(framelist).tolist(), - "dimensions":time_ns_list - } - upload_fulldataset(url,parobj,dataobj,version) - - -def upload_scene_model(port,time_ns,path="\\\\sv-e4-fs-1\\E4-Mitarbeiter\\E4 Diagnostics\\QIR\\Calibrations\\scene_models",newversion=False,reason=""): -# camera="IRCAM" - project="W7X" - group="QRT_IRCAM_TEST"#+camera#+"_TEST" # test part will be removed in final version - name=["scene_model"] - datastream_name="AEF"+str(port)+"_scene_model" - url=archivepath+project+"/"+group+"/"+datastream_name - Fullpath=os.path.join(path,"AEF"+str(port),"AEF"+str(port)+"_scene_model.h5") - File=h5reader.File(Fullpath,'r') - keylist=list(File.keys()) - time_ns_list=[] - framelist=[] - ii=0 - if "Test" in url: - database="Test" - else: - database="ArchiveDB" - if newversion: - archive_create_stream_version(url, reason, False, "n/a", "n/a" ) - ver=get_latest_version(database+"/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG") - version=ver - else: - ver=get_latest_version(database+"/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG") - if ver==None: - reason="Versioning of Data" - archive_create_stream_version(url, reason, False, "n/a", "n/a" ) - version=1 - else: - version=ver - structure={} - for key in keylist: - framelist.append(np.array(File[key])) - time_ns_list.append(time_ns+20*ii) - ii+=0 - structure[key]=time_ns+20*ii - - chanDesc={"[0]":{'name':name[0],'physicalQuantity':{'type':'none'},'active':1}} - parobj={"label":"parms", - "description":"scene model for AEF"+str(port), - "values":[{"chanDescs":chanDesc,"structure":structure}], - "dimensions":[time_ns_list[0],time_ns_list[-1]] - } 
- upload_Parlog(url,parobj,version=version) -# upload_images(camera,port,framelist,time_ns_list,"scene_model",version) - upload_images(framelist,time_ns_list,group,datastream_name,newversion,reason) - - - -#def upload_NUC_ref(): -# """ -# -# """ -# print("not ready") - -def download_LUT_tables(camera,port,time,texp=0,emissivity=0,Filter=0): - """ - (str) camera: string of cameraname "IRCAM" or "INFRATEC", is used for the path - (numpy.array) time: time vector in uint64 ns of length t - (bool) newversion: defines whether a new version is needed - (int) portnum: portnumber as int, example: 10 or 11 etc. - in case of texp=0 and emissivity=0 it will give back all LUTs - """ - - print("not ready") - return 0 - - - -def upload_Parlog(url,parobj,versionnumber=0,Return=False): -# print('>upload_Parlog: json of parlog') -# print(json.dumps(parobj).encode("utf-8")) - if versionnumber==0: - parlog_request = urllib.request.Request( url + "_PARLOG", data = json.dumps(parobj).encode("utf-8"),headers = { "Content-type" : "application/json" } ) - else: - parlog_request = urllib.request.Request( url + "_PARLOG/V"+str(versionnumber), data = json.dumps(parobj).encode("utf-8"),headers = { "Content-type" : "application/json" } ) - try: - parlog_response = urllib.request.urlopen(parlog_request) - if Return: - if parlog_response.getcode()//100 is 2: - return True - else: - print(parlog_response.getcode()) - return False - except urllib.error.HTTPError as ex: - msg = ex.read() - if Return: - print(msg) - return False - else: - raise RuntimeError(msg) - -def upload_data(url,dataobj,versionnumber=0,Return=False): - if versionnumber==0: - datastream_request = urllib.request.Request( url + "_DATASTREAM",data=json.dumps(dataobj).encode("utf-8"), headers={ "Content-type":"application/json" } ) - else: - datastream_request = urllib.request.Request( url + "_DATASTREAM/V"+str(versionnumber),data=json.dumps(dataobj).encode("utf-8"), headers={ "Content-type":"application/json" } ) - try: - datastream_response = urllib.request.urlopen(datastream_request) - if Return: return datastream_response.getcode()//100 is 2 - except urllib.error.HTTPError as ex: - msg = ex.read() - raise RuntimeError(msg) - -def upload_fulldataset(url,parlog,data,versionnumber,bparlog=True,bdata=True,Return=False): - if bparlog: - if Return: - res=upload_Parlog(url,parlog,versionnumber,Return) - else: - upload_Parlog(url,parlog,versionnumber,Return) - if bdata: - if Return: - res1=upload_data(url,data,versionnumber,Return) - else: - upload_data(url,data,versionnumber) - if bdata and bparlog and Return: - return (res and res1) - -### from here on subfunctions - -def upload_images(images,time,group,stream,newversion=False,reason="",project="W7X",version=0,swapping=True,Test=False): - """ - upload_images(camera,portnum,images,time,metadata={},newversion=False,reason="") - - """ - #camera="IRCAM" #will be removed in final version - group=group#"QRT_IRCAM"#+camera#+"_TEST" # test part will be removed in final version -# names=[typ] - datastream_name=stream#"AEF"+str(portnum)+"_"+typ#+"_TEST001" - #url="http://archive-webapi.ipp-hgw.mpg.de/Test/raw/"+project+"/"+group+"/"+datastream_name - if Test: - url="http://archive-webapi.ipp-hgw.mpg.de/Test/raw/"+project+"/"+group+"/"+datastream_name - database="Test" - else: - url=archivepath+project+"/"+group+"/"+datastream_name - database="ArchiveDB" - if newversion: - archive_create_stream_version(url, reason, False, "n/a", "n/a" ) - 
-
-### from here on subfunctions
-
-def upload_images(images,time,group,stream,newversion=False,reason="",project="W7X",version=0,swapping=True,Test=False):
-    """
-    upload an image stack as a versioned HDF5 datastream
-    (numpy.array) images: image data, shape (width,height,time)
-    (list) time: timestamps in uint64 ns
-    (str) group: archive group, e.g. "QRT_IRCAM"
-    (str) stream: datastream name, e.g. "AEF10_raw"
-    (bool) newversion: create a new stream version before uploading
-    (str) reason: reason for the new version
-    """
-    #camera="IRCAM" #will be removed in final version
-    datastream_name=stream#"AEF"+str(portnum)+"_"+typ#+"_TEST001"
-    if Test:
-        url="http://archive-webapi.ipp-hgw.mpg.de/Test/raw/"+project+"/"+group+"/"+datastream_name
-        database="Test"
-    else:
-        url=archivepath+project+"/"+group+"/"+datastream_name
-        database="ArchiveDB"
-    if newversion:
-        archive_create_stream_version(url, reason, False, "n/a", "n/a" )
-        ver=get_latest_version(database+"/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG")
-        version=ver
-    else:
-        ver=get_latest_version(database+"/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG")
-        if ver is None:
-            reason="Versioning of Data"
-            archive_create_stream_version(url, reason, False, "n/a", "n/a" )
-            version=1
-        else:
-            version=ver
-    try:
-#        archive.write_signals(images,time,group,project,names, datastream_name,units,metadata,2,True)
-        tmp=writeH5_images(datastream_name,images,time,swapping=swapping)
-#        uploadH5(group,datastream_name,tmp,True)
-        response=uploadH5_Versioning(group,datastream_name,tmp,version,True,project=project,Test=Test)
-        return response
-    except Exception as E:
-        print("Upload Failed")
-        print(E)
-        return [False]
-
-def uploadH5(group,stream, h5file, delete=False, timeout=None,project="W7X"):
-    # group=archive group, stream=datastream name, h5file=path to the HDF5 file
-    base=archivepath+project+"/"#"http://archive-webapi/Test/raw/W7X/"
-    try:
-        headers = {'Content-Type': 'application/x-hdf'}
-        link = base+group+'?dataPath=/data/'+stream+'&timePath=/data/timestamps'
-        f = open(h5file, 'rb')
-        try:
-            result = post(link, headers=headers, data=f, timeout=timeout)
-        finally:
-            f.close()
-    finally:
-        if delete:
-            try:
-                os.remove(h5file)
-            except KeyboardInterrupt as ki: raise ki
-            except Exception:
-                print('could not delete file "%s"' % h5file)
-    if not result.ok:
-        print(result.reason)
-        print(result.text)
-    else:
-        print("successfully uploaded h5file")
-#    result = _sup.requeststr(result)
-#    _sup.debug(result,3)
-#    return result
-
-def uploadH5_Versioning(group,stream, h5file,version, delete=False, timeout=None,project="W7X",Test=False):
-    # group=archive group, stream=datastream name, h5file=path to the HDF5 file
-    if Test:
-        base="http://archive-webapi.ipp-hgw.mpg.de/Test/raw/"+project+"/"#"http://sv-coda-wsvc-3/Test/raw/W7X/"
-    else:
-        base=archivepath+project+"/"#"http://archive-webapi/Test/raw/W7X/"
-    #http://sv-coda-wsvc-3/Test/raw/W7X/QRT_IRCAM_TEST?dataPath=/data/AEF100_RAW_TEST2&timePath=/data/timestamps&versionNumber=1
-    try:
-        headers = {'Content-Type': 'application/x-hdf'}
-        link = base+group+'?dataPath=/data/'+stream+'&timePath=/data/timestamps&versionNumber='+str(version)
-        f = open(h5file, 'rb')
-        try:
-            result = post(link, headers=headers, data=f, timeout=timeout)
-        finally:
-            f.close()
-    finally:
-        if delete:
-            try:
-                os.remove(h5file)
-            except KeyboardInterrupt as ki: raise ki
-            except Exception:
-                print('could not delete file "%s"' % h5file)
-    if not result.ok:
-        print(result.reason)
-        print(result.text)
-        return [False,result.reason,result.text]
-    else:
-        print("successfully uploaded h5file")
-        return [True]
-
-def writeH5(stream,data,dimof,idx=None,swapping=True):
-    """
-    stream=datastream name, data=numpy.array, dimof=list of timestamps (uint64 ns)
-    data.shape = (width,height,time) or (width,height) if len(dim)==1
-    """
-    dtype = str(data.dtype)
-    tmpfile = "archive_"+stream+'_'+str(dimof[0])
-    if idx: tmpfile += '_%d'%(idx,)
-    tmpfile += ".h5"
-    if data.ndim<3:
-        data = data.reshape(list(data.shape)+[1])
-    if swapping:
-        data = data.swapaxes(0,1) # (width,height,time) -> (row,col,time)
-    with h5reader.File(tmpfile, 'w') as f:
-        g = f.create_group('data') # requires [row,col,time]
-        g.create_dataset('timestamps', data=list(dimof), dtype='uint64',compression="gzip")
-        g.create_dataset(stream, data=data.tolist(), dtype=dtype,compression="gzip")
-    return tmpfile
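-
-# The temporary files written by writeH5/writeH5_images share one layout:
-# /data/timestamps (uint64) and /data/<stream> with shape (row,col,time).
-# Commented sketch for inspecting such a file; the file name is an example.
-#with h5reader.File('archive_AEF10_raw_1553558400000000000.h5','r') as f:
-#    print(list(f['data'].keys()))     # ['AEF10_raw', 'timestamps']
-#    print(f['data/timestamps'].shape) # (time,)
-#    print(f['data/AEF10_raw'].shape)  # (row, col, time)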
-
-def writeH5_images(stream,data,dimof,idx=None,swapping=True):
-    """
-    stream=datastream name, data=numpy.array, dimof=list of timestamps (uint64 ns)
-    data.shape = (width,height,time) or (width,height) if len(dim)==1
-    same layout as writeH5, but written chunk-wise (one frame per chunk) via write_direct
-    """
-    dtype = str(data.dtype)
-    tmpfile = "archive_"+stream+'_'+str(dimof[0])
-    if idx: tmpfile += '_%d'%(idx,)
-    tmpfile += ".h5"
-    if data.ndim<3:
-        data = data.reshape(list(data.shape)+[1])
-    if swapping:
-        data = data.swapaxes(0,1) # (width,height,time) -> (row,col,time)
-    shapi=np.shape(data)
-    with h5reader.File(tmpfile, 'w') as f:
-        g = f.create_group('data') # requires [row,col,time]
-        g.create_dataset('timestamps', data=list(dimof), dtype='uint64',compression="gzip")
-        dset=g.create_dataset(stream,shape=(shapi[0],shapi[1],shapi[2]),dtype=dtype,chunks=(shapi[0],shapi[1],1))
-        data=np.ascontiguousarray(data)
-        dset.write_direct(data)
-    return tmpfile
-
-#def writeH5_V2(stream,dataname,data,dimof,idx=None):
-#    """
-#    path=Path, data=numpy.array, dimof=list of long
-#    data.shape = (width,height,time) or (width,height) if len(dim)==1
-#    """
-##    stream = path.stream
-#    dtype = str(data.dtype)
-#    tmpfile = "archive_"+stream+'_'+str(dimof[0])
-#    if idx: tmpfile += '_%d'%(idx,)
-#    tmpfile += ".h5"
-#    if data.ndim<3:
-#        data = data.reshape(list(data.shape)+[1])
-#    data = data.swapaxes(0,1) # (width,height,time) -> (row,col,time)
-#    with h5reader.File(tmpfile, 'w') as f:
-#        g = f.create_group('data') # requires [row,col,time]
-#        g.create_dataset('timestamps', data=list(dimof), dtype='uint64',compression="gzip")
-#        g.create_dataset(dataname, data=data.tolist(), dtype=dtype,compression="gzip")
-#    return tmpfile
-
-def writeH5_from_File(stream,filename,key,dimof,idx=None,double_list=[],path=""):
-    """
-    stream=datastream name, filename=source HDF5 file, key=dataset key in the source file,
-    dimof=list of timestamps (uint64 ns), double_list=indices of duplicate frames to drop
-    copies the dataset into an upload file; files above ~4 GB are split into several parts
-    """
-    import struct
-    get_P=8*struct.calcsize("P") # pointer size in bit: 32 or 64
-    filesize=os.stat(filename).st_size
-    f5=h5reader.File(filename,'r')
-    if filesize<4000000000:
-        tmpfile = path+"archive_"+stream+'_'+str(dimof[0])
-        if idx: tmpfile += '_%d'%(idx,)
-        tmpfile += ".h5"
-        with h5reader.File(tmpfile, 'w') as f:
-            g = f.create_group('data') # requires [row,col,time]
-            if len(double_list)==0:
-                g.create_dataset('timestamps', data=list(dimof), dtype='uint64',compression="gzip")
-                f5.copy(key,f['/data'],name=stream)
-            else:
-                print("removing double frames for the upload HDF5")
-                shapi=f5[key].shape
-                dummy=np.zeros((shapi[0],shapi[1],shapi[2]),dtype=f5[key].dtype)
-                f5[key].read_direct(dummy)
-                dummy=np.delete(dummy,double_list,2)
-                dimof=np.delete(dimof,double_list)#done already earlier
-                g.create_dataset('timestamps', data=list(dimof), dtype='uint64',compression="gzip")
-                dset=g.create_dataset(stream,shape=(shapi[0],shapi[1],shapi[2]-len(double_list)),dtype='uint16',chunks=(shapi[0],shapi[1],1))
-                dummy=np.ascontiguousarray(dummy)
-                dset.write_direct(dummy)
-#            g.create_dataset(stream, data=data.tolist(), dtype=dtype,compression="gzip")
-        f5.close()
-        return tmpfile
-    else:
-        nrfiles=int(np.ceil(filesize/3500000000))
-        print("Warning! The file for upload is larger than 4 GB ("+str(round(filesize/1000000000,2))+" GB); splitting it into "+str(nrfiles)+" smaller files for the upload. This will take more time.")
-        tmpfiles=[]
-        times=[]
-        limits=[0]
-        shapi=f5[key].shape
-        if len(double_list)>0:
-            dimof=np.delete(dimof,double_list)
-        intervall=int(np.shape(dimof)[0]/nrfiles)
-        for i in range(0,nrfiles-1):
-            limits.append(intervall*(i+1))
-            times.append(dimof[limits[i]:limits[i+1]])
-        limits.append(np.shape(dimof)[0])
-        times.append(dimof[limits[nrfiles-1]:limits[nrfiles]])
-        for i in range(nrfiles):
-            tmpfile = "archive_"+stream+'_'+str(dimof[0])+"_part"+str(i+1)
-            if idx: tmpfile += '_%d'%(idx,)
-            tmpfile += ".h5"
-            with h5reader.File(tmpfile, 'w') as f:
-                g = f.create_group('data') # requires [row,col,time]
-                g.create_dataset('timestamps', data=list(times[i]), dtype='uint64')#,compression="gzip")
-                dset=g.create_dataset(stream,shape=(shapi[0],shapi[1],limits[i+1]-limits[i]),dtype='uint16',chunks=(shapi[0],shapi[1],1))#,compression='gzip')
-                if get_P<64:# 32-bit Python: copy frame by frame to limit memory use
-                    c=0
-                    for n in range(limits[i+1]-limits[i]):
-                        if len(double_list)==0 or (limits[i]+n not in double_list):
-                            dset[:,:,n-c]=f5[key][:,:,limits[i]+n]
-                        else:
-                            c+=1
-                else:# 64-bit Python: read the whole part at once
-                    dummy=np.zeros((shapi[0],shapi[1],limits[i+1]-limits[i]),dtype=f5[key].dtype)
-                    f5[key].read_direct(dummy,np.s_[:,:,limits[i]:limits[i+1]])
-                    if len(double_list)==0:
-                        fine=True
-                    else:
-                        fine=False
-                        part_double_list=[]
-                        for ele in double_list:
-                            if ele>limits[i] and ele<limits[i+1]:
-                                part_double_list.append(ele-limits[i])
-                    if not fine:
-                        print("deleting double frames in part "+str(i))
-                        dummy=np.delete(dummy,part_double_list,2)
-                    dummy=np.ascontiguousarray(dummy)
-                    dset.write_direct(dummy)
-            tmpfiles.append(tmpfile)
-        f5.close() # close the source file
-        return tmpfiles
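-
-# Worked example for the splitting arithmetic above (the values are illustrative):
-#import numpy as np
-#filesize=10e9; nframes=3000
-#nrfiles=int(np.ceil(filesize/3500000000)) # -> 3
-#intervall=int(nframes/nrfiles)            # -> 1000
-#limits=[0]+[intervall*(i+1) for i in range(nrfiles-1)]+[nframes] # -> [0, 1000, 2000, 3000]
-# so the parts hold frames [0:1000), [1000:2000) and [2000:3000)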
This will take more time") - tmpfiles=[] - times=[] - limits=[0] - shapi=f5[key].shape - if len(double_list)>0: - dimof=np.delete(dimof,double_list) - intervall=int(np.shape(dimof)[0]/nrfiles) - for i in range(0,nrfiles-1): - limits.append(intervall*(i+1)) - times.append(dimof[limits[i]:limits[i+1]]) - limits.append(np.shape(dimof)[0]) - times.append(dimof[limits[nrfiles-1]:limits[nrfiles]]) - for i in range(nrfiles): - tmpfile = "archive_"+stream+'_'+str(dimof[0])+"_part"+str(i+1) - if idx: tmpfile += '_%d'%(idx,) - tmpfile += ".h5" - with h5reader.File(tmpfile, 'w') as f: - g = f.create_group('data') # requires [row,col,time] - g.create_dataset('timestamps', data=list(times[i]), dtype='uint64')#,compression="gzip") - dset=g.create_dataset(stream,shape=(shapi[0],shapi[1],limits[i+1]-limits[i]),dtype='uint16',chunks=(shapi[0],shapi[1],1))#,compression='gzip') - if get_P<5*64:#check python version of 32 or 64 bit to decide which method should be used - c=0 - for n in range(limits[i+1]-limits[i]): - if len(double_list)==0 or (limits[i]+n not in double_list): - dset[:,:,n-c]=f5[key][:,:,limits[i]+n] - else: - c+=1 - else: - dummy=np.zeros((shapi[0],shapi[1],limits[i+1]-limits[i]),dtype=f5[key].dtype) - f5[key].read_direct(dummy,np.s_[:,:,limits[i]:limits[i+1]]) - if len(double_list)==0: - fine=True - else: - fine=False - part_double_list=[] - for ele in double_list: - if ele>limits[i] and ele <limits[i+1]: - part_double_list.append(ele-limits[i]) - if not fine: - print("deleting double frames in part "+str(i)) - dummy=np.delete(dummy,part_double_list,2) - dummy=np.ascontiguousarray(dummy) - dset.write_direct(dummy) - tmpfiles.append(tmpfile) - return tmpfiles - - -#def writeH5_from_2_Files(stream,filename1,filename2,key,dimof,idx=None): -# """ -# path=Path, data=numpy.array, dimof=list of long -# data.shape = (width,height,time) or (width,height) if len(dim)==1 -# """ -## stream = path.stream -## dtype = str(data.dtype) -# f5=h5reader.File(filename1,'r') -# f5_2=h5reader.File(filename2,'r') -# tmpfile = "archive_"+stream+'_'+str(dimof[0]) -# if idx: tmpfile += '_%d'%(idx,) -# tmpfile += ".h5" -## if data.ndim<3: -## data = data.reshape(list(data.shape)+[1]) -## data = data.swapaxes(0,1) # (width,height,time) -> (row,col,time) -# with h5reader.File(tmpfile, 'w') as f: -# g = f.create_group('data') # requires [row,col,time] -# g.create_dataset('timestamps', data=list(dimof), dtype='uint64',compression="gzip") -# f5.copy(key,f['/data'],name=stream) -## g.create_dataset(stream, data=data.tolist(), dtype=dtype,compression="gzip") -# return tmpfile - -#################################### -### END of the UPLOAD ROUTINES ### -#################################### - -def check_dublicates(array): - a = array - import collections - return [item for item, count in collections.Counter(a).items() if count > 1] - -def check_dublicates_2(array): - seen = set() - uniq = [] - for x in array: - if x not in seen: - uniq.append(x) - seen.add(x) - return uniq,seen - -def check_timevector_framenumbers(timevector,framenumbervector): - wrong=False - list_of_double_frames_to_delete=[] - try: - timelist=check_dublicates(timevector) - framelist=check_dublicates(framenumbervector) - if len(timelist)==0 and len(framelist)==0: - return False,[] - else:### okay somewhere it is wrong but where? 
-def check_extra_frames_FLIR(timevector,framenumbervector):
-    """
-    detects extra frames before a large time gap (>30 s) at the beginning of a
-    FLIR time vector; returns (wrong, list_of_double_frames_to_delete)
-    """
-    wrong=False
-    list_of_double_frames_to_delete=[]
-    for i in range(1,len(timevector)):
-        if (timevector[i]-timevector[i-1])/1e9>30:
-            if i>10:
-                raise Exception("the time gap was found later than 10 frames in, stopping here")
-            else:
-                wrong=True
-                for j in range(i):
-                    list_of_double_frames_to_delete.append(j) # mark all frames before the gap for deletion
-    return wrong,list_of_double_frames_to_delete
-
-
-"""
-read_restdb copied from read_restdb.py by Henning Thomsen
-"""
-
-def read_restdb(request_url):
-    """
-    Reads JSON data from the W7-X REST API.
-    Returns:
-    valid: access ok
-    t: numpy array of time
-    signal: numpy array of the requested signals
-
-    To get NaN and Infinity as their counterparts in Python (not 'Null'),
-    add the string +'&allowNaN=true' to the request.
-    """
-    try:
-        res = urllib.request.urlopen(request_url)
-        signal_list = json.loads(res.read().decode('utf-8'))
-        res.close()
-    except urllib.error.URLError:
-        return False, [0], [-1]
-    else:
-        try:
-            signal0=np.array(signal_list['values'])
-            t=np.array(signal_list['dimensions'])
-        except ValueError as e:
-            print(signal_list['status'])
-            print(e)
-            return False, [0], [-2]
-        else:
-            return True, np.squeeze(t), np.squeeze(np.double(signal0))
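-
-# Commented usage sketch for read_restdb; the URL is a placeholder for a real
-# signal address in the archive, with allowNaN requested as described above.
-#request_url="http://archive-webapi.ipp-hgw.mpg.de/ArchiveDB/..."+"&allowNaN=true" # placeholder
-#valid,t,signal=read_restdb(request_url)
-#if valid:
-#    print(t.shape,signal.shape)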