diff --git a/Create_HDF5_from_archive_data.py b/Create_HDF5_from_archive_data.py
index 731c6e81f9bea1807fa0fe4ba42e9086176638f3..2d108482c4cee4a604578a8abf36725d46e7e9de 100644
--- a/Create_HDF5_from_archive_data.py
+++ b/Create_HDF5_from_archive_data.py
@@ -11,15 +11,15 @@ if __name__=='__main__':
     import downloadversionIRdata as IR_DL
     import numpy as np
     localpath=""
-    time_s=13
+    time_s=1.5
-    programlist = ["20171121.013"]
-    ports=[10]
+    programlist = ["20171010.025"]
+    ports=[50]
     for program in programlist:
         for port in ports:
             try:
-                exist,dimof,data1,valid=IR_DL.get_temp_from_raw_by_program(portnr=port,program=program,time_s=time_s,threads=4,verbose=5)
+                exist,dimof,data1,valid=IR_DL.get_temp_from_raw_by_program(portnr=port,program=program,time_window=time_s,threads=4,verbose=5)
                 if exist:
                     print("data found")
                     stream="images"
diff --git a/IR_image_tools.py b/IR_image_tools.py
index 578c68a3a8bac3fd00ddec882f95786fc85c2b45..0b29c2cf23fee8dcd927324c2e177400d3a54fdd 100644
--- a/IR_image_tools.py
+++ b/IR_image_tools.py
@@ -850,7 +850,7 @@ def get_work_list(pipepath,typ="q"):
     if typ in ('q_old','load_old'):
         typ = typ.split("_")[0]
         f=open(pipepath+str(today.year)+str(today.month)+"_"+typ+"_requests.txt")
-    elif typ in ('q','load','qpeak','Aw','width'):
+    elif typ in ('q','load','qpeak','Aw','width','pf'):
         f=open(pipepath+"Auto_"+typ+"_requests.txt")
     else:
         reasons=[]
@@ -859,13 +859,13 @@ def get_work_list(pipepath,typ="q"):
         koline=line.split("\t")
         if len(koline)>1:
             prog = koline[0]
-            if typ in ('q','load','qpeak','Aw','width','q_old','load_old'):
+            if typ in ('q','load','qpeak','Aw','width','q_old','load_old','pf'):
                 cam_programs.append((prog,koline[1].split("\n")[0]))
             else:
                 cam_programs.append((prog,koline[1]))
                 reasons.append(koline[2].split("\n")[0])
     f.close()
-    if typ in ('q','load','qpeak','Aw','width','q_old','load_old'):
+    if typ in ('q','load','qpeak','Aw','width','q_old','load_old','pf'):
         bla=check_dublicates_2(cam_programs)
         cam_programs=bla[0]
     return cam_programs
@@ -911,7 +911,7 @@ def read_finger_info(file_name=None, OP='OP1.2b', verbose=0):
         elif OP.startswith('OP2'):
             file_name='finger_info_HHF.csv'
     full_path = os.path.join(parameter_file_path, file_name)
-    print(full_path)
+    #print(full_path)
     if verbose > 0:
         print('read_finger_info: reading from file {0} in {1}'.format(file_name, parameter_file_path))
     if not os.path.isfile(full_path):
diff --git a/downloadversionIRdata.py b/downloadversionIRdata.py
index ee75ee1b96aab19ff8d4c70fe37b29c1bb9f52d4..2a03bc3721e358ca9f818395f841dcaeefcd3cfc 100644
--- a/downloadversionIRdata.py
+++ b/downloadversionIRdata.py
@@ -4,7 +4,7 @@ Created on Wed Oct 25 15:51:46 2017
 updated on Tue Aug 21 10:20:00 2018
 last update on Fr Nov 23 15:37:00 2018
-Version: 3.4.4
+Version: 3.5.0
 (Numbering: #of big changes(OP1.2a download V1, OP1.2b download V2, heatflux V3) .
             #of updates to add functionalities
@@ -12,7 +12,7 @@ Version: 3.4.4
             #number of updates for bug fixes )
 @author: holn
 """
-version = "V3.4.4"
+version = "V3.5.0"
 import numpy as np
 import IR_image_tools as IR_tools
@@ -221,7 +221,7 @@ def read_program(timestamp_start, timestamp_end=0, tol=60):
 def download_LUT(port, time, exposure=0, emissivity=0, camera_filter=0, version=0, testmode=False, verbose=0):
     """
-    download_LUT(camera, port, time, exposure=0, emissivity=0, camera_filter=0, version=1):
+    download_LUT(port, time, exposure=0, emissivity=0, camera_filter=0, version=1):
     download of the look up table for the infrared cameras from the database for OP1.2(a+b)
     Have to swap 11, 21 until correction in the database
     INPUT
@@ -272,7 +272,7 @@ def download_LUT(port, time, exposure=0, emissivity=0, camera_filter=0, version=
         return False, 0 # raise Exception
     if version == 0:#version check, if 0
-        version = get_latest_version(portpathdict[OP]["AEF"+str(port)]+"LUT_DATASTREAM", t_from=time, testmode=testmode)
+        version = get_latest_version(portpathdict[OP]["AEF"+str(port)]+"LUT_PARLOG", t_from=time, testmode=testmode)
     if verbose > 0:
         print("download_LUT: LUT V"+str(version)+" is used")
     #time=int(fu.TimeToNs([2017,9,26], [8, 0, 0, 0]))
@@ -5586,7 +5586,7 @@ def download_divertor_load(port, targetmodule=None, program=None, tstart=None,
         'TM2V':(22,23, 'TM2v_load'), '2V':(22,23, 'TM2v_load'),
         'TM3V':(24,25, 'TM3v_load'), '3V':(24,25, 'TM3v_load')
         }
-    __download_module_based_data__(kind="load",TMchadict=TMchadict,port=port,
+    return __download_module_based_data__(kind="load",TMchadict=TMchadict,port=port,
                                    targetmodule=targetmodule,program=program, tstart=tstart, tend=tend,
                                    version=version, verbose=verbose, testmode=testmode, request=request,
                                    request_ALL=request_ALL)
@@ -5636,21 +5636,21 @@ def download_peak_heatflux(port, targetmodule=None, program=None, tstart=None,
         a list of values, the error of each timepoint, values are in W/m^2
     """
     TMchadict={
-        'ALL':(0,1, 'divertor_maximum_heatflux'), 'FULL':(0,1, 'divertor_maximum_heatflux'),
-        'TM1H':(2,3, 'TM1h_qpeak'), '1H':(2,3, 'TM1h_qpeak'),
-        'TM2H':(4,5, 'TM2h_qpeak'), '2H':(4,5, 'TM2h_qpeak'),
-        'TM3H':(6,7, 'TM3h_qpeak'), '3H':(6,7, 'TM3h_qpeak'),
-        'TM4H':(8,9, 'TM4h_qpeak'), '4H':(8,9, 'TM4h_qpeak'),
-        'TM5H':(10,11, 'TM5h_qpeak'), '5H':(10,11, 'TM5h_qpeak'),
-        'TM6H':(12,13, 'TM6h_qpeak'), '6H':(12,13, 'TM6h_qpeak'),
-        'TM7H':(14,15, 'TM7h_qpeak'), '7H':(14,15, 'TM7h_qpeak'),
-        'TM8H':(16,17, 'TM8h_qpeak'), '8H':(16,17, 'TM8h_qpeak'),
-        'TM9H':(18,19, 'TM9h_qpeak'), '9H':(18,19, 'TM9h_qpeak'),
-        'TM1V':(20,21, 'TM1v_qpeak'), '1V':(20,21, 'TM1v_qpeak'),
-        'TM2V':(22,23, 'TM2v_qpeak'), '2V':(22,23, 'TM2v_qpeak'),
-        'TM3V':(24,25, 'TM3v_qpeak'), '3V':(24,25, 'TM3v_qpeak')
+        'ALL':(0,1, 'maximum_divertor_heatflux'), 'FULL':(0,1, 'maximum_divertor_heatflux'),
+        'TM1H':(2,3, 'TM1h_peakflux'), '1H':(2,3, 'TM1h_peakflux'),
+        'TM2H':(4,5, 'TM2h_peakflux'), '2H':(4,5, 'TM2h_peakflux'),
+        'TM3H':(6,7, 'TM3h_peakflux'), '3H':(6,7, 'TM3h_peakflux'),
+        'TM4H':(8,9, 'TM4h_peakflux'), '4H':(8,9, 'TM4h_peakflux'),
+        'TM5H':(10,11, 'TM5h_peakflux'), '5H':(10,11, 'TM5h_peakflux'),
+        'TM6H':(12,13, 'TM6h_peakflux'), '6H':(12,13, 'TM6h_peakflux'),
+        'TM7H':(14,15, 'TM7h_peakflux'), '7H':(14,15, 'TM7h_peakflux'),
+        'TM8H':(16,17, 'TM8h_peakflux'), '8H':(16,17, 'TM8h_peakflux'),
+        'TM9H':(18,19, 'TM9h_peakflux'), '9H':(18,19, 'TM9h_peakflux'),
+        'TM1V':(20,21, 'TM1v_peakflux'), '1V':(20,21, 'TM1v_peakflux'),
+        'TM2V':(22,23, 'TM2v_peakflux'), '2V':(22,23, 'TM2v_peakflux'),
+        'TM3V':(24,25, 'TM3v_peakflux'), '3V':(24,25, 'TM3v_peakflux')
         }
-    __download_module_based_data__(kind="qpeak",TMchadict=TMchadict,port=port,
+    return __download_module_based_data__(kind="qpeak",TMchadict=TMchadict,port=port,
                                    targetmodule=targetmodule,program=program, tstart=tstart, tend=tend,
                                    version=version, verbose=verbose, testmode=testmode, request=request,
                                    request_ALL=request_ALL)
@@ -5700,15 +5700,16 @@ def __download_module_based_data__(kind, TMchadict, port, targetmodule=None,
     error: list
         a list of values, the error of each timepoint, values are in W
     """
+    print(kind)
     if kind in ["load","qpeak"]:
         if kind == "load":
             datcha_name = 'divertor_total_load'
             funname = "download_divertor_load"
             streamname = "loads"
         else:
-            datcha_name = 'divertor_maximum_heatflux'
+            datcha_name = 'maximum_divertor_heatflux'
             funname = "download_peak_heatflux"
-            streanname = ""
+            streamname = "peakflux"
     else:
         raise Exception("__download_module_based_data__: wrong input!")
     #interpretation of the targetmodule input, string or List of strings, numbers does not make sense here?
@@ -5777,7 +5778,7 @@ def __download_module_based_data__(kind, TMchadict, port, targetmodule=None,
         port = "AEF"+str(port)
     if version == 0:
         version = get_latest_version(portpathdict[OP][str(port)]+streamname+"_DATASTREAM", project=project_ana, t_from=tstart, t_to=tend, testmode=testmode)
-    larchivepath = base+project_ana+"/"+portpathdict[OP][str(port)]+streanname+"_DATASTREAM/V"+str(version)
+    larchivepath = base+project_ana+"/"+portpathdict[OP][str(port)]+streamname+"_DATASTREAM/V"+str(version)
     if type(datcha) == list:#case of several requested channels
         load = []
         error = []
@@ -5883,7 +5884,7 @@ def download_wetted_area(typ="total", program=None, tstart=None,
     reference divertors: list, optional if return_used_divertors
         if True, it will return a list with the port numbers of the cameras
     """
-    __download_derived_data__(kind="Aw",typ=typ,program=program,tstart=tstart,
+    return __download_derived_data__(kind="Aw",typ=typ,program=program,tstart=tstart,
                               tend=tend,version=version,verbose=verbose,
                               request=request,return_used_divertors=return_used_divertors)
@@ -5899,7 +5900,7 @@ def download_strikeline_width(typ="total", program=None, tstart=None,
     ------
     typ: string, optional, default 'total'
         name or description of the requested data, nothing will return strike-line width\n
-        options are: "total", "all", "upper", "lower", "qmaxUP", "qmaxDOWN"
+        options are: "total"(mean over all divertors), "all", "upper", "lower", "qmaxUP", "qmaxDOWN"
     program: str
         program id as str in the format yyyymmdd.pid
     tstart: integer
@@ -5929,7 +5930,7 @@ def download_strikeline_width(typ="total", program=None, tstart=None,
     reference divertors: list, optional if return_used_divertors
         if True, it will return a list with the port numbers of the cameras
     """
-    __download_derived_data__(kind="width",typ=typ,program=program,tstart=tstart,
+    return __download_derived_data__(kind="width",typ=typ,program=program,tstart=tstart,
                               tend=tend,version=version,verbose=verbose,
                               request=request,return_used_divertors=return_used_divertors)
@@ -5945,7 +5946,7 @@ def download_peaking_factor(typ="total", program=None, tstart=None,
     ------
     typ: string, optional, default 'total'
         name or description of the requested data, nothing will return strike-line width\n
-        options are: "total", "all", "upper", "lower", "qmaxUP", "qmaxDOWN"
+        options are: "total"(mean over all divertors), "all", "upper", "lower", "qmaxUP", "qmaxDOWN"
     program: str
         program id as str in the format yyyymmdd.pid
     tstart: integer
@@ -5975,7 +5976,7 @@ def download_peaking_factor(typ="total", program=None, tstart=None,
     reference divertors: list, optional if return_used_divertors
         if True, it will return a list with the port numbers of the cameras
     """
-    __download_derived_data__(kind="qpeak",typ=typ,program=program,tstart=tstart,
+    return __download_derived_data__(kind="PF",typ=typ,program=program,tstart=tstart,
                               tend=tend,version=version,verbose=verbose,
                               request=request,return_used_divertors=return_used_divertors)
@@ -6023,20 +6024,127 @@ def __download_derived_data__(kind, typ="total", program=None, tstart=None,
     reference divertors: list, optional if return_used_divertors
         if True, it will return a list with the port numbers of the cameras
     """
+    #defining what function this function is in reality
     if kind in ["Aw","PF","width","wetted_area","wetted area","peaking factor","peaking_factor","strike-line_width","strikeline_width","strike line width"]:
         if kind in ["Aw","wetted_area","wetted area"]:
             funname = "download_wetted_area"
             kind = "Aw"
+            stream_name = "wetted_area"
+            channelnames = {'total':(0,"total_wetted_area"),
+                            'upper':(1,"mean_upper_wetted_area"),
+                            'lower':(2,"mean_lower_wetted_area")
+                            }
         elif kind in ["PF","peaking factor","peaking_factor"]:
             funname = "download_peaking_factor"
-            kind = "qpeak"
+            kind = "pf"
+            stream_name = "peaking_factor"
+            channelnames = {'total':(0,"mean_peaking_factor"),
+                            'upper':(1,"mean_upper_peaking_factor"),
+                            'lower':(2,"mean_lower_peaking_factor")
+                            }
         else:
             funname = "download_strikeline_width"
-            kind = "width"
+            kind = "width"
+            stream_name = "strikeline_width"
+            channelnames = {'total':(0,"mean_strikeline_width"),
+                            'upper':(1,"mean_upper_strikeline_width"),
+                            'lower':(2,"mean_lower_strikeline_width")
+                            }
     else:
         raise Exception("__download_derived_data__: wrong input!")
-    print("to be implemented in version 3.5.0")
-    exist = False
+    channelnames['qmaxUP']=(3,"max_upper_heatflux")
+    channelnames['qmaxDOWN']=(4,"max_lower_heatflux")
+    #"total", "all", "upper", "lower", "qmaxUP", "qmaxDOWN"
+
+    # what is the time input?
+    if program == None and tstart == None:
+        if verbose > 0:
+            print("input for program and time invalid!")
+        return False, 0,-1,-1
+    elif program != None:
+        exist, _, tstart, tend = get_trigger_from_PID(program, testmode=False, verbose=verbose-1)
+        if not exist:
+            print(f"{funname}: Error! program not found")
+            return False, 0,-1,-1
+    else:
+        if tend == None:
+            program=AKF_2.get_program_id(tstart)
+            exist, _, _, tend = get_trigger_from_PID(program, testmode=False, verbose=verbose-1)
+            if not exist:
+                print(f"{funname}: Error! end trigger not found")
end trigger not found") + return False, 0,-1,-1 + # the real download part + print("Version 3.5.0: version test") + if typ in ['all','All','ALL']: + datcha = [] + datcha_name = [] + for key in ["total", "upper", "lower", "qmaxUP", "qmaxDOWN"]: + ele = channelnames[key] + datcha.append(ele[0]) + datcha_name.append(ele[1]) + else: + ele = channelnames[typ] + datcha = ele[0] + datcha_name = ele[1] + if version == 0: + version = get_latest_version(f"QRT_IRCAM/{stream_name}_DATASTREAM", project=project_ana, t_from=tstart, t_to=tend, testmode=False) + larchivepath = archivepath+project_ana+"/"+f"QRT_IRCAM/{stream_name}_DATASTREAM/V"+str(version) + if type(datcha) == list:#case of several requested channels + chadat = [] + exist = True + for i in range(len(datcha)): + data = read_restdb(larchivepath+"/"+str(datcha[i])+"/"+datcha_name[i]+"/_signal.json?from="+str(tstart)+"&upto="+str(tend)) + if i == 0: + if data[0]: + time = data[1] + else: + time = 0 + exist = False + if data[0]: + chadat.append(data[2]) + if len(datcha) != len(chadat): + exist = False + else:# case for a single channel + print(larchivepath+"/"+str(datcha)+"/"+datcha_name+"/_signal.json?from="+str(tstart)+"&upto="+str(tend)) + data = read_restdb(larchivepath+"/"+str(datcha)+"/"+datcha_name+"/_signal.json?from="+str(tstart)+"&upto="+str(tend)) + if data[0]: + print("data found") + exist = True + time = data[1] + chadat = data[2] + else: + exist = False + time = 0 + chadat = -1 + #the extra stuff + if return_used_divertors: + larchivepath_par = archivepath+project_ana+"/"+f"QRT_IRCAM/{stream_name}_PARLOG/V"+str(version) + try: + res = urllib.request.urlopen(larchivepath_par+"/_signal.json?from="+str(tstart)+"&upto="+str(tend)) + signal_list = json.loads(res.read().decode('utf-8')) + res.close() + goon=True + except urllib.error.URLError as e: + print('__download_derived_data__: Error! ', e, larchivepath_par+"/_signal.json?from="+str(tstart)+"&upto="+str(tend)) + goon=False + except Exception as E: + print('__download_derived_data__: Error! ', E) + goon=False + if not goon: + print("__download_derived_data__: Unable to download the used divertor list") + portlist = [] + else: + try: + portlist = signal_list['values'][0]['header']['divertor_ports'] + portlist = portlist.split('[')[1].split("]")[0].split(",") + for i in range(len(portlist)): + portlist[i]=int(portlist[i]) + except: + portlist = [] + print(signal_list['values'][0]['header']) + + # the request part + if exist == False: now = datetime.datetime.now() if verbose > 0: @@ -6049,7 +6157,7 @@ def __download_derived_data__(kind, typ="total", program=None, tstart=None, print(f'{funname}: Error! ', E) programid=str(tstart) cam_progs=IR_tools.get_work_list(heatflux_requestlist_path, typ=kind) - cam_progs_ig,reasons=IR_tools.get_work_list(heatflux_requestlist_path, typ='ignore') + cam_progs_ig,reasons=IR_tools.get_work_list(heatflux_requestlist_path, typ='ignore') cam_p = cam_progs+cam_progs_ig portsblocked = 0 blockedports=[] @@ -6070,6 +6178,10 @@ def __download_derived_data__(kind, typ="total", program=None, tstart=None, for port in blockedports: pid = cam_progs_ig.index((programid, str(port))) print(now, f"{funname}: request ignored, data not available, reason: {0}".format(reasons[pid])) + if return_used_divertors: + return exist, time, chadat, portlist + else: + return exist, time, chadat def download_scene_model(port, program=None, timepoint=None, version=0, testmode=False, verbose=0):