# -*- coding: utf-8 -*-
"""
Created on Wed Oct 25 15:51:46 2017
updated on Tue Aug 21 10:20:00 2018
last update on Fri Nov 23 15:37:00 2018
Version: 3.4.1
(Numbering: # of big changes (OP1.2a download V1, OP1.2b download V2, heatflux V3)
          . # of updates to add functionalities
          . # of updates for bug fixes)
@author: holn
"""
version = "V3.4.1"

import numpy as np
import IR_image_tools as IR_tools
from IR_config_constants import archivepath, testarchivepath, portcamdict, camlimdict, \
    valid_FOV_circle, valid_background_rectangle, \
    TC_port, TC_channel, IRCamColdframes_fittingpath, \
    exJet, portpathdict, project, project_ana, \
    heatflux_requestlist_path  # , exJet_trans
# try:
#     import W7Xrest.read_restdb as AKF_1
# except:
#     import read_restdb as AKF_1
import datetime
import urllib.request
import urllib.error
import logging
import json
from PIL import Image
from io import BytesIO
from os.path import join
import matplotlib.pyplot as plt

try:
    import threading
    import w7xarchive as AKF_2
    # disable deprecation warnings, because of timezone warning spam
    import warnings
    warnings.filterwarnings("ignore", category=DeprecationWarning)
    fastDL = True
except Exception as E:
    print(E)
    fastDL = False

try:
    config_path = "\\\\sv-e4-fs-1\\E4-Mitarbeiter\\E4 Diagnostics\\QIR\\"\
                  "Software\\QI-RealTime\\1.0.0\\release\\QIR-IrAnalysis\\Config\\Thermal calibration\\"
    FOV = plt.imread(config_path+portcamdict['OP1.2a']['AEF'+str(10)]+"\\"+"AEF"+str(10)+"_FOV.png")[:, :, 0]
    del FOV
except:
    config_path = ""


def get_latest_version(stream, project="W7X", testmode=False, t_from=None, t_to=None, program=None, verbose=0):
    """
    Find out the latest version of a given stream.

    INPUT
    ------
        stream: string
            url stream of interest
        project: string, optional, default 'W7X'
            the main tree in the database
        testmode: boolean, optional, default False
            whether the ArchiveDB or the Test Archive should be used for the request
        t_from: uint64, optional
            timestamp in nanoseconds from where the data version should be identified
        t_to: uint64, optional
            timestamp in nanoseconds up to where the data version should be identified
        program: string, optional
            program id as a string in the form of "yyyymmdd.pid"
        verbose: integer, optional, default 0
            defines the output level, 1 only this function, 2 this and one level below functions etc.

    RESULT
    ------
        version: integer or None
            version number as an integer, or None for a non-versioned or non-existing stream

    author: G. Schlisio, edit by holn
    """
    if testmode:  # check which archive should be used
        base = testarchivepath
    else:
        base = archivepath
    if program is None and t_from is None and t_to is None:  # check whether any time information was given
        request = urllib.request.Request(base+project+"/" + stream + "/_versions.json",
                                         headers={"Accept": "application/json"})
    else:
        if program is not None:
#            prog=get_program_from_PID(program)
#            if prog[0]:
#                t_from=prog[1]['trigger']['0'][0]
#                t_to=prog[1]['trigger']['6'][0]
            exist, t_from, _, t_to = get_trigger_from_PID(program, None, testmode, verbose=verbose-1)
            if not exist:
                raise Exception("Cannot identify the Program")
        elif t_from is not None and t_to is None:
            t_to = int(t_from+100e9)
        elif t_from is None and t_to is not None:
            t_from = int(t_to-10e9)
        request = urllib.request.Request(base+project+"/" + stream + "/_versions.json?from="+str(t_from)+"&upto="+str(t_to),
                                         headers={"Accept": "application/json"})
    try:
        response = urllib.request.urlopen(request)
        d = json.loads(response.read().decode('utf-8'))
    except urllib.error.HTTPError as ex:
        msg = ex.read()
        raise RuntimeError(msg)
    except Exception as E:
        raise RuntimeError(E)
    else:
        response.close()
    # detect unversioned or non-existing stream
    if d["versionInfo"] == []:
        return None
    versions = []
    for i in d['versionInfo']:
        versions.append(i['number'])
    return max(versions)

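
# Example (illustrative sketch): query the newest version of a camera datastream.
# The stream name is assembled from portpathdict in the same way the download
# functions below do it; the port number and operation phase are assumed values.
def _example_latest_lut_version(port=10, OP="OP1.2b"):
    """Sketch: return the newest LUT version for one AEF camera port."""
    stream = portpathdict[OP]["AEF"+str(port)]+"LUT_DATASTREAM"
    # without time constraints the whole version history of the stream is queried
    return get_latest_version(stream)
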

def TimeToNs(date, time):
    """
    TimeToNs(date, time)

    INPUT
    ------
        date: list
            [year, month, day]
        time: list
            [hours, minutes, seconds, microseconds]

    RESULT
    ------
        nsdate: integer
            nanoseconds since 1.1.1970 0:00:00
    """
    date_time = datetime.datetime(date[0], date[1], date[2], time[0], time[1], time[2], time[3])
    div = date_time-datetime.datetime(1970, 1, 1, 0, 0, 0)
    nsdate = div.total_seconds()*1e9
    return int(nsdate)


def read_program(timestamp_start, timestamp_end=0, tol=60):
    """
    read_program() downloads the information from the W7-X ArchiveDB and
    provides the program number and the triggers.

    INPUT
    ------
        timestamp_start: integer
            start time in nanoseconds
        timestamp_end: integer, optional, default is 0
            time to stop the search in nanoseconds
        tol: integer, optional, default is 60
            tolerance in seconds around the timestamp_start for searching the
            program in the W7-X database

    RESULT
    ------
        exist: boolean
            True if data was found, False if not
        program: dictionary
            W7-X program information
    """
    program_url = 'http://archive-webapi.ipp-hgw.mpg.de/programs.json?from'
    if timestamp_end == 0:
        timestamp_end = timestamp_start
    jsonurl = '{0}={1}&upto={2}'.format(program_url, int(timestamp_start-tol*1e9), int(timestamp_end+tol*1e9))
    try:
        res = urllib.request.urlopen(jsonurl)
        prog_raw = res.read()
        res.close()
    except urllib.error.URLError as e:
        print('read_program: Error opening URL')
        print(e)
        return False, 0
    except Exception as e:
        print('read_program: Error opening URL', e)
        return False, 0
    else:
        prog_string = prog_raw.decode(encoding='UTF-8')
        prog_list = json.loads(prog_string)
        pl = prog_list['programs'][0]
        return True, pl

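
# Example (illustrative sketch): combine TimeToNs() and read_program() to find
# the W7-X program recorded around a wall-clock time. The date, time and
# tolerance below are assumed values, not taken from a real discharge.
def _example_find_program():
    """Sketch: return the program id and trigger dictionary around a given time."""
    t_ns = TimeToNs([2017, 11, 9], [10, 30, 0, 0])   # 2017-11-09 10:30:00.000000
    exist, prog = read_program(t_ns, tol=60)         # +/- 60 s search window
    if exist:
        return prog['id'], prog['trigger']
    return None, None
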

def download_LUT(port, time, exposure=0, emissivity=0, camera_filter=0, version=0, testmode=False, verbose=0):
    """
    download_LUT(port, time, exposure=0, emissivity=0, camera_filter=0, version=0, testmode=False, verbose=0)
    download of the look-up table for the infrared cameras from the database for OP1.2(a+b).
    Has to swap ports 11 and 21 until this is corrected in the database.

    INPUT
    ------
        port: integer
            number of the AEF camera port
        time: integer
            nanosecond timestamp for the LUT
        exposure: integer
            requested exposure time in microseconds
        emissivity: float, string or integer
            emissivity value to identify which LUT is requested
        camera_filter: integer or string, only needed for port 50
            filter number of the Infratec camera in port AEF50
        version: integer, optional, default 0
            Version of the data in the archiveDB, in case of 0 the highest version will be used
        testmode: boolean, optional, default False
            if True, the data will be loaded from the test archive
        verbose: integer, optional, default 0
            feedback level (details of print messages)

    RESULT
    ------
        exist: boolean
            True if the look-up table was found, False if not
        LUT: list
            contains the calibration information: [digital level, temperature, temperature error]

    NOTE
    ------
        function needs to be adapted for OP2, if AEK ports come into operation
    """
    OP = IR_tools.get_OP_by_time(time_ns=time)  # getting operation phase
    if port == 21:  # swapping both ports (21 and 11)
        port = 11
    elif port == 11:
        port = 21
    if testmode:  # check which archive should be used
        larchivepath = testarchivepath+project+"/"+portpathdict[OP]["AEF"+str(port)]+"LUT_"
    else:
        larchivepath = archivepath+project+"/"+portpathdict[OP]["AEF"+str(port)]+"LUT_"
    # camera dependent request string generation
    if port == 50 and OP == "OP1.2a":  # camera=="INFRATEC" or camera=="infratec" or camera=="Infratec":
        query = "Filter_"+str(camera_filter)+"_Texp_"+str(int(exposure))+"us_e_"+str(float(emissivity))
    elif port in [10, 11, 20, 21, 30, 31, 40, 41, 51] or OP == "OP1.2b":  # camera=="IRCAM" or camera=="IRcam" or camera=="ircam":
        query = "Texp_"+str(int(exposure))+"us_e_"+str(float(emissivity))
    else:
#        print("download_LUT: Error! Camera unknown, stopping here.")
        logging.warning("download_LUT: Error! Camera unknown, stopping here.")
        return False, 0
#        raise Exception
    if version == 0:  # version check, if 0 use the latest one
        version = get_latest_version(portpathdict[OP]["AEF"+str(port)]+"LUT_DATASTREAM", t_from=time, testmode=testmode)
    if verbose > 0:
        print("download_LUT: LUT V"+str(version)+" is used")
    # time=int(fu.TimeToNs([2017,9,26], [8, 0, 0, 0]))
    LUTpar = read_restdb_old(larchivepath+"PARLOG/V"+str(version)+"/_signal.json?from="+str(time-10)+"&upto="+str(time+20))
    if LUTpar[0]:
        LUTid = LUTpar[2][0]['structure'][query]
        LUTs = read_restdb_old(larchivepath+"DATASTREAM/V"+str(version)+"/0/LUT/_signal.json?from="+str(time-10)+"&upto="+str(time+20))
        if LUTs[0]:
            LUTs = LUTs[2][0]  # .swapaxes(1,2)[0] fixed, somehow the archive now gives the data back in a swapped way
            LUT = [LUTs[0], LUTs[LUTid], LUTs[LUTid+1]]
            del LUTpar, LUTs
            return True, LUT
        else:
            print("download_LUT: Warning! unable to download the LUTs")
            del LUTpar, LUTs
            return False, 0
    else:
        del LUTpar
        print("download_LUT: Warning! unable to find LUTs, check your request")
        return False, 0

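
# Example (illustrative sketch): download_LUT() returns [digital level,
# temperature, temperature error]; a typical use is interpolating a NUCed
# digital-level image onto the temperature axis. Port, timestamp, exposure
# and emissivity below are assumed values.
def _example_counts_to_temperature(digital_level_image):
    """Sketch: map digital levels to temperatures via the downloaded LUT."""
    t_ns = TimeToNs([2017, 11, 9], [10, 30, 0, 0])
    exist, LUT = download_LUT(port=10, time=t_ns, exposure=50, emissivity=0.8)
    if not exist:
        return None
    # LUT[0]: digital level axis, LUT[1]: temperature, LUT[2]: temperature error
    return np.interp(digital_level_image, LUT[0], LUT[1])
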

def read_LUT_from_file(port, this_time, t_exp, emissivity, cfilter, verbose=0):  # emissivity_steel=0.31,
    """
    read_LUT_from_file(port, this_time, t_exp, emissivity, cfilter, verbose=0)
    Read LUT V3 from local files for testing new calibration.

    INPUT
    ------
        port: integer or string
            the port number of the AEF port
        this_time: integer
            nanosecond timestamp for the LUT
        t_exp: integer
            requested exposure time in microseconds
        emissivity: float, string or integer
            emissivity value to identify which LUT is requested
        cfilter: integer or string, only needed for port 50
            filter number of the Infratec camera in port AEF50
        verbose: integer, optional, default 0
            feedback level (details of print messages)

    RESULT
    ------
        exist: boolean
            True if the look-up table was found, False if not
        LUT: list
            contains the calibration information: [digital level, temperature, temperature error]

    NOTE
    ------
        function needs to be adapted for OP2, if AEK ports come into operation
    """
    LUT_dir = '\\\\sv-e4-fs-1\\E4-Mitarbeiter\\E4 Diagnostics\\QIR\\testingLUT'
    OP = IR_tools.get_OP_by_time(time_ns=this_time)
    cam_name = portcamdict[OP]['AEF{0}'.format(port)]
    filename = '{0}_thermal_LUT_filter_{1}_texp_{2}us_emiss_{3:.2f}.json'.format(cam_name, cfilter, t_exp, emissivity)
    # data = json.loads(join(LUT_dir, filename).decode('utf-8'))
    try:
        with open(LUT_dir+"\\"+filename) as data_file:
            jfile = json.load(data_file)
        LUT = np.array(jfile['LUT']).swapaxes(0, 1)
        if verbose > 0:
            print("read_LUT_from_file: successfully loaded V3 LUT from local directory")
        return True, LUT
    except Exception as E:
        print("read_LUT_from_file: ERROR in loading V3 LUTs", E)
        return False, []


def download_NUC_by_program(port, program, exposure, version=0, testmode=False, verbose=0):
    '''
    download the NUC data for a given port, program and exposure time of the camera

    INPUT
    ------
        port: integer or string
            port number of the AEF port of the camera
        program: string
            program id in the form of 'YYYYMMDD.XXX', e.g. '20181016.016'
        exposure: integer
            exposure time in microseconds
        version: integer, optional, default 0
            Version of the data in the archiveDB, in case of 0 the highest version will be used
        testmode: boolean, optional, default False
            if True, the data will be loaded from the test archive
        verbose: integer, optional, default 0
            feedback level (details of print messages)

    RESULT
    ------
        exist: boolean
            True if NUC data was found, False if not
        NUC: list
            a list containing images of gain, offset, cold, badpixels, gain_error and offset_error
        NUC_describtion: list
            a list describing which entry in NUC is what, looks like
            ['gain', 'offset', 'cold', 'badpixels', 'gain_error', 'offset_error']

    NOTE
    ------
        function needs to be adapted for OP2, if AEK ports come into operation
    '''
#    prog=get_program_from_PID(program)
#    try:
#        t_program = AKF_2.get_program_from_to(program)
#        prog =AKF_2.get_program_list(t_program[0], t_program[1])
#        if prog[0]:
#            starttime=prog[1]['trigger']['0'][0]
#            stoptime=prog[1]['trigger']['1'][0]
    exist, starttime, stoptime, _ = get_trigger_from_PID(program, port, testmode, verbose=verbose-1)
    if exist:
        return download_NUC_by_times(port, starttime, stoptime, exposure, version=version, verbose=verbose)
    else:  # except:
        print("download_NUC_by_program: cannot find the program")
        return False, 0, 0

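
# Example (illustrative sketch): download_NUC_by_program() returns
# (exist, NUC, NUC_describtion); the description list tells which image sits
# at which index. Port, program id and exposure time are assumed values.
def _example_fetch_nuc():
    """Sketch: fetch gain, offset and cold images for one program."""
    exist, NUC, desc = download_NUC_by_program(port=20, program='20181016.016', exposure=50)
    if not exist:
        return None
    gain = NUC[desc.index('gain')]
    offset = NUC[desc.index('offset')]
    cold = NUC[desc.index('cold')]
    return gain, offset, cold
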

def download_NUC_by_times(port, starttime, stoptime, exposure, testmode=False, version=0, verbose=0):
    '''
    download the NUC data for a given port, start and end time in ns and exposure time of the camera

    INPUT
    ------
        port: integer or string
            port number of the AEF port of the camera
        starttime: integer
            time in ns where the search should start
        stoptime: integer
            time in ns where the search should end
        exposure: integer
            exposure time in microseconds
        version: integer, optional
            Version of the data in the archiveDB, in case of 0 the highest version will be used
        testmode: boolean, optional
            if True, the data will be loaded from the test archive
        verbose: integer, optional
            feedback level (details of print messages)

    RESULT
    ------
        exist: boolean
            True if NUC data was found, False if not
        NUC: list
            a list containing images of gain, offset, cold, badpixels, gain_error and offset_error
        NUC_describtion: list
            a list describing which entry in NUC is what, looks like
            ['gain', 'offset', 'cold', 'badpixels', 'gain_error', 'offset_error']

    NOTE
    ------
        function needs to be adapted for OP2, if AEK ports come into operation
    '''
    OP = IR_tools.get_OP_by_time(time_ns=starttime)
    if testmode:
        larchivepath = testarchivepath+project+"/"+portpathdict[OP]["AEF"+str(port)]+"NUC_"
    else:
        larchivepath = archivepath+project+"/"+portpathdict[OP]["AEF"+str(port)]+"NUC_"
#    NUC_parlog=AKF_1.read_restdb_old(archivepath+"PARLOG/V"+str(version)+"/_signal.json?from="+str(starttime)+"&upto="+str(stoptime))
    if version == 0:
        version = get_latest_version(portpathdict[OP]["AEF"+str(port)]+"NUC_DATASTREAM", t_from=starttime, testmode=testmode)
    try:
        res = urllib.request.urlopen(larchivepath+"PARLOG/V"+str(version)+"/_signal.json?from="+str(starttime)+"&upto="+str(stoptime))
        signal_list = json.loads(res.read().decode('utf-8'))
        res.close()
        goon = True
    except urllib.error.URLError as e:
        print('download_NUC_by_times: Error! ', e)
        goon = False
    except Exception as e:
        print('download_NUC_by_times: Error! ', e)
        goon = False
    if goon:
        n = 0
        nuctimes = [0]
        for NUCpar in signal_list['values']:
            if NUCpar['structure']['Texp'] == exposure:
                nuctimes = [signal_list['dimensions'][2*n], signal_list['dimensions'][2*n+1]]
#                gain_i=NUCpar['structure']['gain_index']
                offset_i = NUCpar['structure']['offset_index']
            n += 1
        if nuctimes[0] != 0:
            NUC = read_restdb(larchivepath+"DATASTREAM/V"+str(version)+"/_signal.json?from="+str(nuctimes[0]-10)+"&upto="+str(nuctimes[1]+10))
            if NUC[0]:
                images = np.vsplit(NUC[2], np.shape(NUC[2])[0]/offset_i)
                return True, images, ['gain', 'offset', 'cold', 'badpixels', 'gain_error', 'offset_error']
            else:
                if verbose > 0:
                    print("download_NUC_by_times: NUC image for requested exposure time not found")
                return False, 0, 0
        else:
            if verbose > 0:
                print("download_NUC_by_times: NUC image for requested exposure time not found")
            return False, 0, 0
    else:
        return False, 0, 0


def get_NUC_by_program(port, program, exposure, version=0, testmode=False, verbose=0):
    '''
    Loads NUC elements (gain, offset, cold image, bad pixels) for an IR camera
    in a given port and time interval. Depending on time and camera, reconstruct
    the cold frame and/or compute the gain and offset from the hot and cold frame.

    INPUT
    ------
        port: integer or string
            port number of the AEF port of the camera
        program: string
            program id in the form of 'YYYYMMDD.XXX', e.g.
'20181016.016' exposure: integer exposure time in microseconds version: integer, optional Version of the data in the archiveDB, in case of 0 the highest version will be used testmode: boolean, optional if True, the data will be loaded from the test archive verbose: integer, optional feedback level (details of print messages) RESULT ------ exist: boolean True if NUC data was found, False if not NUC: list a list vontaining images of gain, offset, cold, badpixels, gain_error and offset_error NUC_describtion: list a list which entry in NUC is what, looks like ['gain', 'offset', 'cold', 'badpixels', 'gain_error', 'offset_error'] NOTE ------ function need to be adapted for OP2, if AEK ports come into opperation ''' # prog=get_program_from_PID(program) # try: # t_program = AKF_2.get_program_from_to(program) # prog =AKF_2.get_program_list(t_program[0], t_program[1]) # if prog[0]: # starttime=prog[1]['trigger']['0'][0] # stoptime=prog[1]['trigger']['1'][0] exist, starttime, stoptime, _ = get_trigger_from_PID(program, port, testmode, verbose=verbose-1) if exist: return get_NUC_by_times(port, starttime, stoptime, exposure, version=version, testmode=testmode, verbose=verbose) else: # except: print("get_NUC_by_program: Warning! cannot find the trigger timestamps for program", program) # return False, 0, 0 OP = IR_tools.get_OP_by_time(program_str=program) return get_NUC_by_times(port, starttime, stoptime, exposure, version=version, testmode=testmode, OP=OP, verbose=verbose) def get_NUC_by_times(port, starttime, stoptime, t_exp, version=0, testmode=False, OP=None, verbose=0): """ Loads NUC elements (gain, offset cold image, bad pixels) for an IR camera in a given port and time interval. Depending on time and camera, reconstruct the cold frame and/or compute the gain, offset from the hot and cold frame. the result is operation phase dependent, due to changes in the aquisition and saving programs INPUT ------ port: integer or string port number of the AEF port of the camera starttime: integer time in ns where the search should start stoptime: integer time in sn where the search should end exposure: integer exposure time in microseconds version: integer, optional Version of the data in the archiveDB, in case of 0 the highest version will be used testmode: boolean, optional if True, the data will be loaded from the test archive OP: string, optional, default None name of operation phase verbose: integer, optional feedback level (details of print messages) RESULT ------ exist: boolean True if NUC data was found, False if not NUC: list a list vontaining images of gain, offset, cold, badpixels, gain_error and offset_error NUC_describtion: list a list which entry in NUC is what, looks like ['gain', 'offset', 'cold', 'badpixels', 'gain_error', 'offset_error'] NOTE ------ function need to be adapted for OP2, if AEK ports come into opperation """ if OP is None: #get operation phase if not given OP = IR_tools.get_OP_by_time(time_ns=stoptime) if OP == "OP1.2a": # check for operation phase t1 = stoptime t0 = starttime prog = read_program(t1) program = prog[1]['id'] if (port == 31) or (port == 21 and float(program[4:]) > 1110):#camera 31 and 21 need after 20171110 new coldframes due to a open shutter during the recording of the coldframes if verbose > 0: print("get_NUC_by_times: rebuilding coldframe") #use any way the rebuilt coldframe. 
sT = get_sensor_temp_by_program(port, program, testmode=testmode)[2][0] # coldref, hotref = IR_tools.load_ref_images('AEF' + str(port), t_exp) gotit, hotcold, describtion = download_hot_cold_reference_by_times(port, t_exp, testmode=False) del describtion if gotit: coldref = hotcold[1] hotref = hotcold[0] else: # raise Exception("get_NUC_by_times: unable to download reference frames") logging.warning("get_NUC_by_times: unable to download reference frames") return False, 0, 0 filestring = 'AEF' + str(port) + '_et' + str(int(t_exp)) amap = np.load(join(IRCamColdframes_fittingpath, filestring + '_a.npy')) bmap = np.load(join(IRCamColdframes_fittingpath, filestring + '_b.npy')) cirebuild = IR_tools.reconstruct_coldframe(t_exp, sT, amap, bmap, coldref) if verbose > 0: print('get_NUC_by_times: calculate gain, offset and bad pixels') gain, offset = IR_tools.calculate_gain_offset_image(cirebuild, None, coldref, hotref, verbose=verbose-1) gain[gain == np.inf] = 0 offset[offset == np.inf] = 0 badpixels = find_badpixels(port, gain, offset, niterations=10, tolerance=10, verbose=verbose-1) gain_error = 0 offset_error = 0 return True, [gain, offset, cirebuild, badpixels, gain_error, offset_error], ['gain', 'offset', 'cold', 'badpixels', 'gain_error', 'offset_error'] elif (port == 11) or (port == 21): # case of the swaped cameras if verbose > 0: print('get_NUC_by_times: downloading NUC') # coldref, hotref = IR_tools.load_ref_images('AEF' + str(port), t_exp) gotit, hotcold, describtion = download_hot_cold_reference_by_times(port, t_exp, testmode=False) if gotit: coldref = hotcold[1] hotref = hotcold[0] else: # raise Exception("get_NUC_by_times: unable to download reference frames") logging.warning("get_NUC_by_times: unable to download reference frames") return False, 0, 0 NUC_DL = download_NUC_by_times(port, t0, t1, t_exp, version, testmode) if NUC_DL[0] == False: # raise Exception("get_NUC_by_times: NUC was not found") logging.warning("get_NUC_by_times: NUC was not found") return False, 0, 0 else: ci = NUC_DL[1][2] gain, offset = IR_tools.calculate_gain_offset_image(ci, None, coldref, hotref, verbose=verbose-1) gain[gain == np.inf] = 0 offset[offset == np.inf] = 0 badpixels = find_badpixels(port, gain, offset, niterations=10, tolerance=10, verbose=verbose-1) gain_error = 0 offset_error = 0 return True, [gain, offset, ci, badpixels, gain_error, offset_error], ['gain', 'offset', 'cold', 'badpixels', 'gain_error', 'offset_error'] else: #all other cases, the "normal" case if verbose > 0: print('get_NUC_by_times: downloading NUC') NUC_DL = download_NUC_by_times(port, starttime, stoptime, t_exp, testmode=testmode) #True, images, ['gain', 'offset', 'cold', 'badpixels', 'gain_error', 'offset_error'] if NUC_DL[0]: badpixels = find_badpixels(port,NUC_DL[1][0],NUC_DL[1][1],niterations=10, tolerance=10, verbose=verbose-1) NUC_DL[1][3]=badpixels return NUC_DL elif OP == "OP1.2b": gain_error = 0 offset_error = 0 gotit, hotcold, describtion = download_hot_cold_reference_by_times(port, t_exp, testmode=False) if gotit: coldref = hotcold[1] hotref = hotcold[0] else: # raise Exception("get_NUC_by_times: unable to download reference frames") logging.warning("get_NUC_by_times: unable to download reference frames") return False, 0, 0 ### get the cold frame: 1. 
get the metastream frametype and exposuretime and check in it for the positions, if -1 type is unknown, when if needs to be identified in a different way try: exist, _, coldframes = download_calibration_raw_files_by_time(port, t_exp, starttime, stoptime, frametype=0, version=0, testmode=testmode, verbose=verbose-1) except: exist = False if exist:#okay it exist, average the cold frames cold = np.zeros(np.shape(coldframes[0]), dtype=np.uint64) for ele in coldframes: cold += ele cold = np.asarray(cold/len(coldframes), dtype=np.uint16) else: print('get_NUC_by_times: Warning! no cold frames found. will use reference cold frame...') cold = np.copy(coldref) # return False, [0], [0] gain, offset = IR_tools.calculate_gain_offset_image(cold, None, coldref, hotref, verbose=verbose-1) badpixels = find_badpixels(port, gain, offset, niterations=10, tolerance=10, verbose=verbose-1) return True, [gain, offset, cold, badpixels, gain_error, offset_error], ['gain', 'offset', 'cold', 'badpixels', 'gain_error', 'offset_error'] else: # raise Exception("get_NUC_by_times: unknown Operation phase or NUC method not implemented for this OP") logging.warning("get_NUC_by_times: unknown Operation phase or NUC method not implemented for this OP") return False, 0, 0 def download_calibration_raw_files_by_time(port, t_exp, starttime, stoptime, frametype=0, version=0, testmode=False, verbose=0): """ download of the calibration raw files which are recorded between t0 and t1 trigger frametype: 0 for closed shutter frames (cold), 1 for open shutter frames (background) INPUT ------ port: integer port number of the AEF port of the camera t_exp: integer exposure time in microseconds starttime: integer time in ns where the search should start stoptime: integer time in sn where the search should end frametype: integer, default 0 defines which type of frames should be returned, 0 for cold frames, 1 for background frames version: integer, optional Version of the data in the archiveDB, in case of 0 the highest version will be used testmode: boolean, optional if True, the data will be loaded from the test archive verbose: integer, optional feedback level (details of print messages) RESULT ------ exist: boolean True if data was found, False if not time: list a list containing the time stamp of each frame frames: list a list of frames, 2D numpy arrays NOTE ------ function need to be adapted for OP2, if AEK ports come into opperation """ gotit, time_t, texp_t = get_exposure_by_times(port, int(starttime-100), int(stoptime-20e6), testmode=testmode) OP = IR_tools.get_OP_by_time(time_ns=stoptime) if gotit: expinds = np.where(texp_t == t_exp)[0] if len(expinds) == 0: logging.warning("download_calibration_raw_files_by_time: Error! cannot find the exposure time in the given data") return False, [0], [0] else: logging.warning("download_calibration_raw_files_by_time: Error! 
exposure time not found") return False, [0], [0] gotitf, timef, values_f = get_frametype_by_times(port, int(starttime-100), int(stoptime-20e6), testmode=testmode) if gotitf: typiinds = np.where(values_f[expinds] == frametype)[0] if len(typiinds) > 0: ref_t = [np.min(timef[expinds][typiinds]), np.max(timef[expinds][typiinds])] # print((ref_t[1]-ref_t[0])/1e9) # print(len(timef), len(timef[expinds]), len(timef[expinds][typiinds])) else:#okay the early data stuff or strange stuff if verbose > 0: print("download_calibration_raw_files_by_time: frame type was not identified assuming that the first part is the cold and the second one the background") frametimes = time_t[expinds] diftime = [frametimes[i]-frametimes[i-1] for i in range(1, len(frametimes))] turnpoint = np.where(np.asarray(diftime) > 11e6)[0][0] if frametype == 0: ref_t = [np.min(frametimes[0:turnpoint+1]), np.max(frametimes[0:turnpoint+1])] # print((ref_t[1]-ref_t[0])/1e9) elif frametype == 1: # print(len(frametimes[turnpoint+1:]), len(frametimes[0:turnpoint+1]), len(frametimes)) ref_t = [np.min(frametimes[turnpoint+1:]), np.max(frametimes[turnpoint+1:])] # print((ref_t[1]-ref_t[0])/1e9) else: # raise Exception("download_calibration_raw_files_by_time: requested Frametype unknown and not implemented!") logging.warning("download_calibration_raw_files_by_time: requested Frametype unknown and not implemented!") return False, [0], [0] else: print("download_calibration_raw_files_by_time: Error! frametype not found") frametimes = time_t[expinds] diftime = [frametimes[i]-frametimes[i-1] for i in range(1, len(frametimes))] turnpoint = np.where(np.asarray(diftime) > 11e6)[0][0] if frametype == 0: ref_t = [np.min(frametimes[0:turnpoint+1]), np.max(frametimes[0:turnpoint+1])] print((ref_t[1]-ref_t[0])/1e9) elif frametype == 1: # print(len(frametimes[turnpoint+1:]), len(frametimes[0:turnpoint+1]), len(frametimes)) ref_t = [np.min(frametimes[turnpoint+1:]), np.max(frametimes[turnpoint+1:])] print((ref_t[1]-ref_t[0])/1e9) else: # raise Exception("download_calibration_raw_files_by_time: requested Frametype unknown and not implemented!") logging.warning("download_calibration_raw_files_by_time: requested Frametype unknown and not implemented!") return False, [0], [0] # return False, [0], [0] t1date = datetime.datetime.utcfromtimestamp((stoptime-100)/1e9) t1date = t1date.isoformat() t0date = datetime.datetime.utcfromtimestamp((starttime-15e6)/1e9) t0date = t0date.isoformat() if version == 0: version = get_latest_version(portpathdict[OP]["AEF"+str(port)]+"raw_DATASTREAM", t_from=starttime, t_to=stoptime, testmode=testmode) if testmode: larchivepath = testarchivepath+project+"/"+portpathdict[OP]["AEF"+str(port)]+"raw_DATASTREAM/V"+str(version)+"/0/raw" else: larchivepath = archivepath+project+"/"+portpathdict[OP]["AEF"+str(port)]+"raw_DATASTREAM/V"+str(version)+"/0/raw" timest0 = AKF_2.get_time_intervals(larchivepath, t0date.replace("T", " "), t1date.replace("T", " ")) t_offset = timest0[-1][0]-time_t[0] if t_offset > 10e6: if verbose > 0: print("download_calibration_raw_files_by_time: time offset detected, try to correct this, offset is", t_offset) else: t_offset = 0 # print("starttime frames:", np.min(timest0), "starttime metachannels:", time_t[0], "offset", t_offset) return download_raw_images_by_times(port, ref_t[0]+t_offset, ref_t[1]+t_offset, testmode=testmode, verbose=verbose-1) def download_hot_cold_reference_by_times(port, exposure, starttime=1503907200000000000, testmode=False, version=0): """ Loads the most recent hot and cold calibration 
frames for a starttime. Uses first calibration frames if time is not defined. INPUT ------ port: integer or string port number of the AEF port of the camera, or string containing the full port name exposure: integer exposure time in microseconds starttime: integer time in ns where the search should start version: integer, optional Version of the data in the archiveDB, in case of 0 the highest version will be used testmode: boolean, optional if True, the data will be loaded from the test archive verbose: integer, optional feedback level (details of print messages) RESULT ------ exist: boolean True if data was found, False if not images: list a list vontaining images of gain, offset, cold, badpixels, gain_error and offset_error image_describtion: list a list which entry in NUC is what, looks like ['hot','cold'] NOTE ------ function need to be adapted for OP2, if AEK ports come into opperation """ OP = IR_tools.get_OP_by_time(time_ns=starttime) if isinstance(port, int): port = "AEF"+str(port) if testmode: larchivepath = testarchivepath+project+"/"+portpathdict[OP][port]+"raw_" else: larchivepath = archivepath+project+"/"+portpathdict[OP][port]+"raw_" # NUC_parlog=AKF_1.read_restdb_old(archivepath+"PARLOG/V"+str(version)+"/_signal.json?from="+str(starttime)+"&upto="+str(stoptime)) if version == 0: version = get_latest_version(portpathdict[OP][port]+"raw_DATASTREAM", t_from=starttime) try: path_string = larchivepath+"PARLOG/V"+str(version)+"/_signal.json?from="+str(starttime)+"&upto="+str(int(starttime+1e9)) res = urllib.request.urlopen(path_string) signal_list = json.loads(res.read().decode('utf-8')) res.close() goon = True except urllib.error.URLError as e: print('download_hot_cold_reference_by_times: Error! ', e) goon = False except Exception as e: print('download_hot_cold_reference_by_times: Error! ', e) goon = False if goon: COLDtime = 0 HOTtime = 0 try: COLDtime = signal_list['values'][0]['structure']['cold_'+str(int(exposure))+'us'] except: print("download_hot_cold_reference_by_times: cold image for requested exposure time not found") try: HOTtime = signal_list['values'][0]['structure']['hot_'+str(int(exposure))+'us'] except: print("download_hot_cold_reference_by_times: hot image for requested exposure time not found") images = [] if HOTtime != 0: HOT = read_restdb(larchivepath+"DATASTREAM/V"+str(version)+"/_signal.json?from="+str(HOTtime-10)+"&upto="+str(HOTtime+10)) if HOT[0]: images.append(HOT[2]) if COLDtime != 0: COLD = read_restdb(larchivepath+"DATASTREAM/V"+str(version)+"/_signal.json?from="+str(COLDtime-10)+"&upto="+str(COLDtime+10)) if COLD[0]: images.append(COLD[2]) if HOT[0] and COLD[0] and len(images) == 2: return True, images, ['hot', 'cold'] else: print("download_hot_cold_reference_by_times: Error! hot and cold image for requested exposure time not found") return False, 0, 0 else: return False, 0, 0 def download_background_by_program(port, program, exposure, camera_filter=0, version=0, testmode=False, verbose=0): ''' download the background from the AEF cameras in OP1.2a, which are recorded between t0 and t1 trigger returned exist, time, frame INPUT ------ port: integer port number of the AEF port of the camera program: string program id in the form of 'YYYYMMDD.XXX', e.g. 
'20181016.016' exposure: integer exposure time in microseconds camera_filter: integer or string, only needed for port 50 filter number of the Infratec camera in port AEF50 version: integer, optional Version of the data in the archiveDB, in case of 0 the highest version will be used testmode: boolean, optional if True, the data will be loaded from the test archive verbose: integer, optional feedback level (details of print messages) OP: string, optional, default None name of operation phase RESULT ------ exist: boolean True if data was found, False if not time: list a list containing the time stamp of each frame frames: list a list of frames, 2D numpy arrays NOTE ------ function need to be adapted for OP2, if AEK ports come into opperation ''' # prog=get_program_from_PID(program) # try: # t_program = AKF_2.get_program_from_to(program) # prog =AKF_2.get_program_list(t_program[0], t_program[1]) # if prog[0]: # starttime=prog[1]['trigger']['0'][0] # stoptime=prog[1]['trigger']['1'][0] exist, starttime, stoptime, _ = get_trigger_from_PID(program, port, testmode, verbose=verbose-1) if exist: return download_background_by_times(port, starttime, stoptime, exposure, camera_filter=camera_filter, version=version) else:#except: print("download_background_by_program: Error! cannot find the program {0}".format(program)) return False, 0, 0 def download_background_by_times(port, starttime, stoptime, exposure, camera_filter=0, version=0, testmode=False, verbose=0): """ download the background frames for a given time intervall in nanoseconds, defined by start and stop time INPUT ------ port: integer port number of the AEF port of the camera starttime: integer time in ns where the search should start stoptime: integer time in sn where the search should end exposure: integer exposure time in microseconds camera_filter: integer or string, only needed for port 50 filter number of the Infratec camera in port AEF50 version: integer, optional Version of the data in the archiveDB, in case of 0 the highest version will be used testmode: boolean, optional if True, the data will be loaded from the test archive verbose: integer, optional feedback level (details of print messages) RESULT ------ exist: boolean True if data was found, False if not time: list a list containing the time stamp of each frame frames: list a list of frames, 2D numpy arrays NOTE ------ function need to be adapted for OP2, if AEK ports come into opperation """ OP = IR_tools.get_OP_by_time(time_ns=starttime) if OP == "OP1.2a": stream = portpathdict[OP]["AEF"+str(port)]+"background_" if testmode: larchivepath = testarchivepath+project+"/"+portpathdict[OP]["AEF"+str(port)]+"background_" else: larchivepath = archivepath+project+"/"+portpathdict[OP]["AEF"+str(port)]+"background_" if version == 0: version = get_latest_version(stream+"DATASTREAM", t_from=starttime, testmode=testmode) try: res = urllib.request.urlopen(larchivepath+"PARLOG/V"+str(version)+"/_signal.json?from="+str(starttime)+"&upto="+str(stoptime)) signal_list = json.loads(res.read().decode('utf-8')) res.close() goon = True except urllib.error.URLError as e: print('download_background_by_times: Error! ', e) goon = False except Exception as e: print('download_background_by_times: Error! 
', e) goon = False if goon: n = 0 backtimes = [0] for backpar in signal_list['values']: if backpar['structure']['Texp'] == exposure: if port == 50:# camera=="INFRATEC" or camera=="infratec" or camera=="Infratec": if backpar['structure']['filter'] == camera_filter: backtimes = [signal_list['dimensions'][2*n], signal_list['dimensions'][2*n+1]] else: backtimes = [signal_list['dimensions'][2*n], signal_list['dimensions'][2*n+1]] n += 1 if backtimes[0] != 0: backdat = read_restdb(larchivepath+"DATASTREAM/V"+str(version)+"/_signal.json?from="+str(backtimes[0]-10)+"&upto="+str(backtimes[1]+10)) if backdat[0]: return backdat#[True, backdat[2]] else: print("download_background_by_times: Error! background image for requested exposure time(, filter) not found") return False, 0, 0 else: print("download_background_by_times: Error! background image for requested exposure time(, filter) not found") return False, 0, 0 else: return False, 0, 0 elif OP == "OP1.2b": NUC_DL = get_NUC_by_times(port, starttime, stoptime, t_exp=exposure, version=version, testmode=testmode, verbose=verbose-1)#download_NUC_by_times(port, t0, t1, t_exp, version) if NUC_DL[0] == False: # raise Warning("download_background_by_times: NUC was not found") logging.warning("download_background_by_times: NUC was not found") return False, 0, 0 else: # extract NUC elements gain = [] offset = [] gain = np.array(NUC_DL[1][0]) offset = np.array(NUC_DL[1][1]) # check quality of NUC elements gain[np.isnan(gain)] = 0 offset[np.isnan(offset)] = 0 offset[offset < -1e100] = 0 return get_NUCed_background_by_times(port=port, t0=starttime, t1=stoptime, t_exp=exposure, cfilter=camera_filter, gain=gain, offset=offset, version=version, verbose=verbose-1) def get_NUCed_background_by_times(port, t0, t1, t_exp, cfilter, gain, offset, version=0, testmode=False, plot_it=False, verbose=0): """ OP1.2b function download the background frame and applies the NUC onto the background, given by gain und offset it is an average over 30 frames. 
INPUT ------ port: integer port number of the AEF port of the camera t0: integer nanoseconds timestamp of the t0 trigger, start of the program t1: integer nanoseconds timestamp of the t1 trigger, start of the heating t_exp: integer exposure time in microseconds gain: numpy array the gain image from the calibration to convert the counts into digital level offset: numpy array the offset image from the calibration to convert the counts into digital level version: integer, optional Version of the data in the archiveDB, in case of 0 the highest version will be used testmode: boolean, optional if True, the data will be loaded from the test archive plot_it: boolean, optional, default False if True, the data will be plotted directly verbose: integer, optional feedback level (details of print messages) RESULT ------ exist: boolean True if data was found, False if not time: integer the timestamp of the background frame (firest timestamp of the 30 frames) background_frame: numpy array the background frame NOTE ------ function need to be adapted for OP2, if AEK ports come into opperation """ exist, btime, backgroundframes = download_calibration_raw_files_by_time(port, t_exp, t0, t1, frametype=1, version=version, testmode=testmode, verbose=verbose-1) camera = portcamdict["OP1.2b"]["AEF"+str(port)] if verbose > 50: print("camera is", camera) background = np.zeros(np.shape(backgroundframes[0]), dtype=np.uint64) for frame in backgroundframes: background += frame background = np.asarray(background/len(backgroundframes), dtype=np.uint16) if plot_it: plt.figure() plt.imshow(background, vmin=np.median(background)-500, vmax=np.median(background)+500) plt.title("background image unnuced") if not camera.split("_")[0] == "Infratec": background = apply_NUC([background], gain, offset)[0] if plot_it: plt.figure() plt.imshow(background, vmin=np.median(background)-500, vmax=np.median(background)+500) plt.title("background image nuced") return exist, btime[0], background def get_NUCed_coldframe_by_program(port, program, exposure=None, version=0, testmode=False, plot_it=False, verbose=0): """ Load the raw cold refences frame taken bofore this program and NUC it with the NUC of the previous program (or next one, if the previous does not exist). INPUT ----- port: int number of camera AEF port (e.g. 10, 11, 20,...) program: str experiment program identifier as a string of format 'DATE.PROG_NO', e.g. '20180904.015' exposure: float, optional camera exposure time in us (OPTIONAL: default is to take the exposure time of the first data frame) version: int, optional calibration version to be used (OPTIONAL: default is 0) plot_it: bool, optional switch of whether to plot intermediate results or not (OPTIONAL: deafult is NOT to plot) verbose: integer, optional feedback level (details of print messages) (OPTIONAL: if not provided, only ERROR output) RESULT ------ exist: bool indicator, of coldframe could be found coldframe: numpy array NUCed cold frame """ if exposure is None: try: exist, t, exposure = get_exposure_by_program(port, program, version=version, testmode=testmode) del t except: print('get_NUCed_coldframe_by_program: ERROR! no exposure time found!') return False, [] t_exp = exposure[0] else: t_exp = exposure if verbose > 0: print('get_NUCed_coldframe_by_program: using exposure time {0}'.format(t_exp)) try: exist, data, desc = get_NUC_by_program(port, program, t_exp, version=version, testmode=testmode, verbose=verbose-1) del desc except: print('get_NUCed_coldframe_by_program: ERROR! 
no cold frame found!') return False, [] if exist: cold_raw = data[2] porg_nr = int(program.split('.')[1]) ref_program = '{0}.{1:03d}'.format(program.split('.')[0], porg_nr-1) exist2, data, desc = get_NUC_by_program(port, ref_program, t_exp, version=version, testmode=testmode, verbose=verbose-1) if exist2: coldframe = apply_NUC([cold_raw], data[0], data[1])[0] if verbose > 0: print('get_NUCed_coldframe_by_program: using NUC of program {0}'.format(ref_program)) else: print('get_NUCed_coldframe_by_program: WARNING! no adjacent program found! --> cold frame is not NUCed') coldframe = cold_raw if plot_it: plt.figure(figsize=[8, 4]) plt.subplot(1, 2, 1) plt.imshow(cold_raw, vmin=np.percentile(cold_raw, 1), vmax=np.percentile(cold_raw, 99)) plt.colorbar() plt.title('averaged raw\nof {0}'.format(program)) plt.subplot(1, 2, 2) plt.imshow(coldframe, vmin=np.percentile(coldframe, 1), vmax=np.percentile(coldframe, 99)) plt.colorbar() plt.title('NUCed based on\ngain/offset from {0}'.format(ref_program)) plt.suptitle('cold frame of port {0}'.format(port)) plt.tight_layout(rect=[0, 0, 1, 0.95]) plt.show() else: print('get_NUCed_coldframe_by_program: ERROR! No cold frame found for program {0}!'.format(program)) coldframe = [] return exist, coldframe def download_raw_images_by_program(port, program, time_window=0, version=0, testmode=False, verbose=0): """ download the raw frames (counts) from the infrared cameras for a given program INPUT ------ port: integer port number of the AEF port of the camera program: string program id in the form of 'YYYYMMDD.XXX', e.g. '20181016.016' time_window: list, array, integer or float, optional, default 0 if integer or float: size of the window in seconds from t1 if list or array: start and endpoint of the window in seconds from t1 if 0 the full program will be downloaded (t1 until t6) version: integer, optional, default 0 Version of the data in the archiveDB, in case of 0 the highest version will be used testmode: boolean, optional, default False if True, the data will be loaded from the test archive verbose: integer, optional, default 0 feedback level (details of print messages) RESULT ------ exist: boolean True if data was found, False if not time: list a list containing the time stamp of each frame frames: list a list of frames, 2D numpy arrays NOTE ------ the timewindow will be substructured into intervalls, and each intervall will be downloaded one after the other to avoid a overfilling of the RAM. 
A to large intervall can still overflow the RAM """ exist, _, starttime, stoptime = get_trigger_from_PID(program, port, testmode, verbose=verbose-1) # prog=AKF_1.get_program_from_PID(program) # try: # t_program = AKF_2.get_program_from_to(program) # prog =AKF_2.get_program_list(t_program[0], t_program[1]) ## if prog[0]: # starttime=prog[0]['trigger']['1'][0] # stoptime=prog[0]['trigger']['6'][0] if exist: if type(time_window) == list or type(time_window) == np.ndarray: if len(time_window) > 2 or time_window[0] > 1000: # raise Exception("download_raw_images_by_program: invalid input for the time_window!") logging.warning("download_raw_images_by_program: invalid input for the time_window!") return False, 0, 0 tstart = time_window[0] tstop = time_window[1] if tstop 1: nrinterv = int(np.ceil((stoptime-starttime)/intervalSize)) if verbose > 0: print("download_images_by_times: time_window to large, splitting {0}s interval into {1} smaller fractions".format((stoptime-starttime)/1e9, nrinterv)) intervalls = [] for i in range(nrinterv): intervalls.append(int(starttime-10+i*intervalSize)) intervalls.append(stoptime) time = -1 allimages = 0 success = False for i in range(nrinterv): try: url = larchivepath+"/_signal.json?from="+str(intervalls[i])+"&upto="+str(intervalls[i+1]) if verbose > 2: print("download_images_by_times: reading from\n {0}".format(url)) res = urllib.request.urlopen(url) signal_list = json.loads(res.read().decode('utf-8')) res.close() images = [np.array(ele, dtype=typo) for ele in signal_list['values']] # signal=np.array(signal_list['values'],dtype=np.uint16) t = np.array(signal_list['dimensions']) del signal_list if i == 0: allimages = images.copy() time = t else: allimages = allimages+images[1:]#np.append(ressignal, signal,axis=0) time = np.append(time, t[1:]) del images, t success = True except urllib.error.URLError as e: print('download_images_by_times: Error in sub-interval {0}! {1}'.format(i, e)) except Exception as e: print('download_images_by_times: Error in sub-interval {0}! {1}'.format(i, e)) return success, time, allimages else: try: if verbose > 100: print(larchivepath+"/_signal.json?from="+str(starttime-10)+"&upto="+str(stoptime)) res = urllib.request.urlopen(larchivepath+"/_signal.json?from="+str(starttime-10)+"&upto="+str(stoptime)) signal_list = json.loads(res.read().decode('utf-8')) res.close() images = [np.array(ele, dtype=typo) for ele in signal_list['values']] # signal=np.array(signal_list['values'],dtype=np.uint16) t = np.array(signal_list['dimensions']) del signal_list return True, t, images except urllib.error.URLError as e: print('download_images_by_times: Error! ', e) return False, 0, -1 except Exception as e: print('download_images_by_times: Error! ', e) return False, 0, -1 if fastDL: def download_raw_images_by_program_via_png(port, program, time_window=0, version=0, threads=1, testmode=False, verbose=0): """ download of the raw images in png format instead of json for a given program. number of threads can be defined to download several pngs in parallel INPUT ------ port: integer port number of the AEF port of the camera program: string program id in the form of 'YYYYMMDD.XXX', e.g. 
'20181016.016' time_window: list, array, integer or float, optional, default 0 if integer or float: size of the window in seconds from t1 if list or array: start and endpoint of the window in seconds from t1 if 0 the full program will be downloaded (t1 until t6) version: integer, optional, default 0 Version of the data in the archiveDB, in case of 0 the highest version will be used threads: integer, default 1 number of parallel threads to download the images (approx. max. 2x number of cores and max. timeintervall/0.5 for best performance) testmode: boolean, optional, default False if True, the data will be loaded from the test archive verbose: integer, optional, default 0 feedback level (details of print messages) RESULT ------ exist: boolean True if data was found, False if not time: numpa array an array containing the time stamp of each frame frames: list a list of frames, 2D numpy arrays NOTE ------ """ # prog=get_program_from_PID(program) ## try: ## t_program = AKF_2.get_program_from_to(program) ## prog =AKF_2.get_program_list(t_program[0], t_program[1]) # if not prog[0]: # print("download_raw_images_by_program_via_png: Error! program not found, cannot download the data") # return False, [0], [0] # starttime=prog[1]['trigger']['1'][0] # stoptime=prog[1]['trigger']['6'][0] trigger_exist, _, starttime, stoptime = get_trigger_from_PID(program, port, testmode, verbose=verbose-1) if not trigger_exist: print("download_raw_images_by_program_via_png: Error! trigger not found, cannot download the data") OP = IR_tools.get_OP_by_time(time_ns=starttime) # get raw data from latest or requested version if version == 0: version = get_latest_version(portpathdict[OP]["AEF"+str(port)]+"raw_DATASTREAM", testmode=testmode, program=program) if testmode: larchivepath = testarchivepath+project+"/"+portpathdict[OP]["AEF"+str(port)]+"raw_DATASTREAM/V"+str(version)+"/0/raw" else: larchivepath = archivepath+project+"/"+portpathdict[OP]["AEF"+str(port)]+"raw_DATASTREAM/V"+str(version)+"/0/raw" return download_images_by_time_via_png(larchivepath, starttime, stoptime, time_window=time_window, threads=threads, version=version, verbose=verbose-1) def download_images_by_time_via_png(larchivepath, starttime, stoptime, time_window=0, threads=1, version=0, verbose=0, framerate='max'): """ download of the images in png format instead of json for a given timeintervall defined by start and stop time. number of threads can be defined to download several pngs in parallel INPUT ------ larchivepath: string local archivepath, the URL to download the images starttime: integer time in ns where the search should start stoptime: integer time in sn where the search should end time_window: list, array, integer or float, optional, default 0 if integer or float: size of the window in seconds from t1 if list or array: start and endpoint of the window in seconds from t1 if 0 the full program will be downloaded (t1 until t6) threads: integer, default 1 number of parallel threads to download the images (approx. max. 2x number of cores and max. timeintervall/0.5 for best performance) version: integer, optional, default 0 Version of the data in the archiveDB, in case of 0 the highest version will be used verbose: integer, optional, default 0 feedback level (details of print messages) framerate: string or integer, optional, default 'max' sets the framerate to downsample the data. normal rate is 100. 
RESULT ------ exist: boolean True if data was found, False if not time: numpa array an array containing the time stamp of each frame frames: list a list of frames, 2D numpy arrays NOTE ------ """ success = True if type(time_window) == list: tstart = time_window[0] tstop = time_window[1] if tstop < tstart: # raise Exception("download_images_by_time_via_png: t_stop before t_start") logging.warning("download_images_by_time_via_png: t_stop before t_start") return False, [0], [0] time_from = (starttime-100)+int(tstart*1E9) if stoptime < time_from: # raise Exception("download_images_by_time_via_png: time_window after stoptime") logging.warning("download_images_by_time_via_png: time_window after stoptime") return False, [0], [0] time_to = starttime+int(tstop*1E9) if stoptime < time_to: time_to = stoptime print('download_images_by_time_via_png: Warning! time_window cropped by end of data') # stdate=datetime.datetime.utcfromtimestamp((starttime-100)/1e9+tstart) # stdate=stdate.isoformat() # enddate=datetime.datetime.utcfromtimestamp((starttime)/1e9+tstop) # enddate=enddate.isoformat() else: # stdate=datetime.datetime.utcfromtimestamp((starttime-100)/1e9) # stdate=stdate.isoformat() time_from = starttime-100 if time_window == 0: time_to = stoptime # enddate=datetime.datetime.utcfromtimestamp(stoptime/1e9) # enddate=enddate.isoformat() else: time_to = starttime+int(time_window*1E9) # enddate=datetime.datetime.utcfromtimestamp((starttime)/1e9+time_window) # enddate=enddate.isoformat() #"2017-11-15 08:00:00" try: # times=AKF_2.get_time_intervals(larchivepath, stdate.replace("T", " "), enddate.replace("T", " ")) times = AKF_2.get_time_intervals(larchivepath, time_from, time_to) except Exception as E: # print('download_images_by_time_via_png: Error loading times from ', larchivepath, stdate.replace("T", " "), enddate.replace("T", " ")) print('download_images_by_time_via_png: Error loading times from {0} between [{1}, {2}]'.format(larchivepath, time_from, time_to)) # raise Warning(E) logging.warning(E) return False, [0], [0] if framerate != 'max': if verbose > 5: print("download_images_by_time_via_png: downsampling active; number of original frames:", len(times)) if type(framerate) == float or type(framerate) == int: realrate = 1e9/((times[0]-times[-1])[0]/len(times)) if framerate < realrate: steps = int(round(realrate/framerate, 0)) dummy = [] for i in range(0, len(times), steps): dummy.append(times[i]) times = dummy if verbose > 5: print("download_images_by_time_via_png: downsampling active; number of downsampeld frames:", len(times), steps) time = [] images = [] lnt = len(times) if threads == 1: for i in range(lnt): ele = times[lnt-1-i] imag = download_last_raw_image_by_time(larchivepath, ele[0]-10, ele[0]+10) if imag[0]: time.append(ele[0]) images.append(np.array(imag[1], dtype=np.uint16)) else: success = False return success, np.array(time), images else: tim = [] for i in range(lnt): tim.append(times[lnt-1-i][0]) intervalls = [] intervalSize = int(lnt/threads) for i in range(threads): intervalls.append(int(i*intervalSize)) intervalls.append(lnt) jobs = [] resultdict = [] for i in range(threads): if verbose > 0: print("download_images_by_time_via_png: Start Thread ", i+1) TH = download_images_thread(i, larchivepath, tim[intervalls[i]:intervalls[i+1]], resultdict, version) jobs.append(TH) TH.start() for p in jobs: p.join() if verbose > 0: print("download_images_by_time_via_png: all threads are done") order = [] for ele in resultdict: order.append(ele[0]) if 
len(np.where(np.asarray(ele[1])==False)[0])>0: success=False times = []#np.array(resultdict[order.index(0)][2]) images=[]#np.array(resultdict[order.index(0)][3]) for i in range(threads): images = images+resultdict[order.index(i)][3] times = times+resultdict[order.index(i)][2] resultdict[order.index(i)] = [] del resultdict return success, np.array(times), images class download_images_thread(threading.Thread): """ class for parallel download of images.\n contains an init function and a run function """ def __init__(self, threadID, larchivepath, times, resultlist, version=0): """ INPUT ------ theadID: integer identification number of the thread larchivepath: string local archivepath, the URL to download the images times: list the timestamps of each frames for the download resultlist: list list which is returned after the thread is done version: integer, optional, default 0 Version of the data in the archiveDB, in case of 0 the highest version will be used RESULT ------ resultlist: list list containing the ID, exist, time and the images NOTE ------ """ threading.Thread.__init__(self) self.threadID = threadID self.times = times self.archivepath = larchivepath self.resultlist = resultlist self.version = version def run(self): images = [] time = [] successes = [] for i in self.times: imag = download_last_raw_image_by_time(self.archivepath, i-10, i+10)#, version=self.version if imag[0]: images.append(np.array(imag[1],dtype=np.uint16)) time.append(i) successes.append(True) else: successes.append(False) self.resultlist.append([self.threadID, successes, time, images]) class convert_raw_to_temp_thread(threading.Thread): """ class for parallel download of images and parallel application the NUC and temperature conversion.\n contains an init function and a run function """ def __init__(self, larchivepath, times,resultlist, threadID, version=0, background=0, LUT=[[], [], []],refT=28.5, gain=0, offset=0, gain_error=0, offset_error=0, give_ERROR=False, FOV=0, badpixels=[], verbose=0): """ INPUT ------ larchivepath: string local archivepath, the URL to download the images times: list the timestamps of each frames for the download resultlist: list list which is returned after the thread is done theadID: integer identification number of the thread version: integer, optional, default 0 Version of the data in the archiveDB, in case of 0 the highest version will be used background: numpy array background image LUT: list the look up table, containing the digital level, the temperature and the temperature error refT: float reference temperature for the calibration gain: numpy array the gain image from the calibration to convert the counts into digital level offset: numpy array the offset image from the calibration to convert the counts into digital level gain_error: numpy array, optional the gain error image from the calibration to calculate the error, needed if give_ERROR=True offset_error: numpy array, optional the offset error image from the calibration to calculate the error, needed if give_ERROR=True give_ERROR: boolean switch to activate the error calculation and returning of the error FOV: numpy array field of view image, with 1 in the visible region and 0 in the dark region badpixels: list list of the bad pixels for the corrections verbose: integer, optional, default 0 feedback level (details of print messages) RESULT ------ resultlist: list list containing the ID, exist, time and the images NOTE ------ """ threading.Thread.__init__(self) self.threadID = threadID self.times = times self.archivepath = 
larchivepath self.resultlist = resultlist self.version = version self.gain = gain self.offset = offset self.background = background self.LUT = LUT self.refT = refT self.gain_error = gain_error self.offset_error = offset_error self.give_ERROR = give_ERROR self.FOV = FOV self.badpixels = badpixels self.verbose = verbose def run(self): """ INPUT ------ same as init RESULT ------ resultlist: list list containing the ID, exist, time and the images NOTE ------ """ images = [] time = [] successes = [] for i in self.times: imag = download_last_raw_image_by_time(self.archivepath, i-10, i+10)#, version=self.version if imag[0]: images.append(np.array(imag[1],dtype=np.uint16)) time.append(i) successes.append(True) else: successes.append(False) if self.give_ERROR: success, images, error_images = apply_calib_on_raw(images, self.background, self.LUT, self.refT, self.gain, self.offset, self.gain_error, self.offset_error, False, self.give_ERROR, verbose=self.verbose-1) else: success, images=apply_calib_on_raw(images, self.background, self.LUT, self.refT, self.gain, self.offset, self.gain_error, self.offset_error, False, self.give_ERROR, verbose=self.verbose-1) for i in range(len(images)): images[i] = (images[i]*self.FOV).astype(np.float32) if self.verbose > 0: print(datetime.datetime.now(), "convert_raw_to_temp_thread: correcting bad pixels") images = IR_tools.correct_images(images, self.badpixels, verbose=self.verbose-1) self.resultlist.append([self.threadID, successes, time, images]) class NUC_raw_thread(threading.Thread): """ class for parallel calculation of nuced raw images """ def __init__ (self, larchivepath, times,resultlist, threadID, version=0, background=0, LUT=[[], [], []],refT=28.5, gain=0, offset=0, gain_error=0, offset_error=0, give_ERROR=False, FOV=0, badpixels=[], verbose=0): """ INPUT ------ larchivepath: string local archivepath, the URL to download the images times: list the timestamps of each frames for the download resultlist: list list which is returned after the thread is done theadID: integer identification number of the thread version: integer, optional, default 0 Version of the data in the archiveDB, in case of 0 the highest version will be used background: numpy array background image LUT: list the look up table, containing the digital level, the temperature and the temperature error refT: float reference temperature for the calibration gain: numpy array the gain image from the calibration to convert the counts into digital level offset: numpy array the offset image from the calibration to convert the counts into digital level gain_error: numpy array, optional the gain error image from the calibration to calculate the error, needed if give_ERROR=True offset_error: numpy array, optional the offset error image from the calibration to calculate the error, needed if give_ERROR=True give_ERROR: boolean switch to activate the error calculation and returning of the error FOV: numpy array field of view image, with 1 in the visible region and 0 in the dark region badpixels: list list of the bad pixels for the corrections verbose: integer, optional, default 0 feedback level (details of print messages) RESULT ------ resultlist: list list containing the ID, exist, time and the images NOTE ------ """ threading.Thread.__init__(self) self.threadID = threadID self.times = times self.archivepath = larchivepath self.resultlist = resultlist self.version = version self.gain=gain self.offset=offset self.background=background self.LUT=LUT self.refT=refT self.gain_error=gain_error 
self.offset_error=offset_error self.give_ERROR=give_ERROR self.FOV=FOV self.badpixels=badpixels self.verbose=verbose def run(self): """ """ images=[] time=[] successes=[] for i in self.times: imag=download_last_raw_image_by_time(self.archivepath, i-10, i+10)#, version=self.version if imag[0]: images.append(np.array(imag[1],dtype=np.uint16)) time.append(i) successes.append(True) else: successes.append(False) if self.verbose > 0: print(datetime.datetime.now(), "NUC_raw_thread: applying NUC") if self.give_ERROR: images, error_images=apply_NUC(images, self.gain, self.offset, self.gain_error, self.offset_error, self.give_ERROR) else: images=apply_NUC(images, self.gain, self.offset, self.gain_error, self.offset_error, self.give_ERROR) for i in range(len(images)): images[i]=(images[i]*self.FOV).astype(np.float32) if self.verbose > 0: print(datetime.datetime.now(), "NUC_raw_thread: correcting bad pixels") images=IR_tools.correct_images(images, self.badpixels, verbose=self.verbose-1) self.resultlist.append([self.threadID, successes, time, images]) def download_raw_FLIR_images_via_png(t1, t6, time_window=0, version=0, threads=1, verbose=0, testmode=True): """ INPUT ------ t1: integer nanoseconds timestamp of the t1 trigger, start of the heating t6: integer nanoseconds timestamp of the t6 trigger, end of the program time_window: list, array, integer or float, optional, default 0 if integer or float: size of the window in seconds from t1 if list or array: start and endpoint of the window in seconds from t1 if 0 the full program will be downloaded (t1 until t6) version: integer, optional, default 0 Version of the data in the archiveDB, in case of 0 the highest version will be used threads: integer, default 1 number of parallel threads to download the images (approx. max. 2x number of cores and max. timeintervall/0.5 for best performance) verbose: integer, optional, default 0 feedback level (details of print messages) testmode: boolean, optional, default True if True, the data will be loaded from the test archive RESULT ------ exist: boolean True if data was found, False if not time: list a list containing the time stamp of each frame frames: list a list of frames, 2D numpy arrays mode: integer kind of data structure,\n 1= pre-sets stored in different lists\n; 2=presets stored interlaced in one list tsets: list contains the information which frame belongs to which pre-set versions: list contains the informations of the version of each pre-set NOTE ------ the FLIR camera is capable to run in superframing mode, recording frames with different exposure times in an interlaced mode. 
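    Up to four pre-sets (PS0 ... PS3) are handled, each recorded into its own
    datastream (see get_FLIR_Pset_times below). A typical call is, for example
    (a sketch; t1 and t6 are the nanosecond program triggers):

        exist, time, frames, mode, tsets, versions = \
            download_raw_FLIR_images_via_png(t1, t6, threads=4)
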
each exposure time frame is stored in a different file named after the pre-set """ if testmode: larchivepath=testarchivepath+project+"/QSR07_FLIR/AEK51_raw" else: larchivepath=archivepath+project+"/QSR07_FLIR/AEK51_raw" if type(time_window)==list: tstart=time_window[0] tstop=time_window[1] if tstop 0: print("download_raw_FLIR_images_via_png: found {0} frames in the different sets".format([np.shape(x) for x in timess])) ### we not know the times for all presets for the program, now we have to download them, if times is not empty exist, time, frames, tsets=download_FLIR_Psets(larchivepath, timess, threads, versions, verbose=verbose-1) elif mode==2:##the data is stored only in one datastream, at least this datastream exist # raise Exception("download_raw_FLIR_images_via_png: not implemented") logging.warning("download_raw_FLIR_images_via_png: not implemented") return False, [0], [0], 0, [0], [0] #download data and downlaod LUT # time=[] # images=[] # lnt=len(times) # if threads==1: # for i in range(lnt): # ele=times[lnt-1-i] # imag=download_last_raw_image_by_time(port, ele[0]-10, ele[0]+10) # if imag[0]: # time.append(ele[0]) # images.append(np.array(imag[1],dtype=np.uint16)) # else: # success=False # return success, np.array(time), images else: # raise Exception("download_raw_FLIR_images_via_png: cannot identifie the way the searched data is stored!") logging.warning("download_raw_FLIR_images_via_png: cannot identifie the way the searched data is stored!") return False, [0], [0], 0, [0], [0] return exist, time, frames,mode, tsets, versions def get_FLIR_Pset_times(stdate, enddate, version=0, verbose=0, testmode=True): """ Download of the timevector for the different pre-sets of the recorded FLIR images INPUT ------ stdate: integer time in ns where the search should start enddate: integer time in sn where the search should end version: integer, optional, default 0 Version of the data in the archiveDB, in case of 0 the highest version will be used verbose: integer, optional, default 0 feedback level (details of print messages) testmode: boolean, optional, default True if True, the data will be loaded from the test archive RESULT ------ timess: list timevectors of each pre-set versions: list list of the versions of each pre-set NOTE ------ """ if testmode: larchivepath=testarchivepath+project+"/QSR07_FLIR/AEK51_raw" else: larchivepath=archivepath+project+"/QSR07_FLIR/AEK51_raw" if version == 0: versions=[] versions.append(get_latest_version("QSR07_FLIR/AEK51_raw_PS0_DATASTREAM", testmode=testmode, t_from=stdate)) versions.append(get_latest_version("QSR07_FLIR/AEK51_raw_PS1_DATASTREAM", testmode=testmode, t_from=stdate)) versions.append(get_latest_version("QSR07_FLIR/AEK51_raw_PS2_DATASTREAM", testmode=testmode, t_from=stdate)) versions.append(get_latest_version("QSR07_FLIR/AEK51_raw_PS3_DATASTREAM", testmode=testmode, t_from=stdate)) else: versions=[version, version, version, version] timess=[] if version == 0: if verbose > 0: print('get_FLIR_Pset_times: no version specified search for data in all versions') counter=-1 for version0 in versions: counter+=1 # print(versions) try: if version0>0: found=False while (version0>0 and not found): try: url = '{0}_PS{1}_DATASTREAM/V{2}/0/raw'.format(larchivepath, counter, version0) times_0=AKF_2.get_time_intervals(url, stdate, enddate)#stdate.replace("T", " "), enddate.replace("T", " "))# found=True except Exception as E: print('get_FLIR_Pset_times: Error querrying {0}\n{1}'.format(url, E)) found=False times_0=[] version0=version0-1 timess.append(times_0) 
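                    # NOTE: the while-loop above steps down through older data
                    # versions until a non-empty time vector is found for this
                    # pre-set; if no version holds data, an empty list is
                    # appended instead (below), so every pre-set keeps its slot
                    # in `timess`.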
else: timess.append([]) except: timess.append([]) if verbose > 0: print("get_FLIr_Pset_times: no version for PS"+str(counter)) else: counter=-1 for version0 in versions: counter+=1 try: url = '{0}_PS{1}_DATASTREAM/V{2}/0/raw'.format(larchivepath, counter, version0) times_0=AKF_2.get_time_intervals(url, stdate.replace("T", " "), enddate.replace("T", " "))# except Exception as E: print('get_FLIR_Pset_times: Error querrying {0}\n{1}'.format(url, E)) times_0=[] timess.append(times_0) return timess, versions def download_FLIR_Psets(larchivepath, timess, threads, versions, verbose=0): """ Download of the FLIR datasets, which are stored as different datasets. Each dataset has a different exposure time INPUT ------ larchivepath: string local archivepath, the URL to download the images timess: list list of the timevectors for each pre-set (pset) threads: integer, default 1 number of parallel threads to download the images, one or two per pre-set version: integer, optional, default 0 Version of the data in the archiveDB, in case of 0 the highest version will be used verbose: integer, optional, default 0 feedback level (details of print messages) RESULT ------ exist: boolean True if data was found, False if not time: list a list containing the time stamp of each frame frames: list a list of frames, 2D numpy arrays tsets: list contains the information which frames belong to which pre-set NOTE ------ """ if len(timess[0])==len(timess[1]) and len(timess[2])==len(timess[3]) and len(timess[0])==0: exist=False time=[] frames=[] tsets=[] else:#timess is not empty, there is data in at least one Pset if verbose > 0: print(datetime.datetime.now(), "download_FLIR_Psets: data found, start the download") exist=True timesets=[] framess=[] success=True Psets=[] if threads==1: counter=-1 setn=-1 for times in timess: setn+=1 if times!=[]: counter+=1 Psets.append(counter) framess.append([]) timesets.append([]) lnt=len(times) for i in range(lnt): ele=times[lnt-1-i] imag=download_last_raw_image_by_time(larchivepath+"_PS"+str(setn)+"_DATASTREAM/V"+str(versions[setn])+"/0/raw", ele[0]-10, ele[0]+10) if imag[0]: timesets[counter].append([ele[0], i, setn, counter]) framess[counter].append(np.array(imag[1],dtype=np.uint16)) else: success=False else:#several threads, each for a preset tim = [] counter = -1 setn = -1 threads = 0 larchivepaths = [] versionss = [] for times in timess: times = np.array(times) setn += 1 if times != []: counter += 1 threads += 1 Psets.append(counter) larchivepaths.append(larchivepath+"_PS"+str(setn)+"_DATASTREAM/V"+str(versions[setn])+"/0/raw") tim.append(times[:, 0]) versionss.append(versions[setn]) jobs = [] resultdict = [] for i in range(threads): if verbose > 0: print("download_FLIR_Psets: Start Thread ", i+1) TH = download_images_thread(i, larchivepaths[i], tim[i],resultdict, versionss[i]) jobs.append(TH) TH.start() for p in jobs: p.join() if verbose > 0: print("download_FLIR_Psets: all threads are done") order=[] for ele in resultdict: order.append(ele[0]) if len(np.where(np.asarray(ele[1])==False)[0])>0: success = False for i in range(threads): framess.append(resultdict[order.index(i)][3]) timeline=[] for j in range(len(resultdict[order.index(i)][2])): timeline.append([resultdict[order.index(i)][2][j],j, Psets[i], i]) timesets.append(timeline) resultdict[order.index(i)] = [] del resultdict, timeline if verbose > 0: print(datetime.datetime.now(), "download finished, successfull", success) if success: del timess ### okay the have now the data from all presets, now we have to sort the frames 
into one array time=[] frames=[] dummyT=[] tsets=[] for tim in timesets: dummyT=dummyT+tim dummyT.sort() for ele in dummyT: time.append(ele[0]) frames.append(framess[ele[2]][ele[1]]) tsets.append(ele[2]) del timesets, framess else: exist=False time=[] frames=[] tsets=[] return exist, time, frames, tsets def download_last_raw_image_by_time(larchivepath, starttime, stoptime):#, version=0 """ downloads for a given time intervall the image as png. it if always the last image in the intervall INPUT ------ larchivepath: string local archivepath, the URL to download the images starttime: integer time in ns where the search should start stoptime: integer time in sn where the search should end RESULT ------ exist: boolean True if data was found, False if not frame: list a list of frame, 2D numpy array NOTE ------ """ try: url='{0}/_signal.png?from={1}&upto={2}'.format(larchivepath, starttime-10, stoptime) res = urllib.request.urlopen(url) img = Image.open(BytesIO(res.read())) res.close() # pixelarray = np.array(img.getdata()).reshape(img.size[1], img.size[0]) pixelarray = np.array(img,dtype=np.uint16)#.swapaxes(0,1) return True, pixelarray except urllib.error.URLError as e: print('download_last_raw_image_by_time: Error querrying {0}\n{1}'.format(url, e)) return False, [-1] except Exception as e: print('download_last_raw_image_by_time: Error querrying {0}\n{1}'.format(url, e)) return False, [-1] def download_raw_parlog_by_program(port, program, version=0, testmode=False, verbose=0): """ download of the additional information for the raw data of the AEF cameras for a given program, which are stored in the parlog. contains information as frame width, height bitdepth. INPUT ------ port: integer port number of the AEF port of the camera program: string program id in the form of 'YYYYMMDD.XXX', e.g. '20181016.016' version: integer, optional, default 0 Version of the data in the archiveDB, in case of 0 the highest version will be used testmode: boolean, optional, default False if True, the data will be loaded from the test archive verbose: integer, optional, default 0 feedback level (details of print messages) RESULT ------ exist: boolean True if data was found, False if not time: list a list containing the time stamp of the parlog (from to) meta-data: dictionary/list the meta-data from the parlog converted from JSON NOTE ------ """ ## prog=AKF_1.get_program_from_PID(program) # try: # t_program = AKF_2.get_program_from_to(program) # prog =AKF_2.get_program_list(t_program[0], t_program[1]) ## if prog[0]: # starttime=prog[0]['trigger']['1'][0] # stoptime=prog[0]['trigger']['6'][0] exist, _, starttime, stoptime = get_trigger_from_PID(program, port, testmode, verbose=verbose-1) if exist: return download_raw_parlog_by_times(port, starttime, stoptime, version, testmode=testmode) else: #except: print("download_raw_parlog_by_program: Error! cannot find the program") return False, 0, 0 def download_raw_parlog_by_times(port, starttime, stoptime, version=0, testmode=False): """ download of the additional information for the raw data of the AEF cameras for a given time intervall, which are stored in the parlog. contains information as frame width, height bitdepth. 
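    A typical call is, for example (a sketch; t1 and t6 are nanosecond
    timestamps, e.g. the program triggers):

        exist, t_interval, meta = download_raw_parlog_by_times(10, t1, t6)

    where `meta` is the PARLOG meta-data dictionary and `t_interval` is the
    time range (from, to) it is valid for.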
INPUT ------ port: integer port number of the AEF port of the camera starttime: integer time in ns where the search should start stoptime: integer time in sn where the search should end version: integer, optional, default 0 Version of the data in the archiveDB, in case of 0 the highest version will be used testmode: boolean, optional, default False if True, the data will be loaded from the test archive verbose: integer, optional, default 0 feedback level (details of print messages) RESULT ------ exist: boolean True if data was found, False if not time: list a list containing the time stamp of the parlog (from to) meta-data: dictionary/list the meta-data from the parlog converted from JSON NOTE ------ """ OP = IR_tools.get_OP_by_time(time_ns=stoptime) if version == 0: version = get_latest_version(portpathdict[OP]["AEF"+str(port)]+"raw_PARLOG", t_from=starttime, testmode=testmode) if testmode: larchivepath=testarchivepath+project+"/"+portpathdict[OP]["AEF"+str(port)]+"raw_PARLOG/V"+str(version) else: larchivepath=archivepath+project+"/"+portpathdict[OP]["AEF"+str(port)]+"raw_PARLOG/V"+str(version) try: res = urllib.request.urlopen(larchivepath+"/_signal.json?from="+str(starttime)+"&upto="+str(stoptime)) signal_list = json.loads(res.read().decode('utf-8')) res.close() goon=True except urllib.error.URLError as e: print('download_raw_parlog_by_times: Error! ', e) goon=False except Exception as E: print('download_raw_parlog_by_times: Error! ', E) goon=False if goon: return True, signal_list['dimensions'], signal_list['values'][0]['meta-data'] else: return False, [0], [0] def get_INFRATEC_filter_by_program(program, port=50, version=0, testmode=False, verbose=0): """ downlods the information of the used filter of the INFRATEC camera for a given program INPUT ------ program: string program id in the form of 'YYYYMMDD.XXX', e.g. '20181016.016' port: integer, optional, default 50 port number of the AEF port of the camera version: integer, optional, default 0 Version of the data in the archiveDB, in case of 0 the highest version will be used testmode: boolean, optional, default False if True, the data will be loaded from the test archive verbose: integer, optional, default 0 feedback level (details of print messages) RESULT ------ exist: boolean True if data was found, False if not time: list a list containing the time stamp of the parlog (from to) filter: integer the number of the used filter NOTE ------ """ # prog=get_program_from_PID(program) ## try: ## t_program = AKF_2.get_program_from_to(program) ## prog =AKF_2.get_program_list(t_program[0], t_program[1]) # if prog[0]: # starttime=prog[1]['trigger']['1'][0] # stoptime=prog[1]['trigger']['6'][0] exist, _, starttime, stoptime = get_trigger_from_PID(program, port, testmode, verbose=verbose-1) if exist: return get_INFRATEC_filter_by_times(starttime, stoptime,50, version, testmode=testmode) else:#except: print("get_INFRATEC_filter_by_program: Error! 
cannot find the program") return False, 0, 0 def get_INFRATEC_filter_by_times(starttime, stoptime, port=50, version=0, testmode=False): """ Idownlods the information of the used filter of the INFRATEC camera for a given time intervall INPUT ------ starttime: integer time in ns where the search should start stoptime: integer time in sn where the search should end port: integer, optional, default 50 port number of the AEF port of the camera version: integer, optional, default 0 Version of the data in the archiveDB, in case of 0 the highest version will be used testmode: boolean, optional, default False if True, the data will be loaded from the test archive verbose: integer, optional, default 0 feedback level (details of print messages) RESULT ------ exist: boolean True if data was found, False if not time: list a list containing the time stamp of the parlog (from to) filter: integer the number of the used filter NOTE ------ """ OP = IR_tools.get_OP_by_time(time_ns=starttime) if version == 0: version = get_latest_version(portpathdict[OP]["AEF"+str(port)]+"raw_PARLOG", t_from=starttime, testmode=testmode) if testmode: larchivepath=testarchivepath+project+"/"+portpathdict[OP]["AEF"+str(port)]+"raw_PARLOG/V"+str(version) else: larchivepath=archivepath+project+"/"+portpathdict[OP]["AEF"+str(port)]+"raw_PARLOG/V"+str(version) try: res = urllib.request.urlopen(larchivepath+"/_signal.json?from="+str(starttime)+"&upto="+str(stoptime)) signal_list = json.loads(res.read().decode('utf-8')) res.close() goon=True except urllib.error.URLError as e: print('get_INFRATEC_filter_by_times: Error! ', e) goon=False except Exception as E: print('get_INFRATEC_filter_by_times: Error! ', E) goon=False if goon: return True, signal_list['dimensions'], signal_list['values'][0]['meta-data']['filter'] else: return False, 0,-1 def get_exposure_by_program(port, program, version=0, testmode=False, verbose=0): """ download the exposure time for a given camera and program for each frame INPUT ------ port: integer port number of the AEF port of the camera program: string program id in the form of 'YYYYMMDD.XXX', e.g. '20181016.016' version: integer, optional, default 0 Version of the data in the archiveDB, in case of 0 the highest version will be used testmode: boolean, optional, default False if True, the data will be loaded from the test archive verbose: integer, optional, default 0 feedback level (details of print messages) RESULT ------ exist: boolean True if data was found, False if not time: list a list containing the time stamp of each frame exposure time: list the exposure time of the camera for each frame NOTE ------ """ # prog=get_program_from_PID(program) ## try: ## t_program = AKF_2.get_program_from_to(program) ## prog =AKF_2.get_program_list(t_program[0], t_program[1]) # if prog[0]: # starttime=prog[1]['trigger']['1'][0] # stoptime=prog[1]['trigger']['6'][0] exist, _, starttime, stoptime = get_trigger_from_PID(program, port, testmode, verbose=verbose-1) if exist: return get_exposure_by_times(port, starttime, stoptime, version, testmode=testmode) else: # except Exception as E: print("get_exposure_by_program: Error! 
cannot find the program, no exposure time available;")#, E)
        return False, 0, 0


def get_exposure_by_times(port, starttime, stoptime, version=0, testmode=False):
    """
    download the exposure time for a given camera and each frame in a given
    time interval

    INPUT
    ------
        port: integer
            port number of the AEF port of the camera
        starttime: integer
            time in ns where the search should start
        stoptime: integer
            time in ns where the search should end
        version: integer, optional, default 0
            Version of the data in the archiveDB, in case of 0 the highest
            version will be used
        testmode: boolean, optional, default False
            if True, the data will be loaded from the test archive
    RESULT
    ------
        exist: boolean
            True if data was found, False if not
        time: list
            a list containing the time stamp of each frame
        exposure time: list
            the exposure time of the camera for each frame
    NOTE
    ------
    """
    OP = IR_tools.get_OP_by_time(time_ns=stoptime)
    if testmode:
        larchivepath = testarchivepath+project+"/"+portpathdict[OP]["AEF"+str(port)]+"meta_"
    else:
        larchivepath = archivepath+project+"/"+portpathdict[OP]["AEF"+str(port)]+"meta_"
    stream = portpathdict[OP]["AEF"+str(port)]+"meta_"+"DATASTREAM"
    if version == 0:
        version = get_latest_version(stream, t_from=starttime, testmode=testmode)
    return read_restdb(larchivepath+"DATASTREAM/V"+str(version)+"/3/exposuretime"+"/_signal.json?from="+str(starttime)+"&upto="+str(stoptime))


def get_camera_temp_by_program(port, program, version=0, testmode=False, verbose=0):
    """
    download the camera temperature for a given camera and program for each frame

    INPUT
    ------
        port: integer
            port number of the AEF port of the camera
        program: string
            program id in the form of 'YYYYMMDD.XXX', e.g. '20181016.016'
        version: integer, optional, default 0
            Version of the data in the archiveDB, in case of 0 the highest
            version will be used
        testmode: boolean, optional, default False
            if True, the data will be loaded from the test archive
        verbose: integer, optional, default 0
            feedback level (details of print messages)
    RESULT
    ------
        exist: boolean
            True if data was found, False if not
        time: list
            a list containing the time stamp of each frame
        temperature: list
            the temperature of the camera for each frame
    NOTE
    ------
    """
##    prog=AKF_1.get_program_from_PID(program)
#    try:
#        t_program = AKF_2.get_program_from_to(program)
#        prog =AKF_2.get_program_list(t_program[0], t_program[1])
##    if prog[0]:
#        starttime=prog[0]['trigger']['1'][0]
#        stoptime=prog[0]['trigger']['6'][0]
    exist, _, starttime, stoptime = get_trigger_from_PID(program, port, testmode, verbose=verbose-1)
    if exist:
        return get_camera_temp_by_times(port, starttime, stoptime, version, testmode=testmode)
    else:  # except:
        print("get_camera_temp_by_program: Error!
cannot find the program") return False, 0, 0 def get_camera_temp_by_times(port, starttime, stoptime, version=0, testmode=False): """ download the camera temperature for a given camera and each frame in a given time intervall INPUT ------ port: integer port number of the AEF port of the camera starttime: integer time in ns where the search should start stoptime: integer time in sn where the search should end version: integer, optional, default 0 Version of the data in the archiveDB, in case of 0 the highest version will be used testmode: boolean, optional, default False if True, the data will be loaded from the test archive RESULT ------ exist: boolean True if data was found, False if not time: list a list containing the time stamp of each frame temperature: list the temperature of the camera for each frame NOTE ----- """ OP = IR_tools.get_OP_by_time(time_ns=stoptime) stream=portpathdict[OP]["AEF"+str(port)]+"meta_" if testmode: larchivepath=testarchivepath+project+"/"+portpathdict[OP]["AEF"+str(port)]+"meta_" else: larchivepath=archivepath+project+"/"+portpathdict[OP]["AEF"+str(port)]+"meta_" if version == 0: version = get_latest_version(stream+"DATASTREAM", t_from=starttime, testmode=testmode) dummy=read_restdb(larchivepath+"DATASTREAM/V"+str(version)+"/1/cameratemperature/_signal.json?from="+str(starttime)+"&upto="+str(stoptime)) if dummy[0]: return dummy else: return read_restdb(larchivepath+"DATASTREAM/V"+str(version)+"/1/cameratempetarure/_signal.json?from="+str(starttime)+"&upto="+str(stoptime)) def get_sensor_temp_by_program(port, program, version=0, testmode=False, verbose=0): """ download the sensor temperature for a given camera and program for each frame INPUT ------ port: integer port number of the AEF port of the camera program: string program id in the form of 'YYYYMMDD.XXX', e.g. '20181016.016' version: integer, optional, default 0 Version of the data in the archiveDB, in case of 0 the highest version will be used testmode: boolean, optional, default False if True, the data will be loaded from the test archive verbose: integer, optional, default 0 feedback level (details of print messages) RESULT ------ exist: boolean True if data was found, False if not time: list a list containing the time stamp of each frame temperature: list the temperature of the camera sensor for each frame NOTE ------ """ # prog=get_program_from_PID(program) ## try: ## t_program = AKF_2.get_program_from_to(program) ## prog =AKF_2.get_program_list(t_program[0], t_program[1]) # if prog[0]: # starttime=prog[1]['trigger']['1'][0] # stoptime=prog[1]['trigger']['6'][0] exist, _, starttime, stoptime = get_trigger_from_PID(program, port, testmode, verbose=verbose-1) if exist: return get_sensor_temp_by_times(port, starttime, stoptime, version, testmode=testmode) else:#except: print("get_sensor_temp_by_program: Error! 
cannot find the program") return False, 0, 0 def get_sensor_temp_by_times(port, starttime, stoptime, version=0, testmode=False): """ download the sensor temperature for a given camera and each frame in a given time intervall INPUT ------ port: integer port number of the AEF port of the camera starttime: integer time in ns where the search should start stoptime: integer time in sn where the search should end version: integer, optional, default 0 Version of the data in the archiveDB, in case of 0 the highest version will be used testmode: boolean, optional, default False if True, the data will be loaded from the test archive RESULT ------ exist: boolean True if data was found, False if not time: list a list containing the time stamp of each frame temperature: list the temperature of the camera sensor for each frame NOTE ----- """ OP = IR_tools.get_OP_by_time(time_ns=stoptime) stream=portpathdict[OP]["AEF"+str(port)]+"meta_" if testmode: larchivepath=testarchivepath+project+"/"+portpathdict[OP]["AEF"+str(port)]+"meta_" else: larchivepath=archivepath+project+"/"+portpathdict[OP]["AEF"+str(port)]+"meta_" if version == 0: version = get_latest_version(stream+"DATASTREAM", t_from=starttime, testmode=testmode) return read_restdb(larchivepath+"DATASTREAM/V"+str(version)+"/0/sensortemperature/_signal.json?from="+str(starttime)+"&upto="+str(stoptime)) def get_camera_framerate_by_program(port, program, version=0, testmode=False, verbose=0): """ download the framerate for a given camera and program for each frame INPUT ------ port: integer port number of the AEF port of the camera program: string program id in the form of 'YYYYMMDD.XXX', e.g. '20181016.016' version: integer, optional, default 0 Version of the data in the archiveDB, in case of 0 the highest version will be used testmode: boolean, optional, default False if True, the data will be loaded from the test archive verbose: integer, optional, default 0 feedback level (details of print messages) RESULT ------ exist: boolean True if data was found, False if not time: list a list containing the time stamp of each frame framerate: list the framerate of the camera for each frame NOTE ------ """ # prog=get_program_from_PID(program) ## try: ## t_program = AKF_2.get_program_from_to(program) ## prog =AKF_2.get_program_list(t_program[0], t_program[1]) # if prog[0]: # starttime=prog[1]['trigger']['1'][0] # stoptime=prog[1]['trigger']['6'][0] exist, _, starttime, stoptime = get_trigger_from_PID(program, port, testmode, verbose=verbose-1) if exist: return get_camera_framerate_by_times(port, starttime, stoptime, version, testmode=testmode) # except: else: print("get_camera_framerate_by_program: Error! 
cannot find the program") return False, 0, 0 def get_camera_framerate_by_times(port, starttime, stoptime, version=0, testmode=False): """ download the framerate for a given camera and each frame in a given time intervall INPUT ------ port: integer port number of the AEF port of the camera starttime: integer time in ns where the search should start stoptime: integer time in sn where the search should end version: integer, optional, default 0 Version of the data in the archiveDB, in case of 0 the highest version will be used testmode: boolean, optional, default False if True, the data will be loaded from the test archive RESULT ------ exist: boolean True if data was found, False if not time: list a list containing the time stamp of each frame framerate: list the framerate of the camera for each frame NOTE ----- """ OP = IR_tools.get_OP_by_time(time_ns=stoptime) stream=portpathdict[OP]["AEF"+str(port)]+"meta_" if testmode: larchivepath=testarchivepath+project+"/"+portpathdict[OP]["AEF"+str(port)]+"meta_" else: larchivepath=archivepath+project+"/"+portpathdict[OP]["AEF"+str(port)]+"meta_" if version == 0: version = get_latest_version(stream+"DATASTREAM", t_from=starttime, testmode=testmode) return read_restdb(larchivepath+"DATASTREAM/V"+str(version)+"/2/framerate/_signal.json?from="+str(starttime)+"&upto="+str(stoptime)) def get_frametype_by_program(port, program, version=0, testmode=False, verbose=0): """ download the frametype for a given camera and program for each frame INPUT ------ port: integer port number of the AEF port of the camera program: string program id in the form of 'YYYYMMDD.XXX', e.g. '20181016.016' version: integer, optional, default 0 Version of the data in the archiveDB, in case of 0 the highest version will be used testmode: boolean, optional, default False if True, the data will be loaded from the test archive verbose: integer, optional, default 0 feedback level (details of print messages) RESULT ------ exist: boolean True if data was found, False if not time: list a list containing the time stamp of each frame frametype: list the frametype of the camera for each frame NOTE ------ """ ## prog=AKF_1.get_program_from_PID(program) # try: # t_program = AKF_2.get_program_from_to(program) # prog =AKF_2.get_program_list(t_program[0], t_program[1]) ## if prog[0]: # starttime=prog[0]['trigger']['1'][0] # stoptime=prog[0]['trigger']['6'][0] exist, _, starttime, stoptime = get_trigger_from_PID(program, port, testmode, verbose=verbose-1) if exist: return get_frametype_by_times(port, starttime, stoptime, version, testmode=testmode) else: #except: print("get_frametype_by_program: Error! 
cannot find the program") return False, 0, 0 def get_frametype_by_times(port, starttime, stoptime, version=0, testmode=False): """ download the frametyp for a given camera and each frame in a given time intervall INPUT ------ port: integer port number of the AEF port of the camera starttime: integer time in ns where the search should start stoptime: integer time in sn where the search should end version: integer, optional, default 0 Version of the data in the archiveDB, in case of 0 the highest version will be used testmode: boolean, optional, default False if True, the data will be loaded from the test archive RESULT ------ exist: boolean True if data was found, False if not time: list a list containing the time stamp of each frame frametype: list the frametype of the camera for each frame NOTE ----- """ OP = IR_tools.get_OP_by_time(time_ns=starttime) stream=portpathdict[OP]["AEF"+str(port)]+"meta_" if testmode: larchivepath=testarchivepath+project+"/"+portpathdict[OP]["AEF"+str(port)]+"meta_" else: larchivepath=archivepath+project+"/"+portpathdict[OP]["AEF"+str(port)]+"meta_" if version == 0: version = get_latest_version(stream+"DATASTREAM", t_from=starttime, testmode=testmode) return read_restdb(larchivepath+"DATASTREAM/V"+str(version)+"/5/frametype/_signal.json?from="+str(starttime)+"&upto="+str(stoptime)) def get_framenumber_by_program(port, program, version=0, testmode=False, verbose=0): """ download the framenumber for a given camera and program for each frame INPUT ------ port: integer port number of the AEF port of the camera program: string program id in the form of 'YYYYMMDD.XXX', e.g. '20181016.016' version: integer, optional, default 0 Version of the data in the archiveDB, in case of 0 the highest version will be used testmode: boolean, optional, default False if True, the data will be loaded from the test archive verbose: integer, optional, default 0 feedback level (details of print messages) RESULT ------ exist: boolean True if data was found, False if not time: list a list containing the time stamp of each frame framenumber: list the framenumber of the camera for each frame NOTE ------ """ # prog=AKF_1.get_program_from_PID(program) # try: # t_program = AKF_2.get_program_from_to(program) # prog =AKF_2.get_program_list(t_program[0], t_program[1]) ## if prog[0]: # starttime=prog[0]['trigger']['1'][0] # stoptime=prog[0]['trigger']['6'][0] exist, _, starttime, stoptime = get_trigger_from_PID(program, port, testmode, verbose=verbose-1) if exist: return get_framenumber_by_times(port, starttime, stoptime, version, testmode=testmode) else: #except: print("get_framenumber_by_program: Error! 
cannot find the program") return False, 0, 0 def get_framenumber_by_times(port, starttime, stoptime, version=0, testmode=False): """ download the framenumber for a given camera and each frame in a given time intervall INPUT ------ port: integer port number of the AEF port of the camera starttime: integer time in ns where the search should start stoptime: integer time in sn where the search should end version: integer, optional, default 0 Version of the data in the archiveDB, in case of 0 the highest version will be used testmode: boolean, optional, default False if True, the data will be loaded from the test archive RESULT ------ exist: boolean True if data was found, False if not time: list a list containing the time stamp of each frame framenumber: list the framenumber of the camera for each frame NOTE ----- """ OP = IR_tools.get_OP_by_time(time_ns=starttime) stream=portpathdict[OP]["AEF"+str(port)]+"meta_" if testmode: larchivepath=testarchivepath+project+"/"+portpathdict[OP]["AEF"+str(port)]+"meta_" else: larchivepath=archivepath+project+"/"+portpathdict[OP]["AEF"+str(port)]+"meta_" if version == 0: version = get_latest_version(stream+"DATASTREAM", t_from=starttime, testmode=testmode) return read_restdb(larchivepath+"DATASTREAM/V"+str(version)+"/4/framenumber/_signal.json?from="+str(starttime)+"&upto="+str(stoptime)) def get_average_divertor_TC_for_camtemp(port, starttime, stoptime, part="all", TC=0): """ download of an mean divertor temperature measured by the thermocouples in the divertor with respect to a camera observing this divertor INPUT ------ port: integer port number of the AEF port of the camera starttime: integer time in ns where the search should start stoptime: integer time in sn where the search should end part: string or integer, optional, default "all" defines the divertor region for the evaluation of the thermocouples (TC)\n options are 'h' (horizontal), 'v' (vertical), 'hli' (horizontal low iota), 'hhi' (horizontal high iota) 's' or 'single' for only one TC\n if it is a number, one thermocouple will be used, as counted from low to high iota region(7), going from 0 to 10, 8-10 vor vertical TC: integer, optional, only for part 's' TC number if part is 'single' or 's' RESULT ------ exist: boolean True if data was found, False if not time: list a list containing the time stamp of each temperature temperature: list averaged TC temperature for each timestamp in time NOTE ------ """ urlstart="http://archive-webapi.ipp-hgw.mpg.de/ArchiveDB/raw/W7X/CoDaStationDesc.115/" if port==50 or port==51: urlstart=urlstart+"DataModuleDesc.19471_DATASTREAM/" else: urlstart=urlstart+"DataModuleDesc.262_DATASTREAM/" if port %10==1:##uneven cameraport, even TC port portnr=port-1 offset=(port//10-1)*32 elif port%10==0:##even cameraport, uneven TC port portnr=port+1 offset=(port//10-1)*32+16 else: #print("get_average_divertor_TC_for_camtemp: Error! unknown port") #raise Exception logging.warning("get_average_divertor_TC_for_camtemp: Error! 
unknown port") return False, [0], [0] if portnr==50: offset=0 elif portnr==51: offset=16 ext="/ActV1QRD"+str(portnr)+"CT1" n=0 signal=0 success=True if portnr==21: skip=True skipnr=1 else: skip=False skipnr=0 if part=="all": rs=0 re=10 elif part in ["h", "horizontal", "full h", "full horizontal"]: rs=0 re=7 elif part in ["v", "vertical", "vertikal"]: rs=7 re=10 elif part in ["hli", "horizontal low iota"]: rs=0#4 re=4#7 elif part in ["hhi", "horizontal high iota", "high iota", "hi"]: rs=4#0 re=7#4 elif part in [0,1,2,3,4,5,6,7,8,9,10]: rs=part re=part+1 elif part in ["Single", "s", "single"]: rs=TC re=TC+1 else: rs=0 re=0 if rs!=re: for i in range(rs,re,1): if skip and i==skipnr: i+=1 url=urlstart+str(offset+i)+ext+"0"+str(i)+"_TC"+str(i%8+1) try: result=read_restdb(url+"/_signal.json?from="+str(starttime)+"&upto="+str(stoptime)) # res.append(result) if result[0]: n+=1 time=result[1] signal=signal+np.array(result[2]) else: time=0 success=False except Exception as E: print('get_average_divertor_TC_for_camtemp: Error! ', E) success=False try: return success, time, signal/n except Exception as E: print('get_average_divertor_TC_for_camtemp: Error! ', E) return False, 0,-1 else: print("get_average_divertor_TC_for_camtemp: Error! rs and re are the same numbers") return False, 0,-1 def get_divertor_TC_coordinate(cameraport, TC=0): """ returns the information of the thermocouple INPUT ------ cameraport: integer port number of the AEF port of the camera TC: integer, default is 0 TC number going from 0 to 10 RESULT ------ TC_info: dictionary contains x,y,z information and in which target it is located NOTE ------ """ from IR_config_constants import TC_database if cameraport %10==1:##uneven cameraport, even TC port portnr=cameraport-1 elif cameraport%10==0:##even cameraport, uneven TC port portnr=cameraport+1 else: raise Exception("get_divertor_TC_coordinate: unknown port!") return TC_database[portnr][TC] def get_TC_data_for_port_by_program(port, program, part=0, version=0, verbose=0): """ download the thermocouple (TC) data for a thermocouple in the port (immersion tube or endoscope) INPUT ------ port: integer port number of the AEF port of the camera program: string program id in the form of 'YYYYMMDD.XXX', e.g. '20181016.016' part: integer, default 0 0 or the number of the TC in the port version: integer, optional, default 0 Version of the data in the archiveDB, in case of 0 the highest version will be used verbose: integer, optional, default 0 feedback level (details of print messages) RESULT ------ exist: boolean True if data was found, False if not time: list a list containing the time stamp of each frame temperature: list temperature from the TC for each time stamp in time NOTE ------ """ ## prog=AKF_1.get_program_from_PID(program) # try: # t_program = AKF_2.get_program_from_to(program) # prog =AKF_2.get_program_list(t_program[0], t_program[1]) ## if prog[0]: # starttime=prog[0]['trigger']['1'][0] # stoptime=prog[0]['trigger']['6'][0] exist, _, starttime, stoptime = get_trigger_from_PID(program, port, testmode=False, verbose=verbose-1) if exist: return get_TC_data_for_port(port, starttime, stoptime, part)#, version) else: #except: print("get_TC_data_for_port_by_program: Error! 
cannot find the program") return False, 0, 0 def get_TC_data_for_port(port, starttime, stoptime, part=0):#, version=0 """ download the thermocouple (TC) data for a thermocouple in the port (immersion tube or endoscope) INPUT ------ port: integer port number of the AEF port of the camera starttime: integer time in ns where the search should start stoptime: integer time in sn where the search should end part: integer, default 0 0 or the number of the TC in the port version: integer, optional, default 0 Version of the data in the archiveDB, in case of 0 the highest version will be used verbose: integer, optional, default 0 feedback level (details of print messages) RESULT ------ exist: boolean True if data was found, False if not time: list a list containing the time stamp of each frame temperature: list temperature from the TC for each time stamp in time NOTE ------ """ urlstart="http://archive-webapi.ipp-hgw.mpg.de/ArchiveDB/raw/W7X/CoDaStationDesc.115/DataModuleDesc.19471_DATASTREAM/" #66,67(10),72,73(20),78,79(30),84,85(40/50_2),90,91(50_1) n=0 success=True signal=0 time=0 try: for thisport in TC_port[port]: thisTCchannel=TC_channel[thisport] if part==0: for chi in range(len(thisTCchannel[0])): url=urlstart+str(thisTCchannel[0][chi])+"/"+thisTCchannel[1][chi] try: result=read_restdb(url+"/_signal.json?from="+str(starttime)+"&upto="+str(stoptime)) if result[0]: n+=1 time=result[1] signal=signal+np.array(result[2]) else: success=False except Exception as E: print('get_TC_data_for_port: Error! ', E) success=False n=1 else: try: url=urlstart+str(thisTCchannel[0][part-1])+"/"+thisTCchannel[1][part-1] try: result=read_restdb(url+"/_signal.json?from="+str(starttime)+"&upto="+str(stoptime)) if result[0]: n+=1 time=result[1] signal=signal+np.array(result[2]) else: success=False n=1 except Exception as E: print('get_TC_data_for_port: Error! ', E) success=False n=1 except Exception as E: print('get_TC_data_for_port: Error! ', E) success=False n=1 except: success=False n=1 return success, time, signal/n #%% end of download functions, below combination of tools to get temperature #def apply_LUT_to_images(LUT, images): # LUTd={LUT[0][i]: LUT[1][i] for i in range(0, len(LUT[0]))} # return np.vectorize(LUTd.get)(images) def temp_from_LUT(LUT, images): """ apply the look up table(LUT) onto images to get temperature images INPUT ------ LUT: list list in the form [Temperaure,temperature_error] images: list the frames as a list of 2D numpy arrays RESULT ------ images: list list of numpy arrays NOTE ------ """ return LUT[0, images] def terror_from_LUT(LUT, images): """ apply the look up table(LUT) onto images to get temperature error images INPUT ------ LUT: list list in the form [Temperaure,temperature_error] images: list the frames as a list of 2D numpy arrays RESULT ------ images: list list of numpy arrays NOTE ------ """ return LUT[1, images] def check_temperature_range(time,maxvalue_C,minvalue_C, port, exposure, cfilter=0): """ INPUT ------ time: integer nanosecond time stamp to identify the right camera maxvalue_C: float or integer the maximum value in the data minvalue_C: float or integer the minimum value in the data port: integer port number of the AEF port of the camera exposure: integer exposure time in microseconds cfilter: integer, optional, default 0 the filter for the infratec camera RESULT ------ no return NOTE ------ not fully functional and slow! 
""" try: cam=portcamdict[IR_tools.get_OP_by_time(time_ns=time)]['AEF'+str(port)] if port==50: ranges=camlimdict[cam][cfilter][exposure] else: ranges=camlimdict[cam][exposure] # if minvalue_C>=ranges[0] and maxvalue_C<=ranges[1]: # return True if minvalue_C{1:.2f}°C)".format(minvalue_C,ranges[0])) # return False elif minvalue_C>=ranges[0] and maxvalue_C>ranges[1]: raise Exception("check_temperature_range: max value ({0:.2f}°C) is out of the valid temperature range (T<{1:.2f}°C)".format(maxvalue_C,ranges[1])) # return False elif minvalue_Cranges[1]: raise Exception("check_temperature_range: max and min value ({0:.2f}°C,{1:.2f}°C) is out of the valid temperature range ({2:.2f}°C 0: print(datetime.datetime.now(), "get_temp_from_raw_by_program_V1: Start download of raw images") if (stoptime-starttime)/intervalSize>1: nrinterv=int(np.ceil((stoptime-starttime)/intervalSize)) if verbose > 0: print("get_temp_from_raw_by_program_V1: time_window too large, splitting into {0} smaller fractions".format(nrinterv)) intervalls=[] for i in range(nrinterv): intervalls.append(int(starttime-10+i*intervalSize)) intervalls.append(stoptime) temperatureimages=[] times=[] for i in range(nrinterv): raw_dl=download_raw_images_by_times(portnr, intervalls[i], intervalls[i+1], version, intervalSize, testmode=testmode, verbose=verbose-1) print(datetime.datetime.now(), "get_temp_from_raw_by_program_V1: download of raw images part {0} of {1} finished".format(i+1, nrinterv)) if raw_dl[0]: time=raw_dl[1] images=raw_dl[2] del raw_dl # images=images.swapaxes(1,2) images=[im.swapaxes(0,1) for im in images] success, images=apply_calib_on_raw(images, background, LUT,refT, gain, offset, True, verbose=verbose-1) temperatureimages=temperatureimages+images times=times+time del time, images else: # raise Exception("get_temp_from_raw_by_program_V1: cannot download the raw images") logging.warning("get_temp_from_raw_by_program_V1: cannot download the raw images") return False, [0], [0] else: raw_dl=download_raw_images_by_times(portnr, starttime, stoptime, version, intervalSize, testmode=testmode, verbose=verbose-1) if raw_dl[0]: times=raw_dl[1] temperatureimages=raw_dl[2] del raw_dl # temperatureimages=temperatureimages.swapaxes(1,2) temperatureimages=[im.swapaxes(0,1) for im in temperatureimages] if give_ERROR: success, temperatureimages, error_images=apply_calib_on_raw(temperatureimages, background, LUT,refT, gain, offset, gain_error, offset_error, True, give_ERROR, verbose=verbose-1) else: success, temperatureimages=apply_calib_on_raw(temperatureimages, background, LUT,refT, gain, offset, gain_error, offset_error, True, give_ERROR, verbose=verbose-1) else: raise Exception("get_temp_from_raw_by_program_V1: cannot download the raw images") valid=True for i in range(len(temperatureimages)): try: check_temperature_range(times[0], np.max(temperatureimages)-273.15, np.min(temperatureimages)-273.15, portnr, t_exp, cfilter) except Exception as E: print("get_temp_from_raw_by_program_V1: Error! 
", E) valid=False i=len(temperatureimages) if give_ERROR: return success, times, temperatureimages, valid, error_images else: return success, times, temperatureimages, valid def get_temp_from_raw_by_program_V2(portnr, program, time_window=0, emi=0.8, version=0, threads=1, give_ERROR=False,use_firstframe_as_background=False, verbose=0): """ downloads the raw data of the cameras, the calibration and applies it to the data to deliver the temperature data INPUT ------ portnr: integer port number of the AEF port of the camera program: string program id in the form of 'YYYYMMDD.XXX', e.g. '20181016.016' time_window: list, array, integer or float, optional, default 0 if integer or float: size of the window in seconds from t1 if list or array: start and endpoint of the window in seconds from t1 if 0 the full program will be downloaded (t1 until t6) emi: float, optional, default 0.8 the emissivity value for the calibration version: integer, optional, default 0 Version of the data in the archiveDB, in case of 0 the highest version will be used threads: integer, optional, default 1 number of parallel threads to download the images (approx. max. 2x number of cores and max. timeintervall/0.5 for best performance) give_ERROR: boolean, optional default False switch to turn on the return of error images use_firstframe_as_background: boolean in case the background frames are not available, the first frame can be used verbose: integer, optional, default 0 feedback level (details of print messages) RESULT ------ exist: boolean True if data was found, False if not time: list a list containing the time stamp of each frame frames: list a list of frames, 2D numpy arrays optional: error_frames: list a list of frames, 2D numpy arrays NOTE ------ """ return get_temp_from_raw_by_program(portnr, program, time_window=time_window, emi=emi, T_version=2, version=version, threads=threads, give_ERROR=give_ERROR, use_firstframe_as_background=use_firstframe_as_background, verbose=verbose-1) def get_temp_from_raw_by_program(portnr, program, time_window=0, emi=0.8, T_version=2, version=0, threads=1, give_ERROR=False, use_firstframe_as_background=False, back_emissivity=0.8, verbose=0, testmode=False, framerate='max'): """ downloads the raw data of the cameras, the calibration and applies it to the data to deliver the temperature data several threads can be used to download in parallel the data INPUT ------ portnr: integer port number of the AEF port of the camera program: string program id in the form of 'YYYYMMDD.XXX', e.g. '20181016.016' time_window: list, array, integer or float, optional, default 0 if integer or float: size of the window in seconds from t1 if list or array: start and endpoint of the window in seconds from t1 if 0 the full program will be downloaded (t1 until t6) emi: float, optional, default 0.8 the emissivity value for the calibration T_version: integer, default 2 set the version of the calibration method, see more on the QRT wiki version: integer, optional, default 0 Version of the data in the archiveDB, in case of 0 the highest version will be used threads: integer, optional, default 1 number of parallel threads to download the images (approx. max. 2x number of cores and max. 
timeintervall/0.5 for best performance) give_ERROR: boolean, optional default False switch to turn on the return of error images use_firstframe_as_background: boolean in case the background frames are not available, the first frame can be used back_emissivity: float, optional, default 0.8 set the emissivity value for the background region for the calibration verbose: integer, optional, default 0 feedback level (details of print messages) testmode: boolean, optional, default False if True, the data will be loaded from the test archive framerate: string or integer, optional, default 'max' sets the framerate to downsample the data. normal rate is 100. RESULT ------ exist: boolean True if data was found, False if not time: list a list containing the time stamp of each frame frames: list a list of frames, 2D numpy arrays, values are in Kelvin optional: error_frames: list a list of frames, 2D numpy arrays NOTE ------ """ FLIR=False trigger_exist, t0, t1, t6 = get_trigger_from_PID(program, portnr, testmode, verbose=verbose-1) if not trigger_exist: # raise Exception("get_temp_from_raw_by_program: unknown Program") logging.warning("get_temp_from_raw_by_program: unknown Program") return False, [0], [0] success=True OP = IR_tools.get_OP_by_time(time_ns=t1) if type(portnr) is str: try: portnr=int(portnr) FLIR=False except: try: camera=portcamdict[OP][portnr] if camera.split("_")[0]=='FLIR': FLIR=True else: FLIR=False portnr=int(portnr.split("AEF")[1]) except Exception as E: raise Exception("get_temp_from_raw_by_program: unknown Port!"+E) elif isinstance(portnr, (int, np.integer, float, np.float)): FLIR=False portnr=int(portnr) else: # raise Exception("get_temp_from_raw_by_program: the given port is neither a number or a valid String!") logging.warning("get_temp_from_raw_by_program: the given port is neither a number or a valid String!") return False, [0], [0] if FLIR: # --- Glens FLIR camera --- # # get raw data exist, time, frames,mode, tsets, versions=download_raw_FLIR_images_via_png(t1=t1, t6=t6, time_window=time_window, version=version, threads=threads, verbose=verbose-1, testmode=testmode) if not exist: print("get_temp_from_raw_by_program: Error! 
FLIR data not found") return False, [0], [0], [] # get calibration if mode==1: Psets=IR_tools.check_dublicates(tsets) Radi_Co=[] Temp_Co=[] if len(Psets)==1: # single exposure time case PS=Psets[0] if testmode: parlog=read_restdb_old(testarchivepath+"W7X/QSR07_FLIR/AEK51_raw_PS"+str(PS)+"_PARLOG/V"+str(versions[PS])+"/_signal.json?from="+str(t1-10)+"&upto="+str(t6+20)) else: parlog=read_restdb_old(archivepath+"W7X/QSR07_FLIR/AEK51_raw_PS"+str(PS)+"_PARLOG/V"+str(versions[PS])+"/_signal.json?from="+str(t1-10)+"&upto="+str(t6+20)) tsets=[0 for i in range(len(frames))] if parlog[0]: pari=parlog[2][0]['meta-data'] ipi=0 for inti in range(4): try: dummy=pari['ITime_'+str(inti)] ipi=inti break except: ipi=-1 c1=[float(pari['Coeff0_'+str(ipi)].split("\n")[0]), float(pari['Coeff1_'+str(ipi)].split("\n")[0])] c2=[float(pari['TempCoeff'+str(x)+'_'+str(ipi)].split("\n")[0]) for x in range(7)] Radi_Co.append(c1) Temp_Co.append(c2) else: Radi_Co.append([]) Temp_Co.append([]) else: # multi exposure time case for PS in range(4): if PS in Psets: if testmode: parlog=read_restdb_old(testarchivepath+"W7X/QSR07_FLIR/AEK51_raw_PS"+str(PS)+"_PARLOG/V"+str(versions[PS])+"/_signal.json?from="+str(t1-10)+"&upto="+str(t6+20)) else: parlog=read_restdb_old(archivepath+"W7X/QSR07_FLIR/AEK51_raw_PS"+str(PS)+"_PARLOG/V"+str(versions[PS])+"/_signal.json?from="+str(t1-10)+"&upto="+str(t6+20)) if parlog[0]: pari=parlog[2][0]['meta-data'] c1=[float(pari['Coeff0_'+str(PS)].split("\n")[0]), float(pari['Coeff1_'+str(PS)].split("\n")[0])] c2=[float(pari['TempCoeff'+str(x)+'_'+str(PS)].split("\n")[0]) for x in range(7)] Radi_Co.append(c1) Temp_Co.append(c2) else: Radi_Co.append([]) Temp_Co.append([]) else: Radi_Co.append([]) Temp_Co.append([]) # got the raw, knowing which frame needs which calibration, got the calibration, now we have to transform it if verbose > 0: print(datetime.datetime.now(), "get_temp_from_raw_by_program: Converting the raw data into temperature, number of frames:", len(frames)) for i in range(len(frames)): try: frames[i]=Radi_Co[tsets[i]][0]+frames[i]*Radi_Co[tsets[i]][1] dummy=frames[i] temp=Temp_Co[tsets[i]][0]+Temp_Co[tsets[i]][1]*dummy for expo in range(2,7): temp=temp+Temp_Co[tsets[i]][expo]*np.power(dummy, expo) frames[i]=temp+273.15 except Exception as E: print('get_temp_from_raw_by_program: Error in frame {0}! 
{1}'.format(i, E)) print(' (len(tsets) {0},Radi_Co[tsets[i]] {1}, Temp_Co[tsets[i]] {2})'.format(len(tsets),Radi_Co[tsets[i]], Temp_Co[tsets[i]])) if verbose > 0: print(datetime.datetime.now(), "get_temp_from_raw_by_program: temperature frames done") return exist, time, frames, tsets elif mode==2: raise Exception("get_temp_from_raw_by_program: not implemented in mode 2") else: raise Exception("get_temp_from_raw_by_program: FLIR evaluation, This case should not be possible.") else: # --- IRcam case --- # # get calibration data cexist, background, LUT,refT, gain, offset, badpixels, t_exp, cfilter, gain_error, offset_error = get_calib_data(portnr, t0=t0, t1=t1, t6=t6, emissivity=emi, T_version=T_version, version=version, back_emissivity=back_emissivity, testmode=testmode, verbose=verbose-1) if not cexist: if verbose > 0: print("get_temp_from_raw_by_program: Unable to load the complete calibration data for", program, " please confirm that this data was uploaded") return False, [0], [0], False # get raw data from latest or requested version if version == 0: version = get_latest_version(portpathdict[OP]["AEF"+str(portnr)]+"raw_DATASTREAM", testmode=testmode, program=program) if testmode: larchivepath = testarchivepath+project+"/"+portpathdict[OP]["AEF"+str(portnr)]+"raw_DATASTREAM/V"+str(version)+"/0/raw" else: larchivepath = archivepath+project+"/"+portpathdict[OP]["AEF"+str(portnr)]+"raw_DATASTREAM/V"+str(version)+"/0/raw" if verbose > 0: print(datetime.datetime.now(), "get_temp_from_raw_by_program: raw download start") if fastDL: exist, time, frames = download_images_by_time_via_png(larchivepath, starttime=t1, stoptime=t6, time_window=time_window, version=version, threads=threads, verbose=verbose-1, framerate=framerate) else: exist, time, frames = download_raw_images_by_program(portnr, program, time_window, version, testmode=testmode, verbose=verbose-1) if exist: frames = [im.swapaxes(0,1) for im in frames] if not exist: # raise Warning("get_temp_from_raw_by_program: data not found in database!") logging.warning("get_temp_from_raw_by_program: data not found in database!") return False, [0], [0], False if verbose > 0: print(datetime.datetime.now(), "get_temp_from_raw_by_program: raw download finished") # calibrate frames FOV = get_FOV_mask(portnr) if use_firstframe_as_background: bim = apply_NUC([frames[0]], gain, offset) background=get_average_background_recangle(portnr, bim[0]) if give_ERROR: success, frames, error_images=apply_calib_on_raw(frames, background, LUT,refT, gain, offset, gain_error, offset_error, False, give_ERROR, verbose=verbose-1) else: success, frames=apply_calib_on_raw(frames, background, LUT,refT, gain, offset, gain_error, offset_error, False, give_ERROR, verbose=verbose-1) if verbose > 0: print(datetime.datetime.now(), "get_temp_from_raw_by_program: Temperature calculation done") if not success: if verbose > 0: print("calculation of the temperature failed, calibration could not be applied") return False, time, frames, False # crop to FOV for i in range(len(frames)): frames[i]=(frames[i]*FOV).astype(np.float32) # correct bad pixels if verbose > 0: print(datetime.datetime.now(), "get_temp_from_raw_by_program: correcting bad pixels") frames = IR_tools.correct_images(frames, badpixels) # TODO: check temperature range for saturation and mark frames as not valid # # if verbose > 0: # print(datetime.datetime.now(), "get_temp_from_raw_by_program: checking temperaturerange") valid = True # for i in range(len(frames)): # try: # check_temperature_range(time[0], 
np.max(frames)-273.15, np.min(frames[:][np.nonzero(frames[i])])-273.15, portnr, t_exp, cfilter) # except Exception as E: # print(E) # valid=False # i=len(frames) # check for time vector offsets date = int(program.split(".")[0]) if date > 20180712 and date < 20180720: toff = time[0]-t1 if toff > 10e6: print("get_temp_from_raw_by_program: time offset of {0}ns detected, correcting".format(toff)) time[:] = time[:]-toff if give_ERROR: return exist, time, frames, valid, error_images else: return exist, time, frames, valid def get_temp_from_raw_by_program_fullthreads(portnr, program, time_window=0, emi=0.8, T_version=2, version=0, threads=1, give_ERROR=False, check_range=True, testmode=False, verbose=0): """ downloads the raw data and converting it to temeprature images INPUT ------ portnr: integer port number of the AEF port of the camera program: string program id in the form of 'YYYYMMDD.XXX', e.g. '20181016.016' time_window: list, array, integer or float, optional, default 0 if integer or float: size of the window in seconds from t1 if list or array: start and endpoint of the window in seconds from t1 if 0 the full program will be downloaded (t1 until t6) emi: float, optional, default 0.8 the emissivity value for the calibration T_version: integer, default 2 set the version of the calibration method, see more on the QRT wiki version: integer, optional, default 0 Version of the data in the archiveDB, in case of 0 the highest version will be used threads: integer, optional, default 1 number of parallel threads to download the images (approx. max. 2x number of cores and max. timeintervall/0.5 for best performance) give_ERROR: boolean, optional default False switch to turn on the return of error images check_range: boolean, optional, default True switch to turn the temperature range check on or off testmode: boolean, optional, default False if True, the data will be loaded from the test archive verbose: integer, optional, default 0 feedback level (details of print messages) RESULT ------ exist: boolean True if data was found, False if not time: list a list containing the time stamp of each frame frames: list a list of frames, 2D numpy arrays, values are in Kelvin optional: error_frames: list a list of frames, 2D numpy arrays NOTE ------ """ if threads == 1 or not fastDL: return get_temp_from_raw_by_program(portnr, program, time_window, emi, T_version, version, threads, give_ERROR, testmode=testmode, verbose=verbose-1) else: if type(portnr) is str: try: OP = IR_tools.get_OP_by_time(program_str=program) camera = portcamdict[OP][portnr] if camera.split("_")[0]=='FLIR': FLIR=True else: FLIR=False portnr=int(portnr.split("AEF")[1]) goon=True except Exception as E: # raise Exception("get_temp_from_raw_by_program_fullthreads: unknown Port!"+E) logging.warning("get_temp_from_raw_by_program_fullthreads: unknown Port!"+E) return False, [0], [0] elif type(portnr) is int: FLIR = False goon = True portnr = int(portnr) else: goon = False if goon and not FLIR: # prog=get_program_from_PID(program) # if prog[0]: # starttime=prog[1]['trigger']['1'][0] # stoptime=prog[1]['trigger']['6'][0] exist, t0, starttime, stoptime = get_trigger_from_PID(program, portnr, testmode, verbose=verbose-1) if exist: exist, background, LUT,refT, gain, offset, badpixels, t_exp, cfilter, gain_error, offset_error = get_calib_data(portnr, t0=t0, t1=starttime, t6=stoptime, emissivity=emi, T_version=T_version, version=version, testmode=testmode, verbose=verbose-1) FOV = get_FOV_mask(portnr) success = True OP = 
IR_tools.get_OP_by_time(time_ns=starttime) if version == 0: version = get_latest_version(portpathdict[OP]["AEF"+str(portnr)]+"raw_DATASTREAM", program=program, testmode=testmode) if testmode: larchivepath = testarchivepath+project+"/"+portpathdict[OP]["AEF"+str(portnr)]+"raw_DATASTREAM/V"+str(version)+"/0/raw" else: larchivepath = archivepath+project+"/"+portpathdict[OP]["AEF"+str(portnr)]+"raw_DATASTREAM/V"+str(version)+"/0/raw" if type(time_window) == list: tstart = time_window[0] tstop = time_window[1] if tstop < tstart: # raise Exception("get_temp_from_raw_by_program_fullthreads: t_stop before t_start") logging.warning("get_temp_from_raw_by_program_fullthreads: t_stop before t_start") return False, [0], [0] stdate = datetime.datetime.utcfromtimestamp((starttime-100)/1e9+tstart) stdate = stdate.isoformat() enddate = datetime.datetime.utcfromtimestamp((starttime)/1e9+tstop) enddate = enddate.isoformat() else: stdate = datetime.datetime.utcfromtimestamp((starttime-100)/1e9) stdate = stdate.isoformat() if time_window == 0: enddate = datetime.datetime.utcfromtimestamp(stoptime/1e9) enddate = enddate.isoformat() else: enddate = datetime.datetime.utcfromtimestamp((starttime)/1e9+time_window) enddate = enddate.isoformat() times = AKF_2.get_time_intervals(larchivepath, stdate.replace("T", " "), enddate.replace("T", " "))# lnt = len(times) tim = [] for i in range(lnt): tim.append(times[lnt-1-i][0]) intervalls = [] intervalSize = int(lnt/threads) for i in range(threads): intervalls.append(int(i*intervalSize)) intervalls.append(lnt) jobs = [] resultdict = [] for i in range(threads): if verbose > 0: print("get_temp_from_raw_by_program_fullthreads: Start Thread ", i+1) # p = multiprocessing.Process(target=convert_raw_to_temp_thread, args=(larchivepath, tim[intervalls[i]:intervalls[i+1]], out_q, i, version, background, LUT,refT, gain, offset, gain_error, offset_error, give_ERROR, fOV, badpixels,)) p=convert_raw_to_temp_thread(larchivepath, tim[intervalls[i]:intervalls[i+1]], resultdict, i, version, background, LUT, refT, gain, offset, gain_error, offset_error, give_ERROR, FOV, badpixels, verbose=verbose-1) jobs.append(p) p.start() for p in jobs: p.join() if verbose > 0: print("get_temp_from_raw_by_program_fullthreads: all threads are done") order=[] for ele in resultdict: order.append(ele[0]) if len(np.where(np.asarray(ele[1])==False)[0])>0: success=False times=[]#np.array(resultdict[order.index(0)][2]) images=[]#np.array(resultdict[order.index(0)][3]) for i in range(threads): images=images+resultdict[order.index(i)][3] times=times+resultdict[order.index(i)][2] del resultdict if check_range: if verbose > 0: print(datetime.datetime.now(), "get_temp_from_raw_by_program_fullthreads: checking temperaturerange") valid=True for i in range(len(images)): try: check_temperature_range(times[0], np.max(images[i])-273.15, np.min(images[i][np.nonzero(images[i])])-273.15, portnr, t_exp, cfilter) except Exception as E: print('get_temp_from_raw_by_program_fullthreads: Error! 
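# --- Illustrative sketch (not part of the original module): how the per-thread index
# --- boundaries ("intervalls") used above can be built from the number of frame
# --- timestamps (lnt) and the requested thread count.
def _example_thread_intervals(n_timestamps, threads):
    """Return slice boundaries [0, ..., n_timestamps] for distributing frames over threads."""
    interval_size = int(n_timestamps / threads)
    boundaries = [i * interval_size for i in range(threads)]
    boundaries.append(n_timestamps)
    return boundaries
# e.g. _example_thread_intervals(10, 3) gives [0, 3, 6, 10]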
', E) valid=False i=len(images) else: valid=True else: success=False times=[0] images=[0] valid=False error_images=[0] if give_ERROR: return success, np.array(times), images, valid, error_images else: return success, np.array(times), images, valid def get_nuced_raw_by_program(portnr, program, time_window=0,version=0, emi=1, T_version=2, threads=1, give_ERROR=False, testmode=False, verbose=0): """ download the raw data and applies the non uniformity correction to make the image understandable INPUT ------ portnr: integer port number of the AEF port of the camera program: string program id in the form of 'YYYYMMDD.XXX', e.g. '20181016.016' time_window: list, array, integer or float, optional, default 0 if integer or float: size of the window in seconds from t1 if list or array: start and endpoint of the window in seconds from t1 if 0 the full program will be downloaded (t1 until t6) version: integer, optional, default 0 Version of the data in the archiveDB, in case of 0 the highest version will be used emi: float, optional, default 0.8 the emissivity value for the calibration T_version: integer, default 2 set the version of the calibration method, see more on the QRT wiki threads: integer, default 1 number of parallel threads to download the images (approx. max. 2x number of cores and max. timeintervall/0.5 for best performance) give_ERROR: boolean, optional default False switch to turn on the return of error images testmode: boolean, optional, default False if True, the data will be loaded from the test archive verbose: integer, optional, default 0 feedback level (details of print messages) RESULT ------ exist: boolean True if data was found, False if not time: list a list containing the time stamp of each frame frames: list a list of frames, 2D numpy arrays valid: boolean status wheather the data can be used optional frames_error: list a list of frames, 2D numpy arrays NOTE ------ """ # get calibration data exist, background, LUT,refT, gain, offset, badpixels, t_exp, cfilter, gain_error, offset_error=get_calib_data(portnr, program=program, emissivity=emi, T_version=T_version, version=version, testmode=testmode, verbose=verbose-1) if verbose > 0: print(datetime.datetime.now(), "get_nuced_raw_by_program: raw download start") if fastDL: exist, time, frames=download_raw_images_by_program_via_png(portnr, program, time_window, version, threads, testmode=testmode, verbose=verbose-1) else: exist, time, frames=download_raw_images_by_program(portnr, program, time_window, version, testmode=testmode, verbose=verbose-1) if exist: frames=[im.swapaxes(0,1) for im in frames] else: # raise Exception("get_nuced_raw_by_program: no data found") logging.warning("get_nuced_raw_by_program: no data found") return False, [0], [0], False if verbose > 0: print(datetime.datetime.now(), "get_nuced_raw_by_program: raw download finished") FOV=get_FOV_mask(portnr) if give_ERROR: frames, frames_error=apply_NUC(frames, gain, offset, gain_error, offset_error, give_ERROR) else: frames=apply_NUC(frames, gain, offset, gain_error, offset_error, give_ERROR) for i in range(len(frames)): frames[i]=(frames[i]*FOV).astype(np.float32) if verbose > 0: print(datetime.datetime.now(), "get_nuced_raw_by_program: correcting bad pixels") frames=IR_tools.correct_images(frames, badpixels, verbose=verbose-1) if verbose > 0: print(datetime.datetime.now(), "get_nuced_raw_by_program: checking temperaturerange") valid=True if give_ERROR: return exist, time, frames, valid, frames_error else: return exist, time, frames, valid def 
get_nuced_raw_by_program_fullthreads(portnr, program, time_window=0, emi=0.8, T_version=2, version=1, threads=1, give_ERROR=False, check_range=True, testmode=False, verbose=0): """ download the raw data and applies the non uniformity correction to make the image understandable by using threads for all steps INPUT ------ portnr: integer port number of the AEF port of the camera program: string program id in the form of 'YYYYMMDD.XXX', e.g. '20181016.016' time_window: list, array, integer or float, optional, default 0 if integer or float: size of the window in seconds from t1 if list or array: start and endpoint of the window in seconds from t1 if 0 the full program will be downloaded (t1 until t6) emi: float, optional, default 0.8 the emissivity value for the calibration T_version: integer, default 2 set the version of the calibration method, see more on the QRT wiki version: integer, optional, default 0 Version of the data in the archiveDB, in case of 0 the highest version will be used threads: integer, default 1 number of parallel threads to download the images (approx. max. 2x number of cores and max. timeintervall/0.5 for best performance) give_ERROR: boolean, optional default False switch to turn on the return of error images testmode: boolean, optional, default False if True, the data will be loaded from the test archive verbose: integer, optional, default 0 feedback level (details of print messages) RESULT ------ exist: boolean True if data was found, False if not time: list a list containing the time stamp of each frame frames: list a list of frames, 2D numpy arrays valid: boolean status wheather the data can be used optional frames_error: list a list of frames, 2D numpy arrays NOTE ------ """ # prog=get_program_from_PID(program) # if prog[0]: # starttime=prog[1]['trigger']['1'][0] # stoptime=prog[1]['trigger']['6'][0] exist, t0, starttime, stoptime = get_trigger_from_PID(program, portnr, testmode, verbose=verbose-1) if not exist: print('get_nuced_raw_by_program_fullthreads: Error! 
no trigger timestamps found') return False, None, None, None if threads==1 or not fastDL: return get_nuced_raw_by_program(portnr, program, time_window=time_window, emi=emi, T_version=T_version, version=version, threads=threads, give_ERROR=give_ERROR, testmode=testmode, verbose=verbose-1) else: exist, background, LUT,refT, gain, offset, badpixels, t_exp, cfilter, gain_error, offset_error=get_calib_data(portnr, t0=t0, t1=starttime, t6=stoptime, emissivity=emi, T_version=T_version, version=version, testmode=testmode, verbose=verbose-1) FOV=get_FOV_mask(portnr) success=True OP = IR_tools.get_OP_by_time(time_ns=starttime) if verbose > 0: print('get_nuced_raw_by_program_fullthreads: prepare loading images by threads') if version == 0: version = get_latest_version(portpathdict[OP]["AEF"+str(portnr)]+"raw_DATASTREAM", t_from=starttime, testmode=testmode) if testmode: larchivepath=testarchivepath+project+"/"+portpathdict[OP]["AEF"+str(portnr)]+"raw_DATASTREAM/V"+str(version)+"/0/raw" else: larchivepath=archivepath+project+"/"+portpathdict[OP]["AEF"+str(portnr)]+"raw_DATASTREAM/V"+str(version)+"/0/raw" if type(time_window) == list: tstart = time_window[0] tstop = time_window[1] if tstop < tstart: # raise Exception("get_nuced_raw_by_program_fullthreads: t_stop before t_start") logging.warning("get_nuced_raw_by_program_fullthreads: t_stop before t_start") return False, [0], [0], False stdate = datetime.datetime.utcfromtimestamp((starttime-100)/1e9+tstart) stdate = stdate.isoformat() enddate = datetime.datetime.utcfromtimestamp((starttime)/1e9+tstop) enddate = enddate.isoformat() else: stdate = datetime.datetime.utcfromtimestamp((starttime-100)/1e9) stdate = stdate.isoformat() if time_window == 0: enddate = datetime.datetime.utcfromtimestamp(stoptime/1e9) enddate = enddate.isoformat() else: enddate = datetime.datetime.utcfromtimestamp((starttime)/1e9+time_window) enddate = enddate.isoformat() times = AKF_2.get_time_intervals(larchivepath, stdate.replace("T", " "), enddate.replace("T", " "))# lnt = len(times) tim = [] for i in range(lnt): tim.append(times[lnt-1-i][0]) intervalls = [] intervalSize=int(lnt/threads) for i in range(threads): intervalls.append(int(i*intervalSize)) intervalls.append(lnt) jobs = [] resultdict = [] for i in range(threads): if verbose > 0: print("get_nuced_raw_by_program_fullthreads: Start Thread ", i+1) # p = multiprocessing.Process(target=NUC_raw_thread, args=(portnr, tim[intervalls[i]:intervalls[i+1]], out_q, i, version, background, LUT,refT, gain, offset, gain_error, offset_error, give_ERROR, fOV, badpixels,)) p = NUC_raw_thread(larchivepath, tim[intervalls[i]:intervalls[i+1]], resultdict, i, version, background, LUT,refT, gain, offset, gain_error, offset_error, give_ERROR, FOV, badpixels, verbose=verbose-1) jobs.append(p) p.start() for p in jobs: p.join() if verbose > 0: print("get_nuced_raw_by_program_fullthreads: all threads are done") order = [] for ele in resultdict: order.append(ele[0]) if len(np.where(np.asarray(ele[1])==False)[0])>0: success = False times = []#np.array(resultdict[order.index(0)][2]) images = []#np.array(resultdict[order.index(0)][3]) for i in range(threads): images = images+resultdict[order.index(i)][3] times = times+resultdict[order.index(i)][2] del resultdict valid = True # else: # success=False # times=[0] # images=[0] # valid=False # error_images=[0] # print(larchivepath) if give_ERROR: return success, np.array(times), images, valid#, error_images else: return success, np.array(times), images, valid def apply_calib_on_raw(images, background, 
LUT,refT=28.5, gain=0, offset=0, gain_error=0, offset_error=0, fullbackground=False, give_ERROR=False, verbose=0): """ applies the calibration onto the given raw images and returns the temperature images INPUT ------ images: list a list of frames, 2D numpy arrays background: integer or 2D array the value of the background for calibration, which gets subtracted from the images LUT: list list or array containing the look up table in the form of [[DL],[temperature],[error]] refT: float, default 28.5 reference temperature in degree Celsius of the background gain: numpy array the gain image from the calibration to convert the counts into digital level offset: numpy array the offset image from the calibration to convert the counts into digital level gain_error: numpy array the gain error image from the calibration to convert the counts into digital level offset_error: numpy array the offset error image from the calibration to convert the counts into digital level fullbackground: boolean, default False switch to set the treatment of the background input give_ERROR: boolean, optional default False switch to turn on the return of error images verbose: integer, optional, default 0 feedback level (details of print messages) RESULT ------ exist: boolean True if data was found, False if not frames: list a list of frames, 2D numpy arrays, values are in Kelvin optional frames_error: list a list of frames, 2D numpy arrays NOTE ------ """ try: #images=np.array(raw,dtype=np.uint16) # del raw # images=images.swapaxes(1,2) if type(gain)!=int and type(offset)!=int: if verbose > 0: print(datetime.datetime.now(), "apply_calib_on_raw: NUCing") # eliminate bad offset and gain points offset[offset==np.inf] = 0 offset[offset==-np.inf] = 0 gain[gain==np.inf] = 1 gain[gain==-np.inf] = 1 # try to apply NUC if give_ERROR: images, error_images=apply_NUC(images, gain, offset, gain_error, offset_error, give_ERROR) else: images=apply_NUC(images, gain, offset) if verbose > 0: print(datetime.datetime.now(), "apply_calib_on_raw: background treatment") # if fullbackground: #sbackground=np.zeros(np.shape(images[0]),dtype=np.float32)+background for i in range(len(images)): images[i]=np.clip((images[i].astype(np.int32)-background),a_min=0,a_max=None).astype(np.uint16) # else: # for i in range(len(images)): # images[i]=images[i]-background # images=np.array(images.clip(min=0),dtype=np.uint16) # images=[np.round(im.clip(min=0)).astype(np.uint16) for im in images] if verbose > 0: print(datetime.datetime.now(), "apply_calib_on_raw: applying LUT") LUT=np.array([LUT[1], LUT[2]]) if give_ERROR: terror=[]#np.zeros(np.shape(images)) for i in range(len(images)): # uncertainty of temperature calibration terror=terror+[terror_from_LUT(LUT, images[i])] # error propagation of uncertainty in NUC max_image = temp_from_LUT(LUT, images[i]+error_images[i]) min_image = temp_from_LUT(LUT, images[i]-error_images[i]) terror[i] = (terror[i] + (max_image - min_image)/2).astype(np.float32) images[i]=(temp_from_LUT(LUT, images[i])).astype(np.float32) images[i]=(images[i]+(refT+273.15)).astype(np.float32) return True, images, terror else: for i in range(len(images)): images[i]=(temp_from_LUT(LUT, images[i])).astype(np.float32) images[i]=(images[i]+(refT+273.15)).astype(np.float32) return True, images except Exception as E: # raise Warning('apply_calib_on_raw: '+E) logging.warning('apply_calib_on_raw: '+str(E)) return False, [0] def apply_NUC(images, gain, offset, gain_error=0, offset_error=0, give_error=False): """ apply_NUC(images=numpy array(time,width,height),
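# --- Illustrative sketch (not part of the original module): the calibration chain of
# --- apply_calib_on_raw on one synthetic frame, i.e. NUC -> background subtraction ->
# --- LUT lookup -> absolute scale via refT. All numbers below are made up and
# --- np.interp is only a stand-in for the module's temp_from_LUT.
def _example_calibration_chain():
    raw = np.array([[1000, 1200], [1400, 1600]], dtype=np.uint16)  # synthetic raw counts
    gain, offset = 1.05, 20.0                                      # synthetic NUC terms
    background, refT = 150, 28.5                                   # synthetic background / degC
    lut_dl = np.array([0, 500, 1000, 1500, 2000])                  # digital level axis (synthetic)
    lut_dt = np.array([0.0, 50.0, 120.0, 210.0, 320.0])            # delta T above refT (synthetic)
    nuced = raw * gain + offset                                    # non-uniformity correction
    signal = np.clip(nuced - background, 0, None)                  # background subtraction
    delta_T = np.interp(signal, lut_dl, lut_dt)                    # LUT: digital level -> delta T
    return (delta_T + refT + 273.15).astype(np.float32)            # temperature in Kelvin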
gain, offset =numpy array(width,height) ) applies the non-uniformity correction onto the given raw images INPUT ------ images: list a list of frames, 2D numpy arrays gain: numpy array the gain image from the calibration to convert the counts into digital level offset: numpy array the offset image from the calibration to convert the counts into digital level gain_error: numpy array the gain error image from the calibration to convert the counts into digital level offset_error: numpy array the offset error image from the calibration to convert the counts into digital level give_error: boolean, default False switch to turn on the return of the error images RESULT ------ frames: list a list of frames, 2D numpy arrays optional frames_error: list a list of frames, 2D numpy arrays NOTE ------ """ try: if give_error: error_images = [] for i in range(len(images)): images[i]=(images[i]*gain).astype(np.float32) images[i]=(images[i]+offset).astype(np.float32) error_images.append(np.round(images[i]*gain_error + offset_error).astype(np.uint16)) return images, error_images else: for i in range(len(images)): images[i]=(images[i]*gain).astype(np.float32) images[i]=(images[i]+offset).astype(np.float32) return images except Exception as E: print("apply_NUC: Error! ", E) return 0 def get_cooling_water_temperature_by_time(time_ns): """ downloads the information of the water temperature in the cooling cycle of W7-X INPUT ------ time_ns: integer nanosecond time stamp to identify the time window RESULT ------ exist: boolean True if data was found, False if not inlet: float the water temperature of the inflow water outlet: float the water temperature of the outgoing water NOTE ------ """ starttime=int(time_ns-3e9) stoptime=int(time_ns+3e9) outlet_url="95/Ist%20Av06_ABK10_CT006" inlet_url="94/Ist%20Av06_ABK10_CT003" url_base="http://archive-webapi.ipp-hgw.mpg.de/ArchiveDB/raw/W7X/CoDaStationDesc.85/DataModuleDesc.188_DATASTREAM/" try: result_in=read_restdb(url_base+inlet_url+"/_signal.json?from="+str(starttime)+"&upto="+str(stoptime)) if result_in[0]: inlet=np.mean(np.array(result_in[2])) else: print("get_cooling_water_temperature_by_time: inlet water temperature not found in: ",result_in) inlet=0 except: print("get_cooling_water_temperature_by_time: inlet water temperature not found") inlet=0 result_in=[False, 0, 0] try: result_out=read_restdb(url_base+outlet_url+"/_signal.json?from="+str(starttime)+"&upto="+str(stoptime)) if result_out[0]: outlet=np.mean(np.array(result_out[2])) else: print("get_cooling_water_temperature_by_time: outlet water temperature not found in: ",result_out) outlet=0 except: print("get_cooling_water_temperature_by_time: outlet water temperature not found") outlet=0 result_out=[False, 0, 0] return result_in[0] and result_out[0], inlet, outlet def get_calib_data(port, program=None, t0=None, t1=None, t6=None, emissivity=0.8, T_version=2, version=0, back_emissivity=0.82, testmode=False, verbose=0): """ get_calib_data downloads the calibration data for the application onto the images INPUT ------ port: integer port number of the AEF port of the camera program: string, default None program id in the form of 'YYYYMMDD.XXX', e.g.
'20181016.016' t0: integer, default None nanoseconds timestamp of the t0 trigger, start of the program t1: integer, default None nanoseconds timestamp of the t1 trigger, start of the heating t6: integer, default None nanoseconds timestamp of the t6 trigger, end of the program emissivity: float, optional, default 0.8 the emissivity value for the calibration T_version: intger, default 2, set the calibration version, in version 1 the full background image is subtracted\n in version 2 the background value is subtracted, version 3 version: integer, optional, default 0 Version of the data in the archiveDB, in case of 0 the highest version will be used back_emissivity: float, optional, default 0.82, experimental emissivity value for the background treatment, only needed for T_version 3 testmode: boolean, optional, default False if True, the data will be loaded from the test archive verbose: integer, optional, default 0 feedback level (details of print messages) RESULT ------ exist: boolean True if data was found, False if not background: integer or numpy array background image for calibration method 1 and a value for method 2 LUT: list the look up table for the temperature calibration, form [[Digital level],[temperature],[error]] refT: float reference temperature of the background for the calibration gain: numpy array the gain image from the calibration to convert the counts into digital level offset: numpy array the offset image from the calibration to convert the counts into digital level badpixels: list list of badpixels t_exp: integer exposure time in microseconds cfilter: integer or string, only needed for port 50 filter number of the Infratec camera in port AEF50 gain_error: numpy array the gain error image from the calibration to convert the counts into digital level for the error offset_error: numpy array the offset error image from the calibration to convert the counts into digital level for the error NOTE ------ """ if t0 is None or t1 is None or t6 is None: if program is None: if t0 is not None: program = AKF_2.get_program_id(t0) elif t1 is not None: program = AKF_2.get_program_id(t1) else: # raise Exception('get_calib_data: ERROR! Missing time definition!') # print(' function requires either a program ID or the trigger times t0, t1 and t6') logging.warning('get_calib_data: ERROR! Missing time definition!') return False, 0, [], 0, [], [], [], 0, 0, [], [] # get start and end times from program ID exist, t0, t1, t6 = get_trigger_from_PID(program, port, testmode, verbose=verbose-1) if not exist: # raise Exception('get_calib_data: ERROR! program {0} not found!'.format(program)) logging.warning(f'get_calib_data: ERROR! 
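# --- Illustrative sketch (not part of the original module): unpacking the eleven
# --- return values of get_calib_data() as documented above. Port 20 is only a
# --- placeholder; the program id is the example used throughout this file.
def _example_unpack_calib_data():
    exist, background, LUT, refT, gain, offset, badpixels, t_exp, cfilter, \
        gain_error, offset_error = get_calib_data(20, program="20181016.016",
                                                  emissivity=0.8, T_version=2, verbose=1)
    if exist:
        print("exposure time [us]:", t_exp, " reference temperature [degC]:", refT)
    return exist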
program {program} not found!') return False, 0, [], 0, [], [], [], 0, 0, [], [] # determin camera in this port and OP OP = IR_tools.get_OP_by_time(time_ns=t0) exist=True if verbose > 0: print('get_calib_data: loading data for camera {1} in {0}'.format(OP, port)) Camera=portcamdict[OP]['AEF'+str(port)] if Camera.split("_")[0] == "Infratec": # cf=get_INFRATEC_filter_by_program(program, version) cf = get_INFRATEC_filter_by_times(starttime=t1, stoptime=t6, testmode=testmode) if cf[0]: cfilter = cf[2] else: cfilter = -1 # raise Warning("get_calib_data: Filter not found for INFRATEC Camera for Program "+AKF_2.get_program_id(t0)) logging.warning(f"get_calib_data: Filter not found for INFRATEC Camera for Program {AKF_2.get_program_id(t0)}") return False, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 else: cfilter=0 # get exposure time of first frame expo_DL=get_exposure_by_times(port, t1, t6, version, testmode) if expo_DL[0]: t_exp = int(expo_DL[2][0]) del expo_DL else: # raise Warning("get_calib_data: no exposure time found") logging.warning("get_calib_data: no exposure time found") return False, 0, 0, 0, 0, 0, 0, 0, cfilter, 0, 0 # get Look-Up-Table (LUT) time = int(TimeToNs([2017,9,26], [8, 0, 0, 0])) LUT_DL=download_LUT(port, time, t_exp, emissivity, cfilter, version, testmode=testmode, verbose=verbose-1) if LUT_DL[0]: LUT=LUT_DL[1] del LUT_DL else: # raise Warning("get_calib_data: no LUT found") logging.warning("get_calib_data: no LUT found") return False, 0, 0, 0, 0, 0, 0, t_exp, cfilter, 0, 0 # treat cameras with and w/o filters differently if cfilter==0 and not Camera.split("_")[0] == "Infratec": # --- Ircam Camera case --- if verbose > 0: print('get_calib_data: getting NUC arrays') NUC_DL=get_NUC_by_times(port, t0, t1, t_exp, version, testmode, verbose=verbose-1)#download_NUC_by_times(port, t0, t1, t_exp, version) if NUC_DL[0]==False: # raise Warning("get_calib_data: NUC was not found") logging.warning("get_calib_data: NUC was not found") return False, 0, LUT, 0, 0, 0, 0, t_exp, cfilter, 0, 0 # extract NUC elements gain = [] offset = [] badpixels = [] gain_error = [] offset_error = [] gain=np.array(NUC_DL[1][0]) offset=np.array(NUC_DL[1][1]) badpixels=np.array(NUC_DL[1][3], dtype=np.ubyte) # check quality of NUC elements badpixels[np.isnan(gain)] = 1 badpixels[np.isnan(offset)] = 1 gain[np.isnan(gain)] = 0 offset[np.isnan(offset)] = 0 badpixels[offset<-1e100] = 1 offset[offset<-1e100] = 0 # catch cases of error frames being the value "0" if NUC_DL[1][4] is 0: gain_error = np.zeros(np.shape(gain), dtype=np.int8) else: gain_error = np.array(NUC_DL[1][4]) badpixels[np.isnan(gain_error)] = 1 gain_error[np.isnan(gain_error)] = np.nanmax(gain_error)+1 if NUC_DL[1][5] is 0: offset_error = np.zeros(np.shape(offset), dtype=np.int8) else: offset_error = np.array(NUC_DL[1][5]) badpixels[np.isnan(offset_error)] = 1 offset_error[np.isnan(offset_error)] = np.nanmax(offset_error)+1 # catch special case for AEF11 and AEF21 in OP1.2a if ((port == 11) or (port == 21)) and OP == "OP1.2a": exist, time, frames=download_raw_images_by_times(port, t1, int(t1+0.02*1e9), version, testmode=testmode, verbose=verbose-1) if exist: # frames=[im.swapaxes(0,1) for im in frames]#somehow the archive gives it now already swapped back (09.07.2018) bim = apply_NUC([frames[0]], gain, offset) background = get_average_background_recangle(port, bim[0]) else: # raise Warning("get_calib_data: cannot find the first frame of the discharge, reconstruction failed") logging.warning("get_calib_data: cannot find the first frame of the discharge, 
reconstruction failed") return False, 0, LUT, 0, gain, offset, badpixels, t_exp, cfilter, gain_error, offset_error else: # --- Infratec Camera case --- gain = 0 offset = 0 badpixels = 0 gain_error = 0 offset_error = 0 # get background if OP == "OP1.2a": back_DL = download_background_by_times(port, t0, t1, t_exp, cfilter, version, testmode=testmode) elif OP == "OP1.2b": back_DL = get_NUCed_background_by_times(port, t0, t1, t_exp, cfilter, gain, offset, version, testmode, verbose=verbose-1) else: # raise Exception("get_calib_data: unknown Operation Phase or calibration method not implemented for this campaign") logging.warning("get_calib_data: unknown Operation Phase or calibration method not implemented for this campaign") return False, 0, [], 0, [], [], [], 0, 0, [], [] if not back_DL[0]: # raise Warning("get_calib_data: no background image found") logging.warning("get_calib_data: no background image found") return False, 0, [], 0, [], [], [], 0, 0, [], [] # return False, 0, LUT, 0, gain, offset, badpixels, t_exp, cfilter, gain_error, offset_error background = back_DL[2] # apply different versions of temperature calibration if T_version == 1: if verbose > 0: print('get_calib_data: use temperature calibration version 1') backtime = back_DL[1] backtime = backtime.tolist() divertorpart = "all" TCT_dl = get_average_divertor_TC_for_camtemp(port, int(backtime-2e9), int(backtime+2e9),divertorpart) if TCT_dl[0]: refT = np.average(TCT_dl[2]) else: TCT_dl = get_average_divertor_TC_for_camtemp(port, int(backtime-6e9), int(backtime+6e9),divertorpart) if TCT_dl[0]: refT = np.average(TCT_dl[2]) else: # raise Exception("get_calib_data: Unable to find thermocouple data") logging.warning("get_calib_data: Unable to find thermocouple data") return elif T_version == 2: if verbose > 0: print('get_calib_data: use temperature calibration version 2') frame = background.copy() background = get_average_background_recangle(port, background) gotit, waterin, waterout = get_cooling_water_temperature_by_time(t1) if gotit: refT = waterout else: refT = 28.5 elif T_version == 3: if verbose > 0: print('get_calib_data: use temperature calibration version 3, TEST CASE!!!') frame = background.copy() background = get_average_background_recangle(port, background) refT = 28.5 # read LUT V3 (compensated BG signal and photons flux of BG) LUT_DL_wall = read_LUT_from_file(port, time, t_exp, 0.45, cfilter, verbose=verbose-1) if LUT_DL_wall[0]: if verbose > 0: print('get_calib_data: got the wall LUT') LUT_wall = LUT_DL_wall[1] index = IR_tools.find_nearest(LUT_wall[1],refT) background = background-LUT_wall[0][index] refT = 0 del LUT_DL_wall, LUT_wall LUT_DL = read_LUT_from_file(port, time, t_exp, emissivity, cfilter, verbose=verbose-1) if LUT_DL[0]: if verbose > 0: print('get_calib_data: got the V3 LUT') LUT = LUT_DL[1] del LUT_DL # back_off=estimate_offset(port, program) # background=(background-back_off)/(back_emissivity)+back_off else: # raise Warning("get_calib_data: Unknown temperature calibration method") logging.warning("get_calib_data: Unknown temperature calibration method") return False, 0, [], 0, [], [], [], 0, 0, [], [] # return False, background, LUT, 0, gain, offset, badpixels, t_exp, cfilter, gain_error, offset_error # get bad pixels if np.max(badpixels)==0: if Camera.split("_")[0] == "Infratec": badpixels=find_badpixels(port, frame-background, offset, niterations=10, tolerance=10, plot_it=False, verbose=verbose-1)#find_badpixels(port, gain, offset) else: if verbose > 0: print(datetime.datetime.now(), "get_calib_data: 
Scanning for bad pixel") # initial list from config files init_bp_list = IR_tools.read_bad_pixels_from_file(port, time_ns=t1) # find more bad pixel badpixels = find_badpixels(port, gain, offset, init_bp_list=init_bp_list, niterations=10, tolerance=10, verbose=verbose-1) if verbose > 0: print('get_calib_data: NUC arrays ready') return exist, background, LUT,refT, gain, offset, badpixels, t_exp, cfilter, gain_error, offset_error def find_badpixels(port, gain, offset, init_bp_list=None, niterations=3, tolerance=10, plot_it=False, verbose=0): """ find the bad pixels by searching outliers in the gain and offset image INPUT ------ port: integer port number of the AEF port of the camera gain: numpy array the gain image from the calibration to convert the counts into digital level offset: numpy array the offset image from the calibration to convert the counts into digital level init_bp_list: list, default None initial bad pixel list niterations: intger, default 3 number of interation in the process of finding bad pixels tolerance: integer, default 10 tolerance value for the idenfication of outlier in the images to identify the bad pixels plot_it: boolean, default False, if True the result of the bad pixel finding will be plotted verbose: integer, optional, default 0 feedback level (details of print messages) RESULT ------ badpixels: numpy array list of the bad pixels NOTE ------ """ badpixels = np.zeros(np.shape(gain)) # certain_bads=np.zeros(np.shape(gain)) # gainmax=12#100 # badpixels+=(gain>gainmax) # badpixels+=(gain<0.1) FOV=get_FOV_mask(port) # take initial bad pixels into account if init_bp_list is not None: if verbose > 0: print("find_badpixels: use {0} initial bad pixels from file".format(len(init_bp_list))) for pix in init_bp_list: try: badpixels[pix] = 1 except Exception as E: raise Warning('find_badpixels: '+E) gain = IR_tools.restore_bad_pixels(gain*FOV, init_bp_list, verbose=verbose-1) last_number=0 finished=False n=0 while (n<=niterations and not finished):#len(badlist)>=last_number): badlist=IR_tools.find_outlier_pixels(gain*FOV, plot_it=plot_it, tolerance=tolerance) gain=IR_tools.restore_bad_pixels(gain*FOV, badlist, verbose=verbose-1) if verbose > 0: print("find_badpixels: number of found bad pixels: ", len(badlist)) n+=1 if len(badlist)>=last_number and n>2: finished=True last_number=len(badlist) for pix in badlist: badpixels[pix]=1 # gain*(1-certain_bads) these pixels are not certainly bad # possi_goodgain=gain*(1-certain_bads) # badpixels+=(gain>(np.mean(possi_goodgain)+np.std(possi_goodgain))) # badpixels+=(gain<(np.mean(possi_goodgain)-np.std(possi_goodgain))) badpixels+=badpixels*((1-FOV)*2) return np.array(badpixels,dtype=np.ubyte) def make_FOV_mask(port): """ creates the field of view image for the requested port and saves it locally INPUT ------ port: integer port number of the AEF port of the camera RESULT ------ no return NOTE ------ """ points=valid_FOV_circle[port] # Sekante errechnen für Kreis, Gerade ax+by=c, sonderfall: y=c, a=0, b=1 # Kreis definiert durch (x-x0)²+(y-y0)²=r² # loesung: d=c-a*x0-b*y0=c-y0 # x1,2=x0+(ad+-b*sqrt(r²(a²+b²)-d²))/(a²+b²)=x0+-sqrt(r²-d²) # y1,2=y0+(bd-+a*sqrt(r²(a²+b²)-d²))/(a²+b²)=y0+d=c=y y0=points[1] x0=points[0] r1=points[2] if port==50: exposure=50 cfilter=1 else: exposure=9 cfilter=0 da, time, back=download_background_by_program(port, "20171109.045", exposure, camera_filter=cfilter) # fig = plt.figure() # plt.imshow(back, vmin=np.average(back)-200, vmax=np.average(back)+500) # inner_circle = mlt.patches.Circle((x0,y0), r1, color = 
'r', fill = False) # ax = fig.gca() # ax.add_artist(inner_circle) if da: FOV=np.ones(np.shape(back)) for y in range(len(back)): xs1=x0-np.sqrt(r1**2-(y-y0)**2) xs2=x0+np.sqrt(r1**2-(y-y0)**2) for xi in range(0, int(xs1)): FOV[y][xi]=0 for xi in range(int(xs2), np.shape(back)[1]): FOV[y][xi]=0 plt.figure() plt.imshow(FOV*back, vmin=np.average(back)-500, vmax=np.average(back)+1500) if config_path == "": plt.imsave("AEF"+str(port)+"_FOV.png", FOV) else: plt.imsave(config_path+portcamdict['OP1.2a']['AEF'+str(port)]+"\\"+"AEF"+str(port)+"_FOV.png", FOV) else: raise Exception("make_FOV_mask: cannot find the background frame to create the FOV file", exposure, port) def get_FOV_mask(port): """ loads the locally saved field of view images for the requested port, see also make_FOV_mask INPUT ------ port: integer port number of the AEF port of the camera RESULT ------ FOV: numpy array the image showing the field of view with 1 and 0 for the region of the image which is outside the field of view NOTE ------ """ try: if config_path=="": FOV=plt.imread("AEF"+str(port)+"_FOV.png")[:,:, 0] else: FOV=plt.imread(config_path+portcamdict['OP1.2a']['AEF'+str(port)]+"\\"+"AEF"+str(port)+"_FOV.png")[:,:, 0] except: make_FOV_mask(port) if config_path=="": FOV=plt.imread("AEF"+str(port)+"_FOV.png")[:,:, 0] else: FOV=plt.imread(config_path+portcamdict['OP1.2a']['AEF'+str(port)]+"\\"+"AEF"+str(port)+"_FOV.png")[:,:, 0] FOV=(FOV>0.1)*1 return FOV def get_background(port, image): """ extracts from a given background image the minimum in the field of view INPUT ------ port: integer port number of the AEF port of the camera image: numpy array 2D numpy arrays RESULT ------ background: integer (or float if image contains float values) returns the background value, extracted from the given image NOTE ------ """ try: # points=valid_background_rectangle[port] FOV=get_FOV_mask(port) dummy=FOV*image#image[points[1]:points[3], points[0]:points[2]] return np.min(dummy[np.nonzero(dummy)]) except Exception as E: print('get_background: Error! ', E) return 0 def get_average_background_recangle(port, image): """ extracts from a given background image the minimum in a defined rectangular region INPUT ------ port: integer port number of the AEF port of the camera image: numpy array 2D numpy arrays RESULT ------ background: integer (or float if image contains float values) returns the background value, extracted from the given image NOTE ------ """ try: points = valid_background_rectangle[port] # FOV=get_FOV_mask(port) dummy = image[points[1]:points[3], points[0]:points[2]]#FOV*image# return np.min(dummy[np.nonzero(dummy)]) except Exception as E: print('get_average_background_recangle: Error! ', E) return 0 def estimate_offset(port, program, plot_it=False, verbose=0): """ estimates the offset value based on the background values for different exposure times INPUT ------ port: integer port number of the AEF port of the camera program: string program id in the form of 'YYYYMMDD.XXX', e.g. 
'20181016.016' plot_it: boolean, default False if True the result of the bad pixel finding will be plotted verbose: integer, optional, default 0 feedback level (details of print messages) RESULT ------ Offset: integer estimated offset value NOTE ------ """ explist = [1, 2, 3, 4, 5, 6, 7, 8, 9] meanlist = [] exlist = [] FOV = get_FOV_mask(port) for exposure in explist: exist, time, frame = download_background_by_program(port, program, exposure) if exist: meanlist.append(np.mean(frame[np.where(FOV == 1)])) exlist.append(exposure) if len(meanlist)>=2: # print(exlist,meanlist) from scipy.stats import linregress slope, intercept, r_value, p_value, std_err = linregress(exlist,meanlist) if plot_it: plt.figure() plt.plot(exlist,meanlist, 'b*') plt.plot(exlist, np.array(exlist)*slope+intercept, 'k-') plt.show() if verbose > 0: print("estimate_offset: slope, intercept, r_value, p_value, std_err") print(" ", slope, intercept, r_value, p_value, std_err) return intercept else: return -1 def download_heatflux_by_program(port, program, time_window=0, testmode=False, version=0, verbose=0, givealpha=False,request=True):#threads=1, """ download the heatflux for a given program and port, if the data is not available a request is made for the automated evaluation of the heat flux\n if the data is not available or already requested a message will be returned in the console\n For extended use of the heat flux data see also the following functions:\n download_heatflux_mapping_reference --> for a better interpretation of the data with respect to the used coordinate system download_heatflux_scene_model_reference --> for remapping of the data into W7-X coordinates and pixel coordiantes extract_heatflux_profile_from_DL --> to extract a profile out of the data returned by this funciton INPUT ------ port: string or integer the camera port, for which the data is requested program: string program id in the form of 'YYYYMMDD.XXX', e.g. '20181016.016' time_window: integer, float or list/numpy array of interger/float, time window for the heat flux, single value: t1 until so many seconds, two values, t1+ first value until t1+ second value testmode: boolean, default False True to load data from testarchive version: integer, default 0 version number for 0 the highest version will be used verbose: integer, optional, default 0 feedback level (details of print messages) givealpha: boolean, default False switch to turn on or off the return of the alpha image from the THEODOR calculation, it wil be returned as first image request: boolean, default True switch to turn on or off the request of data if the data is not available in the database RESULT ------ (returns downlad_heatflux_by_times) exist: boolean True if data was found, False if not time: list a list containing the time stamp of each frame frames: list a list of frames, 2D numpy arrays, first frame is the alpha frame if it is requested NOTE ------ """ # prog=get_program_from_PID(program) # if prog[0]: # t1=prog[1]['trigger']['1'][0] # t6=prog[1]['trigger']['6'][0] exist, _, t1, t6 = get_trigger_from_PID(program, port, testmode, verbose=verbose-1) if not exist: print("download_heatflux_by_program: Error! 
program not found") return False, 0,-1 if time_window==0: tstart=t1 tend=t6 elif type(time_window)==list or type(time_window)==np.ndarray: try: if len(time_window)>1: tstart=int(t1+time_window[0]*1e9) tend=int(t1+time_window[1]*1e9) else: tstart=int(t1) tend=int(t1+time_window[0]*1e9) except Exception as E: # raise Exception("download_heatflux_by_program: an Exception occured interpretating the time_window: ", E) logging.warning("download_heatflux_by_program: an Exception occured interpretating the time_window: "+E) return False, [0], [0] elif type(time_window)==int or type(time_window)==float: tstart=int(t1) tend=int(t1+time_window*1e9) else: # raise Exception("download_heatflux_by_program: given time_window type is not supported") logging.warning("download_heatflux_by_program: given time_window type is not supported") return False, [0], [0] if givealpha: tstart=tstart-1.1e9 return download_heatflux_by_times(port=port, tstart=tstart, tend=tend, time_window=0, testmode=testmode, version=version, verbose=verbose, request=request) def download_heatflux_by_times(port, tstart, tend, time_window=0, testmode=False, version=0, verbose=0,request=True,request_ALL=False):#threads=1, ,moffset=0 """ download the heatflux for a given start, endtime and port, if the data is not available a request is made for the automated evaluation of the heat flux\n if the data is not available or already requested a message will be returned in the console INPUT ------ port: string or integer the camera port, for which the data is requested tstart: integer time in ns where the search should start tend: integer time in sn where the search should end time_window: integer, float or list/numpy array of interger/float, time window for the heat flux, single value: t1 until so many seconds, two values, t1+ first value until t1+ second value testmode: boolean, default False True to load data from testarchive version: integer, default 0 version number for 0 the highest version will be used verbose: integer, optional, default 0 feedback level (details of print messages) request: boolean, default True switch to turn on or off the request of data if the data is not available in the database request_ALL: boolean, optional, default False switch to turn on the request of data for all AEF ports RESULT ------ exist: boolean True if data was found, False if not time: list a list containing the time stamp of each frame, time is in nanoseconds UTC frames: list a list of frames, 2D numpy arrays, values in the images are in W/m2 NOTE ------ """ if testmode: base=testarchivepath else: base=archivepath OP = IR_tools.get_OP_by_time(time_ns=tstart) if type(port) is str: try: camera=portcamdict[OP][port] if camera.split("_")[0]=='FLIR': FLIR=True else: FLIR=False port=int(port.split("AEF")[1]) goon=True except Exception as E: # raise Exception("download_heatflux_by_times: unknown Port!"+E) logging.warning("download_heatflux_by_times: unknown Port!"+E) return False, [0], [0] elif isinstance(port, (int, np.integer, float, np.float)): FLIR=False goon=True port=int(port) else: goon=False if not goon: raise Exception("download_heatflux_by_times: the given port is neither a number or a valid String!") else: if FLIR: if verbose > 0: print("download_heatflux_by_times: FLIR heatflux is at the moment unsupported!") return False, 0,-1 else:## Okay QRT is requested. 
Maybe there is data available, maybe not, lets find out if version == 0: version = get_latest_version(portpathdict[OP]["AEF"+str(port)]+"heatflux_DATASTREAM", project=project_ana, testmode=testmode, t_from=tstart, t_to=tend) larchivepath=base+project_ana+"/"+portpathdict[OP]["AEF"+str(port)]+"heatflux_DATASTREAM/V"+str(version)+"/0/heatflux" ### test for the testsample### # if version == 0: # version = get_latest_version("QRT_IRCAM_Test/AEF"+str(port)+"_heatflux_Test_2_DATASTREAM", project=project_ana, testmode=testmode) # larchivepath=base+project_ana+"/QRT_IRCAM_Test/AEF"+str(port)+"_heatflux_Test_2_DATASTREAM/V"+str(version)+"/0/heatflux" ### end of testsample modifikation### if verbose > 0: print(datetime.datetime.now(), "download_heatflux_by_times: heat flux download started") if False: ## does not work, the png pictures are interpretated as colors, values are lost threads = 1 exist, time, frames=download_images_by_time_via_png(larchivepath, starttime=tstart, stoptime=tend, time_window=time_window, version=version, threads=threads, verbose=verbose-1) else: exist, time, frames=download_images_by_times(larchivepath, starttime=tstart, stoptime=tend, verbose=verbose-1) if exist==False: now=datetime.datetime.now() if verbose > 0: print(now, "download_heatflux_by_times: heat flux data is not available") if request: try: programid=AKF_2.get_program_id(tstart) except Exception as E: if verbose > 0: print('download_heatflux_by_times: Error! ', E) programid=str(tstart) try: cam_progs=IR_tools.get_work_list(heatflux_requestlist_path, typ='load') except: cam_progs=[] try: cam_progs_ig,reasons=IR_tools.get_work_list(heatflux_requestlist_path, typ='ignore') except: cam_progs_ig=[] cam_p = cam_progs + cam_progs_ig if (programid, "AEF"+str(port)) not in cam_p and (programid, "ALL") not in cam_p: f=open(heatflux_requestlist_path+"Auto_q_requests.txt", 'a') if request_ALL: f.write(programid+"\tALL\n") else: f.write(programid+"\tAEF"+str(port)+"\n") f.close() if verbose > 0: print("download_heatflux_by_times: heat flux calculation request logged for automatic processing (within ca. 1 day)") elif verbose > 0: if (programid, 'AEF'+str(port)) in cam_progs: print("download_heatflux_by_times: request exist already") else: pid=cam_progs_ig.index((programid, 'AEF'+str(port))) print("download_heatflux_by_times: request ignored, data not available, reason: {0}".format(reasons[pid])) return exist, time, frames def download_heatflux_mapping_reference(timepoint=None, version=0, testmode=False, get_thickness=False, verbose=0): """ returns the mapping informations for the heatflux to relate each pixel in the heat flux image to different additional parameters. 
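# --- Illustrative sketch (not part of the original module): combining the mapping
# --- reference documented below with a heat flux image to select all pixels of one
# --- divertor finger profile; 1605 is the example fingerID notation used in this file.
def _example_select_finger_pixels(heatflux_image, finger_id=1605):
    exist, mappings = download_heatflux_mapping_reference()
    if not exist:
        return None
    finger_array = mappings['Finger_ID'][0]          # first tuple element is the ID array
    return heatflux_image[np.where(finger_array == finger_id)]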
e.g., X and Y coordiante of the divertor mapping INPUT ------ timepoint: integer, optional, default None set the timepoint in nanoseconds where/when to search for the mapping in the database version: integer, optional, default 0 Version of the data in the archiveDB, in case of 0 the highest version will be used testmode: boolean, optional, default False if True, the data will be loaded from the test archive get_thickness: boolean, optional, default False turns on the return of the thickness verbose: integer, optional, default 0 feedback level (details of print messages) RESULT ------ exist: boolean True if data was found, False if not mappings: dictionary the mapping informations containing: surface coordinate s, divertor coordinate system X,Y, Finger coordiante system X, Y, Finger Id, Thickness, target, Target_Module\n all coordinates in meter NOTE ------ """ if timepoint == None: timepoint = int(TimeToNs([2017,9,26], [8, 0, 0, 0])) if testmode: base = testarchivepath else: base = archivepath # OP = IR_tools.get_OP_by_time(time_ns=timepoint) if version == 0: version = get_latest_version("QRT_IRCAM/Mapping_reference_DATASTREAM", project=project_ana, testmode=testmode, t_from=timepoint) larchivepath = base+project_ana+"/"+"QRT_IRCAM/Mapping_reference_DATASTREAM/V"+str(version)+"/0/reference" ### test for the testsample### # if version == 0: # version = get_latest_version("QRT_IRCAM_Test/Mapping_reference_Test_1_PARLOG", project=project_ana, testmode=testmode) # larchivepath=base+project_ana+"/QRT_IRCAM_Test/Mapping_reference_Test_1_DATASTREAM/V"+str(version)+"/0/reference" ### end of testsample ### exist, time, frames = download_images_by_times(larchivepath, starttime=timepoint, stoptime=int(timepoint+1e9), verbose=verbose-1) mappings={} if exist: mappings['s']=frames[0] mappings['X']=frames[1] mappings['Y']=frames[2] mappings['Finger_X']=frames[3] mappings['Finger_Y']=frames[4] mappings['Finger_ID']=(frames[5], "legend: first three digits are fingernumber, starting @0, last two are the profile number") mappings['Target']=(frames[6],{1:"TM1-4h",2:"TM5-6h",3:"TM7-9h",4:"TM1-3v"}) # derive thickness of profile line (for integration) if get_thickness: profile_no = mappings['Finger_ID'][0] profile_ID = np.unique(profile_no) profile_ID = profile_ID[:np.where(np.isnan(profile_ID))[0][0]] profile_ID = profile_ID.astype(np.int16) d = np.zeros(np.shape(mappings['s'])) for i_profile in profile_ID: x_f = mappings['Finger_X'][np.where(profile_no==i_profile)] y_f = mappings['Finger_Y'][np.where(profile_no==i_profile)] if i_profile-1 not in profile_ID: # i_profile is first profile of finger # # thickness of this profile at each point [x_f,y_f] is equal to # the distance to the next line from [x1, y1] to [x2, y2] # https://en.wikipedia.org/wiki/Distance_from_a_point_to_a_line x_f_next = mappings['Finger_X'][np.where(profile_no==i_profile+1)] x1 = x_f_next[0] x2 = x_f_next[-1] y_f_next = mappings['Finger_Y'][np.where(profile_no==i_profile+1)] y1 = y_f_next[0] y2 = y_f_next[-1] d[np.where(profile_no==i_profile)] = \ abs((y2-y1)*x_f - (x2-x1)*y_f + x2*y1 - y2*x1) / np.sqrt((y2-y1)**2 + (x2-x1)**2) elif i_profile+1 not in profile_ID: # i_profile is last profile of finger # # thickness of this profile at each point [x_f,y_f] is equal to # the distance to the previous line from [x1, y1] to [x2, y2] # https://en.wikipedia.org/wiki/Distance_from_a_point_to_a_line x_f_prev = mappings['Finger_X'][np.where(profile_no==i_profile-1)] x1 = x_f_prev[0] x2 = x_f_prev[-1] y_f_prev = 
mappings['Finger_Y'][np.where(profile_no==i_profile-1)] y1 = y_f_prev[0] y2 = y_f_prev[-1] d[np.where(profile_no==i_profile)] = \ abs((y2-y1)*x_f - (x2-x1)*y_f + x2*y1 - y2*x1) / np.sqrt((y2-y1)**2 + (x2-x1)**2) else: # a previous and next profile line exists on this finger # # thickness of this profile at each point [x_f,y_f] is the sum # of half the distance to the previous line from [x1_p, y1_p] to [x2_p, y2_p] # and half the distance to the next line from [x1_n, y1_n] to [x2_n, y2_n] # https://en.wikipedia.org/wiki/Distance_from_a_point_to_a_line x_f_prev = mappings['Finger_X'][np.where(profile_no==i_profile-1)] x1_p = x_f_prev[0] x2_p = x_f_prev[-1] y_f_prev = mappings['Finger_Y'][np.where(profile_no==i_profile-1)] y1_p = y_f_prev[0] y2_p = y_f_prev[-1] x_f_next = mappings['Finger_X'][np.where(profile_no==i_profile+1)] x1_n = x_f_next[0] x2_n = x_f_next[-1] y_f_next = mappings['Finger_Y'][np.where(profile_no==i_profile+1)] y1_n = y_f_next[0] y2_n = y_f_next[-1] d[np.where(profile_no==i_profile)] = \ abs((y2_p-y1_p)*x_f - (x2_p-x1_p)*y_f + x2_p*y1_p - y2_p*x1_p) / np.sqrt((y2_p-y1_p)**2 + (x2_p-x1_p)**2) /2 + \ abs((y2_n-y1_n)*x_f - (x2_n-x1_n)*y_f + x2_n*y1_n - y2_n*x1_n) / np.sqrt((y2_n-y1_n)**2 + (x2_n-x1_n)**2) /2 mappings['Thickness'] = d if verbose > 0: print("download_heatflux_mapping_reference: 'thickness' of profiles added to mapping reference dictionary") # shift vertical target in X and Y if it is centered on top of horizontal target index_ver = np.where(mappings['Target'][0]==4) X = mappings['X'] Y = mappings['Y'] if abs(np.nanmean(mappings['X'][index_ver])) < 0.015 and abs(np.nanmean(mappings['Y'][index_ver])) < 0.01: x1 = X[index_ver] y1 = Y[index_ver] X[index_ver] = np.cos(22.5/180*np.pi)*x1 + np.sin(22.5/180*np.pi)*y1 - 0.9 Y[index_ver] = -np.sin(22.5/180*np.pi)*x1 + np.cos(22.5/180*np.pi)*y1 + 0.7 if verbose > 0: print("download_heatflux_mapping_reference: vertical target moved and rotated in 'X' and 'Y' for better plotting") # define target modules array TM = (np.copy(mappings['Finger_ID'][0]),{1:'TM1h', 2:'TM2h', 3:'TM3h', 4:'TM4h', 5:'TM5h', 6:'TM6h', 7:'TM7h', 8:'TM8h', 9:'TM9h', 10:'TM1v', 11:'TM2v', 12:'TM3v'}) Finger_ID = np.copy(mappings['Finger_ID'][0]) Finger_ID[np.isnan(Finger_ID)] = -1 TM[0][np.logical_and(Finger_ID >= 0, Finger_ID < 700)] = 1 TM[0][np.logical_and(Finger_ID >= 700, Finger_ID < 1400)] = 2 TM[0][np.logical_and(Finger_ID >= 1400, Finger_ID < 2200)] = 3 TM[0][np.logical_and(Finger_ID >= 2200, Finger_ID < 3000)] = 4 TM[0][np.logical_and(Finger_ID >= 3000, Finger_ID < 5400)] = 5 TM[0][np.logical_and(Finger_ID >= 5400, Finger_ID < 7800)] = 6 TM[0][np.logical_and(Finger_ID >= 7800, Finger_ID < 8400)] = 7 TM[0][np.logical_and(Finger_ID >= 8400, Finger_ID < 9600)] = 8 TM[0][np.logical_and(Finger_ID >= 9600, Finger_ID < 10800)] = 9 TM[0][np.logical_and(Finger_ID >= 10800, Finger_ID < 11800)] = 10 TM[0][np.logical_and(Finger_ID >= 11800, Finger_ID < 12800)] = 11 TM[0][np.logical_and(Finger_ID >= 12800, Finger_ID < 13700)] = 12 TM[0][TM[0]==-1] = np.nan mappings['Target_Module'] = TM return exist,mappings def download_heatflux_scene_model_reference(port, timepoint=None, program=None, version=0, testmode=False, verbose=0): """ returns the mapping informations based on the scene model for the heatflux to relate each pixel in the heat flux image to different additional parameters. 
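# --- Illustrative sketch (not part of the original module): the point-to-line distance
# --- used above to derive the profile 'Thickness',
# --- d = |(y2-y1)*x - (x2-x1)*y + x2*y1 - y2*x1| / sqrt((y2-y1)**2 + (x2-x1)**2).
def _example_point_to_line_distance(x, y, x1, y1, x2, y2):
    """Distance of point(s) (x, y) from the line through (x1, y1) and (x2, y2)."""
    return (abs((y2 - y1) * x - (x2 - x1) * y + x2 * y1 - y2 * x1)
            / np.sqrt((y2 - y1) ** 2 + (x2 - x1) ** 2))
# e.g. the distance of (0, 1) from the line through (0, 0) and (1, 0) is 1.0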
e.g., X and Y coordiante in the original image or the CAD geometry in X,Y,Z INPUT ------ port: integer or string port of the AEF port of the camera timepoint: integer, optional, default None set the timepoint in nanoseconds where/when to search for the mapping in the database version: integer, optional, default 0 Version of the data in the archiveDB, in case of 0 the highest version will be used testmode: boolean, optional, default False if True, the data will be loaded from the test archive get_thickness: boolean, optional, default False turns on the return of the thickness verbose: integer, optional, default 0 feedback level (details of print messages) RESULT ------ exist: boolean True if data was found, False if not mappings: dictionary the mapping informations containing: most likely Pixel coordiante X,Y, CAD X,Y,Z and the component id as PFC, \n all coordinates in meter NOTE ------ """ if type(port) == int or type(port) == float: portnam = "AEF"+str(port) elif type(port) == str: portnam = port if timepoint == None and program == None: timepoint = int(TimeToNs([2017,8,28], [8, 0, 0, 0])) elif timepoint == None: # prog=get_program_from_PID(program) # if prog[0]: # timepoint=prog[1]['trigger']['1'][0] # else: exist, _, timepoint, _ = get_trigger_from_PID(program, port, testmode, verbose=verbose-1) if not exist: raise Exception("download_heatflux_scene_model_reference: Error! Program "+program+" not found!") if testmode: base=testarchivepath else: base=archivepath # OP = IR_tools.get_OP_by_time(time_ns=timepoint) if version == 0: version = get_latest_version("QRT_IRCAM/"+str(portnam)+"_SceneModel_reference_PARLOG", project=project_ana, testmode=testmode, t_from=timepoint) larchivepath=base+project_ana+"/"+"QRT_IRCAM/"+str(portnam)+"_SceneModel_reference_DATASTREAM/V"+str(version)+"/0/scene%20model%20reference" larchivepath_par=base+project_ana+"/QRT_IRCAM/"+str(portnam)+"_SceneModel_reference_PARLOG/V"+str(version)+"/parms/header/Pixel_X" ### test for the testsample### #============================================================================== # if version == 0: # version = get_latest_version("QRT_IRCAM_Test/"+str(portnam)+"_SceneModel_reference_PARLOG", project=project_ana, testmode=testmode) # larchivepath=base+project_ana+"/QRT_IRCAM_Test/"+str(portnam)+"_SceneModel_reference_DATASTREAM/V"+str(version)+"/0/scene%20model%20reference" # larchivepath_par=base+project_ana+"/QRT_IRCAM_Test/"+str(portnam)+"_SceneModel_reference_PARLOG/V"+str(version)+"/parms/header/Pixel_X" #============================================================================== ### end of testsample ### try: res = urllib.request.urlopen(larchivepath_par+"/_signal.json?from="+str(timepoint)+"&upto="+str(timepoint)) signal_list = json.loads(res.read().decode('utf-8')) res.close() goon=True except urllib.error.URLError as e: print('download_heatflux_scene_model_reference: Error! ', e, larchivepath_par+"/_signal.json?from="+str(timepoint)+"&upto="+str(timepoint)) goon=False except Exception as E: print('download_heatflux_scene_model_reference: Error! 
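# --- Illustrative sketch (not part of the original module): fetching the scene model
# --- reference documented above; "AEF20" and the program id are placeholders.
def _example_scene_model_reference():
    exist, mappings = download_heatflux_scene_model_reference("AEF20", program="20181016.016")
    if exist:
        print("scene model keys:", sorted(mappings.keys()))  # Pixel_X/Y, CAD_X/Y/Z, PFC
    return exist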
', E) goon=False if goon: timepoint=signal_list['values'][0] exist, time, frames = download_images_by_times(larchivepath, starttime=timepoint-10, stoptime=int(timepoint+1e8), verbose=verbose-1) del time mappings={} if exist: mappings['Pixel_X']=frames[0] mappings['Pixel_Y']=frames[1] mappings['CAD_X']=frames[2] mappings['CAD_Y']=frames[3] mappings['CAD_Z']=frames[4] mappings['PFC']=(frames[5], "legend: https://wikis.ipp-hgw.mpg.de/W7X/images/9/9c/PFC2.pdf") return exist,mappings else: return False,{} def give_finger_ID(profile, finger=None, verbose=0): """ returns for a given profile (and finger) information the finger ID which is internally used in the mappings INPUT ------ profile: string, integer or float, string: "TM3h_5_5" or "1605" or "16.05" or "5", similar for float and integer. single number only if the finger is given! finger: string or integer, optional, default None string "TM3h_5" or "16" or 16 or 1600 or None verbose: integer, optional, default 0 feedback level (details of print messages) RESULT ------ success: boolean gives the information whether the idenfication of the fingerID went well or not fingerID: integer the finger idendifikation in the form of XXXYY with XXX the fingernumber *100 and YY the profile number, starting with 0 NOTE ------ """ ### offsets={ "tm1h":0, "tm2h":6, "tm3h":13, "tm4h":21, "tm5h":29, "tm6h":53, "tm7h":77, "tm8h":83, "tm9h":95, "tm1v":107, "tm2v":117, "tm3v":127} ### go throw the different cases, case: only one if given fingerID = -1 if finger == None:##combinations where only profile is given if type(profile) == int: fingerID = profile success = True elif type(profile) == float: fingerID=int(profile*100) success = True elif type(profile) == str: profile=profile.lower() if "." in profile: fingerID = int(float(profile)*100) success = True elif "tm" in profile: ## special case with TM have to be included prof = profile.split("_") offset = offsets[prof[0]] fingerID = (offset+int(prof[1]))*100+int(prof[2]) success = True else: fingerID = int(profile) success = True else: success = False if verbose > 0: print("give_finger_ID: type of profile is not supported!") else:#cases with both are given if type(finger) == str: finger = finger.lower() if "tm" in finger: fing = finger.split("_") offset = offsets[fing[0]] finger = offset+int(fing[1]) else: finger = int(finger) #finger is now an int if finger%100 == 0: #check whether the finger format is 1600 or equal to it, reduce it to 16 or equal finger = int(finger/100) if type(profile) == int and type(finger) == int:#simple case if finger < 138 and profile < 18: success = True fingerID = finger*100+profile else: success = False if verbose > 0: print("give_finger_ID: given finger or profile is out of valid range! 
(finger 0-137, profiles 0-17)") ### combinations left: str-str, float-str, float-int, str-int, int-str elif type(profile) == str or type(profile) == float:#combinations str-str and str-int try:### str: by logic only integer should be left if input is a valid one, float: in theory for a valid no further digits should be there if type(profile) == float: if profile < 1.0: profile = profile*100 fingerID = finger*100+int(profile) success = True except: success = False if verbose > 0: print("give_finger_ID: profile and finger defined, profile is not in a valid format!") elif type(profile) == int: if profile < 18: fingerID = finger*100+profile success = True else: success = False if verbose > 0: print("give_finger_ID: profile is out of valid range (0-17)") else: success = False if verbose > 0: print("give_finger_ID: type of profile is not supported!") if success:#last test if fingerID%100<18 and fingerID//100<138 and not fingerID==-1: success = True else: success = False if verbose > 0: print("give_finger_ID: invalid fingerID found. Please check your input.") return success, fingerID def get_heatflux_profile(port, profile, finger=None, timepoint=None, program=None, tstart=None, tend=None, testmode=False, version=0, verbose=0): """ returns a heat flux profile for a given port, profile (and finger) and timepoint in a program/time intervall defined by tstart and tend \n it is a combination of the following functions:\n download_heatflux_by_prrogram or download_heatflux_by_times\n extract_heatflux_profile_from_DL, since it is called in the end\n and also give_finger_ID for the interpretation of the profile/finger input\n in case you want several profiles, please use the function extract_heatflux_profile_from_DL to lower the amount of downloaded data (speed things up) INPUT ------ port: string or integer the port for which the data is requested, normally a AEF port profile: string, integer or float, string: "TM3h_5_5" or "1605" or "16.05" or "5", similar for float and integer. single number only if the finger is given! finger: string or integer, string "TM3h_5" or "16" timepoint: integer, default None the timepoint for the profile in seconds from T1 trigger program: string program id in the form of 'YYYYMMDD.XXX', e.g. '20181016.016' tstart: integer time in ns where the search should start tend: integer time in ns where the search should end testmode: boolean, default False True to load data from testarchive version: integer, optional, default 0 Version of the data in the archiveDB, in case of 0 the highest version will be used verbose: integer, optional, default 0 feedback level (details of print messages) RESULT ------ exist: boolean True if data was found, False if not time: list timestamp of the profile in seconds s: list of float surface coordinate in meter for each profile position heatflux: list of float the heat flux in in W/m2 for each profile position NOTE ------ """ ### checking whether input is valid and whether the download can be started if program == None and tstart == None: raise Exception("get_heatflux_profile: No Program given, please specify the program you want heatflux from") else: ### some informations about the program are given, is the timepoint given in a usefull way? if timepoint == None or (type(timepoint) != int and type(timepoint) != float): if verbose > 0: print("get_heatflux_profile: timepoint is not given in a usefull way, the profile have to be requested for location and time and program!") return False, 0, 0, 0 ### is the location given? 
possible combinations for profile have to be checked checkprof = give_finger_ID(profile, finger) if checkprof[0]: fingerID = checkprof[1] inputcheck = True else: if verbose > 0: print(datetime.datetime.now(), "get_heatflux_profile: given Finger and Profile combination is not understood!") return False, 0, 0, 0 ### let's find out which timepoint or time interval is requested, to know the OP, only needed for OP2 changes, but who knows if program != None: # prog=get_program_from_PID(program) # if prog[0]: # t1=prog[1]['trigger']['1'][0] exist, _, t1, _ = get_trigger_from_PID(program, port, testmode, verbose=verbose-1) if exist: tstart=t1+int((timepoint-0.02)*1e9) tend=t1+int((timepoint+0.02)*1e9) else: if verbose > 0: print("get_heatflux_profile: program not found") return False, 0, 0, 0 else: t1 = tstart if tend == None: program = AKF_2.get_program_id(t1) # prog=get_program_from_PID(program) # if prog[0]: # tref=prog[1]['trigger']['6'][0] exist, _, _, tref = get_trigger_from_PID(program, port, testmode, verbose=verbose-1) if exist: tend = tref+int((timepoint+0.02)*1e9) OP = IR_tools.get_OP_by_time(time_ns=t1) ### is the port given? if type(port) is str: try: camera=portcamdict[OP][port] if camera.split("_")[0]=='FLIR': FLIR = True else: FLIR = False port = int(port.split("AEF")[1]) goon = True except Exception as E: # raise Warning("get_heatflux_profile: unknown Port! "+str(E)) logging.warning("get_heatflux_profile: unknown Port! "+str(E)) return False, 0, 0, 0 elif isinstance(port, (int, np.integer, float, np.float)): FLIR = False goon = True else: goon = False if not goon or FLIR: # raise Exception("get_heatflux_profile: Unknown port, FLIR data is unsupported at the moment") logging.warning("get_heatflux_profile: Unknown port, FLIR data is unsupported at the moment") return False, 0, 0, 0 else: ### if the program goes up to here, time okay, finger okay, port okay, let's download it exist, timo, frames=download_heatflux_by_times(port, tstart, tend, time_window=0, threads=1, testmode=testmode, version=version, verbose=verbose-1) if exist: return extract_heatflux_profile_from_DL(time=(np.asarray(timo)-t1)/1e9, images=frames, profile=int(fingerID%100), finger=int(fingerID/100), time_window=timepoint, inputchecked=inputcheck, verbose=verbose-1) else: if verbose > 0: print("get_heatflux_profile: heatflux data could not be downloaded") return False, 0, 0, 0 def extract_heatflux_profile_from_DL(time, images, profile, finger=None, time_window=None, inputchecked=False, verbose=0, testmode=False): """ extracts from given heat flux images the requested profile for the given time window INPUT ------ time: list the time vector of the data in seconds images: list list of frames in form of numpy arrays with the heat flux data profile: string, integer or float, string: "TM3h_5_5" or "1605" or "16.05" or "5", similar for float and integer. single number only if the finger is given!
finger: string or integer, string "TM3h_5" or "16" time_window: integer, float or list/numpy array defines the timewindow, if only a number is give or an array/list with only one number, it searches one profile for the given timevector, \n if a list or array is given, several profiles will be returned, in case of None, the full program will be returned inputchecked: boolean, optional, default False defines whether the input parameters have been checked for correct format or not (profile and finger) verbose: integer, optional, default 0 feedback level (details of print messages) testmode: boolean, optional, default 0 if True, the data will be loaded from the test archive RESULT ------ exist: boolean indicates whether data was found (True) or not (False) time: list the timestamps of the profile s: list the surface coordinates of the profile in meter heatflux: list the heat flux for the profile in W/m2 NOTE ------ """ goon = True if time_window == None: Tid_start = 0 Tid_end = len(time) elif type(time_window) == int or type(time_window) == float: try: Tid_start=IR_tools.find_nearest(time, time_window) Tid_end=Tid_start+1 except Exception as E: goon = False elif type(time_window) == list or type(time_window) == np.ndarray and len(time_window)>0: try: Tid_start = IR_tools.find_nearest(time, time_window[0]) except Exception as E: goon = False if len(time_window)>1: try: Tid_end = IR_tools.find_nearest(time, time_window[1]) except Exception as E: goon = False else: Tid_end = Tid_start+1 else: goon=False E = "unknown Case" if not goon: if verbose > 0: print('extract_heatflux_profile_from_DL: ', E) return False, 0, 0, 0 else: if not inputchecked: checkprof = give_finger_ID(profile, finger) if checkprof[0]: fingerID = checkprof[1] else: return False, 0, 0, 0 else: fingerID = finger*100+profile ### okay finderID is fine, data is given, lets get the mapping exist ,mapping = download_heatflux_mapping_reference(testmode=testmode, verbose=verbose-1) if exist: Fingermap = np.asarray(mapping['Finger_ID'][0],dtype=np.int) locs = np.where(Fingermap==fingerID) S = mapping['s'][locs] q = np.asarray([images[i][locs] for i in range(Tid_start, Tid_end)]) return True, time[Tid_start:Tid_end], S, q else: if verbose > 0: print("extract_heatflux_profile_from_DL: mapping was not found, cannot extract the profile") return False, 0, 0, 0 def extract_temperature_profile_from_DL(port, time, images, profile, finger=None, time_window=None, inputchecked=False, verbose=0, reference_time=0): """ for a given heatflux image and profile and finger information, a profile can be extracted. INPUT ------ port: int or str the camera port as number or full string (e.g. 'AEF10') time: array or numpy array the time vector for the given images images: numpy array temperature flux images, from which the profile should be extracted profile: string, integer or float, string: "TM3h_5_5" or "1605" or "16.05" or "5", similar for float and integer. single number only if the finger is given! 
finger: string or integer, string "TM3h_5" or "16" time_window: integer, float or list/numpy array defines the timewindow, if only a number is given or an array/list with only one number, it searches one profile for the given time vector, \n if a list or array is given, several profiles will be returned, in case of None, the full program will be returned inputchecked: boolean, optional, default False defines whether the input parameters have been checked for correct format or not (profile and finger) verbose: integer, optional feedback level (details of print messages) reference_time : float, optional, default is 0 reference timepoint for the mapping data RESULT ------ exist: boolean indicates whether data was found (True) or not (False) time: list the timestamps of the profiles s: list the surface coordinates of the profiles in meter temperature: list the temperature values along each profile in K NOTES ----- """ goon = True if time_window == None: Tid_start = 0 Tid_end = len(time) elif type(time_window) == int or type(time_window) == float: try: Tid_start = IR_tools.find_nearest(time, time_window) Tid_end = Tid_start+1 except Exception as E: goon=False elif type(time_window) == list or type(time_window) == np.ndarray and len(time_window) > 0: try: Tid_start = IR_tools.find_nearest(time, time_window[0]) except Exception as E: goon = False if len(time_window)>1: try: Tid_end = IR_tools.find_nearest(time, time_window[1]) except Exception as E: goon = False else: Tid_end = Tid_start+1 else: goon = False E = "unknown Case" if not goon: if verbose > 0: print('extract_temperature_profile_from_DL: ', E) return False, 0, 0, 0 else: if not inputchecked: checkprof = give_finger_ID(profile, finger) if checkprof[0]: fingerID = checkprof[1] else: return False, 0, 0, 0 else: fingerID = finger*100+profile ### okay fingerID is fine, data is given, let's get the mapping exist, mapping = download_heatflux_mapping_reference() if exist: Fingermap = np.asarray(mapping['Finger_ID'][0],dtype=np.int) locs = np.where(Fingermap==fingerID) S = mapping['s'][locs] exist_s, scene = download_heatflux_scene_model_reference(port, timepoint=reference_time, verbose=verbose-1) if exist_s: X = scene['Pixel_X'][locs] Y = scene['Pixel_Y'][locs] q = np.asarray([images[i][np.asarray(Y,dtype=np.int), np.asarray(X,dtype=np.int)] for i in range(Tid_start, Tid_end)]) return True, time[Tid_start:Tid_end], S, q else: if verbose > 0: print("extract_temperature_profile_from_DL: mapping was not found, cannot extract the profile") return False, 0, 0, 0 def download_divertor_load(port, targetmodule=None, program=None, tstart=None, tend=None, version=0, verbose=0, testmode=False, request=True, request_ALL=False): """ downloads the integrated divertor load, based on the heat flux evaluation of the temperature data from the IR cameras.\n The data is structured in total load per divertor, identified by the port, and also in target-modules of the divertor.\n An estimated error for the data is given back. To get the total divertor load, please download the data of each of the 10 AEF ports.\n In case one or more ports are missing, a request can be created (console will give feedback on the request).\n The data will be evaluated by a server over time. INPUT ------ port: int or str the port description for the camera as number or full string (e.g.
'AEF10') targetmodule: str or list/ndarray of str, optional name or description of the target module, nothing wil return total load program: str program id as str in the format yyyymmdd.pid tstart: integer time in ns where the search should start tend: integer time in ns where the search should end version: integer, optional, default 0 Version of the data in the archiveDB, in case of 0 the highest version will be used verbose: integer, optional feedback level (details of print messages) testmode: boolean, default False True to load data from testarchive request: boolean, optional, default True switch to turn on or off the request of data if the data is not available in the database request_ALL: boolean, optional, default False switch to turn on the request of data for all AEF ports RESULT ------ exist: boolean True if data was found, False if not time: list a list containing the time stamp of each frame, time is in nanoseconds UTC load: list a list of values, values in the images are in W error: list a list of values, the error of each timepoint, values are in W """ TMchadict={ 'ALL':(0,1, 'divertor_total_load'), 'FULL':(0,1, 'divertor_total_load'), 'TM1H':(2,3, 'TM1h_load'), '1H':(2,3, 'TM1h_load'), 'TM2H':(4,5, 'TM2h_load'), '2H':(4,5, 'TM2h_load'), 'TM3H':(6,7, 'TM3h_load'), '3H':(6,7, 'TM3h_load'), 'TM4H':(8,9, 'TM4h_load'), '4H':(8,9, 'TM4h_load'), 'TM5H':(10,11, 'TM5h_load'), '5H':(10,11, 'TM5h_load'), 'TM6H':(12,13, 'TM6h_load'), '6H':(12,13, 'TM6h_load'), 'TM7H':(14,15, 'TM7h_load'), '7H':(14,15, 'TM7h_load'), 'TM8H':(16,17, 'TM8h_load'), '8H':(16,17, 'TM8h_load'), 'TM9H':(18,19, 'TM9h_load'), '9H':(18,19, 'TM9h_load'), 'TM1V':(20,21, 'TM1v_load'), '1V':(20,21, 'TM1v_load'), 'TM2V':(22,23, 'TM2v_load'), '2V':(22,23, 'TM2v_load'), 'TM3V':(24,25, 'TM3v_load'), '3V':(24,25, 'TM3v_load') } #interpretation of the targetmodule input, string or List of strings, numbers does not make sense here? if targetmodule == None: datcha = 0 ercha = 1 datcha_name='divertor_total_load' elif type(targetmodule)== list or type(targetmodule)==np.ndarray:## okay maybe more than one targetmodule is requested datcha = [] datcha_name = [] ercha = [] #okay now we have to find out which targetmodules are requested for ele in targetmodule: dummy = str(ele).upper() try: dummy=TMchadict[dummy] datcha.append(dummy[0]) ercha.append(dummy[1]) datcha_name.append(dummy[2]) except: if verbose > 0: print("cannot interpretate", ele, "as a targetmodule, skip it") elif type(targetmodule) == str: targetmodule = targetmodule.upper() dummy = TMchadict[targetmodule] datcha = dummy[0] datcha_name = dummy[2] ercha = dummy[1] else:# if verbose > 0: print("input for targetmodule invalid!") return False, 0,-1,-1 # target module known and channels are known, time to know the time if program == None and tstart == None: if verbose > 0: print("input for program and time invalid!") return False, 0,-1,-1 elif program != None: # prog=get_program_from_PID(program) # if prog[0]: # tstart=prog[1]['trigger']['1'][0] # tend=prog[1]['trigger']['6'][0] # else: exist, _, tstart, tend = get_trigger_from_PID(program, port, testmode, verbose=verbose-1) if not exist: print("download_divertor_load: Error! program not found") return False, 0,-1,-1 else: if tend == None: program=AKF_2.get_program_id(tstart) # prog=get_program_from_PID(program) # if prog[0]: # tend=prog[1]['trigger']['6'][0] exist, _, _, tend = get_trigger_from_PID(program, port, testmode, verbose=verbose-1) if not exist: print("download_divertor_load: Error! 
end trigger not found") return False, 0,-1,-1 #channels known, time known, okay lets get the data # tstart=int(tstart-100e6) OP = IR_tools.get_OP_by_time(time_ns=tstart) if testmode: base = testarchivepath else: base = archivepath if type(port) == int: port = "AEF"+str(port) if version == 0: version = get_latest_version(portpathdict[OP][str(port)]+"loads_DATASTREAM", project=project_ana, t_from=tstart, t_to=tend, testmode=testmode) larchivepath = base+project_ana+"/"+portpathdict[OP][str(port)]+"loads_DATASTREAM/V"+str(version) if type(datcha) == list:#case of several requested channels load = [] error = [] exist = True for i in range(len(datcha)): data = read_restdb(larchivepath+"/"+str(datcha[i])+"/"+datcha_name[i]+"/_signal.json?from="+str(tstart)+"&upto="+str(tend)) err = read_restdb(larchivepath+"/"+str(ercha[i])+"/"+datcha_name[i]+"_error/_signal.json?from="+str(tstart)+"&upto="+str(tend)) if i == 0: if data[0]: time = data[1] else: time = 0 exist = False if data[0]: load.append(data[2]) if err[0]: error.append(err[2]) if len(datcha) != len(load): exist = False else:# case for a single channel data = read_restdb(larchivepath+"/"+str(datcha)+"/"+datcha_name+"/_signal.json?from="+str(tstart)+"&upto="+str(tend)) err = read_restdb(larchivepath+"/"+str(ercha)+"/"+datcha_name+"_error/_signal.json?from="+str(tstart)+"&upto="+str(tend)) if data[0] and err[0]: exist = True time = data[1] load = data[2] error = err[2] else: exist = False time = 0 load = -1 error = -1 if exist == False: now = datetime.datetime.now() if verbose > 0: print(now, "download_divertor_load: divertor_load data is not available") if request: try: programid=AKF_2.get_program_id(tstart) except Exception as E: if verbose > 0: print('download_divertor_load: Error! ', E) programid=str(tstart) cam_progs=IR_tools.get_work_list(heatflux_requestlist_path, typ='load') cam_progs_ig,reasons=IR_tools.get_work_list(heatflux_requestlist_path, typ='ignore') cam_p = cam_progs+cam_progs_ig if (programid, str(port)) not in cam_p and (programid, "ALL") not in cam_p: f = open(heatflux_requestlist_path+"Auto_load_requests.txt", 'a') if request_ALL: f.write(programid+"\tALL\n") else: f.write(programid+"\t"+str(port)+"\n") f.close() if verbose > 0: print(now, "download_divertor_load: request created") elif verbose > 0: if (programid, str(port)) in cam_progs: print(now, "download_divertor_load: request exist already") else: pid = cam_progs_ig.index((programid, str(port))) print(now, "download_divertor_load: request ignored, data not available, reason: {0}".format(reasons[pid])) return exist, time, load, error def download_scene_model(port, program=None, timepoint=None, version=0, testmode=False, verbose=0): ''' downloading the scene model, provided by F. 
Pisano INPUT ----- port: integer, float or string the AEF port of the camera, as a number or as a full string (e.g. 10 or 'AEF10') program : string, optional Program ID of a W7-X Program in form of "20160224.025" to indicate the validation window timepoint: int64, optional nanosecond timestamp to indicate the validation window of the scene model version: integer, optional Version of the data in the archiveDB, in case of 0 the highest version will be used testmode: boolean, optional if True, the data will be loaded from the test archive verbose: integer, optional feedback level (details of print messages) RESULT ------ exist: boolean indicates whether data was found (True) or not (False) scene_model: dict dictionary of the different images in the scene model (see keys for description) NOTES ----- ''' if type(port)==int or type(port)==float: portnam="AEF"+str(port) elif type(port)==str: portnam=port if program==None and timepoint==None: dati=datetime.datetime.now() timepoint=int(TimeToNs([dati.year,dati.month,dati.day], [dati.hour,dati.minute,dati.second,dati.microsecond])) elif timepoint==None: # prog=get_program_from_PID(program) # if prog[0]: # timepoint=prog[1]['trigger']['1'][0] # else: exist, _, timepoint, _ = get_trigger_from_PID(program, port, testmode, verbose=verbose-1) if not exist: # raise Exception("download_scene_model: Error! Program "+program+" not found!") logging.warning("download_scene_model: Error! Program "+program+" not found!") return False, 0 if testmode: base=testarchivepath else: base=archivepath if version == 0: version = get_latest_version("QRT_IRCAM/"+str(portnam)+"_scene_model_PARLOG", project=project, testmode=False, t_from=timepoint) larchivepath=base+project+"/"+"QRT_IRCAM/"+str(portnam)+"_scene_model_DATASTREAM/V"+str(version)+"/0/"+str(portnam)+"_scene_model" larchivepath_par=base+project+"/QRT_IRCAM/"+str(portnam)+"_scene_model_PARLOG/V"+str(version) try: res = urllib.request.urlopen(larchivepath_par+"/_signal.json?from="+str(timepoint)+"&upto="+str(timepoint)) signal_list = json.loads(res.read().decode('utf-8')) res.close() goon=True except urllib.error.URLError as e: print('download_scene_model: Error! ', e) goon=False if goon: timepoint=signal_list['values'][0]['meta-data']['timestamp'] height=signal_list['values'][0]['meta-data']['height'] exist, time, frames = download_images_by_times(larchivepath, starttime=timepoint-10, stoptime=int(timepoint+1e8), verbose=verbose-1) del time stacked_image = np.array(frames[0], dtype='float64') channels = np.array(np.vsplit(stacked_image, stacked_image.shape[0]/height)) scene_model = { "CAD": channels[0], "FOV": channels[1], "PFC": channels[2], "angle": channels[3], "distance": channels[4], "emissivity": channels[5], "phi": channels[6], "theta": channels[7], "x": channels[8], "y": channels[9], "z": channels[10] } return exist, scene_model else: return False, [0] #%% caching, requested by Lukas R, sample function supplied by Lukas R.
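# The caching helpers below store downloaded heat flux images as .npz files under
# <module folder>/cache/<program>/<port>/heatflux_images_[<start>-<end>].npz
# (keys 'found', 'time', 'images', written by cache_heatflux_by_program), so repeated
# profile extractions do not have to hit the archive again. A minimal usage sketch,
# with program, port and time window taken from the heat flux test cell at the end of
# this file (example values only):
#     exist, t, s, q = extract_heatflux_profile_from_cache(20, "20181016.016",
#                                                          time_window=[2.0, 2.1],
#                                                          profile="TM3h_5_5")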
import os def extract_heatflux_profile_from_cache(port, program, time_window=0, profile="TM3h_5_5", **kwargs): """Returns exists, time, position, heat_flux_profiles """ kwargs.setdefault('verbose',0) kwargs.setdefault('testmode',False) data_folder = os.path.join(os.path.dirname(__file__), 'cache') save_path = data_folder + '/'.join(["", program, str(port), ""]) start = time_window[0] if hasattr(time_window, '__iter__') else 0.0 end = time_window[-1] if hasattr(time_window, '__iter__') else time_window path = save_path + "heatflux_images_[{s:.2f}-{e:.2f}].npz".format(s=start, e=end) if not os.path.isfile(path): if kwargs['verbose'] > 0: print("Generating cache for {program} port {port} timewindow [{s:.2f}-{e:.2f}]".format( port=port, program=program, s=start, e=end )) cache_heatflux_by_program(port, program, time_window=time_window, **kwargs) cache_file = np.load(path) found,time,images = [cache_file[kw] for kw in ['found','time','images']] if not found: raise ValueError("Download was invalid") exist, _, t1, t6 = get_trigger_from_PID( program, port, kwargs['testmode'], verbose=kwargs['verbose']-1) return extract_heatflux_profile_from_DL((np.asarray(time)-t1)/1e9, images, profile, time_window=time_window, **kwargs) def cache_heatflux_by_program(port, program, time_window=0, **kwargs): """ """ data_folder = os.path.join(os.path.dirname(__file__), 'cache') save_path = data_folder + '/'.join(["", program, str(port), ""]) check_path_and_make_folders(True, save_path) found, time, images = download_heatflux_by_program(port, program, time_window=time_window, **kwargs) start = time_window[0] if hasattr(time_window, '__iter__') else 0.0 end = time_window[-1] if hasattr(time_window, '__iter__') else time_window np.savez(save_path + "heatflux_images_[{s:.2f}-{e:.2f}]".format(s=start, e=end), found=found, time=time, images=images) def check_path_and_make_folders(activate, path, sep='', folder=''): '''Does nothing if file structure already exists. Otherwise recursively goes up layers until existing file is found, then creates subfolders. This kind of thing should be in an independent & general caching handler ''' if not activate: return if not os.path.exists(path): check_path_and_make_folders(activate, *path.rpartition('/')) try: os.mkdir(path+sep+folder) except FileExistsError: pass #%% general download functions """ the following functions are copied from W7xrest.read_restdb to remove this dependency and are edit to fit into the other functions @author: thomsen """ def read_restdb_old(request_url): """ Reads JSON data from W7-X REST API INPUT ------ request_url: string the url from there the data should be loaded Returns ------ valid: bolean access ok t: numpy-array numpy-array of time signal: numpy-array numpy-array of requested signals by H. Thomsen """ try: res = urllib.request.urlopen(request_url) except urllib.error.URLError as e: print('read_restdb_old: Error ', e) return False, 0, -1 except Exception as e: print('read_restdb_old: Error ', e) return False, 0, -1 else: signal_raw=res.read() res.close() signal_string=signal_raw.decode(encoding='UTF-8') signal_list = json.loads(signal_string) signal0=np.array(signal_list['values']) t=np.array(signal_list['dimensions']) return True, t, signal0 def read_restdb(request_url): """ Reads JSON data from W7-X REST API. 
Signals are converted into double INPUT ------ request_url: string the url from there the data should be loaded Returns: ------ valid: bolean access ok t: numpy-array numpy-array of time signal: numpy-array numpy-array of requested signals To get NaN, Infinity as their counter parts in Python (not 'Null') add the string +'&allowNaN=true' in the request. """ try: res = urllib.request.urlopen(request_url) signal_list = json.loads(res.read().decode('utf-8')) res.close() except urllib.error.URLError as e: #if hasattr(e, 'reason'): #print('Failed to reach server:',request_url) #print('Reason: ', e.reason) #elif hasattr(e, 'code'): #print('The server couldn\'t fulfill the request.') #print('Error code: ', e.code) return False, [0], [-1] except Exception as E: print('read_restdb: Error! ', E) return False, [0], [-1] else: # res = urllib.request.urlopen(request_url) # signal_raw=res.read() # signal_string=signal_raw.decode(encoding='UTF-8') # signal_list = json.loads(signal_string) try: signal0=np.array(signal_list['values']) t=np.array(signal_list['dimensions']) except ValueError as e: print('read_restdb: Error! ', signal_list['status'], e) return False, [0], [-2] else: return True, np.squeeze(t), np.squeeze(np.double(signal0)) def get_program_from_PID(instring): """ Retrieve program information via json-request (Python version of A.Alonso's Matlab routine getProgramInfo.m) input ------ instring : str specifies day and program ID (format yyyymmdd.pid) Returns ------ valid: boolean retrival ok program_list: list for program ID req_PID (returns whole list, if req_PID was not found) Example: ------ from W7Xrest import read_restdb as read_restdb v, pl=read_restdb.get_program_from_PID('20160310.007') if v: print(pl['description']) trigger6=pl['trigger']['6'][0] """ try: string_date,req_PID=instring.split('.') except ValueError as e: print('get_program_from_PID: Format of input string should be similar to 20160310.007') return False, 0 try: day=datetime.datetime.strptime(string_date, "%Y%m%d") except ValueError as e: print('get_program_from_PID: Date format should be similar to 20160310') return False, 0 day_plus1=day+datetime.timedelta(days=1) # create timestamps from begining to end of that day w7xstamp0=np.int64(day.timestamp()*1e9) w7xstamp1=np.int64(day_plus1.timestamp()*1e9) # url to retrieve program information program_url = 'http://archive-webapi.ipp-hgw.mpg.de/programs.json?from=' # write the url and load the data jsonurl = program_url+np.str(w7xstamp0)+'&upto='+np.str(w7xstamp1) try: res = urllib.request.urlopen(jsonurl) prog_list = json.loads(res.read().decode('utf-8')) res.close() except urllib.error.URLError as e: print('get_program_from_PID: Error opening URL', e) return False, 0 except Exception as E: print('get_program_from_PID: ERROR appeared', E) return False, 0 else: pl=prog_list['programs'] id_list=[] for pr in pl: x=pr['id'] id_list.append(int((x.split('.'))[1])) try: id_=id_list.index(int(req_PID)) except ValueError as e: print('get_program_from_PID: Could not find requested program ID', e) return False, pl else: return True, pl[id_] """ end of the copied functions @author: thomsen """ def get_trigger_from_PID(program, port=None, testmode=False, verbose=0): '''Gets the nanosecond timestamps of beginning of preparation phase (t0), the start of experiment phase (t1) and the end of the experiment phase (t6). 
If there was no program in the Archive (calibration or test shot), the function will scan the raw datastream and try to find the program by counting segments of data (with NUC, background, experiment data). INPUT ----- program: str program string of format YYYYMMDD.NNN (Year, Month, Day, program Number), e.g. '20171026.007' port: int port number of IR camera (e.g. 21 for AEF21 port) testmode: bool, optional switch of whether to search in the Test archive or not (OPTIONAL: default is to search the real Archive) verbose: integer, optional feedback level (details of print messages) (OPTIONAL: if not provided, only ERROR output) RESULT ------ exist: bool flag of whether the script was successful t_preparation: int nanosecond time stamp of beginning of preparation phase (t0) t_start: int nanosecond time stamp of beginning of experiment phase (t1) t_end: int nanosecond time stamp of end of experiment phase (t6) ''' # check if program was defined in archive if not isinstance(program, str): program = str(program) if len(program.split(".")[0]) < 3: program = program+"0" prog = get_program_from_PID(program) if prog[0]: t_preparation = prog[1]['trigger']['0'][0] t_start = prog[1]['trigger']['1'][0] t_end = prog[1]['trigger']['6'][0] return True, t_preparation, t_start, t_end else: # handle different formats of port if isinstance(port, str): try: # case port=='31' port = int(port) port = "AEF"+str(int(port)) except: # case port=='... AEF31 ...' avports = [] for key in portpathdict: avports = avports+list(portpathdict[key].keys()) portprefix = [] for portstr in avports: if portstr[0:3] not in portprefix: portprefix.append(portstr[0:3]) if len(port)<6: if port not in avports: print("get_trigger_from_PID: unknown port!", port,avports) return False, 0, 0, 0 else: gotit = False for key in portprefix: if key in port: port = int(port[port.find(key)+3:port.find(key)+5]) port = key+str(port) gotit = True break if not gotit: print("get_trigger_from_PID: unknown port!", port,avports) return False, 0, 0, 0 # if 'AEF' in port: # port = int(port[port.find('AEF')+3:port.find('AEF')+5]) else: try: port = "AEF"+str(int(port)) except: print("get_trigger_from_PID: port format not supported") return False, 0, 0, 0 if verbose > 0: print('get_trigger_from_PID: no program found in Archive. 
checking datastream...') date_str = program.split('.')[0] prog_no = int(program.split('.')[1]) OP = IR_tools.get_OP_by_time(program_str=program) # prepare Datastream url if testmode: stream_name = testarchivepath+project+"/"+portpathdict[OP][port]+'raw_DATASTREAM' else: stream_name = archivepath+project+"/"+portpathdict[OP][port]+'raw_DATASTREAM' time_from = AKF_2.to_timestamp(date_str+' 00:00:00', fmt=u"%Y%m%d %H:%M:%S") time_to = AKF_2.to_timestamp(date_str+' 23:59:59', fmt=u"%Y%m%d %H:%M:%S") version = AKF_2.get_last_version(stream_name, time_from, time_to) if version is not None: signal_name = stream_name+'/'+version+'/0/raw' else: print("get_trigger_from_PID: no version with data found!") return False, 0, 0, 0 # load all time intervals with data in this datastream intervals = AKF_2.get_time_intervals(signal_name, time_from, time_to) frame_start = np.array(intervals[::-1, 0]) frame_end = np.array(intervals[::-1,1]) # identify segments of frames by gaps in time steps dt = abs(intervals[:-1, 0]-intervals[1:, 0]) dt_normal = np.median(dt) i_segment = np.hstack([np.array([0]), np.where(dt>2.5*dt_normal)[0]+1, np.array([len(intervals)])]) if verbose>1: print('get_trigger_from_PID: found {0} data segments starting at {1}ns'.format(len(i_segment)-1, frame_start[i_segment[:-1]])) # try to group segments according to normal order (NUC frames, background frames, data frames) if (len(i_segment)-1)%3 == 0: t_programs = [] for i in range(len(i_segment)//3): # three data segments: # i*3: NUC, i*3+1: background, i*3+2: experiment phase # --> triggers: # beginning of NUC, beginning of experiment, end of experiment (=one frame before start of next segment) t_programs.append( [frame_start[i_segment[i*3]], frame_start[i_segment[i*3+2]], frame_end[i_segment[(i+1)*3]-1] ] ) if verbose>1: print('get_trigger_from_PID: found {0} shots (NUC+BG+data)'.format(len(t_programs))) if len(t_programs) >= prog_no and prog_no>0: t_preparation = t_programs[prog_no-1][0] t_start = t_programs[prog_no-1][1] t_end = t_programs[prog_no-1][2] return True, t_preparation, t_start, t_end else: print('get_trigger_from_PID: ERROR! program {0} not found among {1} shots (NUC+BG+data)'.format(prog_no, len(t_programs))) return False, 0, 0, 0 else: print('get_trigger_from_PID: ERROR! 
number of segments does not indicate multiple shots.') return False, 0, 0, 0 #%% dummy run of the script if __name__=='__main__': print("local function calling") #%% temperature download and plotting example # port=31#"AEF50"#"AEF51"'20171114.053'# # prog="20180911.008"#"20181011.033"#'20171122.035'#'20181011.010'#'20180823.037'#'20170927.020'#"20181011.036"#"20181016.037"#"20180920.042"#"20171109.021"#"20181010.036" # status, time, images, valid=get_temp_from_raw_by_program(port, prog, time_window=[5,5.1], emi=0.80, t_version=2, version=0, threads=1, give_ERROR=False,use_firstframe_as_background=False, verbose=5) # status2, time2, images2=download_raw_images_by_program(port, prog, time_window=0.02, verbose=5) # bla=get_calib_data(50, program=prog, verbose=5) # success, t, s, profile=extract_temperature_profile_from_DL(port, np.asarray(time-time[0])/1e9, images, profile="TM3h_5_5", verbose=10,reference_time=time[0]) # if status: # plt.figure() # plt.imshow(images[-1], vmin=330, vmax=1000, cmap=exJet) # cb=plt.colorbar() # cb.set_label("temperature in K",rotation=270, labelpad=20, fontsize=20) # cb.ax.tick_params(labelsize=20) # plt.title("AEF{0}".format(port)) #%% heatflux test # port = 20 # prog = "20181016.016" # # status,mapping=download_heatflux_mapping_reference(verbose=4) # # plt.figure() # plt.subplot(2,1,1) # t_start = 2.0 # status, times1, images1=download_heatflux_by_program(port, prog, time_window=[t_start, t_start+0.1], threads=4, version=2, verbose=3, givealpha=False,request=False) # plot_frame1 = np.nan_to_num(images1[0])/1E6 # plot_frame1[plot_frame1<0.1] = 0 # q_max = np.nanmax(plot_frame1) # q_min = np.nanpercentile(plot_frame1[plot_frame1>0], 1) # plt.scatter(mapping['X'], mapping['Y'], s=1, c=plot_frame1, cmap='jet', vmin=q_min) # # plt.colorbar(label='q [MW/m²]') # plt.title('t-t_0={0:.1f}s'.format(t_start)) # # plt.subplot(2,1,2) # t_start = 10.0 # status, times2, images2=download_heatflux_by_program(port, prog, time_window=[t_start, t_start+0.1], threads=4, version=2, verbose=3, givealpha=False,request=False) # plot_frame2 = np.nan_to_num(images2[0])/1E6 # plot_frame2[plot_frame2<0.1] = 0 # plt.scatter(mapping['X'], mapping['Y'], s=1, c=plot_frame2, cmap='jet', vmin=q_min, vmax=q_max) # plt.colorbar(label='q [MW/m²]') # plt.title('t-t_0={0:.1f}s'.format(t_start)) # plt.suptitle('Heat flux in module {0} program {1}'.format(port, prog)) # plt.tight_layout(rect=[0, 0, 1, 0.95]) # plt.show() #%% heat flux profile test # port = 20 # prog = "20171109.008" # finger_no = 1605 # # status, profile_time, s, profiles=get_heatflux_profile(port, finger_no, timepoint=1, program=prog, verbose=4) # plt.figure() # plt.plot(s, profiles[0]/1E6) # plt.xlabel('s [m]') # plt.ylabel('q [MW/m²]') # plt.title('heat flux along finger {0}\nin module {1} @ t-t1={2:.2f}s'.format(finger_no, port, profile_time[0])) # plt.show() #%% loads test # prog = "20181016.016" # loads = [] # times = [] # labels = [] # for port in [11,20,21,30,31,40,41]: # print('loading loads of port {0} in {1}'.format(port, prog)) # exist, time, load, error=download_divertor_load(port, targetmodule=['all'], program=prog,request=False, verbose=5) # if exist: # load_start = np.mean(load[0][:10]) # load_end = np.mean(load[0][-10:]) # load_offset = np.linspace(load_start, load_end, len(time)) # load_offset = 0 # times.append(time) # loads.append(load[0]-load_offset) # labels.append('module {0}'.format(port)) # # plt.figure() # for i in range(len(times)): # plt.plot((times[i] - times[i][0])/1E9, loads[i]/1E3, label=labels[i], 
linewidth=1, linestyle='--') # plt.legend() # plt.title('Integral heat load - program {0}'.format(prog)) # plt.xlabel('t - t_0 [s]') # plt.ylabel('P_divertor [kW]') # plt.tight_layout() # plt.show() #%% scene model test # port=10 # exist,model=download_scene_model(port) # keys = ['CAD', 'FOV', 'PFC', 'distance', 'angle', 'emissivity', 'x', 'z', 'phi'] # if exist: # plt.figure() # for i in range(len(keys)): # plt.subplot(3,3, i+1) # plt.imshow(model[keys[i]]) # plt.colorbar() # plt.title(keys[i]) # plt.tight_layout() # plt.show() #%% HDF5 writing test # import h5py as h5 ## bla=get_temp_from_raw_by_program_fullthreads(51, prog, time_window=[0,4], threads=4) ## bla2=get_nuced_raw_by_program_fullthreads(51, prog, time_window=[0,4], threads=4) # port = 10 # program = "20180925.013" # prog = get_program_from_PID(program) # if prog[0]: # t0 = prog[1]['trigger']['0'][0] # t1 = prog[1]['trigger']['1'][0] # t6 = prog[1]['trigger']['6'][0] # ## bla=download_raw_images_by_times(port, t0, t1, verbose=5) # bla = get_temp_from_raw_by_program(port, program, time_window=[0,4], emi=0.82, # T_version=1, version=0, threads=4, give_ERROR=False, # use_firstframe_as_background=True, verbose=5) # # filename = "AEF"+str(port)+"_"+program+"_temp_v1_b.h5" # File = h5.File(filename, 'w') # frames = np.asarray(bla[2]).swapaxes(0,2).swapaxes(0,1) # n_rows, n_cols, n_frames = np.shape(frames) # dset = File.create_dataset('images', shape=(n_rows, n_cols, n_frames),dtype='uint16', chunks=(n_rows, n_cols,1)) # imags = np.ascontiguousarray(frames) # dset.write_direct(imags) # File.create_dataset('timestamps', data=list(bla[1]), dtype='uint64')#, compression="gzip") # File.close() # # # test reading # File = h5.File(filename, 'r') # times = np.array(File['timestamps']) # frames2 = File['images'] # n_frames2 = frames2.shape[2] # if n_frames == n_frames2: # i_frames = np.random.randint(low=0, high=n_frames-1 , size=3) # for i in range(3): # print('testing frame', i_frames[i]) # if (np.nan_to_num(frames[:,:, i_frames[i]]) == frames2[:,:, i_frames[i]]).all(): # print(' original and h5 file verions identical!') # else: # print(' total difference of original and h5 file:', np.sum(np.nan_to_num(frames[:,:, i_frames[i]]) - frames2[:,:, i_frames[i]])) # File.close() #%% get coldframe from NUC test # port = 31 # program = '20171026.007' #"20190621.001" # testmode = False #True # exist, coldframe = get_NUCed_coldframe_by_program(port, program, exposure=None, # version=0, testmode=testmode, # plot_it=True, verbose=3) #%% get trigger times # port = 31 # program = '20190621.001' #'20190621.001' #'20171026.007' # time_window = 0 #0 #0.02 # testmode = True #True #False # verbose = 1 # # exist, t0, t1, t6 = get_trigger_from_PID(program, port, testmode, verbose=1) # if exist: # print('data of port {0} in program {1}:\n t_preparation {2}ns\n t_start {3}ns\n t_end {4}ns'.format(port, program, t0, t1, t6))
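#%% finger ID interpretation test
# a minimal sketch of give_finger_ID, which translates the different accepted
# profile/finger notations into the internal fingerID (XXXYY = finger*100 + profile);
# the inputs below are example values only
# ok, fid = give_finger_ID("TM3h_5_5", verbose=1)
# print(ok, fid)
# ok, fid = give_finger_ID("1605", verbose=1)
# print(ok, fid)
# ok, fid = give_finger_ID(5, finger="TM3h_5", verbose=1)
# print(ok, fid)
#%% scene model reference test
# a minimal sketch of download_heatflux_scene_model_reference, which returns the
# pixel and CAD coordinates of the heat flux images; port is an example value, and
# without program or timepoint the default reference timepoint of the function is used
# port = 20
# exist, mappings = download_heatflux_scene_model_reference(port, verbose=3)
# if exist:
#     print('available mappings:', list(mappings.keys()))
#     print('PFC legend:', mappings['PFC'][1])
#     plt.figure()
#     plt.imshow(mappings['CAD_Z'])
#     plt.colorbar(label='CAD Z [m]')
#     plt.title('AEF{0} scene model reference, CAD Z'.format(port))
#     plt.show()
#%% cached heat flux profile test
# a minimal sketch of the caching helpers above; program, port and time window are
# example values (same program as in the heat flux test cell); the first call fills
# ./cache/<program>/<port>/ and later calls read from the cached .npz file
# port = 20
# prog = "20181016.016"
# exist, t, s, q = extract_heatflux_profile_from_cache(port, prog, time_window=[2.0, 2.1],
#                                                      profile="TM3h_5_5", verbose=3)
# if exist:
#     plt.figure()
#     plt.plot(s, q[0]/1E6)
#     plt.xlabel('s [m]')
#     plt.ylabel('q [MW/m²]')
#     plt.title('cached heat flux profile TM3h_5_5 - program {0}'.format(prog))
#     plt.show()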