Commit 92f134ab authored by Holger Niemann

Merge branch 'master' into 'Holgers'

# Conflicts:
#   downloadversionIRdata.py
parents 86645bad a47486aa
......@@ -68,7 +68,7 @@ def bestimmtheitsmass_general(data,fit):
qam=quad_abweich_mittel(fit,mittel)
R=qam/(qam+quad_abweich(data,fit))
else:
print("Arrays must have same dimensions")
print("bestimmtheitsmass_general: Arrays must have same dimensions")
return R
def quad_abweich_mittel(data,mittel):
......@@ -83,7 +83,7 @@ def quad_abweich(data,fit):
for i in range(len(data)):
R=R+(data[i]-fit[i])**2
else:
print("Arrays must have same dimensions")
print("quad_abweich: Arrays must have same dimensions")
return R
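# Worked example (hypothetical arrays): bestimmtheitsmass_general computes a
# coefficient-of-determination-like measure R = qam / (qam + quad_abweich(data, fit)),
# where quad_abweich is the sum of squared residuals; qam is assumed to be the
# squared deviation of the fit from the mean value "mittel".
import numpy as np
data = np.array([1.0, 2.1, 2.9, 4.2])
fit = np.array([1.0, 2.0, 3.0, 4.0])
mittel = np.mean(data)
qam = np.sum((fit - mittel)**2)        # quad_abweich_mittel(fit, mittel)
residuals = np.sum((data - fit)**2)    # quad_abweich(data, fit)
R = qam / (qam + residuals)            # as in bestimmtheitsmass_general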
def find_nearest(array,value):
......@@ -131,6 +131,8 @@ def check_coldframe(coldframe,references=None,threshold=0.5,plot_it=False):
return False,bestimmtheit
def check_coldframe_by_refframe(coldframe,reference_frame,threshold=0.8,plot_it=False):
'''
Check the validity of a cold frame against a reference frame, using several
columns of the reference frame as reference profiles.
'''
references=[]
shapi=np.shape(reference_frame)
for i in [int(shapi[1]//5),int(shapi[1]//2),int(shapi[1]//4*3)]:
......@@ -185,8 +187,10 @@ def read_bad_pixels_from_file(port, shot_no=None, program=None,time_ns=None):
def find_outlier_pixels(frame,tolerance=3,worry_about_edges=True,plot_it=False):
# This function finds the bad pixels in a 2D dataset.
# Tolerance is the number of standard deviations used for cutoff.
'''
This function finds the bad pixels in a 2D dataset.
Tolerance is the number of standard deviations used for cutoff.
'''
frame = np.array(frame)#, dtype=int)
from scipy.ndimage import median_filter
blurred = median_filter(frame, size=9)
......@@ -222,26 +226,26 @@ def find_outlier_pixels(frame,tolerance=3,worry_about_edges=True,plot_it=False):
return bad_pixels
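# Minimal sketch of the detection idea stated in the docstring above (the
# exact cutoff and edge handling live in the part of the function not shown
# in this hunk): pixels deviating from their local median by more than
# `tolerance` standard deviations are flagged as bad.
import numpy as np
from scipy.ndimage import median_filter

def find_outlier_pixels_sketch(frame, tolerance=3):
    frame = np.asarray(frame, dtype=float)
    blurred = median_filter(frame, size=9)
    difference = frame - blurred
    cutoff = tolerance * np.std(difference)
    bad_y, bad_x = np.nonzero(np.abs(difference) > cutoff)
    return list(zip(bad_y, bad_x))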
def correct_images(images,badpixels):
print('correct_images: New routine restore_bad_pixels() is used and can be called directly. Check out "help(restore_bad_pixels)"')
def correct_images(images,badpixels,verbose=0):
'''
Replace the bad pixels (marked with 1 in badpixels) in a single frame or a
list of frames, using restore_bad_pixels().
'''
if type(badpixels)!=int:
if type(images) == list:
# return corrected images also as list of 2D arrays
# images = restore_bad_pixels(images, np.invert(badpixels==1))#.astype(np.float32)
# images = list(images)
for i in range(len(images)):
images[i]=restore_bad_pixels(images[i], np.invert(badpixels==1))
images[i]=restore_bad_pixels(images[i], np.invert(badpixels==1), verbose=verbose-1)
else:
# keep shape
images = restore_bad_pixels(images, np.invert(badpixels==1)).astype(np.float32)
images = restore_bad_pixels(images, np.invert(badpixels==1), verbose=verbose-1)
# for i in range(len(images)):
# images[i]=(restore_pixels(images[i],np.invert(badpixels==1))).astype(np.float32)
print("done")
if verbose>0:
print("correct_images: done")
return images
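# Usage sketch (dummy data, hypothetical frame size): badpixels is a 2D map
# with bad pixels marked as 1; images may be a single 2D frame or a list of
# frames.
import numpy as np
frames = [np.zeros((256, 320)), np.zeros((256, 320))]
badpixel_map = np.zeros((256, 320), dtype=int)   # 1 = bad pixel
corrected = correct_images(frames, badpixel_map, verbose=1)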
def restore_bad_pixels(frames, bad_pixel, by_list=True, check_neighbours=True, plot_it=False, verbose=0):
def restore_bad_pixels(frames, bad_pixel, by_list=True, check_neighbours=True, plot_it=False,verbose=0):
"""Restore bad pixel by interpolation of adjacent pixels. Optionally make
sure that adjacent pixels are not bad (time consuming). Default is to use
a list of bad pixels and a for loop. For many bad pixels consider using
......@@ -412,16 +416,20 @@ def restore_bad_pixels(frames, bad_pixel, by_list=True, check_neighbours=True, p
def generate_new_hot_image(cold,reference_cold,reference_hot):
'''
Generate a new hot image from the current cold image and the reference
cold/hot pair: hot = reference_hot + (cold - reference_cold).
'''
if cold is None or reference_cold is None or reference_hot is None:
raise Exception("Cannot Calculate new Hot image, if images are missing!")
raise Exception("generate_new_hot_image: Cannot Calculate new Hot image, if images are missing!")
else:
return reference_hot+(cold-reference_cold)
def calculate_gain_offset_image_pix(cold_image,hot_image=None,reference_cold=None,reference_hot=None,bose=1):
def calculate_gain_offset_image_pix(cold_image,hot_image=None,reference_cold=None,reference_hot=None,verbose=0):
'''
Calculate per-pixel gain and offset images from a cold and a hot image,
using the centre pixel of the images as reference level.
'''
if hot_image is None:
hot_image=generate_new_hot_image(cold_image,reference_cold,reference_hot)
if bose>0:
print("calculate gain and offset")
if verbose>0:
print("calculate_gain_offset_image_pix: calculate gain and offset")
Sh_ref = hot_image[ ( np.int( np.shape(hot_image)[0] /2 ) ) ][np.int( (np.shape(hot_image)[1] /2 ) ) ]
Sc_ref = cold_image[ ( np.int( (np.shape(cold_image)[0]) /2 ) ) ][( np.int( (np.shape(cold_image)[1]) /2 ) ) ]
Gain_rel = ( Sh_ref - Sc_ref ) / ( hot_image - cold_image)
......@@ -434,7 +442,7 @@ def calculate_gain_offset_image(cold_image,hot_image=None,reference_cold=None,re
if hot_image is None:
hot_image=generate_new_hot_image(cold_image,reference_cold,reference_hot)
if verbose>0:
print("calculate gain and offset")
print("calculate_gain_offset_image: calculate gain and offset")
# Sh_ref = hot_image[ ( np.int( np.shape(hot_image)[0] /2 ) ) ][np.int( (np.shape(hot_image)[1] /2 ) ) ]
# Sc_ref = cold_image[ ( np.int( (np.shape(cold_image)[0]) /2 ) ) ][( np.int( (np.shape(cold_image)[1]) /2 ) ) ]
......@@ -455,30 +463,32 @@ def calculate_gain_offset_image(cold_image,hot_image=None,reference_cold=None,re
#%% functions from Yu Gao
""" functions by Yu Gao"""
def load_ref_images(port, exposuretime):
'''
load the reference cold and hot frame during calibration from local files.
@port: e.g. 'AEF10'
@exposuretime: int number.
'''
cameraname = portcamdict['OP1.2a'][port]
foldername = cameraname.split('_')[0] + '_' + cameraname.split('_')[2]
scanpath = join(IRCamRefImagespath, foldername)
coldref, hotref = [], []
for filename in glob.iglob(scanpath + '\*' + str(int(exposuretime)) + 'us.h5', recursive=True):
if 'hot' in filename:
print (filename)
with h5py.File(filename, 'r') as h5in:
hotref = h5in[basename(filename)].value
elif 'cold' in filename:
print (filename)
with h5py.File(filename, 'r') as h5in:
coldref = h5in[basename(filename)].value
return coldref, hotref
def load_ref_images(port, exposuretime, verbose=0):
'''
Load the reference cold and hot frames recorded during calibration from local files.
@port: port name, e.g. 'AEF10'
@exposuretime: exposure time in us (int)
'''
cameraname = portcamdict['OP1.2a'][port]
foldername = cameraname.split('_')[0] + '_' + cameraname.split('_')[2]
scanpath = join(IRCamRefImagespath, foldername)
coldref, hotref = [], []
for filename in glob.iglob(scanpath + '\*' + str(int(exposuretime)) + 'us.h5', recursive=True):
if 'hot' in filename:
if verbose>0:
print('load_ref_images: read from ',filename)
with h5py.File(filename, 'r') as h5in:
hotref = h5in[basename(filename)].value
elif 'cold' in filename:
if verbose>0:
print('load_ref_images: read from ',filename)
with h5py.File(filename, 'r') as h5in:
coldref = h5in[basename(filename)].value
return coldref, hotref
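# Usage sketch (illustrative arguments): returns the cold and hot reference
# frames recorded at the given exposure time (in us).
coldref, hotref = load_ref_images('AEF10', 50, verbose=1)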
def reconstruct_coldframe (exposuretime, sT, a, bnew, coldref):
cirebuild = a * sT + bnew * exposuretime + coldref
return cirebuild
cirebuild = a * sT + bnew * exposuretime + coldref
return cirebuild
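# Usage sketch (placeholder values): a and bnew are the per-pixel fit maps
# loaded elsewhere from the *_a.npy / *_b.npy files, sT is the sensor
# temperature and coldref the reference cold frame; the rebuilt cold frame is
# the linear model a*sT + bnew*exposuretime + coldref.
import numpy as np
a_map = np.zeros((256, 320))
b_map = np.zeros((256, 320))
cold_reference = np.zeros((256, 320))
rebuilt_cold = reconstruct_coldframe(50, 30.0, a_map, b_map, cold_reference)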
#%% other functions
......
......@@ -11,7 +11,11 @@ Version: 3.1.0
import numpy as np
import IR_image_tools as IR_tools
from IR_config_constants import archivepath,portcamdict,camlimdict,valid_FOV_circle,valid_background_rectangle,TC_port,TC_channel,IRCamColdframes_fittingpath,exJet,portpathdict,project,project_ana,heatflux_requestlist_path#,exJet_trans
from IR_config_constants import archivepath,portcamdict,camlimdict, \
valid_FOV_circle,valid_background_rectangle, \
TC_port,TC_channel,IRCamColdframes_fittingpath, \
exJet,portpathdict,project,project_ana, \
heatflux_requestlist_path#,exJet_trans
#try:
# import W7Xrest.read_restdb as AKF_1
#except:
......@@ -39,33 +43,40 @@ try:
except:
config_path=""
def get_OP_by_program(program):
try:
bla=int(program.split(".")[0])
time_ns=int(TimeToNs([bla // 10000,bla % 10000 // 100,bla % 10000 % 100],[10,0,0,0]))
return get_OP_by_time(time_ns)
except Exception as E:
print(E)
return None
def get_OP_by_time(time_ns):
dateOP=datetime.datetime.utcfromtimestamp(time_ns/1e9)
if dateOP.year==2017:
if dateOP.month>8 and dateOP.month<12:
return "OP1.2a"
elif dateOP.month==8 and dateOP.day>=28:
return "OP1.2a"
elif dateOP.month==12 and dateOP.day<8:
return "OP1.2a"
else:
return None
elif dateOP.year==2018:
return "OP1.2b"
elif dateOP.year<=2016 and dateOP.year>=2015:
if (dateOP.year==2016 and dateOP.month<=3) or (dateOP.year==2015 and dateOP.month==12):
return "OP1.1"
else:
return None
# These two functions seem unused and will be deleted.
# They are redundant to
# IR_tools.get_OP_by_time(time_ns=None, shot_no=None, program_str=None)
# which should be used instead (see the usage sketch after this block).
#
#def get_OP_by_program(program):
# try:
# bla=int(program.split(".")[0])
# time_ns=int(TimeToNs([bla // 10000,bla % 10000 // 100,bla % 10000 % 100],[10,0,0,0]))
# return get_OP_by_time(time_ns)
# except Exception as E:
# print(E)
# return None
#
#def get_OP_by_time(time_ns):
# dateOP=datetime.datetime.utcfromtimestamp(time_ns/1e9)
# if dateOP.year==2017:
# if dateOP.month>8 and dateOP.month<12:
# return "OP1.2a"
# elif dateOP.month==8 and dateOP.day>=28:
# return "OP1.2a"
# elif dateOP.month==12 and dateOP.day<8:
# return "OP1.2a"
# else:
# return None
# elif dateOP.year==2018:
# return "OP1.2b"
# elif dateOP.year<=2016 and dateOP.year>=2015:
# if (dateOP.year==2016 and dateOP.month<=3) or (dateOP.year==2015 and dateOP.month==12):
# return "OP1.1"
# else:
# return None
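# Usage sketch for the replacement named above (illustrative values; any one
# of the keyword arguments is sufficient):
#   OP = IR_tools.get_OP_by_time(program_str="20171026.025")
#   OP = IR_tools.get_OP_by_time(time_ns=1503907200000000000)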
def get_latest_version(stream,project="W7X",Test=False,t_from=None,t_to=None,program=None):
"""Find out latest version of given stream
......@@ -118,21 +129,22 @@ def TimeToNs(date,time):
nsdate=div.total_seconds()*1e9
return nsdate
def read_program(timestampstart,timestamp_end=0,tol=60):
def read_program(timestamp_start,timestamp_end=0,tol=60):
"""
read_program()
"""
program_url = 'http://archive-webapi.ipp-hgw.mpg.de/programs.json?from'
if timestamp_end!=0:
jsonurl=program_url+"="+str(int(timestampstart-tol*1e9))+"&upto="+str(int(timestamp_end+tol*1e9))
else:
jsonurl=program_url+"="+str(int(timestampstart-tol*1e9))+"&upto="+str(int(timestampstart+tol*1e9))
timestamp_end = timestamp_start
jsonurl = '{0}={1}&upto={2}'.format(program_url,
int(timestamp_start-tol*1e9),
int(timestamp_end+tol*1e9))
try:
res = urllib.request.urlopen(jsonurl)
prog_raw=res.read()
res.close()
except urllib.error.URLError as e:
print('!get_program: Error opening URL')
print('read_program: Error opening URL')
print(e)
return False,0
else:
......@@ -153,7 +165,7 @@ def read_restdb_old(request_url):
try:
res = urllib.request.urlopen(request_url)
except urllib.error.URLError as e:
print(e)
print('read_restdb_old: Error ', e)
return False, 0, -1
else:
signal_raw=res.read()
......@@ -181,12 +193,12 @@ def download_LUT(port,time,exposure=0,emissivity=0,camera_filter=0,version=0, ve
elif port in [10,11,20,21,30,31,40,41,51] or OP=="OP1.2b":#camera=="IRCAM" or camera=="IRcam" or camera=="ircam":
query="Texp_"+str(int(exposure))+"us_e_"+str(float(emissivity))
else:
print("camera unknown, stopping here")
print("download_LUT: Error! Camera unknown, stopping here.")
raise Exception
if version==0:
version=get_latest_version(portpathdict[OP]["AEF"+str(port)]+"LUT_DATASTREAM")
if verbose>0:
print("LUT V"+str(version)+" is used")
print("download_LUT: LUT V"+str(version)+" is used")
#time=int(fu.TimeToNs([2017,9,26],[8,0,0,0]))
LUTpar=read_restdb_old(larchivepath+"PARLOG/V"+str(version)+"/_signal.json?from="+str(time-10)+"&upto="+str(time+20))
if LUTpar[0]:
......@@ -198,12 +210,12 @@ def download_LUT(port,time,exposure=0,emissivity=0,camera_filter=0,version=0, ve
del LUTpar, LUTs
return True,LUT
else:
print("Warning: unable to download the LUTs")
print("download_LUT: Warning! unable to download the LUTs")
del LUTpar, LUTs
return False,0
else:
del LUTpar
print("Warning: unable to find LUTs, check your request")
print("download_LUT: Warning! unable to find LUTs, check your request")
return False,0
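# Usage sketch (illustrative arguments): time is a W7-X timestamp in ns and
# exposure the exposure time in us; on success the LUT is returned as the
# second value.
exist, LUT = download_LUT(port=20, time=1503907200000000000, exposure=50,
                          emissivity=0.82)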
......@@ -214,7 +226,7 @@ def read_LUT_from_file(port, this_time, t_exp, emissivity, cfilter, emissivity_s
Read LUT V3 from local files for testing new calibration.
"""
LUT_dir = '\\\\sv-e4-fs-1\\E4-Mitarbeiter\\E4 Diagnostics\\QIR\\testingLUT'
OP = get_OP_by_time(this_time)
OP = IR_tools.get_OP_by_time(time_ns=this_time)
cam_name = portcamdict[OP]['AEF{0}'.format(port)]
filename = '{0}_thermal_LUT_filter_{1}_texp_{2}us_emiss_{3:.2f}.json'.format(cam_name, cfilter, t_exp, emissivity)
# data = json.loads(join(LUT_dir, filename).decode('utf-8'))
......@@ -223,10 +235,10 @@ def read_LUT_from_file(port, this_time, t_exp, emissivity, cfilter, emissivity_s
jfile=json.load(data_file)
LUT=np.array(jfile['LUT']).swapaxes(0,1)
if verbose>0:
print("succesfully loaded V3 LUT from local directory")
print("read_LUT_from_file: succesfully loaded V3 LUT from local directory")
return True, LUT
except Exception as E:
print("ERROR in loading V3 LUTs",E)
print("read_LUT_from_file: ERROR in loading V3 LUTs",E)
return False, []
def download_NUC_by_program(port,program,exposure,version=0):
......@@ -240,12 +252,12 @@ def download_NUC_by_program(port,program,exposure,version=0):
return download_NUC_by_times(port,starttime,stoptime,exposure,version)
else:
# except:
print("cannot find the program")
print("download_NUC_by_program: cannot find the program")
return False,0,0
def download_NUC_by_times(port,starttime,stoptime,exposure,version=0):
project="W7X"
OP=get_OP_by_time(starttime)
OP=IR_tools.get_OP_by_time(time_ns=starttime)
larchivepath=archivepath+project+"/"+portpathdict[OP]["AEF"+str(port)]+"NUC_"
# NUC_parlog=AKF_1.read_restdb_old(archivepath+"PARLOG/V"+str(version)+"/_signal.json?from="+str(starttime)+"&upto="+str(stoptime))
if version==0:
......@@ -256,7 +268,7 @@ def download_NUC_by_times(port,starttime,stoptime,exposure,version=0):
res.close()
goon=True
except urllib.error.URLError as e:
print(e)
print('download_NUC_by_times: Error! ', e)
goon=False
if goon:
n=0
......@@ -273,10 +285,10 @@ def download_NUC_by_times(port,starttime,stoptime,exposure,version=0):
images=np.vsplit(NUC[2],np.shape(NUC[2])[0]/offset_i)
return True,images,['gain','offset','cold','badpixels','gain_error','offset_error']
else:
print("NUC image for requested exposure time not found")
print("download_NUC_by_times: NUC image for requested exposure time not found")
return False, 0, 0
else:
print("NUC image for requested exposure time not found")
print("download_NUC_by_times: NUC image for requested exposure time not found")
return False,0,0
else:
return False,0,0
......@@ -292,10 +304,15 @@ def get_NUC_by_program(port,program,exposure,version=0,verbose=0):
return get_NUC_by_times(port,starttime,stoptime,exposure,version=0,verbose=verbose-1)
else:
# except:
print("cannot find the program")
print("get_NUC_by_program: Error! cannot find the program")
return False,0,0
def get_NUC_by_times(port,starttime,endtime,t_exp,version=0,verbose=0):
"""
Loads NUC elements (gain, offset cold image, bad pixels) for an IR camera
in a given port and time interval. Depending on time and camera, reconstruct
teh cold frame and/or compute the gain,offset from the hot and cold frame.
"""
OP=IR_tools.get_OP_by_time(time_ns=endtime)
if OP=="OP1.2a":
t1=endtime
......@@ -303,8 +320,8 @@ def get_NUC_by_times(port,starttime,endtime,t_exp,version=0,verbose=0):
prog=read_program(t1)
program=prog[1]['id']
if (port == 31) or (port == 21 and float(program[4:]) > 1110):
print("rebuilding coldframe")
if verbose>0:
print("get_NUC_by_times: rebuilding coldframe")
#use any way the rebuilt coldframe.
sT = get_sensor_temp_by_program(port, program)[2][0]
# coldref, hotref = IR_tools.load_ref_images('AEF' + str(port), t_exp)
......@@ -313,7 +330,7 @@ def get_NUC_by_times(port,starttime,endtime,t_exp,version=0,verbose=0):
coldref=hotcold[1]
hotref=hotcold[0]
else:
raise Exception("unable to download reference frames")
raise Exception("get_NUC_by_times: unable to download reference frames")
filestring = 'AEF' + str(port) + '_et' + str(t_exp)
amap = np.load(join(IRCamColdframes_fittingpath, filestring + '_a.npy'))
bmap = np.load(join(IRCamColdframes_fittingpath, filestring + '_b.npy'))
......@@ -321,7 +338,7 @@ def get_NUC_by_times(port,starttime,endtime,t_exp,version=0,verbose=0):
gain, offset = IR_tools.calculate_gain_offset_image(cirebuild, None, coldref, hotref,verbose=verbose-1)
gain[gain == np.inf] = 0
offset[offset == np.inf] = 0
badpixels=find_badpixels(port,gain,offset,niterations=10,tolerance=10)
badpixels=find_badpixels(port,gain,offset,niterations=10,tolerance=10,verbose=verbose-1)
gain_error=0
offset_error=0
return True,[gain,offset,cirebuild,badpixels,gain_error,offset_error],['gain','offset','cold','badpixels','gain_error','offset_error']
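# Usage sketch (illustrative arguments): on success the second return value
# holds the NUC elements in the order given by the third return value, e.g.
#   exist, nuc, names = get_NUC_by_times(21, starttime, endtime, 50, verbose=1)
#   gain, offset, cold, badpixels = nuc[0], nuc[1], nuc[2], nuc[3]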
......@@ -332,16 +349,16 @@ def get_NUC_by_times(port,starttime,endtime,t_exp,version=0,verbose=0):
coldref=hotcold[1]
hotref=hotcold[0]
else:
raise Exception("unable to download reference frames")
raise Exception("get_NUC_by_times: unable to download reference frames")
NUC_DL=download_NUC_by_times(port,t0,t1,t_exp,version)
if NUC_DL[0]==False:
raise Exception("NUC was not found")
raise Exception("get_NUC_by_times: NUC was not found")
else:
ci = NUC_DL[1][2]
gain, offset = IR_tools.calculate_gain_offset_image(ci, None, coldref, hotref,verbose=verbose-1)
gain[gain == np.inf] = 0
offset[offset == np.inf] = 0
badpixels=find_badpixels(port,gain,offset,niterations=10,tolerance=10)
badpixels=find_badpixels(port,gain,offset,niterations=10,tolerance=10,verbose=verbose-1)
gain_error=0
offset_error=0
return True,[gain,offset,ci,badpixels,gain_error,offset_error],['gain','offset','cold','badpixels','gain_error','offset_error']
......@@ -355,9 +372,9 @@ def get_NUC_by_times(port,starttime,endtime,t_exp,version=0,verbose=0):
coldref=hotcold[1]
hotref=hotcold[0]
else:
raise Exception("unable to download reference frames")
raise Exception("get_NUC_by_times: unable to download reference frames")
### get the cold frame: 1. get the metastream frametype and exposure time and check it for the positions; if the type is -1 it is unknown and needs to be identified in a different way
exist,ctime,coldframes=download_calibration_raw_files_by_time(port,t_exp,starttime,endtime,frametype=0,version=0)
exist,ctime,coldframes=download_calibration_raw_files_by_time(port,t_exp,starttime,endtime,frametype=0,version=0,verbose=verbose-1)
if exist:
cold=np.zeros(np.shape(coldframes[0]),dtype=np.uint64)
for ele in coldframes:
......@@ -366,12 +383,12 @@ def get_NUC_by_times(port,starttime,endtime,t_exp,version=0,verbose=0):
else:
return False,[0],[0]
gain, offset = IR_tools.calculate_gain_offset_image(cold, None, coldref, hotref,verbose=verbose-1)
badpixels=find_badpixels(port,gain,offset,niterations=10,tolerance=10)
badpixels=find_badpixels(port,gain,offset,niterations=10,tolerance=10,verbose=verbose-1)
return True,[gain,offset,cold,badpixels,gain_error,offset_error],['gain','offset','cold','badpixels','gain_error','offset_error']
else:
raise Exception("unknown Operation phase or NUC method not implemented for this OP")
raise Exception("get_NUC_by_times: unknown Operation phase or NUC method not implemented for this OP")
def download_calibration_raw_files_by_time(port,t_exp,starttime,endtime,frametype=0,version=0):
def download_calibration_raw_files_by_time(port,t_exp,starttime,endtime,frametype=0,version=0,verbose=0):
"""
download_calibration_raw_files_by_time(port,t_exp,starttime,endtime,frametype=0,version=0,verbose=0)
frametype: 0 for closed shutter frames (cold), 1 for open shutter frames (background)
......@@ -381,10 +398,10 @@ def download_calibration_raw_files_by_time(port,t_exp,starttime,endtime,frametyp
if gotit:
expinds=np.where(texp_t==t_exp)[0]
if len(expinds)==0:
print("cannot find the exposure time in the given data")
print("download_calibration_raw_files_by_time: Error! cannot find the exposure time in the given data")
return False,[0],[0]
else:
print("exposure time not found")
print("download_calibration_raw_files_by_time: Error! exposure time not found")
return False,[0],[0]
gotitf,timef,values_f=get_frametype_by_times(port,int(starttime-100),int(endtime-20e6))
if gotitf:
......@@ -394,7 +411,8 @@ def download_calibration_raw_files_by_time(port,t_exp,starttime,endtime,frametyp
# print((ref_t[1]-ref_t[0])/1e9)
# print(len(timef),len(timef[expinds]),len(timef[expinds][typiinds]))
else:#okay the early data stuff or strange stuff
print("frame type was not identified assuming that the first part is the cold and the second one the background")
if verbose>0:
print("download_calibration_raw_files_by_time: frame type was not identified assuming that the first part is the cold and the second one the background")
frametimes=time_t[expinds]
diftime=[frametimes[i]-frametimes[i-1] for i in range(1,len(frametimes))]
turnpoint=np.where(np.asarray(diftime)>11e6)[0][0]
......@@ -406,9 +424,9 @@ def download_calibration_raw_files_by_time(port,t_exp,starttime,endtime,frametyp
ref_t=[np.min(frametimes[turnpoint+1:]),np.max(frametimes[turnpoint+1:])]
print((ref_t[1]-ref_t[0])/1e9)
else:
raise Exception("requested Frametype unknown and not implemented!")
raise Exception("download_calibration_raw_files_by_time: requested Frametype unknown and not implemented!")
else:
print("frametype not found")
print("download_calibration_raw_files_by_time: Error! frametype not found")
return False,[0],[0]
t1date=datetime.datetime.utcfromtimestamp((endtime-100)/1e9)
t1date=t1date.isoformat()
......@@ -420,13 +438,18 @@ def download_calibration_raw_files_by_time(port,t_exp,starttime,endtime,frametyp
timest0=AKF_2.get_time_intervals(larchivepath,t0date.replace("T"," "),t1date.replace("T"," "))
t_offset=timest0[-1][0]-time_t[0]
if t_offset>10e6:
print("time offset detected, try to correct this, offset is",t_offset)
if verbose>0:
print("download_calibration_raw_files_by_time: time offset detected, try to correct this, offset is",t_offset)
else:
t_offset=0
# print("starttime frames:",np.min(timest0),"starttime metachannels:",time_t[0],"offset",t_offset)
return download_raw_images_by_times(port,ref_t[0]+t_offset,ref_t[1]+t_offset)
return download_raw_images_by_times(port,ref_t[0]+t_offset,ref_t[1]+t_offset,verbose=verbose-1)
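# Usage sketch (illustrative arguments): frametype=0 requests the closed
# shutter (cold) frames, frametype=1 the open shutter (background) frames.
t0 = 1503907200000000000          # placeholder start time in ns
t1 = t0 + int(60e9)               # placeholder end time in ns
exist, ctime, coldframes = download_calibration_raw_files_by_time(
    port=21, t_exp=50, starttime=t0, endtime=t1, frametype=0, verbose=1)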
def download_hot_cold_reference_by_times(port,exposure,starttime=1503907200000000000,version=0):
"""
Loads the most recent hot and cold calibration frames for a starttime.
Uses first calibration frames if time is not defined.
"""
OP=IR_tools.get_OP_by_time(time_ns=starttime)
larchivepath=archivepath+"W7X/"+portpathdict[OP]["AEF"+str(port)]+"raw_"
# NUC_parlog=AKF_1.read_restdb_old(archivepath+"PARLOG/V"+str(version)+"/_signal.json?from="+str(starttime)+"&upto="+str(stoptime))
......@@ -438,7 +461,7 @@ def download_hot_cold_reference_by_times(port,exposure,starttime=150390720000000
res.close()
goon=True
except urllib.error.URLError as e:
print(e)
print('download_hot_cold_reference_by_times: Error! ',e)
goon=False
if goon:
COLDtime=0
......@@ -446,11 +469,11 @@ def download_hot_cold_reference_by_times(port,exposure,starttime=150390720000000
try:
COLDtime=signal_list['values'][0]['structure']['cold_'+str(int(exposure))+'us']
except:
print("cold image for requested exposure time not found")
print("download_hot_cold_reference_by_times: cold image for requested exposure time not found")
try:
HOTtime=signal_list['values'][0]['structure']['hot_'+str(int(exposure))+'us']
except:
print("hot image for requested exposure time not found")
print("download_hot_cold_reference_by_times: hot image for requested exposure time not found")
images=[]
if HOTtime!=0:
HOT=read_restdb(larchivepath+"DATASTREAM/V"+str(version)+"/_signal.json?from="+str(HOTtime-10)+"&upto="+str(HOTtime+10))
......@@ -464,7 +487,7 @@ def download_hot_cold_reference_by_times(port,exposure,starttime=150390720000000
if HOT[0] and COLD[0] and len(images)==2:
return True,images,['hot','cold']
else:
print("hot and cold image for requested exposure time not found")
print("download_hot_cold_reference_by_times: Error! hot and cold image for requested exposure time not found")
return False,0,0
else:
return False,0,0
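# Usage sketch (illustrative arguments): on success the second return value
# holds the hot and cold reference frames in the order given by the labels.
exist, images, labels = download_hot_cold_reference_by_times(port=20, exposure=50)
if exist:
    hotref, coldref = images[0], images[1]   # labels == ['hot', 'cold']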
......@@ -482,7 +505,7 @@ def download_background_by_program(port,program,exposure,version=0):
stoptime=prog[1]['trigger']['1'][0]
return download_background_by_times(port,starttime,stoptime,exposure,version)
else:#except:
print("cannot find the program")
print("download_background_by_program: Error! cannot find the program {0}".format(program))
return False,0,0
def download_background_by_times(port,starttime,stoptime,exposure,camera_filter=0,version=0):
......@@ -497,7 +520,7 @@ def download_background_by_times(port,starttime,stoptime,exposure,camera_filter=
res.close()
goon=True
except urllib.error.URLError as e:
print(e)
print('download_background_by_times: Error! ',e)
goon=False
if goon:
n=0
......@@ -515,17 +538,19 @@ def download_background_by_times(port,starttime,stoptime,exposure,camera_filter=
if backdat[0]:
return backdat#[True,backdat[2]]
else:
print("background image for requested exposure time(, filter) not found")
print("download_background_by_times: Error! background image for requested exposure time(, filter) not found")
return False,0,0
else:
print("background image for requested exposure time(, filter) not found")
print("download_background_by_times: Error! background image for requested exposure time(, filter) not found")
return False,0,0
else:
return False,0,0
def get_NUCed_background_by_times(port,t0,t1,t_exp,cfilter,gain,offset,version=0,plot_it=False):
"OP1.2b function"
exist,btime,backgroundframes=download_calibration_raw_files_by_time(port,t_exp,t0,t1,frametype=1,version=version)
def get_NUCed_background_by_times(port,t0,t1,t_exp,cfilter,gain,offset,version=0,plot_it=False,verbose=0):
"""
OP1.2b function
"""
exist,btime,backgroundframes=download_calibration_raw_files_by_time(port,t_exp,t0,t1,frametype=1,version=version,verbose=verbose-1)
camera=portpathdict["OP1.2b"]["AEF"+str(port)]
background=np.zeros(np.shape(backgroundframes[0]),dtype=np.uint64)
for frame in backgroundframes:
......@@ -538,13 +563,14 @@ def get_NUCed_background_by_times(port,t0,t1,t_exp,cfilter,gain,offset,version=0
if not camera.split("_")[0]=="Infratec":
background=apply_NUC([background],gain,offset)[0]
if plot_it:
plt.figure()
plt.imshow(background,vmin=np.median(background)-500,vmax=np.median(background)+500)
plt.title("background image nuced")
return exist,btime[0],background
def download_raw_images_by_program(port,program,time_s=0,version=0,verbose=0):
def download_raw_images_by_program(port,program,time_window=0,version=0,verbose=0):
"""
"""
# prog=AKF_1.get_program_from_PID(program)
try:
t_program = AKF_2.get_program_from_to(program)
......@@ -552,25 +578,26 @@ def download_raw_images_by_program(port,program,time_s=0,version=0,verbose=0):
# if prog[0]:
starttime=prog[0]['trigger']['1'][0]
stoptime=prog[0]['trigger']['6'][0]
if type