Commit df9d8164 authored by Lukas Rudischhauser

built some caching functions convenient for me

parent 64edeb22
......@@ -27,7 +27,7 @@ import urllib.error
import json
from PIL import Image
from io import BytesIO
from os.path import join
from os.path import join, exists
import matplotlib.pyplot as plt
try:
import threading
......@@ -43,8 +43,8 @@ except Exception as E:
try:
config_path = "\\\\sv-e4-fs-1\\E4-Mitarbeiter\\E4 Diagnostics\\QIR\\"\
"Software\\QI-RealTime\\1.0.0\\release\\QIR-IrAnalysis\\"\
"Config\\Thermal calibration\\"
"Software\\QI-RealTime\\1.0.0\\release\\QIR-IrAnalysis\\"\
"Config\\Thermal calibration\\"
FOV = plt.imread(
config_path +
portcamdict['OP1.2a']['AEF'+str(10)] +
......@@ -55,8 +55,8 @@ except:
def get_latest_version(stream, project="W7X",
testmode=False, t_from=None, t_to=None,
program=None, verbose=0):
testmode=False, t_from=None, t_to=None,
program=None, verbose=0):
"""Find out latest version of given stream
:param stream url stream of interest
:return int of version number of None if non-versioned or non-existing stream
......@@ -1868,34 +1868,30 @@ def get_average_divertor_TC_for_camtemp(port, starttime, stoptime, part="all", T
rs = TC
re = TC+1
else:
rs = 0
re = 0
if rs != re:
for i in range(rs, re, 1):
if skip and i == skipnr:
i += 1
url = urlstart+str(offset+i)+ext+"0"+str(i)+"_TC"+str(i % 8+1)
try:
result = read_restdb(
url+"/_signal.json?from="+str(starttime)+"&upto="+str(stoptime))
# res.append(result)
if result[0]:
n += 1
time = result[1]
signal = signal+np.array(result[2])
else:
time = 0
success = False
except Exception as E:
print('get_average_divertor_TC_for_camtemp: Error! ', E)
success = False
print("get_average_divertor_TC_for_camtemp: Error! rs and re are the same numbers")
return False, 0, -1
for i in range(rs, re, 1):
if skip and i == skipnr:
i += 1
url = urlstart+str(offset+i)+ext+"0"+str(i)+"_TC"+str(i % 8+1)
try:
return success, time, signal/n
result = read_restdb(
url+"/_signal.json?from="+str(starttime)+"&upto="+str(stoptime))
# res.append(result)
if result[0]:
n += 1
time = result[1]
signal = signal+np.array(result[2])
else:
time = 0
success = False
except Exception as E:
print('get_average_divertor_TC_for_camtemp: Error! ', E)
return False, 0, -1
else:
print("get_average_divertor_TC_for_camtemp: Error! rs and re are the same numbers")
success = False
try:
return success, time, signal/n
except Exception as E:
print('get_average_divertor_TC_for_camtemp: Error! ', E)
return False, 0, -1
......@@ -3205,6 +3201,19 @@ def estimate_offset(port, program, plot_it=False, verbose=0):
return -1
def cache_heatflux_by_program(port, program, time_window=0, **kwargs):
from char_compare.utilities import check_path_and_make_folders
cache_folder = kwargs.pop('cache_folder', "//share.ipp-hgw.mpg.de/documents/luru/Documents/Arbeit/Data")
save_path = cache_folder + '/'.join(["", program, str(port), ""])
check_path_and_make_folders(True, save_path)
found, time, images = download_heatflux_by_program(port, program,
time_window=time_window, **kwargs)
start = time_window[0] if hasattr(time_window, '__iter__') else 0.0
end = time_window[-1] if hasattr(time_window, '__iter__') else time_window
np.savez(save_path + "heatflux_images_[{s:.2f}-{e:.2f}]".format(s=start, e=end),
found=found, time=time, images=images)
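
For orientation, a minimal usage sketch of the new caching helper, assuming the module is imported as `IR` (as in the example script at the end of this diff) and that the default `cache_folder` share is reachable; the port and program values are taken from the example script purely for illustration:

```python
import numpy as np
import downloadversionIRdata as IR  # assumed module name for the file changed above

# Download the heat-flux images once and write them as an .npz cache under
# <cache_folder>/<program>/<port>/heatflux_images_[<start>-<end>].npz
IR.cache_heatflux_by_program(port=20, program="20180927.017", time_window=0, verbose=1)

# Later runs can reload the cached arrays instead of re-downloading
cache = np.load("//share.ipp-hgw.mpg.de/documents/luru/Documents/Arbeit/Data"
                "/20180927.017/20/heatflux_images_[0.00-0.00].npz")
found, time, images = cache['found'], cache['time'], cache['images']
```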
def download_heatflux_by_program(port, program, time_window=0, threads=1, testmode=False,
version=0, verbose=0, givealpha=False, request=True, moffset=0):
"""
......@@ -3255,7 +3264,7 @@ def download_heatflux_by_program(port, program, time_window=0, threads=1, testmo
def download_heatflux_by_times(port, tstart, tend, time_window=0, threads=1, testmode=False,
version=0, verbose=0, request=True, moffset=0):
version=0, verbose=0, request=True, moffset=0, auto_process_all=True):
"""
port: string or integer
tstart: int64 timestamp in ns from which point the download should start
......@@ -3337,7 +3346,11 @@ def download_heatflux_by_times(port, tstart, tend, time_window=0, threads=1, tes
if (programid, "AEF"+str(port)) not in cam_progs and (programid, "AEF"+str(port)) not in cam_progs_ig:
f = open(heatflux_requestlist_path+str(now.year) +
str(now.month+moffset)+"_q_requests.txt", 'a')
f.write(programid+"\tAEF"+str(port)+"\n")
if auto_process_all:
[f.write(programid+"\tAEF"+str(p)+"\n")
for p in ['10', '11', '20', '21', '30', '31', '40', '41', '51']]
else:
f.write(programid+"\tAEF"+str(port)+"\n")
f.close()
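
With the new `auto_process_all=True` default, one missing program now appends a request line for every listed divertor camera instead of only the requested port, so a monthly request file (e.g. `20189_q_requests.txt` for September 2018 with `moffset=0`) would gain tab-separated lines of the following form (program ID illustrative):

```
20180927.017	AEF10
20180927.017	AEF11
20180927.017	AEF20
20180927.017	AEF21
20180927.017	AEF30
20180927.017	AEF31
20180927.017	AEF40
20180927.017	AEF41
20180927.017	AEF51
```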
if verbose > 0:
print(
......@@ -3673,7 +3686,7 @@ def give_finger_ID(profile, finger=None, verbose=0):
def get_heatflux_profile(port, profile, finger=None, timepoint=1, program=None,
tstart=None, tend=None, testmode=False, version=0, verbose=0):
tstart=None, tend=None, testmode=False, version=0, verbose=0):
"""
returns exist,time,s in m, heat flux in W/m2
......@@ -3762,6 +3775,74 @@ def get_heatflux_profile(port, profile, finger=None, timepoint=1, program=None,
return False, 0, 0, 0
def extract_heatflux_profile_from_cache(port, program, time_window=0, profile="TM3h_5_5", **kwargs):
"""Returns exists, time, position, heat_flux_profiles
"""
kwargs.setdefault('verbose', 0)
kwargs.setdefault('testmode', False)
data_folder = "//share.ipp-hgw.mpg.de/documents/luru/Documents/Arbeit/Data"
save_path = data_folder + '/'.join(["", program, str(port), ""])
start = time_window[0] if hasattr(time_window, '__iter__') else 0.0
end = time_window[-1] if hasattr(time_window, '__iter__') else time_window
path = save_path + \
"heatflux_images_[{s:.2f}-{e:.2f}].npz".format(s=start, e=end)
if not exists(path):
if kwargs['verbose'] > 0:
'''We could notify here if larger caches already exist by importing everything with
import glob
glob.glob("heatflux_images_/[[0,{s:.2f}]-[{e:.2f},1000]/].npz")
but this would take some more work, especially on the matching of arbitrarily large numbers"
'''
print("Generating cache for {program} port {port} timewindow [{s:.2f}-{e:.2f}]".format(
port=port, program=program, s=start, e=end
))
cache_heatflux_by_program(
port, program, time_window=time_window, **kwargs)
cache_file = np.load(path)
found, time, images = [cache_file[kw]
for kw in ['found', 'time', 'images']]
if not found:
raise ValueError("Download was invalid")
exist, _, t1, t6 = get_trigger_from_PID(
program, port, kwargs['testmode'], verbose=kwargs['verbose']-1)
return extract_heatflux_profile_from_DL((np.asarray(time)-t1)/1e9, images, profile, time_window=time_window, **kwargs)
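
A sketch of the cache-backed single-profile extraction, using the same assumed `IR` alias and illustrative values as above; on a cache miss it first regenerates the cache via `cache_heatflux_by_program`:

```python
exists_, t, s, q = IR.extract_heatflux_profile_from_cache(
    port=20, program="20180927.017", time_window=0,
    profile="TM3h_5_5", verbose=1)
# t: time in s relative to t1, s: position along the profile in m, q: heat flux in W/m2
```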
def extract_many_heatflux_profiles_from_cache(port, program, time_window=0, profile="TM3h_5_5", **kwargs):
"""Returns exists, time, position, heat_flux_profiles
"""
kwargs.setdefault('verbose', 0)
kwargs.setdefault('testmode', False)
data_folder = "//share.ipp-hgw.mpg.de/documents/luru/Documents/Arbeit/Data"
save_path = data_folder + '/'.join(["", program, str(port), ""])
start = time_window[0] if hasattr(time_window, '__iter__') else 0.0
end = time_window[-1] if hasattr(time_window, '__iter__') else time_window
path = save_path + \
"heatflux_images_[{s:.2f}-{e:.2f}].npz".format(s=start, e=end)
if not exists(path):
if kwargs['verbose'] > 0:
print("Generating cache for {program} port {port} timewindow [{s:.2f}-{e:.2f}]".format(
port=port, program=program, s=start, e=end
))
cache_heatflux_by_program(
port, program, time_window=time_window, **kwargs)
cache_file = np.load(path)
found, time, images = [cache_file[kw]
for kw in ['found', 'time', 'images']]
if not found:
raise ValueError("Download was invalid")
exist, _, t1, t6 = get_trigger_from_PID(
program, port, kwargs['testmode'], verbose=kwargs['verbose']-1)
return_dict = {}
for p in profile:
e, t, s, q = extract_heatflux_profile_from_DL(
(np.asarray(time)-t1)/1e9, images, p, time_window=time_window, **kwargs)
if e:
return_dict['times'] = t
return_dict[p] = {'positions': s, 'heat_flux': q}
return return_dict
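
And the multi-profile variant, which returns a dict keyed by profile name (profile names here are illustrative):

```python
profiles = IR.extract_many_heatflux_profiles_from_cache(
    port=20, program="20180927.017", time_window=0,
    profile=["TM3h_5_5", "TM3h_5_6"], verbose=1)
# e.g. {'times': t, 'TM3h_5_5': {'positions': s, 'heat_flux': q}, 'TM3h_5_6': {...}}
```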
def extract_heatflux_profile_from_DL(time, images, profile, finger=None, time_window=1,
inputchecked=False, verbose=0, testmode=False):
"""
......@@ -4169,11 +4250,11 @@ def read_restdb(request_url):
res.close()
except urllib.error.URLError as e:
# if hasattr(e, 'reason'):
#print('Failed to reach server:',request_url)
#print('Reason: ', e.reason)
#print('Failed to reach server:',request_url)
#print('Reason: ', e.reason)
# elif hasattr(e, 'code'):
#print('The server couldn\'t fulfill the request.')
#print('Error code: ', e.code)
#print('The server couldn\'t fulfill the request.')
#print('Error code: ', e.code)
return False, [0], [-1]
except Exception as E:
print('read_restdb: Error! ', E)
......
......@@ -10,7 +10,7 @@ import matplotlib.pyplot as plt
import datetime
port=51 # 20
program="20180927.017"#"20180904.007"#"20171207.022" # "20181004.012"
program="20180814.007"#"20180927.017"#"20180904.007"#"20171207.022" # "20181004.012"
if __name__=='__main__':
......@@ -22,6 +22,7 @@ if __name__=='__main__':
#%% plotting data
if status:
status2,time,s,q=IR.extract_heatflux_profile_from_DL((np.asarray(times)-times[0])/1e9,images,profile="TM3h_5_5",time_window=None,verbose=5,testmode=False)
plt.figure()
plt.imshow(images[-1]/1e6)
plt.clim([0,4])
......