
add create_Integrated_load_and_upload for V3.1.0

Merged Holger Niemann requested to merge Holgers into master
1 file  +289  −0
# -*- coding: utf-8 -*-
"""
Created on Thu Jan 17 13:27:05 2019
Program to create the missing integrated loads on the divertor, as far as heat flux data is available in the database.
V3.1.0
@author: holn
"""
#%% Import of libraries
#import scipy.io as sio
import numpy as np
import datetime
#import holn.functions as fu
import downloadversionIRdata as IR
from scipy.optimize import curve_fit
#from scipy.integrate import simps
from scipy.integrate import trapz
import uploadingversionIRdata as IRup
import IR_config_constants as IRconst
#import h5py as h5
#%% input for the code
#download input
programlist=["20171114.054"]#,"20170926.071","20171010.026"]
ports=["AEF10"]#,,"AEF51"
version=1
#upload
newversion=False
reason="Frist Upload"
code_release="n/a"
enviroment="python 3.5.1"#"n/a"
comment="error is the mininum error"
version=1
project=IRconst.project_ana
alpha_cor=1
heatflux_V=4
codi="Python 3.5.1 Integrated_load V1"
group="QRT_IRCAM"#+camera#+"_TEST"
archivepath="http://archive-webapi.ipp-hgw.mpg.de/Test/raw/"
#%% definition of constants
linedict={'h_l':30,'h_m':54,'h_h':24,'v':29}
numstacks={
'h_l':{0:13,1:14,2:14,3:14,4:14,5:14,6:14,7:14,8:14,9:14,10:15,11:15,12:14,13:14,14:14,15:13,16:13,17:13,18:13,19:13,20:13,21:13,22:13,23:13,24:13,25:13,26:13,27:13,28:13,29:13},
'h_m':{0:14,1:15,2:14,3:15,4:14,5:15,6:14,7:15,8:14,9:15,10:14,11:15,12:14,13:15,14:14,15:15,16:14,17:15,18:14,19:15,20:14,21:15,22:14,23:15,24:14,25:15,26:14,27:15,28:14,29:15,30:14,31:15,32:14,33:15,34:14,35:15,36:14,37:15,38:14,39:15,40:14,41:15,42:14,43:15,44:14,45:15,46:14,47:15,48:15,49:14,50:14,51:14,52:14,53:15},
'h_h':{0:16,1:15,2:15,3:15,4:15,5:15,6:15,7:15,8:15,9:15,10:15,11:15,12:15,13:15,14:15,15:15,16:15,17:15,18:15,19:15,20:15,21:15,22:15,23:15},
'v':{0:15,1:15,2:15,3:15,4:15,5:15,6:15,7:15,8:15,9:15,10:15,11:15,12:15,13:15,14:15,15:15,16:15,17:15,18:15,19:15,20:15,21:15,22:15,23:15,24:15,25:15,26:15,27:15,28:15}
}
Modulenames=["TM1h","TM2h","TM3h","TM4h","TM5h","TM6h","TM7h","TM8h","TM9h","TM1v","TM2v","TM3v"]
Modulefingers=[7,7,8,8,24,24,6,12,12,10,10,9]
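# Note: linedict and numstacks are kept here for reference (presumably the number of profile
# lines per target-element type and the stacks per line; they are not used below).
# Modulenames are the 12 target modules of one divertor unit and Modulefingers the number of
# fingers assigned to each module, used for the per-module load sums in the upload part.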
#%% extra functions
def gaussianfit1(d1,p0,pbounds=((0,-100,0),(1e12,100,100))):
    # p0 is the initial guess for the fitting coefficients (A, mu and sigma of gauss below)
    #p0 = [5.41123285, -0.04281431, -0.01736732]
    def gauss(x, *p):
        A, mu, sigma = p
        return A*np.exp(-(x-mu)**2/(2.*sigma**2))
    if len(d1)==3:
        coeff, var_matrix = curve_fit(gauss, d1[0], d1[1], sigma=d1[2], absolute_sigma=True, p0=p0, bounds=pbounds)
    else:
        coeff, var_matrix = curve_fit(gauss, d1[0], d1[1], p0=p0, bounds=pbounds)
    # Get the fitted curve
    gausfit = gauss(d1[0], *coeff)
    return [d1[0],gausfit,coeff]
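# Illustrative sketch (kept as a comment, not executed): estimate a noise level from the
# width of a Gaussian fitted to a histogram of noise samples, as done in the main loop
# below; "noise_samples" is a placeholder array.
#   hist = np.histogram(noise_samples, bins=20)
#   xvals, fit, coeff = gaussianfit1([hist[1][0:-1], hist[0]], (np.max(hist[0]), 0, 1))
#   noiselevel = coeff[2]  # coeff = (A, mu, sigma); sigma is used as the noise level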
def update_progress(progress,extratext=""):
    import sys
    barLength = 10 # Modify this to change the length of the progress bar
    status = ""
    if isinstance(progress, int):
        progress = float(progress)
    if not isinstance(progress, float):
        progress = 0
        status = "error: progress var must be float\r\n"
    if progress < 0:
        progress = 0
        status = "Halt...\r\n"
    if progress >= 1:
        progress = 1
        status = "Done...\r\n"
    block = int(round(barLength*progress))
    text = "\rPercent: [{0}] {1}% {2} {3}".format("#"*block + "-"*(barLength-block), round(progress*100,2), status, extratext)
    sys.stdout.write(text)
    sys.stdout.flush()
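# Example: update_progress(0.42, extratext="AEF10") rewrites the current console line to
# something like "Percent: [####------] 42.0%  AEF10".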
def distance_points(p1,p2):
    dx=p2[0]-p1[0]
    dy=p2[1]-p1[1]
    dz=p2[2]-p1[2]
    return np.sqrt(dx**2+dy**2+dz**2)
def simpsonint(XYAR):
    x=XYAR[0]
    y1=XYAR[1]
    return trapz(y1, x)#simps often returns NaNs here; in the original offline version it did not. The difference between the offline and the archive version has not been identified, so trapz is used instead.
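# trapz implements the trapezoidal rule, integral(y dx) ~ sum_i 0.5*(y[i]+y[i+1])*(x[i+1]-x[i]);
# applied to a heat flux profile (assumed W/m^2) over the profile coordinate s (assumed m)
# it yields a line load in W/m.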
#%% the code
#steps
#1. check whether the data is already uploaded
#2. if not, download the heat flux data
#3. if heat flux data is available, evaluate the divertor load; if not, create a request for the heat flux
#4. if 3. was successful, upload the evaluated divertor load
### loading of the data (from the archive instead of a local mat file)
for program in programlist:
    for port in ports:
        if type(port)==int:
            portnr=port
            port="AEF"+str(port)
        else:
            portnr=int(port.split("AEF")[1])
        # check whether the data is available (step 1)
        exist,time,load,error=IR.download_divertor_load(port=portnr,program=program,version=version)
        if not exist:#(go on with step 2)
            status,times,images=IR.download_heatflux_by_program(portnr,program,version=1,threads=1,verbose=5)
            ### data loaded, but we also need the spatial information from the mapping, so load the mapping
            if status:#(go on with step 3)
                exist,mapping=IR.download_heatflux_mapping_reference()
                if exist:#without the mapping it does not make sense to go on (spatial information)
                    fingerID=np.asarray(mapping['Finger_ID'][0],dtype=np.int)
                    fingers=np.nanmax(fingerID)
                    profilelocs=[]
                    lines_all=[]
                    for fi in range(int(fingers//100)+1):
                        profiles=max(fingerID[(fingerID>fi*100) & (fingerID<(fi+1)*100)])%100+1
                        profilelocs.append(profiles)
                        lines_all.append([])
                        for pi in range(profiles):
                            X=mapping['Finger_X'][np.where(fingerID==int(fi*100+pi))]
                            Y=mapping['Finger_Y'][np.where(fingerID==int(fi*100+pi))]
                            lines_all[fi].append(np.array([X,Y]))
                    time=(np.asarray(times)-times[0])/1e9
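                    # the archive returns the frame times as nanosecond timestamps;
                    # convert them to seconds relative to the first frame for the analysis below.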
                    ### data loaded, mapping loaded, now calculate the loads
                    noise_levels=[]
                    disu=[]
                    disd=[]
                    dummy_i=0
                    load_finger_time=[]
                    #do it for each time step#
                    totallength=len(time)*len(profilelocs)
                    leni=len(profilelocs)
                    for ti in range(len(time)):
                        print(datetime.datetime.now(),"Analysis "+program+" AEF"+str(portnr)+" time: "+str(round(time[ti],2)))
                        update_progress((ti+1)/(len(time)),extratext="Analysis "+program+" AEF"+str(portnr)+" time: "+str(round(time[ti],2)))
                        totalnum=0
                        totalflux=0
                        testdisplay=ti
                        fullint=[]# complete line integral
                        max_fluxes=[]
                        fingerpow=[]
                        fingerint1=[]
                        weg1=[]
                        #do it for each time step and for each finger#
                        for ii in range(len(profilelocs)):
                            if ti==0:
                                update_progress((ti*leni+ii+1)/(totallength),extratext="Analysis "+program+" AEF"+str(portnr)+" time: "+str(round(time[ti],2)))
                            fullint.append([])#fingerlevel
                            max_fluxes.append([])
                            if ti==0:
                                noise_levels.append([])
                            #do it for each time step ti, each finger ii and each profile j on the finger
                            for j in range(int(profilelocs[ii])):
                                heatprofile=images[ti][np.where(fingerID==int(ii*100+j))]
                                prof=np.asarray(mapping['s'])[np.where(fingerID==int(ii*100+j))]#profilelocs[ii][real_i]
                                max_fluxes[ii].append(np.max(heatprofile))
                                if ti==0:
                                    noisss=[images[t][np.where(fingerID==int(ii*100+j))] for t in range(len(time)-50,len(time))]#fingerfluxes[ii][real_i][len(time)-50:len(time)-1]#[]
                                    noisehist=np.histogram(np.asarray(noisss)/1e6,bins=20)
                                    try:
                                        noisegauß=gaussianfit1([noisehist[1][0:-1],noisehist[0]],(np.max(noisehist[0]),0,1))
                                        noiselevel=noisegauß[2][2]*1e6
                                    except:
                                        noiselevel=0
                                    noise_levels[ii].append(noiselevel)
                                else:
                                    noiselevel=noise_levels[ii][j]
                                if ti==0:
                                    disu.append([])
                                    disd.append([])
                                    if j>0:
                                        disu[ii].append(disu[ii][j-1]+distance_points((lines_all[ii][j][0][0],lines_all[ii][j][1][0],0),(lines_all[ii][dummy_i][0][0],lines_all[ii][dummy_i][1][0],0)))
                                        disd[ii].append(disd[ii][j-1]+distance_points((lines_all[ii][j][0][-1],lines_all[ii][j][1][-1],0),(lines_all[ii][dummy_i][0][-1],lines_all[ii][dummy_i][1][-1],0)))
                                    else:
                                        disu[ii].append(0)
                                        disd[ii].append(0)
                                    dummy_i=j
                                totalnum+=len(heatprofile)
                                totalflux+=np.sum(heatprofile)
                                if max_fluxes[ii][j]>2*noiselevel:
                                    fullint[ii].append(simpsonint([prof,heatprofile]))
                                else:
                                    fullint[ii].append(0)
                            ### the profiles in the mapping do not reach the ends of the tiles; there is a gap at the beginning and the end; extrapolate the data to these points(?)
                            # the profiles on a finger are integrated and the distances between them are known, so integrate again across the profiles to get the finger load
                            fingerpow.append(simpsonint([(np.array(disu[ii])+np.array(disd[ii]))/2,fullint[ii]]))
                            fingerint1.append(fullint[ii])
                            weg1.append((np.array(disu[ii])+np.array(disd[ii]))/2)
                        load_finger_time.append(fingerpow)
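                    # load_finger_time[t][f] now holds the integrated load of finger f at time
                    # step t (in W, assuming heat flux in W/m^2 and mapping coordinates in m).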
                    load_finger_time=np.asarray(load_finger_time)
                    fingernoises=[]
                    for i in range(len(load_finger_time[0])):
                        nois=load_finger_time[len(time)-50:len(time)-1,i]
                        noisehist=np.histogram(np.asarray(nois),bins=20)
                        try:
                            noisegauß=gaussianfit1([noisehist[1][0:-1],noisehist[0]],(np.max(noisehist[0]),0,200),pbounds=((0,-1e3,0),(1e12,1000,1000)))
                            noiselevel=noisegauß[2][2]
                            if np.max(noisegauß[1])<1:
                                noiselevel=(np.max(nois)-np.min(nois))/2
                        except:
                            noiselevel=(np.max(nois)-np.min(nois))/2
                        fingernoises.append(noiselevel)
                    totalload=[]
                    totalnoise=np.sum(fingernoises)
                    for t in range(len(time)):
                        totalload.append(np.sum(load_finger_time[t]))
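                    # total divertor load per time step = sum over all finger loads; the total
                    # error is the linear (conservative) sum of the per-finger noise levels.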
                    #%%## upload part
                    port=portnr
                    Q_total=totalload
                    Q_total_e=totalnoise*np.ones(len(time))
                    Q_module=[]
                    Q_module_e=[]
                    for TM in range(12):
                        Q_module.append([])
                        Q_module_e.append([])
                        FID=sum(Modulefingers[0:TM])
                        Q_module[TM]=load_finger_time[:,0+1*FID]#data[:,3+2*FID]
                        Q_module_e[TM]=fingernoises[0+1*FID]#data[:,4+2*FID]
                        # stringcheck=str(3+2*FID)
                        for FI in range(1,Modulefingers[TM]):
                            # stringcheck=stringcheck+" "+str(3+2*(FID+FI))
                            Q_module[TM]=Q_module[TM]+load_finger_time[:,0+1*(FID+FI)]#data[:,3+2*(FID+FI)]
                            Q_module_e[TM]=Q_module_e[TM]+fingernoises[0+1*(FID+FI)]#data[:,4+2*(FID+FI)]
                        Q_module_e[TM]=Q_module_e[TM]*np.ones(len(time))
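                    # Modulefingers gives the number of fingers per target module (TM1h..TM9h,
                    # TM1v..TM3v), so FID = sum(Modulefingers[0:TM]) is the first finger index of
                    # module TM; the module load sums its finger loads, the module error sums the
                    # corresponding finger noise levels and is broadcast over the time vector.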
                    prog=IR.get_program_from_PID(program)
                    if prog[0]:
                        try:
                            T1=prog[1]['trigger']['1'][0]
                            time_ns=times#np.asarray(np.asarray(time)*1e9+T1,dtype=np.uint64)
                            datastream_name="AEF"+str(port)+"_loads"
                            url=archivepath+project+"/"+group+"/"+datastream_name
                            if "Test" in archivepath:
                                database="Test"
                            else:
                                database="ArchiveDB"
                            if newversion:
                                IRup.archive_create_stream_version(url, reason, False, code_release, enviroment)
                                ver=IRup.get_latest_version(database+"/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG")
                                version=ver
                            else:
                                ver=IRup.get_latest_version(database+"/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG")
                                if ver is None:
                                    print("No version found!")
                                    reason="Versioning of Data"
                                    IRup.archive_create_stream_version(url, reason, False, code_release, enviroment)
                                    version=1
                                else:
                                    if version==0:
                                        version=ver
chanDesc={"[0]":{'name':'divertor_total_load','physicalQuantity':{'type':'W'},'active':1},
"[1]":{'name':'divertor_total_load_error','physicalQuantity':{'type':'W'},'active':1}
}
channels=[Q_total,
Q_total_e]
for i in range(12):
chanDesc["["+str(2*i+2)+"]"]={'name':Modulenames[i]+'_load','physicalQuantity':{'type':'W'},'active':1}
chanDesc["["+str(2*i+3)+"]"]={'name':Modulenames[i]+'_load_error','physicalQuantity':{'type':'W'},'active':1}
channels.append(Q_module[i])
channels.append(Q_module_e[i])
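                            # resulting channel layout (26 channels): [0] total divertor load,
                            # [1] its error, then per module i: [2*i+2] load and [2*i+3] error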
                            metadata={'heatflux_version':heatflux_V,'alpha_corrected':alpha_cor,'code_version':codi}
                            parobj={"label":"parms",
                                    "description":"load on the divertor from camera in AEF"+str(port),
                                    "values":[{"chanDescs":chanDesc,'header':metadata,'comment':comment}],
                                    "dimensions":[int(time_ns[0]),int(time_ns[-1])]
                                    }
                            channels=np.asarray(channels)
                            dataobj={"datatype":"float",
                                     "values":channels.tolist(),
                                     "dimensions":time_ns.tolist()
                                     }
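                            # parobj is written to the PARLOG of the datastream (channel
                            # descriptions plus metadata and comment), while dataobj carries the
                            # channel values over the nanosecond time vector; both are handed to
                            # IRup.upload_fulldataset below.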
                            IRup.upload_fulldataset(url,parobj,dataobj,version)
                            print("Upload for AEF",port,"for",program,"finished")
                        except Exception as E:
                            print("unable to upload",program,"for AEF",port,E)
                    else:
                        print("cannot find the program",program,"in the database, cannot create the time vector")
        else:
            print("load exists already for",port,program)