Commit 974ba8bb authored by Holger Niemann's avatar Holger Niemann
Browse files

V3.2.0: scene model download, downsampled temperature download, bug fixing,...

V3.2.0: scene model download, downsampled temperature download, bug fixing, temperature profile download
parent ddb72caa
<<<<<<< HEAD
26.03.2019: Update to V3.2.0:
- scene model download possible
- scene model reference download possible for heat flux data
- downsampled temperature image stream possible
- extraction of temperature profiles possible
27/28/29.11.2018: update to V3
- add file upload_heatflux.py
- add plot_heatflux_example.py
......@@ -44,6 +48,7 @@ bugfixes in downloadversionIRdata:
- fixed: wrong time interval for TC divertor data
Versions:
V3.2.0: download of scene models possible, temperature profiles can be extracted, downsampled temperature images available
V3.1.0: add of divertor loads in the upload and the download
V3.0.2: change the upload of heat flux from float64 to float32. extract profile accepts now also timewindows.
V3.0.1: extension of the heat flux upload (scene model reference can be uploaded)
......
......@@ -6,14 +6,3 @@ download:
- implement download of the stored temperature data (After the upload)
- implement download of the stored heat flux data --> done in V3.0.0
- implement download of FLIR data --> Done in V2.8.0, in testing phase
upload:
- upload the new NUC'ing (non-uniformity correction) files for AEF50 (the workaround)
- upload remaining data from failed uploads
- upload scene models
- upload temperature
- upload heat flux data (started)
- upload FLIR data (partly done)
- upload FLIR LUT (partly done)
This diff is collapsed.
# -*- coding: utf-8 -*-
"""
Created on Thu Jan 17 13:27:05 2019
program to create missing integrated loads on the divertor as far the heat flux data is available in the database
V3.1.0
@author: holn
"""
#%% Import of libaries
#import scipy.io as sio
import numpy as np
import datetime
#import holn.functions as fu
import downloadversionIRdata as IR
from scipy.optimize import curve_fit
#from scipy.integrate import simps
from scipy.integrate import trapz
import uploadingversionIRdata as IRup
import IR_config_constants as IRconst
#import h5py as h5
#%% input for the code
# --- download input ---
programlist=["20171114.054"]#,"20170926.071","20171010.026"]  # W7-X program (shot) IDs to process
ports=["AEF10"]#,,"AEF51"  # camera ports, either "AEFxx" strings or plain port numbers
version=1  # requested version of the already-archived divertor-load stream (availability check)
# --- upload settings ---
newversion=False  # True: create a new stream version in the archive before uploading
reason="First Upload"  # versioning reason stored in the archive (typo "Frist" fixed)
code_release="n/a"
enviroment="python 3.5.1"#"n/a"
comment="error is the minimum error"  # comment stored in the parlog (typo "mininum" fixed)
# NOTE(review): this re-assignment clobbers the download `version` above; both are 1 today,
# but use separate names if the two are ever meant to differ.
version=1
project=IRconst.project_ana  # archive project for analysis data
alpha_cor=1      # alpha-correction flag, stored in the metadata header
heatflux_V=4     # version of the heat flux evaluation, stored in the metadata header
codi="Python 3.5.1 Integrated_load V1"  # code identifier, stored in the metadata header
group="QRT_IRCAM"#+camera#+"_TEST"
archivepath="http://archive-webapi.ipp-hgw.mpg.de/Test/raw/"  # Test archive; ArchiveDB for production
#%% definition of constants
# number of profile lines per target-element type:
# h_l/h_m/h_h = horizontal target low/middle/high iota part, v = vertical target
# (matches the target-group encoding 1..4 used elsewhere in this commit)
linedict={'h_l':30,'h_m':54,'h_h':24,'v':29}
# per-line counts for each target part (line index -> count);
# presumably the number of stacks/profiles on that line -- TODO confirm against the mapping
numstacks={
    'h_l':{0:13,1:14,2:14,3:14,4:14,5:14,6:14,7:14,8:14,9:14,10:15,11:15,12:14,13:14,14:14,15:13,16:13,17:13,18:13,19:13,20:13,21:13,22:13,23:13,24:13,25:13,26:13,27:13,28:13,29:13},
    'h_m':{0:14,1:15,2:14,3:15,4:14,5:15,6:14,7:15,8:14,9:15,10:14,11:15,12:14,13:15,14:14,15:15,16:14,17:15,18:14,19:15,20:14,21:15,22:14,23:15,24:14,25:15,26:14,27:15,28:14,29:15,30:14,31:15,32:14,33:15,34:14,35:15,36:14,37:15,38:14,39:15,40:14,41:15,42:14,43:15,44:14,45:15,46:14,47:15,48:15,49:14,50:14,51:14,52:14,53:15},
    'h_h':{0:16,1:15,2:15,3:15,4:15,5:15,6:15,7:15,8:15,9:15,10:15,11:15,12:15,13:15,14:15,15:15,16:15,17:15,18:15,19:15,20:15,21:15,22:15,23:15},
    'v':{0:15,1:15,2:15,3:15,4:15,5:15,6:15,7:15,8:15,9:15,10:15,11:15,12:15,13:15,14:15,15:15,16:15,17:15,18:15,19:15,20:15,21:15,22:15,23:15,24:15,25:15,26:15,27:15,28:15}
    }
# names of the 12 divertor target modules (9 horizontal, 3 vertical)
Modulenames=["TM1h","TM2h","TM3h","TM4h","TM5h","TM6h","TM7h","TM8h","TM9h","TM1v","TM2v","TM3v"]
# number of fingers belonging to each target module, in Modulenames order
Modulefingers=[7,7,8,8,24,24,6,12,12,10,10,9]
#%% extra functions
def gaussianfit1(d1, p0, pbounds=((0, -100, 0), (1e12, 100, 100))):
    """Fit a Gaussian A*exp(-(x-mu)^2 / (2*sigma^2)) to the data in d1.

    d1 is [x, y] or [x, y, yerr]; when the error vector is present it is
    passed to the fit as absolute measurement errors.  p0 = (A, mu, sigma)
    is the initial guess, pbounds the (lower, upper) parameter bounds.
    Returns [x, fitted_y, coefficients].
    """
    def _model(x, amplitude, center, width):
        return amplitude * np.exp(-((x - center) ** 2) / (2.0 * width ** 2))

    fit_kwargs = {"p0": p0, "bounds": pbounds}
    if len(d1) == 3:
        # third entry = absolute y-errors for a weighted fit
        fit_kwargs.update(sigma=d1[2], absolute_sigma=True)
    coeff, _cov = curve_fit(_model, d1[0], d1[1], **fit_kwargs)
    # evaluate the fitted curve on the input grid
    return [d1[0], _model(d1[0], *coeff), coeff]
def update_progress(progress, extratext=""):
    """Render/refresh a ten-cell text progress bar on stdout.

    progress is a fraction in [0, 1]; values outside that range are
    clamped and flagged with a "Halt"/"Done" status, and non-numeric
    input is reported as an error.  extratext is appended to the line.
    """
    import sys
    bar_cells = 10  # modify this to change the length of the progress bar
    status = ""
    if isinstance(progress, int):
        progress = float(progress)
    if not isinstance(progress, float):
        progress = 0
        status = "error: progress var must be float\r\n"
    if progress < 0:
        progress = 0
        status = "Halt...\r\n"
    if progress >= 1:
        progress = 1
        status = "Done...\r\n"
    filled = int(round(bar_cells * progress))
    bar = "#" * filled + "-" * (bar_cells - filled)
    sys.stdout.write(f"\rPercent: [{bar}] {round(progress * 100, 2)}% {status} {extratext}")
    sys.stdout.flush()
def distance_points(p1, p2):
    """Return the Euclidean distance between two 3-D points (x, y, z)."""
    dx, dy, dz = (p2[k] - p1[k] for k in range(3))
    return np.sqrt(dx ** 2 + dy ** 2 + dz ** 2)
def simpsonint(XYAR):
    """Integrate y over x with the trapezoidal rule.

    XYAR is a pair [x, y].  The trapezoidal rule is used instead of
    Simpson's rule because simps often returned NaNs here; the difference
    between the offline and the archive version was never identified
    (original author's note).  np.trapz is used directly because
    scipy.integrate.trapz -- a thin alias of it, so results are
    identical -- was deprecated and removed in recent SciPy releases.
    """
    x = XYAR[0]
    y1 = XYAR[1]
    return np.trapz(y1, x)
#%% the code
# Processing steps:
# 1. check whether the integrated divertor load is already in the archive
# 2. if not, download the heat flux data
# 3. if heat flux data is available, evaluate the divertor load (otherwise a heat flux request would be needed)
# 4. if step 3 was successful, upload the evaluated divertor load
### Loading of the matfile (legacy comment -- the data comes from the archive, not a .mat file)
for program in programlist:
    for port in ports:
        # accept the port either as a plain number (10) or as a string ("AEF10")
        if type(port)==int:
            portnr=port
            port="AEF"+str(port)
        else:
            portnr=int(port.split("AEF")[1])
        # check whether the data is already available in the archive (step 1)
        exist,time,load,error=IR.download_divertor_load(port=portnr,program=program,version=version)
        if not exist:#(go on with step 2)
            status,times,images=IR.download_heatflux_by_program(portnr,program,version=1,threads=1,verbose=5)
            # data loaded, but the spatial information from the mapping is also needed, so load the mapping
            if status:#(go on with step 3)
                exist,mapping=IR.download_heatflux_mapping_reference()
                if exist:#without the mapping it does not make sense to go on (no spatial information)
                    # Finger_ID encodes 100*finger_index + profile_index (see the indexing below)
                    # NOTE(review): np.int was removed in NumPy 1.24 -- newer NumPy needs plain int here
                    fingerID=np.asarray(mapping['Finger_ID'][0],dtype=np.int)
                    fingers=np.nanmax(fingerID)
                    profilelocs=[]  # number of profiles per finger
                    lines_all=[]    # per finger: one (X,Y) point array per profile
                    for fi in range(int(fingers//100)+1):
                        profiles=max(fingerID[(fingerID>fi*100) & (fingerID<(fi+1)*100)])%100+1
                        profilelocs.append(profiles)
                        lines_all.append([])
                        for pi in range(profiles):
                            X=mapping['Finger_X'][np.where(fingerID==int(fi*100+pi))]
                            Y=mapping['Finger_Y'][np.where(fingerID==int(fi*100+pi))]
                            lines_all[fi].append(np.array([X,Y]))
                    # convert absolute ns timestamps into seconds relative to the first frame
                    time=(np.asarray(times)-times[0])/1e9
                    ### data loaded, mapping loaded, calculate now the loads
                    noise_levels=[]
                    disu=[]   # cumulative distances between profile start points ("up" edge of a finger)
                    disd=[]   # cumulative distances between profile end points ("down" edge of a finger)
                    dummy_i=0 # index of the previously handled profile, used in the distance calculation
                    load_finger_time=[]  # integrated load per finger, one row per time step
                    #do it for each time step#
                    totallength=len(time)*len(profilelocs)
                    leni=len(profilelocs)
                    for ti in range(len(time)):
                        print(datetime.datetime.now(),"Analysis "+program+" AEF"+str(portnr)+" time: "+str(round(time[ti],2)))
                        update_progress((ti+1)/(len(time)),extratext="Analysis "+program+" AEF"+str(portnr)+" time: "+str(round(time[ti],2)))
                        totalnum=0
                        totalflux=0
                        testdisplay=ti
                        fullint=[]# complete line integral
                        max_fluxes=[]
                        fingerpow=[]
                        fingerint1=[]
                        weg1=[]
                        #do it for each time step and for each finger#
                        for ii in range(len(profilelocs)):
                            if ti==0:
                                update_progress((ti*leni+ii+1)/(totallength),extratext="Analysis "+program+" AEF"+str(portnr)+" time: "+str(round(time[ti],2)))
                            fullint.append([])#fingerlevel
                            max_fluxes.append([])
                            if ti==0:
                                noise_levels.append([])
                            #do it for each time step ti, each finger ii and each profile j on the finger
                            for j in range(int(profilelocs[ii])):
                                heatprofile=images[ti][np.where(fingerID==int(ii*100+j))]
                                prof=np.asarray(mapping['s'])[np.where(fingerID==int(ii*100+j))]#profilelocs[ii][real_i]
                                max_fluxes[ii].append(np.max(heatprofile))
                                if ti==0:
                                    # estimate the noise level once per profile from the last 50 frames
                                    noisss=[images[t][np.where(fingerID==int(ii*100+j))] for t in range(len(time)-50,len(time))]#fingerfluxes[ii][real_i][len(time)-50:len(time)-1]#[]
                                    noisehist=np.histogram(np.asarray(noisss)/1e6,bins=20)
                                    try:
                                        # sigma of the gaussian fitted to the histogram = noise level (back in W/m^2 via *1e6)
                                        noisegauß=gaussianfit1([noisehist[1][0:-1],noisehist[0]],(np.max(noisehist[0]),0,1))
                                        noiselevel=noisegauß[2][2]*1e6
                                    except:
                                        # NOTE(review): bare except also swallows KeyboardInterrupt; except Exception would be safer
                                        noiselevel=0
                                    noise_levels[ii].append(noiselevel)
                                else:
                                    noiselevel=noise_levels[ii][j]
                                if ti==0:
                                    # the geometry does not change in time: compute profile-to-profile distances only once
                                    disu.append([])
                                    disd.append([])
                                    if j>0:
                                        disu[ii].append(disu[ii][j-1]+distance_points((lines_all[ii][j][0][0],lines_all[ii][j][1][0],0),(lines_all[ii][dummy_i][0][0],lines_all[ii][dummy_i][1][0],0)))
                                        disd[ii].append(disd[ii][j-1]+distance_points((lines_all[ii][j][0][-1],lines_all[ii][j][1][-1],0),(lines_all[ii][dummy_i][0][-1],lines_all[ii][dummy_i][1][-1],0)))
                                    else:
                                        disu[ii].append(0)
                                        disd[ii].append(0)
                                    dummy_i=j
                                totalnum+=len(heatprofile)
                                totalflux+=np.sum(heatprofile)
                                # integrate along the profile only where the signal is clearly above the noise
                                if max_fluxes[ii][j]>2*noiselevel:
                                    fullint[ii].append(simpsonint([prof,heatprofile]))
                                else:
                                    fullint[ii].append(0)
                            ### the profiles in the mapping do not reach the ends of the tiles; there is a gap at the beginning and the end -- extrapolate the data to those points(?)
                            # the profiles on the fingers are integrated and the distances between them are known, so integrate again to get the finger load
                            fingerpow.append(simpsonint([(np.array(disu[ii])+np.array(disd[ii]))/2,fullint[ii]]))
                            fingerint1.append(fullint[ii])
                            weg1.append((np.array(disu[ii])+np.array(disd[ii]))/2)
                        load_finger_time.append(fingerpow)
                    load_finger_time=np.asarray(load_finger_time)
                    # estimate a noise level for each finger load from the last ~50 time steps
                    fingernoises=[]
                    for i in range(len(load_finger_time[0])):
                        nois=load_finger_time[len(time)-50:len(time)-1,i]
                        noisehist=np.histogram(np.asarray(nois),bins=20)
                        try:
                            noisegauß=gaussianfit1([noisehist[1][0:-1],noisehist[0]],(np.max(noisehist[0]),0,200),pbounds=((0,-1e3,0),(1e12,1000,1000)))
                            noiselevel=noisegauß[2][2]
                        except:
                            noiselevel=(np.max(nois)-np.min(nois))/2
                        # NOTE(review): if gaussianfit1 raised above, noisegauß is undefined (or stale from the
                        # previous finger) here -- this check can raise NameError or use the wrong fit; confirm intent
                        if np.max(noisegauß[1])<1:
                            noiselevel=(np.max(nois)-np.min(nois))/2
                        fingernoises.append(noiselevel)
                    totalload=[]
                    totalnoise=np.sum(fingernoises)
                    for t in range(len(time)):
                        totalload.append(np.sum(load_finger_time[t]))
                    #%%## upload part
                    port=portnr
                    Q_total=totalload                        # total divertor load vs. time
                    Q_total_e=totalnoise*np.ones(len(time))  # constant error estimate for the total load
                    Q_module=[]    # load per target module (TM) vs. time
                    Q_module_e=[]  # error per target module
                    for TM in range(12):
                        Q_module.append([])
                        Q_module_e.append([])
                        # first finger index of this module = sum of the finger counts of all previous modules
                        FID=sum(Modulefingers[0:TM])
                        Q_module[TM]=load_finger_time[:,0+1*FID]#data[:,3+2*FID]
                        Q_module_e[TM]=fingernoises[0+1*FID]#data[:,4+2*FID]
#                        stringcheck=str(3+2*FID)
                        for FI in range(1,Modulefingers[TM]):
#                            stringcheck=stringcheck+" "+str(3+2*(FID+FI))
                            Q_module[TM]=Q_module[TM]+load_finger_time[:,0+1*(FID+FI)]#data[:,3+2*(FID+FI)]
                            Q_module_e[TM]=Q_module_e[TM]+fingernoises[0+1*(FID+FI)]#data[:,4+2*(FID+FI)]
                        Q_module_e[TM]=Q_module_e[TM]*np.ones(len(time))
                    prog=IR.get_program_from_PID(program)
                    if prog[0]:
                        try:
                            # T1 = program trigger t1 in ns; currently unused because the raw frame times are kept (next line)
                            T1=prog[1]['trigger']['1'][0]
                            time_ns=times#np.asarray(np.asarray(time)*1e9+T1,dtype=np.uint64)
                            datastream_name="AEF"+str(port)+"_loads"
                            url=archivepath+project+"/"+group+"/"+datastream_name
                            if "Test" in archivepath:
                                database="Test"
                            else:
                                database="ArchiveDB"
                            # version handling: create a brand-new stream version, or reuse/initialise the latest one
                            if newversion:
                                IRup.archive_create_stream_version(url, reason, False, code_release, enviroment )
                                ver=IRup.get_latest_version(database+"/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG")
                                version=ver
                            else:
                                ver=IRup.get_latest_version(database+"/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG")
                                if ver==None:
                                    print("No version found!")
                                    reason="Versioning of Data"
                                    IRup.archive_create_stream_version(url, reason, False, code_release, enviroment )
                                    version=1
                                else:
                                    if version==0:
                                        version=ver
                            # channels 0/1: total load and its error; channels 2..25: per-module load and error
                            chanDesc={"[0]":{'name':'divertor_total_load','physicalQuantity':{'type':'W'},'active':1},
                                      "[1]":{'name':'divertor_total_load_error','physicalQuantity':{'type':'W'},'active':1}
                                      }
                            channels=[Q_total,
                                      Q_total_e]
                            for i in range(12):
                                chanDesc["["+str(2*i+2)+"]"]={'name':Modulenames[i]+'_load','physicalQuantity':{'type':'W'},'active':1}
                                chanDesc["["+str(2*i+3)+"]"]={'name':Modulenames[i]+'_load_error','physicalQuantity':{'type':'W'},'active':1}
                                channels.append(Q_module[i])
                                channels.append(Q_module_e[i])
                            metadata={
                                    'heatflux_version':heatflux_V,'alpha_corrected':alpha_cor,'code_version':codi}
                            parobj={"label":"parms",
                                    "description":"load on the divertor from camera in AEF"+str(port),
                                    "values":[{"chanDescs":chanDesc,'header':metadata,'comment':comment}],
                                    "dimensions":[int(time_ns[0]),int(time_ns[-1])]
                                    }
                            channels=np.asarray(channels)
                            dataobj={"datatype":"float",
                                     "values":channels.tolist(),
                                     "dimensions":time_ns.tolist()
                                     }
                            IRup.upload_fulldataset(url,parobj,dataobj,version)
                            print("Upload for AEF",port,"for",program," finished")
                        except Exception as E:
                            print("unable to upload",program,"for AEF",port,E)
                    else:
                        print("cannot find the program",program,"in the database, cannot create the timevector")
        else:
            print("load exist already for ",port,program)
\ No newline at end of file
This diff is collapsed.
# -*- coding: utf-8 -*-
"""
Created on Fri Sep 21 09:32:29 2018
V3.1.2
@author: holn
"""
import numpy as np
import scipy.io as sio
#import matplotlib.pyplot as plt
import h5py as h5
import uploadingversionIRdata as IRup
import downloadversionIRdata as IR
import datetime
#125,190 vertical, horizontal *3 mm  -- presumably the mapping image extents; TODO confirm
port=20  # camera port number (AEF20)
program="20171109.008"  # default program ID; overwritten by the loop over programlist below
#programlist=["20171011.009"]
programlist=["20171114.038"]#"20180814.024"]#,"20181016.016"]#["20171108.018","20171109.040","20171109.045"]
# local paths: heat flux .mat files, 2D line definitions for the stacks, and scene models
path="F:\\tmp\\upload_it\\"#"F:\\holn\\Documents\\Doktorarbeit\\RESULTS\\OP1.2a\\Fulldivertor_res\\EJM\\H\\"
outpfad="F:\\holn\\Documents\\Doktorarbeit\\DATEN\\Mapping\\LinesforStacks\\"
# NOTE(review): single backslash before OP1.2a (harmless since \O is no escape sequence, but looks unintended)
scenepath="X:\\E4 Diagnostics\\QIR\\Data\\scene_models\OP1.2a\\"
project="W7XAnalysis"  # archive project
group="QRT_IRCAM"      # archive group
stream="AEF"+str(port)+"_heatflux"  # archive stream for the heat flux images
comment="first upload"
header={'code_version':4,'alpha_corrected':1}  # parlog header metadata
stream2="Mapping_reference"  # archive stream for the mapping reference images
upload_Mapping=False  # True: also upload the (program-independent) mapping reference
upload_scene_reference=True#False  # True: also upload the scene-model reference images
stream_scene="AEF"+str(port)+"_SceneModel_reference"
scene_reference_time=1512547200000000000#1503907200000000000#ns timestamp goes here!
for program in programlist:
File=sio.loadmat(path+program+"_AEF"+str(port)+"_heatflux_V4.mat")
time=np.asarray(File['time'][0])
tilelist=np.asarray(File['tiles'])
### camera, program dependent things###
heatpics=np.zeros((125+190+10,108*12+4,len(time)),dtype=np.float32)# define the pictures for the heat flux
alphapic=np.zeros((125+190+10,108*12+4,1),dtype=np.float32) # define the picture for the alpha values
alphapic.fill(np.nan)
heatpics.fill(np.nan)
if upload_Mapping:
### mapping dependent things ###
Xpic=alphapic.copy() #X-Coordiate in the Mapping
Ypic=alphapic.copy() #Y-Coordiate in the Mapping
FXpic=alphapic.copy() #X-Coordiate in the Fingersystem in the Mapping
FYpic=alphapic.copy() #Y-Coordiate in the Fingersystem in the Mapping
profpic=alphapic.copy() #profiles or s coordiante in the Mapping
# profnpic=alphapic.copy()
tilepic=alphapic.copy() #target indentifier, 1: low iota, 2: middle part, 3: high iota, 4: vertical
fingerpic=alphapic.copy() #finger ID from the mapping, last two digits are the profile number, the 3 to 6. digit are the finger id (100*i+j; i:0-136; j:0-16)
if upload_scene_reference:
### Camera ###
PXpic=alphapic.copy() #orignal Pixel X
PYpic=alphapic.copy() #orignal Pixel Y
Xpic3D=alphapic.copy() #CAD 3D X from Scene Model
Ypic3D=alphapic.copy() #CAD 3D Y from Scene Model
Zpic3D=alphapic.copy() #CAD 3D Z from Scene Model
CID=alphapic.copy() #Component ID from the Scene Model
szene=h5.File(scenepath+"AEF"+str(port)+"_scene_model_"+str(scene_reference_time)+".h5",'r')
Xloc=np.asarray(szene['x'])
Yloc=np.asarray(szene['y'])
Zloc=np.asarray(szene['z'])
#CAD=np.asarray(szene['CAD'])
PFC=np.asarray(szene['PFC'])
linedict={'h_l':30,'h_m':54,'h_h':24,'v':29}
numstacks={
'h_l':{0:13,1:14,2:14,3:14,4:14,5:14,6:14,7:14,8:14,9:14,10:15,11:15,12:14,13:14,14:14,15:13,16:13,17:13,18:13,19:13,20:13,21:13,22:13,23:13,24:13,25:13,26:13,27:13,28:13,29:13},
'h_m':{0:14,1:15,2:14,3:15,4:14,5:15,6:14,7:15,8:14,9:15,10:14,11:15,12:14,13:15,14:14,15:15,16:14,17:15,18:14,19:15,20:14,21:15,22:14,23:15,24:14,25:15,26:14,27:15,28:14,29:15,30:14,31:15,32:14,33:15,34:14,35:15,36:14,37:15,38:14,39:15,40:14,41:15,42:14,43:15,44:14,45:15,46:14,47:15,48:15,49:14,50:14,51:14,52:14,53:15},
'h_h':{0:16,1:15,2:15,3:15,4:15,5:15,6:15,7:15,8:15,9:15,10:15,11:15,12:15,13:15,14:15,15:15,16:15,17:15,18:15,19:15,20:15,21:15,22:15,23:15},
'v':{0:15,1:15,2:15,3:15,4:15,5:15,6:15,7:15,8:15,9:15,10:15,11:15,12:15,13:15,14:15,15:15,16:15,17:15,18:15,19:15,20:15,21:15,22:15,23:15,24:15,25:15,26:15,27:15,28:15}
}
def splitStacktoLines(locationall, rowcountarray):
    """Split a flat point stack into per-line arrays.

    rowcountarray holds the start offsets of the lines in locationall
    (consecutive pairs delimit one line).  Returns a numpy array of the
    per-line numpy arrays.
    """
    segments = [
        np.array(locationall[start:stop])
        for start, stop in zip(rowcountarray[:-1], rowcountarray[1:])
    ]
    return np.array(segments)
ii=0
V_index=0
xoff=0
maxyoff=0
tn=0
for tile in tilelist:#['h_l','h_m','h_h','v']:
tn+=1
tile=tile.replace(" ","")
linn=linedict[tile]
if tile!='v':
V_index+=linn
yoff=130
else:
yoff=1
xoff=1
for i in range(linn):
if ii<30:
iko=ii
elif ii<84:
iko=ii-30
elif ii<108:
iko=ii-84
else:
iko=ii-108
if ii>=78 and ii<108:
tn=3
fingerfile=str(port//10)
if port % 10 ==0:
fingerfile=fingerfile+"l"+tile+"_"
else:
fingerfile=fingerfile+"u"+tile+"_"
if port==20 and int(program.split(".")[0])<20171020:
linefile=fingerfile+'{0:02d}'.format(iko)+"2Dlines_biliniar_before_20171020.h5"
else:
linefile=fingerfile+'{0:02d}'.format(iko)+"2Dlines_biliniar.h5"
linf=h5.File(outpfad+linefile,'r')
linid=list(linf['LineNos'])
lines=linf['locationall2D']#
linlocid=linf['rowcountarray']
linrefpoi=linf['refPoi']
linweipoi=linf['weightPoi']
if port==50:
shapi=[1024, 1280]
else:
shapi=[768, 1024]
locations=splitStacktoLines(lines,linlocid)
locpoi=splitStacktoLines(linrefpoi,linlocid)
locwei=splitStacktoLines(linweipoi,linlocid)
linf.close()
xoff+=1
if tile=='h_m' and iko<48 and iko%2==1:
shift=True
xoff=xoff-numstacks[tile][i-1]-1
maxyoff+=1
else:
shift=False
maxyoff=0
for j in range(numstacks[tile][i]):
j2=linid.index(j)
if shift:
yloff=maxyoff
else:
yloff=0
lprof=np.array(File['profiles_'+str(ii)+"_"+str(j2)][0])
lalpha=np.array(File['alpha_'+str(ii)+"_"+str(j2)][0])
lheat=np.array(File['heat_'+str(ii)+"_"+str(j2)])
lXY=np.array(File['plot_profiles_'+str(ii)+"_"+str(j2)])
xf=np.array(np.asarray(locpoi[j2])%shapi[1],dtype=np.int16)
yf=np.array(np.asarray(locpoi[j2])//shapi[1],dtype=np.int16)
PX=np.asarray([np.dot(xf[o],locwei[j2][o]) for o in range(len(xf))])
PY=np.asarray([np.dot(yf[o],locwei[j2][o]) for o in range(len(yf))])
loco=locations[j2]
profilelength=int(np.max(lprof)/3e-3)+1
for k in range(profilelength):
pos=k*3e-3#3.143e-3
pid=IR.IR_tools.find_nearest(lprof,pos)
if abs(pos-lprof[pid])<2e-3:
alphapic[yoff+yloff,xoff]=lalpha[pid]
heatpics[yoff+yloff,xoff,:]=lheat[:,pid]
if upload_Mapping:
Xpic[yoff+yloff,xoff]=lXY[pid,0]
Ypic[yoff+yloff,xoff]=lXY[pid,1]
FXpic[yoff+yloff,xoff]=loco[pid,0]
FYpic[yoff+yloff,xoff]=loco[pid,1]
profpic[yoff+yloff,xoff]=lprof[pid]
# profnpic[yoff+yloff,xoff]=j
tilepic[yoff+yloff,xoff]=tn
fingerpic[yoff+yloff,xoff]=ii*100+j
if upload_scene_reference:
PXpic[yoff+yloff,xoff]=int(round(PX[pid],0))
PYpic[yoff+yloff,xoff]=int(round(PY[pid],0))
Xpic3D[yoff+yloff,xoff]=Xloc[int(round(PY[pid],0)),int(round(PX[pid],0))]
Ypic3D[yoff+yloff,xoff]=Yloc[int(round(PY[pid],0)),int(round(PX[pid],0))]
Zpic3D[yoff+yloff,xoff]=Zloc[int(round(PY[pid],0)),int(round(PX[pid],0))]
CID[yoff+yloff,xoff]=PFC[int(round(PY[pid],0)),int(round(PX[pid],0))]
yloff+=1
xoff+=1
if yloff>maxyoff and not shift:
maxyoff=yloff
ii+=1
del lalpha,lheat,lXY,loco,lprof,tn,File
#plt.figure();plt.imshow(alphapic[:,:,0])
#plt.figure();plt.imshow(tilepic[:,:,0])
#plt.figure();plt.imshow(fingerpic[:,:,0])
#plt.figure();plt.imshow(heatpics[:,:,50])
#testupload ####
archivepath="http://archive-webapi.ipp-hgw.mpg.de/Test/raw/"
prog=IR.get_program_from_PID(program)
if prog[0]:
t1=prog[1]['trigger']['1'][0]
time_ns=[int(t1-1e9)]
for t3 in time:
time_ns.append(int(t1+t3*1e9))
images=np.append(alphapic,heatpics,axis=2)
chanDesc=[{'name':'heatflux','physicalQuantity':{'type':'W/m^2'},'active':1}]
parobj={"label":"parms",
"description":"heat flux for IR camera in AEF"+str(port),
"values":[{"chanDescs":chanDesc,'header':header,'comment':comment}],
"dimensions":[int(time_ns[0]),int(time_ns[-1])]}
url=archivepath+project+"/"+group+"/AEF"+str(port)+"_heatflux"
# url=IR.archivepath+project+"/"+group+"/"+stream
ver=IR.get_latest_version(group+"/"+stream+"_PARLOG",project=project,Test=True)
if ver==None:
reason="Versioning of Data"
IRup.archive_create_stream_version(url, reason, False, "n/a", "n/a" )
print(datetime.datetime.now(),"starting upload")
# IRup.upload_Parlog(url,parobj,versionnumber=1)
print(datetime.datetime.now(),"parlog written")
# IRup.upload_images(images,time=time_ns,group=group,stream=stream,project=project,version=1,swapping=False,Test=True)
print(datetime.datetime.now(),"data uploaded")
del images,alphapic,heatpics,time_ns
if upload_Mapping:
### upload reference stream ###once!
time_ns2=[]
timet1=int(IR.TimeToNs([2017,9,26],[8,0,0,0]))
for t in range(7):
time_ns2.append(int(timet1+10*t))
images2=np.append(profpic,Xpic,axis=2)
images2=np.append(images2,Ypic,axis=2)
images2=np.append(images2,FXpic,axis=2)
images2=np.append(images2,FYpic,axis=2)
images2=np.append(images2,fingerpic,axis=2) # images2=np.append(images2,profnpic,axis=2)
images2=np.append(images2,tilepic,axis=2)
header2={'profile':int(timet1),'MX':int(timet1+10),'MY':int(timet1+20),'FX':int(timet1+30),'FY':int(timet1+40),'finger_profile':int(timet1+50),'targetgroup':int(timet1+60)}
chanDesc=[{'name':'reference','physicalQuantity':{'type':'none'},'active':1}]
parobj={"label":"parms",