Commit 40b5ed1f authored by Holger Niemann's avatar Holger Niemann
Browse files

Merge branch 'Holgers' into 'master'

update to V3.1.0: changing the get_latest_version function, adding downloading of the divertor loads

See merge request !23
parents a47486aa 92f134ab
<<<<<<< HEAD
27/28/29.11.2018: update to V3
- add file upload_heatflux.py
- add plot_heatflux_example.py
......@@ -43,6 +44,9 @@ bugfixes in downloadversionIRdata:
- fixed: wrong time interval for TC divertor data
Versions:
V3.1.0: add of divertor loads in the upload and the download
V3.0.2: change the upload of heat flux from float64 to float32. extract profile accepts now also timewindows.
V3.0.1: extension of the heat flux upload (scene model reference can be uploaded)
V3.0.0: download and upload of heat flux data now also possible; the script creates a request+memory file for every heat-flux request that points to a camera/program combination which was not yet uploaded; in this way frequently requested heat-flux data can be evaluated earlier if possible
V2.9.7: bug fixes in the download for OP1.2a data, background download was broken
V2.9.6: bug fixes for AEFXX, port and portnr was not used in the right way
......
# -*- coding: utf-8 -*-
"""
Created on Wed May 9 14:56:32 2018
Version: 2.8.0
Version: 3.1.0
@author: Holger Niemann, Peter Drewelow, Yu Gao
mainly to clean up the downloadversionIRdata code
......
......@@ -4,16 +4,16 @@ download:
- implement multi-emissivity calculation
- implement absolute calibration
- implement download of the stored temperature data (After the upload)
- implement download of the stored heat flux data (After the upload)
- implement download of the stored heat flux data --> done in V3.0.0
- implement download of FLIR data --> Done in V2.8.0, in testing phase
upload:
- upload the new nucing files for AEF50 (the workaround)
- upload remaining data from failed uploads
- upload scene models
- upload temperature
- upload heat flux data
- upload FLIR data
- upload FLIR LUT
- upload temperature
- upload heat flux data (started)
- upload FLIR data (partly done)
- upload FLIR LUT (partly done)
......@@ -4,7 +4,7 @@ Created on Wed Oct 25 15:51:46 2017
updated on Tue Aug 21 10:20:00 2018
last update on Fr Nov 23 15:37:00 2018
Version: 3.0.2
Version: 3.1.0
(Numbering: #of big changes(OP1.2a download V1, OP1.2b download V2, heatflux V3) . #of updates to add functionalities . #number of updates for bug fixes )
@author: holn
"""
......@@ -78,7 +78,7 @@ except:
# return None
def get_latest_version(stream,project="W7X",Test=False):
def get_latest_version(stream,project="W7X",Test=False,t_from=None,t_to=None,program=None):
"""Find out latest version of given stream
:param stream url stream of interest
:return int of version number of None if non-versioned or non-existing stream
......@@ -87,9 +87,22 @@ def get_latest_version(stream,project="W7X",Test=False):
if Test:
base="http://archive-webapi.ipp-hgw.mpg.de/Test/raw/"
else:
base=archivepath
url = '{0}{1}/{2}/_versions.json'.format(base, project, stream)
request = urllib.request.Request(url, headers={"Accept": "application/json"})
base=archivepath
if program==None and t_from==None and t_to==None:
request =urllib.request.Request(base +project+"/" + stream + "/_versions.json", headers={"Accept": "application/json"})
else:
if program is not None:
prog=get_program_from_PID(program)
if prog[0]:
t_from=prog[1]['trigger']['1'][0]
t_to=prog[1]['trigger']['6'][0]
elif t_from is not None and t_to==None:
t_to=int(t_from+100e9)
elif t_from==None and t_to is not None:
t_from=int(t_to-10e9)
request =urllib.request.Request(base +project+"/" + stream + "/_versions.json?from="+str(t_from)+"&upto="+str(t_to), headers={"Accept": "application/json"})
try:
response = urllib.request.urlopen(request)
d = json.loads(response.read().decode('utf-8'))
......@@ -2533,7 +2546,7 @@ def download_heatflux_by_times(port,tstart,tend,time_window=0,threads=1,testmode
return False,0,-1
else:## Okay QRT is requested. Maybe there is data available, maybe not, lets find out
if version==0:
version=get_latest_version(portpathdict[OP]["AEF"+str(port)]+"heatflux_DATASTREAM",project=project_ana,Test=testmode)
version=get_latest_version(portpathdict[OP]["AEF"+str(port)]+"heatflux_DATASTREAM",project=project_ana,Test=testmode,t_from=tstart,t_to=tend)
larchivepath=base+project_ana+"/"+portpathdict[OP]["AEF"+str(port)]+"heatflux_DATASTREAM/V"+str(version)+"/0/heatflux"
### test for the testsample###
# if version==0:
......@@ -2799,6 +2812,119 @@ def extract_heatflux_profile_from_DL(time,images,profile,finger=None,time_window
print("extract_heatflux_profile_from_DL: mapping was not found, cannot extract the profile")
return False,0,0,0
def download_divertor_load(port,targetmodule=None,program=None,tstart=None,tend=None,version=0,verbose=0,testmode=True):
    """Download the integrated divertor (target module) load and its error from the archive.

    :param port: camera port, either an int (e.g. 20) or a string such as "AEF20"
    :param targetmodule: None for the total divertor load, a single name such as
        "TM1H"/"1h", or a list/array of such names for several modules at once
    :param program: program ID such as "20171114.052"; when given it overrides
        tstart/tend with the program's trigger-1/trigger-6 times
    :param tstart: start time in ns (used when no program is given)
    :param tend: end time in ns; derived from the program containing tstart when omitted
    :param version: datastream version; 0 means "use the latest available"
    :param verbose: verbosity level, >0 prints diagnostics
    :param testmode: if True read from the Test archive instead of the production archive
    :return: (exist, time, load, error); load/error are arrays (lists of arrays for
        several target modules). On invalid input or failure (False, 0, -1, -1).
    """
    # maps a normalized target-module name to
    # (data channel index, error channel index, channel name in the loads datastream)
    TMchadict={
            'ALL':(0,1,'divertor_total_load'),'FULL':(0,1,'divertor_total_load'),
            'TM1H':(2,3,'TM1h_load'),'1H':(2,3,'TM1h_load'),
            'TM2H':(4,5,'TM2h_load'),'2H':(4,5,'TM2h_load'),
            'TM3H':(6,7,'TM3h_load'),'3H':(6,7,'TM3h_load'),
            'TM4H':(8,9,'TM4h_load'),'4H':(8,9,'TM4h_load'),
            'TM5H':(10,11,'TM5h_load'),'5H':(10,11,'TM5h_load'),
            'TM6H':(12,13,'TM6h_load'),'6H':(12,13,'TM6h_load'),
            'TM7H':(14,15,'TM7h_load'),'7H':(14,15,'TM7h_load'),
            'TM8H':(16,17,'TM8h_load'),'8H':(16,17,'TM8h_load'),
            'TM9H':(18,19,'TM9h_load'),'9H':(18,19,'TM9h_load'),
            'TM1V':(20,21,'TM1v_load'),'1V':(20,21,'TM1v_load'),
            'TM2V':(22,23,'TM2v_load'),'2V':(22,23,'TM2v_load'),
            'TM3V':(24,25,'TM3v_load'),'3V':(24,25,'TM3v_load')
            }
    # interpretation of the targetmodule input: None, a single string, or a
    # list/array of strings (plain numbers do not make sense here)
    if targetmodule is None:
        datcha=0
        ercha=1
        datcha_name='divertor_total_load'
    elif isinstance(targetmodule,(list,np.ndarray)):## okay maybe more than one targetmodule is requested
        datcha=[]
        datcha_name=[]
        ercha=[]
        # find out which target modules are requested; unknown names are skipped
        for ele in targetmodule:
            key=str(ele).upper()
            try:
                entry=TMchadict[key]
            except KeyError:
                if verbose>0:
                    print("cannot interpretate",ele,"as a targetmodule, skip it")
            else:
                datcha.append(entry[0])
                ercha.append(entry[1])
                datcha_name.append(entry[2])
        if not datcha:
            # every requested name was invalid; bail out instead of crashing later
            if verbose>0:
                print("input for targetmodule invalid!")
            return False,0,-1,-1
    elif isinstance(targetmodule,str):
        targetmodule=targetmodule.upper()
        dummy=TMchadict[targetmodule]
        datcha=dummy[0]
        datcha_name=dummy[2]
        ercha=dummy[1]
    else:#
        if verbose>0:
            print("input for targetmodule invalid!")
        return False,0,-1,-1
    # target module known and channels are known, time to know the time
    if program is None and tstart is None:
        if verbose>0:
            print("input for program and time invalid!")
        return False,0,-1,-1
    elif program is not None:
        prog=get_program_from_PID(program)
        if prog[0]:
            tstart=prog[1]['trigger']['1'][0]
            tend=prog[1]['trigger']['6'][0]
        else:
            if verbose>0:
                print("program not found")
            return False,0,-1,-1
    else:
        if tend is None:
            # derive the end time from the program that contains tstart
            # NOTE(review): if this lookup fails tend stays None and ends up
            # literally as "None" in the request URL — confirm intended behavior
            proggi=AKF_2.get_program_id(tstart)
            prog=get_program_from_PID(proggi)
            if prog[0]:
                tend=prog[1]['trigger']['6'][0]
    #channels known, time known, okay lets get the data
#    tstart=int(tstart-100e6)
    OP=IR_tools.get_OP_by_time(time_ns=tstart)
    if testmode:
        base="http://archive-webapi.ipp-hgw.mpg.de/Test/raw/"
    else:
        base=archivepath
    if isinstance(port,int):
        port="AEF"+str(port)
    if version==0:
        version=get_latest_version(portpathdict[OP][str(port)]+"loads_DATASTREAM",project=project_ana,t_from=tstart,t_to=tend,Test=testmode)
    larchivepath=base+project_ana+"/"+portpathdict[OP][str(port)]+"loads_DATASTREAM/V"+str(version)
    if isinstance(datcha,list):#case of several requested channels
        load=[]
        error=[]
        exist=True
        time=0  # defensive default; overwritten by the first successful channel read
        for i in range(len(datcha)):
            data=read_restdb(larchivepath+"/"+str(datcha[i])+"/"+datcha_name[i]+"/_signal.json?from="+str(tstart)+"&upto="+str(tend))
            err=read_restdb(larchivepath+"/"+str(ercha[i])+"/"+datcha_name[i]+"_error/_signal.json?from="+str(tstart)+"&upto="+str(tend))
            if i==0:
                if data[0]:
                    time=data[1]
                else:
                    time=0
                    exist=False
            if data[0]:
                load.append(data[2])
            if err[0]:
                error.append(err[2])
        if len(datcha)!=len(load):
            # at least one channel could not be read
            exist=False
    else:# case for a single channel
        data=read_restdb(larchivepath+"/"+str(datcha)+"/"+datcha_name+"/_signal.json?from="+str(tstart)+"&upto="+str(tend))
        err=read_restdb(larchivepath+"/"+str(ercha)+"/"+datcha_name+"_error/_signal.json?from="+str(tstart)+"&upto="+str(tend))
        if data[0] and err[0]:
            exist=True
            time=data[1]
            load=data[2]
            error=err[2]
        else:
            exist=False
            time=0
            load=-1
            error=-1
    return exist,time,load,error
#%% general download functions
"""
......@@ -2922,17 +3048,21 @@ if __name__=='__main__':
# cb.set_label("temperature in K",rotation=270,labelpad=20,fontsize=20)
# cb.ax.tick_params(labelsize=20)
#%% heatflux test
port=20
prog="20171108.017"
status,time,images=download_heatflux_by_program(port,prog,time_window=0.1,threads=1,version=1,verbose=1,givealpha=False,request=False)
if status:
print(prog,port,"OK")
else:
print(prog,port,"missing")
# port=10
# prog="20171114.039"
# status,time,images=download_heatflux_by_program(port,prog,timewindow=0.5,threads=1,version=1,verbose=3,givealpha=False,request=True)
# if status:
# print(prog,port,"OK")
# else:
# print(prog,port,"missing")
# plt.figure(),plt.imshow(images[0],vmin=0)
# plt.figure(),plt.imshow(images[50],vmin=0)
# status,mapping=download_heatflux_mapping_reference(verbose=4)
# test=get_heatflux_profile(20,1605,timepoint=1,program="20171109.008",verbose=4)
#%% loads test
port=21
prog="20171114.052"
status,time,load,error=download_divertor_load(port,targetmodule=['all','tm1h'],program=prog,verbose=5,debug=True)
#%% HDF5 writing test
# bla=get_temp_from_raw_by_program_fullthreads(51,prog,time_window=[0,4],threads=4)
# bla2=get_nuced_raw_by_program_fullthreads(51,prog,time_window=[0,4],threads=4)
......
......@@ -16,7 +16,7 @@ if __name__=='__main__':
print('done')
#%% plotting data
status2,time,s,q=IR.extract_heatflux_profile_from_DL((np.asarray(times)-times[0])/1e9,images,profile="TM3h_5_5",timewindow=[0,0.5],verbose=5)
status2,time,s,q=IR.extract_heatflux_profile_from_DL((np.asarray(times)-times[0])/1e9,images,profile="TM3h_5_5",timewindow=0.5,verbose=5)
plt.figure()
plt.imshow(images[-1]/1e6)
plt.clim([0,4])
......@@ -31,7 +31,9 @@ if __name__=='__main__':
c_ax = plt.colorbar()
c_ax.set_label('q [MW/m2]')
plt.savefig('20171109.008 - '+str(round((np.asarray(times[-1])-times[0])/1e9,2))+' - AEF20IR - in MWm2.png', dpi=300, bbox_inches='tight')
if len(np.shape(q))==1:
if len(np.shape(q))==1 or np.shape(q)[0]==1:
if np.shape(q)[0]==1:
q=q[0]
plt.figure()
plt.plot(s,q/1e6,'ko-')
plt.xlabel("s [m]",fontsize=26)
......
......@@ -16,11 +16,11 @@ import datetime
port=20
port=10
program="20171109.008"
#programlist=["20171011.009"]
programlist=["20171108.018","20171109.040","20171109.045"]
path="F:\\holn\\Documents\\Doktorarbeit\\RESULTS\\OP1.2a\\Fulldivertor_res\\EJM\\H\\"
programlist=["20180814.024","20181016.016"]#["20171108.018","20171109.040","20171109.045"]
path="F:\\tmp\\upload_it\\"#"F:\\holn\\Documents\\Doktorarbeit\\RESULTS\\OP1.2a\\Fulldivertor_res\\EJM\\H\\"
outpfad="F:\\holn\\Documents\\Doktorarbeit\\DATEN\\Mapping\\LinesforStacks\\"
scenepath=""
......
# -*- coding: utf-8 -*-
"""
Upload script for heat-flux result *.mat files into the W7-X test archive.

Created on Fri Sep 21 09:32:29 2018
V3.0.2
@author: holn
"""
import numpy as np
import scipy.io as sio
#import matplotlib.pyplot as plt
import h5py as h5
import uploadingversionIRdata as IRup
import downloadversionIRdata as IR
import datetime
import os
import sys
#125,190 vertical, horizontal *3 mm
# camera ports to skip in the upload loop below
exception_ports=[]
# NOTE(review): only the LAST of these path assignments takes effect;
# the earlier two look like leftovers from previous runs.
path="F:\\holn\\Documents\\Doktorarbeit\\RESULTS\\OP1.2b\\Fulldivertor_res\\DBM\\H\\"
path="F:\\holn\\Documents\\Doktorarbeit\\Python-Programme\\THEODOR\\Divertor\\data\\"
path="F:\\tmp\\upload_it\\"
# folder holding the 2D line/stack mapping files (*.h5) read further below
outpfad="F:\\holn\\Documents\\Doktorarbeit\\DATEN\\Mapping\\LinesforStacks\\"
# uploads are directed at the Test archive, not the production ArchiveDB
archivepath="http://archive-webapi.ipp-hgw.mpg.de/Test/raw/"
project="W7XAnalysis"
group="QRT_IRCAM"
comment="Test upload"
header={'code_version':4,'alpha_corrected':1}
stream2="Mapping_reference"
# switch: also upload the one-time mapping reference stream at the end
upload_Mapping=False
class Tee(object):
    """Write-through fan-out: duplicates everything written to it into every
    file-like object passed at construction (used to mirror stdout into a log)."""

    def __init__(self, *files):
        self.files = files

    def write(self, obj):
        # forward the payload to every sink and flush right away so the
        # output is visible immediately
        for sink in self.files:
            sink.write(obj)
            sink.flush()

    def flush(self):
        for sink in self.files:
            sink.flush()
# keep a handle on the real stdout so it can be restored at the end of the script
original = sys.stdout
start=datetime.datetime.now()
# timestamped log file; stdout is mirrored into it via the Tee helper above
fout=open("upload_log_heatflux_"+str(start.date())+"_"+str(start.hour)+"_"+str(start.minute)+"_"+str(start.second)+".txt",'w')
#foutf=open("upload_log_failes_"+str(start.date())+"_"+str(start.hour)+"_"+str(start.minute)+"_"+str(start.second)+".txt",'w')
sys.stdout = Tee(sys.stdout, fout)
filelist=os.listdir(path)
# NOTE(review): the leading indentation of this script body was lost in this
# copy; the statements below belong to the body of "for file in filelist:".
for file in filelist:
if "heatflux_V4.mat" in file:
# file names look like "<program>_AEF<port>_heatflux_V4.mat"
program=file.split("_")[0]
port=int(file.split("_")[1].split("AEF")[1])
prog=IR.get_program_from_PID(program)
if prog[0]:
t1=prog[1]['trigger']['1'][0]
time_ns=[int(t1-1e9)]
# probe the archive: if heat-flux data already exists for this port/program,
# the upload is skipped further below
try:
# import archivedb as AKF
exist,tt,fra=IR.download_heatflux_by_times(port,t1,int(t1+100*1e6),version=1,request=False)
except:
exist=False
if exist:
print(datetime.datetime.now(),"Data already uploaded for AEF"+str(port)+" , "+program)
elif port not in exception_ports:
stream="AEF"+str(port)+"_heatflux"
print(datetime.datetime.now(),"loading "+program+"_AEF"+str(port)+"_heatflux_V4.mat")
File=sio.loadmat(path+program+"_AEF"+str(port)+"_heatflux_V4.mat")
time=np.asarray(File['time'][0])
tilelist=np.asarray(File['tiles'])
# image canvases: 125+190+10 rows by 108*12+4 columns, NaN-initialized
heatpics=np.zeros((125+190+10,108*12+4,len(time)),dtype=np.float32)
alphapic=np.zeros((125+190+10,108*12+4,1),dtype=np.float32)
alphapic.fill(np.nan)
heatpics.fill(np.nan)
Xpic=alphapic.copy()
Ypic=alphapic.copy()
FXpic=alphapic.copy()
FYpic=alphapic.copy()
profpic=alphapic.copy()
profnpic=alphapic.copy()
tilepic=alphapic.copy()
fingerpic=alphapic.copy()
# number of lines per tile type and number of stacks per line
linedict={'h_l':30,'h_m':54,'h_h':24,'v':29}
numstacks={
'h_l':{0:13,1:14,2:14,3:14,4:14,5:14,6:14,7:14,8:14,9:14,10:15,11:15,12:14,13:14,14:14,15:13,16:13,17:13,18:13,19:13,20:13,21:13,22:13,23:13,24:13,25:13,26:13,27:13,28:13,29:13},
'h_m':{0:14,1:15,2:14,3:15,4:14,5:15,6:14,7:15,8:14,9:15,10:14,11:15,12:14,13:15,14:14,15:15,16:14,17:15,18:14,19:15,20:14,21:15,22:14,23:15,24:14,25:15,26:14,27:15,28:14,29:15,30:14,31:15,32:14,33:15,34:14,35:15,36:14,37:15,38:14,39:15,40:14,41:15,42:14,43:15,44:14,45:15,46:14,47:15,48:15,49:14,50:14,51:14,52:14,53:15},
'h_h':{0:16,1:15,2:15,3:15,4:15,5:15,6:15,7:15,8:15,9:15,10:15,11:15,12:15,13:15,14:15,15:15,16:15,17:15,18:15,19:15,20:15,21:15,22:15,23:15},
'v':{0:15,1:15,2:15,3:15,4:15,5:15,6:15,7:15,8:15,9:15,10:15,11:15,12:15,13:15,14:15,15:15,16:15,17:15,18:15,19:15,20:15,21:15,22:15,23:15,24:15,25:15,26:15,27:15,28:15}
}
def splitStacktoLines(locationall, rowcountarray):
    """Split one stacked location array into per-line arrays.

    rowcountarray holds the boundary indices; consecutive pairs
    (start, stop) delimit one line each. Returns an np.array of the pieces.
    """
    pieces = [np.array(locationall[start:stop])
              for start, stop in zip(rowcountarray[:-1], rowcountarray[1:])]
    return np.array(pieces)
# NOTE(review): indentation lost in this copy — this section runs inside the
# per-file loop above and assembles the 2D heat-flux images finger by finger.
ii=0
V_index=0
xoff=0
maxyoff=0
tn=0
# walk over the target-module tiles; tn is the tile number stored in tilepic
for tile in tilelist:#['h_l','h_m','h_h','v']:
tn+=1
tile=tile.replace(" ","")
linn=linedict[tile]
if tile!='v':
V_index+=linn
yoff=130
else:
yoff=1
xoff=1
for i in range(linn):
# iko: line index local to the current tile type (ii counts globally)
if ii<30:
iko=ii
elif ii<84:
iko=ii-30
elif ii<108:
iko=ii-84
else:
iko=ii-108
if ii>=78 and ii<108:
tn=3
# build the mapping-file name: "l" (lower) or "u" (upper) divertor
# depending on the port number; AEF20 has a special pre-20171020 mapping
fingerfile=str(port//10)
if port % 10 ==0:
fingerfile=fingerfile+"l"+tile+"_"
else:
fingerfile=fingerfile+"u"+tile+"_"
if port==20 and int(program.split(".")[0])<20171020:
linefile=fingerfile+'{0:02d}'.format(iko)+"2Dlines_biliniar_before_20171020.h5"
else:
linefile=fingerfile+'{0:02d}'.format(iko)+"2Dlines_biliniar.h5"
linf=h5.File(outpfad+linefile,'r')
linid=list(linf['LineNos'])
lines=linf['locationall2D']#
linlocid=linf['rowcountarray']
locations=splitStacktoLines(lines,linlocid)
linf.close()
xoff+=1
# odd h_m lines below index 48 are drawn shifted next to the previous line
if tile=='h_m' and iko<48 and iko%2==1:
shift=True
xoff=xoff-numstacks[tile][i-1]-1
maxyoff+=1
else:
shift=False
maxyoff=0
for j in range(numstacks[tile][i]):
j2=linid.index(j)
if shift:
yloff=maxyoff
else:
yloff=0
# per-stack data from the .mat file: profile positions, alpha,
# heat flux over time, and plot coordinates
lprof=np.array(File['profiles_'+str(ii)+"_"+str(j2)][0])
lalpha=np.array(File['alpha_'+str(ii)+"_"+str(j2)][0])
lheat=np.array(File['heat_'+str(ii)+"_"+str(j2)])
lXY=np.array(File['plot_profiles_'+str(ii)+"_"+str(j2)])
loco=locations[j2]
# resample the profile on a 3 mm grid; points further than 2 mm
# from a sample stay NaN
profilelength=int(np.max(lprof)/3e-3)+1
for k in range(profilelength):
pos=k*3e-3#3.143e-3
pid=IR.IR_tools.find_nearest(lprof,pos)
if abs(pos-lprof[pid])<2e-3:
alphapic[yoff+yloff,xoff]=lalpha[pid]
heatpics[yoff+yloff,xoff,:]=lheat[:,pid]
Xpic[yoff+yloff,xoff]=lXY[pid,0]
Ypic[yoff+yloff,xoff]=lXY[pid,1]
FXpic[yoff+yloff,xoff]=loco[pid,0]
FYpic[yoff+yloff,xoff]=loco[pid,1]
profpic[yoff+yloff,xoff]=lprof[pid]
# profnpic[yoff+yloff,xoff]=j
tilepic[yoff+yloff,xoff]=tn
fingerpic[yoff+yloff,xoff]=ii*100+j
yloff+=1
xoff+=1
if yloff>maxyoff and not shift:
maxyoff=yloff
ii+=1
del lalpha,lheat,lXY,loco,lprof,tn,File
#plt.figure();plt.imshow(alphapic[:,:,0])
#plt.figure();plt.imshow(tilepic[:,:,0])
#plt.figure();plt.imshow(fingerpic[:,:,0])
#plt.figure();plt.imshow(heatpics[:,:,50])
#testupload ####
print(datetime.datetime.now(),"loading and preparation done, finding program and starting upload")
# absolute timestamps for every frame, relative to trigger 1 (t1)
for t3 in time:
time_ns.append(int(t1+t3*1e9))
# frame 0 is the alpha image, frames 1..N the heat-flux images
images=np.append(alphapic,heatpics,axis=2)
chanDesc=[{'name':'heatflux','physicalQuantity':{'type':'W/m^2'},'active':1}]
parobj={"label":"parms",
"description":"heat flux for IR camera in AEF"+str(port),
"values":[{"chanDescs":chanDesc,'header':header,'comment':comment}],
"dimensions":[int(time_ns[0]),int(time_ns[-1])]}
url=archivepath+project+"/"+group+"/AEF"+str(port)+"_heatflux"
#            url=IR.archivepath+project+"/"+group+"/"+stream
# create a stream version first if the PARLOG does not exist yet
ver=IR.get_latest_version(group+"/"+stream+"_PARLOG",project=project,Test=True)
if ver==None:
reason="Versioning of Data"
IRup.archive_create_stream_version(url, reason, False, "n/a", "n/a" )
print(datetime.datetime.now(),"starting upload")
IRup.upload_Parlog(url,parobj,versionnumber=1)
print(datetime.datetime.now(),"parlog written")
IRup.upload_images(images,time=time_ns,group=group,stream=stream,project=project,version=1,swapping=False,Test=True)
print(datetime.datetime.now(),"data uploaded")
# free the big arrays before the next file is processed
del images,alphapic,heatpics,time_ns
# one-time upload of the reference mapping stream (guarded by the switch above)
if upload_Mapping:
### upload reference stream ###once!
time_ns2=[]
timet1=int(IR.TimeToNs([2017,9,26],[8,0,0,0]))
for t in range(7):
time_ns2.append(int(timet1+10*t))
# stack the seven reference planes along the time axis; header2 maps
# each plane name to its artificial timestamp
images2=np.append(profpic,Xpic,axis=2)
images2=np.append(images2,Ypic,axis=2)
images2=np.append(images2,FXpic,axis=2)
images2=np.append(images2,FYpic,axis=2)
images2=np.append(images2,fingerpic,axis=2)        #        images2=np.append(images2,profnpic,axis=2)
images2=np.append(images2,tilepic,axis=2)
header2={'profile':int(timet1),'MX':int(timet1+10),'MY':int(timet1+20),'FX':int(timet1+30),'FY':int(timet1+40),'finger_profile':int(timet1+50),'targetgroup':int(timet1+60)}
chanDesc=[{'name':'reference','physicalQuantity':{'type':'none'},'active':1}]
parobj={"label":"parms",
"description":"reference data for the heat flux",
"values":[{"chanDescs":chanDesc,'header':header2,'comment':comment}],
"dimensions":[int(time_ns2[0]),int(time_ns2[-1])]
}
#        url=IR.archivepath+project+"/"+group+"/"+stream2
url=archivepath+project+"/"+group+"/"+stream2
ver=IR.get_latest_version(group+"/"+stream2+"_PARLOG",project=project,Test=True)
if ver==None:
reason="Versioning of Data"
IRup.archive_create_stream_version(url, reason, False, "n/a", "n/a" )
IRup.upload_Parlog(url,parobj,versionnumber=1)
IRup.upload_images(images2,time=time_ns2,group=group,stream=stream2,project=project,version=1,swapping=False,Test=True)
fout.close()
# restore the real stdout saved at the top of the script
sys.stdout = original
# -*- coding: utf-8 -*-
"""
Created on Thu Aug 24 11:24:52 2017
Version: 3.0.0
Version: 3.1.0
@author: holn
"""
......@@ -290,12 +290,16 @@ def upload_meta_stream(camera,portnum,time,sensortemp,cameratemp,framerate,expos
group="QRT_IRCAM"#+camera#+"_TEST"
datastream_name="AEF"+str(portnum)+"_meta"
url=archivepath+project+"/"+group+"/"+datastream_name
if "Test" in url:
database="Test"
else:
database="ArchiveDB"
if newversion:
archive_create_stream_version(url, reason, False, "n/a", "n/a" )
ver=get_latest_version("Test/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG")
ver=get_latest_version(database+"/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG")
version=ver
else:
ver=get_latest_version("Test/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG")
ver=get_latest_version(database+"/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG")
if ver==None:
reason="Versioning of Data"
archive_create_stream_version(url, reason, False, "n/a", "n/a" )
......@@ -333,12 +337,16 @@ def upload_meta_stream_V2(camera,portnum,time,channel_names,channel_units,channe
group="QRT_IRCAM"#+camera#+"_TEST"
datastream_name="AEF"+str(portnum)+"_meta"
url=archivepath+project+"/"+group+"/"+datastream_name
if "Test" in url:
database="Test"
else:
database="ArchiveDB"
if newversion:
archive_create_stream_version(url, reason, False, "n/a", "n/a" )
ver=get_latest_version("Test/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG")
ver=get_latest_version(database+"/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG")
version=ver
else:
ver=get_latest_version("Test/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG")
ver=get_latest_version(database+"/raw/"+project+"/"+group+"/"+datastream_name+"_PARLOG")
if ver==None:
reason="Versioning of Data"
archive_create_stream_version(url, reason, False, "n/a", "n/a" )
......@@ -393,12 +401,16 @@ def upload_raw_data(camera,portnum,time,images,metadata={},comment="",newversion
"dimensions":[time.tolist()[0],time.tolist()[-1]]
}
url=archivepath+project+"/"+group+"/"+datastream_name
if "Test" in url:
database="Test"
else:
database="ArchiveDB"
if newversion:
archive_create_stream_version(url, reason, False, "n/a", "n/a" )