Commit 0637ee43 authored by Jacobs Matthieu

Modified scripts

parent 721a20e0
import numpy as np
from matplotlib import pyplot as plt
from combine.quick_g import quick_g_plot
from quick_g import quick_g_plot
import sys
# not the whole torus, only a tenth of it -> phi does not wrap around
def yield_grid_cells(g, r_lahead=1, tht_lahead=1, phi_lahead=0):
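The body of yield_grid_cells is collapsed in this view. Purely as an illustration of the iteration pattern suggested by the signature and the comment above (a generator over grid cells with a configurable index look-ahead per direction, and no wrap-around in phi because only a tenth of the torus is meshed), a minimal sketch could look like the following; the assumed layout g[ir, it, ip] -> node coordinates and the returned tuple of neighbours are guesses, not the actual implementation.

def yield_grid_cells_sketch(g, r_lahead=1, tht_lahead=1, phi_lahead=0):
    # Assumed layout: g[ir, it, ip] holds the coordinates of one grid node.
    n_r, n_tht, n_phi = g.shape[:3]
    # No direction wraps around; in particular phi does not loop back to 0,
    # since only a tenth of the torus is represented.
    for ir in range(n_r - r_lahead):
        for it in range(n_tht - tht_lahead):
            for ip in range(n_phi - phi_lahead):
                # A "cell" here is a node together with its look-ahead
                # neighbours in the r, theta and phi directions.
                yield (g[ir, it, ip],
                       g[ir + r_lahead, it, ip],
                       g[ir, it + tht_lahead, ip],
                       g[ir, it, ip + phi_lahead])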
@@ -184,7 +185,8 @@ def nonconvex(g):
if __name__ == "__main__":
try:
g = np.load("g_possible.npy")
name = sys.argv[1]
g = np.load(name)
# g = np.load("g_kaputt.npy")
except:
print("Need to generate grid array 'g.npy' with another program")
@@ -193,11 +195,16 @@ if __name__ == "__main__":
#nonc = nonconvex(g) # TODO 1 == good, 0 == nonconvex, -1 == weird
# volumes = volume(g)
# angles_r, angles_tht = non_orthogonality(g)
# uneven_r, uneven_tht = unevenness(g)
# skewness_r, skewness_tht = skewness(g)
# print(f"{np.sum(volumes):.3E} <- Whole volume in m^3")
# print(f"{np.min(volumes):.3E}, {np.max(volumes):.3E} <- Min, Max cell volume in m^3")
# print(f"{np.mean(volumes):.3E}, {np.std(volumes):.3E} <- Mean, Std of cell volume in m^3")
\ No newline at end of file
volumes = volume(g)
angles_r, angles_tht = non_orthogonality(g)
uneven_r, uneven_tht = unevenness(g)
skewness_r, skewness_tht = skewness(g)
print(f"{np.sum(volumes):.3E} <- Whole volume in m^3")
print(f"{np.min(volumes):.3E}, {np.max(volumes):.3E} <- Min, Max cell volume in m^3")
print(f"{np.mean(volumes):.3E}, {np.std(volumes):.3E} <- Mean, Std of cell volume in m^3")
print(f"{np.mean(uneven_r)} <- Uneven r")
print(f"{np.mean(uneven_tht)} <- Uneven theta")
print(f"{np.mean(skewness_r)} <- Skewness r")
print(f"{np.mean(skewness_tht)} <- Skewness theta")
print(f"{np.mean(angles_r)} <- Unorthogonality r")
print(f"{np.mean(angles_tht)} <- Unorthogonality theta")
@@ -7,7 +7,7 @@ import pandas as pd
import numpy as np
import ast
def extractValues(positions,gbpositions, drifts, efield, gradb, drgradb, bcells,bnodes, edescr, bdescr, gradbdescr, ExB, gradBdrift, ggcorr):
def extractValues(positions,gbpositions, drifts, efield, gradb, drgradb, bcells,bnodes, edescr, bdescr, gradbdescr, ExB, gradBdrift, ggcorr, Rmajor):
pos = []
gbpos = []
enum = []
@@ -40,24 +40,24 @@ def extractValues(positions,gbpositions, drifts, efield, gradb, drgradb, bcells,
# filter out boundary cells => these have zero field and drift and are not useful for interpretation
# quick fix with nan
if bnd != 1 and not math.isnan(en[0]) and not math.isnan(den[0]): #and (abs(max(en))) <500 and abs(min(en))<500:
if bnd != 1 and not math.isnan(en[0]) and not math.isnan(den[0]) and (abs(max(en))) <300 and abs(min(en))<300:
denum.append(den)
pos.append(dpn)
enum.append(en)
dgnum.append(dbn)
# Compute analytical efield and drifts at these positions
efa = edescr(dpn[0], dpn[2], dpn[1], Constants.Rmajor)
bfa = bdescr(dpn[0], dpn[2], dpn[1], Constants.Rmajor, Constants.Rminor)
gfa = gradbdescr(dpn[0], dpn[2], dpn[1], Constants.Rmajor, Constants.Rminor)
efa = edescr(dpn[0], dpn[2], dpn[1], Rmajor)
bfa = bdescr(dpn[0], dpn[2], dpn[1],Rmajor, Constants.Rminor)
gfa = gradbdescr(dpn[0], dpn[2], dpn[1], Rmajor, Constants.Rminor)
dgfa = gradBdrift(bfa,gfa)
dga.append(dgfa.tolist())
ea.append(efa)
ba.append(bfa)
ggcorrnum.append(gg)
dea.append(ExB(efa, bfa).tolist())
denumperp.append(transformRadPerptoCyl(denum[-1],pos[-1]))
enumperp.append(transformRadPerptoCyl(enum[-1],pos[-1]))
dgnumperp.append(transformRadPerptoCyl(dgnum[-1],pos[-1]))
denumperp.append(transformRadPerptoCyl(denum[-1],pos[-1], Rmajor))
enumperp.append(transformRadPerptoCyl(enum[-1],pos[-1], Rmajor))
dgnumperp.append(transformRadPerptoCyl(dgnum[-1],pos[-1], Rmajor))
for i in range(len(gradb)):
gbn = [float(x) for x in gradb[i].split()]
@@ -65,10 +65,10 @@ def extractValues(positions,gbpositions, drifts, efield, gradb, drgradb, bcells,
dpn = [float(x) for x in gbpositions[i].split()]
if bnd != 1:
gnum.append(gbn)
gfa = gradbdescr(dpn[0], dpn[2], dpn[1], Constants.Rmajor, Constants.Rminor)
gfa = gradbdescr(dpn[0], dpn[2], dpn[1], Rmajor, Constants.Rminor)
ga.append(gfa)
gbpos.append(dpn)
gnumperp.append(transformRadPerptoCyl(gnum[-1],gbpos[-1]))
gnumperp.append(transformRadPerptoCyl(gnum[-1],gbpos[-1],Rmajor))
return [pos,gbpos, enum, denum,gnum,dgnum, enumperp,denumperp, gnumperp ,dgnumperp,ea , ba, dea, ga,dga, ggcorrnum]
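ExB and gradBdrift are passed in from driftFunctions (see the driver script at the end of this commit). For reference only, a minimal numpy sketch of the standard guiding-centre expressions they presumably evaluate; the grad-B prefactor (perpendicular kinetic energy over charge) is written as a free parameter here, and the actual normalisation in driftFunctions may differ.

import numpy as np

def exb_drift_sketch(E, B):
    # v_ExB = (E x B) / |B|^2
    E, B = np.asarray(E, dtype=float), np.asarray(B, dtype=float)
    return np.cross(E, B) / np.dot(B, B)

def gradb_drift_sketch(B, gradB, w_perp_over_q=1.0):
    # v_gradB = (W_perp / q) * (B x grad|B|) / |B|^3, with W_perp/q a placeholder.
    B, gradB = np.asarray(B, dtype=float), np.asarray(gradB, dtype=float)
    Bmag = np.linalg.norm(B)
    return w_perp_over_q * np.cross(B, gradB) / Bmag**3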
def extractVectorInfo(numeric,analytical):
@@ -115,9 +115,9 @@ def getAllSlices(positions,allInfo,direction,coordinate):
result.append(pos_slice)
return result
def getAllInfo(positions,gbpositions,drifts,efield, gradb, drgradb,bcells,bnodes,edescr,bdescr, gradbdescr, ExB, gradBdrift,GGcorr, name):
def getAllInfo(positions,gbpositions,drifts,efield, gradb, drgradb,bcells,bnodes,edescr,bdescr, gradbdescr, ExB, gradBdrift,GGcorr,Rmajor, name):
[pos, gbpos, enum, denum,gnum,dgnum, enumperp,denumperp, gnumperp ,dgnumperp,ea , ba, dea, ga,dga, GGcorr] = extractValues(positions, gbpositions,
drifts, efield, gradb, drgradb, bcells,bnodes, edescr, bdescr,gradbdescr, ExB, gradBdrift, GGcorr)
drifts, efield, gradb, drgradb, bcells,bnodes, edescr, bdescr,gradbdescr, ExB, gradBdrift, GGcorr, Rmajor)
[deres_mag, dean_mag, deer_mag, deer_vec, deer_rel_mag, deer_rel_vec] = extractVectorInfo(denumperp,dea)
[eres_mag, ean_mag, eer_mag, eer_vec, eer_rel_mag, eer_rel_vec] = extractVectorInfo(enumperp, ea)
@@ -155,9 +155,9 @@ def readText(filepath):
return result
def transformCyltoRadPerp(v,pos):
def transformCyltoRadPerp(v,pos, Rmajor):
result = []
phi = math.atan2(pos[2],pos[0]-500)
phi = math.atan2(pos[2],pos[0]-Rmajor)
c = math.cos(phi)
s = math.sin(phi)
result.append(v[0]*c+v[2]*s)
@@ -167,6 +167,7 @@ def transformCyltoRadPerp(v,pos):
def readAllInfo(method,descr,noise, folder):
result = {}
print(folder+'AllInfo'+method+'Descr'+descr+'Noise'+noise+'.csv')
df = pd.read_csv(folder+'AllInfo'+method+'Descr'+descr+'Noise'+noise+'.csv')
keys = list(df.columns)
for i in range(1,len(keys)):
@@ -201,9 +202,9 @@ def gradientLength(gradientmags,functionvalues):
result.append(functionvalues[i]/(gradientmags[i]+1e-6))
return result
def transformRadPerptoCyl(v,pos):
def transformRadPerptoCyl(v,pos,Rmajor):
result = []
phi = math.atan2(pos[2],pos[0]-500)
phi = math.atan2(pos[2],pos[0]-Rmajor)
c = math.cos(phi)
s = math.sin(phi)
result.append(v[0]*c-v[2]*s)
......
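The two transform helpers changed above (transformCyltoRadPerp and transformRadPerptoCyl) rotate the first and third vector components by the poloidal angle phi = atan2(z, R - Rmajor) of the evaluation point; the commit replaces the hardcoded major radius 500 with the new Rmajor argument. A compact sketch of that rotation pair, with the assumption that the collapsed lines pass the second component through unchanged and append the rotated third component:

import math

def rad_perp_to_cyl_sketch(v, pos, Rmajor):
    # Poloidal angle of the point in the (R - Rmajor, z) plane, as above.
    phi = math.atan2(pos[2], pos[0] - Rmajor)
    c, s = math.cos(phi), math.sin(phi)
    return [v[0] * c - v[2] * s, v[1], v[0] * s + v[2] * c]

def cyl_to_rad_perp_sketch(v, pos, Rmajor):
    # Inverse (transposed) rotation, matching transformCyltoRadPerp.
    phi = math.atan2(pos[2], pos[0] - Rmajor)
    c, s = math.cos(phi), math.sin(phi)
    return [v[0] * c + v[2] * s, v[1], -v[0] * s + v[2] * c]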
@@ -92,3 +92,13 @@ def e_descr2(r,z,theta, Rmajor):
return [Er,Ephi,Ez]
# return Ez
def e_descr6(r,z,theta, Rmajor):
r = r/100
Rmajor = 590/100
z = z/100
D = math.sqrt((r - Rmajor) ** 2 + z ** 2)
Er = -500*math.exp(-D)*1/D*(r-Rmajor)
Ephi = 0
Ez = -500*math.exp(-D)*1/D*z
return [Er,Ephi, Ez]
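The new e_descr6 prescription converts r and z from centimetres to metres and overrides the Rmajor argument with a hardcoded 5.9 m, then returns a purely poloidal field pointing towards the axis at (Rmajor, 0): with D = sqrt((r - Rmajor)^2 + z^2), the components are Er = -500 e^(-D) (r - Rmajor)/D and Ez = -500 e^(-D) z/D, so the magnitude works out to |E| = 500 e^(-D) sqrt((r - Rmajor)^2 + z^2)/D = 500 e^(-D), decaying exponentially with distance from the axis (and singular at D = 0).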
@@ -5,17 +5,24 @@ import matplotlib.pyplot as plt
path = os.getcwd()
xnames = ['_1','_2','_3','_4','_5','_7','_8','_9','_10']
xnames = ['_1','_2','_3','_4','_5','_6','_7','_8','_9']#,'_10']
Tdata = []
for i in range(len(xnames)):
Temp = np.loadtxt('/u/mbj/Drifts/data-for-drift-computations/EIM/N03.00_P05.00_D05_C02.50-TEST/x-out'+xnames[i]+'/x-out/TE_TI', max_rows = 121554 )
Tlength = len(Temp)
# TempFiltered = Temp[Temp != 0]
# TempFiltered = np.nonzero(Temp)
TempFiltered = np.reshape(Temp,Tlength*6)
print('max',np.amax(TempFiltered))
Tdata.append(TempFiltered)
file = open('/u/mbj/Drifts/data-for-drift-computations/Reference-FullRes/X-TEST-HR/x-out'+xnames[i]+'/x-out/TE_TI','r')
Temptemp = file.readlines()
Temp = []
for i in range(len(Temptemp)):
Temp = Temp+[float(x) for x in Temptemp[i].split()]
# print(Temp)
print(len(Temp))
Temp = np.array(Temp)
# Temp = np.loadtxt('/u/mbj/Drifts/data-for-drift-computations/Reference-FullRes/X-TEST-HR/x-out'+xnames[i]+'/x-out/TE_TI', max_rows = 25851)
# Temp =np.concatenate(Temp.flatten(),np.loadtxt('/u/mbj/Drifts/data-for-drift-computations/Reference-FullRes/X-TEST-HR/x-out'+xnames[i]+'/x-out/TE_TI',skiprows=25851,max_rows = 1))
# Tlength = len(Temp)
# print(Tlength)
# Temp.flatten()
Tdata.append(Temp)
avTemp = Tdata[0]
......
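The readlines/split loop above flattens the TE_TI file row by row; judging from the commented-out np.loadtxt attempts, the file apparently has rows of unequal length that np.loadtxt cannot handle directly (an assumption; the file layout itself is not visible here). A small reusable helper doing the same job, using list.extend so the running list is not rebuilt on every row:

import numpy as np

def read_flat_table(path):
    # Flatten a whitespace-separated text file into one 1-D float array,
    # tolerating rows of unequal length.
    values = []
    with open(path) as fh:
        for line in fh:
            values.extend(float(x) for x in line.split())
    return np.asarray(values)

# e.g. Temp = read_flat_table('/u/mbj/Drifts/data-for-drift-computations/Reference-FullRes/X-TEST-HR/x-out' + xnames[i] + '/x-out/TE_TI')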
@@ -4,6 +4,7 @@ import sys
import AnalysisFunctions
import math
import numpy as np
import plotly.graph_objects as go
path = os.getcwd()
method = sys.argv[1]
@@ -20,7 +21,7 @@ plt.xlabel('Radial Position [cm]')
plt.ylabel('Vertical Position [cm]')
title = 'Trajectory'+ name
plt.title(title)
positions = AnalysisFunctions.readText(path+ '/Output' + method+'Descr'+analyticalP+'Noise'+noise+'/TRAJECTORY1')
positions = AnalysisFunctions.readText(path+ '/Output' + method+'Descr'+analyticalP+'Noise'+noise+'/TRAJECTORY4')
for i in range(len(positions)):
if i%2 == 0:
p = [float(x) for x in positions[i].split()]
@@ -35,13 +36,38 @@ y = [r[2] for r in posNum]
R = math.sqrt((x[0]-500)**2+y[0]**2)
print(R)
sc = plt.scatter(x, y, s=10, c = posLen, cmap = 'plasma')
sc = plt.scatter(x, y, s=50, c = posLen, cmap = 'plasma')
#circle = plt.Circle((500,0),R, color = 'green')
angle = np.linspace(0,2*np.pi,150)
x = (500+R*np.cos(angle))
y = R*np.sin(angle)
axes.plot(x,y)
plt.show()
plt.savefig(path+'/Trajectory'+method+analyticalP+noise+name+'.png')
print(path+method+analyticalP+noise+name+'.png')
x = [r[0]*math.cos(r[1]) for r in posNum]
y = [r[0]*math.sin(r[1]) for r in posNum]
z = [r[2] for r in posNum]
x = x[0:250]
y = y[0:250]
z = z[0:250]
#fig = plt.figure()
#ax = fig.add_subplot(projection='3d')
#sc = plt.scatter(x, y, z, cmap = 'plasma')
fig = go.Figure(data=[go.Scatter3d(x=x, y=y, z=z,
mode='markers',
marker=dict(
size=12,
color=posLen[0:250], # set color to an array/list of desired values
colorscale='Viridis', # choose a colorscale
opacity=0.8
))])
fig.show()
plt.show()
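The added 3D view converts the first 250 stored positions from cylindrical to Cartesian coordinates before handing them to plotly: assuming posNum rows are (R, phi, Z), as the list comprehensions above suggest, the mapping is x = R cos(phi), y = R sin(phi), z = Z. As a small self-contained helper:

import math

def cyl_to_cartesian(points):
    # points: iterable of (R, phi, Z) triples; returns x, y, z lists for plotting.
    xs = [p[0] * math.cos(p[1]) for p in points]
    ys = [p[0] * math.sin(p[1]) for p in points]
    zs = [p[2] for p in points]
    return xs, ys, zs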
@@ -4,16 +4,16 @@ import shutil
import AnalyticalPrescriptions
# The folder where the different cases should be stored is
basepath = '/u/mbj/Drifts/data-for-drift-computations'
folder = '/u/mbj/Drifts/data-for-drift-computations/CompareNodal75'
folder = '/u/mbj/Drifts/data-for-drift-computations/CompareNodal105'
os.mkdir(folder)
# Choose the parameters for the case to test
# Geometry
# Number of zones considered
nz = 1
# Loop over different cases to prepare
nts = [6, 9,12,18,24]
nrs = [15,23, 30,45, 60]
nps = [75,110, 150,225, 300]
nts = [6, 12,24, 36, 48]
nrs = [10, 20, 40, 60,80]
nps = [50, 100, 200, 300,400]
#cases = [nrs,nps]
caseNames = ['Case1','Case2','Case3', 'Case4', 'Case5']
# Number of toroidal, radial, poloidal surfaces
......
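To get a feel for how quickly the new resolution series grows, here is the product of the toroidal, radial and poloidal surface counts per case (assuming, as an approximation, that the grid size scales with this product; how the case generator actually uses nts/nrs/nps is not shown here):

nts = [6, 12, 24, 36, 48]       # toroidal surfaces, as above
nrs = [10, 20, 40, 60, 80]      # radial surfaces
nps = [50, 100, 200, 300, 400]  # poloidal surfaces
for case, nt, nr, npol in zip(['Case1', 'Case2', 'Case3', 'Case4', 'Case5'], nts, nrs, nps):
    print(case, nt * nr * npol)
# Case1 3000, Case2 24000, Case3 192000, Case4 648000, Case5 1536000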
@@ -5,7 +5,7 @@ import AnalysisFunctions
analyticalCase = sys.argv[1]
path = os.getcwd()
methods = ['6','8']
methods = ['6','7','8','9', '10']
noise = '0'
pathlengths = []
@@ -23,7 +23,7 @@ for method in methods:
t = [float(x) for x in trajInfo[i].split()]
print(t)
pls.append(t[0])
devs.append(t[1])
devs.append(abs(t[1]))
ts.append(t[2])
rds.append(devs[i]/pls[i])
pathlengths.append(pls)
@@ -33,8 +33,8 @@ for method in methods:
upperX = 1e-4
lowerX = 1e-9
upperY = 1e-3
lowerY = 1e-8
upperY = 100
lowerY = 1e-12
xlabel = 'Timestep [s]'
ylabel = 'Relative Deviation (after 1s)'
@@ -42,6 +42,9 @@ ylabel = 'Relative Deviation (after 1s)'
plot1 = plt.figure(1)
sc1 = plt.scatter(timesteps[0],reldev[0],label = methods[0])
sc2 = plt.scatter(timesteps[1],reldev[1],label = methods[1])
sc3 = plt.scatter(timesteps[0],reldev[2],label = methods[2])
sc4 = plt.scatter(timesteps[1],reldev[3],label = methods[3])
sc5 = plt.scatter(timesteps[0],reldev[4],label = methods[4])
ax = plt.gca()
ax.set_yscale('log')
@@ -51,7 +54,7 @@ ax.legend()
#x = np.linspace(0.000001,1,100)
#y = x**(1)/100
#plt.loglog(x,y)
#plt.axis(limits)
plt.axis(limits)
plt.title('Relative Deviation'+analyticalCase)
plt.xlabel(xlabel)
plt.ylabel(ylabel)
......
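For reference, the quantity plotted on the log-scaled y-axis is the relative deviation rds[i] = abs(t[1]) / t[0], i.e. the deviation after 1 s divided by the accumulated path length, shown against the timestep for each of the five methods; taking abs(t[1]) above keeps negative deviations from being dropped by the log axis, and the widened limits (1e-12 to 100 on y) presumably make room for the larger spread of the newly added methods.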
@@ -26,10 +26,14 @@ boundarynodes = AnalysisFunctions.readText(path+ '/Output' + method+'Descr'+anal
gbpositions = AnalysisFunctions.readText(path+ '/Output' + method+'Descr'+analyticalP+'Noise'+noise+'/GB_POS')
ggcorr = AnalysisFunctions.readText(path+ '/Output' + method+'Descr'+analyticalP+'Noise'+noise+'/GGCORR')
Rmajor = 500
if aP ==1:
edescr = AnalyticalPrescriptions.e_descr1
elif aP == 5:
edescr = AnalyticalPrescriptions.e_descr5
elif aP==6:
edescr = AnalyticalPrescriptions.e_descr6
Rmajor = 590
elif aP >= 2:
edescr = AnalyticalPrescriptions.e_descr2
@@ -38,6 +42,6 @@ gdescr = AnalyticalPrescriptions.gradb_description1
exb = driftFunctions.ExBdrift
gB = driftFunctions.gradBdrift
allInfo = AnalysisFunctions.getAllInfo(positions,gbpositions,edrifts,efield,gradb,gdrifts,boundarycells,boundarynodes,edescr,bdescr,gdescr,exb, gB, ggcorr, method+'Descr'+analyticalP+'Noise'+noise)
allInfo = AnalysisFunctions.getAllInfo(positions,gbpositions,edrifts,efield,gradb,gdrifts,boundarycells,boundarynodes,edescr,bdescr,gdescr,exb, gB, ggcorr,Rmajor, method+'Descr'+analyticalP+'Noise'+noise)
print('CSV file written')