diff --git a/Scripts_Matthieu/Analysis2D.py b/Scripts_Matthieu/Analysis2D.py
index ea1183859b8a9ee8299098b2a5adad4b7f920073..96df77b77eb76b6006cec14eba018f0a61da4f74 100644
--- a/Scripts_Matthieu/Analysis2D.py
+++ b/Scripts_Matthieu/Analysis2D.py
@@ -5,6 +5,7 @@ import Constants
 import Utilities
 import pandas as pd
 import matplotlib.pyplot as plt
+import matplotlib
 import PlottingFunctions
 import AnalysisFunctions
 import sys
@@ -14,29 +15,43 @@ import numpy as np
 
 method = sys.argv[1]
 anCase = sys.argv[2]
-#Case = sys.argv[3]
+noise = sys.argv[3]
 path = '' # Case+'/'
 allInfo = []
+allInfoGB = []
 
-info = AnalysisFunctions.readAllInfo(method,anCase,'') #Case+'/'
+[info,infoGB] = AnalysisFunctions.readAllInfo(method,anCase,noise,'') #Case+'/'
 keys = list(info.keys())
-#print(keys)
+keysGB = list(infoGB.keys())
+
 for i in range(len(keys)-1): # don't include pos
     allInfo.append(info.get(keys[i]))
-
+for i in range(len(keysGB)-1): # don't include pos
+    allInfoGB.append(infoGB.get(keysGB[i]))
 pos = info.get('pos')
+gbpos = infoGB.get('gb_pos')
+
 angles = [r[1] for r in pos]
 uniqueangles = list(set(angles))    
 print(uniqueangles)
-#print(info.get('efield_er_rel_mag'))
 # Get a slice for 2D visualization
 
-torsurf = uniqueangles[2]
+torsurf = uniqueangles[1]
+print(torsurf)
 torSliceDict = {}
 torslices = AnalysisFunctions.getAllSlices(pos,allInfo,1,torsurf)
 for i in range(len(torslices)):
     torSliceDict[keys[i]] = torslices[i]
     
+
+angles = [r[1] for r in gbpos]
+uniqueanglesGB = list(set(angles))     
+torsurfGB = uniqueanglesGB[2]
+torSliceDictGB = {}
+torslicesGB = AnalysisFunctions.getAllSlices(gbpos,allInfoGB,1,torsurfGB)
+for i in range(len(torslicesGB)):
+    torSliceDictGB[keysGB[i]] = torslicesGB[i]   
+     
 # Make Plots
 x = [r[0] for r in torSliceDict.get('pos')]
 y = [r[1] for r in torSliceDict.get('pos')]
@@ -51,7 +66,7 @@ title = 'Magnitude of Electic Field (Numerical)'
 plt.title(title)
 sc1 = plt.scatter(x, y, s=20, c=torSliceDict.get('ef_mag'), cmap='plasma')
 plt.colorbar(sc1)
-plt.savefig(path+method+anCase+title+'.png')
+plt.savefig(path+method+anCase+noise+title+'.png')
 
 plot2 = plt.figure(2)
 plt.xlabel('Radial Position [cm]')
@@ -60,22 +75,22 @@ title = 'Horizontal component of Electric Field'
 plt.title(title)
 sc2 = plt.scatter(x, y, s=20, c = [x[0] for x in torSliceDict.get('ef_an')], cmap='plasma')
 plt.colorbar(sc2)
-plt.savefig(path+method+anCase+title+'.png')
+plt.savefig(path+method+anCase+noise+title+'.png')
 
 plot3 = plt.figure(3)
 plt.xlabel('Radial Position [cm]')
 plt.ylabel('Vertical Position [cm]')
 title = 'Relative magnitude error of Electric Field'
 plt.title(title)
-sc3 = plt.scatter(x, y, s=20, c=torSliceDict.get('efield_er_rel_mag'), cmap='plasma')
+sc3 = plt.scatter(x, y, s=20, c=torSliceDict.get('efield_er_rel_mag'), cmap='plasma', norm=matplotlib.colors.LogNorm())
 plt.colorbar(sc3)
-plt.savefig(path+method+anCase+title+'.png')
+plt.savefig(path+method+anCase+noise+title+'.png')
 
 plot4 = plt.figure(4)
 title = 'Potential'
 plt.title(title)
 PlottingFunctions.plot2DContour(Constants.ns,Constants.ns,Constants.Rmajor-Constants.Rminor,Constants.Rmajor+Constants.Rminor,-Constants.Rminor,Constants.Rminor,Constants.Rmajor,Constants.Rminor,AnalyticalPrescriptions.potential_description1,'X [cm]', 'Y [cm]','Potential [V]',False)
-plt.savefig(path+method+anCase+title+'.png')
+plt.savefig(path+method+anCase+noise+title+'.png')
 
 plot5 = plt.figure(5)
 plt.xlabel('Radial Position [cm]')
@@ -84,17 +99,17 @@ title = 'Magnitude of Electric Field (Analytical)'
 plt.title(title)
 sc5 = plt.scatter(x, y, s=20, c = torSliceDict.get('ef_an_mag'), cmap='plasma')
 plt.colorbar(sc5)
-plt.savefig(path+method+anCase+title+'.png')
+plt.savefig(path+method+anCase+noise+title+'.png')
 
 
 plot6 = plt.figure(6)
 plt.xlabel('Radial Position [cm]')
 plt.ylabel('Vertical Position [cm]')
-title = 'Magnitude of ExB Drift (Analytical)'
+title = 'Magnitude of ExB Drift (Numerical)'
 plt.title(title)
-sc6 = plt.scatter(x, y, s=20, c=torSliceDict.get('edrift_an_mag'), cmap='plasma')
+sc6 = plt.scatter(x, y, s=20, c=torSliceDict.get('edrift_mag'), cmap='plasma')
 plt.colorbar(sc6)
-plt.savefig(path+method+anCase+title+'.png')
+plt.savefig(path+method+anCase+noise+title+'.png')
 
 
 plot7 = plt.figure(7)
@@ -102,9 +117,9 @@ plt.xlabel('Radial Position [cm]')
 plt.ylabel('Vertical Position [cm]')
 title = 'Relative magnitude error of ExB Drift'
 plt.title(title)
-sc7 = plt.scatter(x, y, s=20, c=torSliceDict.get('edrift_er_rel_mag'), cmap='plasma')
+sc7 = plt.scatter(x, y, s=20, c=torSliceDict.get('edrift_er_rel_mag'), cmap='plasma', norm=matplotlib.colors.LogNorm())
 plt.colorbar(sc7)
-plt.savefig(path+method+anCase+title+'.png')
+plt.savefig(path+method+anCase+noise+title+'.png')
 
 
 plot8 = plt.figure(8)
@@ -114,7 +129,7 @@ title = 'Magnitude of Efield (radial)'
 plt.title(title)
 sc8 = plt.scatter(x, y, s=20, c= [x[0] for x in torSliceDict.get('efield')], cmap='plasma')
 plt.colorbar(sc8)
-plt.savefig(path+method+anCase+title+'.png')
+plt.savefig(path+method+anCase+noise+title+'.png')
 
 
 plot9 = plt.figure(9)
@@ -124,12 +139,12 @@ title = 'Magnitude of Efield (poloid)'
 plt.title(title)
 sc9 = plt.scatter(x, y, s=20, c= [x[2] for x in torSliceDict.get('efield')], cmap='plasma')
 plt.colorbar(sc9)
-plt.savefig(path+method+anCase+title+'.png')
+plt.savefig(path+method+anCase+noise+title+'.png')
 
 plot10 = plt.figure(10)
 sns.violinplot(torSliceDict.get('efield_er_rel_mag'))
 title = 'ErrorDistribution'+method+anCase
-plt.savefig(path+method+anCase+title+'.png')
+plt.savefig(path+method+anCase+noise+title+'.png')
 
 plot11 = plt.figure(11)
 plt.xlabel('Radial Position [cm]')
@@ -138,17 +153,17 @@ title = 'GradientLengths'
 plt.title(title)
 sc11 = plt.scatter(x, y, s=20, c=gLengths, cmap='plasma')
 plt.colorbar(sc11)
-plt.savefig(path+method+anCase+title+'.png')
+plt.savefig(path+method+anCase+noise+title+'.png')
 
 normErrors = np.multiply(np.array(torSliceDict.get('edrift_er_rel_mag')),np.array(gLengths))
 plot12 = plt.figure(12)
 plt.xlabel('Radial Position [cm]')
 plt.ylabel('Vertical Position [cm]')
-title = 'NormalizedRelErrors'
+title = 'NormalizedRelErrors ExB'
 plt.title(title)
-sc12 = plt.scatter(x, y, s=20, c=normErrors, cmap='plasma')
+sc12 = plt.scatter(x, y, s=20, c=normErrors, cmap='plasma', norm=matplotlib.colors.LogNorm())
 plt.colorbar(sc12)
-plt.savefig(path+method+anCase+title+'.png')
+plt.savefig(path+method+anCase+noise+title+'.png')
 
 plot13 = plt.figure(13)
 plt.xlabel('Radial Position [cm]')
@@ -157,7 +172,7 @@ title = 'Relative magnitude error of GradB Drift'
 plt.title(title)
 sc13 = plt.scatter(x, y, s=20, c=torSliceDict.get('gdrift_er_rel_mag'), cmap='plasma')
 plt.colorbar(sc13)
-plt.savefig(path+method+anCase+title+'.png')
+plt.savefig(path+method+anCase+noise+title+'.png')
 
 plot14 = plt.figure(14)
 plt.xlabel('Radial Position [cm]')
@@ -166,7 +181,7 @@ title = 'Magnitude of GradB Drift (Analytical)'
 plt.title(title)
 sc14 = plt.scatter(x, y, s=20, c=torSliceDict.get('gdrift_an_mag'), cmap='plasma')
 plt.colorbar(sc14)
-plt.savefig(path+method+anCase+title+'.png')
+plt.savefig(path+method+anCase+noise+title+'.png')
 
 plot15 = plt.figure(15)
 plt.xlabel('Radial Position [cm]')
@@ -175,35 +190,45 @@ title = 'Magnitude of GradB Drift (Numerical)'
 plt.title(title)
 sc15 = plt.scatter(x, y, s=20, c=torSliceDict.get('gdrift_mag'), cmap='plasma')
 plt.colorbar(sc15)
-plt.savefig(path+method+anCase+title+'.png')
+plt.savefig(path+method+anCase+noise+title+'.png')
 
 plot16 = plt.figure(16)
-plt.xlabel('Radial Position [cm]')
-plt.ylabel('Vertical Position [cm]')
-title = 'Relative magnitude error of GradB '
-plt.title(title)
-sc16 = plt.scatter(x, y, s=20, c=torSliceDict.get('gb_er_rel_mag'), cmap='plasma')
-plt.colorbar(sc16)
-plt.savefig(path+method+anCase+title+'.png')
+sns.violinplot(torSliceDict.get('edrift_er_rel_mag'))
+title = 'ErrorDistributionEDRIFT'+method+anCase
+plt.savefig(path+method+anCase+noise+title+'.png')
+
+
+# ! Different position for gradB
+x = [r[0] for r in torSliceDictGB.get('gb_pos')]
+y = [r[1] for r in torSliceDictGB.get('gb_pos')]
+
 
 plot17 = plt.figure(17)
 plt.xlabel('Radial Position [cm]')
 plt.ylabel('Vertical Position [cm]')
 title = 'Magnitude of GradB (Analytical)'
 plt.title(title)
-sc17 = plt.scatter(x, y, s=20, c=torSliceDict.get('gb_an_mag'), cmap='plasma')
+sc17 = plt.scatter(x, y, s=20, c=torSliceDictGB.get('gb_an_mag'), cmap='plasma')
 plt.colorbar(sc17)
-plt.savefig(path+method+anCase+title+'.png')
+plt.savefig(path+method+anCase+noise+title+'.png')
 
 plot18 = plt.figure(18)
 plt.xlabel('Radial Position [cm]')
 plt.ylabel('Vertical Position [cm]')
 title = 'Magnitude of GradB (Numerical)'
 plt.title(title)
-sc18 = plt.scatter(x, y, s=20, c=torSliceDict.get('gb_mag'), cmap='plasma')
+sc18 = plt.scatter(x, y, s=20, c=torSliceDictGB.get('gb_mag'), cmap='plasma')
 plt.colorbar(sc18)
-plt.savefig(path+method+anCase+title+'.png')
+plt.savefig(path+method+anCase+noise+title+'.png')
 
+plot19 = plt.figure(19)
+plt.xlabel('Radial Position [cm]')
+plt.ylabel('Vertical Position [cm]')
+title = 'Relative magnitude error of GradB '
+plt.title(title)
+sc19 = plt.scatter(x, y, s=20, c=torSliceDictGB.get('gb_er_rel_mag'), cmap='plasma', norm=matplotlib.colors.LogNorm())
+plt.colorbar(sc19)
+plt.savefig(path+method+anCase+noise+title+'.png')
 
 plt.show()
 print('Done Analysis2D')
diff --git a/Scripts_Matthieu/AnalysisFunctions.py b/Scripts_Matthieu/AnalysisFunctions.py
index 3e3da767d00698cf7383780a0bca976c64bc5d81..af91043568b8f77eca518ad2d02b12814048d7af 100644
--- a/Scripts_Matthieu/AnalysisFunctions.py
+++ b/Scripts_Matthieu/AnalysisFunctions.py
@@ -7,8 +7,9 @@ import pandas as pd
 import numpy as np
 import ast
 
-def extractValues(positions, drifts, efield, gradb, drgradb, bcells, edescr, bdescr, gradbdescr, ExB, gradBdrift):
+def extractValues(positions,gbpositions, drifts, efield, gradb, drgradb, bcells,bnodes, edescr, bdescr, gradbdescr, ExB, gradBdrift, ggcorr):
     pos = []
+    gbpos = []
     enum = []
     denum = []
     gnum = []
@@ -23,22 +24,26 @@ def extractValues(positions, drifts, efield, gradb, drgradb, bcells, edescr, bde
     dga = []
     ga = []
     bound = []
-    
+    boundnodes = []
+    ggcorrnum = []
+    print(len(drifts))
+    print(len(efield))
+    print(len(ggcorr))
     for i in range(len(drifts)):
         # Convert to float values to allow computations and interpretation
         den = [float(x) for x in drifts[i].split()]
         dpn = [float(x) for x in positions[i].split()]
         en = [float(x) for x in efield[i].split()]
-        gbn = [float(x) for x in gradb[i].split()]
         dbn = [float(x) for x in drgradb[i].split()]
         bnd = float(bcells[i])
+        gg = float(ggcorr[i])
 
         # filter our boundary cells => These have zero field and drift and are not useful for interpretation!
-        if  bnd != 1: #not math.isnan(en[0]):# and en != [0,0,0]:
+        # quick fix: skip entries with NaN field/drift values
+        if  bnd != 1 and not math.isnan(en[0]) and not math.isnan(den[0]): #and (abs(max(en))) <500 and abs(min(en))<500: 
             denum.append(den)
             pos.append(dpn)
             enum.append(en)
-            gnum.append(gbn)
             dgnum.append(dbn)
             # Compute analytical efield and drifts at these positions
             efa = edescr(dpn[0], dpn[2], dpn[1], Constants.Rmajor)
@@ -46,15 +51,25 @@ def extractValues(positions, drifts, efield, gradb, drgradb, bcells, edescr, bde
             gfa = gradbdescr(dpn[0], dpn[2], dpn[1], Constants.Rmajor, Constants.Rminor)
             dgfa = gradBdrift(bfa,gfa)
             dga.append(dgfa.tolist())
-            ga.append(gfa)
             ea.append(efa)
             ba.append(bfa)
+            ggcorrnum.append(gg)
             dea.append(ExB(efa, bfa).tolist())
             denumperp.append(transformRadPerptoCyl(denum[-1],pos[-1]))
             enumperp.append(transformRadPerptoCyl(enum[-1],pos[-1]))
-            gnumperp.append(transformRadPerptoCyl(gnum[-1],pos[-1]))
             dgnumperp.append(transformRadPerptoCyl(dgnum[-1],pos[-1]))
-    return [pos, enum, denum,gnum,dgnum, enumperp,denumperp, gnumperp ,dgnumperp,ea , ba, dea, ga,dga]
+
+    for i in range(len(gradb)):
+        gbn = [float(x) for x in gradb[i].split()]
+        bnd = float(bnodes[i])
+        dpn = [float(x) for x in gbpositions[i].split()]
+        if  bnd != 1:
+            gnum.append(gbn)
+            gfa = gradbdescr(dpn[0], dpn[2], dpn[1], Constants.Rmajor, Constants.Rminor)
+            ga.append(gfa)
+            gbpos.append(dpn)
+            gnumperp.append(transformRadPerptoCyl(gnum[-1],gbpos[-1]))
+    return [pos,gbpos, enum, denum,gnum,dgnum, enumperp,denumperp, gnumperp ,dgnumperp,ea , ba, dea, ga,dga, ggcorrnum]
 
 def extractVectorInfo(numeric,analytical):
     res_mag = []
@@ -100,9 +115,9 @@ def getAllSlices(positions,allInfo,direction,coordinate):
     result.append(pos_slice)
     return result
 
-def getAllInfo(positions,drifts,efield, gradb, drgradb,bcells,edescr,bdescr, gradbdescr, ExB, gradBdrift,name):
-    [pos, enum, denum,gnum,dgnum, enumperp,denumperp, gnumperp ,dgnumperp,ea , ba, dea, ga,dga] = extractValues(positions,
-                        drifts, efield, gradb, drgradb, bcells, edescr, bdescr,gradbdescr, ExB, gradBdrift)
+def getAllInfo(positions,gbpositions,drifts,efield, gradb, drgradb,bcells,bnodes,edescr,bdescr, gradbdescr, ExB, gradBdrift,GGcorr, name):
+    [pos, gbpos, enum, denum,gnum,dgnum, enumperp,denumperp, gnumperp ,dgnumperp,ea , ba, dea, ga,dga, GGcorr] = extractValues(positions, gbpositions,
+                        drifts, efield, gradb, drgradb, bcells,bnodes, edescr, bdescr,gradbdescr, ExB, gradBdrift, GGcorr)
 
     [deres_mag, dean_mag, deer_mag, deer_vec, deer_rel_mag, deer_rel_vec] =  extractVectorInfo(denumperp,dea)
     [eres_mag, ean_mag, eer_mag, eer_vec, eer_rel_mag, eer_rel_vec] = extractVectorInfo(enumperp, ea)
@@ -112,18 +127,24 @@ def getAllInfo(positions,drifts,efield, gradb, drgradb,bcells,edescr,bdescr, gra
  #   print('res', dgres_mag)
 
     InfoDict = {}
+    InfoDictGB = {}
     keys = ['edrifts', 'efield','edrifts_perp','efield_perp' ,'ef_mag', 'ef_an_mag','ef_an', 'edrift_mag', 'edrift_an_mag', 'ef_vec_er', 'ef_mag_er',
             'edrift_vec_er', 'edrift_er_mag','efield_er_rel_mag', 'efield_er_rel_vec', 'edrift_er_rel_mag', 'edrift_er_rel_vec',
-            'gdrifts', 'gradb','gdrifts_perp','gradb_perp', 'gb_mag','gb_an_mag', 'gdrift_mag','gdrift_an_mag','gb_vec_er', 'gb_mag_er',
-            'gdrift_vec_er','gdrift_er_mag','gb_er_rel_mag','gb_er_rel_vec','gdrift_er_rel_mag', 'gb_er_rel_vec', 'pos']
-
+            'gdrifts', 'gdrifts_perp', 'gdrift_mag','gdrift_an_mag',
+            'gdrift_vec_er','gdrift_er_mag','gdrift_er_rel_mag', 'gb_er_rel_vec', 'GGcorr','pos']
+    keysgb = ['gradb','gradb_perp', 'gb_mag','gb_an_mag','gb_vec_er', 'gb_mag_er','gb_er_rel_mag','gb_er_rel_vec','gb_pos']
     values = [denum,enum,denumperp,enumperp,eres_mag,ean_mag,ea,deres_mag,dean_mag,eer_vec,eer_mag,deer_vec,deer_mag,eer_rel_mag,eer_rel_vec,deer_rel_mag,deer_rel_vec,
-              dgnum,gnum,dgnumperp,gnumperp,gres_mag,ean_mag,dgres_mag,dgan_mag,ger_vec,ger_mag,dger_vec,dger_mag,ger_rel_mag,ger_rel_vec,dger_rel_mag,dger_rel_vec,pos]
+              dgnum,dgnumperp,dgres_mag,dgan_mag,dger_vec,dger_mag,dger_rel_mag,dger_rel_vec,GGcorr,pos]
+    valuesgb = [gnum,gnumperp,gres_mag,gan_mag,ger_vec,ger_mag,ger_rel_mag,ger_rel_vec,gbpos]
     for i in range(len(values)):
         InfoDict[keys[i]] = values[i]
+    for j in range(len(valuesgb)):
+        InfoDictGB[keysgb[j]] = valuesgb[j]
     pandasDF = pd.DataFrame.from_dict(data = InfoDict)
     pandasDF.to_csv('AllInfo'+name+'.csv', header=True)
-    return InfoDict
+    pandasDF = pd.DataFrame.from_dict(data = InfoDictGB)
+    pandasDF.to_csv('AllInfoGB'+name+'.csv', header=True)
+    return [InfoDict,InfoDictGB]
 
 
 def readText(filepath):
@@ -144,10 +165,9 @@ def transformCyltoRadPerp(v,pos):
     result.append(v[2]*c-v[0]*s)
     return result
     
-def readAllInfo(method,descr, folder):
+def readAllInfo(method,descr,noise, folder):
     result = {}
-#    print(folder+'AllInfo'+method+'Descr'+descr+'.csv')
-    df = pd.read_csv(folder+'AllInfo'+method+'Descr'+descr+'.csv')
+    df = pd.read_csv(folder+'AllInfo'+method+'Descr'+descr+'Noise'+noise+'.csv')
     keys = list(df.columns)
     for i in range(1,len(keys)):
         series = df.loc[:,keys[i]].to_list()
@@ -159,7 +179,21 @@ def readAllInfo(method,descr, folder):
             else:
                 res[j] = series[j]
         result[keys[i]] = res
-    return result
+# Also get grad b info
+    resultGB = {}
+    df = pd.read_csv(folder+'AllInfoGB'+method+'Descr'+descr+'Noise'+noise+'.csv')
+    keys = list(df.columns)
+    for i in range(1,len(keys)):
+        series = df.loc[:,keys[i]].to_list()
+        N = len(series)
+        res = np.array([None]*N)
+        for j in range(N):
+            if type(series[j]) is str:
+                res[j] = np.array(ast.literal_eval(series[j]))
+            else:
+                res[j] = series[j]
+        resultGB[keys[i]] = res
+    return [result,resultGB]
 
 def gradientLength(gradientmags,functionvalues):
     result = []
diff --git a/Scripts_Matthieu/AnalyticalPrescriptions.py b/Scripts_Matthieu/AnalyticalPrescriptions.py
index 23048bdbe16d8935f071f5f42916a33b35486d5d..793841fc2224cd6522b0ba786ec0d4cc598e2218 100644
--- a/Scripts_Matthieu/AnalyticalPrescriptions.py
+++ b/Scripts_Matthieu/AnalyticalPrescriptions.py
@@ -75,6 +75,13 @@ def e_descr3(r,z,theta, Rmajor):
     Ephi = 0
     Ez = 0
     return [Er,Ephi, Ez]
+    
+def e_descr5(r,z,theta, Rmajor):
+    Er = 1000
+    Ephi = 0
+    Ez = 0
+    return [Er,Ephi, Ez]
+
 
 def e_descr2(r,z,theta, Rmajor):
     r = (r-Rmajor)/100
diff --git a/Scripts_Matthieu/ErrorAnalysis.py b/Scripts_Matthieu/ErrorAnalysis.py
new file mode 100644
index 0000000000000000000000000000000000000000..a1b8cd3101806902921ec06889dbc96df99b0d0c
--- /dev/null
+++ b/Scripts_Matthieu/ErrorAnalysis.py
@@ -0,0 +1,222 @@
+import math
+import driftFunctions
+import AnalyticalPrescriptions
+import Constants
+import Utilities
+import pandas as pd
+import matplotlib.pyplot as plt
+import PlottingFunctions
+import AnalysisFunctions
+import os
+import sys
+import numpy as np
+
+# Analyze the performance and errors of different methods
+# This script does this for given analytical prescriptions for potential and
+# magnetic field
+# Specify which analytical case is considered and which cases should be 
+# calculated
+analyticalCase = sys.argv[1]
+Cases = sys.argv[2:]
+path = os.getcwd()
+print(path)
+print(Cases)
+methods = ['1', '2', '3', '4','5'] 
+
+# Define structures to be computed    
+efield_error_key = 'efield_er_rel_mag'
+L2Errors = []
+MeanErrors = []
+SingleErrors = []
+MaxErrors = []
+GGErrors = []
+lens = []
+
+nbs = []
+siner = 1000
+dmin = 1000000000
+r = 550
+phi = 0.15
+z = 30
+gg = 10
+for method in methods:
+#    errorsDF = pd.DataFrame()
+    errorsL2 = []
+    errorsMean = []
+    errorsSingle = []
+    nbCells = []
+    charLength = []
+    maxErrors = []
+    errorsGG = []
+    for case in Cases:
+        print(case)
+        [caseInfo, caseInfoGB] = AnalysisFunctions.readAllInfo(method, analyticalCase, '0', case)
+        caseErrors = caseInfo.get(efield_error_key)
+        ggErrors = caseInfo.get('GGcorr')
+        positions = caseInfo.get('pos')
+#	errorsDF[case] = caseErrors
+        y = len(caseErrors)
+        errorsL2.append(math.sqrt(sum([x**2 for x in caseErrors]))/y)
+        errorsMean.append(sum([x for x in caseErrors])/y)
+        nbCells.append(y)
+        vol = AnalysisFunctions.readText(path+'/'+case+'Output'+method+'Descr'+analyticalCase+'Noise0/VOLAV')
+        vol = float(vol[0])
+#        print(vol)
+        charLength.append((vol/10**6)**(1/3))
+        print(charLength)
+        maxErrors.append(max(caseErrors))
+        dmin = 1e18  # nearest-sample search: reset per case so earlier cases don't win
+        for i in range(len(caseErrors)):
+            ipos = positions[i]
+            d = (z-ipos[2])**2 + (r-ipos[0])**2 + (phi-ipos[1])**2
+            if d < dmin:
+                dmin = d
+                siner, gg = caseErrors[i], ggErrors[i]
+        errorsSingle.append(siner)
+        errorsGG.append(gg)	
+#        print(errors)
+#        print(maxErrors)
+#        print(nbCells)
+
+    L2Errors.append(errorsL2)
+    MeanErrors.append(errorsMean)
+    SingleErrors.append(errorsSingle)
+    MaxErrors.append(maxErrors)
+    GGErrors.append(errorsGG)
+    nbs.append(nbCells)
+    lens.append(charLength)
+#    plot = plt.figure()
+#    sns.violinplot(data = case[methods[0]])
+#    plt.savefig(path+'/Descr'+analyticalCase+method+'Violin.png')
+print('L2',L2Errors)
+print('Mean', MeanErrors)
+print('Single', SingleErrors)
+print('Max',MaxErrors)
+print('GG',GGErrors)
+print(nbs) 
+# Compute measure for gradient lengths and typical grid size
+typsize = 50*36*150
+#maxGL = 
+#avGL =  
+ 
+
+upperX = 1e0
+lowerX = 1e-5
+upperY = 1e-1
+lowerY = 1e-9   
+xlabel = 'Characteristic Cell Length [m]'
+ylabel = 'Relative Magnitude Error'
+
+
+plot1 = plt.figure(1)
+v1 = plt.vlines(typsize,lowerY,upperY, color = 'g')
+sc1 = plt.scatter(lens[0],MeanErrors[0],label = methods[0])
+sc2 = plt.scatter(lens[1],MeanErrors[1],label = methods[1])
+sc3 = plt.scatter(lens[2],MeanErrors[2],label = methods[2])
+sc4 = plt.scatter(lens[3],MeanErrors[3],label = methods[3])
+sc5 = plt.scatter(lens[4],MeanErrors[4],label = methods[4])
+ax = plt.gca()
+ax.set_yscale('log')
+ax.set_xscale('log')
+limits = [lowerX, upperX, lowerY, upperY]
+ax.legend()
+x = np.linspace(0.000001,1,100)
+y = x**(1)/100
+plt.loglog(x,y)
+plt.axis(limits)
+plt.title('Mean Errors Description'+analyticalCase)
+plt.xlabel(xlabel)
+plt.ylabel(ylabel)
+plt.savefig(path+'/Descr'+analyticalCase+'MeanError.png')
+
+plot2 = plt.figure(2)
+sc1 = plt.scatter(lens[0],L2Errors[0],label = methods[0])
+sc2 = plt.scatter(lens[1],L2Errors[1],label = methods[1])
+sc3 = plt.scatter(lens[2],L2Errors[2],label = methods[2])
+sc4 = plt.scatter(lens[3],L2Errors[3],label = methods[3])
+sc5 = plt.scatter(lens[4],L2Errors[4],label = methods[4])
+
+v2 = plt.vlines(typsize,lowerY,upperY, color = 'g')
+ax = plt.gca()
+ax.set_yscale('log')
+ax.set_xscale('log')
+limits = [lowerX, upperX, lowerY, upperY]
+ax.legend()
+x = np.linspace(0.000001,1,100)
+y = x**(1)/100
+plt.loglog(x,y)
+plt.axis(limits)
+plt.title('L2 Errors Description'+analyticalCase)
+plt.xlabel(xlabel)
+plt.ylabel(ylabel)
+plt.savefig(path+'/Descr'+analyticalCase+'L2Error.png')
+
+
+plot3 = plt.figure(3)
+sc1 = plt.scatter(lens[0],MaxErrors[0],label = 'Max Error '+methods[0])
+sc2 = plt.scatter(lens[1],MaxErrors[1],label = 'Max Error '+methods[1])
+sc3 = plt.scatter(lens[2],MaxErrors[2],label = 'Max Error '+methods[2])
+sc4 = plt.scatter(lens[3],MaxErrors[3],label = 'Max Error '+methods[3])
+sc5 = plt.scatter(lens[4],MaxErrors[4],label = 'Max Error '+methods[4])
+
+v2 = plt.vlines(typsize,lowerY,upperY, color = 'g')
+ax = plt.gca()
+ax.set_yscale('log')
+ax.set_xscale('log')
+limits = [lowerX, upperX, lowerY, upperY]
+ax.legend()
+x = np.linspace(0.000001,1,100)
+y = x**(1)/100
+plt.loglog(x,y)
+plt.axis(limits)
+plt.title('Max Errors Description'+analyticalCase)
+plt.xlabel(xlabel)
+plt.ylabel(ylabel)
+plt.savefig(path+'/Descr'+analyticalCase+'MaxError.png')
+
+
+plot4 = plt.figure(4)
+sc1 = plt.scatter(lens[0],SingleErrors[0],label = methods[0])
+sc2 = plt.scatter(lens[1],SingleErrors[1],label = methods[1])
+sc3 = plt.scatter(lens[2],SingleErrors[2],label = methods[2])
+sc4 = plt.scatter(lens[3],SingleErrors[3],label = methods[3])
+sc5 = plt.scatter(lens[4],SingleErrors[4],label = methods[4])
+
+v2 = plt.vlines(typsize,lowerY,upperY, color = 'g')
+ax = plt.gca()
+ax.set_yscale('log')
+ax.set_xscale('log')
+limits = [lowerX, upperX, lowerY, upperY]
+ax.legend()
+x = np.linspace(0.000001,1,100)
+y = x**(1)/100
+plt.loglog(x,y)
+plt.axis(limits)
+plt.title('Single Cell Error'+analyticalCase)
+plt.xlabel(xlabel)
+plt.ylabel(ylabel)
+plt.savefig(path+'/Descr'+analyticalCase+'SingleError.png')
+
+plot5 = plt.figure(5)
+sc1 = plt.scatter(lens[0],GGErrors[0],label = methods[0])
+sc2 = plt.scatter(lens[1],GGErrors[1],label = methods[1])
+sc3 = plt.scatter(lens[2],GGErrors[2],label = methods[2])
+sc4 = plt.scatter(lens[3],GGErrors[3],label = methods[3])
+sc5 = plt.scatter(lens[4],GGErrors[4],label = methods[4])
+
+v2 = plt.vlines(typsize,lowerY,upperY, color = 'g')
+ax = plt.gca()
+ax.set_yscale('log')
+ax.set_xscale('log')
+limits = [lowerX, upperX, lowerY, upperY]
+ax.legend()
+x = np.linspace(0.000001,1,100)
+y = x**(1)/100
+plt.loglog(x,y)
+plt.axis(limits)
+plt.title('GG Correction Error'+analyticalCase)
+plt.xlabel(xlabel)
+plt.ylabel(ylabel)
+plt.savefig(path+'/Descr'+analyticalCase+'GG correction Error.png')
+
+plt.show()
diff --git a/Scripts_Matthieu/ErrorAnalysisNoise.py b/Scripts_Matthieu/ErrorAnalysisNoise.py
new file mode 100644
index 0000000000000000000000000000000000000000..f062101b3b1e0f19c386a1f720bf31cc69181ed9
--- /dev/null
+++ b/Scripts_Matthieu/ErrorAnalysisNoise.py
@@ -0,0 +1,91 @@
+import math
+import driftFunctions
+import AnalyticalPrescriptions
+import Constants
+import Utilities
+import pandas as pd
+import matplotlib.pyplot as plt
+import PlottingFunctions
+import AnalysisFunctions
+import os
+import sys
+
+# Analyze the performance and errors of different methods
+# This script does this for given analytical prescriptions for potential and
+# magnetic field
+# Specify which analytical case is considered and which cases should be 
+# calculated
+analyticalCase = sys.argv[1]
+#Cases = sys.argv[2:]
+path = os.getcwd()
+print(path)
+#print(Cases)
+case = ''
+methods = ['1', '2', '3', '4'] 
+noiseLevels = [0,0.005,0.01,0.015,0.02,0.025,0.03,0.035]
+noiseNames = ['0','11','12','13','14', '15','16','17']
+
+# Define structures to be computed    
+efield_error_key = 'efield_er_rel_mag'
+exb_error_key = 'edrift_er_rel_mag'
+avErrors = []
+avErrorsD = []
+MaxErrors = []
+nbs = []
+
+for method in methods:
+    errors = []
+    errorsD = []
+    nbCells = []
+    maxErrors = []
+    for noise in noiseNames:
+        print(case)
+        [caseInfo,caseInfoGB] = AnalysisFunctions.readAllInfo(method, analyticalCase,noise, case)
+        caseErrors = caseInfo.get(efield_error_key)
+        caseErrorsD = caseInfo.get(exb_error_key)
+        y = len(caseErrors)
+        errors.append(sum([x/y for x in caseErrors]))
+        errorsD.append(sum([x/y for x in caseErrorsD]))
+        nbCells.append(y)
+        maxErrors.append(max(caseErrors))
+
+    avErrors.append(errors)
+    avErrorsD.append(errorsD)
+    MaxErrors.append(maxErrors)
+    nbs.append(nbCells)
+    
+print(avErrors)
+print(nbs) 
+# Compute measure for gradient lengths and typical grid size
+typsize = 50*36*150
+#maxGL = 
+#avGL =  
+ 
+
+upperX = 0.20
+lowerX = -0.01   
+upperY = 1
+lowerY = 1e-5   
+plot1 = plt.figure(1)
+#v1 = plt.vlines(typsize,lowerY,upperY, color = 'g')
+sc1 = plt.scatter(noiseLevels,avErrors[0],color = 'red',label = 'Average Error '+methods[0])
+sc2 = plt.scatter(noiseLevels,avErrors[1],color = 'green',label = 'Average Error '+methods[1])
+sc3 = plt.scatter(noiseLevels,avErrors[2],color = 'blue',label = 'Average Error '+methods[2]) 
+sc4 = plt.scatter(noiseLevels,avErrors[3],color = 'yellow',label = 'Average Error '+methods[3]) 
+sc5 = plt.scatter(noiseLevels,avErrorsD[0],color = 'maroon',label = 'Average Error ExB '+methods[0])
+sc6 = plt.scatter(noiseLevels,avErrorsD[1],color = 'darkgreen',label = 'Average Error ExB'+methods[1])
+sc7 = plt.scatter(noiseLevels,avErrorsD[2],color = 'navy',label = 'Average Error ExB'+methods[2]) 
+sc8 = plt.scatter(noiseLevels,avErrorsD[3],color = 'gold',label = 'Average Error ExB'+methods[3])
+#sc5 = plt.scatter(nbs[1],avErrors[4],color = 'red',label = 'Average Error '+methods[4]) 
+ax = plt.gca()
+ax.set_yscale('log')
+#ax.set_xscale('log')
+limits = [lowerX, upperX, lowerY, upperY]
+ax.legend()
+plt.axis(limits)
+plt.title('Average Errors Description'+analyticalCase)
+plt.xlabel('Noise SD Measure')
+plt.ylabel('Relative Error')
+plt.savefig(path+'/Descr'+analyticalCase+'Error.png')
+
+plt.show() 
diff --git a/Scripts_Matthieu/ExtractNoiseInfo.py b/Scripts_Matthieu/ExtractNoiseInfo.py
new file mode 100644
index 0000000000000000000000000000000000000000..adf9b829a244c2570367a51be82bd0efb9b10ee1
--- /dev/null
+++ b/Scripts_Matthieu/ExtractNoiseInfo.py
@@ -0,0 +1,64 @@
+import numpy as np
+import os
+import matplotlib.pyplot as plt
+
+
+path = os.getcwd()
+
+xnames = ['_1','_2','_3','_4','_5','_7','_8','_9','_10']
+Tdata = []
+
+for i in range(len(xnames)):
+    Temp = np.loadtxt('/u/mbj/Drifts/data-for-drift-computations/EIM/N03.00_P05.00_D05_C02.50-TEST/x-out'+xnames[i]+'/x-out/TE_TI', max_rows = 121554 )
+    Tlength = len(Temp)
+#    TempFiltered = Temp[Temp != 0]
+#    TempFiltered = np.nonzero(Temp)
+    TempFiltered = np.reshape(Temp,Tlength*6)
+    print('max',np.amax(TempFiltered))
+    Tdata.append(TempFiltered)
+
+
+avTemp = Tdata[0]    
+for i in range(1,len(xnames)):
+    print(Tdata[i][50000:50010])
+    avTemp = np.add(avTemp,Tdata[i])
+avTemp = avTemp*1/len(xnames)
+print(avTemp[100000:100010])
+
+difData = []  
+normdifData = []  
+for i in range(len(xnames)):
+    difTemp = np.absolute(np.subtract(avTemp,Tdata[i]))
+    normdifTemp = np.divide(difTemp,avTemp)
+    difData.append(difTemp)
+    normdifData.append(normdifTemp)
+    
+avDif = difData[0]    
+for i in range(1,len(xnames)):
+    avDif = np.add(avDif,difData[i])
+avDif = avDif*1/len(xnames)
+
+#covMat = np.outer(avDif,avDif) #np.empty((size,size))
+#for i in range(size):
+#    for j in range(size):
+#        covMat[i,j] = avDif[i]*avDif[j]*len(xnames)
+	
+
+avError = np.average(avDif)
+print('avError', avError)
+print(avDif)
+print(avTemp)
+
+plot1 = plt.figure(1)
+plt.xlabel('Temperature')
+plt.ylabel('Variance')
+sc1 = plt.scatter(avTemp,avDif)
+#plt.colorbar(sc1)
+
+
+plot2 = plt.figure(2)
+plt.xlabel('Av Temperature')
+plt.ylabel('Temp')
+sc1 = plt.scatter(avTemp,Tdata[1])
+#plt.colorbar(sc1)
+plt.show()
diff --git a/Scripts_Matthieu/InputsGeometry.py b/Scripts_Matthieu/InputsGeometry.py
new file mode 100644
index 0000000000000000000000000000000000000000..8c9408321e818e91af5be916c169b68350554e3a
--- /dev/null
+++ b/Scripts_Matthieu/InputsGeometry.py
@@ -0,0 +1,26 @@
+import AnalyticalPrescriptions
+from MeshFunctions import *
+import shutil
+import os
+def createInputFiles(nz, nr,np,nt,path, trange, Rmajor, Rminor, bfield, potential):
+    # Create the file with the general info
+    gi_path = path+"/input.geo"
+    CreateGeneralInfo(gi_path, nz,[nr],[np],[nt])
+
+
+    # Create the GEOMETRY_3D_DATA file
+    [tor,rad,hor] = create_toroidal_vertices(nt,nr,np,trange,Rmajor,Rminor)
+    print(tor)
+    filepath = path+'/GEOMETRY_3D_DATA'
+    writeGEOMETRY3DDATA(tor,rad,hor,nt,nr,np,filepath)
+
+    # Create the magnetic field data
+    # Create an analytical description
+    bfieldpath = path+"/BFIELD_STRENGTH"
+    imposeAnalytic(tor,rad,hor,bfield,Rmajor, Rminor, bfieldpath)
+
+    # Create the potential data
+    # At the moment we use the same grid for magnetic and plasma cells
+
+    potentialpath = path+"/POTENTIAL"
+    imposeAnalytic(tor,rad,hor,potential,Rmajor, Rminor,potentialpath)
diff --git a/Scripts_Matthieu/MeshFunctions.py b/Scripts_Matthieu/MeshFunctions.py
index fa6c54d49b882a3ff3192cc85e2eee1e91cd76d7..23d98cd18a640819e4feba446223cf34fc86f1c4 100644
--- a/Scripts_Matthieu/MeshFunctions.py
+++ b/Scripts_Matthieu/MeshFunctions.py
@@ -70,18 +70,6 @@ def imposeAnalytic(tor_surf,rad_pos,hor_pos,analytic_description,Rmajor, Rminor,
             bfs.write('\n')
     return
 
-def b_field_description(r,z,theta):
-    # Radial B-field (linear and max at central surfaces of the torus
-    R = math.sqrt((r-500)**2 + z**2)+0.01+0*theta
-    bval = 0.01/R*100  # R in cm
-    return bval
-
-def Bfieldvector(r,z,theta):
-    R = math.sqrt((r - 500) ** 2 + z ** 2) + 0.01
-    Br = 0
-    Bz = math.cos(theta/180*1/10*math.pi)*0.01
-    Btheta = 0.01/R*100
-    return [Br,Bz,Btheta]
 
 def CreateGeneralInfo(filepath, nz,r,z,theta):
     # nz is the number of zones considered
diff --git a/Scripts_Matthieu/NumericalAnalysisDescription1.py b/Scripts_Matthieu/NumericalAnalysisDescription1.py
index 0f54eaff2a6a5b3e78528ecba38080a7bbcad8d2..393eb22ee2182a8ff29670d1bcec61ef27525cca 100644
--- a/Scripts_Matthieu/NumericalAnalysisDescription1.py
+++ b/Scripts_Matthieu/NumericalAnalysisDescription1.py
@@ -9,14 +9,15 @@ import PlottingFunctions
 import AnalysisFunctions
 
 # Constants
-path = '/u/mbj/Drifts/data-for-drift-computations/Description1/'
+path = '/u/mbj/Drifts/data-for-drift-computations/Description1_bis/'
+method = 'GG'
 
 LSQfiles = []
-LSQfiles.append('Case50')
-LSQfiles.append('Case100')
-LSQfiles.append('Case200')
-LSQfiles.append('Case400')
-LSQfiles.append('Case1000')
+LSQfiles.append('Case10')
+LSQfiles.append('Case20')
+LSQfiles.append('Case40')
+LSQfiles.append('Case80')
+LSQfiles.append('Case160')
 
 
 
@@ -26,11 +27,14 @@ errorsGG = []
 nbCellsGG = []
 
 for i in range(len(LSQfiles)):
-    torSliceDict = AnalysisFunctions.getAllInfo(AnalysisFunctions.readText(path + LSQfiles[i] + '/OutputGG/DRIFT_POS'),
-                                                AnalysisFunctions.readText(path+LSQfiles[i]+'/OutputGG/ExB_RESULTS'),
-                                                AnalysisFunctions.readText(path + LSQfiles[i] + '/OutputGG/EFIELD'),
-                                                AnalyticalPrescriptions.e_descr1, AnalyticalPrescriptions.b_field_description1,
-                                                driftFunctions.ExBdrift)
+    torSliceDict = AnalysisFunctions.getAllInfo(AnalysisFunctions.readText(path + LSQfiles[i] + '/Output' + method+'/DRIFT_POS'),
+                                                AnalysisFunctions.readText(path+LSQfiles[i]+'/Output' + method+'/ExB_RESULTS'),
+                                                AnalysisFunctions.readText(path + LSQfiles[i] + '/Output' + method+'/EFIELD'),
+                                                AnalysisFunctions.readText(path + LSQfiles[i] + '/Output' + method+'/GRADB'),
+                                                AnalysisFunctions.readText(path + LSQfiles[i] + '/Output' + method+'/GRADB_DRIFTS'),
+                                                AnalyticalPrescriptions.e_descr2, AnalyticalPrescriptions.b_field_description1,
+                                                AnalyticalPrescriptions.gradb_description1, 
+                                                driftFunctions.ExBdrift, driftFunctions.gradBdrift)
     relerrors = torSliceDict.get('efield_er_rel_mag')
     y = len(relerrors)
     errorsLSQ.append(sum([x/y for x in relerrors]))
@@ -54,6 +58,7 @@ ax = plt.gca()
 ax.set_yscale('log')
 ax.set_xscale('log')
 
-
-
+limits = [1e4, 1e7, 1e-6, 1]
+plt.axis(limits)
+plt.savefig(path+method+'Error.png')
 plt.show()
diff --git a/Scripts_Matthieu/PlotCase.py b/Scripts_Matthieu/PlotCase.py
new file mode 100644
index 0000000000000000000000000000000000000000..102a60d0f89ce8afeeaa1ee54b62c50d496bb294
--- /dev/null
+++ b/Scripts_Matthieu/PlotCase.py
@@ -0,0 +1,61 @@
+import pandas as pd
+import AnalysisFunctions
+import seaborn as sns
+import matplotlib.pyplot as plt
+#df = AnalysisFunctions.readAllInfo('GG','1','Case20/')
+#da = df.loc[:,'edrifts'].to_list()
+#print(da[1])
+#print(type(da))
+#print(da)
+
+import math
+import driftFunctions
+import AnalyticalPrescriptions
+import Constants
+import Utilities
+import pandas as pd
+import matplotlib.pyplot as plt
+import PlottingFunctions
+import AnalysisFunctions
+import os
+import sys
+
+# Analyze the performance and errors of different methods
+# This script does this for given analytical prescriptions for potential and
+# magnetic field
+# Specify which analytical case is considered and which cases should be 
+# calculated
+analyticalCase = sys.argv[1]
+Cases = sys.argv[2:]
+path = os.getcwd()
+print(path)
+print(Cases)
+method = 'LSQ' 
+
+# Define structures to be computed    
+key = 'efield_er_rel_mag'
+avErrors = []
+MaxErrors = []
+nbs = []
+
+
+errorsDF = pd.DataFrame()
+errors = []
+nbCells = []
+maxErrors = []
+for case in Cases:
+    caseInfo = AnalysisFunctions.readAllInfo(method, analyticalCase, case +'/')
+    caseErrors = caseInfo.get(key)
+    tempDF = pd.DataFrame({case: caseErrors})
+    errorsDF = pd.concat([errorsDF,tempDF], axis = 1)
+    y = len(caseErrors)
+    errors.append(sum([x/y for x in caseErrors]))
+    nbCells.append(y)
+    maxErrors.append(max(caseErrors))
+    print(errors)
+    print(maxErrors)
+    print(nbCells)
+plot = plt.figure()
+sns.violinplot(data = errorsDF)
+plt.savefig(path+'/Descr'+analyticalCase+method+'Violin.png')	
+plt.show()
diff --git a/Scripts_Matthieu/PlotTrajectory.py b/Scripts_Matthieu/PlotTrajectory.py
new file mode 100644
index 0000000000000000000000000000000000000000..ccd263998fc6f789946c70f17a38f2c46dd7ced9
--- /dev/null
+++ b/Scripts_Matthieu/PlotTrajectory.py
@@ -0,0 +1,36 @@
+import matplotlib.pyplot as plt
+import os
+import sys
+import AnalysisFunctions
+
+path = os.getcwd()
+method = sys.argv[1]
+analyticalP = sys.argv[2]
+noise = sys.argv[3]
+name = sys.argv[4]
+aP = int(analyticalP)
+posNum = []
+posLen = []
+
+plot1 = plt.figure(1)
+plt.xlabel('Radial Position [cm]')
+plt.ylabel('Vertical Position [cm]')
+title = 'Trajectory'+ name
+plt.title(title)
+for j in range(1,6):
+    positions = AnalysisFunctions.readText(path+ '/Output' + method+'Descr'+analyticalP+'Noise'+noise+'/TRAJECTORY'+str(j))
+    for i in range(len(positions)):
+        if i%2 == 0:
+            p = [float(x) for x in positions[i].split()]
+            posNum.append(p)
+        else:
+            l = float(positions[i][0])
+            posLen.append(l)
+
+    x = [r[0] for r in posNum]
+    y = [r[2] for r in posNum]
+
+
+    sc = plt.scatter(x, y, s=1)
+
+plt.show()
diff --git a/Scripts_Matthieu/PlotTrajectorySingle.py b/Scripts_Matthieu/PlotTrajectorySingle.py
new file mode 100644
index 0000000000000000000000000000000000000000..43579f758c76b2120c81cff6b6edd40ddd0a8d0f
--- /dev/null
+++ b/Scripts_Matthieu/PlotTrajectorySingle.py
@@ -0,0 +1,47 @@
+import matplotlib.pyplot as plt
+import os
+import sys
+import AnalysisFunctions
+import math
+import numpy as np
+
+path = os.getcwd()
+method = sys.argv[1]
+analyticalP = sys.argv[2]
+noise = sys.argv[3]
+name = sys.argv[4]
+aP = int(analyticalP)
+posNum = []
+posLen = []
+
+#plot1 = plt.figure(1)
+figure,axes = plt.subplots(1)
+plt.xlabel('Radial Position [cm]')
+plt.ylabel('Vertical Position [cm]')
+title = 'Trajectory'+ name
+plt.title(title)
+positions = AnalysisFunctions.readText(path+ '/Output' + method+'Descr'+analyticalP+'Noise'+noise+'/TRAJECTORY1')
+for i in range(len(positions)):
+    if i%2 == 0:
+        p = [float(x) for x in positions[i].split()]
+        posNum.append(p)
+    else:
+        l = float(positions[i])
+        posLen.append(l)
+
+x = [r[0] for r in posNum]
+y = [r[2] for r in posNum]
+
+R = math.sqrt((x[0]-500)**2+y[0]**2)
+print(R)
+
+sc = plt.scatter(x, y, s=10, c = posLen, cmap = 'plasma')
+#circle = plt.Circle((500,0),R, color = 'green')
+angle = np.linspace(0,2*np.pi,150)
+x = (500+R*np.cos(angle))
+y = R*np.sin(angle)
+
+axes.plot(x,y)
+plt.show()
+plt.savefig(path+'/Trajectory'+method+analyticalP+noise+name+'.png')
+print(path+method+analyticalP+noise+name+'.png')
diff --git a/Scripts_Matthieu/PrepareGeometry.py b/Scripts_Matthieu/PrepareGeometry.py
index 93a9c49090c28af4421f773c353ab7c0f2b63fb0..72b53e5759c4e3d4ea8324ca002e810d80a2f3cc 100644
--- a/Scripts_Matthieu/PrepareGeometry.py
+++ b/Scripts_Matthieu/PrepareGeometry.py
@@ -3,10 +3,11 @@
 from MeshFunctions import *
 import os
 import shutil
+import AnalyticalPrescriptions
 # The folder where the different cases should be stored is
 folder = '/u/mbj/Drifts/data-for-drift-computations'
 # A specific folder for the case should be defined
-casefolder = "TestCase"
+casefolder = "TestNoise"
 # create the folder
 path = folder+"/"+casefolder
 # Check if the folder should be removed => Control to not loose simulations
@@ -24,9 +25,9 @@ os.mkdir(path)
 # Number of zones considered
 nz = 1
 # Number of toroidal, radial, poloidal surfaces
-nt = 5
-nr = 10
-np = 10
+nt = 36
+nr = 30
+np = 150
 # toroidal range (degrees)
 trange = 36
 # Major and minor radius (considering a torus) (in cm!)
diff --git a/Scripts_Matthieu/PrepareGeometryCompareNodal.py b/Scripts_Matthieu/PrepareGeometryCompareNodal.py
index 41d363737a9ea486d22cbc730eba56fab9fa3811..80235a113c31f886bf378dc68299395e1a0e3d1c 100644
--- a/Scripts_Matthieu/PrepareGeometryCompareNodal.py
+++ b/Scripts_Matthieu/PrepareGeometryCompareNodal.py
@@ -4,18 +4,18 @@ import shutil
 import AnalyticalPrescriptions
 # The folder where the different cases should be stored is
 basepath = '/u/mbj/Drifts/data-for-drift-computations'
-folder = '/u/mbj/Drifts/data-for-drift-computations/CompareNodal/'
+folder = '/u/mbj/Drifts/data-for-drift-computations/CompareNodal75'
 os.mkdir(folder)
 # Choose the parameters for the case to test
 # Geometry
 # Number of zones considered
 nz = 1
 # Loop over different cases to prepare
-nts = [9, 18, 36]
-nrs = [15, 30, 60]
-nps = [100, 200, 400]
+nts = [6, 9,12,18,24]
+nrs = [15,23, 30,45, 60]
+nps = [75,110, 150,225, 300]
 #cases = [nrs,nps]
-caseNames = ['Case1','Case2','Case3']
+caseNames = ['Case1','Case2','Case3', 'Case4', 'Case5']
 # Number of toroidal, radial, poloidal surfaces
 bfielddescription = AnalyticalPrescriptions.bfieldstrength1
 for i in range(len(nps)):
diff --git a/Scripts_Matthieu/TrajectoryDeviation.py b/Scripts_Matthieu/TrajectoryDeviation.py
new file mode 100644
index 0000000000000000000000000000000000000000..55a025c2e307e1bb08f583f7d5681fa4d520bb1c
--- /dev/null
+++ b/Scripts_Matthieu/TrajectoryDeviation.py
@@ -0,0 +1,61 @@
+import matplotlib.pyplot as plt
+import os
+import sys
+import AnalysisFunctions
+
+analyticalCase = sys.argv[1]
+path = os.getcwd()
+methods = ['6','8']
+noise = '0'
+
+pathlengths = []
+deviations = []
+reldev = []
+timesteps = []
+
+for method in methods:
+    pls = []
+    devs = []
+    rds = []
+    ts = []
+    trajInfo = AnalysisFunctions.readText(path+ '/Output' + method+'Descr'+analyticalCase+'Noise'+noise+'/TRAJECTORY_INFO')
+    for i in range(len(trajInfo)):
+        t = [float(x) for x in trajInfo[i].split()]
+        print(t)
+        pls.append(t[0])
+        devs.append(t[1])
+        ts.append(t[2])
+        rds.append(devs[i]/pls[i])
+    pathlengths.append(pls)
+    deviations.append(devs)
+    reldev.append(rds)
+    timesteps.append(ts)
+
+upperX = 1e-4
+lowerX = 1e-9
+upperY = 1e-3
+lowerY = 1e-8   
+xlabel = 'Timestep [s]'
+ylabel = 'Relative Deviation (after 1s)'
+
+
+plot1 = plt.figure(1)
+sc1 = plt.scatter(timesteps[0],reldev[0],label = methods[0])
+sc2 = plt.scatter(timesteps[1],reldev[1],label = methods[1])
+
+ax = plt.gca()
+ax.set_yscale('log')
+ax.set_xscale('log')
+limits = [lowerX, upperX, lowerY, upperY]
+ax.legend()
+#x = np.linspace(0.000001,1,100)
+#y = x**(1)/100
+#plt.loglog(x,y)
+#plt.axis(limits)
+plt.title('Relative Deviation'+analyticalCase)
+plt.xlabel(xlabel)
+plt.ylabel(ylabel)
+plt.savefig(path+'/Descr'+analyticalCase+'Reldev.png')
+
+plt.show()
+	
diff --git a/Scripts_Matthieu/writeInfo.py b/Scripts_Matthieu/writeInfo.py
new file mode 100644
index 0000000000000000000000000000000000000000..bb4fa864fb6385c25dd92595dc8d00a0b193f284
--- /dev/null
+++ b/Scripts_Matthieu/writeInfo.py
@@ -0,0 +1,43 @@
+import math
+import driftFunctions
+import AnalyticalPrescriptions
+import Constants
+import Utilities
+import pandas as pd
+import matplotlib.pyplot as plt
+import PlottingFunctions
+import AnalysisFunctions
+import os
+import sys
+
+path = os.getcwd()
+method = sys.argv[1]
+analyticalP = sys.argv[2]
+noise = sys.argv[3]
+aP = int(analyticalP)
+
+positions = AnalysisFunctions.readText(path+ '/Output' + method+'Descr'+analyticalP+'Noise'+noise+'/DRIFT_POS')
+edrifts = AnalysisFunctions.readText(path+'/Output' + method+'Descr'+analyticalP+'Noise'+noise+'/ExB_RESULTS')
+efield = AnalysisFunctions.readText(path + '/Output' + method+'Descr'+analyticalP+'Noise'+noise+'/EFIELD')
+gradb = AnalysisFunctions.readText(path +'/Output' + method+'Descr'+analyticalP+'Noise'+noise+'/GRADB')
+gdrifts = AnalysisFunctions.readText(path+ '/Output' + method+'Descr'+analyticalP+'Noise'+noise+'/GRADB_DRIFTS')
+boundarycells =  AnalysisFunctions.readText(path+ '/Output' + method+'Descr'+analyticalP+'Noise'+noise+'/BOUNDARYCELLS')
+boundarynodes = AnalysisFunctions.readText(path+ '/Output' + method+'Descr'+analyticalP+'Noise'+noise+'/BOUNDARYNODES')
+gbpositions =  AnalysisFunctions.readText(path+ '/Output' + method+'Descr'+analyticalP+'Noise'+noise+'/GB_POS')
+ggcorr = AnalysisFunctions.readText(path+ '/Output' + method+'Descr'+analyticalP+'Noise'+noise+'/GGCORR')
+
+if aP ==1:
+    edescr = AnalyticalPrescriptions.e_descr1
+elif aP == 5:
+    edescr = AnalyticalPrescriptions.e_descr5
+elif aP >= 2:
+    edescr = AnalyticalPrescriptions.e_descr2
+    
+bdescr = AnalyticalPrescriptions.b_field_description1
+gdescr = AnalyticalPrescriptions.gradb_description1
+exb = driftFunctions.ExBdrift
+gB = driftFunctions.gradBdrift
+
+allInfo = AnalysisFunctions.getAllInfo(positions,gbpositions,edrifts,efield,gradb,gdrifts,boundarycells,boundarynodes,edescr,bdescr,gdescr,exb, gB, ggcorr, method+'Descr'+analyticalP+'Noise'+noise)
+
+print('CSV file written')