Commit 09e9920e authored by schubert.draco's avatar schubert.draco
Browse files

added files

parent 9f2ff30f
{
"numTweets_avg_overall": 2192.113743935629,
"numTweets_std_overall": 1061.2491135841647,
"lenTweets_avg_overall": 71.39748742726482,
"lenTweets_std_overall": 46.411523772082454,
"lenTweets_avg_avg": 70.82133036748466,
"lenTweets_std_avg": 20.141011684772987,
"lenTweets_avg_std": 39.39719115246855,
"lenTweets_std_std": 10.262975766623796,
"gender": {
"male": {
"numTweets_avg_overall": 2122.368110317493,
"numTweets_std_overall": 1089.6222901800236,
"lenTweets_avg_overall": 71.3233523605997,
"lenTweets_std_overall": 46.65132241209088,
"lenTweets_avg_avg": 70.52956999611311,
"lenTweets_std_avg": 20.69018192342381,
"lenTweets_avg_std": 39.21629415618757,
"lenTweets_std_std": 10.349072372467633
},
"female": {
"numTweets_avg_overall": 2368.3955963685694,
"numTweets_std_overall": 963.7100761992031,
"lenTweets_avg_overall": 71.56539903909596,
"lenTweets_std_overall": 45.86331616525645,
"lenTweets_avg_avg": 71.55875371664362,
"lenTweets_std_avg": 18.661868464283003,
"lenTweets_avg_std": 39.85357884524676,
"lenTweets_std_std": 10.02852300040678
}
},
"subgroups": {
"male_child_21": {
"numTweets_avg_overall": 1767.295871559633,
"numTweets_std_overall": 1194.4384616611944,
"lenTweets_avg_overall": 66.51979064060187,
"lenTweets_std_overall": 46.29229060273809,
"lenTweets_avg_avg": 64.40098234908798,
"lenTweets_std_avg": 19.124728140036797,
"lenTweets_avg_std": 38.79606751137136,
"lenTweets_std_std": 10.44020658599531
},
"male_young_adult_35": {
"numTweets_avg_overall": 1883.8103741496598,
"numTweets_std_overall": 1133.6236486477387,
"lenTweets_avg_overall": 59.82994058304719,
"lenTweets_std_overall": 40.75935752391876,
"lenTweets_avg_avg": 60.30036418762935,
"lenTweets_std_avg": 15.12137089468164,
"lenTweets_avg_std": 36.17980792677677,
"lenTweets_std_std": 7.577439523988967
},
"male_adult_50": {
"numTweets_avg_overall": 2339.248833396393,
"numTweets_std_overall": 994.8331991361135,
"lenTweets_avg_overall": 73.31284710121425,
"lenTweets_std_overall": 46.98794691736297,
"lenTweets_avg_avg": 72.87698602913602,
"lenTweets_std_avg": 19.039728017786338,
"lenTweets_avg_std": 40.72306579376926,
"lenTweets_std_std": 10.780681514024044
},
"male_old_adult_65": {
"numTweets_avg_overall": 2278.1801997394705,
"numTweets_std_overall": 1031.4466450748166,
"lenTweets_avg_overall": 82.09775658710922,
"lenTweets_std_overall": 49.39339459817052,
"lenTweets_avg_avg": 81.98922866827267,
"lenTweets_std_avg": 21.85932796455404,
"lenTweets_avg_std": 41.912230352115365,
"lenTweets_std_std": 11.93745937148505
},
"male_retiree": {
"numTweets_avg_overall": 2101.6541802388706,
"numTweets_std_overall": 1099.321815212385,
"lenTweets_avg_overall": 86.16038010491194,
"lenTweets_std_overall": 50.12779072263013,
"lenTweets_avg_avg": 85.46582563699941,
"lenTweets_std_avg": 22.396086985526093,
"lenTweets_avg_std": 41.57785496455442,
"lenTweets_std_std": 12.326925857495254
},
"female_child_21": {
"numTweets_avg_overall": 1999.8095238095239,
"numTweets_std_overall": 1096.983209586567,
"lenTweets_avg_overall": 64.67177643292035,
"lenTweets_std_overall": 44.52348768350967,
"lenTweets_avg_avg": 64.39516705649208,
"lenTweets_std_avg": 18.650402702619118,
"lenTweets_avg_std": 38.10127131926244,
"lenTweets_std_std": 11.11679547208627
},
"female_young_adult_35": {
"numTweets_avg_overall": 2217.995842572062,
"numTweets_std_overall": 1023.2535211219135,
"lenTweets_avg_overall": 63.87665249323058,
"lenTweets_std_overall": 41.842328298800695,
"lenTweets_avg_avg": 64.16868508327316,
"lenTweets_std_avg": 13.795413173625166,
"lenTweets_avg_std": 37.78364799784759,
"lenTweets_std_std": 8.312340615802565
},
"female_adult_50": {
"numTweets_avg_overall": 2532.795872208086,
"numTweets_std_overall": 852.9105324162001,
"lenTweets_avg_overall": 72.81265578084007,
"lenTweets_std_overall": 46.285281844873346,
"lenTweets_avg_avg": 72.86456642516825,
"lenTweets_std_avg": 17.46345000990039,
"lenTweets_avg_std": 40.94722672058441,
"lenTweets_std_std": 10.070957032746302
},
"female_old_adult_65": {
"numTweets_avg_overall": 2472.643169058016,
"numTweets_std_overall": 910.5320740427612,
"lenTweets_avg_overall": 80.6639163376557,
"lenTweets_std_overall": 48.81975966361505,
"lenTweets_avg_avg": 81.4854361836716,
"lenTweets_std_avg": 21.467205384787274,
"lenTweets_avg_std": 41.988910615619965,
"lenTweets_std_std": 11.59098122639457
},
"female_retiree": {
"numTweets_avg_overall": 2180.982206405694,
"numTweets_std_overall": 1070.7823682958333,
"lenTweets_avg_overall": 86.29652479538423,
"lenTweets_std_overall": 48.6546882840907,
"lenTweets_avg_avg": 85.95002639053074,
"lenTweets_std_avg": 21.022005278100746,
"lenTweets_avg_std": 41.00525387308098,
"lenTweets_std_std": 11.812315484225351
}
},
"life_phase": {
"child_21": {
"numTweets_avg_overall": 1856.8251057827927,
"numTweets_std_overall": 1162.6244835318894,
"lenTweets_avg_overall": 65.75341913225253,
"lenTweets_std_overall": 45.57617940412354,
"lenTweets_avg_avg": 64.39874317436488,
"lenTweets_std_avg": 18.930266579370286,
"lenTweets_avg_std": 38.52853667858471,
"lenTweets_std_std": 10.703354345420388
},
"young_adult_35": {
"numTweets_avg_overall": 1976.4456822372465,
"numTweets_std_overall": 1114.1843209300366,
"lenTweets_avg_overall": 61.08877068931477,
"lenTweets_std_overall": 41.14197237300491,
"lenTweets_avg_avg": 61.37265227855458,
"lenTweets_std_avg": 14.866417953206934,
"lenTweets_avg_std": 36.624989659381455,
"lenTweets_std_std": 7.821106012443668
},
"adult_50": {
"numTweets_avg_overall": 2398.953689167975,
"numTweets_std_overall": 957.4550930707834,
"lenTweets_avg_overall": 73.14994090648639,
"lenTweets_std_overall": 46.76084364575608,
"lenTweets_avg_avg": 72.8731548640188,
"lenTweets_std_avg": 18.56700318808111,
"lenTweets_avg_std": 40.792317012054276,
"lenTweets_std_std": 10.566582023293403
},
"old_adult_65": {
"numTweets_avg_overall": 2328.3854082783055,
"numTweets_std_overall": 1005.1665873626081,
"lenTweets_avg_overall": 81.70464195582123,
"lenTweets_std_overall": 49.24093943055357,
"lenTweets_avg_avg": 81.85916273932833,
"lenTweets_std_avg": 21.758164074239232,
"lenTweets_avg_std": 41.932049517287545,
"lenTweets_std_std": 11.84799676865512
},
"retiree": {
"numTweets_avg_overall": 2120.19925124792,
"numTweets_std_overall": 1093.011587801765,
"lenTweets_avg_overall": 86.19312005452663,
"lenTweets_std_overall": 49.7775510896713,
"lenTweets_avg_avg": 85.57902065508785,
"lenTweets_std_avg": 22.079172916741957,
"lenTweets_avg_std": 41.443882519338736,
"lenTweets_std_std": 12.208443722208731
}
},
"subKeys": [
"male",
"female",
"male_child_21",
"male_young_adult_35",
"male_adult_50",
"male_old_adult_65",
"male_retiree",
"female_child_21",
"female_young_adult_35",
"female_adult_50",
"female_old_adult_65",
"female_retiree",
"child_21",
"young_adult_35",
"adult_50",
"old_adult_65",
"retiree"
],
"statKeys": [
"numTweets_avg_overall",
"numTweets_std_overall",
"lenTweets_avg_overall",
"lenTweets_std_overall",
"lenTweets_avg_avg",
"lenTweets_std_avg",
"lenTweets_avg_std",
"lenTweets_std_std"
]
}
\ No newline at end of file
......@@ -9,14 +9,14 @@
# Job Name:
#SBATCH -J svm_count_2000
# Queue:
#SBATCH --partition=general
#SBATCH --partition=broadwell
# Number of nodes and MPI tasks per node:
#SBATCH --nodes=1
#SBATCH --ntasks-per-node=1
# Enable Hyperthreading:
#SBATCH --ntasks-per-core=2
# for OpenMP:
#SBATCH --cpus-per-task=64
#SBATCH --cpus-per-task=80
#SBATCH --mail-type=none
#SBATCH --mail-user=schubert@coll.mpg.de
# Wall clock limit:
......@@ -34,4 +34,4 @@ module load scikit-learn/0.19.1
# Run the program:
srun python /draco/u/mschuber/PAN/attributionfeatures/Scripts/svm_count.py stratified_subsample bigram 3 2000 org
echo "job finished"
\ No newline at end of file
echo "job finished"
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment