Enabled Multiclass Logistic Regression Tests (#939)

* Enabled the multiclass logistic regression tests

* Provided comparison thresholds in the tests

* Defined the tolerance as a constant in the BaseTestBaseline class (see the sketch below)

* Used UpperCamelCase for the constant and underscore separators for large decimal numbers
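
A minimal sketch of what the threshold and naming bullets could amount to; BaseTestBaseline is the test base class these changes target, but the constant's value and the comparison helper below are hypothetical illustrations, not the commit's actual code:

using System;

public abstract class BaseTestBaseline
{
    // UpperCamelCase name and underscore separators in the large decimal
    // literal, per the bullets above; the value itself is an assumption.
    private const double Tolerance = 10_000_000;

    // Hypothetical helper: treats expected and actual as equal when their
    // relative difference stays within 1 / Tolerance, so baseline numbers
    // regenerated on different hardware can still match.
    protected static bool EqualWithinTolerance(double expected, double actual)
    {
        double allowed = Math.Abs(expected) / Tolerance;
        return Math.Abs(expected - actual) <= allowed;
    }
}

Dividing by Tolerance makes the check relative rather than absolute, which matters for baselines like the ones below that mix scores on the order of 1E-10 with metrics near 90.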
Anirudh Agnihotry 2018-09-27 12:17:30 -07:00 committed by Justin Ormont
Parent 7fde5a378c
Commit b871c862cf
52 changed files: 3937 additions and 27 deletions
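
The diffs below are the newly added baseline output files for these tests (the page shows a subset of the 52 changed files).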

@@ -0,0 +1,106 @@
maml.exe CV tr=MulticlassLogisticRegression{l1=0.001 l2=0.1 ot=1e-3 nt=1 nn=+} threads=- norm=No dout=%Output% data=%Data% seed=1
Not adding a normalizer.
Beginning optimization
num vars: 15
improvement criterion: Mean Improvement
L1 regularization selected 11 of 15 weights.
Not training a calibrator because it is not needed.
Not adding a normalizer.
Beginning optimization
num vars: 15
improvement criterion: Mean Improvement
L1 regularization selected 11 of 15 weights.
Not training a calibrator because it is not needed.
Confusion table
||========================
PREDICTED || 0 | 1 | 2 | Recall
TRUTH ||========================
0 || 21 | 0 | 0 | 1.0000
1 || 0 | 29 | 1 | 0.9667
2 || 0 | 2 | 26 | 0.9286
||========================
Precision ||1.0000 |0.9355 |0.9630 |
Accuracy(micro-avg): 0.962025
Accuracy(macro-avg): 0.965079
Log-loss: 0.129858
Log-loss reduction: 88.059239
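For reference, the metrics above follow directly from the confusion table: micro-avg accuracy is total correct over total instances, (21 + 29 + 26) / 79 = 0.962025; macro-avg accuracy is the unweighted mean of the per-class recalls, (1.0000 + 0.9667 + 0.9286) / 3 = 0.965079; and log-loss reduction is 100 * (1 - 0.129858 / H) = 88.06, where H = 1.0875 is the log-loss of always predicting the empirical class priors (21/79, 30/79, 28/79).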
Confusion table
||========================
PREDICTED || 0 | 1 | 2 | Recall
TRUTH ||========================
0 || 29 | 0 | 0 | 1.0000
1 || 0 | 18 | 2 | 0.9000
2 || 0 | 0 | 22 | 1.0000
||========================
Precision ||1.0000 |1.0000 |0.9167 |
Accuracy(micro-avg): 0.971831
Accuracy(macro-avg): 0.966667
Log-loss: 0.125563
Log-loss reduction: 88.434327
OVERALL RESULTS
---------------------------------------
Accuracy(micro-avg): 0.966928 (0.0049)
Accuracy(macro-avg): 0.965873 (0.0008)
Log-loss: 0.127710 (0.0021)
Log-loss reduction: 88.246783 (0.1875)
---------------------------------------
Warning: There is no NA value for type 'Text'. The missing key value will be mapped to the default value of 'Text'
Warning: There is no NA value for type 'Text'. The missing key value will be mapped to the default value of 'Text'
Warning: There is no NA value for type 'Text'. The missing key value will be mapped to the default value of 'Text'
Warning: There is no NA value for type 'Text'. The missing key value will be mapped to the default value of 'Text'
Physical memory usage(MB): %Number%
Virtual memory usage(MB): %Number%
%DateTime% Time elapsed(s): %Number%
--- Progress log ---
[1] 'LBFGS data prep' started.
[1] 'LBFGS data prep' finished in %Time%.
[2] 'LBFGS Optimizer' started.
[2] (%Time%) 0 iterations Loss: 1.0986123085022
[2] (%Time%) 1 iterations Loss: 1.00646448135376 Improvement: 0.09215
[2] (%Time%) 2 iterations Loss: 0.909583747386932 Improvement: 0.09593
[2] (%Time%) 3 iterations Loss: 0.525106191635132 Improvement: 0.3158
[2] (%Time%) 4 iterations Loss: 0.400520384311676 Improvement: 0.1718
[2] (%Time%) 5 iterations Loss: 0.332601189613342 Improvement: 0.09382
[2] (%Time%) 6 iterations Loss: 0.281388521194458 Improvement: 0.06186
[2] (%Time%) 7 iterations Loss: 0.237996473908424 Improvement: 0.04801
[2] (%Time%) 8 iterations Loss: 0.212298363447189 Improvement: 0.03128
[2] (%Time%) 9 iterations Loss: 0.199792444705963 Improvement: 0.0172
[2] (%Time%) 10 iterations Loss: 0.194789424538612 Improvement: 0.008052
[2] (%Time%) 11 iterations Loss: 0.193230450153351 Improvement: 0.003182
[2] (%Time%) 12 iterations Loss: 0.192447692155838 Improvement: 0.001383
[2] (%Time%) 13 iterations Loss: 0.189304739236832 Improvement: 0.002703
[2] (%Time%) 14 iterations Loss: 0.187662661075592 Improvement: 0.001907
[2] (%Time%) 15 iterations Loss: 0.185374572873116 Improvement: 0.002193
[2] (%Time%) 16 iterations Loss: 0.18364554643631 Improvement: 0.001845
[2] (%Time%) 17 iterations Loss: 0.180794909596443 Improvement: 0.002599
[2] (%Time%) 18 iterations Loss: 0.178908497095108 Improvement: 0.002065
[2] (%Time%) 19 iterations Loss: 0.175620675086975 Improvement: 0.002982
[2] (%Time%) 20 iterations Loss: 0.174758642911911 Improvement: 0.001392
[2] (%Time%) 21 iterations Loss: 0.173962101340294 Improvement: 0.0009454
[2] 'LBFGS Optimizer' finished in %Time%.
[3] 'LBFGS data prep #2' started.
[3] 'LBFGS data prep #2' finished in %Time%.
[4] 'LBFGS Optimizer #2' started.
[4] (%Time%) 0 iterations Loss: 1.0986123085022
[4] (%Time%) 1 iterations Loss: 1.05856335163116 Improvement: 0.04005
[4] (%Time%) 2 iterations Loss: 1.00281620025635 Improvement: 0.05261
[4] (%Time%) 3 iterations Loss: 0.97780430316925 Improvement: 0.03158
[4] (%Time%) 4 iterations Loss: 0.752716302871704 Improvement: 0.1773
[4] (%Time%) 5 iterations Loss: 0.542387366294861 Improvement: 0.2021
[4] (%Time%) 6 iterations Loss: 0.443084180355072 Improvement: 0.125
[4] (%Time%) 7 iterations Loss: 0.343867212533951 Improvement: 0.1057
[4] (%Time%) 8 iterations Loss: 0.284590691328049 Improvement: 0.07087
[4] (%Time%) 9 iterations Loss: 0.254261910915375 Improvement: 0.04046
[4] (%Time%) 10 iterations Loss: 0.224356189370155 Improvement: 0.03255
[4] (%Time%) 11 iterations Loss: 0.215291574597359 Improvement: 0.01493
[4] (%Time%) 12 iterations Loss: 0.212821274995804 Improvement: 0.005586
[4] (%Time%) 13 iterations Loss: 0.212086588144302 Improvement: 0.001948
[4] (%Time%) 14 iterations Loss: 0.21061946451664 Improvement: 0.001587
[4] (%Time%) 15 iterations Loss: 0.209799557924271 Improvement: 0.001012
[4] (%Time%) 16 iterations Loss: 0.209267094731331 Improvement: 0.0006523
[4] 'LBFGS Optimizer #2' finished in %Time%.

@@ -0,0 +1,4 @@
MulticlassLogisticRegression
Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /l2 /l1 /ot /nt /nn Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings
0.966928 0.965873 0.12771 88.24678 0.1 0.001 0.001 1 + MulticlassLogisticRegression %Data% %Output% 99 0 0 maml.exe CV tr=MulticlassLogisticRegression{l1=0.001 l2=0.1 ot=1e-3 nt=1 nn=+} threads=- norm=No dout=%Output% data=%Data% seed=1 /l2:0.1;/l1:0.001;/ot:0.001;/nt:1;/nn:+
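For reference, the first four numeric columns in this summary row are the cross-validation means reported under OVERALL RESULTS in the log above, and the /l2 /l1 /ot /nt /nn columns echo the trainer settings from the command line.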

@@ -0,0 +1,151 @@
Instance Label Assigned Log-loss #1 Score #2 Score #3 Score #1 Class #2 Class #3 Class
5 0 0 0.0035057053190431496 0.996500432 0.00349906948 3.13832377E-10 0 1 2
6 0 0 0.0051384036034053185 0.9948748 0.00512487441 2.22647167E-09 0 1 2
8 0 0 0.023597698228469945 0.97667855 0.0233211145 2.72825922E-08 0 1 2
9 0 0 0.022962599466045772 0.977299035 0.0227003787 4.499484E-09 0 1 2
10 0 0 0.0046729979679451441 0.9953379 0.004661741 1.26637076E-10 0 1 2
11 0 0 0.0091857704417590124 0.9908563 0.009143643 3.08492987E-09 0 1 2
18 0 0 0.0065021446457294635 0.993518949 0.00648188451 1.99731842E-10 0 1 2
20 0 0 0.019374830182594178 0.980811656 0.0191886947 1.76858894E-09 0 1 2
21 0 0 0.0035476956060140506 0.9964586 0.00354147726 5.015228E-10 0 1 2
25 0 0 0.042900092641784626 0.9580071 0.04199198 1.61448348E-08 0 1 2
28 0 0 0.0089054287695274903 0.9911341 0.008866515 4.88396434E-10 0 1 2
31 0 0 0.013042435754463243 0.987042248 0.0129569778 1.33878E-09 0 1 2
32 0 0 0.00098419922257654145 0.9990163 0.0009850509 1.554676E-11 0 1 2
35 0 0 0.0098781498492706357 0.9901705 0.009829475 6.490435E-10 0 1 2
37 0 0 0.022962599466045772 0.977299035 0.0227003787 4.499484E-09 0 1 2
40 0 0 0.0043144180137515368 0.9956949 0.004305292 3.673708E-10 0 1 2
41 0 0 0.15568350462586372 0.85583 0.144169211 3.76882554E-07 0 1 2
44 0 0 0.0055828075458514027 0.994432747 0.005567818 2.75362888E-09 0 1 2
45 0 0 0.024173723400995727 0.9761161 0.02388395 1.18244365E-08 0 1 2
46 0 0 0.0030727459326603834 0.99693197 0.0030678818 2.29107275E-10 0 1 2
48 0 0 0.0042629377095657972 0.995746136 0.004253787 1.522038E-10 0 1 2
50 1 1 0.024013506746705774 0.9762725 0.0144919865 0.009234835 1 0 2
51 1 1 0.072395347498337356 0.9301631 0.035736762 0.03410162 1 0 2
52 1 1 0.051792165373778974 0.9495262 0.0431178622 0.00735404948 1 2 0
54 1 1 0.082283007087723056 0.921011269 0.07229979 0.006690142 1 2 0
56 1 1 0.15273669550977986 0.8583557 0.1074395 0.03420409 1 2 0
60 1 1 0.063400291298347761 0.9385677 0.045856636 0.0155749973 1 2 0
63 1 1 0.1825828261589445 0.833115637 0.156787589 0.010097893 1 2 0
64 1 1 0.16742896518424877 0.8458367 0.145771012 0.008392983 1 0 2
66 1 1 0.41035083606201167 0.663417459 0.309818625 0.02676427 1 2 0
68 1 1 0.30088424491217947 0.740163445 0.258734822 0.00110106682 1 2 0
69 1 1 0.04777336113279694 0.9533498 0.0237986427 0.02285125 1 2 0
70 1 2 0.91118618706322396 0.5845066 0.402047038 0.0134477587 2 1 0
71 1 1 0.045959452018421797 0.9550807 0.0331809036 0.0117382742 1 0 2
72 1 1 0.50734715339493097 0.6020907 0.396878272 0.00103087048 1 2 0
73 1 1 0.10112764949378679 0.903817654 0.08841138 0.00776978023 1 2 0
74 1 1 0.033042902390674173 0.967497051 0.0197654 0.01273903 1 0 2
76 1 1 0.048774702844104172 0.9523957 0.04408047 0.00352243264 1 2 0
77 1 1 0.2695808504765218 0.763699532 0.232171819 0.0041304836 1 2 0
79 1 1 0.068614938770302072 0.933686137 0.06392762 0.00238591246 1 0 2
82 1 1 0.053632731586794685 0.947780132 0.03757136 0.0146492831 1 0 2
88 1 1 0.13018795642085101 0.8779304 0.07867057 0.0433993 1 0 2
90 1 1 0.23196605346812185 0.792973042 0.196191877 0.0108351735 1 2 0
91 1 1 0.12409213246584563 0.883298457 0.098272115 0.01842791 1 2 0
92 1 1 0.048004153781509205 0.9531298 0.02491984 0.0219512414 1 2 0
93 1 1 0.080398556071786337 0.9227485 0.06396286 0.0132876914 1 0 2
95 1 1 0.097486524415444684 0.907114565 0.06073187 0.0321540236 1 0 2
96 1 1 0.10523226793183746 0.90011543 0.0571433567 0.0427421071 1 2 0
97 1 1 0.046310433607912968 0.954745531 0.0234447 0.02180964 1 0 2
98 1 1 0.20922658413289458 0.8112114 0.185506433 0.00328178448 1 0 2
99 1 1 0.08973693948509831 0.914171636 0.0482413843 0.0375866257 1 2 0
100 2 2 0.0019302508084448155 0.9980716 0.00192404329 5.731013E-06 2 1 0
102 2 2 0.074935939071374899 0.9278029 0.07215074 4.48439678E-05 2 1 0
104 2 2 0.013984677144163373 0.986112654 0.0138699533 1.8276387E-05 2 1 0
105 2 2 0.025325791981913353 0.9749922 0.0250045415 2.42776559E-06 2 1 0
106 2 2 0.057338702586033331 0.9442742 0.05496859 0.0007578 2 1 0
108 2 2 0.05276486712560681 0.948603034 0.05138688 1.01106552E-05 2 1 0
109 2 2 0.026607542399492295 0.9737433 0.0261797551 7.822918E-05 2 1 0
111 2 2 0.12270355663525476 0.884525836 0.115313262 0.000161169621 2 1 0
112 2 2 0.13206483189725685 0.8762842 0.123490989 0.0002246642 2 1 0
113 2 2 0.025638599667597892 0.9746873 0.0252530053 6.150582E-05 2 1 0
115 2 2 0.057498919511760234 0.9441229 0.0554439 0.00043176004 2 1 0
117 2 2 0.05953979497139298 0.942198038 0.05773727 6.540683E-05 2 1 0
120 2 2 0.054813408680433579 0.9466618 0.0532217249 0.000117798671 2 1 0
121 2 2 0.046131151077141996 0.9549167 0.04467517 0.000408185384 2 1 0
122 2 2 0.02511979345068182 0.9751931 0.0248055067 9.05202E-07 2 1 0
123 2 2 0.48315052810711973 0.616836965 0.381864876 0.00129971188 2 1 0
125 2 2 0.29876712456560445 0.7417321 0.2580291 0.0002381928 2 1 0
128 2 2 0.023098004562619914 0.9771667 0.0228099246 2.47050866E-05 2 1 0
129 2 1 0.76517106250260403 0.534373939 0.465254337 0.000369829067 1 2 0
131 2 2 0.53372197087197515 0.5864183 0.412872344 0.0007079753 2 1 0
132 2 2 0.015354128200897153 0.984763145 0.0152192824 1.64837747E-05 2 1 0
133 2 1 0.80958176603331877 0.55317384 0.44504416 0.001781164 1 2 0
137 2 2 0.1756400740881939 0.8389199 0.160485491 0.000594753 2 1 0
138 2 2 0.50333783990086933 0.604509532 0.38951236 0.00597788766 2 1 0
141 2 2 0.33763120502763588 0.713458359 0.285059243 0.00148137042 2 1 0
144 2 2 0.016765581813362788 0.9833742 0.01656379 6.213109E-05 2 1 0
145 2 2 0.12807846333012343 0.879784346 0.119780183 0.000434682646 2 1 0
147 2 2 0.24026111051541665 0.7864225 0.212648481 0.000927580346 2 1 0
0 0 0 0.0062236937498732119 0.993795633 0.00620483747 7.944746E-09 0 1 2
1 0 0 0.029022202834280027 0.9713949 0.0286041629 8.4053454E-08 0 1 2
2 0 0 0.010818497584619778 0.9892398 0.0107597355 3.879258E-08 0 1 2
3 0 0 0.029783231758986596 0.9706559 0.0293443277 3.03772538E-07 0 1 2
4 0 0 0.0045280297264245848 0.9954822 0.00451772846 7.630325E-09 0 1 2
7 0 0 0.012060178236157727 0.988012254 0.0119871311 3.107532E-08 0 1 2
12 0 0 0.027177832295976975 0.973188162 0.0268125031 7.716931E-08 0 1 2
13 0 0 0.0090827909291773482 0.990958333 0.009040892 4.44001422E-08 0 1 2
14 0 0 0.0006675563690858086 0.999332666 0.000667317654 3.80369833E-11 0 1 2
15 0 0 0.00062562720151946953 0.999374568 0.000626611931 2.342793E-10 0 1 2
16 0 0 0.0014362511566290306 0.9985648 0.00143428252 8.3973567E-10 0 1 2
17 0 0 0.0066089387166241515 0.993412852 0.00658752676 1.18178756E-08 0 1 2
19 0 0 0.0037040681088442817 0.9963028 0.00369813736 8.356012E-09 0 1 2
22 0 0 0.0010566928040176419 0.998943865 0.00105617661 1.56404645E-09 0 1 2
23 0 0 0.040098206611993717 0.9606951 0.03930444 4.272916E-07 0 1 2
24 0 0 0.048931112264951795 0.9522467 0.0477520749 9.429691E-07 0 1 2
26 0 0 0.019386315917387653 0.9808004 0.0191995315 1.38564857E-07 0 1 2
27 0 0 0.0089473457087430909 0.991092563 0.008906771 1.17854837E-08 0 1 2
29 0 0 0.031331998705965494 0.969153762 0.0308468789 3.17202932E-07 0 1 2
30 0 0 0.042858096825121733 0.95804733 0.0419520326 3.27043E-07 0 1 2
33 0 0 0.00071718178074236128 0.9992831 0.000716740265 1.96193978E-10 0 1 2
34 0 0 0.028594984112412413 0.97181 0.0281891245 8.059172E-08 0 1 2
36 0 0 0.0044670188114691546 0.995542943 0.004456434 1.134523E-09 0 1 2
38 0 0 0.019752530847094055 0.9804413 0.0195590239 1.97238663E-07 0 1 2
39 0 0 0.012134565229001918 0.987938762 0.0120618762 2.27859385E-08 0 1 2
42 0 0 0.010616309031417871 0.989439845 0.0105606038 9.83971E-08 0 1 2
43 0 0 0.016023658570775534 0.984104037 0.0158948544 2.164595E-07 0 1 2
47 0 0 0.015340812375050044 0.984776258 0.015224508 1.068231E-07 0 1 2
49 0 0 0.011529916484198184 0.9885363 0.0114635359 2.180063E-08 0 1 2
53 1 1 0.18975446513423647 0.8271622 0.172696114 0.0001419994 1 2 0
55 1 1 0.42001727411790701 0.65703547 0.342875183 8.88562E-05 1 2 0
57 1 1 0.044797903294000629 0.9561907 0.0403893776 0.003419859 1 2 0
58 1 1 0.0405044862851279 0.960304856 0.0395791 0.000117295334 1 2 0
59 1 1 0.37561798710727545 0.6868647 0.312571555 0.000565329567 1 2 0
61 1 1 0.16285702211594277 0.84971267 0.149735659 0.0005502151 1 2 0
62 1 1 0.016239180682403528 0.983891964 0.01596498 0.0001434301 1 2 0
65 1 1 0.019577034858477668 0.980613351 0.0189568941 0.00042852998 1 2 0
67 1 1 0.035419443989176395 0.9652005 0.0343228355 0.000475778332 1 2 0
75 1 1 0.027538273321890076 0.972837448 0.0268495046 0.000312928983 1 2 0
78 1 1 0.32294844088002383 0.7240112 0.27587077 0.000116450035 1 2 0
80 1 1 0.050098414656633554 0.9511358 0.0483473167 0.0005151696 1 2 0
81 1 1 0.026061987463741595 0.9742747 0.0249239281 0.0008016538 1 2 0
83 1 2 1.740543377756486 0.824572563 0.175425053 1.65982908E-06 2 1 0
84 1 2 1.2373199070152263 0.709771931 0.290160835 6.62596649E-05 2 1 0
85 1 1 0.36380768156103438 0.695024848 0.304472446 0.0005026918 1 2 0
86 1 1 0.074546831983721043 0.928164 0.0717040449 0.000130506931 1 2 0
87 1 1 0.065021514855800697 0.9370473 0.06291683 3.654533E-05 1 2 0
89 1 1 0.17665865765569469 0.8380658 0.161666483 0.000268906821 1 2 0
94 1 1 0.23230360596017513 0.7927054 0.207063466 0.0002309137 1 2 0
101 2 2 0.040252706237034482 0.9605467 0.03945323 3.15532048E-07 2 1 0
103 2 2 0.051494602671196522 0.9498088 0.05019172 1.28977433E-07 2 1 0
107 2 2 0.10300066689194605 0.9021264 0.09787331 1.9281309E-08 2 1 0
110 2 2 0.28325270161722177 0.7533294 0.246662438 8.488617E-06 2 1 0
114 2 2 0.0078811449325194051 0.99214983 0.007851987 6.352668E-08 2 1 0
116 2 2 0.13699725408993954 0.8719726 0.128025874 6.352816E-07 2 1 0
118 2 2 0.0077514487119431756 0.9922785 0.0077198213 5.014996E-11 2 1 0
119 2 2 0.29779051990653688 0.742456853 0.2575433 7.750907E-07 2 1 0
124 2 2 0.054756176970663442 0.946715951 0.05328287 2.7218934E-07 2 1 0
126 2 2 0.47350688219510623 0.6228143 0.3771728 1.24984781E-05 2 1 0
127 2 2 0.29028318177047763 0.7480517 0.251935542 1.09781113E-05 2 1 0
130 2 2 0.18683622247656881 0.8295796 0.1704187 4.700676E-08 2 1 0
134 2 2 0.0917015399878683 0.9123774 0.08762169 1.1354247E-07 2 1 0
135 2 2 0.13898086338633753 0.8702447 0.129756063 5.158962E-08 2 1 0
136 2 2 0.0084762571159262869 0.991559565 0.008440904 7.217419E-08 2 1 0
139 2 2 0.2463226552303073 0.78167 0.21833 1.72434341E-06 2 1 0
140 2 2 0.026458747425806992 0.9738882 0.0261124317 8.52053148E-08 2 1 0
142 2 2 0.040252706237034482 0.9605467 0.03945323 3.15532048E-07 2 1 0
143 2 2 0.01863096927099336 0.9815415 0.0184568968 2.96961424E-08 2 1 0
146 2 2 0.23264315285446821 0.7924363 0.207562491 1.23098891E-06 2 1 0
148 2 2 0.017330163598589021 0.98281914 0.017179586 3.21201668E-07 2 1 0
149 2 2 0.085189828468119011 0.918337941 0.0816599354 1.76131016E-06 2 1 0
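A note on reading the per-instance files: the Log-loss column is the negative natural log of the probability assigned to the instance's true class. For instance 5 above, the true class 0 is ranked #1 with score 0.996500432, and -ln(0.996500432) = 0.0035057, which is exactly the value in the Log-loss column; for misclassified instances (e.g. instance 70) the formula is the same, just applied to the lower-ranked true class.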

@@ -0,0 +1,65 @@
maml.exe TrainTest test=%Data% tr=MulticlassLogisticRegression{l1=0.001 l2=0.1 ot=1e-3 nt=1 nn=+} norm=No dout=%Output% data=%Data% out=%Output% seed=1
Not adding a normalizer.
Beginning optimization
num vars: 15
improvement criterion: Mean Improvement
L1 regularization selected 13 of 15 weights.
Not training a calibrator because it is not needed.
Confusion table
||========================
PREDICTED || 0 | 1 | 2 | Recall
TRUTH ||========================
0 || 50 | 0 | 0 | 1.0000
1 || 0 | 48 | 2 | 0.9600
2 || 0 | 1 | 49 | 0.9800
||========================
Precision ||1.0000 |0.9796 |0.9608 |
Accuracy(micro-avg): 0.980000
Accuracy(macro-avg): 0.980000
Log-loss: 0.095534
Log-loss reduction: 91.304142
OVERALL RESULTS
---------------------------------------
Accuracy(micro-avg): 0.980000 (0.0000)
Accuracy(macro-avg): 0.980000 (0.0000)
Log-loss: 0.095534 (0.0000)
Log-loss reduction: 91.304142 (0.0000)
---------------------------------------
Physical memory usage(MB): %Number%
Virtual memory usage(MB): %Number%
%DateTime% Time elapsed(s): %Number%
--- Progress log ---
[1] 'LBFGS data prep' started.
[1] 'LBFGS data prep' finished in %Time%.
[2] 'LBFGS Optimizer' started.
[2] (%Time%) 0 iterations Loss: 1.0986123085022
[2] (%Time%) 1 iterations Loss: 1.06389963626862 Improvement: 0.03471
[2] (%Time%) 2 iterations Loss: 1.01654124259949 Improvement: 0.04483
[2] (%Time%) 3 iterations Loss: 0.944314062595367 Improvement: 0.0657
[2] (%Time%) 4 iterations Loss: 0.668209552764893 Improvement: 0.2241
[2] (%Time%) 5 iterations Loss: 0.553279459476471 Improvement: 0.1421
[2] (%Time%) 6 iterations Loss: 0.427209556102753 Improvement: 0.1301
[2] (%Time%) 7 iterations Loss: 0.33543187379837 Improvement: 0.1014
[2] (%Time%) 8 iterations Loss: 0.271388441324234 Improvement: 0.07337
[2] (%Time%) 9 iterations Loss: 0.218755051493645 Improvement: 0.05782
[2] (%Time%) 10 iterations Loss: 0.192830204963684 Improvement: 0.0339
[2] (%Time%) 11 iterations Loss: 0.184821993112564 Improvement: 0.01448
[2] (%Time%) 12 iterations Loss: 0.182577073574066 Improvement: 0.005304
[2] (%Time%) 13 iterations Loss: 0.180941790342331 Improvement: 0.002552
[2] (%Time%) 14 iterations Loss: 0.178911954164505 Improvement: 0.00216
[2] (%Time%) 15 iterations Loss: 0.171350136399269 Improvement: 0.006211
[2] (%Time%) 16 iterations Loss: 0.157612159848213 Improvement: 0.01186
[2] (%Time%) 17 iterations Loss: 0.15358293056488 Improvement: 0.005986
[2] (%Time%) 18 iterations Loss: 0.151476576924324 Improvement: 0.003076
[2] (%Time%) 19 iterations Loss: 0.146950766444206 Improvement: 0.004163
[2] (%Time%) 20 iterations Loss: 0.143808200955391 Improvement: 0.003398
[2] (%Time%) 21 iterations Loss: 0.141508430242538 Improvement: 0.002574
[2] (%Time%) 22 iterations Loss: 0.140696823596954 Improvement: 0.001252
[2] (%Time%) 23 iterations Loss: 0.140071913599968 Improvement: 0.0007818
[2] 'LBFGS Optimizer' finished in %Time%.
[3] 'Saving model' started.
[3] 'Saving model' finished in %Time%.

@@ -0,0 +1,4 @@
MulticlassLogisticRegression
Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /l2 /l1 /ot /nt /nn Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings
0.98 0.98 0.095534 91.30415 0.1 0.001 0.001 1 + MulticlassLogisticRegression %Data% %Data% %Output% 99 0 0 maml.exe TrainTest test=%Data% tr=MulticlassLogisticRegression{l1=0.001 l2=0.1 ot=1e-3 nt=1 nn=+} norm=No dout=%Output% data=%Data% out=%Output% seed=1 /l2:0.1;/l1:0.001;/ot:0.001;/nt:1;/nn:+

@@ -0,0 +1,151 @@
Instance Label Assigned Log-loss #1 Score #2 Score #3 Score #1 Class #2 Class #3 Class
0 0 0 0.0055084268363971593 0.9945067 0.00549493777 5.254127E-11 0 1 2
1 0 0 0.023817056572626422 0.976464331 0.0235365033 1.06045284E-09 0 1 2
2 0 0 0.0093231130112501059 0.9907202 0.0092794355 3.434876E-10 0 1 2
3 0 0 0.023443248531486313 0.9768294 0.02317019 3.03269165E-09 0 1 2
4 0 0 0.0040359770848233112 0.995972157 0.004027704 4.241018E-11 0 1 2
5 0 0 0.0045204256114966734 0.9954898 0.00451145973 6.890591E-11 0 1 2
6 0 0 0.0069789663841046852 0.99304533 0.006953795 5.53306345E-10 0 1 2
7 0 0 0.010097529396150412 0.9899533 0.0100480942 2.30156255E-10 0 1 2
8 0 0 0.03009602430401729 0.970352352 0.0296479575 6.92419366E-09 0 1 2
9 0 0 0.024332072472112821 0.975961566 0.02403856 8.20308266E-10 0 1 2
10 0 0 0.0043335142888201311 0.995675862 0.00432459963 1.73350657E-11 0 1 2
11 0 0 0.013555556014923508 0.9865359 0.013463337 8.13516365E-10 0 1 2
12 0 0 0.023529044193244086 0.9767456 0.02325533 8.72516E-10 0 1 2
13 0 0 0.008501023573343704 0.991535 0.008465137 4.216742E-10 0 1 2
14 0 0 0.0007095469417312554 0.9992907 0.0007089279 1.54630811E-13 0 1 2
15 0 0 0.00055310522407816078 0.999447048 0.0005532046 6.156226E-13 0 1 2
16 0 0 0.0012550477831004697 0.998745739 0.00125323888 4.032167E-12 0 1 2
17 0 0 0.0055084268363971593 0.9945067 0.00549389 8.433323E-11 0 1 2
18 0 0 0.0063285986099884577 0.9936914 0.00630976632 3.05021945E-11 0 1 2
19 0 0 0.003093731767670937 0.996911049 0.00309031014 3.913841E-11 0 1 2
20 0 0 0.020076735361690724 0.98012346 0.0198751818 3.10552029E-10 0 1 2
21 0 0 0.0041732725311333704 0.9958354 0.004164563 1.02791969E-10 0 1 2
22 0 0 0.0010643302939017342 0.998936236 0.0010639854 7.537851E-12 0 1 2
23 0 0 0.025999097964058838 0.974335968 0.025664188 4.81227547E-09 0 1 2
24 0 0 0.035129552664285101 0.9654803 0.034519136 6.708087E-09 0 1 2
25 0 0 0.045339558624474148 0.9556729 0.04432742 3.25503069E-09 0 1 2
26 0 0 0.013889300978196019 0.9862067 0.013794262 1.202008E-09 0 1 2
27 0 0 0.0076827928162563506 0.992346644 0.00765367 8.0804E-11 0 1 2
28 0 0 0.0075225540000182761 0.99250567 0.00749257533 6.505713E-11 0 1 2
29 0 0 0.024242421711841197 0.976049066 0.023950737 2.85124413E-09 0 1 2
30 0 0 0.032976122572028026 0.967561662 0.0324392952 3.50680263E-09 0 1 2
31 0 0 0.010608658479474184 0.9894474 0.0105537577 1.95062647E-10 0 1 2
32 0 0 0.0012779052922244011 0.9987229 0.00127653161 2.62278025E-12 0 1 2
33 0 0 0.00071336435395046255 0.9992869 0.0007127473 5.49134739E-13 0 1 2
34 0 0 0.024332072472112821 0.975961566 0.02403856 8.20308266E-10 0 1 2
35 0 0 0.0070286058750609979 0.992996037 0.007004459 7.352014E-11 0 1 2
36 0 0 0.004203798491639577 0.995805 0.0041957614 8.509567E-12 0 1 2
37 0 0 0.024332072472112821 0.975961566 0.02403856 8.20308266E-10 0 1 2
38 0 0 0.016292553507197185 0.983839452 0.0161602274 2.10622053E-09 0 1 2
39 0 0 0.01022337528087006 0.9898287 0.0101731718 1.74306264E-10 0 1 2
40 0 0 0.0039481873055618134 0.9960596 0.003941884 5.480231E-11 0 1 2
41 0 0 0.12700460629393751 0.8807296 0.11926952 7.253493E-08 0 1 2
42 0 0 0.0089816864331438984 0.9910585 0.008941018 7.907266E-10 0 1 2
43 0 0 0.010307321484771452 0.9897456 0.0102558751 1.89728877E-09 0 1 2
44 0 0 0.011131259446131789 0.988930464 0.0110695921 1.06843367E-09 0 1 2
45 0 0 0.023521416258048912 0.976753056 0.023246726 2.24788366E-09 0 1 2
46 0 0 0.0042648532082970193 0.9957442 0.004256002 4.955993E-11 0 1 2
47 0 0 0.012670520226440413 0.9874094 0.0125911683 9.19701E-10 0 1 2
48 0 0 0.0042801174699289152 0.995729 0.004271112 2.28877854E-11 0 1 2
49 0 0 0.0098857948144395781 0.9901629 0.009837127 1.8531357E-10 0 1 2
50 1 1 0.0090942192070797481 0.990947 0.00760637224 0.00144599774 1 2 0
51 1 1 0.034664173472457219 0.965929747 0.03118425 0.00288774725 1 2 0
52 1 1 0.043201456581903337 0.957718432 0.04173073 0.000551942037 1 2 0
53 1 1 0.13167189090230788 0.8766286 0.1223861 0.0009844311 1 2 0
54 1 1 0.075168589261662158 0.9275871 0.07181354 0.0005993034 1 2 0
55 1 1 0.18945690492401104 0.8274084 0.171777591 0.000814124243 1 2 0
56 1 1 0.11911771398067193 0.8877033 0.110390045 0.00190834212 1 2 0
57 1 1 0.025508903449772993 0.9748137 0.0150558352 0.0101304185 1 0 2
58 1 1 0.019184089039045001 0.980998755 0.0181579981 0.00084428 1 2 0
59 1 1 0.15901943723044118 0.8529798 0.142475456 0.00454534357 1 2 0
60 1 1 0.038457888295285382 0.9622722 0.0353976376 0.00232988712 1 2 0
61 1 1 0.065551772849485893 0.936550558 0.05916038 0.00428870367 1 2 0
62 1 1 0.010322557813937764 0.989730537 0.009494745 0.0007733656 1 2 0
63 1 1 0.17222977582877841 0.8417857 0.15765363 0.0005595186 1 2 0
64 1 1 0.030574768518940053 0.9698879 0.02344595 0.00666603027 1 0 2
65 1 1 0.0098151864535556514 0.9902328 0.00685487129 0.002912695 1 2 0
66 1 1 0.39688494987183254 0.6724114 0.326369762 0.001220205 1 2 0
67 1 1 0.012123102152506331 0.9879501 0.009482692 0.00256814132 1 2 0
68 1 1 0.33736423637406737 0.713648856 0.286240429 0.00010934045 1 2 0
69 1 1 0.021182995555155511 0.9790398 0.018236788 0.00272419979 1 2 0
70 1 2 1.0422287174318141 0.6469034 0.3526678 0.000428632979 2 1 0
71 1 1 0.013839742810404523 0.9862556 0.009148754 0.0045960024 1 2 0
72 1 1 0.58567805070236145 0.556728244 0.443214417 5.730098E-05 1 2 0
73 1 1 0.084991433828601512 0.918520153 0.08102878 0.000452291657 1 2 0
74 1 1 0.012519861356165776 0.9875582 0.0101578543 0.00228302833 1 2 0
75 1 1 0.013351424023883354 0.9867373 0.0110855019 0.002178057 1 2 0
76 1 1 0.043354008859284664 0.957572341 0.04211245 0.00031350847 1 2 0
77 1 1 0.30744937627516217 0.7353201 0.2644465 0.000233673083 1 2 0
78 1 1 0.17038347938641368 0.843341351 0.155579925 0.001078887 1 2 0
79 1 1 0.014785760729894784 0.985323 0.0131924609 0.00148394948 1 0 2
80 1 1 0.023090379914646061 0.977174163 0.0200124662 0.002812842 1 2 0
81 1 1 0.012531268625402953 0.9875469 0.008534899 0.00391809177 1 2 0
82 1 1 0.016162852194115184 0.983967066 0.0111717647 0.00486272341 1 2 0
83 1 2 1.7073783847266821 0.8186427 0.181340575 1.856788E-05 2 1 0
84 1 1 0.62556457936332965 0.5349593 0.4640455 0.000995344 1 2 0
85 1 1 0.11158751479919021 0.8944131 0.10046719 0.00511890557 1 2 0
86 1 1 0.036201489056084446 0.964445949 0.0344701856 0.00108303118 1 2 0
87 1 1 0.063205727958762947 0.9387503 0.06098643 0.000264116941 1 2 0
88 1 1 0.045185081816751668 0.95582056 0.03791894 0.006261989 1 2 0
89 1 1 0.092376681069536226 0.911761642 0.08637355 0.00186417461 1 2 0
90 1 1 0.20824432396488596 0.8120086 0.187371209 0.0006201498 1 2 0
91 1 1 0.10052487189251949 0.9043626 0.09452107 0.00111814367 1 2 0
92 1 1 0.022739404059431985 0.9775172 0.0198875815 0.002597051 1 2 0
93 1 1 0.020528802753463863 0.9796805 0.01107447 0.00924518052 1 0 2
94 1 1 0.10138322927163509 0.9035867 0.09466396 0.00174795289 1 2 0
95 1 1 0.031475061991293116 0.9690151 0.0264395941 0.00454706931 1 2 0
96 1 1 0.055107111138908711 0.9463838 0.05032458 0.00329180225 1 2 0
97 1 1 0.020542126999945221 0.9796674 0.01800886 0.00232210569 1 2 0
98 1 1 0.054157234957735327 0.947283149 0.0504526943 0.00226498954 1 0 2
99 1 1 0.046203621795273951 0.9548475 0.0417587571 0.00339318742 1 2 0
100 2 2 0.00071718178074236128 0.9992831 0.000718441559 2.38225937E-08 2 1 0
101 2 2 0.0295181133625457 0.9709133 0.02908533 3.05521712E-06 2 1 0
102 2 2 0.03940963957571933 0.9613568 0.0386419035 6.5010255E-07 2 1 0
103 2 2 0.042282092241463093 0.9585993 0.0414001346 1.49368452E-06 2 1 0
104 2 2 0.0064544509780086849 0.993566334 0.0064327796 1.60839249E-07 2 1 0
105 2 2 0.01117706708299614 0.988885164 0.0111167124 1.85687732E-08 2 1 0
106 2 2 0.038978580718911832 0.9617713 0.03821299 1.69269952E-05 2 1 0
107 2 2 0.050289127935841414 0.950954437 0.049045492 1.65044582E-07 2 1 0
108 2 2 0.028732319565020852 0.9716765 0.0283248872 1.54309248E-07 2 1 0
109 2 2 0.011814131250558819 0.9882554 0.01174459 6.20176E-07 2 1 0
110 2 2 0.31888964037460732 0.7269558 0.2729284 0.000117515236 2 1 0
111 2 2 0.075557681270826682 0.927226245 0.07277129 3.731746E-06 2 1 0
112 2 2 0.076839464588257703 0.9260385 0.07395845 4.66545225E-06 2 1 0
113 2 2 0.013885493373327248 0.986210465 0.0137876067 1.11059865E-06 2 1 0
114 2 2 0.0034065386776152063 0.996599257 0.003398758 4.821891E-07 2 1 0
115 2 2 0.030685455439561936 0.969780564 0.0302113257 6.934844E-06 2 1 0
116 2 2 0.12955092298505261 0.878489852 0.121503189 7.952961E-06 2 1 0
117 2 2 0.029476184676114042 0.970954 0.0290464126 3.82003577E-07 2 1 0
118 2 2 0.00083540993102648666 0.999164939 0.0008341192 1.58499533E-10 2 1 0
119 2 2 0.18156812803858025 0.8339614 0.166032091 5.2386E-06 2 1 0
120 2 2 0.027637472531907004 0.972740948 0.0272570327 1.62744573E-06 2 1 0
121 2 2 0.027248391079925608 0.9731195 0.0268742 7.42350176E-06 2 1 0
122 2 2 0.011028260200213382 0.9890323 0.0109692747 7.2080053E-09 2 1 0
123 2 2 0.36983106060626558 0.690851033 0.309092641 5.79547559E-05 2 1 0
124 2 2 0.047893534239738376 0.953235269 0.0467611663 3.86117654E-06 2 1 0
125 2 2 0.20047380759045563 0.8183429 0.181652829 3.98381962E-06 2 1 0
126 2 2 0.4699516553461579 0.6250325 0.374836236 0.000132376663 2 1 0
127 2 2 0.37024691655472053 0.6905638 0.3092908 0.000146074992 2 1 0
128 2 2 0.011428805441366281 0.988636255 0.0113640688 3.00256971E-07 2 1 0
129 2 2 0.61292266773756554 0.541765153 0.458223 1.04854416E-05 2 1 0
130 2 2 0.07425686647063412 0.9284332 0.07156495 3.35034969E-07 2 1 0
131 2 2 0.36683655555557643 0.6929229 0.30706656 1.031481E-05 2 1 0
132 2 2 0.0071334751500833167 0.9928919 0.007109147 1.87870214E-07 2 1 0
133 2 1 0.70880889315821138 0.5077022 0.492230147 6.755029E-05 1 2 0
134 2 2 0.085922224030283809 0.9176656 0.082333684 1.23881819E-06 2 1 0
135 2 2 0.040046712040999119 0.96074456 0.0392556675 3.22532E-07 2 1 0
136 2 2 0.0066299388926063978 0.993392 0.00660762331 1.06753293E-06 2 1 0
137 2 2 0.11832431033727081 0.8884079 0.111580558 9.978711E-06 2 1 0
138 2 2 0.40202712796988099 0.6689626 0.330820858 0.000218098256 2 1 0
139 2 2 0.17507936837701382 0.8393904 0.160591438 1.860986E-05 2 1 0
140 2 2 0.01180647152915853 0.988262951 0.0117355874 7.341407E-07 2 1 0
141 2 2 0.2141532959019663 0.807224631 0.192715973 5.853019E-05 2 1 0
142 2 2 0.0295181133625457 0.9709133 0.02908533 3.05521712E-06 2 1 0
143 2 2 0.0095748664566649728 0.9904708 0.009528417 3.031272E-07 2 1 0
144 2 2 0.0073585580223861757 0.99266845 0.007331067 6.058027E-07 2 1 0
145 2 2 0.071910829528973333 0.9306139 0.06937557 1.16139881E-05 2 1 0
146 2 2 0.12055584737407964 0.8864276 0.113564476 8.48525E-06 2 1 0
147 2 2 0.15924069591370296 0.8527911 0.147182718 2.5248728E-05 2 1 0
148 2 2 0.017257693530893608 0.982890368 0.0171058215 5.317109E-06 2 1 0
149 2 2 0.10890195713815695 0.89681834 0.10315422 2.62842859E-05 2 1 0

@@ -0,0 +1,109 @@
maml.exe CV tr=MulticlassLogisticRegression{l1=0.001 l2=0.1 ot=1e-3 nt=1} threads=- norm=No dout=%Output% data=%Data% seed=1
Not adding a normalizer.
Beginning optimization
num vars: 15
improvement criterion: Mean Improvement
L1 regularization selected 15 of 15 weights.
Not training a calibrator because it is not needed.
Not adding a normalizer.
Beginning optimization
num vars: 15
improvement criterion: Mean Improvement
L1 regularization selected 15 of 15 weights.
Not training a calibrator because it is not needed.
Confusion table
||========================
PREDICTED || 0 | 1 | 2 | Recall
TRUTH ||========================
0 || 21 | 0 | 0 | 1.0000
1 || 0 | 29 | 1 | 0.9667
2 || 0 | 2 | 26 | 0.9286
||========================
Precision ||1.0000 |0.9355 |0.9630 |
Accuracy(micro-avg): 0.962025
Accuracy(macro-avg): 0.965079
Log-loss: 0.101866
Log-loss reduction: 90.633114
Confusion table
||========================
PREDICTED || 0 | 1 | 2 | Recall
TRUTH ||========================
0 || 29 | 0 | 0 | 1.0000
1 || 0 | 19 | 1 | 0.9500
2 || 0 | 0 | 22 | 1.0000
||========================
Precision ||1.0000 |1.0000 |0.9565 |
Accuracy(micro-avg): 0.985915
Accuracy(macro-avg): 0.983333
Log-loss: 0.075812
Log-loss reduction: 93.016939
OVERALL RESULTS
---------------------------------------
Accuracy(micro-avg): 0.973970 (0.0119)
Accuracy(macro-avg): 0.974206 (0.0091)
Log-loss: 0.088839 (0.0130)
Log-loss reduction: 91.825026 (1.1919)
---------------------------------------
Warning: There is no NA value for type 'Text'. The missing key value will be mapped to the default value of 'Text'
Warning: There is no NA value for type 'Text'. The missing key value will be mapped to the default value of 'Text'
Warning: There is no NA value for type 'Text'. The missing key value will be mapped to the default value of 'Text'
Warning: There is no NA value for type 'Text'. The missing key value will be mapped to the default value of 'Text'
Physical memory usage(MB): %Number%
Virtual memory usage(MB): %Number%
%DateTime% Time elapsed(s): %Number%
--- Progress log ---
[1] 'LBFGS data prep' started.
[1] 'LBFGS data prep' finished in %Time%.
[2] 'LBFGS Optimizer' started.
[2] (%Time%) 0 iterations Loss: 1.0986123085022
[2] (%Time%) 1 iterations Loss: 0.975501239299774 Improvement: 0.1231
[2] (%Time%) 2 iterations Loss: 0.828468441963196 Improvement: 0.1422
[2] (%Time%) 3 iterations Loss: 0.49238583445549 Improvement: 0.2899
[2] (%Time%) 4 iterations Loss: 0.410263001918793 Improvement: 0.1335
[2] (%Time%) 5 iterations Loss: 0.373202115297318 Improvement: 0.06109
[2] (%Time%) 6 iterations Loss: 0.326229214668274 Improvement: 0.0505
[2] (%Time%) 7 iterations Loss: 0.30860298871994 Improvement: 0.02584
[2] (%Time%) 8 iterations Loss: 0.249911725521088 Improvement: 0.05048
[2] (%Time%) 9 iterations Loss: 0.197030156850815 Improvement: 0.05228
[2] (%Time%) 10 iterations Loss: 0.183768630027771 Improvement: 0.02302
[2] (%Time%) 11 iterations Loss: 0.174268662929535 Improvement: 0.01288
[2] (%Time%) 12 iterations Loss: 0.1489098072052 Improvement: 0.02224
[2] (%Time%) 13 iterations Loss: 0.146679118275642 Improvement: 0.007233
[2] (%Time%) 14 iterations Loss: 0.127629071474075 Improvement: 0.0161
[2] (%Time%) 15 iterations Loss: 0.127402290701866 Improvement: 0.004194
[2] (%Time%) 16 iterations Loss: 0.127095967531204 Improvement: 0.001278
[2] (%Time%) 17 iterations Loss: 0.1268040984869 Improvement: 0.0005385
[2] 'LBFGS Optimizer' finished in %Time%.
[3] 'LBFGS data prep #2' started.
[3] 'LBFGS data prep #2' finished in %Time%.
[4] 'LBFGS Optimizer #2' started.
[4] (%Time%) 0 iterations Loss: 1.0986123085022
[4] (%Time%) 1 iterations Loss: 1.03655636310577 Improvement: 0.06206
[4] (%Time%) 2 iterations Loss: 1.00361847877502 Improvement: 0.03876
[4] (%Time%) 3 iterations Loss: 0.937079250812531 Improvement: 0.05993
[4] (%Time%) 4 iterations Loss: 0.819244384765625 Improvement: 0.1035
[4] (%Time%) 5 iterations Loss: 0.728321373462677 Improvement: 0.09406
[4] (%Time%) 6 iterations Loss: 0.581992864608765 Improvement: 0.1333
[4] (%Time%) 7 iterations Loss: 0.440624892711639 Improvement: 0.1393
[4] (%Time%) 8 iterations Loss: 0.368180394172668 Improvement: 0.08917
[4] (%Time%) 9 iterations Loss: 0.287548065185547 Improvement: 0.08277
[4] (%Time%) 10 iterations Loss: 0.239883854985237 Improvement: 0.05644
[4] (%Time%) 11 iterations Loss: 0.217700272798538 Improvement: 0.03075
[4] (%Time%) 12 iterations Loss: 0.206228733062744 Improvement: 0.01629
[4] (%Time%) 13 iterations Loss: 0.192829161882401 Improvement: 0.01412
[4] (%Time%) 14 iterations Loss: 0.185032933950424 Improvement: 0.009378
[4] (%Time%) 15 iterations Loss: 0.181731522083282 Improvement: 0.00482
[4] (%Time%) 16 iterations Loss: 0.168401405215263 Improvement: 0.0112
[4] (%Time%) 17 iterations Loss: 0.159209698438644 Improvement: 0.009694
[4] (%Time%) 18 iterations Loss: 0.150576055049896 Improvement: 0.008899
[4] (%Time%) 19 iterations Loss: 0.14181961119175 Improvement: 0.008792
[4] (%Time%) 20 iterations Loss: 0.135607719421387 Improvement: 0.006857
[4] (%Time%) 21 iterations Loss: 0.134872287511826 Improvement: 0.002266
[4] (%Time%) 22 iterations Loss: 0.133358553051949 Improvement: 0.001702
[4] (%Time%) 23 iterations Loss: 0.132842555642128 Improvement: 0.0008124
[4] 'LBFGS Optimizer #2' finished in %Time%.

@@ -0,0 +1,4 @@
MulticlassLogisticRegression
Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /l2 /l1 /ot /nt Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings
0.97397 0.974206 0.088839 91.82503 0.1 0.001 0.001 1 MulticlassLogisticRegression %Data% %Output% 99 0 0 maml.exe CV tr=MulticlassLogisticRegression{l1=0.001 l2=0.1 ot=1e-3 nt=1} threads=- norm=No dout=%Output% data=%Data% seed=1 /l2:0.1;/l1:0.001;/ot:0.001;/nt:1

@@ -0,0 +1,117 @@
maml.exe CV tr=MulticlassLogisticRegression{l1=0.001 l2=0.1 ot=1e-3 nt=1} threads=- norm=No dout=%Output% loader=Text{col=Label:U4[0-2]:0 col=Features:1-*} data=%Data% seed=1 xf=TreeFeat{lps=0 trainer=ftr{iter=3}} xf=copy{col=Features:Leaves}
Making per-feature arrays
Changing data from row-wise to column-wise
Processed 71 instances
Binning and forming Feature objects
Reserved memory for tree learner: 16380 bytes
Starting to train ...
Not training a calibrator because it is not needed.
Not adding a normalizer.
Beginning optimization
num vars: 39
improvement criterion: Mean Improvement
L1 regularization selected 39 of 39 weights.
Not training a calibrator because it is not needed.
Making per-feature arrays
Changing data from row-wise to column-wise
Processed 79 instances
Binning and forming Feature objects
Reserved memory for tree learner: 17472 bytes
Starting to train ...
Not training a calibrator because it is not needed.
Not adding a normalizer.
Beginning optimization
num vars: 54
improvement criterion: Mean Improvement
L1 regularization selected 54 of 54 weights.
Not training a calibrator because it is not needed.
Confusion table
||========================
PREDICTED || 0 | 1 | 2 | Recall
TRUTH ||========================
0 || 21 | 0 | 0 | 1.0000
1 || 0 | 25 | 5 | 0.8333
2 || 0 | 1 | 27 | 0.9643
||========================
Precision ||1.0000 |0.9615 |0.8438 |
Accuracy(micro-avg): 0.924051
Accuracy(macro-avg): 0.932540
Log-loss: 0.330649
Log-loss reduction: 69.595935
Confusion table
||========================
PREDICTED || 0 | 1 | 2 | Recall
TRUTH ||========================
0 || 29 | 0 | 0 | 1.0000
1 || 0 | 19 | 1 | 0.9500
2 || 0 | 2 | 20 | 0.9091
||========================
Precision ||1.0000 |0.9048 |0.9524 |
Accuracy(micro-avg): 0.957746
Accuracy(macro-avg): 0.953030
Log-loss: 0.157832
Log-loss reduction: 85.461953
OVERALL RESULTS
---------------------------------------
Accuracy(micro-avg): 0.940899 (0.0168)
Accuracy(macro-avg): 0.942785 (0.0102)
Log-loss: 0.244241 (0.0864)
Log-loss reduction: 77.528944 (7.9330)
---------------------------------------
Warning: There is no NA value for type 'Text'. The missing key value will be mapped to the default value of 'Text'
Warning: There is no NA value for type 'Text'. The missing key value will be mapped to the default value of 'Text'
Warning: There is no NA value for type 'Text'. The missing key value will be mapped to the default value of 'Text'
Warning: There is no NA value for type 'Text'. The missing key value will be mapped to the default value of 'Text'
Physical memory usage(MB): %Number%
Virtual memory usage(MB): %Number%
%DateTime% Time elapsed(s): %Number%
--- Progress log ---
[1] 'FastTree data preparation' started.
[1] 'FastTree data preparation' finished in %Time%.
[2] 'FastTree in-memory bins initialization' started.
[2] 'FastTree in-memory bins initialization' finished in %Time%.
[3] 'FastTree feature conversion' started.
[3] 'FastTree feature conversion' finished in %Time%.
[4] 'FastTree training' started.
[4] 'FastTree training' finished in %Time%.
[5] 'LBFGS data prep' started.
[5] 'LBFGS data prep' finished in %Time%.
[6] 'LBFGS Optimizer' started.
[6] (%Time%) 0 iterations Loss: 1.0986123085022
[6] (%Time%) 1 iterations Loss: 0.529107213020325 Improvement: 0.5695
[6] (%Time%) 2 iterations Loss: 0.162161201238632 Improvement: 0.4075
[6] (%Time%) 3 iterations Loss: 0.110731095075607 Improvement: 0.1362
[6] (%Time%) 4 iterations Loss: 0.082178421318531 Improvement: 0.05515
[6] (%Time%) 5 iterations Loss: 0.0707422941923141 Improvement: 0.02233
[6] (%Time%) 6 iterations Loss: 0.0665594562888145 Improvement: 0.008717
[6] (%Time%) 7 iterations Loss: 0.0660991221666336 Improvement: 0.002524
[6] (%Time%) 8 iterations Loss: 0.0654922351241112 Improvement: 0.001086
[6] (%Time%) 9 iterations Loss: 0.0654363483190536 Improvement: 0.0003135
[6] 'LBFGS Optimizer' finished in %Time%.
[7] 'FastTree data preparation #2' started.
[7] 'FastTree data preparation #2' finished in %Time%.
[8] 'FastTree in-memory bins initialization #2' started.
[8] 'FastTree in-memory bins initialization #2' finished in %Time%.
[9] 'FastTree feature conversion #2' started.
[9] 'FastTree feature conversion #2' finished in %Time%.
[10] 'FastTree training #2' started.
[10] 'FastTree training #2' finished in %Time%.
[11] 'LBFGS data prep #2' started.
[11] 'LBFGS data prep #2' finished in %Time%.
[12] 'LBFGS Optimizer #2' started.
[12] (%Time%) 0 iterations Loss: 1.0986123085022
[12] (%Time%) 1 iterations Loss: 0.607897818088531 Improvement: 0.4907
[12] (%Time%) 2 iterations Loss: 0.202578827738762 Improvement: 0.4224
[12] (%Time%) 3 iterations Loss: 0.143362611532211 Improvement: 0.1457
[12] (%Time%) 4 iterations Loss: 0.107794404029846 Improvement: 0.06277
[12] (%Time%) 5 iterations Loss: 0.0930556431412697 Improvement: 0.02671
[12] (%Time%) 6 iterations Loss: 0.088469035923481 Improvement: 0.01011
[12] (%Time%) 7 iterations Loss: 0.086934432387352 Improvement: 0.003679
[12] (%Time%) 8 iterations Loss: 0.0866307020187378 Improvement: 0.001148
[12] (%Time%) 9 iterations Loss: 0.0862946063280106 Improvement: 0.000539
[12] 'LBFGS Optimizer #2' finished in %Time%.

@@ -0,0 +1,116 @@
maml.exe CV tr=MulticlassLogisticRegression{l1=0.001 l2=0.1 ot=1e-3 nt=1} threads=- norm=No dout=%Output% loader=Text{col=Label:U4[0-2]:0 col=Features:1-*} data=%Data% seed=1 xf=TreeFeat{lps=2 trainer=ftr{iter=3}} xf=copy{col=Features:Leaves}
Making per-feature arrays
Changing data from row-wise to column-wise
Processed 71 instances
Binning and forming Feature objects
Reserved memory for tree learner: 16380 bytes
Starting to train ...
Not training a calibrator because it is not needed.
Not adding a normalizer.
Beginning optimization
num vars: 45
improvement criterion: Mean Improvement
L1 regularization selected 44 of 45 weights.
Not training a calibrator because it is not needed.
Making per-feature arrays
Changing data from row-wise to column-wise
Processed 79 instances
Binning and forming Feature objects
Reserved memory for tree learner: 17472 bytes
Starting to train ...
Not training a calibrator because it is not needed.
Not adding a normalizer.
Beginning optimization
num vars: 48
improvement criterion: Mean Improvement
L1 regularization selected 48 of 48 weights.
Not training a calibrator because it is not needed.
Confusion table
||========================
PREDICTED || 0 | 1 | 2 | Recall
TRUTH ||========================
0 || 21 | 0 | 0 | 1.0000
1 || 0 | 25 | 5 | 0.8333
2 || 0 | 1 | 27 | 0.9643
||========================
Precision ||1.0000 |0.9615 |0.8438 |
Accuracy(micro-avg): 0.924051
Accuracy(macro-avg): 0.932540
Log-loss: 0.201590
Log-loss reduction: 81.463253
Confusion table
||========================
PREDICTED || 0 | 1 | 2 | Recall
TRUTH ||========================
0 || 29 | 0 | 0 | 1.0000
1 || 0 | 19 | 1 | 0.9500
2 || 0 | 1 | 21 | 0.9545
||========================
Precision ||1.0000 |0.9500 |0.9545 |
Accuracy(micro-avg): 0.971831
Accuracy(macro-avg): 0.968182
Log-loss: 0.101915
Log-loss reduction: 90.612517
OVERALL RESULTS
---------------------------------------
Accuracy(micro-avg): 0.947941 (0.0239)
Accuracy(macro-avg): 0.950361 (0.0178)
Log-loss: 0.151753 (0.0498)
Log-loss reduction: 86.037885 (4.5746)
---------------------------------------
Warning: There is no NA value for type 'Text'. The missing key value will be mapped to the default value of 'Text'
Warning: There is no NA value for type 'Text'. The missing key value will be mapped to the default value of 'Text'
Warning: There is no NA value for type 'Text'. The missing key value will be mapped to the default value of 'Text'
Warning: There is no NA value for type 'Text'. The missing key value will be mapped to the default value of 'Text'
Physical memory usage(MB): %Number%
Virtual memory usage(MB): %Number%
%DateTime% Time elapsed(s): %Number%
--- Progress log ---
[1] 'FastTree data preparation' started.
[1] 'FastTree data preparation' finished in %Time%.
[2] 'FastTree in-memory bins initialization' started.
[2] 'FastTree in-memory bins initialization' finished in %Time%.
[3] 'FastTree feature conversion' started.
[3] 'FastTree feature conversion' finished in %Time%.
[4] 'FastTree training' started.
[4] 'FastTree training' finished in %Time%.
[5] 'LBFGS data prep' started.
[5] 'LBFGS data prep' finished in %Time%.
[6] 'LBFGS Optimizer' started.
[6] (%Time%) 0 iterations Loss: 1.0986123085022
[6] (%Time%) 1 iterations Loss: 0.556313633918762 Improvement: 0.5423
[6] (%Time%) 2 iterations Loss: 0.151027098298073 Improvement: 0.4327
[6] (%Time%) 3 iterations Loss: 0.0993023291230202 Improvement: 0.1424
[6] (%Time%) 4 iterations Loss: 0.0695240423083305 Improvement: 0.05761
[6] (%Time%) 5 iterations Loss: 0.0572926141321659 Improvement: 0.02354
[6] (%Time%) 6 iterations Loss: 0.0536528006196022 Improvement: 0.008612
[6] (%Time%) 7 iterations Loss: 0.0518658980727196 Improvement: 0.003493
[6] (%Time%) 8 iterations Loss: 0.0517856702208519 Improvement: 0.0009333
[6] 'LBFGS Optimizer' finished in %Time%.
[7] 'FastTree data preparation #2' started.
[7] 'FastTree data preparation #2' finished in %Time%.
[8] 'FastTree in-memory bins initialization #2' started.
[8] 'FastTree in-memory bins initialization #2' finished in %Time%.
[9] 'FastTree feature conversion #2' started.
[9] 'FastTree feature conversion #2' finished in %Time%.
[10] 'FastTree training #2' started.
[10] 'FastTree training #2' finished in %Time%.
[11] 'LBFGS data prep #2' started.
[11] 'LBFGS data prep #2' finished in %Time%.
[12] 'LBFGS Optimizer #2' started.
[12] (%Time%) 0 iterations Loss: 1.0986123085022
[12] (%Time%) 1 iterations Loss: 0.588071405887604 Improvement: 0.5105
[12] (%Time%) 2 iterations Loss: 0.210458397865295 Improvement: 0.4042
[12] (%Time%) 3 iterations Loss: 0.143802016973495 Improvement: 0.147
[12] (%Time%) 4 iterations Loss: 0.109668917953968 Improvement: 0.06202
[12] (%Time%) 5 iterations Loss: 0.0927119106054306 Improvement: 0.02819
[12] (%Time%) 6 iterations Loss: 0.0866884738206863 Improvement: 0.01156
[12] (%Time%) 7 iterations Loss: 0.0849770903587341 Improvement: 0.004173
[12] (%Time%) 8 iterations Loss: 0.0845689475536346 Improvement: 0.001349
[12] (%Time%) 9 iterations Loss: 0.0844891592860222 Improvement: 0.0003972
[12] 'LBFGS Optimizer #2' finished in %Time%.

@@ -0,0 +1,4 @@
MulticlassLogisticRegression
Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /l2 /l1 /ot /nt Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings
0.947941 0.950361 0.151753 86.03789 0.1 0.001 0.001 1 MulticlassLogisticRegression %Data% %Output% 99 0 0 maml.exe CV tr=MulticlassLogisticRegression{l1=0.001 l2=0.1 ot=1e-3 nt=1} threads=- norm=No dout=%Output% loader=Text{col=Label:U4[0-2]:0 col=Features:1-*} data=%Data% seed=1 xf=TreeFeat{lps=2 trainer=ftr{iter=3}} xf=copy{col=Features:Leaves} /l2:0.1;/l1:0.001;/ot:0.001;/nt:1

@@ -0,0 +1,151 @@
Instance Label Assigned Log-loss #1 Score #2 Score #3 Score #1 Class #2 Class #3 Class
5 0 0 0.0062537425593983181 0.9937658 0.00340008875 0.0028340437 0 2 1
6 0 0 0.0055627883417999026 0.994452655 0.00301888748 0.00252835476 0 2 1
8 0 0 0.0055627883417999026 0.994452655 0.00301888748 0.00252835476 0 2 1
9 0 0 0.0062537425593983181 0.9937658 0.00340008875 0.0028340437 0 2 1
10 0 0 0.0062537425593983181 0.9937658 0.00340008875 0.0028340437 0 2 1
11 0 0 0.0062537425593983181 0.9937658 0.00340008875 0.0028340437 0 2 1
18 0 0 0.0062537425593983181 0.9937658 0.00340008875 0.0028340437 0 2 1
20 0 0 0.0062537425593983181 0.9937658 0.00340008875 0.0028340437 0 2 1
21 0 0 0.0062537425593983181 0.9937658 0.00340008875 0.0028340437 0 2 1
25 0 0 0.0062537425593983181 0.9937658 0.00340008875 0.0028340437 0 2 1
28 0 0 0.0055627883417999026 0.994452655 0.00301888748 0.00252835476 0 2 1
31 0 0 0.0062537425593983181 0.9937658 0.00340008875 0.0028340437 0 2 1
32 0 0 0.0062537425593983181 0.9937658 0.00340008875 0.0028340437 0 2 1
35 0 0 0.0055627883417999026 0.994452655 0.00301888748 0.00252835476 0 2 1
37 0 0 0.0062537425593983181 0.9937658 0.00340008875 0.0028340437 0 2 1
40 0 0 0.0055627883417999026 0.994452655 0.00301888748 0.00252835476 0 2 1
41 0 0 0.0055627883417999026 0.994452655 0.00301888748 0.00252835476 0 2 1
44 0 0 0.0062537425593983181 0.9937658 0.00340008875 0.0028340437 0 2 1
45 0 0 0.0055627883417999026 0.994452655 0.00301888748 0.00252835476 0 2 1
46 0 0 0.0062537425593983181 0.9937658 0.00340008875 0.0028340437 0 2 1
48 0 0 0.0062537425593983181 0.9937658 0.00340008875 0.0028340437 0 2 1
50 1 1 0.0053598617878104484 0.9946545 0.0032052286 0.002140288 1 2 0
51 1 1 0.0053598617878104484 0.9946545 0.0032052286 0.002140288 1 2 0
52 1 2 1.357521751036989 0.72348994 0.257297635 0.0192124527 2 1 0
54 1 1 0.0053598617878104484 0.9946545 0.0032052286 0.002140288 1 2 0
56 1 1 0.0053598617878104484 0.9946545 0.0032052286 0.002140288 1 2 0
60 1 1 0.0081916071232031266 0.991841853 0.00469287625 0.003465219 1 2 0
63 1 1 0.0053598617878104484 0.9946545 0.0032052286 0.002140288 1 2 0
64 1 1 0.0081916071232031266 0.991841853 0.00469287625 0.003465219 1 2 0
66 1 1 0.0081916071232031266 0.991841853 0.00469287625 0.003465219 1 2 0
68 1 1 0.0053598617878104484 0.9946545 0.0032052286 0.002140288 1 2 0
69 1 1 0.0081916071232031266 0.991841853 0.00469287625 0.003465219 1 2 0
70 1 2 4.217452541805657 0.9806482 0.0147361364 0.004615784 2 1 0
71 1 1 0.0053598617878104484 0.9946545 0.0032052286 0.002140288 1 2 0
72 1 2 1.357521751036989 0.72348994 0.257297635 0.0192124527 2 1 0
73 1 1 0.0053598617878104484 0.9946545 0.0032052286 0.002140288 1 2 0
74 1 1 0.0053598617878104484 0.9946545 0.0032052286 0.002140288 1 2 0
76 1 2 1.357521751036989 0.72348994 0.257297635 0.0192124527 2 1 0
77 1 2 1.357521751036989 0.72348994 0.257297635 0.0192124527 2 1 0
79 1 1 0.0081916071232031266 0.991841853 0.00469287625 0.003465219 1 2 0
82 1 1 0.0081916071232031266 0.991841853 0.00469287625 0.003465219 1 2 0
88 1 1 0.0081916071232031266 0.991841853 0.00469287625 0.003465219 1 2 0
90 1 1 0.0081916071232031266 0.991841853 0.00469287625 0.003465219 1 2 0
91 1 1 0.0053598617878104484 0.9946545 0.0032052286 0.002140288 1 2 0
92 1 1 0.0081916071232031266 0.991841853 0.00469287625 0.003465219 1 2 0
93 1 1 0.0081916071232031266 0.991841853 0.00469287625 0.003465219 1 2 0
95 1 1 0.0081916071232031266 0.991841853 0.00469287625 0.003465219 1 2 0
96 1 1 0.0081916071232031266 0.991841853 0.00469287625 0.003465219 1 2 0
97 1 1 0.0053598617878104484 0.9946545 0.0032052286 0.002140288 1 2 0
98 1 1 0.0081916071232031266 0.991841853 0.00469287625 0.003465219 1 2 0
99 1 1 0.0081916071232031266 0.991841853 0.00469287625 0.003465219 1 2 0
100 2 2 0.0029477966595112797 0.997056544 0.00180420361 0.0011390592 2 0 1
102 2 2 0.0057142010115053248 0.9943021 0.003090978 0.002606992 2 0 1
104 2 2 0.0057142010115053248 0.9943021 0.003090978 0.002606992 2 0 1
105 2 2 0.0057142010115053248 0.9943021 0.003090978 0.002606992 2 0 1
106 2 1 5.361709612467159 0.991841853 0.00469287625 0.003465219 1 2 0
108 2 2 0.0057142010115053248 0.9943021 0.003090978 0.002606992 2 0 1
109 2 2 0.0057142010115053248 0.9943021 0.003090978 0.002606992 2 0 1
111 2 2 0.0029477966595112797 0.997056544 0.00180420361 0.0011390592 2 0 1
112 2 2 0.0057142010115053248 0.9943021 0.003090978 0.002606992 2 0 1
113 2 2 0.019541477421649685 0.9806482 0.0147361364 0.004615784 2 1 0
115 2 2 0.0029477966595112797 0.997056544 0.00180420361 0.0011390592 2 0 1
117 2 2 0.0057142010115053248 0.9943021 0.003090978 0.002606992 2 0 1
120 2 2 0.0057142010115053248 0.9943021 0.003090978 0.002606992 2 0 1
121 2 2 0.019541477421649685 0.9806482 0.0147361364 0.004615784 2 1 0
122 2 2 0.0057142010115053248 0.9943021 0.003090978 0.002606992 2 0 1
123 2 2 0.011587138891097372 0.988479733 0.00731784431 0.004202525 2 1 0
125 2 2 0.0057142010115053248 0.9943021 0.003090978 0.002606992 2 0 1
128 2 2 0.0029477966595112797 0.997056544 0.00180420361 0.0011390592 2 0 1
129 2 2 0.064084548067755445 0.9379257 0.0514734164 0.0106008854 2 1 0
131 2 2 0.0057142010115053248 0.9943021 0.003090978 0.002606992 2 0 1
132 2 2 0.0029477966595112797 0.997056544 0.00180420361 0.0011390592 2 0 1
133 2 2 0.32366863740744056 0.72348994 0.257297635 0.0192124527 2 1 0
137 2 2 0.0029477966595112797 0.997056544 0.00180420361 0.0011390592 2 0 1
138 2 2 0.019541477421649685 0.9806482 0.0147361364 0.004615784 2 1 0
141 2 2 0.024004226690419721 0.9762816 0.0165876672 0.00713065453 2 1 0
144 2 2 0.0057142010115053248 0.9943021 0.003090978 0.002606992 2 0 1
145 2 2 0.024004226690419721 0.9762816 0.0165876672 0.00713065453 2 1 0
147 2 2 0.024004226690419721 0.9762816 0.0165876672 0.00713065453 2 1 0
0 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
1 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
2 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
3 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
4 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
7 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
12 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
13 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
14 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
15 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
16 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
17 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
19 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
22 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
23 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
24 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
26 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
27 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
29 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
30 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
33 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
34 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
36 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
38 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
39 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
42 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
43 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
47 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
49 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
53 1 1 0.0037217168855646515 0.9962852 0.00208102469 0.00163372431 1 2 0
55 1 1 0.0037217168855646515 0.9962852 0.00208102469 0.00163372431 1 2 0
57 1 1 0.0037217168855646515 0.9962852 0.00208102469 0.00163372431 1 2 0
58 1 1 0.0092000873585017535 0.9908421 0.00664671045 0.00251116115 1 2 0
59 1 1 0.0037217168855646515 0.9962852 0.00208102469 0.00163372431 1 2 0
61 1 1 0.016912213899205067 0.98323 0.0148431538 0.00192679861 1 2 0
62 1 1 0.0037217168855646515 0.9962852 0.00208102469 0.00163372431 1 2 0
65 1 1 0.0037217168855646515 0.9962852 0.00208102469 0.00163372431 1 2 0
67 1 1 0.0037217168855646515 0.9962852 0.00208102469 0.00163372431 1 2 0
75 1 1 0.0037217168855646515 0.9962852 0.00208102469 0.00163372431 1 2 0
78 1 1 0.016912213899205067 0.98323 0.0148431538 0.00192679861 1 2 0
80 1 1 0.0037217168855646515 0.9962852 0.00208102469 0.00163372431 1 2 0
81 1 1 0.0037217168855646515 0.9962852 0.00208102469 0.00163372431 1 2 0
83 1 2 2.5891336906268299 0.917143047 0.07508506 0.007771907 2 1 0
84 1 1 0.016912213899205067 0.98323 0.0148431538 0.00192679861 1 2 0
85 1 1 0.016912213899205067 0.98323 0.0148431538 0.00192679861 1 2 0
86 1 1 0.050227892662411955 0.9510127 0.04610691 0.00288033066 1 2 0
87 1 1 0.0037217168855646515 0.9962852 0.00208102469 0.00163372431 1 2 0
89 1 1 0.0037217168855646515 0.9962852 0.00208102469 0.00163372431 1 2 0
94 1 1 0.0037217168855646515 0.9962852 0.00208102469 0.00163372431 1 2 0
101 2 2 0.002020849860847866 0.9979812 0.00101441843 0.0010043825 2 0 1
103 2 2 0.0015005993849081293 0.9985005 0.00093225 0.000567381445 2 0 1
107 2 2 0.0015005993849081293 0.9985005 0.00093225 0.000567381445 2 0 1
110 2 2 0.011657028232298779 0.988410652 0.00830857 0.00328082382 2 1 0
114 2 2 0.002020849860847866 0.9979812 0.00101441843 0.0010043825 2 0 1
116 2 2 0.0015005993849081293 0.9985005 0.00093225 0.000567381445 2 0 1
118 2 2 0.0015005993849081293 0.9985005 0.00093225 0.000567381445 2 0 1
119 2 1 3.0767924818472583 0.9510127 0.04610691 0.00288033066 1 2 0
124 2 2 0.0077648440687823166 0.9922652 0.004709411 0.0030252568 2 1 0
126 2 2 0.23194741246350345 0.7929878 0.201070011 0.005942198 2 1 0
127 2 2 0.23194741246350345 0.7929878 0.201070011 0.005942198 2 1 0
130 2 2 0.0015005993849081293 0.9985005 0.00093225 0.000567381445 2 0 1
134 2 2 0.4124399329774272 0.662032962 0.322351336 0.0156157119 2 1 0
135 2 2 0.0015005993849081293 0.9985005 0.00093225 0.000567381445 2 0 1
136 2 2 0.0077648440687823166 0.9922652 0.004709411 0.0030252568 2 1 0
139 2 2 0.011079306481754472 0.988981843 0.00756055675 0.00345751923 2 1 0
140 2 2 0.0077648440687823166 0.9922652 0.004709411 0.0030252568 2 1 0
142 2 2 0.002020849860847866 0.9979812 0.00101441843 0.0010043825 2 0 1
143 2 2 0.0077648440687823166 0.9922652 0.004709411 0.0030252568 2 1 0
146 2 2 0.23194741246350345 0.7929878 0.201070011 0.005942198 2 1 0
148 2 2 0.011079306481754472 0.988981843 0.00756055675 0.00345751923 2 1 0
149 2 2 0.002020849860847866 0.9979812 0.00101441843 0.0010043825 2 0 1
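
A note for anyone spot-checking these baselines: the per-instance Log-loss column matches -ln(p), where p is the probability the model assigned to the true label, and the three Score columns are the top-3 class probabilities (summing to ~1 up to float32 rounding). For example, against the rows above:

import math
# Instance 132 (correct; true class 2 ranked #1 with p = 0.997056544):
print(-math.log(0.997056544))  # ~0.0029478, matching its Log-loss column
# Instance 83 (misclassified; true class 1 ranked #2 with p = 0.07508506):
print(-math.log(0.07508506))   # ~2.5891337, matching its Log-loss column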


@@ -0,0 +1,4 @@
MulticlassLogisticRegression
Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /l2 /l1 /ot /nt Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings
0.940899 0.942785 0.244241 77.52895 0.1 0.001 0.001 1 MulticlassLogisticRegression %Data% %Output% 99 0 0 maml.exe CV tr=MulticlassLogisticRegression{l1=0.001 l2=0.1 ot=1e-3 nt=1} threads=- norm=No dout=%Output% loader=Text{col=Label:U4[0-2]:0 col=Features:1-*} data=%Data% seed=1 xf=TreeFeat{lps=0 trainer=ftr{iter=3}} xf=copy{col=Features:Leaves} /l2:0.1;/l1:0.001;/ot:0.001;/nt:1


@@ -0,0 +1,151 @@
Instance Label Assigned Log-loss #1 Score #2 Score #3 Score #1 Class #2 Class #3 Class
5 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
6 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
8 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
9 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
10 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
11 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
18 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
20 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
21 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
25 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
28 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
31 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
32 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
35 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
37 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
40 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
41 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
44 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
45 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
46 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
48 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
50 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
51 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
52 1 2 4.8380222052271913 0.9885047 0.007922708 0.003572509 2 1 0
54 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
56 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
60 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
63 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
64 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
66 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
68 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
69 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
70 1 2 3.4342713652954884 0.9619754 0.0322489 0.005775679 2 1 0
71 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
72 1 2 3.235481539168072 0.953945756 0.039341256 0.00671289442 2 1 0
73 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
74 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
76 1 2 3.235481539168072 0.953945756 0.039341256 0.00671289442 2 1 0
77 1 2 4.8380222052271913 0.9885047 0.007922708 0.003572509 2 1 0
79 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
82 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
88 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
90 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
91 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
92 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
93 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
95 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
96 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
97 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
98 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
99 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
100 2 2 0.0040831366035799354 0.9959252 0.00216659228 0.0019083831 2 0 1
102 2 2 0.0040831366035799354 0.9959252 0.00216659228 0.0019083831 2 0 1
104 2 2 0.0040831366035799354 0.9959252 0.00216659228 0.0019083831 2 0 1
105 2 2 0.0040831366035799354 0.9959252 0.00216659228 0.0019083831 2 0 1
106 2 1 5.6502623862339236 0.99403 0.003516594 0.002453451 1 2 0
108 2 2 0.01399731003087396 0.9861002 0.009722762 0.004176987 2 1 0
109 2 2 0.0040831366035799354 0.9959252 0.00216659228 0.0019083831 2 0 1
111 2 2 0.01399731003087396 0.9861002 0.009722762 0.004176987 2 1 0
112 2 2 0.0040831366035799354 0.9959252 0.00216659228 0.0019083831 2 0 1
113 2 2 0.16905655561427343 0.844461143 0.145666644 0.009872116 2 1 0
115 2 2 0.0040831366035799354 0.9959252 0.00216659228 0.0019083831 2 0 1
117 2 2 0.0040831366035799354 0.9959252 0.00216659228 0.0019083831 2 0 1
120 2 2 0.0040831366035799354 0.9959252 0.00216659228 0.0019083831 2 0 1
121 2 2 0.16905655561427343 0.844461143 0.145666644 0.009872116 2 1 0
122 2 2 0.01399731003087396 0.9861002 0.009722762 0.004176987 2 1 0
123 2 2 0.047148468732643084 0.953945756 0.039341256 0.00671289442 2 1 0
125 2 2 0.0040831366035799354 0.9959252 0.00216659228 0.0019083831 2 0 1
128 2 2 0.01399731003087396 0.9861002 0.009722762 0.004176987 2 1 0
129 2 2 0.0040831366035799354 0.9959252 0.00216659228 0.0019083831 2 0 1
131 2 2 0.0040831366035799354 0.9959252 0.00216659228 0.0019083831 2 0 1
132 2 2 0.01399731003087396 0.9861002 0.009722762 0.004176987 2 1 0
133 2 2 0.047148468732643084 0.953945756 0.039341256 0.00671289442 2 1 0
137 2 2 0.0040831366035799354 0.9959252 0.00216659228 0.0019083831 2 0 1
138 2 2 0.038766404860270692 0.9619754 0.0322489 0.005775679 2 1 0
141 2 2 0.011561873799839452 0.9885047 0.007922708 0.003572509 2 1 0
144 2 2 0.0040831366035799354 0.9959252 0.00216659228 0.0019083831 2 0 1
145 2 2 0.011561873799839452 0.9885047 0.007922708 0.003572509 2 1 0
147 2 2 0.011561873799839452 0.9885047 0.007922708 0.003572509 2 1 0
0 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
1 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
2 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
3 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
4 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
7 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
12 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
13 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
14 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
15 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
16 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
17 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
19 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
22 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
23 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
24 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
26 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
27 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
29 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
30 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
33 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
34 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
36 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
38 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
39 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
42 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
43 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
47 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
49 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
53 1 1 0.0073526135982484933 0.992674351 0.00468322355 0.00264250115 1 2 0
55 1 1 0.0062134377760607613 0.9938058 0.003886248 0.00230790745 1 2 0
57 1 1 0.0073526135982484933 0.992674351 0.00468322355 0.00264250115 1 2 0
58 1 1 0.012213000330511811 0.9878613 0.00857989 0.00355893746 1 2 0
59 1 1 0.0073526135982484933 0.992674351 0.00468322355 0.00264250115 1 2 0
61 1 1 0.016997572281395461 0.9831461 0.0150941061 0.00175973971 1 2 0
62 1 1 0.0062134377760607613 0.9938058 0.003886248 0.00230790745 1 2 0
65 1 1 0.012213000330511811 0.9878613 0.00857989 0.00355893746 1 2 0
67 1 1 0.0062134377760607613 0.9938058 0.003886248 0.00230790745 1 2 0
75 1 1 0.012213000330511811 0.9878613 0.00857989 0.00355893746 1 2 0
78 1 1 0.016997572281395461 0.9831461 0.0150941061 0.00175973971 1 2 0
80 1 1 0.0073526135982484933 0.992674351 0.00468322355 0.00264250115 1 2 0
81 1 1 0.0073526135982484933 0.992674351 0.00468322355 0.00264250115 1 2 0
83 1 2 4.2093796995147192 0.9820518 0.01485558 0.003092663 2 1 0
84 1 1 0.016997572281395461 0.9831461 0.0150941061 0.00175973971 1 2 0
85 1 1 0.37872337668893652 0.684735 0.308789551 0.00647537876 1 2 0
86 1 1 0.043367889719735153 0.957559049 0.039393153 0.003047859 1 2 0
87 1 1 0.012213000330511811 0.9878613 0.00857989 0.00355893746 1 2 0
89 1 1 0.0073526135982484933 0.992674351 0.00468322355 0.00264250115 1 2 0
94 1 1 0.0062134377760607613 0.9938058 0.003886248 0.00230790745 1 2 0
101 2 2 0.0011134383604485582 0.9988872 0.000686069543 0.000426827959 2 0 1
103 2 2 0.0041885353949071905 0.9958202 0.00250371476 0.00167586142 2 1 0
107 2 2 0.0041885353949071905 0.9958202 0.00250371476 0.00167586142 2 1 0
110 2 2 0.0011134383604485582 0.9988872 0.000686069543 0.000426827959 2 0 1
114 2 2 0.0063545716100066839 0.9936656 0.003907088 0.00242741266 2 1 0
116 2 2 0.0041885353949071905 0.9958202 0.00250371476 0.00167586142 2 1 0
118 2 2 0.0055382144176810257 0.9944771 0.00311130448 0.00241164817 2 1 0
119 2 1 4.1934509368151227 0.9831461 0.0150941061 0.00175973971 1 2 0
124 2 2 0.0010213701720459014 0.998979151 0.0006811208 0.000339646853 2 0 1
126 2 2 0.38831541266765224 0.6781984 0.314145625 0.00765600568 2 1 0
127 2 2 0.38831541266765224 0.6781984 0.314145625 0.00765600568 2 1 0
130 2 2 0.0010213701720459014 0.998979151 0.0006811208 0.000339646853 2 0 1
134 2 1 1.0955395966130874 0.64945 0.334359139 0.0161907785 1 2 0
135 2 2 0.0055382144176810257 0.9944771 0.00311130448 0.00241164817 2 1 0
136 2 2 0.0055382144176810257 0.9944771 0.00311130448 0.00241164817 2 1 0
139 2 2 0.0011134383604485582 0.9988872 0.000686069543 0.000426827959 2 0 1
140 2 2 0.0055382144176810257 0.9944771 0.00311130448 0.00241164817 2 1 0
142 2 2 0.0011134383604485582 0.9988872 0.000686069543 0.000426827959 2 0 1
143 2 2 0.0055382144176810257 0.9944771 0.00311130448 0.00241164817 2 1 0
146 2 2 0.065049630430406014 0.937020957 0.0586935952 0.004285537 2 1 0
148 2 2 0.0063545716100066839 0.9936656 0.003907088 0.00242741266 2 1 0
149 2 2 0.0048432621878218415 0.995168447 0.00314460555 0.00168708875 2 1 0
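
These CV per-instance files appear to concatenate the two folds' test rows back to back, which is why the Instance column jumps back to 0 partway down the table above. A small Python helper (a sketch assuming whitespace-separated data rows with the header line already stripped) recovers the per-fold groups:

def split_cv_folds(data_rows):
    # A non-increasing instance id (e.g. dropping back to 0) marks the
    # boundary where the next fold's test rows begin.
    folds, current, last = [], [], -1
    for row in data_rows:
        inst = int(row.split()[0])
        if current and inst <= last:
            folds.append(current)
            current = []
        current.append(row)
        last = inst
    if current:
        folds.append(current)
    return folds

Applied to the file above, this yields two folds: one covering instances 5 through 147, the other instances 0 through 149.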


@@ -0,0 +1,151 @@
Instance Label Assigned Log-loss #1 Score #2 Score #3 Score #1 Class #2 Class #3 Class
5 0 0 0.0039186265649996796 0.996089041 0.003910775 2.08655385E-11 0 1 2
6 0 0 0.0066394791182328066 0.9933825 0.0066170604 2.15241977E-10 0 1 2
8 0 0 0.025494106553378942 0.9748281 0.0251719262 2.83894752E-09 0 1 2
9 0 0 0.019190772567240676 0.9809922 0.0190079622 2.70583778E-10 0 1 2
10 0 0 0.0035562493991592786 0.996450067 0.00355021679 4.92729945E-12 0 1 2
11 0 0 0.012516964292635404 0.987561047 0.0124393385 2.80085038E-10 0 1 2
18 0 0 0.0047769616990695207 0.99523443 0.00476594642 8.127354E-12 0 1 2
20 0 0 0.014707546996037419 0.9854001 0.01460007 8.914995E-11 0 1 2
21 0 0 0.003647833406238951 0.9963588 0.00364135345 3.50849176E-11 0 1 2
25 0 0 0.032317619176197167 0.968199 0.0318007767 1.10024678E-09 0 1 2
28 0 0 0.0057745087199158104 0.994242132 0.005757959 2.02363057E-11 0 1 2
31 0 0 0.0071621105155360203 0.9928635 0.00713674352 6.243001E-11 0 1 2
32 0 0 0.0014066451993830036 0.998594344 0.00140553329 7.324321E-13 0 1 2
35 0 0 0.0052270768592320212 0.994786561 0.00521328 2.50269475E-11 0 1 2
37 0 0 0.019190772567240676 0.9809922 0.0190079622 2.70583778E-10 0 1 2
40 0 0 0.0033063652339419308 0.9966991 0.00330088427 1.89582534E-11 0 1 2
41 0 0 0.077088331379826819 0.9258081 0.07419214 3.33435963E-08 0 1 2
44 0 0 0.010642032182541446 0.9894144 0.0105857411 3.503732E-10 0 1 2
45 0 0 0.017220520546649732 0.9829269 0.0170729719 8.533566E-10 0 1 2
46 0 0 0.0041847046934752278 0.995824039 0.00417628 1.51512067E-11 0 1 2
48 0 0 0.0036583023932260659 0.9963484 0.00365164177 6.70034037E-12 0 1 2
50 1 1 0.0098977138531054429 0.9901511 0.00557819242 0.00427070633 1 2 0
51 1 1 0.031642692265860799 0.9688527 0.0240902212 0.00705716759 1 2 0
52 1 1 0.035736352134450186 0.964894652 0.0334406 0.00166473968 1 2 0
54 1 1 0.072410342279000534 0.930149138 0.06803843 0.00181231985 1 2 0
56 1 1 0.093650567008779079 0.9106009 0.08499185 0.004407246 1 2 0
60 1 1 0.04249022665363885 0.958399832 0.0362676121 0.005332458 1 2 0
63 1 1 0.13750479017047515 0.8715302 0.127108455 0.00136140583 1 2 0
64 1 1 0.054985312386571866 0.94649905 0.048023738 0.005477275 1 0 2
66 1 1 0.32485345716523295 0.722633243 0.274839342 0.00252740132 1 2 0
68 1 1 0.41496231775214937 0.660365164 0.33927533 0.000359431346 1 2 0
69 1 1 0.021870636107098145 0.9783668 0.0153707173 0.006262359 1 2 0
70 1 2 0.95191325789517367 0.613012552 0.3860018 0.0009856701 2 1 0
71 1 1 0.019884826671693976 0.9803116 0.0117813116 0.007907144 1 0 2
72 1 1 0.59602620487261415 0.55099684 0.448828 0.0001750491 1 2 0
73 1 1 0.061367247268114919 0.9404778 0.0584481172 0.00107416452 1 2 0
74 1 1 0.014403823587870975 0.9856994 0.008161746 0.00613881 1 2 0
76 1 1 0.038603275709482521 0.962132335 0.0368479043 0.00101978832 1 2 0
77 1 1 0.29143003374400789 0.7471943 0.252085924 0.0007197116 1 2 0
79 1 1 0.031789553326302268 0.9687104 0.030109819 0.00117970794 1 0 2
82 1 1 0.020876873287020672 0.97933954 0.011350465 0.00930999 1 0 2
88 1 1 0.040395996500067109 0.960409045 0.0276476946 0.0119432509 1 2 0
90 1 1 0.1660756926194604 0.8469821 0.151687235 0.001330623 1 2 0
91 1 1 0.078328373844083643 0.924660742 0.07274478 0.002594536 1 2 0
92 1 1 0.023790381883611583 0.9764904 0.0172090363 0.006300515 1 2 0
93 1 1 0.031245715484219467 0.9692374 0.0224260911 0.008336624 1 0 2
95 1 1 0.026741238049869182 0.973613143 0.01771209 0.008674903 1 2 0
96 1 1 0.046394842362099863 0.954664946 0.0385362953 0.00679865666 1 2 0
97 1 1 0.020153788914160642 0.980047941 0.0141839078 0.00576809375 1 2 0
98 1 1 0.10663125927218892 0.898857057 0.0991064161 0.00203663576 1 0 2
99 1 1 0.042470947370459415 0.9584183 0.03420087 0.007380766 1 2 0
100 2 2 0.00061608453557407644 0.9993841 0.0006157423 5.20009849E-08 2 1 0
102 2 2 0.03520011910757518 0.9654122 0.03458591 2.11709448E-06 2 1 0
104 2 2 0.0055899402333488843 0.994425654 0.005573941 4.13432133E-07 2 1 0
105 2 2 0.010776984144366402 0.9892809 0.0107188253 7.398785E-08 2 1 0
106 2 2 0.035450938842364201 0.9651701 0.03480021 2.97981169E-05 2 1 0
108 2 2 0.02539346900894791 0.974926233 0.025073193 4.91410162E-07 2 1 0
109 2 2 0.010955823550611778 0.989104 0.0108943991 1.82278131E-06 2 1 0
111 2 2 0.064874143655307498 0.9371854 0.06280439 1.01753494E-05 2 1 0
112 2 2 0.065967573668299667 0.9361612 0.06382503 1.35423443E-05 2 1 0
113 2 2 0.010099395894045529 0.989951432 0.01004624 2.24200812E-06 2 1 0
115 2 2 0.025224193337550441 0.9750913 0.0248927623 1.61508033E-05 2 1 0
117 2 2 0.040343741890525309 0.960459232 0.03953917 1.57988256E-06 2 1 0
120 2 2 0.02318047647021565 0.9770861 0.0229095761 4.537662E-06 2 1 0
121 2 2 0.023067445323975651 0.9771966 0.0227888674 1.47519113E-05 2 1 0
122 2 2 0.010407475995621918 0.9896465 0.0103535978 3.082614E-08 2 1 0
123 2 2 0.3268465712630802 0.7211944 0.278644919 0.000160744428 2 1 0
125 2 2 0.2461805298239165 0.7817811 0.218203962 1.49805483E-05 2 1 0
128 2 2 0.0093602944019322582 0.9906834 0.009315666 7.62494949E-07 2 1 0
129 2 1 0.72902345881548536 0.517581046 0.482379824 3.91569338E-05 1 2 0
131 2 2 0.50472807995053026 0.6036697 0.3962875 4.27463638E-05 2 1 0
132 2 2 0.0054111588821948668 0.994603455 0.005395858 4.54962333E-07 2 1 0
133 2 1 0.80137756193014009 0.551096559 0.4487104 0.000192968553 1 2 0
137 2 2 0.138713849419592 0.8704771 0.129494175 2.89310847E-05 2 1 0
138 2 2 0.41129824947871368 0.6627892 0.336678356 0.000532399 2 1 0
141 2 2 0.15623141596710644 0.8553612 0.144476026 0.0001627244 2 1 0
144 2 2 0.0057015524406273162 0.9943147 0.005683652 1.455023E-06 2 1 0
145 2 2 0.050843236304848244 0.950427651 0.04954298 2.935713E-05 2 1 0
147 2 2 0.14442514040251331 0.8655197 0.134411365 6.885501E-05 2 1 0
0 0 0 0.00389289626734539 0.9961147 0.00388562 4.35209946E-13 0 1 2
1 0 0 0.012644744843927328 0.987434864 0.0125649935 1.20199536E-11 0 1 2
2 0 0 0.0068264018990562975 0.993196845 0.006802921 3.977263E-12 0 1 2
3 0 0 0.017663957572318569 0.982491136 0.0175090022 4.86960923E-11 0 1 2
4 0 0 0.0033855462771325432 0.9966202 0.00337941712 3.752789E-13 0 1 2
7 0 0 0.0072031139444910328 0.992822766 0.007177612 2.37773321E-12 0 1 2
12 0 0 0.013219808473621149 0.9868672 0.0131330229 9.955089E-12 0 1 2
13 0 0 0.0067634503253886933 0.9932594 0.006741032 5.51140253E-12 0 1 2
14 0 0 0.0004300803787185737 0.99957 0.0004298582 4.855314E-16 0 1 2
15 0 0 0.0005798232354486219 0.999420345 0.000579488464 3.10407929E-15 0 1 2
16 0 0 0.0010280527364537647 0.9989725 0.00102712377 2.45259982E-14 0 1 2
17 0 0 0.0039882812042811395 0.996019661 0.003979865 7.59585157E-13 0 1 2
19 0 0 0.0029926331194280015 0.99701184 0.00298810145 3.63156038E-13 0 1 2
22 0 0 0.0011491819568960609 0.9988515 0.00114855962 6.334511E-14 0 1 2
23 0 0 0.017463655687227918 0.98268795 0.0173117835 7.403799E-11 0 1 2
24 0 0 0.031715412558545882 0.968782246 0.0312175769 1.26180913E-10 0 1 2
26 0 0 0.010593417790593548 0.9894625 0.0105371028 1.63020032E-11 0 1 2
27 0 0 0.0050983234754958173 0.994914651 0.005085708 6.78546358E-13 0 1 2
29 0 0 0.0187730771464137 0.98140204 0.0185974967 4.52416021E-11 0 1 2
30 0 0 0.021565460472193786 0.9786654 0.02133441 5.23468144E-11 0 1 2
33 0 0 0.00068568844551682487 0.999314547 0.000685595965 2.65897216E-15 0 1 2
34 0 0 0.014051409604386065 0.986046851 0.0139533244 9.251471E-12 0 1 2
36 0 0 0.0021076343691449066 0.9978946 0.0021053094 4.41079566E-14 0 1 2
38 0 0 0.012707705555407789 0.9873727 0.0126277 3.37066139E-11 0 1 2
39 0 0 0.0067262453481025412 0.9932963 0.006703257 1.64459679E-12 0 1 2
42 0 0 0.0083856724102233139 0.9916494 0.008350652 1.20181885E-11 0 1 2
43 0 0 0.0090265536748324916 0.991014063 0.00898587 2.96402451E-11 0 1 2
47 0 0 0.010248185046573205 0.989804149 0.0101959091 1.29456289E-11 0 1 2
49 0 0 0.0063266791540774296 0.9936933 0.006306445 1.76891347E-12 0 1 2
53 1 1 0.10908627419032911 0.896653056 0.100547634 0.00279926369 1 2 0
55 1 1 0.13382980096571034 0.874738932 0.123627327 0.00163388788 1 2 0
57 1 1 0.032485513905672912 0.9680365 0.02845792 0.00350564788 1 0 2
58 1 1 0.014136100959005988 0.985963345 0.0110323895 0.0030042557 1 2 0
59 1 1 0.088965912338422562 0.914876759 0.07777976 0.00734356465 1 2 0
61 1 1 0.037055462184954119 0.9636227 0.0281057619 0.008271456 1 2 0
62 1 1 0.010662093085153215 0.989394546 0.006804485 0.00380102685 1 2 0
65 1 1 0.0124035025724159 0.9876731 0.009420066 0.00290678721 1 0 2
67 1 1 0.011022113126039873 0.9890384 0.00670390576 0.00425770739 1 0 2
75 1 1 0.012557041083151357 0.98752147 0.007134205 0.00534433033 1 0 2
78 1 1 0.11787630637387389 0.888806 0.108796746 0.002397152 1 2 0
80 1 1 0.018740888550878902 0.98143363 0.0107581457 0.00780817959 1 2 0
81 1 1 0.01532223104169358 0.984794557 0.0112667764 0.003938712 1 0 2
83 1 2 2.0409789338544426 0.870069146 0.129901484 2.94224083E-05 2 1 0
84 1 1 0.43623206546961257 0.6464677 0.352110773 0.00142157311 1 2 0
85 1 1 0.05070113760972858 0.9505627 0.0423230827 0.00711412542 1 2 0
86 1 1 0.023943664227516325 0.9763407 0.020373445 0.003285837 1 2 0
87 1 1 0.067026826092418473 0.9351701 0.0635259151 0.00130392925 1 2 0
89 1 1 0.063164711929489775 0.938788831 0.0567600951 0.00445109932 1 2 0
94 1 1 0.064262629430308249 0.9377587 0.05857061 0.00367062865 1 2 0
101 2 2 0.01641893911409267 0.9837151 0.0162818022 2.98962755E-06 2 1 0
103 2 2 0.021575936016214155 0.978655159 0.0213434044 1.57773707E-06 2 1 0
107 2 2 0.015651238280913731 0.9844706 0.0155291827 2.12675829E-07 2 1 0
110 2 2 0.31672575146404086 0.7285305 0.271245658 0.000223787472 2 1 0
114 2 2 0.0015682948475232242 0.998432934 0.00156654033 3.143049E-07 2 1 0
116 2 2 0.083519922011792383 0.919872761 0.0801154 1.17583113E-05 2 1 0
118 2 2 8.392686160427524E-05 0.9999161 8.3445615E-05 9.315716E-11 2 1 0
119 2 2 0.083749782035049736 0.919661343 0.08032946 9.204548E-06 2 1 0
124 2 2 0.030857563825488415 0.9696137 0.03038175 4.521532E-06 2 1 0
126 2 2 0.42635579718355782 0.652884 0.3468231 0.000292929617 2 1 0
127 2 2 0.37435529902904258 0.6877325 0.312001377 0.000266118121 2 1 0
130 2 2 0.023466557862108686 0.976806641 0.0231929272 5.31211867E-07 2 1 0
134 2 2 0.039153548126797694 0.961603045 0.0383953266 1.45486206E-06 2 1 0
135 2 2 0.01277634509471311 0.9873049 0.012694709 4.930856E-07 2 1 0
136 2 2 0.0041656711133312499 0.995843 0.0041561476 7.559071E-07 2 1 0
139 2 2 0.12062356177145393 0.886367559 0.113597296 3.53434771E-05 2 1 0
140 2 2 0.0054774415012383863 0.994537532 0.0054619913 6.990564E-07 2 1 0
142 2 2 0.01641893911409267 0.9837151 0.0162818022 2.98962755E-06 2 1 0
143 2 2 0.0041274853374921976 0.995881 0.00411869865 2.57669541E-07 2 1 0
146 2 2 0.062442762435292995 0.939466834 0.0605186969 1.4492558E-05 2 1 0
148 2 2 0.01353833699897897 0.9865529 0.0134423738 4.60714637E-06 2 1 0
149 2 2 0.091871736345547891 0.912222147 0.08774486 3.309085E-05 2 1 0


@@ -0,0 +1,66 @@
maml.exe TrainTest test=%Data% tr=MulticlassLogisticRegression{l1=0.001 l2=0.1 ot=1e-3 nt=1} norm=No dout=%Output% data=%Data% out=%Output% seed=1
Not adding a normalizer.
Beginning optimization
num vars: 15
improvement criterion: Mean Improvement
L1 regularization selected 15 of 15 weights.
Not training a calibrator because it is not needed.
Confusion table
||========================
PREDICTED || 0 | 1 | 2 | Recall
TRUTH ||========================
0 || 50 | 0 | 0 | 1.0000
1 || 0 | 48 | 2 | 0.9600
2 || 0 | 1 | 49 | 0.9800
||========================
Precision ||1.0000 |0.9796 |0.9608 |
Accuracy(micro-avg): 0.980000
Accuracy(macro-avg): 0.980000
Log-loss: 0.072218
Log-loss reduction: 93.426390
OVERALL RESULTS
---------------------------------------
Accuracy(micro-avg): 0.980000 (0.0000)
Accuracy(macro-avg): 0.980000 (0.0000)
Log-loss: 0.072218 (0.0000)
Log-loss reduction: 93.426390 (0.0000)
---------------------------------------
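
The headline metrics follow directly from the confusion table above; a quick numpy check (illustrative only, not part of the test harness) reproduces them:

import numpy as np
cm = np.array([[50, 0, 0],    # rows = truth, columns = predicted
               [0, 48, 2],
               [0, 1, 49]])
recall = np.diag(cm) / cm.sum(axis=1)     # [1.0000, 0.9600, 0.9800]
precision = np.diag(cm) / cm.sum(axis=0)  # [1.0000, 0.9796, 0.9608]
micro_acc = np.trace(cm) / cm.sum()       # 0.980000 (micro-avg)
macro_acc = recall.mean()                 # 0.980000 (macro-avg)
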
Physical memory usage(MB): %Number%
Virtual memory usage(MB): %Number%
%DateTime% Time elapsed(s): %Number%
--- Progress log ---
[1] 'LBFGS data prep' started.
[1] 'LBFGS data prep' finished in %Time%.
[2] 'LBFGS Optimizer' started.
[2] (%Time%) 0 iterations Loss: 1.0986123085022
[2] (%Time%) 1 iterations Loss: 1.09053671360016 Improvement: 0.008076
[2] (%Time%) 2 iterations Loss: 0.964357972145081 Improvement: 0.1026
[2] (%Time%) 3 iterations Loss: 0.874466478824615 Improvement: 0.09291
[2] (%Time%) 4 iterations Loss: 0.53207802772522 Improvement: 0.2808
[2] (%Time%) 5 iterations Loss: 0.460592895746231 Improvement: 0.1236
[2] (%Time%) 6 iterations Loss: 0.381620526313782 Improvement: 0.09013
[2] (%Time%) 7 iterations Loss: 0.301508545875549 Improvement: 0.08262
[2] (%Time%) 8 iterations Loss: 0.230116382241249 Improvement: 0.0742
[2] (%Time%) 9 iterations Loss: 0.170902773737907 Improvement: 0.06296
[2] (%Time%) 10 iterations Loss: 0.143164187669754 Improvement: 0.03654
[2] (%Time%) 11 iterations Loss: 0.135387286543846 Improvement: 0.01497
[2] (%Time%) 12 iterations Loss: 0.133318409323692 Improvement: 0.005294
[2] (%Time%) 13 iterations Loss: 0.132491216063499 Improvement: 0.001944
[2] (%Time%) 14 iterations Loss: 0.124604761600494 Improvement: 0.006401
[2] (%Time%) 15 iterations Loss: 0.120595537126064 Improvement: 0.004607
[2] (%Time%) 16 iterations Loss: 0.119206272065639 Improvement: 0.002194
[2] (%Time%) 17 iterations Loss: 0.117203310132027 Improvement: 0.002051
[2] (%Time%) 18 iterations Loss: 0.116163291037083 Improvement: 0.001293
[2] (%Time%) 19 iterations Loss: 0.109811097383499 Improvement: 0.005087
[2] (%Time%) 20 iterations Loss: 0.106156274676323 Improvement: 0.004013
[2] (%Time%) 21 iterations Loss: 0.104246392846107 Improvement: 0.002436
[2] (%Time%) 22 iterations Loss: 0.10310410708189 Improvement: 0.001466
[2] (%Time%) 23 iterations Loss: 0.102218925952911 Improvement: 0.00103
[2] (%Time%) 24 iterations Loss: 0.101610459387302 Improvement: 0.0007139
[2] 'LBFGS Optimizer' finished in %Time%.
[3] 'Saving model' started.
[3] 'Saving model' finished in %Time%.
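
The "Log-loss reduction" figure is consistent with scoring the final log-loss against the uniform three-class prior ln(3), which is also the iteration-0 loss in the LBFGS log above; a quick check under that assumption:

import math
prior = math.log(3)               # 1.0986123..., the iteration-0 loss
final = 0.072218                  # test log-loss from the summary above
print(100 * (1 - final / prior))  # ~93.43, matching "Log-loss reduction"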


@@ -0,0 +1,4 @@
MulticlassLogisticRegression
Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /l2 /l1 /ot /nt Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings
0.98 0.98 0.072218 93.42639 0.1 0.001 0.001 1 MulticlassLogisticRegression %Data% %Data% %Output% 99 0 0 maml.exe TrainTest test=%Data% tr=MulticlassLogisticRegression{l1=0.001 l2=0.1 ot=1e-3 nt=1} norm=No dout=%Output% data=%Data% out=%Output% seed=1 /l2:0.1;/l1:0.001;/ot:0.001;/nt:1


@@ -0,0 +1,69 @@
maml.exe TrainTest test=%Data% tr=MulticlassLogisticRegression{l1=0.001 l2=0.1 ot=1e-3 nt=1} norm=No dout=%Output% loader=Text{col=Label:U4[0-2]:0 col=Features:1-*} data=%Data% out=%Output% seed=1 xf=TreeFeat{lps=0 trainer=ftr{iter=3}} xf=copy{col=Features:Leaves}
Making per-feature arrays
Changing data from row-wise to column-wise
Processed 150 instances
Binning and forming Feature objects
Reserved memory for tree learner: 20436 bytes
Starting to train ...
Not training a calibrator because it is not needed.
Not adding a normalizer.
Beginning optimization
num vars: 72
improvement criterion: Mean Improvement
L1 regularization selected 72 of 72 weights.
Not training a calibrator because it is not needed.
Confusion table
||========================
PREDICTED || 0 | 1 | 2 | Recall
TRUTH ||========================
0 || 50 | 0 | 0 | 1.0000
1 || 0 | 49 | 1 | 0.9800
2 || 0 | 2 | 48 | 0.9600
||========================
Precision ||1.0000 |0.9608 |0.9796 |
Accuracy(micro-avg): 0.980000
Accuracy(macro-avg): 0.980000
Log-loss: 0.048652
Log-loss reduction: 95.571483
OVERALL RESULTS
---------------------------------------
Accuracy(micro-avg): 0.980000 (0.0000)
Accuracy(macro-avg): 0.980000 (0.0000)
Log-loss: 0.048652 (0.0000)
Log-loss reduction: 95.571483 (0.0000)
---------------------------------------
Physical memory usage(MB): %Number%
Virtual memory usage(MB): %Number%
%DateTime% Time elapsed(s): %Number%
--- Progress log ---
[1] 'FastTree data preparation' started.
[1] 'FastTree data preparation' finished in %Time%.
[2] 'FastTree in-memory bins initialization' started.
[2] 'FastTree in-memory bins initialization' finished in %Time%.
[3] 'FastTree feature conversion' started.
[3] 'FastTree feature conversion' finished in %Time%.
[4] 'FastTree training' started.
[4] 'FastTree training' finished in %Time%.
[5] 'LBFGS data prep' started.
[5] 'LBFGS data prep' finished in %Time%.
[6] 'LBFGS Optimizer' started.
[6] (%Time%) 0 iterations Loss: 1.0986123085022
[6] (%Time%) 1 iterations Loss: 0.62896740436554 Improvement: 0.4696
[6] (%Time%) 2 iterations Loss: 0.213765218853951 Improvement: 0.4261
[6] (%Time%) 3 iterations Loss: 0.144495338201523 Improvement: 0.1542
[6] (%Time%) 4 iterations Loss: 0.106832779943943 Improvement: 0.06646
[6] (%Time%) 5 iterations Loss: 0.0936193987727165 Improvement: 0.02649
[6] (%Time%) 6 iterations Loss: 0.083323560655117 Improvement: 0.01434
[6] (%Time%) 7 iterations Loss: 0.0787383615970612 Improvement: 0.007024
[6] (%Time%) 8 iterations Loss: 0.0768212750554085 Improvement: 0.003194
[6] (%Time%) 9 iterations Loss: 0.0741848275065422 Improvement: 0.002776
[6] (%Time%) 10 iterations Loss: 0.0730299279093742 Improvement: 0.00156
[6] (%Time%) 11 iterations Loss: 0.0716971307992935 Improvement: 0.00139
[6] (%Time%) 12 iterations Loss: 0.0708837881684303 Improvement: 0.0009574
[6] 'LBFGS Optimizer' finished in %Time%.
[7] 'Saving model' started.
[7] 'Saving model' finished in %Time%.


@@ -0,0 +1,67 @@
maml.exe TrainTest test=%Data% tr=MulticlassLogisticRegression{l1=0.001 l2=0.1 ot=1e-3 nt=1} norm=No dout=%Output% loader=Text{col=Label:U4[0-2]:0 col=Features:1-*} data=%Data% out=%Output% seed=1 xf=TreeFeat{lps=2 trainer=ftr{iter=3}} xf=copy{col=Features:Leaves}
Making per-feature arrays
Changing data from row-wise to column-wise
Processed 150 instances
Binning and forming Feature objects
Reserved memory for tree learner: 20436 bytes
Starting to train ...
Not training a calibrator because it is not needed.
Not adding a normalizer.
Beginning optimization
num vars: 81
improvement criterion: Mean Improvement
L1 regularization selected 81 of 81 weights.
Not training a calibrator because it is not needed.
Confusion table
||========================
PREDICTED || 0 | 1 | 2 | Recall
TRUTH ||========================
0 || 50 | 0 | 0 | 1.0000
1 || 0 | 49 | 1 | 0.9800
2 || 0 | 3 | 47 | 0.9400
||========================
Precision ||1.0000 |0.9423 |0.9792 |
Accuracy(micro-avg): 0.973333
Accuracy(macro-avg): 0.973333
Log-loss: 0.052580
Log-loss reduction: 95.213998
OVERALL RESULTS
---------------------------------------
Accuracy(micro-avg): 0.973333 (0.0000)
Accuracy(macro-avg): 0.973333 (0.0000)
Log-loss: 0.052580 (0.0000)
Log-loss reduction: 95.213998 (0.0000)
---------------------------------------
Physical memory usage(MB): %Number%
Virtual memory usage(MB): %Number%
%DateTime% Time elapsed(s): %Number%
--- Progress log ---
[1] 'FastTree data preparation' started.
[1] 'FastTree data preparation' finished in %Time%.
[2] 'FastTree in-memory bins initialization' started.
[2] 'FastTree in-memory bins initialization' finished in %Time%.
[3] 'FastTree feature conversion' started.
[3] 'FastTree feature conversion' finished in %Time%.
[4] 'FastTree training' started.
[4] 'FastTree training' finished in %Time%.
[5] 'LBFGS data prep' started.
[5] 'LBFGS data prep' finished in %Time%.
[6] 'LBFGS Optimizer' started.
[6] (%Time%) 0 iterations Loss: 1.0986123085022
[6] (%Time%) 1 iterations Loss: 0.659841060638428 Improvement: 0.4388
[6] (%Time%) 2 iterations Loss: 0.203571543097496 Improvement: 0.4528
[6] (%Time%) 3 iterations Loss: 0.133906096220016 Improvement: 0.1609
[6] (%Time%) 4 iterations Loss: 0.108745984733105 Improvement: 0.05869
[6] (%Time%) 5 iterations Loss: 0.0906000584363937 Improvement: 0.02825
[6] (%Time%) 6 iterations Loss: 0.0810708180069923 Improvement: 0.01421
[6] (%Time%) 7 iterations Loss: 0.0780067816376686 Improvement: 0.005849
[6] (%Time%) 8 iterations Loss: 0.0760208815336227 Improvement: 0.002952
[6] (%Time%) 9 iterations Loss: 0.0755626112222672 Improvement: 0.001082
[6] (%Time%) 10 iterations Loss: 0.0751652047038078 Improvement: 0.0005685
[6] 'LBFGS Optimizer' finished in %Time%.
[7] 'Saving model' started.
[7] 'Saving model' finished in %Time%.
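
Across the two tree-featurized runs above, LBFGS halts on the first iteration whose reported improvement drops below the ot=1e-3 tolerance (0.0009574 and 0.0005685 respectively, each preceded by a value still above 1e-3). A hedged sketch of that stopping check follows; note the logged "Improvement" is already a smoothed ("Mean Improvement") quantity rather than the raw loss delta, and this is not ML.NET's exact implementation:

def should_stop(reported_improvement, ot=1e-3):
    # Continue while the smoothed improvement stays at or above the
    # optimization tolerance; stop on the first value below it.
    return reported_improvement < ot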


@@ -0,0 +1,4 @@
MulticlassLogisticRegression
Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /l2 /l1 /ot /nt Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings
0.973333 0.973333 0.05258 95.214 0.1 0.001 0.001 1 MulticlassLogisticRegression %Data% %Data% %Output% 99 0 0 maml.exe TrainTest test=%Data% tr=MulticlassLogisticRegression{l1=0.001 l2=0.1 ot=1e-3 nt=1} norm=No dout=%Output% loader=Text{col=Label:U4[0-2]:0 col=Features:1-*} data=%Data% out=%Output% seed=1 xf=TreeFeat{lps=2 trainer=ftr{iter=3}} xf=copy{col=Features:Leaves} /l2:0.1;/l1:0.001;/ot:0.001;/nt:1


@@ -0,0 +1,151 @@
Instance Label Assigned Log-loss #1 Score #2 Score #3 Score #1 Class #2 Class #3 Class
0 0 0 0.0032863915501666614 0.996719 0.00183256145 0.00144841243 0 2 1
1 0 0 0.0057439947230261707 0.9942725 0.00320922164 0.00251825759 0 2 1
2 0 0 0.0019955864125022859 0.9980064 0.0011133193 0.0008801577 0 2 1
3 0 0 0.0057439947230261707 0.9942725 0.00320922164 0.00251825759 0 2 1
4 0 0 0.0032863915501666614 0.996719 0.00183256145 0.00144841243 0 2 1
5 0 0 0.0040450138254278329 0.995963156 0.002256032 0.00178077759 0 2 1
6 0 0 0.0019955864125022859 0.9980064 0.0011133193 0.0008801577 0 2 1
7 0 0 0.0032863915501666614 0.996719 0.00183256145 0.00144841243 0 2 1
8 0 0 0.0057439947230261707 0.9942725 0.00320922164 0.00251825759 0 2 1
9 0 0 0.0057439947230261707 0.9942725 0.00320922164 0.00251825759 0 2 1
10 0 0 0.0040450138254278329 0.995963156 0.002256032 0.00178077759 0 2 1
11 0 0 0.0019955864125022859 0.9980064 0.0011133193 0.0008801577 0 2 1
12 0 0 0.0057439947230261707 0.9942725 0.00320922164 0.00251825759 0 2 1
13 0 0 0.0057439947230261707 0.9942725 0.00320922164 0.00251825759 0 2 1
14 0 0 0.0040450138254278329 0.995963156 0.002256032 0.00178077759 0 2 1
15 0 0 0.0040450138254278329 0.995963156 0.002256032 0.00178077759 0 2 1
16 0 0 0.0040450138254278329 0.995963156 0.002256032 0.00178077759 0 2 1
17 0 0 0.0032863915501666614 0.996719 0.00183256145 0.00144841243 0 2 1
18 0 0 0.0040450138254278329 0.995963156 0.002256032 0.00178077759 0 2 1
19 0 0 0.0040450138254278329 0.995963156 0.002256032 0.00178077759 0 2 1
20 0 0 0.0032863915501666614 0.996719 0.00183256145 0.00144841243 0 2 1
21 0 0 0.0040450138254278329 0.995963156 0.002256032 0.00178077759 0 2 1
22 0 0 0.0019955864125022859 0.9980064 0.0011133193 0.0008801577 0 2 1
23 0 0 0.0032863915501666614 0.996719 0.00183256145 0.00144841243 0 2 1
24 0 0 0.0019955864125022859 0.9980064 0.0011133193 0.0008801577 0 2 1
25 0 0 0.009448319936012424 0.9905962 0.005269755 0.004134127 0 2 1
26 0 0 0.0032863915501666614 0.996719 0.00183256145 0.00144841243 0 2 1
27 0 0 0.0032863915501666614 0.996719 0.00183256145 0.00144841243 0 2 1
28 0 0 0.0032863915501666614 0.996719 0.00183256145 0.00144841243 0 2 1
29 0 0 0.0019955864125022859 0.9980064 0.0011133193 0.0008801577 0 2 1
30 0 0 0.0057439947230261707 0.9942725 0.00320922164 0.00251825759 0 2 1
31 0 0 0.0032863915501666614 0.996719 0.00183256145 0.00144841243 0 2 1
32 0 0 0.0040450138254278329 0.995963156 0.002256032 0.00178077759 0 2 1
33 0 0 0.0040450138254278329 0.995963156 0.002256032 0.00178077759 0 2 1
34 0 0 0.0057439947230261707 0.9942725 0.00320922164 0.00251825759 0 2 1
35 0 0 0.0032863915501666614 0.996719 0.00183256145 0.00144841243 0 2 1
36 0 0 0.0032863915501666614 0.996719 0.00183256145 0.00144841243 0 2 1
37 0 0 0.0057439947230261707 0.9942725 0.00320922164 0.00251825759 0 2 1
38 0 0 0.0057439947230261707 0.9942725 0.00320922164 0.00251825759 0 2 1
39 0 0 0.0032863915501666614 0.996719 0.00183256145 0.00144841243 0 2 1
40 0 0 0.0032863915501666614 0.996719 0.00183256145 0.00144841243 0 2 1
41 0 0 0.0057439947230261707 0.9942725 0.00320922164 0.00251825759 0 2 1
42 0 0 0.0019955864125022859 0.9980064 0.0011133193 0.0008801577 0 2 1
43 0 0 0.0032863915501666614 0.996719 0.00183256145 0.00144841243 0 2 1
44 0 0 0.0040450138254278329 0.995963156 0.002256032 0.00178077759 0 2 1
45 0 0 0.0057439947230261707 0.9942725 0.00320922164 0.00251825759 0 2 1
46 0 0 0.0040450138254278329 0.995963156 0.002256032 0.00178077759 0 2 1
47 0 0 0.0019955864125022859 0.9980064 0.0011133193 0.0008801577 0 2 1
48 0 0 0.0040450138254278329 0.995963156 0.002256032 0.00178077759 0 2 1
49 0 0 0.0032863915501666614 0.996719 0.00183256145 0.00144841243 0 2 1
50 1 1 0.021321326112559644 0.978904366 0.0185372382 0.00255840342 1 2 0
51 1 1 0.024120783122654886 0.9761678 0.02104084 0.002791362 1 2 0
52 1 1 0.057755711518056148 0.9438805 0.0540314876 0.00208806922 1 2 0
53 1 1 0.014104847140120776 0.98599416 0.0111910813 0.00281463587 1 2 0
54 1 1 0.024120783122654886 0.9761678 0.02104084 0.002791362 1 2 0
55 1 1 0.010453672105827311 0.9896008 0.00705624232 0.00334312418 1 2 0
56 1 1 0.057755711518056148 0.9438805 0.0540314876 0.00208806922 1 2 0
57 1 1 0.014104847140120776 0.98599416 0.0111910813 0.00281463587 1 2 0
58 1 1 0.010453672105827311 0.9896008 0.00705624232 0.00334312418 1 2 0
59 1 1 0.022228986093586683 0.978016257 0.0183866452 0.00359701412 1 2 0
60 1 1 0.014104847140120776 0.98599416 0.0111910813 0.00281463587 1 2 0
61 1 1 0.002874388769313735 0.997129738 0.00218560919 0.0006844687 1 2 0
62 1 1 0.0025157957624932083 0.997487366 0.00131361035 0.001199242 1 2 0
63 1 1 0.021321326112559644 0.978904366 0.0185372382 0.00255840342 1 2 0
64 1 1 0.0037231527319482333 0.99628377 0.002173206 0.00154322956 1 2 0
65 1 1 0.00050450174369561743 0.9994956 0.0003428632 0.000161661 1 0 2
66 1 1 0.024120783122654886 0.9761678 0.02104084 0.002791362 1 2 0
67 1 1 0.0037231527319482333 0.99628377 0.002173206 0.00154322956 1 2 0
68 1 1 0.024120783122654886 0.9761678 0.02104084 0.002791362 1 2 0
69 1 1 0.0025157957624932083 0.997487366 0.00131361035 0.001199242 1 2 0
70 1 2 1.5216391055963379 0.774603248 0.218353689 0.007043035 2 1 0
71 1 1 0.0037231527319482333 0.99628377 0.002173206 0.00154322956 1 2 0
72 1 1 0.057755711518056148 0.9438805 0.0540314876 0.00208806922 1 2 0
73 1 1 0.021321326112559644 0.978904366 0.0185372382 0.00255840342 1 2 0
74 1 1 0.0037231527319482333 0.99628377 0.002173206 0.00154322956 1 2 0
75 1 1 0.00050450174369561743 0.9994956 0.0003428632 0.000161661 1 0 2
76 1 1 0.021321326112559644 0.978904366 0.0185372382 0.00255840342 1 2 0
77 1 1 0.66969455479852757 0.5118649 0.480443418 0.00769158732 1 2 0
78 1 1 0.024120783122654886 0.9761678 0.02104084 0.002791362 1 2 0
79 1 1 0.0037231527319482333 0.99628377 0.002173206 0.00154322956 1 2 0
80 1 1 0.014104847140120776 0.98599416 0.0111910813 0.00281463587 1 2 0
81 1 1 0.014104847140120776 0.98599416 0.0111910813 0.00281463587 1 2 0
82 1 1 0.0037231527319482333 0.99628377 0.002173206 0.00154322956 1 2 0
83 1 1 0.66969455479852757 0.5118649 0.480443418 0.00769158732 1 2 0
84 1 1 0.6145726680541016 0.540872 0.450446427 0.008681549 1 2 0
85 1 1 0.024120783122654886 0.9761678 0.02104084 0.002791362 1 2 0
86 1 1 0.057755711518056148 0.9438805 0.0540314876 0.00208806922 1 2 0
87 1 1 0.00050450174369561743 0.9994956 0.0003428632 0.000161661 1 0 2
88 1 1 0.0037231527319482333 0.99628377 0.002173206 0.00154322956 1 2 0
89 1 1 0.014104847140120776 0.98599416 0.0111910813 0.00281463587 1 2 0
90 1 1 0.0081417295935607269 0.9918913 0.00619866839 0.00190992071 1 2 0
91 1 1 0.010453672105827311 0.9896008 0.00705624232 0.00334312418 1 2 0
92 1 1 0.0037231527319482333 0.99628377 0.002173206 0.00154322956 1 2 0
93 1 1 0.014104847140120776 0.98599416 0.0111910813 0.00281463587 1 2 0
94 1 1 0.0037231527319482333 0.99628377 0.002173206 0.00154322956 1 2 0
95 1 1 0.0037231527319482333 0.99628377 0.002173206 0.00154322956 1 2 0
96 1 1 0.0037231527319482333 0.99628377 0.002173206 0.00154322956 1 2 0
97 1 1 0.0037231527319482333 0.99628377 0.002173206 0.00154322956 1 2 0
98 1 1 0.014104847140120776 0.98599416 0.0111910813 0.00281463587 1 2 0
99 1 1 0.0037231527319482333 0.99628377 0.002173206 0.00154322956 1 2 0
100 2 2 0.0027975792788985728 0.99720633 0.00140296342 0.00139077951 2 0 1
101 2 2 0.0060782606764628114 0.9939402 0.00406833738 0.00199138 2 1 0
102 2 2 0.0046234153867987507 0.995387256 0.00270372839 0.00190896646 2 1 0
103 2 2 0.00090502910908000144 0.9990954 0.0005804024 0.000324115856 2 0 1
104 2 2 0.0046234153867987507 0.995387256 0.00270372839 0.00190896646 2 1 0
105 2 2 0.0046234153867987507 0.995387256 0.00270372839 0.00190896646 2 1 0
106 2 1 0.79751612827614782 0.540872 0.450446427 0.008681549 1 2 0
107 2 2 0.00090502910908000144 0.9990954 0.0005804024 0.000324115856 2 0 1
108 2 2 0.00090502910908000144 0.9990954 0.0005804024 0.000324115856 2 0 1
109 2 2 0.004610062044209896 0.995400548 0.00268544327 0.0019138573 2 1 0
110 2 2 0.0027107946424502136 0.9972929 0.00169270684 0.00101456023 2 1 0
111 2 2 0.00038625307961716445 0.9996138 0.000308882067 7.720726E-05 2 0 1
112 2 2 0.0046234153867987507 0.995387256 0.00270372839 0.00190896646 2 1 0
113 2 2 0.0060782606764628114 0.9939402 0.00406833738 0.00199138 2 1 0
114 2 2 0.0060782606764628114 0.9939402 0.00406833738 0.00199138 2 1 0
115 2 2 0.0027975792788985728 0.99720633 0.00140296342 0.00139077951 2 0 1
116 2 2 0.00090502910908000144 0.9990954 0.0005804024 0.000324115856 2 0 1
117 2 2 0.004610062044209896 0.995400548 0.00268544327 0.0019138573 2 1 0
118 2 2 0.0046234153867987507 0.995387256 0.00270372839 0.00190896646 2 1 0
119 2 1 0.73304581395521817 0.5118649 0.480443418 0.00769158732 1 2 0
120 2 2 0.004610062044209896 0.995400548 0.00268544327 0.0019138573 2 1 0
121 2 2 0.069479934485845732 0.932878852 0.06260931 0.0045117354 2 1 0
122 2 2 0.0046234153867987507 0.995387256 0.00270372839 0.00190896646 2 1 0
123 2 2 0.061853403505252154 0.9400207 0.0568036325 0.00317562232 2 1 0
124 2 2 0.004610062044209896 0.995400548 0.00268544327 0.0019138573 2 1 0
125 2 2 0.00090502910908000144 0.9990954 0.0005804024 0.000324115856 2 0 1
126 2 2 0.25540431923984186 0.774603248 0.218353689 0.007043035 2 1 0
127 2 2 0.25540431923984186 0.774603248 0.218353689 0.007043035 2 1 0
128 2 2 0.0028037955474232222 0.997200131 0.00140025932 0.00139938819 2 1 0
129 2 2 0.095699813778642617 0.908736765 0.08522418 0.00603903 2 1 0
130 2 2 0.00057171233510822377 0.999428451 0.000422048615 0.0001493216 2 0 1
131 2 2 0.004610062044209896 0.995400548 0.00268544327 0.0019138573 2 1 0
132 2 2 0.0028037955474232222 0.997200131 0.00140025932 0.00139938819 2 1 0
133 2 1 0.73304581395521817 0.5118649 0.480443418 0.00769158732 1 2 0
134 2 2 0.095699813778642617 0.908736765 0.08522418 0.00603903 2 1 0
135 2 2 0.0046234153867987507 0.995387256 0.00270372839 0.00190896646 2 1 0
136 2 2 0.0027975792788985728 0.99720633 0.00140296342 0.00139077951 2 0 1
137 2 2 0.00090502910908000144 0.9990954 0.0005804024 0.000324115856 2 0 1
138 2 2 0.25540431923984186 0.774603248 0.218353689 0.007043035 2 1 0
139 2 2 0.0046234153867987507 0.995387256 0.00270372839 0.00190896646 2 1 0
140 2 2 0.0046234153867987507 0.995387256 0.00270372839 0.00190896646 2 1 0
141 2 2 0.0027107946424502136 0.9972929 0.00169270684 0.00101456023 2 1 0
142 2 2 0.0060782606764628114 0.9939402 0.00406833738 0.00199138 2 1 0
143 2 2 0.004610062044209896 0.995400548 0.00268544327 0.0019138573 2 1 0
144 2 2 0.004610062044209896 0.995400548 0.00268544327 0.0019138573 2 1 0
145 2 2 0.0046234153867987507 0.995387256 0.00270372839 0.00190896646 2 1 0
146 2 2 0.001620293327937558 0.998381 0.000876012957 0.000743192446 2 1 0
147 2 2 0.0046234153867987507 0.995387256 0.00270372839 0.00190896646 2 1 0
148 2 2 0.010240055565253037 0.9898122 0.00643974636 0.00374803343 2 1 0
149 2 2 0.020741951150490413 0.9794717 0.0168389976 0.00368933426 2 1 0


@@ -0,0 +1,4 @@
MulticlassLogisticRegression
Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /l2 /l1 /ot /nt Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings
0.98 0.98 0.048652 95.57148 0.1 0.001 0.001 1 MulticlassLogisticRegression %Data% %Data% %Output% 99 0 0 maml.exe TrainTest test=%Data% tr=MulticlassLogisticRegression{l1=0.001 l2=0.1 ot=1e-3 nt=1} norm=No dout=%Output% loader=Text{col=Label:U4[0-2]:0 col=Features:1-*} data=%Data% out=%Output% seed=1 xf=TreeFeat{lps=0 trainer=ftr{iter=3}} xf=copy{col=Features:Leaves} /l2:0.1;/l1:0.001;/ot:0.001;/nt:1


@@ -0,0 +1,151 @@
Instance Label Assigned Log-loss #1 Score #2 Score #3 Score #1 Class #2 Class #3 Class
0 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
1 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
2 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
3 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
4 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
5 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
6 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
7 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
8 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
9 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
10 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
11 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
12 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
13 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
14 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
15 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
16 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
17 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
18 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
19 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
20 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
21 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
22 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
23 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
24 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
25 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
26 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
27 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
28 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
29 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
30 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
31 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
32 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
33 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
34 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
35 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
36 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
37 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
38 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
39 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
40 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
41 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
42 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
43 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
44 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
45 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
46 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
47 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
48 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
49 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
50 1 1 0.017149938128276376 0.9829963 0.0158224776 0.00118121586 1 2 0
51 1 1 0.034770253463596357 0.9658273 0.03191964 0.002253152 1 2 0
52 1 1 0.46518926341456768 0.628016233 0.3688916 0.00309218233 1 2 0
53 1 1 0.0040569232970457132 0.9959513 0.00308299367 0.0009658426 1 2 0
54 1 1 0.034770253463596357 0.9658273 0.03191964 0.002253152 1 2 0
55 1 1 0.010133053663594078 0.9899181 0.00823902 0.00184285524 1 2 0
56 1 1 0.063370253429356399 0.9385959 0.0599906556 0.00141336839 1 2 0
57 1 1 0.0040569232970457132 0.9959513 0.00308299367 0.0009658426 1 2 0
58 1 1 0.010133053663594078 0.9899181 0.00823902 0.00184285524 1 2 0
59 1 1 0.0040569232970457132 0.9959513 0.00308299367 0.0009658426 1 2 0
60 1 1 0.0040569232970457132 0.9959513 0.00308299367 0.0009658426 1 2 0
61 1 1 0.0029611876049965222 0.9970432 0.00249748817 0.000459320465 1 2 0
62 1 1 0.004469892647378515 0.9955401 0.00340973283 0.00105001475 1 2 0
63 1 1 0.017149938128276376 0.9829963 0.0158224776 0.00118121586 1 2 0
64 1 1 0.0040569232970457132 0.9959513 0.00308299367 0.0009658426 1 2 0
65 1 1 0.00099804121244394858 0.999002457 0.000630193 0.000367255969 1 2 0
66 1 1 0.034770253463596357 0.9658273 0.03191964 0.002253152 1 2 0
67 1 1 0.004469892647378515 0.9955401 0.00340973283 0.00105001475 1 2 0
68 1 1 0.034770253463596357 0.9658273 0.03191964 0.002253152 1 2 0
69 1 1 0.0040569232970457132 0.9959513 0.00308299367 0.0009658426 1 2 0
70 1 2 1.115149236434358 0.6676605 0.327866346 0.004473159 2 1 0
71 1 1 0.004469892647378515 0.9955401 0.00340973283 0.00105001475 1 2 0
72 1 1 0.46518926341456768 0.628016233 0.3688916 0.00309218233 1 2 0
73 1 1 0.017149938128276376 0.9829963 0.0158224776 0.00118121586 1 2 0
74 1 1 0.004469892647378515 0.9955401 0.00340973283 0.00105001475 1 2 0
75 1 1 0.00099804121244394858 0.999002457 0.000630193 0.000367255969 1 2 0
76 1 1 0.017149938128276376 0.9829963 0.0158224776 0.00118121586 1 2 0
77 1 1 0.36444069762825837 0.694585 0.301859677 0.00355520425 1 2 0
78 1 1 0.034770253463596357 0.9658273 0.03191964 0.002253152 1 2 0
79 1 1 0.004469892647378515 0.9955401 0.00340973283 0.00105001475 1 2 0
80 1 1 0.0040569232970457132 0.9959513 0.00308299367 0.0009658426 1 2 0
81 1 1 0.0040569232970457132 0.9959513 0.00308299367 0.0009658426 1 2 0
82 1 1 0.004469892647378515 0.9955401 0.00340973283 0.00105001475 1 2 0
83 1 1 0.46518926341456768 0.628016233 0.3688916 0.00309218233 1 2 0
84 1 1 0.034770253463596357 0.9658273 0.03191964 0.002253152 1 2 0
85 1 1 0.034770253463596357 0.9658273 0.03191964 0.002253152 1 2 0
86 1 1 0.063370253429356399 0.9385959 0.0599906556 0.00141336839 1 2 0
87 1 1 0.00099804121244394858 0.999002457 0.000630193 0.000367255969 1 2 0
88 1 1 0.0040569232970457132 0.9959513 0.00308299367 0.0009658426 1 2 0
89 1 1 0.0040569232970457132 0.9959513 0.00308299367 0.0009658426 1 2 0
90 1 1 0.00099804121244394858 0.999002457 0.000630193 0.000367255969 1 2 0
91 1 1 0.010133053663594078 0.9899181 0.00823902 0.00184285524 1 2 0
92 1 1 0.004469892647378515 0.9955401 0.00340973283 0.00105001475 1 2 0
93 1 1 0.0040569232970457132 0.9959513 0.00308299367 0.0009658426 1 2 0
94 1 1 0.0040569232970457132 0.9959513 0.00308299367 0.0009658426 1 2 0
95 1 1 0.004469892647378515 0.9955401 0.00340973283 0.00105001475 1 2 0
96 1 1 0.004469892647378515 0.9955401 0.00340973283 0.00105001475 1 2 0
97 1 1 0.004469892647378515 0.9955401 0.00340973283 0.00105001475 1 2 0
98 1 1 0.0040569232970457132 0.9959513 0.00308299367 0.0009658426 1 2 0
99 1 1 0.004469892647378515 0.9955401 0.00340973283 0.00105001475 1 2 0
100 2 2 0.002694119944734754 0.9973095 0.00185667316 0.000833787955 2 1 0
101 2 2 0.015195136716169138 0.9849197 0.0135209756 0.00155933737 2 1 0
102 2 2 0.0027241824146091785 0.9972795 0.001886568 0.000833805068 2 1 0
103 2 2 0.00033902907870159203 0.999661 0.000212118714 0.000126917963 2 0 1
104 2 2 0.0027241824146091785 0.9972795 0.001886568 0.000833805068 2 1 0
105 2 2 0.0036106245223803345 0.9963959 0.00253308774 0.00107112422 2 1 0
106 2 2 0.44574496282930443 0.640347064 0.354238272 0.005414769 2 1 0
107 2 2 0.00044343765178377152 0.999556661 0.000272705773 0.00017054558 2 0 1
108 2 2 0.00033902907870159203 0.999661 0.000212118714 0.000126917963 2 0 1
109 2 2 0.0035705457622275658 0.9964358 0.002492974 0.00107111351 2 1 0
110 2 2 0.0017090133953465831 0.998292446 0.00131496089 0.000392403 2 1 0
111 2 2 0.00020553793666013725 0.9997945 0.000142994817 6.261714E-05 2 0 1
112 2 2 0.0027241824146091785 0.9972795 0.001886568 0.000833805068 2 1 0
113 2 2 0.054315558430791992 0.9471332 0.05060918 0.00225768937 2 1 0
114 2 2 0.015195136716169138 0.9849197 0.0135209756 0.00155933737 2 1 0
115 2 2 0.0014806020430615894 0.9985205 0.0009170116 0.0005626859 2 1 0
116 2 2 0.00033902907870159203 0.999661 0.000212118714 0.000126917963 2 0 1
117 2 2 0.0035705457622275658 0.9964358 0.002492974 0.00107111351 2 1 0
118 2 2 0.0036106245223803345 0.9963959 0.00253308774 0.00107112422 2 1 0
119 2 1 0.99725245372779048 0.628016233 0.3688916 0.00309218233 1 2 0
120 2 2 0.002694119944734754 0.9973095 0.00185667316 0.000833787955 2 1 0
121 2 2 0.054315558430791992 0.9471332 0.05060918 0.00225768937 2 1 0
122 2 2 0.0036106245223803345 0.9963959 0.00253308774 0.00107112422 2 1 0
123 2 2 0.0057025115683581925 0.9943137 0.005097879 0.000588452967 2 1 0
124 2 2 0.002694119944734754 0.9973095 0.00185667316 0.000833787955 2 1 0
125 2 2 0.00044343765178377152 0.999556661 0.000272705773 0.00017054558 2 0 1
126 2 2 0.40397550595071413 0.6676605 0.327866346 0.004473159 2 1 0
127 2 2 0.054315558430791992 0.9471332 0.05060918 0.00225768937 2 1 0
128 2 2 0.0027241824146091785 0.9972795 0.001886568 0.000833805068 2 1 0
129 2 2 0.080654836005556188 0.922512054 0.0746743754 0.002813671 2 1 0
130 2 2 0.00044343765178377152 0.999556661 0.000272705773 0.00017054558 2 0 1
131 2 2 0.0035705457622275658 0.9964358 0.002492974 0.00107111351 2 1 0
132 2 2 0.0027241824146091785 0.9972795 0.001886568 0.000833805068 2 1 0
133 2 1 0.99725245372779048 0.628016233 0.3688916 0.00309218233 1 2 0
134 2 2 0.080654836005556188 0.922512054 0.0746743754 0.002813671 2 1 0
135 2 2 0.0036106245223803345 0.9963959 0.00253308774 0.00107112422 2 1 0
136 2 2 0.002694119944734754 0.9973095 0.00185667316 0.000833787955 2 1 0
137 2 2 0.00033902907870159203 0.999661 0.000212118714 0.000126917963 2 0 1
138 2 2 0.40397550595071413 0.6676605 0.327866346 0.004473159 2 1 0
139 2 2 0.001495823863942061 0.9985053 0.000931790448 0.00056270574 2 1 0
140 2 2 0.0027241824146091785 0.9972795 0.001886568 0.000833805068 2 1 0
141 2 2 0.0017090133953465831 0.998292446 0.00131496089 0.000392403 2 1 0
142 2 2 0.015195136716169138 0.9849197 0.0135209756 0.00155933737 2 1 0
143 2 2 0.002694119944734754 0.9973095 0.00185667316 0.000833787955 2 1 0
144 2 2 0.002694119944734754 0.9973095 0.00185667316 0.000833787955 2 1 0
145 2 2 0.001495823863942061 0.9985053 0.000931790448 0.00056270574 2 1 0
146 2 2 0.0057025115683581925 0.9943137 0.005097879 0.000588452967 2 1 0
147 2 2 0.001495823863942061 0.9985053 0.000931790448 0.00056270574 2 1 0
148 2 2 0.011771430559717877 0.9882976 0.009459271 0.00224316632 2 1 0
149 2 2 0.015195136716169138 0.9849197 0.0135209756 0.00155933737 2 1 0
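
Note on reading these per-instance baselines: the Log-loss column is the negative natural log of the probability the model assigned to the true label. For correctly classified rows that is the #1 Score; for misclassified rows it is whichever ranked score belongs to the true class. A minimal check in Python against instance 119 above (label 2, assigned 1, so the true class sits at rank #2):

import math

# Instance 119 above: the true class (2) is ranked #2, so its probability
# is the "#2 Score" column.
scores = {1: 0.628016233, 2: 0.3688916, 0: 0.00309218233}  # class -> probability
print(-math.log(scores[2]))  # -> 0.99725..., matching the Log-loss column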

@@ -0,0 +1,151 @@
Instance Label Assigned Log-loss #1 Score #2 Score #3 Score #1 Class #2 Class #3 Class
0 0 0 0.0011081873220258624 0.9988924 0.00110751833 1.39210543E-13 0 1 2
1 0 0 0.0070400107011273196 0.9929847 0.00701517845 6.033473E-12 0 1 2
2 0 0 0.0022964599964677033 0.9977062 0.00229415065 1.31948857E-12 0 1 2
3 0 0 0.0071621105155360203 0.9928635 0.00713652233 1.568493E-11 0 1 2
4 0 0 0.00076675000483667258 0.999233544 0.0007660015 9.598792E-14 0 1 2
5 0 0 0.00070191216101047108 0.999298334 0.000701762037 1.767304E-13 0 1 2
6 0 0 0.0015125382870987553 0.9984886 0.001511353 1.99488364E-12 0 1 2
7 0 0 0.0023565021891202727 0.9976463 0.002354226 7.60859745E-13 0 1 2
8 0 0 0.010224338755429094 0.989827752 0.0101724006 4.50257E-11 0 1 2
9 0 0 0.007608435929714304 0.992420435 0.0075791534 3.604188E-12 0 1 2
10 0 0 0.00077438528059811521 0.9992259 0.0007741245 3.67899477E-14 0 1 2
11 0 0 0.0034665877492082151 0.9965394 0.003460485 2.86678125E-12 0 1 2
12 0 0 0.0075073002188368347 0.9925208 0.00747876149 4.1399926E-12 0 1 2
13 0 0 0.0023870324875610912 0.9976158 0.002384095 1.57063577E-12 0 1 2
14 0 0 8.0111851446806079E-05 0.9999199 8.027427E-05 1.70893926E-16 0 1 2
15 0 0 5.149973917474053E-05 0.9999485 5.10407663E-05 6.476145E-16 0 1 2
16 0 0 0.0001506918954455657 0.9998493 0.000150896725 7.78926E-15 0 1 2
17 0 0 0.0010404633316893413 0.9989601 0.00103944924 2.66592168E-13 0 1 2
18 0 0 0.0010795457796386584 0.998921037 0.00107904861 7.74551457E-14 0 1 2
19 0 0 0.00050354758858267578 0.9994966 0.000503402262 8.393544E-14 0 1 2
20 0 0 0.0050583048608438558 0.994954467 0.005045267 1.21072217E-12 0 1 2
21 0 0 0.00068377979041008637 0.999316454 0.0006838254 3.0647487E-13 0 1 2
22 0 0 0.00016499926836891572 0.999835 0.000165351419 1.263396E-14 0 1 2
23 0 0 0.0060672865852426372 0.9939511 0.00604853174 3.67925274E-11 0 1 2
24 0 0 0.010941481475240914 0.989118159 0.0108818505 2.95929635E-11 0 1 2
25 0 0 0.015109992553949719 0.9850036 0.0149965808 2.16925887E-11 0 1 2
26 0 0 0.0030479341887764532 0.9969567 0.00304320036 6.085656E-12 0 1 2
27 0 0 0.001626024669168138 0.9983753 0.00162422692 2.31037764E-13 0 1 2
28 0 0 0.001602144297411704 0.998399138 0.00160105072 2.01864784E-13 0 1 2
29 0 0 0.0072584081998492492 0.9927679 0.0072323475 1.36550138E-11 0 1 2
30 0 0 0.010481860611279436 0.9895729 0.0104267541 1.97468066E-11 0 1 2
31 0 0 0.002068511721101683 0.9979336 0.00206610747 9.345534E-13 0 1 2
32 0 0 0.00018789149083271798 0.9998121 0.000188248741 2.51111472E-15 0 1 2
33 0 0 8.2973107700441223E-05 0.99991703 8.28565E-05 5.085376E-16 0 1 2
34 0 0 0.007608435929714304 0.992420435 0.0075791534 3.604188E-12 0 1 2
35 0 0 0.0015602356235697344 0.998441 0.00155867427 2.66303E-13 0 1 2
36 0 0 0.00075147962820386978 0.9992488 0.000751453335 2.136476E-14 0 1 2
37 0 0 0.007608435929714304 0.992420435 0.0075791534 3.604188E-12 0 1 2
38 0 0 0.0048217604636851983 0.995189846 0.004810019 1.08745435E-11 0 1 2
39 0 0 0.002354650087500236 0.9976481 0.002351973 5.78798457E-13 0 1 2
40 0 0 0.00070859259095342364 0.999291658 0.000708641659 1.60603938E-13 0 1 2
41 0 0 0.05864808223146472 0.9430386 0.0569613 1.37746259E-09 0 1 2
42 0 0 0.0023031510872087162 0.9976995 0.00230074348 2.997355E-12 0 1 2
43 0 0 0.0018548872509052583 0.998146832 0.00185356825 1.17147351E-11 0 1 2
44 0 0 0.0021972937219044641 0.9978051 0.00219457783 3.64257843E-12 0 1 2
45 0 0 0.0066146987210017709 0.99340713 0.00659268349 1.51941237E-11 0 1 2
46 0 0 0.00078774715346045169 0.999212563 0.0007876004 9.565127E-14 0 1 2
47 0 0 0.0033750801452213284 0.9966306 0.00336904428 3.782066E-12 0 1 2
48 0 0 0.00077533969416734708 0.999224961 0.000774866843 4.83624235E-14 0 1 2
49 0 0 0.0023231648847811732 0.997679532 0.0023206654 6.64796668E-13 0 1 2
50 1 1 0.0060152962161020033 0.994002759 0.00400032569 0.00199713139 1 2 0
51 1 1 0.023390285894295822 0.976881146 0.0186334234 0.004485526 1 2 0
52 1 1 0.031560872878237577 0.968932 0.0304043666 0.0006636082 1 2 0
53 1 1 0.084995522035361218 0.9185164 0.08058272 0.000900880957 1 2 0
54 1 1 0.063648693965999981 0.9383346 0.06099709 0.0006684431 1 2 0
55 1 1 0.089732440645847733 0.914175749 0.0849883854 0.000835962768 1 2 0
56 1 1 0.085243960631224261 0.918288231 0.0786991939 0.00301247532 1 2 0
57 1 1 0.019522210073795572 0.9806671 0.0168701168 0.00246294332 1 0 2
58 1 1 0.0098853132401056608 0.9901634 0.008936252 0.000900245446 1 2 0
59 1 1 0.092869061379903875 0.9113128 0.082560055 0.00612714142 1 2 0
60 1 1 0.014433393557966039 0.985670269 0.0125383837 0.001791122 1 2 0
61 1 1 0.045261412953232133 0.9557476 0.0376504771 0.006601967 1 2 0
62 1 1 0.0040368747706341756 0.995971262 0.00346833514 0.0005603906 1 2 0
63 1 1 0.10217425852556423 0.9028722 0.09653503 0.000592705 1 2 0
64 1 1 0.043710241505504309 0.9572313 0.0402606353 0.00250799 1 0 2
65 1 1 0.0080103773041286016 0.9920216 0.004349263 0.00362897874 1 0 2
66 1 1 0.25858017840179764 0.7721471 0.22617422 0.00167868065 1 2 0
67 1 1 0.0046706026176554045 0.9953403 0.00242033927 0.002239571 1 0 2
68 1 1 0.41731980067242436 0.6588102 0.341115355 7.465122E-05 1 2 0
69 1 1 0.0088439096717521076 0.9911951 0.00616651075 0.00263821287 1 2 0
70 1 2 1.1248658137051197 0.674737453 0.324696034 0.0005666125 2 1 0
71 1 1 0.010528060157971919 0.989527166 0.00620779675 0.00426507555 1 0 2
72 1 1 0.61055850847847093 0.5430475 0.45691213 4.016438E-05 1 2 0
73 1 1 0.033657066868998158 0.966903031 0.03271102 0.000386148255 1 2 0
74 1 1 0.0075878956642768964 0.9924408 0.004705976 0.0028532357 1 2 0
75 1 1 0.0093107195215621132 0.9907325 0.00627015159 0.00299742026 1 2 0
76 1 1 0.030895247221220504 0.969577134 0.0301216021 0.0003013593 1 2 0
77 1 1 0.36569263913806865 0.693716 0.306031 0.000253220467 1 2 0
78 1 1 0.12329888682954605 0.8839994 0.114668027 0.00133259967 1 2 0
79 1 1 0.017058382448049515 0.9830863 0.01655834 0.000355129 1 0 2
80 1 1 0.0098905503734397458 0.9901582 0.00717079034 0.002671104 1 2 0
81 1 1 0.0060877956403809913 0.9939307 0.00369574339 0.00237366813 1 0 2
82 1 1 0.010145216499399137 0.9899061 0.00589559553 0.004198351 1 0 2
83 1 2 1.7833237210655208 0.8319088 0.168078572 1.28333786E-05 2 1 0
84 1 1 0.41036916458021622 0.6634053 0.335155129 0.00143950759 1 2 0
85 1 1 0.074115509877208191 0.9285644 0.0619499721 0.009485668 1 2 0
86 1 1 0.025645754545416 0.9746803 0.0238822829 0.00143721956 1 2 0
87 1 1 0.047437678245327429 0.9536699 0.0461276025 0.000202563053 1 2 0
88 1 1 0.023062687682211796 0.9772012 0.0140921762 0.008706564 1 2 0
89 1 1 0.051273104302225654 0.9500192 0.0480280071 0.00195292477 1 2 0
90 1 1 0.088339816972695842 0.915449739 0.08400096 0.0005494726 1 2 0
91 1 1 0.054495307349226502 0.946962953 0.0517152026 0.00132180948 1 2 0
92 1 1 0.011043085652212155 0.989017665 0.00821223 0.00277022063 1 2 0
93 1 1 0.014310342276657975 0.985791564 0.0117249712 0.00248343241 1 0 2
94 1 1 0.049737456333124833 0.9514792 0.04661856 0.00190224242 1 2 0
95 1 1 0.0136037104159475 0.9864884 0.007890029 0.00562156225 1 2 0
96 1 1 0.025367974956993136 0.9749511 0.02095981 0.004089204 1 2 0
97 1 1 0.010987280419450679 0.989072859 0.008089061 0.002838109 1 2 0
98 1 1 0.081510331556232662 0.9217232 0.07764353 0.000633295451 1 0 2
99 1 1 0.023043169388812675 0.9772203 0.01862162 0.00415814156 1 2 0
100 2 2 0.00011063234023480313 0.9998894 0.000110273286 4.32938174E-09 2 1 0
101 2 2 0.013696642277261918 0.98639673 0.0136018591 1.25408337E-06 2 1 0
102 2 2 0.010937685077955646 0.9891219 0.0108777983 1.61583827E-07 2 1 0
103 2 2 0.025969000497816655 0.9743653 0.0256338213 6.83490839E-07 2 1 0
104 2 2 0.0015602356235697344 0.998441 0.001558811 3.603013E-08 2 1 0
105 2 2 0.0026788798728120773 0.9973247 0.00267550955 2.70883938E-09 2 1 0
106 2 2 0.030226255425741325 0.970226 0.02976235 1.14747845E-05 2 1 0
107 2 2 0.024762178100796752 0.9755419 0.0244582947 4.46629365E-08 2 1 0
108 2 2 0.011696829349738497 0.9883713 0.0116284136 3.284619E-08 2 1 0
109 2 2 0.0019693083254810079 0.9980326 0.00196759985 1.60975972E-07 2 1 0
110 2 2 0.16616345155985321 0.8469078 0.152995974 9.62346458E-05 2 1 0
111 2 2 0.031529930899520178 0.968961954 0.0310366023 1.33421645E-06 2 1 0
112 2 2 0.023412251624584629 0.9768597 0.023138877 1.59477213E-06 2 1 0
113 2 2 0.0043978696251254648 0.9956118 0.004387678 3.01778442E-07 2 1 0
114 2 2 0.00051404334489802697 0.9994861 0.000513345643 9.412643E-08 2 1 0
115 2 2 0.0073065605196956278 0.992720068 0.0072776177 2.56496514E-06 2 1 0
116 2 2 0.085114022402325365 0.918407559 0.08158773 4.63447259E-06 2 1 0
117 2 2 0.010481860611279436 0.9895729 0.010426864 1.47937257E-07 2 1 0
118 2 2 8.5834372140887149E-05 0.999914169 8.559229E-05 7.07661924E-12 2 1 0
119 2 2 0.14361526314671486 0.866220951 0.133777007 2.21380515E-06 2 1 0
120 2 2 0.0058536456241034358 0.994163454 0.00583615573 4.441736E-07 2 1 0
121 2 2 0.011340123133497897 0.988723934 0.0112725906 3.45189E-06 2 1 0
122 2 2 0.0027837721283902814 0.9972201 0.00277955178 8.58777549E-10 2 1 0
123 2 2 0.21739579576290038 0.804611444 0.195352 3.663746E-05 2 1 0
124 2 2 0.018497017687550327 0.981673 0.018325327 1.77490767E-06 2 1 0
125 2 2 0.13388922074520956 0.874686956 0.125310808 2.1976773E-06 2 1 0
126 2 2 0.30901959520667954 0.7341664 0.2657276 0.000105864478 2 1 0
127 2 2 0.2791385441945381 0.7564351 0.243426546 0.000138261472 2 1 0
128 2 2 0.0030822522819900344 0.9969225 0.00307789678 6.864916E-08 2 1 0
129 2 2 0.56484888647314668 0.56844604 0.431547046 6.86041358E-06 2 1 0
130 2 2 0.02669716058567774 0.973656058 0.0263440143 7.645144E-08 2 1 0
131 2 2 0.22184797501072168 0.801037133 0.198955223 7.891673E-06 2 1 0
132 2 2 0.0015115831696781118 0.998489559 0.00151082047 3.59063E-08 2 1 0
133 2 1 0.80210662927418985 0.551559 0.4483834 5.73823527E-05 1 2 0
134 2 2 0.10944270798526276 0.8963335 0.103665754 7.052652E-07 2 1 0
135 2 2 0.0061168808403790232 0.9939018 0.00609836355 4.79565649E-08 2 1 0
136 2 2 0.0014457419615165081 0.9985553 0.0014441628 3.58452127E-07 2 1 0
137 2 2 0.085367229331733907 0.918175042 0.08181802 6.72193E-06 2 1 0
138 2 2 0.31077265568961848 0.7328805 0.266897172 0.0002224321 2 1 0
139 2 2 0.059129692998003365 0.9425845 0.0574071258 8.420951E-06 2 1 0
140 2 2 0.0018806248795496621 0.998121142 0.00187838788 1.54243637E-07 2 1 0
141 2 2 0.046927431083058919 0.954156637 0.0458193719 2.41775069E-05 2 1 0
142 2 2 0.013696642277261918 0.98639673 0.0136018591 1.25408337E-06 2 1 0
143 2 2 0.0020218054648021076 0.997980237 0.002019646 7.11884454E-08 2 1 0
144 2 2 0.0010766816705180082 0.9989239 0.001075794 1.343269E-07 2 1 0
145 2 2 0.014124373105892742 0.9859749 0.014021622 3.4730067E-06 2 1 0
146 2 2 0.04510638702287053 0.9558958 0.0441013277 2.863036E-06 2 1 0
147 2 2 0.06771376104309032 0.934527934 0.06545844 1.33754611E-05 2 1 0
148 2 2 0.0049534133678820593 0.995058835 0.00493860524 2.47912658E-06 2 1 0
149 2 2 0.08111477993566267 0.922087848 0.07789185 2.04699681E-05 2 1 0

@@ -0,0 +1,106 @@
maml.exe CV tr=MulticlassLogisticRegression{l1=0.001 l2=0.1 ot=1e-3 nt=1 nn=+} threads=- norm=No dout=%Output% data=%Data% seed=1
Not adding a normalizer.
Beginning optimization
num vars: 15
improvement criterion: Mean Improvement
L1 regularization selected 11 of 15 weights.
Not training a calibrator because it is not needed.
Not adding a normalizer.
Beginning optimization
num vars: 15
improvement criterion: Mean Improvement
L1 regularization selected 11 of 15 weights.
Not training a calibrator because it is not needed.
Confusion table
||========================
PREDICTED || 0 | 1 | 2 | Recall
TRUTH ||========================
0 || 21 | 0 | 0 | 1.0000
1 || 0 | 29 | 1 | 0.9667
2 || 0 | 2 | 26 | 0.9286
||========================
Precision ||1.0000 |0.9355 |0.9630 |
Accuracy(micro-avg): 0.962025
Accuracy(macro-avg): 0.965079
Log-loss: 0.129858
Log-loss reduction: 88.059239
Confusion table
||========================
PREDICTED || 0 | 1 | 2 | Recall
TRUTH ||========================
0 || 29 | 0 | 0 | 1.0000
1 || 0 | 18 | 2 | 0.9000
2 || 0 | 0 | 22 | 1.0000
||========================
Precision ||1.0000 |1.0000 |0.9167 |
Accuracy(micro-avg): 0.971831
Accuracy(macro-avg): 0.966667
Log-loss: 0.125563
Log-loss reduction: 88.434327
OVERALL RESULTS
---------------------------------------
Accuracy(micro-avg): 0.966928 (0.0049)
Accuracy(macro-avg): 0.965873 (0.0008)
Log-loss: 0.127710 (0.0021)
Log-loss reduction: 88.246783 (0.1875)
---------------------------------------
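
For reference, Accuracy(micro-avg) is total correct over total instances within a fold, Accuracy(macro-avg) is the unweighted mean of the per-class recalls, and the parenthesized figures under OVERALL RESULTS are each metric's spread across the two folds (consistent with the population standard deviation, which for two folds is half their absolute difference). A sketch recomputing the summary from the two confusion tables above:

fold1 = [[21, 0, 0], [0, 29, 1], [0, 2, 26]]  # truth x predicted, fold 1
fold2 = [[29, 0, 0], [0, 18, 2], [0, 0, 22]]  # fold 2

def micro(cm):
    return sum(cm[i][i] for i in range(3)) / sum(map(sum, cm))

def macro(cm):
    return sum(cm[i][i] / sum(cm[i]) for i in range(3)) / 3

for metric in (micro, macro):
    a, b = metric(fold1), metric(fold2)
    print(f"{(a + b) / 2:.6f} ({abs(a - b) / 2:.4f})")
# -> 0.966928 (0.0049) and 0.965873 (0.0008), matching the report
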
Warning: There is no NA value for type 'Text'. The missing key value will be mapped to the default value of 'Text'
Warning: There is no NA value for type 'Text'. The missing key value will be mapped to the default value of 'Text'
Warning: There is no NA value for type 'Text'. The missing key value will be mapped to the default value of 'Text'
Warning: There is no NA value for type 'Text'. The missing key value will be mapped to the default value of 'Text'
Physical memory usage(MB): %Number%
Virtual memory usage(MB): %Number%
%DateTime% Time elapsed(s): %Number%
--- Progress log ---
[1] 'LBFGS data prep' started.
[1] 'LBFGS data prep' finished in %Time%.
[2] 'LBFGS Optimizer' started.
[2] (%Time%) 0 iterations Loss: 1.0986123085022
[2] (%Time%) 1 iterations Loss: 1.00646448135376 Improvement: 0.09215
[2] (%Time%) 2 iterations Loss: 0.909583747386932 Improvement: 0.09593
[2] (%Time%) 3 iterations Loss: 0.525106191635132 Improvement: 0.3158
[2] (%Time%) 4 iterations Loss: 0.400520384311676 Improvement: 0.1718
[2] (%Time%) 5 iterations Loss: 0.332601189613342 Improvement: 0.09382
[2] (%Time%) 6 iterations Loss: 0.281388521194458 Improvement: 0.06186
[2] (%Time%) 7 iterations Loss: 0.237996473908424 Improvement: 0.04801
[2] (%Time%) 8 iterations Loss: 0.212298363447189 Improvement: 0.03128
[2] (%Time%) 9 iterations Loss: 0.199792444705963 Improvement: 0.0172
[2] (%Time%) 10 iterations Loss: 0.194789424538612 Improvement: 0.008052
[2] (%Time%) 11 iterations Loss: 0.193230450153351 Improvement: 0.003182
[2] (%Time%) 12 iterations Loss: 0.192447692155838 Improvement: 0.001383
[2] (%Time%) 13 iterations Loss: 0.189304739236832 Improvement: 0.002703
[2] (%Time%) 14 iterations Loss: 0.187662661075592 Improvement: 0.001907
[2] (%Time%) 15 iterations Loss: 0.185374572873116 Improvement: 0.002193
[2] (%Time%) 16 iterations Loss: 0.18364554643631 Improvement: 0.001845
[2] (%Time%) 17 iterations Loss: 0.180794909596443 Improvement: 0.002599
[2] (%Time%) 18 iterations Loss: 0.178908497095108 Improvement: 0.002065
[2] (%Time%) 19 iterations Loss: 0.175620675086975 Improvement: 0.002982
[2] (%Time%) 20 iterations Loss: 0.174758642911911 Improvement: 0.001392
[2] (%Time%) 21 iterations Loss: 0.173962101340294 Improvement: 0.0009454
[2] 'LBFGS Optimizer' finished in %Time%.
[3] 'LBFGS data prep #2' started.
[3] 'LBFGS data prep #2' finished in %Time%.
[4] 'LBFGS Optimizer #2' started.
[4] (%Time%) 0 iterations Loss: 1.0986123085022
[4] (%Time%) 1 iterations Loss: 1.05856335163116 Improvement: 0.04005
[4] (%Time%) 2 iterations Loss: 1.00281620025635 Improvement: 0.05261
[4] (%Time%) 3 iterations Loss: 0.97780430316925 Improvement: 0.03158
[4] (%Time%) 4 iterations Loss: 0.752716302871704 Improvement: 0.1773
[4] (%Time%) 5 iterations Loss: 0.542387366294861 Improvement: 0.2021
[4] (%Time%) 6 iterations Loss: 0.443084180355072 Improvement: 0.125
[4] (%Time%) 7 iterations Loss: 0.343867212533951 Improvement: 0.1057
[4] (%Time%) 8 iterations Loss: 0.284590691328049 Improvement: 0.07087
[4] (%Time%) 9 iterations Loss: 0.254261910915375 Improvement: 0.04046
[4] (%Time%) 10 iterations Loss: 0.224356189370155 Improvement: 0.03255
[4] (%Time%) 11 iterations Loss: 0.215291574597359 Improvement: 0.01493
[4] (%Time%) 12 iterations Loss: 0.212821274995804 Improvement: 0.005586
[4] (%Time%) 13 iterations Loss: 0.212086588144302 Improvement: 0.001948
[4] (%Time%) 14 iterations Loss: 0.21061946451664 Improvement: 0.001587
[4] (%Time%) 15 iterations Loss: 0.209799557924271 Improvement: 0.001012
[4] (%Time%) 16 iterations Loss: 0.209267094731331 Improvement: 0.0006523
[4] 'LBFGS Optimizer #2' finished in %Time%.
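
Two details of this progress log are worth noting. The starting Loss 1.0986123 is ln 3, the multiclass log-loss of a uniform three-class predictor, and each optimizer run halts on the first iteration whose reported Improvement (the smoothed "Mean Improvement" criterion) falls below the ot=1e-3 tolerance from the command line. A quick check against the tails of the two logs above:

# Improvements for iterations 12-21 of the first run and 12-16 of the second,
# copied from the log above.
tails = {
    "LBFGS Optimizer":    [0.001383, 0.002703, 0.001907, 0.002193, 0.001845,
                           0.002599, 0.002065, 0.002982, 0.001392, 0.0009454],
    "LBFGS Optimizer #2": [0.005586, 0.001948, 0.001587, 0.001012, 0.0006523],
}
for name, improvements in tails.items():
    assert all(i >= 1e-3 for i in improvements[:-1])
    assert improvements[-1] < 1e-3  # the run stops exactly here
    print(name, "stopped at improvement", improvements[-1])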

@@ -0,0 +1,4 @@
MulticlassLogisticRegression
Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /l2 /l1 /ot /nt /nn Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings
0.966928 0.965873 0.12771 88.24678 0.1 0.001 0.001 1 + MulticlassLogisticRegression %Data% %Output% 99 0 0 maml.exe CV tr=MulticlassLogisticRegression{l1=0.001 l2=0.1 ot=1e-3 nt=1 nn=+} threads=- norm=No dout=%Output% data=%Data% seed=1 /l2:0.1;/l1:0.001;/ot:0.001;/nt:1;/nn:+

@@ -0,0 +1,151 @@
Instance Label Assigned Log-loss #1 Score #2 Score #3 Score #1 Class #2 Class #3 Class
5 0 0 0.0035057053190431496 0.996500432 0.00349906948 3.13832377E-10 0 1 2
6 0 0 0.0051384036034053185 0.9948748 0.00512487441 2.22647167E-09 0 1 2
8 0 0 0.023597698228469945 0.97667855 0.0233211145 2.72825922E-08 0 1 2
9 0 0 0.022962599466045772 0.977299035 0.0227003787 4.499484E-09 0 1 2
10 0 0 0.0046729979679451441 0.9953379 0.004661741 1.26637076E-10 0 1 2
11 0 0 0.0091857704417590124 0.9908563 0.009143643 3.08492987E-09 0 1 2
18 0 0 0.0065021446457294635 0.993518949 0.00648188451 1.99731842E-10 0 1 2
20 0 0 0.019374830182594178 0.980811656 0.0191886947 1.76858894E-09 0 1 2
21 0 0 0.0035476956060140506 0.9964586 0.00354147726 5.015228E-10 0 1 2
25 0 0 0.042900092641784626 0.9580071 0.04199198 1.61448348E-08 0 1 2
28 0 0 0.0089054287695274903 0.9911341 0.008866515 4.88396434E-10 0 1 2
31 0 0 0.013042435754463243 0.987042248 0.0129569778 1.33878E-09 0 1 2
32 0 0 0.00098419922257654145 0.9990163 0.0009850509 1.554676E-11 0 1 2
35 0 0 0.0098781498492706357 0.9901705 0.009829475 6.490435E-10 0 1 2
37 0 0 0.022962599466045772 0.977299035 0.0227003787 4.499484E-09 0 1 2
40 0 0 0.0043144180137515368 0.9956949 0.004305292 3.673708E-10 0 1 2
41 0 0 0.15568350462586372 0.85583 0.144169211 3.76882554E-07 0 1 2
44 0 0 0.0055828075458514027 0.994432747 0.005567818 2.75362888E-09 0 1 2
45 0 0 0.024173723400995727 0.9761161 0.02388395 1.18244365E-08 0 1 2
46 0 0 0.0030727459326603834 0.99693197 0.0030678818 2.29107275E-10 0 1 2
48 0 0 0.0042629377095657972 0.995746136 0.004253787 1.522038E-10 0 1 2
50 1 1 0.024013506746705774 0.9762725 0.0144919865 0.009234835 1 0 2
51 1 1 0.072395347498337356 0.9301631 0.035736762 0.03410162 1 0 2
52 1 1 0.051792165373778974 0.9495262 0.0431178622 0.00735404948 1 2 0
54 1 1 0.082283007087723056 0.921011269 0.07229979 0.006690142 1 2 0
56 1 1 0.15273669550977986 0.8583557 0.1074395 0.03420409 1 2 0
60 1 1 0.063400291298347761 0.9385677 0.045856636 0.0155749973 1 2 0
63 1 1 0.1825828261589445 0.833115637 0.156787589 0.010097893 1 2 0
64 1 1 0.16742896518424877 0.8458367 0.145771012 0.008392983 1 0 2
66 1 1 0.41035083606201167 0.663417459 0.309818625 0.02676427 1 2 0
68 1 1 0.30088424491217947 0.740163445 0.258734822 0.00110106682 1 2 0
69 1 1 0.04777336113279694 0.9533498 0.0237986427 0.02285125 1 2 0
70 1 2 0.91118618706322396 0.5845066 0.402047038 0.0134477587 2 1 0
71 1 1 0.045959452018421797 0.9550807 0.0331809036 0.0117382742 1 0 2
72 1 1 0.50734715339493097 0.6020907 0.396878272 0.00103087048 1 2 0
73 1 1 0.10112764949378679 0.903817654 0.08841138 0.00776978023 1 2 0
74 1 1 0.033042902390674173 0.967497051 0.0197654 0.01273903 1 0 2
76 1 1 0.048774702844104172 0.9523957 0.04408047 0.00352243264 1 2 0
77 1 1 0.2695808504765218 0.763699532 0.232171819 0.0041304836 1 2 0
79 1 1 0.068614938770302072 0.933686137 0.06392762 0.00238591246 1 0 2
82 1 1 0.053632731586794685 0.947780132 0.03757136 0.0146492831 1 0 2
88 1 1 0.13018795642085101 0.8779304 0.07867057 0.0433993 1 0 2
90 1 1 0.23196605346812185 0.792973042 0.196191877 0.0108351735 1 2 0
91 1 1 0.12409213246584563 0.883298457 0.098272115 0.01842791 1 2 0
92 1 1 0.048004153781509205 0.9531298 0.02491984 0.0219512414 1 2 0
93 1 1 0.080398556071786337 0.9227485 0.06396286 0.0132876914 1 0 2
95 1 1 0.097486524415444684 0.907114565 0.06073187 0.0321540236 1 0 2
96 1 1 0.10523226793183746 0.90011543 0.0571433567 0.0427421071 1 2 0
97 1 1 0.046310433607912968 0.954745531 0.0234447 0.02180964 1 0 2
98 1 1 0.20922658413289458 0.8112114 0.185506433 0.00328178448 1 0 2
99 1 1 0.08973693948509831 0.914171636 0.0482413843 0.0375866257 1 2 0
100 2 2 0.0019302508084448155 0.9980716 0.00192404329 5.731013E-06 2 1 0
102 2 2 0.074935939071374899 0.9278029 0.07215074 4.48439678E-05 2 1 0
104 2 2 0.013984677144163373 0.986112654 0.0138699533 1.8276387E-05 2 1 0
105 2 2 0.025325791981913353 0.9749922 0.0250045415 2.42776559E-06 2 1 0
106 2 2 0.057338702586033331 0.9442742 0.05496859 0.0007578 2 1 0
108 2 2 0.05276486712560681 0.948603034 0.05138688 1.01106552E-05 2 1 0
109 2 2 0.026607542399492295 0.9737433 0.0261797551 7.822918E-05 2 1 0
111 2 2 0.12270355663525476 0.884525836 0.115313262 0.000161169621 2 1 0
112 2 2 0.13206483189725685 0.8762842 0.123490989 0.0002246642 2 1 0
113 2 2 0.025638599667597892 0.9746873 0.0252530053 6.150582E-05 2 1 0
115 2 2 0.057498919511760234 0.9441229 0.0554439 0.00043176004 2 1 0
117 2 2 0.05953979497139298 0.942198038 0.05773727 6.540683E-05 2 1 0
120 2 2 0.054813408680433579 0.9466618 0.0532217249 0.000117798671 2 1 0
121 2 2 0.046131151077141996 0.9549167 0.04467517 0.000408185384 2 1 0
122 2 2 0.02511979345068182 0.9751931 0.0248055067 9.05202E-07 2 1 0
123 2 2 0.48315052810711973 0.616836965 0.381864876 0.00129971188 2 1 0
125 2 2 0.29876712456560445 0.7417321 0.2580291 0.0002381928 2 1 0
128 2 2 0.023098004562619914 0.9771667 0.0228099246 2.47050866E-05 2 1 0
129 2 1 0.76517106250260403 0.534373939 0.465254337 0.000369829067 1 2 0
131 2 2 0.53372197087197515 0.5864183 0.412872344 0.0007079753 2 1 0
132 2 2 0.015354128200897153 0.984763145 0.0152192824 1.64837747E-05 2 1 0
133 2 1 0.80958176603331877 0.55317384 0.44504416 0.001781164 1 2 0
137 2 2 0.1756400740881939 0.8389199 0.160485491 0.000594753 2 1 0
138 2 2 0.50333783990086933 0.604509532 0.38951236 0.00597788766 2 1 0
141 2 2 0.33763120502763588 0.713458359 0.285059243 0.00148137042 2 1 0
144 2 2 0.016765581813362788 0.9833742 0.01656379 6.213109E-05 2 1 0
145 2 2 0.12807846333012343 0.879784346 0.119780183 0.000434682646 2 1 0
147 2 2 0.24026111051541665 0.7864225 0.212648481 0.000927580346 2 1 0
0 0 0 0.0062236937498732119 0.993795633 0.00620483747 7.944746E-09 0 1 2
1 0 0 0.029022202834280027 0.9713949 0.0286041629 8.4053454E-08 0 1 2
2 0 0 0.010818497584619778 0.9892398 0.0107597355 3.879258E-08 0 1 2
3 0 0 0.029783231758986596 0.9706559 0.0293443277 3.03772538E-07 0 1 2
4 0 0 0.0045280297264245848 0.9954822 0.00451772846 7.630325E-09 0 1 2
7 0 0 0.012060178236157727 0.988012254 0.0119871311 3.107532E-08 0 1 2
12 0 0 0.027177832295976975 0.973188162 0.0268125031 7.716931E-08 0 1 2
13 0 0 0.0090827909291773482 0.990958333 0.009040892 4.44001422E-08 0 1 2
14 0 0 0.0006675563690858086 0.999332666 0.000667317654 3.80369833E-11 0 1 2
15 0 0 0.00062562720151946953 0.999374568 0.000626611931 2.342793E-10 0 1 2
16 0 0 0.0014362511566290306 0.9985648 0.00143428252 8.3973567E-10 0 1 2
17 0 0 0.0066089387166241515 0.993412852 0.00658752676 1.18178756E-08 0 1 2
19 0 0 0.0037040681088442817 0.9963028 0.00369813736 8.356012E-09 0 1 2
22 0 0 0.0010566928040176419 0.998943865 0.00105617661 1.56404645E-09 0 1 2
23 0 0 0.040098206611993717 0.9606951 0.03930444 4.272916E-07 0 1 2
24 0 0 0.048931112264951795 0.9522467 0.0477520749 9.429691E-07 0 1 2
26 0 0 0.019386315917387653 0.9808004 0.0191995315 1.38564857E-07 0 1 2
27 0 0 0.0089473457087430909 0.991092563 0.008906771 1.17854837E-08 0 1 2
29 0 0 0.031331998705965494 0.969153762 0.0308468789 3.17202932E-07 0 1 2
30 0 0 0.042858096825121733 0.95804733 0.0419520326 3.27043E-07 0 1 2
33 0 0 0.00071718178074236128 0.9992831 0.000716740265 1.96193978E-10 0 1 2
34 0 0 0.028594984112412413 0.97181 0.0281891245 8.059172E-08 0 1 2
36 0 0 0.0044670188114691546 0.995542943 0.004456434 1.134523E-09 0 1 2
38 0 0 0.019752530847094055 0.9804413 0.0195590239 1.97238663E-07 0 1 2
39 0 0 0.012134565229001918 0.987938762 0.0120618762 2.27859385E-08 0 1 2
42 0 0 0.010616309031417871 0.989439845 0.0105606038 9.83971E-08 0 1 2
43 0 0 0.016023658570775534 0.984104037 0.0158948544 2.164595E-07 0 1 2
47 0 0 0.015340812375050044 0.984776258 0.015224508 1.068231E-07 0 1 2
49 0 0 0.011529916484198184 0.9885363 0.0114635359 2.180063E-08 0 1 2
53 1 1 0.18975446513423647 0.8271622 0.172696114 0.0001419994 1 2 0
55 1 1 0.42001727411790701 0.65703547 0.342875183 8.88562E-05 1 2 0
57 1 1 0.044797903294000629 0.9561907 0.0403893776 0.003419859 1 2 0
58 1 1 0.0405044862851279 0.960304856 0.0395791 0.000117295334 1 2 0
59 1 1 0.37561798710727545 0.6868647 0.312571555 0.000565329567 1 2 0
61 1 1 0.16285702211594277 0.84971267 0.149735659 0.0005502151 1 2 0
62 1 1 0.016239180682403528 0.983891964 0.01596498 0.0001434301 1 2 0
65 1 1 0.019577034858477668 0.980613351 0.0189568941 0.00042852998 1 2 0
67 1 1 0.035419443989176395 0.9652005 0.0343228355 0.000475778332 1 2 0
75 1 1 0.027538273321890076 0.972837448 0.0268495046 0.000312928983 1 2 0
78 1 1 0.32294844088002383 0.7240112 0.27587077 0.000116450035 1 2 0
80 1 1 0.050098414656633554 0.9511358 0.0483473167 0.0005151696 1 2 0
81 1 1 0.026061987463741595 0.9742747 0.0249239281 0.0008016538 1 2 0
83 1 2 1.740543377756486 0.824572563 0.175425053 1.65982908E-06 2 1 0
84 1 2 1.2373199070152263 0.709771931 0.290160835 6.62596649E-05 2 1 0
85 1 1 0.36380768156103438 0.695024848 0.304472446 0.0005026918 1 2 0
86 1 1 0.074546831983721043 0.928164 0.0717040449 0.000130506931 1 2 0
87 1 1 0.065021514855800697 0.9370473 0.06291683 3.654533E-05 1 2 0
89 1 1 0.17665865765569469 0.8380658 0.161666483 0.000268906821 1 2 0
94 1 1 0.23230360596017513 0.7927054 0.207063466 0.0002309137 1 2 0
101 2 2 0.040252706237034482 0.9605467 0.03945323 3.15532048E-07 2 1 0
103 2 2 0.051494602671196522 0.9498088 0.05019172 1.28977433E-07 2 1 0
107 2 2 0.10300066689194605 0.9021264 0.09787331 1.9281309E-08 2 1 0
110 2 2 0.28325270161722177 0.7533294 0.246662438 8.488617E-06 2 1 0
114 2 2 0.0078811449325194051 0.99214983 0.007851987 6.352668E-08 2 1 0
116 2 2 0.13699725408993954 0.8719726 0.128025874 6.352816E-07 2 1 0
118 2 2 0.0077514487119431756 0.9922785 0.0077198213 5.014996E-11 2 1 0
119 2 2 0.29779051990653688 0.742456853 0.2575433 7.750907E-07 2 1 0
124 2 2 0.054756176970663442 0.946715951 0.05328287 2.7218934E-07 2 1 0
126 2 2 0.47350688219510623 0.6228143 0.3771728 1.24984781E-05 2 1 0
127 2 2 0.29028318177047763 0.7480517 0.251935542 1.09781113E-05 2 1 0
130 2 2 0.18683622247656881 0.8295796 0.1704187 4.700676E-08 2 1 0
134 2 2 0.0917015399878683 0.9123774 0.08762169 1.1354247E-07 2 1 0
135 2 2 0.13898086338633753 0.8702447 0.129756063 5.158962E-08 2 1 0
136 2 2 0.0084762571159262869 0.991559565 0.008440904 7.217419E-08 2 1 0
139 2 2 0.2463226552303073 0.78167 0.21833 1.72434341E-06 2 1 0
140 2 2 0.026458747425806992 0.9738882 0.0261124317 8.52053148E-08 2 1 0
142 2 2 0.040252706237034482 0.9605467 0.03945323 3.15532048E-07 2 1 0
143 2 2 0.01863096927099336 0.9815415 0.0184568968 2.96961424E-08 2 1 0
146 2 2 0.23264315285446821 0.7924363 0.207562491 1.23098891E-06 2 1 0
148 2 2 0.017330163598589021 0.98281914 0.017179586 3.21201668E-07 2 1 0
149 2 2 0.085189828468119011 0.918337941 0.0816599354 1.76131016E-06 2 1 0
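
A note on the row ordering: for CV runs the per-instance file concatenates each fold's held-out predictions, which is why the instance ids reset partway through. Above, the first 79 rows (instances 5 through 147) are fold 1's test set and the 71 rows from the restart at instance 0 are fold 2's, matching the confusion-table totals for this run. A sketch that recovers the folds by splitting on the id reset:

def split_folds(rows):
    """rows: [(instance_id, ...), ...] in file order; split where ids reset."""
    folds, current = [], []
    for row in rows:
        if current and row[0] <= current[-1][0]:
            folds.append(current)
            current = []
        current.append(row)
    folds.append(current)
    return folds  # for the file above: two folds of 79 and 71 rows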

@@ -0,0 +1,65 @@
maml.exe TrainTest test=%Data% tr=MulticlassLogisticRegression{l1=0.001 l2=0.1 ot=1e-3 nt=1 nn=+} norm=No dout=%Output% data=%Data% out=%Output% seed=1
Not adding a normalizer.
Beginning optimization
num vars: 15
improvement criterion: Mean Improvement
L1 regularization selected 13 of 15 weights.
Not training a calibrator because it is not needed.
Confusion table
||========================
PREDICTED || 0 | 1 | 2 | Recall
TRUTH ||========================
0 || 50 | 0 | 0 | 1.0000
1 || 0 | 48 | 2 | 0.9600
2 || 0 | 1 | 49 | 0.9800
||========================
Precision ||1.0000 |0.9796 |0.9608 |
Accuracy(micro-avg): 0.980000
Accuracy(macro-avg): 0.980000
Log-loss: 0.095534
Log-loss reduction: 91.304142
OVERALL RESULTS
---------------------------------------
Accuracy(micro-avg): 0.980000 (0.0000)
Accuracy(macro-avg): 0.980000 (0.0000)
Log-loss: 0.095534 (0.0000)
Log-loss reduction: 91.304142 (0.0000)
---------------------------------------
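
The Log-loss reduction figure reads as the percentage improvement over the log-loss of a prior that always predicts the empirical label distribution (an interpretation consistent with the numbers, not stated in the log). This TrainTest run scores the full 150-instance set with 50 per class, so the prior is uniform and its log-loss is ln 3:

import math

prior = math.log(3)  # uniform prior over three balanced classes
model = 0.095534     # Log-loss reported above
print(100 * (prior - model) / prior)  # -> 91.3041..., matching the report
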
Physical memory usage(MB): %Number%
Virtual memory usage(MB): %Number%
%DateTime% Time elapsed(s): %Number%
--- Progress log ---
[1] 'LBFGS data prep' started.
[1] 'LBFGS data prep' finished in %Time%.
[2] 'LBFGS Optimizer' started.
[2] (%Time%) 0 iterations Loss: 1.0986123085022
[2] (%Time%) 1 iterations Loss: 1.06389963626862 Improvement: 0.03471
[2] (%Time%) 2 iterations Loss: 1.01654124259949 Improvement: 0.04483
[2] (%Time%) 3 iterations Loss: 0.944314062595367 Improvement: 0.0657
[2] (%Time%) 4 iterations Loss: 0.668209552764893 Improvement: 0.2241
[2] (%Time%) 5 iterations Loss: 0.553279459476471 Improvement: 0.1421
[2] (%Time%) 6 iterations Loss: 0.427209556102753 Improvement: 0.1301
[2] (%Time%) 7 iterations Loss: 0.33543187379837 Improvement: 0.1014
[2] (%Time%) 8 iterations Loss: 0.271388441324234 Improvement: 0.07337
[2] (%Time%) 9 iterations Loss: 0.218755051493645 Improvement: 0.05782
[2] (%Time%) 10 iterations Loss: 0.192830204963684 Improvement: 0.0339
[2] (%Time%) 11 iterations Loss: 0.184821993112564 Improvement: 0.01448
[2] (%Time%) 12 iterations Loss: 0.182577073574066 Improvement: 0.005304
[2] (%Time%) 13 iterations Loss: 0.180941790342331 Improvement: 0.002552
[2] (%Time%) 14 iterations Loss: 0.178911954164505 Improvement: 0.00216
[2] (%Time%) 15 iterations Loss: 0.171350136399269 Improvement: 0.006211
[2] (%Time%) 16 iterations Loss: 0.157612159848213 Improvement: 0.01186
[2] (%Time%) 17 iterations Loss: 0.15358293056488 Improvement: 0.005986
[2] (%Time%) 18 iterations Loss: 0.151476576924324 Improvement: 0.003076
[2] (%Time%) 19 iterations Loss: 0.146950766444206 Improvement: 0.004163
[2] (%Time%) 20 iterations Loss: 0.143808200955391 Improvement: 0.003398
[2] (%Time%) 21 iterations Loss: 0.141508430242538 Improvement: 0.002574
[2] (%Time%) 22 iterations Loss: 0.140696823596954 Improvement: 0.001252
[2] (%Time%) 23 iterations Loss: 0.140071913599968 Improvement: 0.0007818
[2] 'LBFGS Optimizer' finished in %Time%.
[3] 'Saving model' started.
[3] 'Saving model' finished in %Time%.

@@ -0,0 +1,4 @@
MulticlassLogisticRegression
Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /l2 /l1 /ot /nt /nn Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings
0.98 0.98 0.095534 91.30415 0.1 0.001 0.001 1 + MulticlassLogisticRegression %Data% %Data% %Output% 99 0 0 maml.exe TrainTest test=%Data% tr=MulticlassLogisticRegression{l1=0.001 l2=0.1 ot=1e-3 nt=1 nn=+} norm=No dout=%Output% data=%Data% out=%Output% seed=1 /l2:0.1;/l1:0.001;/ot:0.001;/nt:1;/nn:+

@@ -0,0 +1,151 @@
Instance Label Assigned Log-loss #1 Score #2 Score #3 Score #1 Class #2 Class #3 Class
0 0 0 0.0055084268363971593 0.9945067 0.00549493777 5.254127E-11 0 1 2
1 0 0 0.023817056572626422 0.976464331 0.0235365033 1.06045284E-09 0 1 2
2 0 0 0.0093231130112501059 0.9907202 0.0092794355 3.434876E-10 0 1 2
3 0 0 0.023443248531486313 0.9768294 0.02317019 3.03269165E-09 0 1 2
4 0 0 0.0040359770848233112 0.995972157 0.004027704 4.241018E-11 0 1 2
5 0 0 0.0045204256114966734 0.9954898 0.00451145973 6.890591E-11 0 1 2
6 0 0 0.0069789663841046852 0.99304533 0.006953795 5.53306345E-10 0 1 2
7 0 0 0.010097529396150412 0.9899533 0.0100480942 2.30156255E-10 0 1 2
8 0 0 0.03009602430401729 0.970352352 0.0296479575 6.92419366E-09 0 1 2
9 0 0 0.024332072472112821 0.975961566 0.02403856 8.20308266E-10 0 1 2
10 0 0 0.0043335142888201311 0.995675862 0.00432459963 1.73350657E-11 0 1 2
11 0 0 0.013555556014923508 0.9865359 0.013463337 8.13516365E-10 0 1 2
12 0 0 0.023529044193244086 0.9767456 0.02325533 8.72516E-10 0 1 2
13 0 0 0.008501023573343704 0.991535 0.008465137 4.216742E-10 0 1 2
14 0 0 0.0007095469417312554 0.9992907 0.0007089279 1.54630811E-13 0 1 2
15 0 0 0.00055310522407816078 0.999447048 0.0005532046 6.156226E-13 0 1 2
16 0 0 0.0012550477831004697 0.998745739 0.00125323888 4.032167E-12 0 1 2
17 0 0 0.0055084268363971593 0.9945067 0.00549389 8.433323E-11 0 1 2
18 0 0 0.0063285986099884577 0.9936914 0.00630976632 3.05021945E-11 0 1 2
19 0 0 0.003093731767670937 0.996911049 0.00309031014 3.913841E-11 0 1 2
20 0 0 0.020076735361690724 0.98012346 0.0198751818 3.10552029E-10 0 1 2
21 0 0 0.0041732725311333704 0.9958354 0.004164563 1.02791969E-10 0 1 2
22 0 0 0.0010643302939017342 0.998936236 0.0010639854 7.537851E-12 0 1 2
23 0 0 0.025999097964058838 0.974335968 0.025664188 4.81227547E-09 0 1 2
24 0 0 0.035129552664285101 0.9654803 0.034519136 6.708087E-09 0 1 2
25 0 0 0.045339558624474148 0.9556729 0.04432742 3.25503069E-09 0 1 2
26 0 0 0.013889300978196019 0.9862067 0.013794262 1.202008E-09 0 1 2
27 0 0 0.0076827928162563506 0.992346644 0.00765367 8.0804E-11 0 1 2
28 0 0 0.0075225540000182761 0.99250567 0.00749257533 6.505713E-11 0 1 2
29 0 0 0.024242421711841197 0.976049066 0.023950737 2.85124413E-09 0 1 2
30 0 0 0.032976122572028026 0.967561662 0.0324392952 3.50680263E-09 0 1 2
31 0 0 0.010608658479474184 0.9894474 0.0105537577 1.95062647E-10 0 1 2
32 0 0 0.0012779052922244011 0.9987229 0.00127653161 2.62278025E-12 0 1 2
33 0 0 0.00071336435395046255 0.9992869 0.0007127473 5.49134739E-13 0 1 2
34 0 0 0.024332072472112821 0.975961566 0.02403856 8.20308266E-10 0 1 2
35 0 0 0.0070286058750609979 0.992996037 0.007004459 7.352014E-11 0 1 2
36 0 0 0.004203798491639577 0.995805 0.0041957614 8.509567E-12 0 1 2
37 0 0 0.024332072472112821 0.975961566 0.02403856 8.20308266E-10 0 1 2
38 0 0 0.016292553507197185 0.983839452 0.0161602274 2.10622053E-09 0 1 2
39 0 0 0.01022337528087006 0.9898287 0.0101731718 1.74306264E-10 0 1 2
40 0 0 0.0039481873055618134 0.9960596 0.003941884 5.480231E-11 0 1 2
41 0 0 0.12700460629393751 0.8807296 0.11926952 7.253493E-08 0 1 2
42 0 0 0.0089816864331438984 0.9910585 0.008941018 7.907266E-10 0 1 2
43 0 0 0.010307321484771452 0.9897456 0.0102558751 1.89728877E-09 0 1 2
44 0 0 0.011131259446131789 0.988930464 0.0110695921 1.06843367E-09 0 1 2
45 0 0 0.023521416258048912 0.976753056 0.023246726 2.24788366E-09 0 1 2
46 0 0 0.0042648532082970193 0.9957442 0.004256002 4.955993E-11 0 1 2
47 0 0 0.012670520226440413 0.9874094 0.0125911683 9.19701E-10 0 1 2
48 0 0 0.0042801174699289152 0.995729 0.004271112 2.28877854E-11 0 1 2
49 0 0 0.0098857948144395781 0.9901629 0.009837127 1.8531357E-10 0 1 2
50 1 1 0.0090942192070797481 0.990947 0.00760637224 0.00144599774 1 2 0
51 1 1 0.034664173472457219 0.965929747 0.03118425 0.00288774725 1 2 0
52 1 1 0.043201456581903337 0.957718432 0.04173073 0.000551942037 1 2 0
53 1 1 0.13167189090230788 0.8766286 0.1223861 0.0009844311 1 2 0
54 1 1 0.075168589261662158 0.9275871 0.07181354 0.0005993034 1 2 0
55 1 1 0.18945690492401104 0.8274084 0.171777591 0.000814124243 1 2 0
56 1 1 0.11911771398067193 0.8877033 0.110390045 0.00190834212 1 2 0
57 1 1 0.025508903449772993 0.9748137 0.0150558352 0.0101304185 1 0 2
58 1 1 0.019184089039045001 0.980998755 0.0181579981 0.00084428 1 2 0
59 1 1 0.15901943723044118 0.8529798 0.142475456 0.00454534357 1 2 0
60 1 1 0.038457888295285382 0.9622722 0.0353976376 0.00232988712 1 2 0
61 1 1 0.065551772849485893 0.936550558 0.05916038 0.00428870367 1 2 0
62 1 1 0.010322557813937764 0.989730537 0.009494745 0.0007733656 1 2 0
63 1 1 0.17222977582877841 0.8417857 0.15765363 0.0005595186 1 2 0
64 1 1 0.030574768518940053 0.9698879 0.02344595 0.00666603027 1 0 2
65 1 1 0.0098151864535556514 0.9902328 0.00685487129 0.002912695 1 2 0
66 1 1 0.39688494987183254 0.6724114 0.326369762 0.001220205 1 2 0
67 1 1 0.012123102152506331 0.9879501 0.009482692 0.00256814132 1 2 0
68 1 1 0.33736423637406737 0.713648856 0.286240429 0.00010934045 1 2 0
69 1 1 0.021182995555155511 0.9790398 0.018236788 0.00272419979 1 2 0
70 1 2 1.0422287174318141 0.6469034 0.3526678 0.000428632979 2 1 0
71 1 1 0.013839742810404523 0.9862556 0.009148754 0.0045960024 1 2 0
72 1 1 0.58567805070236145 0.556728244 0.443214417 5.730098E-05 1 2 0
73 1 1 0.084991433828601512 0.918520153 0.08102878 0.000452291657 1 2 0
74 1 1 0.012519861356165776 0.9875582 0.0101578543 0.00228302833 1 2 0
75 1 1 0.013351424023883354 0.9867373 0.0110855019 0.002178057 1 2 0
76 1 1 0.043354008859284664 0.957572341 0.04211245 0.00031350847 1 2 0
77 1 1 0.30744937627516217 0.7353201 0.2644465 0.000233673083 1 2 0
78 1 1 0.17038347938641368 0.843341351 0.155579925 0.001078887 1 2 0
79 1 1 0.014785760729894784 0.985323 0.0131924609 0.00148394948 1 0 2
80 1 1 0.023090379914646061 0.977174163 0.0200124662 0.002812842 1 2 0
81 1 1 0.012531268625402953 0.9875469 0.008534899 0.00391809177 1 2 0
82 1 1 0.016162852194115184 0.983967066 0.0111717647 0.00486272341 1 2 0
83 1 2 1.7073783847266821 0.8186427 0.181340575 1.856788E-05 2 1 0
84 1 1 0.62556457936332965 0.5349593 0.4640455 0.000995344 1 2 0
85 1 1 0.11158751479919021 0.8944131 0.10046719 0.00511890557 1 2 0
86 1 1 0.036201489056084446 0.964445949 0.0344701856 0.00108303118 1 2 0
87 1 1 0.063205727958762947 0.9387503 0.06098643 0.000264116941 1 2 0
88 1 1 0.045185081816751668 0.95582056 0.03791894 0.006261989 1 2 0
89 1 1 0.092376681069536226 0.911761642 0.08637355 0.00186417461 1 2 0
90 1 1 0.20824432396488596 0.8120086 0.187371209 0.0006201498 1 2 0
91 1 1 0.10052487189251949 0.9043626 0.09452107 0.00111814367 1 2 0
92 1 1 0.022739404059431985 0.9775172 0.0198875815 0.002597051 1 2 0
93 1 1 0.020528802753463863 0.9796805 0.01107447 0.00924518052 1 0 2
94 1 1 0.10138322927163509 0.9035867 0.09466396 0.00174795289 1 2 0
95 1 1 0.031475061991293116 0.9690151 0.0264395941 0.00454706931 1 2 0
96 1 1 0.055107111138908711 0.9463838 0.05032458 0.00329180225 1 2 0
97 1 1 0.020542126999945221 0.9796674 0.01800886 0.00232210569 1 2 0
98 1 1 0.054157234957735327 0.947283149 0.0504526943 0.00226498954 1 0 2
99 1 1 0.046203621795273951 0.9548475 0.0417587571 0.00339318742 1 2 0
100 2 2 0.00071718178074236128 0.9992831 0.000718441559 2.38225937E-08 2 1 0
101 2 2 0.0295181133625457 0.9709133 0.02908533 3.05521712E-06 2 1 0
102 2 2 0.03940963957571933 0.9613568 0.0386419035 6.5010255E-07 2 1 0
103 2 2 0.042282092241463093 0.9585993 0.0414001346 1.49368452E-06 2 1 0
104 2 2 0.0064544509780086849 0.993566334 0.0064327796 1.60839249E-07 2 1 0
105 2 2 0.01117706708299614 0.988885164 0.0111167124 1.85687732E-08 2 1 0
106 2 2 0.038978580718911832 0.9617713 0.03821299 1.69269952E-05 2 1 0
107 2 2 0.050289127935841414 0.950954437 0.049045492 1.65044582E-07 2 1 0
108 2 2 0.028732319565020852 0.9716765 0.0283248872 1.54309248E-07 2 1 0
109 2 2 0.011814131250558819 0.9882554 0.01174459 6.20176E-07 2 1 0
110 2 2 0.31888964037460732 0.7269558 0.2729284 0.000117515236 2 1 0
111 2 2 0.075557681270826682 0.927226245 0.07277129 3.731746E-06 2 1 0
112 2 2 0.076839464588257703 0.9260385 0.07395845 4.66545225E-06 2 1 0
113 2 2 0.013885493373327248 0.986210465 0.0137876067 1.11059865E-06 2 1 0
114 2 2 0.0034065386776152063 0.996599257 0.003398758 4.821891E-07 2 1 0
115 2 2 0.030685455439561936 0.969780564 0.0302113257 6.934844E-06 2 1 0
116 2 2 0.12955092298505261 0.878489852 0.121503189 7.952961E-06 2 1 0
117 2 2 0.029476184676114042 0.970954 0.0290464126 3.82003577E-07 2 1 0
118 2 2 0.00083540993102648666 0.999164939 0.0008341192 1.58499533E-10 2 1 0
119 2 2 0.18156812803858025 0.8339614 0.166032091 5.2386E-06 2 1 0
120 2 2 0.027637472531907004 0.972740948 0.0272570327 1.62744573E-06 2 1 0
121 2 2 0.027248391079925608 0.9731195 0.0268742 7.42350176E-06 2 1 0
122 2 2 0.011028260200213382 0.9890323 0.0109692747 7.2080053E-09 2 1 0
123 2 2 0.36983106060626558 0.690851033 0.309092641 5.79547559E-05 2 1 0
124 2 2 0.047893534239738376 0.953235269 0.0467611663 3.86117654E-06 2 1 0
125 2 2 0.20047380759045563 0.8183429 0.181652829 3.98381962E-06 2 1 0
126 2 2 0.4699516553461579 0.6250325 0.374836236 0.000132376663 2 1 0
127 2 2 0.37024691655472053 0.6905638 0.3092908 0.000146074992 2 1 0
128 2 2 0.011428805441366281 0.988636255 0.0113640688 3.00256971E-07 2 1 0
129 2 2 0.61292266773756554 0.541765153 0.458223 1.04854416E-05 2 1 0
130 2 2 0.07425686647063412 0.9284332 0.07156495 3.35034969E-07 2 1 0
131 2 2 0.36683655555557643 0.6929229 0.30706656 1.031481E-05 2 1 0
132 2 2 0.0071334751500833167 0.9928919 0.007109147 1.87870214E-07 2 1 0
133 2 1 0.70880889315821138 0.5077022 0.492230147 6.755029E-05 1 2 0
134 2 2 0.085922224030283809 0.9176656 0.082333684 1.23881819E-06 2 1 0
135 2 2 0.040046712040999119 0.96074456 0.0392556675 3.22532E-07 2 1 0
136 2 2 0.0066299388926063978 0.993392 0.00660762331 1.06753293E-06 2 1 0
137 2 2 0.11832431033727081 0.8884079 0.111580558 9.978711E-06 2 1 0
138 2 2 0.40202712796988099 0.6689626 0.330820858 0.000218098256 2 1 0
139 2 2 0.17507936837701382 0.8393904 0.160591438 1.860986E-05 2 1 0
140 2 2 0.01180647152915853 0.988262951 0.0117355874 7.341407E-07 2 1 0
141 2 2 0.2141532959019663 0.807224631 0.192715973 5.853019E-05 2 1 0
142 2 2 0.0295181133625457 0.9709133 0.02908533 3.05521712E-06 2 1 0
143 2 2 0.0095748664566649728 0.9904708 0.009528417 3.031272E-07 2 1 0
144 2 2 0.0073585580223861757 0.99266845 0.007331067 6.058027E-07 2 1 0
145 2 2 0.071910829528973333 0.9306139 0.06937557 1.16139881E-05 2 1 0
146 2 2 0.12055584737407964 0.8864276 0.113564476 8.48525E-06 2 1 0
147 2 2 0.15924069591370296 0.8527911 0.147182718 2.5248728E-05 2 1 0
148 2 2 0.017257693530893608 0.982890368 0.0171058215 5.317109E-06 2 1 0
149 2 2 0.10890195713815695 0.89681834 0.10315422 2.62842859E-05 2 1 0

@@ -0,0 +1,109 @@
maml.exe CV tr=MulticlassLogisticRegression{l1=0.001 l2=0.1 ot=1e-3 nt=1} threads=- norm=No dout=%Output% data=%Data% seed=1
Not adding a normalizer.
Beginning optimization
num vars: 15
improvement criterion: Mean Improvement
L1 regularization selected 15 of 15 weights.
Not training a calibrator because it is not needed.
Not adding a normalizer.
Beginning optimization
num vars: 15
improvement criterion: Mean Improvement
L1 regularization selected 15 of 15 weights.
Not training a calibrator because it is not needed.
Confusion table
||========================
PREDICTED || 0 | 1 | 2 | Recall
TRUTH ||========================
0 || 21 | 0 | 0 | 1.0000
1 || 0 | 29 | 1 | 0.9667
2 || 0 | 2 | 26 | 0.9286
||========================
Precision ||1.0000 |0.9355 |0.9630 |
Accuracy(micro-avg): 0.962025
Accuracy(macro-avg): 0.965079
Log-loss: 0.101866
Log-loss reduction: 90.633114
Confusion table
||========================
PREDICTED || 0 | 1 | 2 | Recall
TRUTH ||========================
0 || 29 | 0 | 0 | 1.0000
1 || 0 | 19 | 1 | 0.9500
2 || 0 | 0 | 22 | 1.0000
||========================
Precision ||1.0000 |1.0000 |0.9565 |
Accuracy(micro-avg): 0.985915
Accuracy(macro-avg): 0.983333
Log-loss: 0.075812
Log-loss reduction: 93.016939
OVERALL RESULTS
---------------------------------------
Accuracy(micro-avg): 0.973970 (0.0119)
Accuracy(macro-avg): 0.974206 (0.0091)
Log-loss: 0.088839 (0.0130)
Log-loss reduction: 91.825026 (1.1919)
---------------------------------------
Warning: There is no NA value for type 'Text'. The missing key value will be mapped to the default value of 'Text'
Warning: There is no NA value for type 'Text'. The missing key value will be mapped to the default value of 'Text'
Warning: There is no NA value for type 'Text'. The missing key value will be mapped to the default value of 'Text'
Warning: There is no NA value for type 'Text'. The missing key value will be mapped to the default value of 'Text'
Physical memory usage(MB): %Number%
Virtual memory usage(MB): %Number%
%DateTime% Time elapsed(s): %Number%
--- Progress log ---
[1] 'LBFGS data prep' started.
[1] 'LBFGS data prep' finished in %Time%.
[2] 'LBFGS Optimizer' started.
[2] (%Time%) 0 iterations Loss: 1.0986123085022
[2] (%Time%) 1 iterations Loss: 0.975501239299774 Improvement: 0.1231
[2] (%Time%) 2 iterations Loss: 0.828468441963196 Improvement: 0.1422
[2] (%Time%) 3 iterations Loss: 0.49238583445549 Improvement: 0.2899
[2] (%Time%) 4 iterations Loss: 0.410263001918793 Improvement: 0.1335
[2] (%Time%) 5 iterations Loss: 0.373202115297318 Improvement: 0.06109
[2] (%Time%) 6 iterations Loss: 0.326229214668274 Improvement: 0.0505
[2] (%Time%) 7 iterations Loss: 0.30860298871994 Improvement: 0.02584
[2] (%Time%) 8 iterations Loss: 0.249911725521088 Improvement: 0.05048
[2] (%Time%) 9 iterations Loss: 0.197030156850815 Improvement: 0.05228
[2] (%Time%) 10 iterations Loss: 0.183768630027771 Improvement: 0.02302
[2] (%Time%) 11 iterations Loss: 0.174268662929535 Improvement: 0.01288
[2] (%Time%) 12 iterations Loss: 0.1489098072052 Improvement: 0.02224
[2] (%Time%) 13 iterations Loss: 0.146679118275642 Improvement: 0.007233
[2] (%Time%) 14 iterations Loss: 0.127629071474075 Improvement: 0.0161
[2] (%Time%) 15 iterations Loss: 0.127402290701866 Improvement: 0.004194
[2] (%Time%) 16 iterations Loss: 0.127095967531204 Improvement: 0.001278
[2] (%Time%) 17 iterations Loss: 0.1268040984869 Improvement: 0.0005385
[2] 'LBFGS Optimizer' finished in %Time%.
[3] 'LBFGS data prep #2' started.
[3] 'LBFGS data prep #2' finished in %Time%.
[4] 'LBFGS Optimizer #2' started.
[4] (%Time%) 0 iterations Loss: 1.0986123085022
[4] (%Time%) 1 iterations Loss: 1.03655636310577 Improvement: 0.06206
[4] (%Time%) 2 iterations Loss: 1.00361847877502 Improvement: 0.03876
[4] (%Time%) 3 iterations Loss: 0.937079250812531 Improvement: 0.05993
[4] (%Time%) 4 iterations Loss: 0.819244384765625 Improvement: 0.1035
[4] (%Time%) 5 iterations Loss: 0.728321373462677 Improvement: 0.09406
[4] (%Time%) 6 iterations Loss: 0.581992864608765 Improvement: 0.1333
[4] (%Time%) 7 iterations Loss: 0.440624892711639 Improvement: 0.1393
[4] (%Time%) 8 iterations Loss: 0.368180394172668 Improvement: 0.08917
[4] (%Time%) 9 iterations Loss: 0.287548065185547 Improvement: 0.08277
[4] (%Time%) 10 iterations Loss: 0.239883854985237 Improvement: 0.05644
[4] (%Time%) 11 iterations Loss: 0.217700272798538 Improvement: 0.03075
[4] (%Time%) 12 iterations Loss: 0.206228733062744 Improvement: 0.01629
[4] (%Time%) 13 iterations Loss: 0.192829161882401 Improvement: 0.01412
[4] (%Time%) 14 iterations Loss: 0.185032933950424 Improvement: 0.009378
[4] (%Time%) 15 iterations Loss: 0.181731522083282 Improvement: 0.00482
[4] (%Time%) 16 iterations Loss: 0.168401405215263 Improvement: 0.0112
[4] (%Time%) 17 iterations Loss: 0.159209698438644 Improvement: 0.009694
[4] (%Time%) 18 iterations Loss: 0.150576055049896 Improvement: 0.008899
[4] (%Time%) 19 iterations Loss: 0.14181961119175 Improvement: 0.008792
[4] (%Time%) 20 iterations Loss: 0.135607719421387 Improvement: 0.006857
[4] (%Time%) 21 iterations Loss: 0.134872287511826 Improvement: 0.002266
[4] (%Time%) 22 iterations Loss: 0.133358553051949 Improvement: 0.001702
[4] (%Time%) 23 iterations Loss: 0.132842555642128 Improvement: 0.0008124
[4] 'LBFGS Optimizer #2' finished in %Time%.

@@ -0,0 +1,4 @@
MulticlassLogisticRegression
Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /l2 /l1 /ot /nt Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings
0.97397 0.974206 0.088839 91.82503 0.1 0.001 0.001 1 MulticlassLogisticRegression %Data% %Output% 99 0 0 maml.exe CV tr=MulticlassLogisticRegression{l1=0.001 l2=0.1 ot=1e-3 nt=1} threads=- norm=No dout=%Output% data=%Data% seed=1 /l2:0.1;/l1:0.001;/ot:0.001;/nt:1

@@ -0,0 +1,117 @@
maml.exe CV tr=MulticlassLogisticRegression{l1=0.001 l2=0.1 ot=1e-3 nt=1} threads=- norm=No dout=%Output% loader=Text{col=Label:U4[0-2]:0 col=Features:1-*} data=%Data% seed=1 xf=TreeFeat{lps=0 trainer=ftr{iter=3}} xf=copy{col=Features:Leaves}
Making per-feature arrays
Changing data from row-wise to column-wise
Processed 71 instances
Binning and forming Feature objects
Reserved memory for tree learner: 16380 bytes
Starting to train ...
Not training a calibrator because it is not needed.
Not adding a normalizer.
Beginning optimization
num vars: 39
improvement criterion: Mean Improvement
L1 regularization selected 39 of 39 weights.
Not training a calibrator because it is not needed.
Making per-feature arrays
Changing data from row-wise to column-wise
Processed 79 instances
Binning and forming Feature objects
Reserved memory for tree learner: 17472 bytes
Starting to train ...
Not training a calibrator because it is not needed.
Not adding a normalizer.
Beginning optimization
num vars: 54
improvement criterion: Mean Improvement
L1 regularization selected 54 of 54 weights.
Not training a calibrator because it is not needed.
Confusion table
||========================
PREDICTED || 0 | 1 | 2 | Recall
TRUTH ||========================
0 || 21 | 0 | 0 | 1.0000
1 || 0 | 25 | 5 | 0.8333
2 || 0 | 1 | 27 | 0.9643
||========================
Precision ||1.0000 |0.9615 |0.8438 |
Accuracy(micro-avg): 0.924051
Accuracy(macro-avg): 0.932540
Log-loss: 0.330649
Log-loss reduction: 69.595935
Confusion table
||========================
PREDICTED || 0 | 1 | 2 | Recall
TRUTH ||========================
0 || 29 | 0 | 0 | 1.0000
1 || 0 | 19 | 1 | 0.9500
2 || 0 | 2 | 20 | 0.9091
||========================
Precision ||1.0000 |0.9048 |0.9524 |
Accuracy(micro-avg): 0.957746
Accuracy(macro-avg): 0.953030
Log-loss: 0.157832
Log-loss reduction: 85.461953
OVERALL RESULTS
---------------------------------------
Accuracy(micro-avg): 0.940899 (0.0168)
Accuracy(macro-avg): 0.942785 (0.0102)
Log-loss: 0.244241 (0.0864)
Log-loss reduction: 77.528944 (7.9330)
---------------------------------------
Warning: There is no NA value for type 'Text'. The missing key value will be mapped to the default value of 'Text'
Warning: There is no NA value for type 'Text'. The missing key value will be mapped to the default value of 'Text'
Warning: There is no NA value for type 'Text'. The missing key value will be mapped to the default value of 'Text'
Warning: There is no NA value for type 'Text'. The missing key value will be mapped to the default value of 'Text'
Physical memory usage(MB): %Number%
Virtual memory usage(MB): %Number%
%DateTime% Time elapsed(s): %Number%
--- Progress log ---
[1] 'FastTree data preparation' started.
[1] 'FastTree data preparation' finished in %Time%.
[2] 'FastTree in-memory bins initialization' started.
[2] 'FastTree in-memory bins initialization' finished in %Time%.
[3] 'FastTree feature conversion' started.
[3] 'FastTree feature conversion' finished in %Time%.
[4] 'FastTree training' started.
[4] 'FastTree training' finished in %Time%.
[5] 'LBFGS data prep' started.
[5] 'LBFGS data prep' finished in %Time%.
[6] 'LBFGS Optimizer' started.
[6] (%Time%) 0 iterations Loss: 1.0986123085022
[6] (%Time%) 1 iterations Loss: 0.529107213020325 Improvement: 0.5695
[6] (%Time%) 2 iterations Loss: 0.162161201238632 Improvement: 0.4075
[6] (%Time%) 3 iterations Loss: 0.110731095075607 Improvement: 0.1362
[6] (%Time%) 4 iterations Loss: 0.082178421318531 Improvement: 0.05515
[6] (%Time%) 5 iterations Loss: 0.0707422941923141 Improvement: 0.02233
[6] (%Time%) 6 iterations Loss: 0.0665594562888145 Improvement: 0.008717
[6] (%Time%) 7 iterations Loss: 0.0660991221666336 Improvement: 0.002524
[6] (%Time%) 8 iterations Loss: 0.0654922351241112 Improvement: 0.001086
[6] (%Time%) 9 iterations Loss: 0.0654363483190536 Improvement: 0.0003135
[6] 'LBFGS Optimizer' finished in %Time%.
[7] 'FastTree data preparation #2' started.
[7] 'FastTree data preparation #2' finished in %Time%.
[8] 'FastTree in-memory bins initialization #2' started.
[8] 'FastTree in-memory bins initialization #2' finished in %Time%.
[9] 'FastTree feature conversion #2' started.
[9] 'FastTree feature conversion #2' finished in %Time%.
[10] 'FastTree training #2' started.
[10] 'FastTree training #2' finished in %Time%.
[11] 'LBFGS data prep #2' started.
[11] 'LBFGS data prep #2' finished in %Time%.
[12] 'LBFGS Optimizer #2' started.
[12] (%Time%) 0 iterations Loss: 1.0986123085022
[12] (%Time%) 1 iterations Loss: 0.607897818088531 Improvement: 0.4907
[12] (%Time%) 2 iterations Loss: 0.202578827738762 Improvement: 0.4224
[12] (%Time%) 3 iterations Loss: 0.143362611532211 Improvement: 0.1457
[12] (%Time%) 4 iterations Loss: 0.107794404029846 Improvement: 0.06277
[12] (%Time%) 5 iterations Loss: 0.0930556431412697 Improvement: 0.02671
[12] (%Time%) 6 iterations Loss: 0.088469035923481 Improvement: 0.01011
[12] (%Time%) 7 iterations Loss: 0.086934432387352 Improvement: 0.003679
[12] (%Time%) 8 iterations Loss: 0.0866307020187378 Improvement: 0.001148
[12] (%Time%) 9 iterations Loss: 0.0862946063280106 Improvement: 0.000539
[12] 'LBFGS Optimizer #2' finished in %Time%.
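
The jump in "num vars" tracks the feature count: the trainer fits one weight vector plus a bias per class, so num vars = classes x (features + 1). Raw iris gives 3 x (4 + 1) = 15 in the earlier runs; here, after xf=TreeFeat replaces Features with the three trees' leaf indicators, the fold-1 and fold-2 models report 39 and 54 vars, implying 12 and 17 leaf features respectively (an inference from the counts, assuming the bias is included):

classes = 3
for n_features in (4, 12, 17):  # raw iris; inferred fold-1 and fold-2 leaf counts
    print(classes * (n_features + 1))  # -> 15, 39, 54, as in "num vars"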

@@ -0,0 +1,116 @@
maml.exe CV tr=MulticlassLogisticRegression{l1=0.001 l2=0.1 ot=1e-3 nt=1} threads=- norm=No dout=%Output% loader=Text{col=Label:U4[0-2]:0 col=Features:1-*} data=%Data% seed=1 xf=TreeFeat{lps=2 trainer=ftr{iter=3}} xf=copy{col=Features:Leaves}
Making per-feature arrays
Changing data from row-wise to column-wise
Processed 71 instances
Binning and forming Feature objects
Reserved memory for tree learner: 16380 bytes
Starting to train ...
Not training a calibrator because it is not needed.
Not adding a normalizer.
Beginning optimization
num vars: 45
improvement criterion: Mean Improvement
L1 regularization selected 44 of 45 weights.
Not training a calibrator because it is not needed.
Making per-feature arrays
Changing data from row-wise to column-wise
Processed 79 instances
Binning and forming Feature objects
Reserved memory for tree learner: 17472 bytes
Starting to train ...
Not training a calibrator because it is not needed.
Not adding a normalizer.
Beginning optimization
num vars: 48
improvement criterion: Mean Improvement
L1 regularization selected 48 of 48 weights.
Not training a calibrator because it is not needed.
Confusion table
||========================
PREDICTED || 0 | 1 | 2 | Recall
TRUTH ||========================
0 || 21 | 0 | 0 | 1.0000
1 || 0 | 25 | 5 | 0.8333
2 || 0 | 1 | 27 | 0.9643
||========================
Precision ||1.0000 |0.9615 |0.8438 |
Accuracy(micro-avg): 0.924051
Accuracy(macro-avg): 0.932540
Log-loss: 0.201590
Log-loss reduction: 81.463253
Confusion table
||========================
PREDICTED || 0 | 1 | 2 | Recall
TRUTH ||========================
0 || 29 | 0 | 0 | 1.0000
1 || 0 | 19 | 1 | 0.9500
2 || 0 | 1 | 21 | 0.9545
||========================
Precision ||1.0000 |0.9500 |0.9545 |
Accuracy(micro-avg): 0.971831
Accuracy(macro-avg): 0.968182
Log-loss: 0.101915
Log-loss reduction: 90.612517
OVERALL RESULTS
---------------------------------------
Accuracy(micro-avg): 0.947941 (0.0239)
Accuracy(macro-avg): 0.950361 (0.0178)
Log-loss: 0.151753 (0.0498)
Log-loss reduction: 86.037885 (4.5746)
---------------------------------------
Warning: There is no NA value for type 'Text'. The missing key value will be mapped to the default value of 'Text'
Warning: There is no NA value for type 'Text'. The missing key value will be mapped to the default value of 'Text'
Warning: There is no NA value for type 'Text'. The missing key value will be mapped to the default value of 'Text'
Warning: There is no NA value for type 'Text'. The missing key value will be mapped to the default value of 'Text'
Physical memory usage(MB): %Number%
Virtual memory usage(MB): %Number%
%DateTime% Time elapsed(s): %Number%
--- Progress log ---
[1] 'FastTree data preparation' started.
[1] 'FastTree data preparation' finished in %Time%.
[2] 'FastTree in-memory bins initialization' started.
[2] 'FastTree in-memory bins initialization' finished in %Time%.
[3] 'FastTree feature conversion' started.
[3] 'FastTree feature conversion' finished in %Time%.
[4] 'FastTree training' started.
[4] 'FastTree training' finished in %Time%.
[5] 'LBFGS data prep' started.
[5] 'LBFGS data prep' finished in %Time%.
[6] 'LBFGS Optimizer' started.
[6] (%Time%) 0 iterations Loss: 1.0986123085022
[6] (%Time%) 1 iterations Loss: 0.556313633918762 Improvement: 0.5423
[6] (%Time%) 2 iterations Loss: 0.151027098298073 Improvement: 0.4327
[6] (%Time%) 3 iterations Loss: 0.0993023291230202 Improvement: 0.1424
[6] (%Time%) 4 iterations Loss: 0.0695240423083305 Improvement: 0.05761
[6] (%Time%) 5 iterations Loss: 0.0572926141321659 Improvement: 0.02354
[6] (%Time%) 6 iterations Loss: 0.0536528006196022 Improvement: 0.008612
[6] (%Time%) 7 iterations Loss: 0.0518658980727196 Improvement: 0.003493
[6] (%Time%) 8 iterations Loss: 0.0517856702208519 Improvement: 0.0009333
[6] 'LBFGS Optimizer' finished in %Time%.
[7] 'FastTree data preparation #2' started.
[7] 'FastTree data preparation #2' finished in %Time%.
[8] 'FastTree in-memory bins initialization #2' started.
[8] 'FastTree in-memory bins initialization #2' finished in %Time%.
[9] 'FastTree feature conversion #2' started.
[9] 'FastTree feature conversion #2' finished in %Time%.
[10] 'FastTree training #2' started.
[10] 'FastTree training #2' finished in %Time%.
[11] 'LBFGS data prep #2' started.
[11] 'LBFGS data prep #2' finished in %Time%.
[12] 'LBFGS Optimizer #2' started.
[12] (%Time%) 0 iterations Loss: 1.0986123085022
[12] (%Time%) 1 iterations Loss: 0.588071405887604 Improvement: 0.5105
[12] (%Time%) 2 iterations Loss: 0.210458397865295 Improvement: 0.4042
[12] (%Time%) 3 iterations Loss: 0.143802016973495 Improvement: 0.147
[12] (%Time%) 4 iterations Loss: 0.109668917953968 Improvement: 0.06202
[12] (%Time%) 5 iterations Loss: 0.0927119106054306 Improvement: 0.02819
[12] (%Time%) 6 iterations Loss: 0.0866884738206863 Improvement: 0.01156
[12] (%Time%) 7 iterations Loss: 0.0849770903587341 Improvement: 0.004173
[12] (%Time%) 8 iterations Loss: 0.0845689475536346 Improvement: 0.001349
[12] (%Time%) 9 iterations Loss: 0.0844891592860222 Improvement: 0.0003972
[12] 'LBFGS Optimizer #2' finished in %Time%.
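Every LBFGS trace in these baselines starts from a loss of 1.0986123..., which is ln 3: with all weights initialized to zero, the softmax assigns probability 1/3 to each of the three classes, so the mean log-loss is -ln(1/3). A one-line check (the trailing-digit difference is consistent with float32 rounding):

import math
print(-math.log(1 / 3))  # 1.0986122886681098 ~ the iteration-0 loss in the traces above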


@ -0,0 +1,4 @@
MulticlassLogisticRegression
Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /l2 /l1 /ot /nt Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings
0.947941 0.950361 0.151753 86.03789 0.1 0.001 0.001 1 MulticlassLogisticRegression %Data% %Output% 99 0 0 maml.exe CV tr=MulticlassLogisticRegression{l1=0.001 l2=0.1 ot=1e-3 nt=1} threads=- norm=No dout=%Output% loader=Text{col=Label:U4[0-2]:0 col=Features:1-*} data=%Data% seed=1 xf=TreeFeat{lps=2 trainer=ftr{iter=3}} xf=copy{col=Features:Leaves} /l2:0.1;/l1:0.001;/ot:0.001;/nt:1


@ -0,0 +1,151 @@
Instance Label Assigned Log-loss #1 Score #2 Score #3 Score #1 Class #2 Class #3 Class
5 0 0 0.0062537425593983181 0.9937658 0.00340008875 0.0028340437 0 2 1
6 0 0 0.0055627883417999026 0.994452655 0.00301888748 0.00252835476 0 2 1
8 0 0 0.0055627883417999026 0.994452655 0.00301888748 0.00252835476 0 2 1
9 0 0 0.0062537425593983181 0.9937658 0.00340008875 0.0028340437 0 2 1
10 0 0 0.0062537425593983181 0.9937658 0.00340008875 0.0028340437 0 2 1
11 0 0 0.0062537425593983181 0.9937658 0.00340008875 0.0028340437 0 2 1
18 0 0 0.0062537425593983181 0.9937658 0.00340008875 0.0028340437 0 2 1
20 0 0 0.0062537425593983181 0.9937658 0.00340008875 0.0028340437 0 2 1
21 0 0 0.0062537425593983181 0.9937658 0.00340008875 0.0028340437 0 2 1
25 0 0 0.0062537425593983181 0.9937658 0.00340008875 0.0028340437 0 2 1
28 0 0 0.0055627883417999026 0.994452655 0.00301888748 0.00252835476 0 2 1
31 0 0 0.0062537425593983181 0.9937658 0.00340008875 0.0028340437 0 2 1
32 0 0 0.0062537425593983181 0.9937658 0.00340008875 0.0028340437 0 2 1
35 0 0 0.0055627883417999026 0.994452655 0.00301888748 0.00252835476 0 2 1
37 0 0 0.0062537425593983181 0.9937658 0.00340008875 0.0028340437 0 2 1
40 0 0 0.0055627883417999026 0.994452655 0.00301888748 0.00252835476 0 2 1
41 0 0 0.0055627883417999026 0.994452655 0.00301888748 0.00252835476 0 2 1
44 0 0 0.0062537425593983181 0.9937658 0.00340008875 0.0028340437 0 2 1
45 0 0 0.0055627883417999026 0.994452655 0.00301888748 0.00252835476 0 2 1
46 0 0 0.0062537425593983181 0.9937658 0.00340008875 0.0028340437 0 2 1
48 0 0 0.0062537425593983181 0.9937658 0.00340008875 0.0028340437 0 2 1
50 1 1 0.0053598617878104484 0.9946545 0.0032052286 0.002140288 1 2 0
51 1 1 0.0053598617878104484 0.9946545 0.0032052286 0.002140288 1 2 0
52 1 2 1.357521751036989 0.72348994 0.257297635 0.0192124527 2 1 0
54 1 1 0.0053598617878104484 0.9946545 0.0032052286 0.002140288 1 2 0
56 1 1 0.0053598617878104484 0.9946545 0.0032052286 0.002140288 1 2 0
60 1 1 0.0081916071232031266 0.991841853 0.00469287625 0.003465219 1 2 0
63 1 1 0.0053598617878104484 0.9946545 0.0032052286 0.002140288 1 2 0
64 1 1 0.0081916071232031266 0.991841853 0.00469287625 0.003465219 1 2 0
66 1 1 0.0081916071232031266 0.991841853 0.00469287625 0.003465219 1 2 0
68 1 1 0.0053598617878104484 0.9946545 0.0032052286 0.002140288 1 2 0
69 1 1 0.0081916071232031266 0.991841853 0.00469287625 0.003465219 1 2 0
70 1 2 4.217452541805657 0.9806482 0.0147361364 0.004615784 2 1 0
71 1 1 0.0053598617878104484 0.9946545 0.0032052286 0.002140288 1 2 0
72 1 2 1.357521751036989 0.72348994 0.257297635 0.0192124527 2 1 0
73 1 1 0.0053598617878104484 0.9946545 0.0032052286 0.002140288 1 2 0
74 1 1 0.0053598617878104484 0.9946545 0.0032052286 0.002140288 1 2 0
76 1 2 1.357521751036989 0.72348994 0.257297635 0.0192124527 2 1 0
77 1 2 1.357521751036989 0.72348994 0.257297635 0.0192124527 2 1 0
79 1 1 0.0081916071232031266 0.991841853 0.00469287625 0.003465219 1 2 0
82 1 1 0.0081916071232031266 0.991841853 0.00469287625 0.003465219 1 2 0
88 1 1 0.0081916071232031266 0.991841853 0.00469287625 0.003465219 1 2 0
90 1 1 0.0081916071232031266 0.991841853 0.00469287625 0.003465219 1 2 0
91 1 1 0.0053598617878104484 0.9946545 0.0032052286 0.002140288 1 2 0
92 1 1 0.0081916071232031266 0.991841853 0.00469287625 0.003465219 1 2 0
93 1 1 0.0081916071232031266 0.991841853 0.00469287625 0.003465219 1 2 0
95 1 1 0.0081916071232031266 0.991841853 0.00469287625 0.003465219 1 2 0
96 1 1 0.0081916071232031266 0.991841853 0.00469287625 0.003465219 1 2 0
97 1 1 0.0053598617878104484 0.9946545 0.0032052286 0.002140288 1 2 0
98 1 1 0.0081916071232031266 0.991841853 0.00469287625 0.003465219 1 2 0
99 1 1 0.0081916071232031266 0.991841853 0.00469287625 0.003465219 1 2 0
100 2 2 0.0029477966595112797 0.997056544 0.00180420361 0.0011390592 2 0 1
102 2 2 0.0057142010115053248 0.9943021 0.003090978 0.002606992 2 0 1
104 2 2 0.0057142010115053248 0.9943021 0.003090978 0.002606992 2 0 1
105 2 2 0.0057142010115053248 0.9943021 0.003090978 0.002606992 2 0 1
106 2 1 5.361709612467159 0.991841853 0.00469287625 0.003465219 1 2 0
108 2 2 0.0057142010115053248 0.9943021 0.003090978 0.002606992 2 0 1
109 2 2 0.0057142010115053248 0.9943021 0.003090978 0.002606992 2 0 1
111 2 2 0.0029477966595112797 0.997056544 0.00180420361 0.0011390592 2 0 1
112 2 2 0.0057142010115053248 0.9943021 0.003090978 0.002606992 2 0 1
113 2 2 0.019541477421649685 0.9806482 0.0147361364 0.004615784 2 1 0
115 2 2 0.0029477966595112797 0.997056544 0.00180420361 0.0011390592 2 0 1
117 2 2 0.0057142010115053248 0.9943021 0.003090978 0.002606992 2 0 1
120 2 2 0.0057142010115053248 0.9943021 0.003090978 0.002606992 2 0 1
121 2 2 0.019541477421649685 0.9806482 0.0147361364 0.004615784 2 1 0
122 2 2 0.0057142010115053248 0.9943021 0.003090978 0.002606992 2 0 1
123 2 2 0.011587138891097372 0.988479733 0.00731784431 0.004202525 2 1 0
125 2 2 0.0057142010115053248 0.9943021 0.003090978 0.002606992 2 0 1
128 2 2 0.0029477966595112797 0.997056544 0.00180420361 0.0011390592 2 0 1
129 2 2 0.064084548067755445 0.9379257 0.0514734164 0.0106008854 2 1 0
131 2 2 0.0057142010115053248 0.9943021 0.003090978 0.002606992 2 0 1
132 2 2 0.0029477966595112797 0.997056544 0.00180420361 0.0011390592 2 0 1
133 2 2 0.32366863740744056 0.72348994 0.257297635 0.0192124527 2 1 0
137 2 2 0.0029477966595112797 0.997056544 0.00180420361 0.0011390592 2 0 1
138 2 2 0.019541477421649685 0.9806482 0.0147361364 0.004615784 2 1 0
141 2 2 0.024004226690419721 0.9762816 0.0165876672 0.00713065453 2 1 0
144 2 2 0.0057142010115053248 0.9943021 0.003090978 0.002606992 2 0 1
145 2 2 0.024004226690419721 0.9762816 0.0165876672 0.00713065453 2 1 0
147 2 2 0.024004226690419721 0.9762816 0.0165876672 0.00713065453 2 1 0
0 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
1 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
2 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
3 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
4 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
7 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
12 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
13 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
14 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
15 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
16 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
17 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
19 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
22 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
23 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
24 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
26 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
27 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
29 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
30 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
33 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
34 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
36 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
38 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
39 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
42 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
43 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
47 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
49 0 0 0.0070464334762157747 0.992978334 0.00379372248 0.003227859 0 2 1
53 1 1 0.0037217168855646515 0.9962852 0.00208102469 0.00163372431 1 2 0
55 1 1 0.0037217168855646515 0.9962852 0.00208102469 0.00163372431 1 2 0
57 1 1 0.0037217168855646515 0.9962852 0.00208102469 0.00163372431 1 2 0
58 1 1 0.0092000873585017535 0.9908421 0.00664671045 0.00251116115 1 2 0
59 1 1 0.0037217168855646515 0.9962852 0.00208102469 0.00163372431 1 2 0
61 1 1 0.016912213899205067 0.98323 0.0148431538 0.00192679861 1 2 0
62 1 1 0.0037217168855646515 0.9962852 0.00208102469 0.00163372431 1 2 0
65 1 1 0.0037217168855646515 0.9962852 0.00208102469 0.00163372431 1 2 0
67 1 1 0.0037217168855646515 0.9962852 0.00208102469 0.00163372431 1 2 0
75 1 1 0.0037217168855646515 0.9962852 0.00208102469 0.00163372431 1 2 0
78 1 1 0.016912213899205067 0.98323 0.0148431538 0.00192679861 1 2 0
80 1 1 0.0037217168855646515 0.9962852 0.00208102469 0.00163372431 1 2 0
81 1 1 0.0037217168855646515 0.9962852 0.00208102469 0.00163372431 1 2 0
83 1 2 2.5891336906268299 0.917143047 0.07508506 0.007771907 2 1 0
84 1 1 0.016912213899205067 0.98323 0.0148431538 0.00192679861 1 2 0
85 1 1 0.016912213899205067 0.98323 0.0148431538 0.00192679861 1 2 0
86 1 1 0.050227892662411955 0.9510127 0.04610691 0.00288033066 1 2 0
87 1 1 0.0037217168855646515 0.9962852 0.00208102469 0.00163372431 1 2 0
89 1 1 0.0037217168855646515 0.9962852 0.00208102469 0.00163372431 1 2 0
94 1 1 0.0037217168855646515 0.9962852 0.00208102469 0.00163372431 1 2 0
101 2 2 0.002020849860847866 0.9979812 0.00101441843 0.0010043825 2 0 1
103 2 2 0.0015005993849081293 0.9985005 0.00093225 0.000567381445 2 0 1
107 2 2 0.0015005993849081293 0.9985005 0.00093225 0.000567381445 2 0 1
110 2 2 0.011657028232298779 0.988410652 0.00830857 0.00328082382 2 1 0
114 2 2 0.002020849860847866 0.9979812 0.00101441843 0.0010043825 2 0 1
116 2 2 0.0015005993849081293 0.9985005 0.00093225 0.000567381445 2 0 1
118 2 2 0.0015005993849081293 0.9985005 0.00093225 0.000567381445 2 0 1
119 2 1 3.0767924818472583 0.9510127 0.04610691 0.00288033066 1 2 0
124 2 2 0.0077648440687823166 0.9922652 0.004709411 0.0030252568 2 1 0
126 2 2 0.23194741246350345 0.7929878 0.201070011 0.005942198 2 1 0
127 2 2 0.23194741246350345 0.7929878 0.201070011 0.005942198 2 1 0
130 2 2 0.0015005993849081293 0.9985005 0.00093225 0.000567381445 2 0 1
134 2 2 0.4124399329774272 0.662032962 0.322351336 0.0156157119 2 1 0
135 2 2 0.0015005993849081293 0.9985005 0.00093225 0.000567381445 2 0 1
136 2 2 0.0077648440687823166 0.9922652 0.004709411 0.0030252568 2 1 0
139 2 2 0.011079306481754472 0.988981843 0.00756055675 0.00345751923 2 1 0
140 2 2 0.0077648440687823166 0.9922652 0.004709411 0.0030252568 2 1 0
142 2 2 0.002020849860847866 0.9979812 0.00101441843 0.0010043825 2 0 1
143 2 2 0.0077648440687823166 0.9922652 0.004709411 0.0030252568 2 1 0
146 2 2 0.23194741246350345 0.7929878 0.201070011 0.005942198 2 1 0
148 2 2 0.011079306481754472 0.988981843 0.00756055675 0.00345751923 2 1 0
149 2 2 0.002020849860847866 0.9979812 0.00101441843 0.0010043825 2 0 1
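In these per-instance dumps the Log-loss column is consistent with -ln(p), where p is the probability assigned to the true label; for misclassified rows that probability sits below the #1 score. Instance 52 above (label 1, assigned 2) illustrates this, with class 1 holding the #2 score:

import math
p_true = 0.257297635      # the #2 score for instance 52, i.e. the class-1 probability
print(-math.log(p_true))  # 1.3575217..., matching the reported log-loss of 1.357521751036989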


@ -0,0 +1,4 @@
MulticlassLogisticRegression
Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /l2 /l1 /ot /nt Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings
0.940899 0.942785 0.244241 77.52895 0.1 0.001 0.001 1 MulticlassLogisticRegression %Data% %Output% 99 0 0 maml.exe CV tr=MulticlassLogisticRegression{l1=0.001 l2=0.1 ot=1e-3 nt=1} threads=- norm=No dout=%Output% loader=Text{col=Label:U4[0-2]:0 col=Features:1-*} data=%Data% seed=1 xf=TreeFeat{lps=0 trainer=ftr{iter=3}} xf=copy{col=Features:Leaves} /l2:0.1;/l1:0.001;/ot:0.001;/nt:1


@ -0,0 +1,151 @@
Instance Label Assigned Log-loss #1 Score #2 Score #3 Score #1 Class #2 Class #3 Class
5 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
6 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
8 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
9 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
10 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
11 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
18 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
20 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
21 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
25 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
28 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
31 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
32 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
35 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
37 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
40 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
41 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
44 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
45 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
46 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
48 0 0 0.0050964063795893803 0.994916558 0.00261889934 0.002464454 0 2 1
50 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
51 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
52 1 2 4.8380222052271913 0.9885047 0.007922708 0.003572509 2 1 0
54 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
56 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
60 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
63 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
64 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
66 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
68 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
69 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
70 1 2 3.4342713652954884 0.9619754 0.0322489 0.005775679 2 1 0
71 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
72 1 2 3.235481539168072 0.953945756 0.039341256 0.00671289442 2 1 0
73 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
74 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
76 1 2 3.235481539168072 0.953945756 0.039341256 0.00671289442 2 1 0
77 1 2 4.8380222052271913 0.9885047 0.007922708 0.003572509 2 1 0
79 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
82 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
88 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
90 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
91 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
92 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
93 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
95 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
96 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
97 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
98 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
99 1 1 0.0059878929225182214 0.99403 0.003516594 0.002453451 1 2 0
100 2 2 0.0040831366035799354 0.9959252 0.00216659228 0.0019083831 2 0 1
102 2 2 0.0040831366035799354 0.9959252 0.00216659228 0.0019083831 2 0 1
104 2 2 0.0040831366035799354 0.9959252 0.00216659228 0.0019083831 2 0 1
105 2 2 0.0040831366035799354 0.9959252 0.00216659228 0.0019083831 2 0 1
106 2 1 5.6502623862339236 0.99403 0.003516594 0.002453451 1 2 0
108 2 2 0.01399731003087396 0.9861002 0.009722762 0.004176987 2 1 0
109 2 2 0.0040831366035799354 0.9959252 0.00216659228 0.0019083831 2 0 1
111 2 2 0.01399731003087396 0.9861002 0.009722762 0.004176987 2 1 0
112 2 2 0.0040831366035799354 0.9959252 0.00216659228 0.0019083831 2 0 1
113 2 2 0.16905655561427343 0.844461143 0.145666644 0.009872116 2 1 0
115 2 2 0.0040831366035799354 0.9959252 0.00216659228 0.0019083831 2 0 1
117 2 2 0.0040831366035799354 0.9959252 0.00216659228 0.0019083831 2 0 1
120 2 2 0.0040831366035799354 0.9959252 0.00216659228 0.0019083831 2 0 1
121 2 2 0.16905655561427343 0.844461143 0.145666644 0.009872116 2 1 0
122 2 2 0.01399731003087396 0.9861002 0.009722762 0.004176987 2 1 0
123 2 2 0.047148468732643084 0.953945756 0.039341256 0.00671289442 2 1 0
125 2 2 0.0040831366035799354 0.9959252 0.00216659228 0.0019083831 2 0 1
128 2 2 0.01399731003087396 0.9861002 0.009722762 0.004176987 2 1 0
129 2 2 0.0040831366035799354 0.9959252 0.00216659228 0.0019083831 2 0 1
131 2 2 0.0040831366035799354 0.9959252 0.00216659228 0.0019083831 2 0 1
132 2 2 0.01399731003087396 0.9861002 0.009722762 0.004176987 2 1 0
133 2 2 0.047148468732643084 0.953945756 0.039341256 0.00671289442 2 1 0
137 2 2 0.0040831366035799354 0.9959252 0.00216659228 0.0019083831 2 0 1
138 2 2 0.038766404860270692 0.9619754 0.0322489 0.005775679 2 1 0
141 2 2 0.011561873799839452 0.9885047 0.007922708 0.003572509 2 1 0
144 2 2 0.0040831366035799354 0.9959252 0.00216659228 0.0019083831 2 0 1
145 2 2 0.011561873799839452 0.9885047 0.007922708 0.003572509 2 1 0
147 2 2 0.011561873799839452 0.9885047 0.007922708 0.003572509 2 1 0
0 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
1 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
2 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
3 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
4 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
7 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
12 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
13 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
14 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
15 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
16 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
17 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
19 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
22 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
23 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
24 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
26 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
27 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
29 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
30 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
33 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
34 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
36 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
38 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
39 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
42 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
43 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
47 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
49 0 0 0.0072698756866274679 0.9927565 0.00368259917 0.00356091326 0 2 1
53 1 1 0.0073526135982484933 0.992674351 0.00468322355 0.00264250115 1 2 0
55 1 1 0.0062134377760607613 0.9938058 0.003886248 0.00230790745 1 2 0
57 1 1 0.0073526135982484933 0.992674351 0.00468322355 0.00264250115 1 2 0
58 1 1 0.012213000330511811 0.9878613 0.00857989 0.00355893746 1 2 0
59 1 1 0.0073526135982484933 0.992674351 0.00468322355 0.00264250115 1 2 0
61 1 1 0.016997572281395461 0.9831461 0.0150941061 0.00175973971 1 2 0
62 1 1 0.0062134377760607613 0.9938058 0.003886248 0.00230790745 1 2 0
65 1 1 0.012213000330511811 0.9878613 0.00857989 0.00355893746 1 2 0
67 1 1 0.0062134377760607613 0.9938058 0.003886248 0.00230790745 1 2 0
75 1 1 0.012213000330511811 0.9878613 0.00857989 0.00355893746 1 2 0
78 1 1 0.016997572281395461 0.9831461 0.0150941061 0.00175973971 1 2 0
80 1 1 0.0073526135982484933 0.992674351 0.00468322355 0.00264250115 1 2 0
81 1 1 0.0073526135982484933 0.992674351 0.00468322355 0.00264250115 1 2 0
83 1 2 4.2093796995147192 0.9820518 0.01485558 0.003092663 2 1 0
84 1 1 0.016997572281395461 0.9831461 0.0150941061 0.00175973971 1 2 0
85 1 1 0.37872337668893652 0.684735 0.308789551 0.00647537876 1 2 0
86 1 1 0.043367889719735153 0.957559049 0.039393153 0.003047859 1 2 0
87 1 1 0.012213000330511811 0.9878613 0.00857989 0.00355893746 1 2 0
89 1 1 0.0073526135982484933 0.992674351 0.00468322355 0.00264250115 1 2 0
94 1 1 0.0062134377760607613 0.9938058 0.003886248 0.00230790745 1 2 0
101 2 2 0.0011134383604485582 0.9988872 0.000686069543 0.000426827959 2 0 1
103 2 2 0.0041885353949071905 0.9958202 0.00250371476 0.00167586142 2 1 0
107 2 2 0.0041885353949071905 0.9958202 0.00250371476 0.00167586142 2 1 0
110 2 2 0.0011134383604485582 0.9988872 0.000686069543 0.000426827959 2 0 1
114 2 2 0.0063545716100066839 0.9936656 0.003907088 0.00242741266 2 1 0
116 2 2 0.0041885353949071905 0.9958202 0.00250371476 0.00167586142 2 1 0
118 2 2 0.0055382144176810257 0.9944771 0.00311130448 0.00241164817 2 1 0
119 2 1 4.1934509368151227 0.9831461 0.0150941061 0.00175973971 1 2 0
124 2 2 0.0010213701720459014 0.998979151 0.0006811208 0.000339646853 2 0 1
126 2 2 0.38831541266765224 0.6781984 0.314145625 0.00765600568 2 1 0
127 2 2 0.38831541266765224 0.6781984 0.314145625 0.00765600568 2 1 0
130 2 2 0.0010213701720459014 0.998979151 0.0006811208 0.000339646853 2 0 1
134 2 1 1.0955395966130874 0.64945 0.334359139 0.0161907785 1 2 0
135 2 2 0.0055382144176810257 0.9944771 0.00311130448 0.00241164817 2 1 0
136 2 2 0.0055382144176810257 0.9944771 0.00311130448 0.00241164817 2 1 0
139 2 2 0.0011134383604485582 0.9988872 0.000686069543 0.000426827959 2 0 1
140 2 2 0.0055382144176810257 0.9944771 0.00311130448 0.00241164817 2 1 0
142 2 2 0.0011134383604485582 0.9988872 0.000686069543 0.000426827959 2 0 1
143 2 2 0.0055382144176810257 0.9944771 0.00311130448 0.00241164817 2 1 0
146 2 2 0.065049630430406014 0.937020957 0.0586935952 0.004285537 2 1 0
148 2 2 0.0063545716100066839 0.9936656 0.003907088 0.00242741266 2 1 0
149 2 2 0.0048432621878218415 0.995168447 0.00314460555 0.00168708875 2 1 0


@ -0,0 +1,151 @@
Instance Label Assigned Log-loss #1 Score #2 Score #3 Score #1 Class #2 Class #3 Class
5 0 0 0.0039186265649996796 0.996089041 0.003910775 2.08655385E-11 0 1 2
6 0 0 0.0066394791182328066 0.9933825 0.0066170604 2.15241977E-10 0 1 2
8 0 0 0.025494106553378942 0.9748281 0.0251719262 2.83894752E-09 0 1 2
9 0 0 0.019190772567240676 0.9809922 0.0190079622 2.70583778E-10 0 1 2
10 0 0 0.0035562493991592786 0.996450067 0.00355021679 4.92729945E-12 0 1 2
11 0 0 0.012516964292635404 0.987561047 0.0124393385 2.80085038E-10 0 1 2
18 0 0 0.0047769616990695207 0.99523443 0.00476594642 8.127354E-12 0 1 2
20 0 0 0.014707546996037419 0.9854001 0.01460007 8.914995E-11 0 1 2
21 0 0 0.003647833406238951 0.9963588 0.00364135345 3.50849176E-11 0 1 2
25 0 0 0.032317619176197167 0.968199 0.0318007767 1.10024678E-09 0 1 2
28 0 0 0.0057745087199158104 0.994242132 0.005757959 2.02363057E-11 0 1 2
31 0 0 0.0071621105155360203 0.9928635 0.00713674352 6.243001E-11 0 1 2
32 0 0 0.0014066451993830036 0.998594344 0.00140553329 7.324321E-13 0 1 2
35 0 0 0.0052270768592320212 0.994786561 0.00521328 2.50269475E-11 0 1 2
37 0 0 0.019190772567240676 0.9809922 0.0190079622 2.70583778E-10 0 1 2
40 0 0 0.0033063652339419308 0.9966991 0.00330088427 1.89582534E-11 0 1 2
41 0 0 0.077088331379826819 0.9258081 0.07419214 3.33435963E-08 0 1 2
44 0 0 0.010642032182541446 0.9894144 0.0105857411 3.503732E-10 0 1 2
45 0 0 0.017220520546649732 0.9829269 0.0170729719 8.533566E-10 0 1 2
46 0 0 0.0041847046934752278 0.995824039 0.00417628 1.51512067E-11 0 1 2
48 0 0 0.0036583023932260659 0.9963484 0.00365164177 6.70034037E-12 0 1 2
50 1 1 0.0098977138531054429 0.9901511 0.00557819242 0.00427070633 1 2 0
51 1 1 0.031642692265860799 0.9688527 0.0240902212 0.00705716759 1 2 0
52 1 1 0.035736352134450186 0.964894652 0.0334406 0.00166473968 1 2 0
54 1 1 0.072410342279000534 0.930149138 0.06803843 0.00181231985 1 2 0
56 1 1 0.093650567008779079 0.9106009 0.08499185 0.004407246 1 2 0
60 1 1 0.04249022665363885 0.958399832 0.0362676121 0.005332458 1 2 0
63 1 1 0.13750479017047515 0.8715302 0.127108455 0.00136140583 1 2 0
64 1 1 0.054985312386571866 0.94649905 0.048023738 0.005477275 1 0 2
66 1 1 0.32485345716523295 0.722633243 0.274839342 0.00252740132 1 2 0
68 1 1 0.41496231775214937 0.660365164 0.33927533 0.000359431346 1 2 0
69 1 1 0.021870636107098145 0.9783668 0.0153707173 0.006262359 1 2 0
70 1 2 0.95191325789517367 0.613012552 0.3860018 0.0009856701 2 1 0
71 1 1 0.019884826671693976 0.9803116 0.0117813116 0.007907144 1 0 2
72 1 1 0.59602620487261415 0.55099684 0.448828 0.0001750491 1 2 0
73 1 1 0.061367247268114919 0.9404778 0.0584481172 0.00107416452 1 2 0
74 1 1 0.014403823587870975 0.9856994 0.008161746 0.00613881 1 2 0
76 1 1 0.038603275709482521 0.962132335 0.0368479043 0.00101978832 1 2 0
77 1 1 0.29143003374400789 0.7471943 0.252085924 0.0007197116 1 2 0
79 1 1 0.031789553326302268 0.9687104 0.030109819 0.00117970794 1 0 2
82 1 1 0.020876873287020672 0.97933954 0.011350465 0.00930999 1 0 2
88 1 1 0.040395996500067109 0.960409045 0.0276476946 0.0119432509 1 2 0
90 1 1 0.1660756926194604 0.8469821 0.151687235 0.001330623 1 2 0
91 1 1 0.078328373844083643 0.924660742 0.07274478 0.002594536 1 2 0
92 1 1 0.023790381883611583 0.9764904 0.0172090363 0.006300515 1 2 0
93 1 1 0.031245715484219467 0.9692374 0.0224260911 0.008336624 1 0 2
95 1 1 0.026741238049869182 0.973613143 0.01771209 0.008674903 1 2 0
96 1 1 0.046394842362099863 0.954664946 0.0385362953 0.00679865666 1 2 0
97 1 1 0.020153788914160642 0.980047941 0.0141839078 0.00576809375 1 2 0
98 1 1 0.10663125927218892 0.898857057 0.0991064161 0.00203663576 1 0 2
99 1 1 0.042470947370459415 0.9584183 0.03420087 0.007380766 1 2 0
100 2 2 0.00061608453557407644 0.9993841 0.0006157423 5.20009849E-08 2 1 0
102 2 2 0.03520011910757518 0.9654122 0.03458591 2.11709448E-06 2 1 0
104 2 2 0.0055899402333488843 0.994425654 0.005573941 4.13432133E-07 2 1 0
105 2 2 0.010776984144366402 0.9892809 0.0107188253 7.398785E-08 2 1 0
106 2 2 0.035450938842364201 0.9651701 0.03480021 2.97981169E-05 2 1 0
108 2 2 0.02539346900894791 0.974926233 0.025073193 4.91410162E-07 2 1 0
109 2 2 0.010955823550611778 0.989104 0.0108943991 1.82278131E-06 2 1 0
111 2 2 0.064874143655307498 0.9371854 0.06280439 1.01753494E-05 2 1 0
112 2 2 0.065967573668299667 0.9361612 0.06382503 1.35423443E-05 2 1 0
113 2 2 0.010099395894045529 0.989951432 0.01004624 2.24200812E-06 2 1 0
115 2 2 0.025224193337550441 0.9750913 0.0248927623 1.61508033E-05 2 1 0
117 2 2 0.040343741890525309 0.960459232 0.03953917 1.57988256E-06 2 1 0
120 2 2 0.02318047647021565 0.9770861 0.0229095761 4.537662E-06 2 1 0
121 2 2 0.023067445323975651 0.9771966 0.0227888674 1.47519113E-05 2 1 0
122 2 2 0.010407475995621918 0.9896465 0.0103535978 3.082614E-08 2 1 0
123 2 2 0.3268465712630802 0.7211944 0.278644919 0.000160744428 2 1 0
125 2 2 0.2461805298239165 0.7817811 0.218203962 1.49805483E-05 2 1 0
128 2 2 0.0093602944019322582 0.9906834 0.009315666 7.62494949E-07 2 1 0
129 2 1 0.72902345881548536 0.517581046 0.482379824 3.91569338E-05 1 2 0
131 2 2 0.50472807995053026 0.6036697 0.3962875 4.27463638E-05 2 1 0
132 2 2 0.0054111588821948668 0.994603455 0.005395858 4.54962333E-07 2 1 0
133 2 1 0.80137756193014009 0.551096559 0.4487104 0.000192968553 1 2 0
137 2 2 0.138713849419592 0.8704771 0.129494175 2.89310847E-05 2 1 0
138 2 2 0.41129824947871368 0.6627892 0.336678356 0.000532399 2 1 0
141 2 2 0.15623141596710644 0.8553612 0.144476026 0.0001627244 2 1 0
144 2 2 0.0057015524406273162 0.9943147 0.005683652 1.455023E-06 2 1 0
145 2 2 0.050843236304848244 0.950427651 0.04954298 2.935713E-05 2 1 0
147 2 2 0.14442514040251331 0.8655197 0.134411365 6.885501E-05 2 1 0
0 0 0 0.00389289626734539 0.9961147 0.00388562 4.35209946E-13 0 1 2
1 0 0 0.012644744843927328 0.987434864 0.0125649935 1.20199536E-11 0 1 2
2 0 0 0.0068264018990562975 0.993196845 0.006802921 3.977263E-12 0 1 2
3 0 0 0.017663957572318569 0.982491136 0.0175090022 4.86960923E-11 0 1 2
4 0 0 0.0033855462771325432 0.9966202 0.00337941712 3.752789E-13 0 1 2
7 0 0 0.0072031139444910328 0.992822766 0.007177612 2.37773321E-12 0 1 2
12 0 0 0.013219808473621149 0.9868672 0.0131330229 9.955089E-12 0 1 2
13 0 0 0.0067634503253886933 0.9932594 0.006741032 5.51140253E-12 0 1 2
14 0 0 0.0004300803787185737 0.99957 0.0004298582 4.855314E-16 0 1 2
15 0 0 0.0005798232354486219 0.999420345 0.000579488464 3.10407929E-15 0 1 2
16 0 0 0.0010280527364537647 0.9989725 0.00102712377 2.45259982E-14 0 1 2
17 0 0 0.0039882812042811395 0.996019661 0.003979865 7.59585157E-13 0 1 2
19 0 0 0.0029926331194280015 0.99701184 0.00298810145 3.63156038E-13 0 1 2
22 0 0 0.0011491819568960609 0.9988515 0.00114855962 6.334511E-14 0 1 2
23 0 0 0.017463655687227918 0.98268795 0.0173117835 7.403799E-11 0 1 2
24 0 0 0.031715412558545882 0.968782246 0.0312175769 1.26180913E-10 0 1 2
26 0 0 0.010593417790593548 0.9894625 0.0105371028 1.63020032E-11 0 1 2
27 0 0 0.0050983234754958173 0.994914651 0.005085708 6.78546358E-13 0 1 2
29 0 0 0.0187730771464137 0.98140204 0.0185974967 4.52416021E-11 0 1 2
30 0 0 0.021565460472193786 0.9786654 0.02133441 5.23468144E-11 0 1 2
33 0 0 0.00068568844551682487 0.999314547 0.000685595965 2.65897216E-15 0 1 2
34 0 0 0.014051409604386065 0.986046851 0.0139533244 9.251471E-12 0 1 2
36 0 0 0.0021076343691449066 0.9978946 0.0021053094 4.41079566E-14 0 1 2
38 0 0 0.012707705555407789 0.9873727 0.0126277 3.37066139E-11 0 1 2
39 0 0 0.0067262453481025412 0.9932963 0.006703257 1.64459679E-12 0 1 2
42 0 0 0.0083856724102233139 0.9916494 0.008350652 1.20181885E-11 0 1 2
43 0 0 0.0090265536748324916 0.991014063 0.00898587 2.96402451E-11 0 1 2
47 0 0 0.010248185046573205 0.989804149 0.0101959091 1.29456289E-11 0 1 2
49 0 0 0.0063266791540774296 0.9936933 0.006306445 1.76891347E-12 0 1 2
53 1 1 0.10908627419032911 0.896653056 0.100547634 0.00279926369 1 2 0
55 1 1 0.13382980096571034 0.874738932 0.123627327 0.00163388788 1 2 0
57 1 1 0.032485513905672912 0.9680365 0.02845792 0.00350564788 1 0 2
58 1 1 0.014136100959005988 0.985963345 0.0110323895 0.0030042557 1 2 0
59 1 1 0.088965912338422562 0.914876759 0.07777976 0.00734356465 1 2 0
61 1 1 0.037055462184954119 0.9636227 0.0281057619 0.008271456 1 2 0
62 1 1 0.010662093085153215 0.989394546 0.006804485 0.00380102685 1 2 0
65 1 1 0.0124035025724159 0.9876731 0.009420066 0.00290678721 1 0 2
67 1 1 0.011022113126039873 0.9890384 0.00670390576 0.00425770739 1 0 2
75 1 1 0.012557041083151357 0.98752147 0.007134205 0.00534433033 1 0 2
78 1 1 0.11787630637387389 0.888806 0.108796746 0.002397152 1 2 0
80 1 1 0.018740888550878902 0.98143363 0.0107581457 0.00780817959 1 2 0
81 1 1 0.01532223104169358 0.984794557 0.0112667764 0.003938712 1 0 2
83 1 2 2.0409789338544426 0.870069146 0.129901484 2.94224083E-05 2 1 0
84 1 1 0.43623206546961257 0.6464677 0.352110773 0.00142157311 1 2 0
85 1 1 0.05070113760972858 0.9505627 0.0423230827 0.00711412542 1 2 0
86 1 1 0.023943664227516325 0.9763407 0.020373445 0.003285837 1 2 0
87 1 1 0.067026826092418473 0.9351701 0.0635259151 0.00130392925 1 2 0
89 1 1 0.063164711929489775 0.938788831 0.0567600951 0.00445109932 1 2 0
94 1 1 0.064262629430308249 0.9377587 0.05857061 0.00367062865 1 2 0
101 2 2 0.01641893911409267 0.9837151 0.0162818022 2.98962755E-06 2 1 0
103 2 2 0.021575936016214155 0.978655159 0.0213434044 1.57773707E-06 2 1 0
107 2 2 0.015651238280913731 0.9844706 0.0155291827 2.12675829E-07 2 1 0
110 2 2 0.31672575146404086 0.7285305 0.271245658 0.000223787472 2 1 0
114 2 2 0.0015682948475232242 0.998432934 0.00156654033 3.143049E-07 2 1 0
116 2 2 0.083519922011792383 0.919872761 0.0801154 1.17583113E-05 2 1 0
118 2 2 8.392686160427524E-05 0.9999161 8.3445615E-05 9.315716E-11 2 1 0
119 2 2 0.083749782035049736 0.919661343 0.08032946 9.204548E-06 2 1 0
124 2 2 0.030857563825488415 0.9696137 0.03038175 4.521532E-06 2 1 0
126 2 2 0.42635579718355782 0.652884 0.3468231 0.000292929617 2 1 0
127 2 2 0.37435529902904258 0.6877325 0.312001377 0.000266118121 2 1 0
130 2 2 0.023466557862108686 0.976806641 0.0231929272 5.31211867E-07 2 1 0
134 2 2 0.039153548126797694 0.961603045 0.0383953266 1.45486206E-06 2 1 0
135 2 2 0.01277634509471311 0.9873049 0.012694709 4.930856E-07 2 1 0
136 2 2 0.0041656711133312499 0.995843 0.0041561476 7.559071E-07 2 1 0
139 2 2 0.12062356177145393 0.886367559 0.113597296 3.53434771E-05 2 1 0
140 2 2 0.0054774415012383863 0.994537532 0.0054619913 6.990564E-07 2 1 0
142 2 2 0.01641893911409267 0.9837151 0.0162818022 2.98962755E-06 2 1 0
143 2 2 0.0041274853374921976 0.995881 0.00411869865 2.57669541E-07 2 1 0
146 2 2 0.062442762435292995 0.939466834 0.0605186969 1.4492558E-05 2 1 0
148 2 2 0.01353833699897897 0.9865529 0.0134423738 4.60714637E-06 2 1 0
149 2 2 0.091871736345547891 0.912222147 0.08774486 3.309085E-05 2 1 0


@ -0,0 +1,66 @@
maml.exe TrainTest test=%Data% tr=MulticlassLogisticRegression{l1=0.001 l2=0.1 ot=1e-3 nt=1} norm=No dout=%Output% data=%Data% out=%Output% seed=1
Not adding a normalizer.
Beginning optimization
num vars: 15
improvement criterion: Mean Improvement
L1 regularization selected 15 of 15 weights.
Not training a calibrator because it is not needed.
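The "num vars: 15" above squares with a multinomial model over the four iris features, assuming one bias term per class: 3 classes x (4 features + 1 bias) = 15 parameters. In the TreeFeat runs the count is larger and varies per fold (45, 48, 72, 81) because Features is replaced by tree-leaf indicators and the learned trees differ per fold. A trivial check of the arithmetic:

print(3 * (4 + 1))  # 15, matching "num vars: 15" for the plain-feature runs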
Confusion table
||========================
PREDICTED || 0 | 1 | 2 | Recall
TRUTH ||========================
0 || 50 | 0 | 0 | 1.0000
1 || 0 | 48 | 2 | 0.9600
2 || 0 | 1 | 49 | 0.9800
||========================
Precision ||1.0000 |0.9796 |0.9608 |
Accuracy(micro-avg): 0.980000
Accuracy(macro-avg): 0.980000
Log-loss: 0.072218
Log-loss reduction: 93.426390
OVERALL RESULTS
---------------------------------------
Accuracy(micro-avg): 0.980000 (0.0000)
Accuracy(macro-avg): 0.980000 (0.0000)
Log-loss: 0.072218 (0.0000)
Log-loss reduction: 93.426390 (0.0000)
---------------------------------------
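The log-loss reduction reported throughout these files matches 100 * (H - LL) / H, where H is the entropy of the test-set label distribution and LL the model's log-loss; again this is inferred from the numbers, not taken from documentation. For this TrainTest run the test labels are balanced (50 per class), so H = ln 3:

import math
H = math.log(3)            # entropy of the balanced 3-class test labels
LL = 0.072218              # reported log-loss
print(100 * (H - LL) / H)  # 93.4264..., matching "Log-loss reduction: 93.426390"

For the CV folds, using the fold's empirical label entropy for H reproduces, to rounding, figures such as the 90.612517 reported earlier.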
Physical memory usage(MB): %Number%
Virtual memory usage(MB): %Number%
%DateTime% Time elapsed(s): %Number%
--- Progress log ---
[1] 'LBFGS data prep' started.
[1] 'LBFGS data prep' finished in %Time%.
[2] 'LBFGS Optimizer' started.
[2] (%Time%) 0 iterations Loss: 1.0986123085022
[2] (%Time%) 1 iterations Loss: 1.09053671360016 Improvement: 0.008076
[2] (%Time%) 2 iterations Loss: 0.964357972145081 Improvement: 0.1026
[2] (%Time%) 3 iterations Loss: 0.874466478824615 Improvement: 0.09291
[2] (%Time%) 4 iterations Loss: 0.53207802772522 Improvement: 0.2808
[2] (%Time%) 5 iterations Loss: 0.460592895746231 Improvement: 0.1236
[2] (%Time%) 6 iterations Loss: 0.381620526313782 Improvement: 0.09013
[2] (%Time%) 7 iterations Loss: 0.301508545875549 Improvement: 0.08262
[2] (%Time%) 8 iterations Loss: 0.230116382241249 Improvement: 0.0742
[2] (%Time%) 9 iterations Loss: 0.170902773737907 Improvement: 0.06296
[2] (%Time%) 10 iterations Loss: 0.143164187669754 Improvement: 0.03654
[2] (%Time%) 11 iterations Loss: 0.135387286543846 Improvement: 0.01497
[2] (%Time%) 12 iterations Loss: 0.133318409323692 Improvement: 0.005294
[2] (%Time%) 13 iterations Loss: 0.132491216063499 Improvement: 0.001944
[2] (%Time%) 14 iterations Loss: 0.124604761600494 Improvement: 0.006401
[2] (%Time%) 15 iterations Loss: 0.120595537126064 Improvement: 0.004607
[2] (%Time%) 16 iterations Loss: 0.119206272065639 Improvement: 0.002194
[2] (%Time%) 17 iterations Loss: 0.117203310132027 Improvement: 0.002051
[2] (%Time%) 18 iterations Loss: 0.116163291037083 Improvement: 0.001293
[2] (%Time%) 19 iterations Loss: 0.109811097383499 Improvement: 0.005087
[2] (%Time%) 20 iterations Loss: 0.106156274676323 Improvement: 0.004013
[2] (%Time%) 21 iterations Loss: 0.104246392846107 Improvement: 0.002436
[2] (%Time%) 22 iterations Loss: 0.10310410708189 Improvement: 0.001466
[2] (%Time%) 23 iterations Loss: 0.102218925952911 Improvement: 0.00103
[2] (%Time%) 24 iterations Loss: 0.101610459387302 Improvement: 0.0007139
[2] 'LBFGS Optimizer' finished in %Time%.
[3] 'Saving model' started.
[3] 'Saving model' finished in %Time%.


@ -0,0 +1,4 @@
MulticlassLogisticRegression
Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /l2 /l1 /ot /nt Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings
0.98 0.98 0.072218 93.42639 0.1 0.001 0.001 1 MulticlassLogisticRegression %Data% %Data% %Output% 99 0 0 maml.exe TrainTest test=%Data% tr=MulticlassLogisticRegression{l1=0.001 l2=0.1 ot=1e-3 nt=1} norm=No dout=%Output% data=%Data% out=%Output% seed=1 /l2:0.1;/l1:0.001;/ot:0.001;/nt:1


@ -0,0 +1,69 @@
maml.exe TrainTest test=%Data% tr=MulticlassLogisticRegression{l1=0.001 l2=0.1 ot=1e-3 nt=1} norm=No dout=%Output% loader=Text{col=Label:U4[0-2]:0 col=Features:1-*} data=%Data% out=%Output% seed=1 xf=TreeFeat{lps=0 trainer=ftr{iter=3}} xf=copy{col=Features:Leaves}
Making per-feature arrays
Changing data from row-wise to column-wise
Processed 150 instances
Binning and forming Feature objects
Reserved memory for tree learner: 20436 bytes
Starting to train ...
Not training a calibrator because it is not needed.
Not adding a normalizer.
Beginning optimization
num vars: 72
improvement criterion: Mean Improvement
L1 regularization selected 72 of 72 weights.
Not training a calibrator because it is not needed.
Confusion table
||========================
PREDICTED || 0 | 1 | 2 | Recall
TRUTH ||========================
0 || 50 | 0 | 0 | 1.0000
1 || 0 | 49 | 1 | 0.9800
2 || 0 | 2 | 48 | 0.9600
||========================
Precision ||1.0000 |0.9608 |0.9796 |
Accuracy(micro-avg): 0.980000
Accuracy(macro-avg): 0.980000
Log-loss: 0.048652
Log-loss reduction: 95.571483
OVERALL RESULTS
---------------------------------------
Accuracy(micro-avg): 0.980000 (0.0000)
Accuracy(macro-avg): 0.980000 (0.0000)
Log-loss: 0.048652 (0.0000)
Log-loss reduction: 95.571483 (0.0000)
---------------------------------------
Physical memory usage(MB): %Number%
Virtual memory usage(MB): %Number%
%DateTime% Time elapsed(s): %Number%
--- Progress log ---
[1] 'FastTree data preparation' started.
[1] 'FastTree data preparation' finished in %Time%.
[2] 'FastTree in-memory bins initialization' started.
[2] 'FastTree in-memory bins initialization' finished in %Time%.
[3] 'FastTree feature conversion' started.
[3] 'FastTree feature conversion' finished in %Time%.
[4] 'FastTree training' started.
[4] 'FastTree training' finished in %Time%.
[5] 'LBFGS data prep' started.
[5] 'LBFGS data prep' finished in %Time%.
[6] 'LBFGS Optimizer' started.
[6] (%Time%) 0 iterations Loss: 1.0986123085022
[6] (%Time%) 1 iterations Loss: 0.62896740436554 Improvement: 0.4696
[6] (%Time%) 2 iterations Loss: 0.213765218853951 Improvement: 0.4261
[6] (%Time%) 3 iterations Loss: 0.144495338201523 Improvement: 0.1542
[6] (%Time%) 4 iterations Loss: 0.106832779943943 Improvement: 0.06646
[6] (%Time%) 5 iterations Loss: 0.0936193987727165 Improvement: 0.02649
[6] (%Time%) 6 iterations Loss: 0.083323560655117 Improvement: 0.01434
[6] (%Time%) 7 iterations Loss: 0.0787383615970612 Improvement: 0.007024
[6] (%Time%) 8 iterations Loss: 0.0768212750554085 Improvement: 0.003194
[6] (%Time%) 9 iterations Loss: 0.0741848275065422 Improvement: 0.002776
[6] (%Time%) 10 iterations Loss: 0.0730299279093742 Improvement: 0.00156
[6] (%Time%) 11 iterations Loss: 0.0716971307992935 Improvement: 0.00139
[6] (%Time%) 12 iterations Loss: 0.0708837881684303 Improvement: 0.0009574
[6] 'LBFGS Optimizer' finished in %Time%.
[7] 'Saving model' started.
[7] 'Saving model' finished in %Time%.


@ -0,0 +1,67 @@
maml.exe TrainTest test=%Data% tr=MulticlassLogisticRegression{l1=0.001 l2=0.1 ot=1e-3 nt=1} norm=No dout=%Output% loader=Text{col=Label:U4[0-2]:0 col=Features:1-*} data=%Data% out=%Output% seed=1 xf=TreeFeat{lps=2 trainer=ftr{iter=3}} xf=copy{col=Features:Leaves}
Making per-feature arrays
Changing data from row-wise to column-wise
Processed 150 instances
Binning and forming Feature objects
Reserved memory for tree learner: 20436 bytes
Starting to train ...
Not training a calibrator because it is not needed.
Not adding a normalizer.
Beginning optimization
num vars: 81
improvement criterion: Mean Improvement
L1 regularization selected 81 of 81 weights.
Not training a calibrator because it is not needed.
Confusion table
||========================
PREDICTED || 0 | 1 | 2 | Recall
TRUTH ||========================
0 || 50 | 0 | 0 | 1.0000
1 || 0 | 49 | 1 | 0.9800
2 || 0 | 3 | 47 | 0.9400
||========================
Precision ||1.0000 |0.9423 |0.9792 |
Accuracy(micro-avg): 0.973333
Accuracy(macro-avg): 0.973333
Log-loss: 0.052580
Log-loss reduction: 95.213998
OVERALL RESULTS
---------------------------------------
Accuracy(micro-avg): 0.973333 (0.0000)
Accuracy(macro-avg): 0.973333 (0.0000)
Log-loss: 0.052580 (0.0000)
Log-loss reduction: 95.213998 (0.0000)
---------------------------------------
Physical memory usage(MB): %Number%
Virtual memory usage(MB): %Number%
%DateTime% Time elapsed(s): %Number%
--- Progress log ---
[1] 'FastTree data preparation' started.
[1] 'FastTree data preparation' finished in %Time%.
[2] 'FastTree in-memory bins initialization' started.
[2] 'FastTree in-memory bins initialization' finished in %Time%.
[3] 'FastTree feature conversion' started.
[3] 'FastTree feature conversion' finished in %Time%.
[4] 'FastTree training' started.
[4] 'FastTree training' finished in %Time%.
[5] 'LBFGS data prep' started.
[5] 'LBFGS data prep' finished in %Time%.
[6] 'LBFGS Optimizer' started.
[6] (%Time%) 0 iterations Loss: 1.0986123085022
[6] (%Time%) 1 iterations Loss: 0.659841060638428 Improvement: 0.4388
[6] (%Time%) 2 iterations Loss: 0.203571543097496 Improvement: 0.4528
[6] (%Time%) 3 iterations Loss: 0.133906096220016 Improvement: 0.1609
[6] (%Time%) 4 iterations Loss: 0.108745984733105 Improvement: 0.05869
[6] (%Time%) 5 iterations Loss: 0.0906000584363937 Improvement: 0.02825
[6] (%Time%) 6 iterations Loss: 0.0810708180069923 Improvement: 0.01421
[6] (%Time%) 7 iterations Loss: 0.0780067816376686 Improvement: 0.005849
[6] (%Time%) 8 iterations Loss: 0.0760208815336227 Improvement: 0.002952
[6] (%Time%) 9 iterations Loss: 0.0755626112222672 Improvement: 0.001082
[6] (%Time%) 10 iterations Loss: 0.0751652047038078 Improvement: 0.0005685
[6] 'LBFGS Optimizer' finished in %Time%.
[7] 'Saving model' started.
[7] 'Saving model' finished in %Time%.


@ -0,0 +1,4 @@
MulticlassLogisticRegression
Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /l2 /l1 /ot /nt Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings
0.973333 0.973333 0.05258 95.214 0.1 0.001 0.001 1 MulticlassLogisticRegression %Data% %Data% %Output% 99 0 0 maml.exe TrainTest test=%Data% tr=MulticlassLogisticRegression{l1=0.001 l2=0.1 ot=1e-3 nt=1} norm=No dout=%Output% loader=Text{col=Label:U4[0-2]:0 col=Features:1-*} data=%Data% out=%Output% seed=1 xf=TreeFeat{lps=2 trainer=ftr{iter=3}} xf=copy{col=Features:Leaves} /l2:0.1;/l1:0.001;/ot:0.001;/nt:1


@ -0,0 +1,151 @@
Instance Label Assigned Log-loss #1 Score #2 Score #3 Score #1 Class #2 Class #3 Class
0 0 0 0.0032863915501666614 0.996719 0.00183256145 0.00144841243 0 2 1
1 0 0 0.0057439947230261707 0.9942725 0.00320922164 0.00251825759 0 2 1
2 0 0 0.0019955864125022859 0.9980064 0.0011133193 0.0008801577 0 2 1
3 0 0 0.0057439947230261707 0.9942725 0.00320922164 0.00251825759 0 2 1
4 0 0 0.0032863915501666614 0.996719 0.00183256145 0.00144841243 0 2 1
5 0 0 0.0040450138254278329 0.995963156 0.002256032 0.00178077759 0 2 1
6 0 0 0.0019955864125022859 0.9980064 0.0011133193 0.0008801577 0 2 1
7 0 0 0.0032863915501666614 0.996719 0.00183256145 0.00144841243 0 2 1
8 0 0 0.0057439947230261707 0.9942725 0.00320922164 0.00251825759 0 2 1
9 0 0 0.0057439947230261707 0.9942725 0.00320922164 0.00251825759 0 2 1
10 0 0 0.0040450138254278329 0.995963156 0.002256032 0.00178077759 0 2 1
11 0 0 0.0019955864125022859 0.9980064 0.0011133193 0.0008801577 0 2 1
12 0 0 0.0057439947230261707 0.9942725 0.00320922164 0.00251825759 0 2 1
13 0 0 0.0057439947230261707 0.9942725 0.00320922164 0.00251825759 0 2 1
14 0 0 0.0040450138254278329 0.995963156 0.002256032 0.00178077759 0 2 1
15 0 0 0.0040450138254278329 0.995963156 0.002256032 0.00178077759 0 2 1
16 0 0 0.0040450138254278329 0.995963156 0.002256032 0.00178077759 0 2 1
17 0 0 0.0032863915501666614 0.996719 0.00183256145 0.00144841243 0 2 1
18 0 0 0.0040450138254278329 0.995963156 0.002256032 0.00178077759 0 2 1
19 0 0 0.0040450138254278329 0.995963156 0.002256032 0.00178077759 0 2 1
20 0 0 0.0032863915501666614 0.996719 0.00183256145 0.00144841243 0 2 1
21 0 0 0.0040450138254278329 0.995963156 0.002256032 0.00178077759 0 2 1
22 0 0 0.0019955864125022859 0.9980064 0.0011133193 0.0008801577 0 2 1
23 0 0 0.0032863915501666614 0.996719 0.00183256145 0.00144841243 0 2 1
24 0 0 0.0019955864125022859 0.9980064 0.0011133193 0.0008801577 0 2 1
25 0 0 0.009448319936012424 0.9905962 0.005269755 0.004134127 0 2 1
26 0 0 0.0032863915501666614 0.996719 0.00183256145 0.00144841243 0 2 1
27 0 0 0.0032863915501666614 0.996719 0.00183256145 0.00144841243 0 2 1
28 0 0 0.0032863915501666614 0.996719 0.00183256145 0.00144841243 0 2 1
29 0 0 0.0019955864125022859 0.9980064 0.0011133193 0.0008801577 0 2 1
30 0 0 0.0057439947230261707 0.9942725 0.00320922164 0.00251825759 0 2 1
31 0 0 0.0032863915501666614 0.996719 0.00183256145 0.00144841243 0 2 1
32 0 0 0.0040450138254278329 0.995963156 0.002256032 0.00178077759 0 2 1
33 0 0 0.0040450138254278329 0.995963156 0.002256032 0.00178077759 0 2 1
34 0 0 0.0057439947230261707 0.9942725 0.00320922164 0.00251825759 0 2 1
35 0 0 0.0032863915501666614 0.996719 0.00183256145 0.00144841243 0 2 1
36 0 0 0.0032863915501666614 0.996719 0.00183256145 0.00144841243 0 2 1
37 0 0 0.0057439947230261707 0.9942725 0.00320922164 0.00251825759 0 2 1
38 0 0 0.0057439947230261707 0.9942725 0.00320922164 0.00251825759 0 2 1
39 0 0 0.0032863915501666614 0.996719 0.00183256145 0.00144841243 0 2 1
40 0 0 0.0032863915501666614 0.996719 0.00183256145 0.00144841243 0 2 1
41 0 0 0.0057439947230261707 0.9942725 0.00320922164 0.00251825759 0 2 1
42 0 0 0.0019955864125022859 0.9980064 0.0011133193 0.0008801577 0 2 1
43 0 0 0.0032863915501666614 0.996719 0.00183256145 0.00144841243 0 2 1
44 0 0 0.0040450138254278329 0.995963156 0.002256032 0.00178077759 0 2 1
45 0 0 0.0057439947230261707 0.9942725 0.00320922164 0.00251825759 0 2 1
46 0 0 0.0040450138254278329 0.995963156 0.002256032 0.00178077759 0 2 1
47 0 0 0.0019955864125022859 0.9980064 0.0011133193 0.0008801577 0 2 1
48 0 0 0.0040450138254278329 0.995963156 0.002256032 0.00178077759 0 2 1
49 0 0 0.0032863915501666614 0.996719 0.00183256145 0.00144841243 0 2 1
50 1 1 0.021321326112559644 0.978904366 0.0185372382 0.00255840342 1 2 0
51 1 1 0.024120783122654886 0.9761678 0.02104084 0.002791362 1 2 0
52 1 1 0.057755711518056148 0.9438805 0.0540314876 0.00208806922 1 2 0
53 1 1 0.014104847140120776 0.98599416 0.0111910813 0.00281463587 1 2 0
54 1 1 0.024120783122654886 0.9761678 0.02104084 0.002791362 1 2 0
55 1 1 0.010453672105827311 0.9896008 0.00705624232 0.00334312418 1 2 0
56 1 1 0.057755711518056148 0.9438805 0.0540314876 0.00208806922 1 2 0
57 1 1 0.014104847140120776 0.98599416 0.0111910813 0.00281463587 1 2 0
58 1 1 0.010453672105827311 0.9896008 0.00705624232 0.00334312418 1 2 0
59 1 1 0.022228986093586683 0.978016257 0.0183866452 0.00359701412 1 2 0
60 1 1 0.014104847140120776 0.98599416 0.0111910813 0.00281463587 1 2 0
61 1 1 0.002874388769313735 0.997129738 0.00218560919 0.0006844687 1 2 0
62 1 1 0.0025157957624932083 0.997487366 0.00131361035 0.001199242 1 2 0
63 1 1 0.021321326112559644 0.978904366 0.0185372382 0.00255840342 1 2 0
64 1 1 0.0037231527319482333 0.99628377 0.002173206 0.00154322956 1 2 0
65 1 1 0.00050450174369561743 0.9994956 0.0003428632 0.000161661 1 0 2
66 1 1 0.024120783122654886 0.9761678 0.02104084 0.002791362 1 2 0
67 1 1 0.0037231527319482333 0.99628377 0.002173206 0.00154322956 1 2 0
68 1 1 0.024120783122654886 0.9761678 0.02104084 0.002791362 1 2 0
69 1 1 0.0025157957624932083 0.997487366 0.00131361035 0.001199242 1 2 0
70 1 2 1.5216391055963379 0.774603248 0.218353689 0.007043035 2 1 0
71 1 1 0.0037231527319482333 0.99628377 0.002173206 0.00154322956 1 2 0
72 1 1 0.057755711518056148 0.9438805 0.0540314876 0.00208806922 1 2 0
73 1 1 0.021321326112559644 0.978904366 0.0185372382 0.00255840342 1 2 0
74 1 1 0.0037231527319482333 0.99628377 0.002173206 0.00154322956 1 2 0
75 1 1 0.00050450174369561743 0.9994956 0.0003428632 0.000161661 1 0 2
76 1 1 0.021321326112559644 0.978904366 0.0185372382 0.00255840342 1 2 0
77 1 1 0.66969455479852757 0.5118649 0.480443418 0.00769158732 1 2 0
78 1 1 0.024120783122654886 0.9761678 0.02104084 0.002791362 1 2 0
79 1 1 0.0037231527319482333 0.99628377 0.002173206 0.00154322956 1 2 0
80 1 1 0.014104847140120776 0.98599416 0.0111910813 0.00281463587 1 2 0
81 1 1 0.014104847140120776 0.98599416 0.0111910813 0.00281463587 1 2 0
82 1 1 0.0037231527319482333 0.99628377 0.002173206 0.00154322956 1 2 0
83 1 1 0.66969455479852757 0.5118649 0.480443418 0.00769158732 1 2 0
84 1 1 0.6145726680541016 0.540872 0.450446427 0.008681549 1 2 0
85 1 1 0.024120783122654886 0.9761678 0.02104084 0.002791362 1 2 0
86 1 1 0.057755711518056148 0.9438805 0.0540314876 0.00208806922 1 2 0
87 1 1 0.00050450174369561743 0.9994956 0.0003428632 0.000161661 1 0 2
88 1 1 0.0037231527319482333 0.99628377 0.002173206 0.00154322956 1 2 0
89 1 1 0.014104847140120776 0.98599416 0.0111910813 0.00281463587 1 2 0
90 1 1 0.0081417295935607269 0.9918913 0.00619866839 0.00190992071 1 2 0
91 1 1 0.010453672105827311 0.9896008 0.00705624232 0.00334312418 1 2 0
92 1 1 0.0037231527319482333 0.99628377 0.002173206 0.00154322956 1 2 0
93 1 1 0.014104847140120776 0.98599416 0.0111910813 0.00281463587 1 2 0
94 1 1 0.0037231527319482333 0.99628377 0.002173206 0.00154322956 1 2 0
95 1 1 0.0037231527319482333 0.99628377 0.002173206 0.00154322956 1 2 0
96 1 1 0.0037231527319482333 0.99628377 0.002173206 0.00154322956 1 2 0
97 1 1 0.0037231527319482333 0.99628377 0.002173206 0.00154322956 1 2 0
98 1 1 0.014104847140120776 0.98599416 0.0111910813 0.00281463587 1 2 0
99 1 1 0.0037231527319482333 0.99628377 0.002173206 0.00154322956 1 2 0
100 2 2 0.0027975792788985728 0.99720633 0.00140296342 0.00139077951 2 0 1
101 2 2 0.0060782606764628114 0.9939402 0.00406833738 0.00199138 2 1 0
102 2 2 0.0046234153867987507 0.995387256 0.00270372839 0.00190896646 2 1 0
103 2 2 0.00090502910908000144 0.9990954 0.0005804024 0.000324115856 2 0 1
104 2 2 0.0046234153867987507 0.995387256 0.00270372839 0.00190896646 2 1 0
105 2 2 0.0046234153867987507 0.995387256 0.00270372839 0.00190896646 2 1 0
106 2 1 0.79751612827614782 0.540872 0.450446427 0.008681549 1 2 0
107 2 2 0.00090502910908000144 0.9990954 0.0005804024 0.000324115856 2 0 1
108 2 2 0.00090502910908000144 0.9990954 0.0005804024 0.000324115856 2 0 1
109 2 2 0.004610062044209896 0.995400548 0.00268544327 0.0019138573 2 1 0
110 2 2 0.0027107946424502136 0.9972929 0.00169270684 0.00101456023 2 1 0
111 2 2 0.00038625307961716445 0.9996138 0.000308882067 7.720726E-05 2 0 1
112 2 2 0.0046234153867987507 0.995387256 0.00270372839 0.00190896646 2 1 0
113 2 2 0.0060782606764628114 0.9939402 0.00406833738 0.00199138 2 1 0
114 2 2 0.0060782606764628114 0.9939402 0.00406833738 0.00199138 2 1 0
115 2 2 0.0027975792788985728 0.99720633 0.00140296342 0.00139077951 2 0 1
116 2 2 0.00090502910908000144 0.9990954 0.0005804024 0.000324115856 2 0 1
117 2 2 0.004610062044209896 0.995400548 0.00268544327 0.0019138573 2 1 0
118 2 2 0.0046234153867987507 0.995387256 0.00270372839 0.00190896646 2 1 0
119 2 1 0.73304581395521817 0.5118649 0.480443418 0.00769158732 1 2 0
120 2 2 0.004610062044209896 0.995400548 0.00268544327 0.0019138573 2 1 0
121 2 2 0.069479934485845732 0.932878852 0.06260931 0.0045117354 2 1 0
122 2 2 0.0046234153867987507 0.995387256 0.00270372839 0.00190896646 2 1 0
123 2 2 0.061853403505252154 0.9400207 0.0568036325 0.00317562232 2 1 0
124 2 2 0.004610062044209896 0.995400548 0.00268544327 0.0019138573 2 1 0
125 2 2 0.00090502910908000144 0.9990954 0.0005804024 0.000324115856 2 0 1
126 2 2 0.25540431923984186 0.774603248 0.218353689 0.007043035 2 1 0
127 2 2 0.25540431923984186 0.774603248 0.218353689 0.007043035 2 1 0
128 2 2 0.0028037955474232222 0.997200131 0.00140025932 0.00139938819 2 1 0
129 2 2 0.095699813778642617 0.908736765 0.08522418 0.00603903 2 1 0
130 2 2 0.00057171233510822377 0.999428451 0.000422048615 0.0001493216 2 0 1
131 2 2 0.004610062044209896 0.995400548 0.00268544327 0.0019138573 2 1 0
132 2 2 0.0028037955474232222 0.997200131 0.00140025932 0.00139938819 2 1 0
133 2 1 0.73304581395521817 0.5118649 0.480443418 0.00769158732 1 2 0
134 2 2 0.095699813778642617 0.908736765 0.08522418 0.00603903 2 1 0
135 2 2 0.0046234153867987507 0.995387256 0.00270372839 0.00190896646 2 1 0
136 2 2 0.0027975792788985728 0.99720633 0.00140296342 0.00139077951 2 0 1
137 2 2 0.00090502910908000144 0.9990954 0.0005804024 0.000324115856 2 0 1
138 2 2 0.25540431923984186 0.774603248 0.218353689 0.007043035 2 1 0
139 2 2 0.0046234153867987507 0.995387256 0.00270372839 0.00190896646 2 1 0
140 2 2 0.0046234153867987507 0.995387256 0.00270372839 0.00190896646 2 1 0
141 2 2 0.0027107946424502136 0.9972929 0.00169270684 0.00101456023 2 1 0
142 2 2 0.0060782606764628114 0.9939402 0.00406833738 0.00199138 2 1 0
143 2 2 0.004610062044209896 0.995400548 0.00268544327 0.0019138573 2 1 0
144 2 2 0.004610062044209896 0.995400548 0.00268544327 0.0019138573 2 1 0
145 2 2 0.0046234153867987507 0.995387256 0.00270372839 0.00190896646 2 1 0
146 2 2 0.001620293327937558 0.998381 0.000876012957 0.000743192446 2 1 0
147 2 2 0.0046234153867987507 0.995387256 0.00270372839 0.00190896646 2 1 0
148 2 2 0.010240055565253037 0.9898122 0.00643974636 0.00374803343 2 1 0
149 2 2 0.020741951150490413 0.9794717 0.0168389976 0.00368933426 2 1 0


@ -0,0 +1,4 @@
MulticlassLogisticRegression
Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /l2 /l1 /ot /nt Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings
0.98 0.98 0.048652 95.57148 0.1 0.001 0.001 1 MulticlassLogisticRegression %Data% %Data% %Output% 99 0 0 maml.exe TrainTest test=%Data% tr=MulticlassLogisticRegression{l1=0.001 l2=0.1 ot=1e-3 nt=1} norm=No dout=%Output% loader=Text{col=Label:U4[0-2]:0 col=Features:1-*} data=%Data% out=%Output% seed=1 xf=TreeFeat{lps=0 trainer=ftr{iter=3}} xf=copy{col=Features:Leaves} /l2:0.1;/l1:0.001;/ot:0.001;/nt:1


@ -0,0 +1,151 @@
Instance Label Assigned Log-loss #1 Score #2 Score #3 Score #1 Class #2 Class #3 Class
0 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
1 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
2 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
3 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
4 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
5 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
6 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
7 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
8 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
9 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
10 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
11 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
12 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
13 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
14 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
15 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
16 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
17 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
18 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
19 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
20 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
21 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
22 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
23 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
24 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
25 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
26 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
27 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
28 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
29 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
30 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
31 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
32 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
33 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
34 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
35 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
36 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
37 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
38 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
39 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
40 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
41 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
42 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
43 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
44 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
45 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
46 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
47 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
48 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
49 0 0 0.0023665394446646414 0.997636259 0.00138825784 0.0009752967 0 2 1
50 1 1 0.017149938128276376 0.9829963 0.0158224776 0.00118121586 1 2 0
51 1 1 0.034770253463596357 0.9658273 0.03191964 0.002253152 1 2 0
52 1 1 0.46518926341456768 0.628016233 0.3688916 0.00309218233 1 2 0
53 1 1 0.0040569232970457132 0.9959513 0.00308299367 0.0009658426 1 2 0
54 1 1 0.034770253463596357 0.9658273 0.03191964 0.002253152 1 2 0
55 1 1 0.010133053663594078 0.9899181 0.00823902 0.00184285524 1 2 0
56 1 1 0.063370253429356399 0.9385959 0.0599906556 0.00141336839 1 2 0
57 1 1 0.0040569232970457132 0.9959513 0.00308299367 0.0009658426 1 2 0
58 1 1 0.010133053663594078 0.9899181 0.00823902 0.00184285524 1 2 0
59 1 1 0.0040569232970457132 0.9959513 0.00308299367 0.0009658426 1 2 0
60 1 1 0.0040569232970457132 0.9959513 0.00308299367 0.0009658426 1 2 0
61 1 1 0.0029611876049965222 0.9970432 0.00249748817 0.000459320465 1 2 0
62 1 1 0.004469892647378515 0.9955401 0.00340973283 0.00105001475 1 2 0
63 1 1 0.017149938128276376 0.9829963 0.0158224776 0.00118121586 1 2 0
64 1 1 0.0040569232970457132 0.9959513 0.00308299367 0.0009658426 1 2 0
65 1 1 0.00099804121244394858 0.999002457 0.000630193 0.000367255969 1 2 0
66 1 1 0.034770253463596357 0.9658273 0.03191964 0.002253152 1 2 0
67 1 1 0.004469892647378515 0.9955401 0.00340973283 0.00105001475 1 2 0
68 1 1 0.034770253463596357 0.9658273 0.03191964 0.002253152 1 2 0
69 1 1 0.0040569232970457132 0.9959513 0.00308299367 0.0009658426 1 2 0
70 1 2 1.115149236434358 0.6676605 0.327866346 0.004473159 2 1 0
71 1 1 0.004469892647378515 0.9955401 0.00340973283 0.00105001475 1 2 0
72 1 1 0.46518926341456768 0.628016233 0.3688916 0.00309218233 1 2 0
73 1 1 0.017149938128276376 0.9829963 0.0158224776 0.00118121586 1 2 0
74 1 1 0.004469892647378515 0.9955401 0.00340973283 0.00105001475 1 2 0
75 1 1 0.00099804121244394858 0.999002457 0.000630193 0.000367255969 1 2 0
76 1 1 0.017149938128276376 0.9829963 0.0158224776 0.00118121586 1 2 0
77 1 1 0.36444069762825837 0.694585 0.301859677 0.00355520425 1 2 0
78 1 1 0.034770253463596357 0.9658273 0.03191964 0.002253152 1 2 0
79 1 1 0.004469892647378515 0.9955401 0.00340973283 0.00105001475 1 2 0
80 1 1 0.0040569232970457132 0.9959513 0.00308299367 0.0009658426 1 2 0
81 1 1 0.0040569232970457132 0.9959513 0.00308299367 0.0009658426 1 2 0
82 1 1 0.004469892647378515 0.9955401 0.00340973283 0.00105001475 1 2 0
83 1 1 0.46518926341456768 0.628016233 0.3688916 0.00309218233 1 2 0
84 1 1 0.034770253463596357 0.9658273 0.03191964 0.002253152 1 2 0
85 1 1 0.034770253463596357 0.9658273 0.03191964 0.002253152 1 2 0
86 1 1 0.063370253429356399 0.9385959 0.0599906556 0.00141336839 1 2 0
87 1 1 0.00099804121244394858 0.999002457 0.000630193 0.000367255969 1 2 0
88 1 1 0.0040569232970457132 0.9959513 0.00308299367 0.0009658426 1 2 0
89 1 1 0.0040569232970457132 0.9959513 0.00308299367 0.0009658426 1 2 0
90 1 1 0.00099804121244394858 0.999002457 0.000630193 0.000367255969 1 2 0
91 1 1 0.010133053663594078 0.9899181 0.00823902 0.00184285524 1 2 0
92 1 1 0.004469892647378515 0.9955401 0.00340973283 0.00105001475 1 2 0
93 1 1 0.0040569232970457132 0.9959513 0.00308299367 0.0009658426 1 2 0
94 1 1 0.0040569232970457132 0.9959513 0.00308299367 0.0009658426 1 2 0
95 1 1 0.004469892647378515 0.9955401 0.00340973283 0.00105001475 1 2 0
96 1 1 0.004469892647378515 0.9955401 0.00340973283 0.00105001475 1 2 0
97 1 1 0.004469892647378515 0.9955401 0.00340973283 0.00105001475 1 2 0
98 1 1 0.0040569232970457132 0.9959513 0.00308299367 0.0009658426 1 2 0
99 1 1 0.004469892647378515 0.9955401 0.00340973283 0.00105001475 1 2 0
100 2 2 0.002694119944734754 0.9973095 0.00185667316 0.000833787955 2 1 0
101 2 2 0.015195136716169138 0.9849197 0.0135209756 0.00155933737 2 1 0
102 2 2 0.0027241824146091785 0.9972795 0.001886568 0.000833805068 2 1 0
103 2 2 0.00033902907870159203 0.999661 0.000212118714 0.000126917963 2 0 1
104 2 2 0.0027241824146091785 0.9972795 0.001886568 0.000833805068 2 1 0
105 2 2 0.0036106245223803345 0.9963959 0.00253308774 0.00107112422 2 1 0
106 2 2 0.44574496282930443 0.640347064 0.354238272 0.005414769 2 1 0
107 2 2 0.00044343765178377152 0.999556661 0.000272705773 0.00017054558 2 0 1
108 2 2 0.00033902907870159203 0.999661 0.000212118714 0.000126917963 2 0 1
109 2 2 0.0035705457622275658 0.9964358 0.002492974 0.00107111351 2 1 0
110 2 2 0.0017090133953465831 0.998292446 0.00131496089 0.000392403 2 1 0
111 2 2 0.00020553793666013725 0.9997945 0.000142994817 6.261714E-05 2 0 1
112 2 2 0.0027241824146091785 0.9972795 0.001886568 0.000833805068 2 1 0
113 2 2 0.054315558430791992 0.9471332 0.05060918 0.00225768937 2 1 0
114 2 2 0.015195136716169138 0.9849197 0.0135209756 0.00155933737 2 1 0
115 2 2 0.0014806020430615894 0.9985205 0.0009170116 0.0005626859 2 1 0
116 2 2 0.00033902907870159203 0.999661 0.000212118714 0.000126917963 2 0 1
117 2 2 0.0035705457622275658 0.9964358 0.002492974 0.00107111351 2 1 0
118 2 2 0.0036106245223803345 0.9963959 0.00253308774 0.00107112422 2 1 0
119 2 1 0.99725245372779048 0.628016233 0.3688916 0.00309218233 1 2 0
120 2 2 0.002694119944734754 0.9973095 0.00185667316 0.000833787955 2 1 0
121 2 2 0.054315558430791992 0.9471332 0.05060918 0.00225768937 2 1 0
122 2 2 0.0036106245223803345 0.9963959 0.00253308774 0.00107112422 2 1 0
123 2 2 0.0057025115683581925 0.9943137 0.005097879 0.000588452967 2 1 0
124 2 2 0.002694119944734754 0.9973095 0.00185667316 0.000833787955 2 1 0
125 2 2 0.00044343765178377152 0.999556661 0.000272705773 0.00017054558 2 0 1
126 2 2 0.40397550595071413 0.6676605 0.327866346 0.004473159 2 1 0
127 2 2 0.054315558430791992 0.9471332 0.05060918 0.00225768937 2 1 0
128 2 2 0.0027241824146091785 0.9972795 0.001886568 0.000833805068 2 1 0
129 2 2 0.080654836005556188 0.922512054 0.0746743754 0.002813671 2 1 0
130 2 2 0.00044343765178377152 0.999556661 0.000272705773 0.00017054558 2 0 1
131 2 2 0.0035705457622275658 0.9964358 0.002492974 0.00107111351 2 1 0
132 2 2 0.0027241824146091785 0.9972795 0.001886568 0.000833805068 2 1 0
133 2 1 0.99725245372779048 0.628016233 0.3688916 0.00309218233 1 2 0
134 2 2 0.080654836005556188 0.922512054 0.0746743754 0.002813671 2 1 0
135 2 2 0.0036106245223803345 0.9963959 0.00253308774 0.00107112422 2 1 0
136 2 2 0.002694119944734754 0.9973095 0.00185667316 0.000833787955 2 1 0
137 2 2 0.00033902907870159203 0.999661 0.000212118714 0.000126917963 2 0 1
138 2 2 0.40397550595071413 0.6676605 0.327866346 0.004473159 2 1 0
139 2 2 0.001495823863942061 0.9985053 0.000931790448 0.00056270574 2 1 0
140 2 2 0.0027241824146091785 0.9972795 0.001886568 0.000833805068 2 1 0
141 2 2 0.0017090133953465831 0.998292446 0.00131496089 0.000392403 2 1 0
142 2 2 0.015195136716169138 0.9849197 0.0135209756 0.00155933737 2 1 0
143 2 2 0.002694119944734754 0.9973095 0.00185667316 0.000833787955 2 1 0
144 2 2 0.002694119944734754 0.9973095 0.00185667316 0.000833787955 2 1 0
145 2 2 0.001495823863942061 0.9985053 0.000931790448 0.00056270574 2 1 0
146 2 2 0.0057025115683581925 0.9943137 0.005097879 0.000588452967 2 1 0
147 2 2 0.001495823863942061 0.9985053 0.000931790448 0.00056270574 2 1 0
148 2 2 0.011771430559717877 0.9882976 0.009459271 0.00224316632 2 1 0
149 2 2 0.015195136716169138 0.9849197 0.0135209756 0.00155933737 2 1 0
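
A reading note on these per-instance files: the Log-loss column appears to equal the negative natural log of the probability the model assigned to the true label, so for misclassified rows it comes from the #2 Score rather than the #1 Score. A minimal spot-check in C# against two rows of the table above (this column interpretation is inferred from the numbers themselves, not taken from the test code):

using System;

class LogLossCheck
{
    static void Main()
    {
        // Instance 119: Label = 2 but Assigned = 1, so the true-class
        // probability is the #2 Score, 0.3688916.
        Console.WriteLine(-Math.Log(0.3688916));   // ~0.9972525, matches the Log-loss column

        // Instance 112: correctly assigned, so the true-class probability
        // is the #1 Score, 0.9972795.
        Console.WriteLine(-Math.Log(0.9972795));   // ~0.0027242, matches
    }
}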

View file

@@ -0,0 +1,151 @@
Instance Label Assigned Log-loss #1 Score #2 Score #3 Score #1 Class #2 Class #3 Class
0 0 0 0.0011081873220258624 0.9988924 0.00110751833 1.39210543E-13 0 1 2
1 0 0 0.0070400107011273196 0.9929847 0.00701517845 6.033473E-12 0 1 2
2 0 0 0.0022964599964677033 0.9977062 0.00229415065 1.31948857E-12 0 1 2
3 0 0 0.0071621105155360203 0.9928635 0.00713652233 1.568493E-11 0 1 2
4 0 0 0.00076675000483667258 0.999233544 0.0007660015 9.598792E-14 0 1 2
5 0 0 0.00070191216101047108 0.999298334 0.000701762037 1.767304E-13 0 1 2
6 0 0 0.0015125382870987553 0.9984886 0.001511353 1.99488364E-12 0 1 2
7 0 0 0.0023565021891202727 0.9976463 0.002354226 7.60859745E-13 0 1 2
8 0 0 0.010224338755429094 0.989827752 0.0101724006 4.50257E-11 0 1 2
9 0 0 0.007608435929714304 0.992420435 0.0075791534 3.604188E-12 0 1 2
10 0 0 0.00077438528059811521 0.9992259 0.0007741245 3.67899477E-14 0 1 2
11 0 0 0.0034665877492082151 0.9965394 0.003460485 2.86678125E-12 0 1 2
12 0 0 0.0075073002188368347 0.9925208 0.00747876149 4.1399926E-12 0 1 2
13 0 0 0.0023870324875610912 0.9976158 0.002384095 1.57063577E-12 0 1 2
14 0 0 8.0111851446806079E-05 0.9999199 8.027427E-05 1.70893926E-16 0 1 2
15 0 0 5.149973917474053E-05 0.9999485 5.10407663E-05 6.476145E-16 0 1 2
16 0 0 0.0001506918954455657 0.9998493 0.000150896725 7.78926E-15 0 1 2
17 0 0 0.0010404633316893413 0.9989601 0.00103944924 2.66592168E-13 0 1 2
18 0 0 0.0010795457796386584 0.998921037 0.00107904861 7.74551457E-14 0 1 2
19 0 0 0.00050354758858267578 0.9994966 0.000503402262 8.393544E-14 0 1 2
20 0 0 0.0050583048608438558 0.994954467 0.005045267 1.21072217E-12 0 1 2
21 0 0 0.00068377979041008637 0.999316454 0.0006838254 3.0647487E-13 0 1 2
22 0 0 0.00016499926836891572 0.999835 0.000165351419 1.263396E-14 0 1 2
23 0 0 0.0060672865852426372 0.9939511 0.00604853174 3.67925274E-11 0 1 2
24 0 0 0.010941481475240914 0.989118159 0.0108818505 2.95929635E-11 0 1 2
25 0 0 0.015109992553949719 0.9850036 0.0149965808 2.16925887E-11 0 1 2
26 0 0 0.0030479341887764532 0.9969567 0.00304320036 6.085656E-12 0 1 2
27 0 0 0.001626024669168138 0.9983753 0.00162422692 2.31037764E-13 0 1 2
28 0 0 0.001602144297411704 0.998399138 0.00160105072 2.01864784E-13 0 1 2
29 0 0 0.0072584081998492492 0.9927679 0.0072323475 1.36550138E-11 0 1 2
30 0 0 0.010481860611279436 0.9895729 0.0104267541 1.97468066E-11 0 1 2
31 0 0 0.002068511721101683 0.9979336 0.00206610747 9.345534E-13 0 1 2
32 0 0 0.00018789149083271798 0.9998121 0.000188248741 2.51111472E-15 0 1 2
33 0 0 8.2973107700441223E-05 0.99991703 8.28565E-05 5.085376E-16 0 1 2
34 0 0 0.007608435929714304 0.992420435 0.0075791534 3.604188E-12 0 1 2
35 0 0 0.0015602356235697344 0.998441 0.00155867427 2.66303E-13 0 1 2
36 0 0 0.00075147962820386978 0.9992488 0.000751453335 2.136476E-14 0 1 2
37 0 0 0.007608435929714304 0.992420435 0.0075791534 3.604188E-12 0 1 2
38 0 0 0.0048217604636851983 0.995189846 0.004810019 1.08745435E-11 0 1 2
39 0 0 0.002354650087500236 0.9976481 0.002351973 5.78798457E-13 0 1 2
40 0 0 0.00070859259095342364 0.999291658 0.000708641659 1.60603938E-13 0 1 2
41 0 0 0.05864808223146472 0.9430386 0.0569613 1.37746259E-09 0 1 2
42 0 0 0.0023031510872087162 0.9976995 0.00230074348 2.997355E-12 0 1 2
43 0 0 0.0018548872509052583 0.998146832 0.00185356825 1.17147351E-11 0 1 2
44 0 0 0.0021972937219044641 0.9978051 0.00219457783 3.64257843E-12 0 1 2
45 0 0 0.0066146987210017709 0.99340713 0.00659268349 1.51941237E-11 0 1 2
46 0 0 0.00078774715346045169 0.999212563 0.0007876004 9.565127E-14 0 1 2
47 0 0 0.0033750801452213284 0.9966306 0.00336904428 3.782066E-12 0 1 2
48 0 0 0.00077533969416734708 0.999224961 0.000774866843 4.83624235E-14 0 1 2
49 0 0 0.0023231648847811732 0.997679532 0.0023206654 6.64796668E-13 0 1 2
50 1 1 0.0060152962161020033 0.994002759 0.00400032569 0.00199713139 1 2 0
51 1 1 0.023390285894295822 0.976881146 0.0186334234 0.004485526 1 2 0
52 1 1 0.031560872878237577 0.968932 0.0304043666 0.0006636082 1 2 0
53 1 1 0.084995522035361218 0.9185164 0.08058272 0.000900880957 1 2 0
54 1 1 0.063648693965999981 0.9383346 0.06099709 0.0006684431 1 2 0
55 1 1 0.089732440645847733 0.914175749 0.0849883854 0.000835962768 1 2 0
56 1 1 0.085243960631224261 0.918288231 0.0786991939 0.00301247532 1 2 0
57 1 1 0.019522210073795572 0.9806671 0.0168701168 0.00246294332 1 0 2
58 1 1 0.0098853132401056608 0.9901634 0.008936252 0.000900245446 1 2 0
59 1 1 0.092869061379903875 0.9113128 0.082560055 0.00612714142 1 2 0
60 1 1 0.014433393557966039 0.985670269 0.0125383837 0.001791122 1 2 0
61 1 1 0.045261412953232133 0.9557476 0.0376504771 0.006601967 1 2 0
62 1 1 0.0040368747706341756 0.995971262 0.00346833514 0.0005603906 1 2 0
63 1 1 0.10217425852556423 0.9028722 0.09653503 0.000592705 1 2 0
64 1 1 0.043710241505504309 0.9572313 0.0402606353 0.00250799 1 0 2
65 1 1 0.0080103773041286016 0.9920216 0.004349263 0.00362897874 1 0 2
66 1 1 0.25858017840179764 0.7721471 0.22617422 0.00167868065 1 2 0
67 1 1 0.0046706026176554045 0.9953403 0.00242033927 0.002239571 1 0 2
68 1 1 0.41731980067242436 0.6588102 0.341115355 7.465122E-05 1 2 0
69 1 1 0.0088439096717521076 0.9911951 0.00616651075 0.00263821287 1 2 0
70 1 2 1.1248658137051197 0.674737453 0.324696034 0.0005666125 2 1 0
71 1 1 0.010528060157971919 0.989527166 0.00620779675 0.00426507555 1 0 2
72 1 1 0.61055850847847093 0.5430475 0.45691213 4.016438E-05 1 2 0
73 1 1 0.033657066868998158 0.966903031 0.03271102 0.000386148255 1 2 0
74 1 1 0.0075878956642768964 0.9924408 0.004705976 0.0028532357 1 2 0
75 1 1 0.0093107195215621132 0.9907325 0.00627015159 0.00299742026 1 2 0
76 1 1 0.030895247221220504 0.969577134 0.0301216021 0.0003013593 1 2 0
77 1 1 0.36569263913806865 0.693716 0.306031 0.000253220467 1 2 0
78 1 1 0.12329888682954605 0.8839994 0.114668027 0.00133259967 1 2 0
79 1 1 0.017058382448049515 0.9830863 0.01655834 0.000355129 1 0 2
80 1 1 0.0098905503734397458 0.9901582 0.00717079034 0.002671104 1 2 0
81 1 1 0.0060877956403809913 0.9939307 0.00369574339 0.00237366813 1 0 2
82 1 1 0.010145216499399137 0.9899061 0.00589559553 0.004198351 1 0 2
83 1 2 1.7833237210655208 0.8319088 0.168078572 1.28333786E-05 2 1 0
84 1 1 0.41036916458021622 0.6634053 0.335155129 0.00143950759 1 2 0
85 1 1 0.074115509877208191 0.9285644 0.0619499721 0.009485668 1 2 0
86 1 1 0.025645754545416 0.9746803 0.0238822829 0.00143721956 1 2 0
87 1 1 0.047437678245327429 0.9536699 0.0461276025 0.000202563053 1 2 0
88 1 1 0.023062687682211796 0.9772012 0.0140921762 0.008706564 1 2 0
89 1 1 0.051273104302225654 0.9500192 0.0480280071 0.00195292477 1 2 0
90 1 1 0.088339816972695842 0.915449739 0.08400096 0.0005494726 1 2 0
91 1 1 0.054495307349226502 0.946962953 0.0517152026 0.00132180948 1 2 0
92 1 1 0.011043085652212155 0.989017665 0.00821223 0.00277022063 1 2 0
93 1 1 0.014310342276657975 0.985791564 0.0117249712 0.00248343241 1 0 2
94 1 1 0.049737456333124833 0.9514792 0.04661856 0.00190224242 1 2 0
95 1 1 0.0136037104159475 0.9864884 0.007890029 0.00562156225 1 2 0
96 1 1 0.025367974956993136 0.9749511 0.02095981 0.004089204 1 2 0
97 1 1 0.010987280419450679 0.989072859 0.008089061 0.002838109 1 2 0
98 1 1 0.081510331556232662 0.9217232 0.07764353 0.000633295451 1 0 2
99 1 1 0.023043169388812675 0.9772203 0.01862162 0.00415814156 1 2 0
100 2 2 0.00011063234023480313 0.9998894 0.000110273286 4.32938174E-09 2 1 0
101 2 2 0.013696642277261918 0.98639673 0.0136018591 1.25408337E-06 2 1 0
102 2 2 0.010937685077955646 0.9891219 0.0108777983 1.61583827E-07 2 1 0
103 2 2 0.025969000497816655 0.9743653 0.0256338213 6.83490839E-07 2 1 0
104 2 2 0.0015602356235697344 0.998441 0.001558811 3.603013E-08 2 1 0
105 2 2 0.0026788798728120773 0.9973247 0.00267550955 2.70883938E-09 2 1 0
106 2 2 0.030226255425741325 0.970226 0.02976235 1.14747845E-05 2 1 0
107 2 2 0.024762178100796752 0.9755419 0.0244582947 4.46629365E-08 2 1 0
108 2 2 0.011696829349738497 0.9883713 0.0116284136 3.284619E-08 2 1 0
109 2 2 0.0019693083254810079 0.9980326 0.00196759985 1.60975972E-07 2 1 0
110 2 2 0.16616345155985321 0.8469078 0.152995974 9.62346458E-05 2 1 0
111 2 2 0.031529930899520178 0.968961954 0.0310366023 1.33421645E-06 2 1 0
112 2 2 0.023412251624584629 0.9768597 0.023138877 1.59477213E-06 2 1 0
113 2 2 0.0043978696251254648 0.9956118 0.004387678 3.01778442E-07 2 1 0
114 2 2 0.00051404334489802697 0.9994861 0.000513345643 9.412643E-08 2 1 0
115 2 2 0.0073065605196956278 0.992720068 0.0072776177 2.56496514E-06 2 1 0
116 2 2 0.085114022402325365 0.918407559 0.08158773 4.63447259E-06 2 1 0
117 2 2 0.010481860611279436 0.9895729 0.010426864 1.47937257E-07 2 1 0
118 2 2 8.5834372140887149E-05 0.999914169 8.559229E-05 7.07661924E-12 2 1 0
119 2 2 0.14361526314671486 0.866220951 0.133777007 2.21380515E-06 2 1 0
120 2 2 0.0058536456241034358 0.994163454 0.00583615573 4.441736E-07 2 1 0
121 2 2 0.011340123133497897 0.988723934 0.0112725906 3.45189E-06 2 1 0
122 2 2 0.0027837721283902814 0.9972201 0.00277955178 8.58777549E-10 2 1 0
123 2 2 0.21739579576290038 0.804611444 0.195352 3.663746E-05 2 1 0
124 2 2 0.018497017687550327 0.981673 0.018325327 1.77490767E-06 2 1 0
125 2 2 0.13388922074520956 0.874686956 0.125310808 2.1976773E-06 2 1 0
126 2 2 0.30901959520667954 0.7341664 0.2657276 0.000105864478 2 1 0
127 2 2 0.2791385441945381 0.7564351 0.243426546 0.000138261472 2 1 0
128 2 2 0.0030822522819900344 0.9969225 0.00307789678 6.864916E-08 2 1 0
129 2 2 0.56484888647314668 0.56844604 0.431547046 6.86041358E-06 2 1 0
130 2 2 0.02669716058567774 0.973656058 0.0263440143 7.645144E-08 2 1 0
131 2 2 0.22184797501072168 0.801037133 0.198955223 7.891673E-06 2 1 0
132 2 2 0.0015115831696781118 0.998489559 0.00151082047 3.59063E-08 2 1 0
133 2 1 0.80210662927418985 0.551559 0.4483834 5.73823527E-05 1 2 0
134 2 2 0.10944270798526276 0.8963335 0.103665754 7.052652E-07 2 1 0
135 2 2 0.0061168808403790232 0.9939018 0.00609836355 4.79565649E-08 2 1 0
136 2 2 0.0014457419615165081 0.9985553 0.0014441628 3.58452127E-07 2 1 0
137 2 2 0.085367229331733907 0.918175042 0.08181802 6.72193E-06 2 1 0
138 2 2 0.31077265568961848 0.7328805 0.266897172 0.0002224321 2 1 0
139 2 2 0.059129692998003365 0.9425845 0.0574071258 8.420951E-06 2 1 0
140 2 2 0.0018806248795496621 0.998121142 0.00187838788 1.54243637E-07 2 1 0
141 2 2 0.046927431083058919 0.954156637 0.0458193719 2.41775069E-05 2 1 0
142 2 2 0.013696642277261918 0.98639673 0.0136018591 1.25408337E-06 2 1 0
143 2 2 0.0020218054648021076 0.997980237 0.002019646 7.11884454E-08 2 1 0
144 2 2 0.0010766816705180082 0.9989239 0.001075794 1.343269E-07 2 1 0
145 2 2 0.014124373105892742 0.9859749 0.014021622 3.4730067E-06 2 1 0
146 2 2 0.04510638702287053 0.9558958 0.0441013277 2.863036E-06 2 1 0
147 2 2 0.06771376104309032 0.934527934 0.06545844 1.33754611E-05 2 1 0
148 2 2 0.0049534133678820593 0.995058835 0.00493860524 2.47912658E-06 2 1 0
149 2 2 0.08111477993566267 0.922087848 0.07789185 2.04699681E-05 2 1 0

View file

@@ -156,25 +156,24 @@ namespace Microsoft.ML.Runtime.RunTests
         /// <summary>
         /// Multiclass Logistic Regression test.
         /// </summary>
-        [Fact(Skip = "Need CoreTLC specific baseline update")]
+        [Fact]
         [TestCategory("Multiclass")]
         [TestCategory("Logistic Regression")]
         public void MulticlassLRTest()
         {
-            RunOneAllTests(TestLearners.multiclassLogisticRegression, TestDatasets.iris);
-            RunOneAllTests(TestLearners.multiclassLogisticRegression, TestDatasets.irisLabelName);
+            RunOneAllTests(TestLearners.multiclassLogisticRegression, TestDatasets.iris, precision: 10_000);
             Done();
         }

         /// <summary>
         /// Multiclass Logistic Regression with non-negative coefficients test.
         /// </summary>
-        [Fact(Skip = "Need CoreTLC specific baseline update")]
+        [Fact]
         [TestCategory("Multiclass")]
         [TestCategory("Logistic Regression")]
         public void MulticlassLRNonNegativeTest()
         {
-            RunOneAllTests(TestLearners.multiclassLogisticRegressionNonNegative, TestDatasets.iris);
+            RunOneAllTests(TestLearners.multiclassLogisticRegressionNonNegative, TestDatasets.iris, precision: 10_000);
             Done();
         }
@@ -193,7 +192,7 @@ namespace Microsoft.ML.Runtime.RunTests
         /// <summary>
         /// Multiclass Logistic Regression test with a tree featurizer.
         /// </summary>
-        [Fact(Skip = "Need CoreTLC specific baseline update")]
+        [Fact]
         [TestCategory("Multiclass")]
         [TestCategory("Logistic Regression")]
         [TestCategory("FastTree")]
@@ -201,8 +200,8 @@
         {
             RunMTAThread(() =>
             {
-                RunOneAllTests(TestLearners.multiclassLogisticRegression, TestDatasets.irisTreeFeaturized);
-                RunOneAllTests(TestLearners.multiclassLogisticRegression, TestDatasets.irisTreeFeaturizedPermuted);
+                RunOneAllTests(TestLearners.multiclassLogisticRegression, TestDatasets.irisTreeFeaturized, precision: 10_000);
+                RunOneAllTests(TestLearners.multiclassLogisticRegression, TestDatasets.irisTreeFeaturizedPermuted, precision: 10_000);
             });
             Done();
         }
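
For context, the only functional change to these tests besides removing the Skip attribute is the looser precision: 10_000 argument, which relaxes how closely numeric fields in the emitted files must match the checked-in baselines. A minimal sketch of what such a knob could mean (the rounding rule below is an assumption for illustration; the actual comparison lives in CheckEqualityFromPathsCore, shown further down):

using System;

static class PrecisionSketch
{
    // Hypothetical rule: two values match at a given precision when they
    // are equal after scaling by it and rounding, i.e. they agree to
    // roughly log10(precision) decimal places.
    static bool MatchesAt(double expected, double actual, decimal precision)
        => Math.Round(expected * (double)precision) == Math.Round(actual * (double)precision);

    static void Main()
    {
        // These differ at the default 10_000_000 but match at the looser 10_000.
        Console.WriteLine(MatchesAt(0.99538726, 0.99538741, 10_000_000));  // False
        Console.WriteLine(MatchesAt(0.99538726, 0.99538741, 10_000));      // True
    }
}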

View file

@@ -24,6 +24,7 @@ namespace Microsoft.ML.Runtime.RunTests
     /// </summary>
     public abstract partial class BaseTestBaseline : BaseTestClass, IDisposable
     {
+        public const decimal Tolerance = 10_000_000;
         private readonly ITestOutputHelper _output;

         protected BaseTestBaseline(ITestOutputHelper helper) : base(helper)
@@ -374,12 +375,12 @@
         /// Check whether two files are same ignoring volatile differences (path, dates, times, etc).
         /// Returns true if the check passes.
         /// </summary>
-        protected bool CheckEqualityNormalized(string dir, string name, string nameBase = null)
+        protected bool CheckEqualityNormalized(string dir, string name, string nameBase = null, decimal precision = Tolerance)
         {
-            return CheckEqualityCore(dir, name, nameBase ?? name, true);
+            return CheckEqualityCore(dir, name, nameBase ?? name, true, precision);
         }

-        protected bool CheckEqualityCore(string dir, string name, string nameBase, bool normalize)
+        protected bool CheckEqualityCore(string dir, string name, string nameBase, bool normalize, decimal precision = Tolerance)
         {
             Contracts.Assert(IsActive);
             Contracts.AssertValue(dir); // Can be empty.
@@ -406,7 +407,7 @@
             if (!CheckBaseFile(basePath))
                 return false;

-            bool res = CheckEqualityFromPathsCore(relPath, basePath, outPath);
+            bool res = CheckEqualityFromPathsCore(relPath, basePath, outPath, precision: precision);

             // No need to keep the raw (unnormalized) output file.
             if (normalize && res)
@@ -523,7 +524,7 @@
         /// skipping the given number of lines on the output, and finding the corresponding line
         /// in the baseline.
         /// </summary>
-        protected bool CheckEqualityNormalized(string dir, string name, string suffix, int skip)
+        protected bool CheckEqualityNormalized(string dir, string name, string suffix, int skip, decimal precision = Tolerance)
         {
             Contracts.Assert(IsActive);
             Contracts.AssertValue(dir); // Can be empty.
@@ -544,7 +545,7 @@
             if (!CheckBaseFile(basePath))
                 return false;

-            bool res = CheckEqualityFromPathsCore(relPath, basePath, outPath, skip);
+            bool res = CheckEqualityFromPathsCore(relPath, basePath, outPath, skip, precision);

             // No need to keep the raw (unnormalized) output file.
             if (res)
@@ -553,7 +554,7 @@
             return res;
         }

-        protected bool CheckEqualityFromPathsCore(string relPath, string basePath, string outPath, int skip = 0, decimal precision = 10000000)
+        protected bool CheckEqualityFromPathsCore(string relPath, string basePath, string outPath, int skip = 0, decimal precision = Tolerance)
         {
             Contracts.Assert(skip >= 0);
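
A small design note on this hunk: C# requires optional-parameter defaults to be compile-time constants, which is why Tolerance is introduced as public const decimal rather than static readonly — only a const can appear as the default in all the signatures above. A stripped-down sketch of the pattern (names mirror the diff; the body is a stand-in, not the repository's code):

abstract class BaselineCheckerSketch
{
    // A const (including const decimal) may serve as an optional-parameter
    // default; a static readonly field could not.
    public const decimal Tolerance = 10_000_000;

    public bool CheckEqualityNormalized(string dir, string name,
        string nameBase = null, decimal precision = Tolerance)
        => CheckEqualityCore(dir, name, nameBase ?? name, normalize: true, precision: precision);

    protected abstract bool CheckEqualityCore(string dir, string name,
        string nameBase, bool normalize, decimal precision);
}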

View file

@@ -90,7 +90,7 @@ namespace Microsoft.ML.Runtime.RunTests
         /// <summary>
         /// Run the predictor with given args and check if it adds up
         /// </summary>
-        protected void Run(RunContext ctx)
+        protected void Run(RunContext ctx, decimal precision = Tolerance)
         {
             Contracts.Assert(IsActive);
             List<string> args = new List<string>();
@@ -164,7 +164,7 @@
             }
             var consOutPath = ctx.StdoutPath();
             TestCore(ctx, ctx.Command.ToString(), runcmd);
-            bool matched = consOutPath.CheckEqualityNormalized();
+            bool matched = consOutPath.CheckEqualityNormalized(precision);

             if (modelPath != null && (ctx.Summary || ctx.SaveAsIni))
             {
@@ -190,7 +190,7 @@
                 }
                 MainForTest(Env, LogWriter, str);
-                files.ForEach(file => CheckEqualityNormalized(dir, file));
+                files.ForEach(file => CheckEqualityNormalized(dir, file, precision: precision));
             }

             if (ctx.Command == Cmd.Train || ctx.Command == Cmd.Test || ctx.ExpectedToFail)
@@ -351,11 +351,11 @@
         /// Run TrainTest, CV, and TrainSaveTest for a single predictor on a single dataset.
         /// </summary>
         protected void RunOneAllTests(PredictorAndArgs predictor, TestDataset dataset,
-            string[] extraSettings = null, string extraTag = "", bool summary = false)
+            string[] extraSettings = null, string extraTag = "", bool summary = false, decimal precision = Tolerance)
         {
             Contracts.Assert(IsActive);
-            Run_TrainTest(predictor, dataset, extraSettings, extraTag, summary: summary);
-            Run_CV(predictor, dataset, extraSettings, extraTag, useTest: true);
+            Run_TrainTest(predictor, dataset, extraSettings, extraTag, summary: summary, precision: precision);
+            Run_CV(predictor, dataset, extraSettings, extraTag, useTest: true, precision: precision);
         }

         /// <summary>
@@ -383,10 +383,10 @@
         /// Run a train-test unit test
         /// </summary>
         protected void Run_TrainTest(PredictorAndArgs predictor, TestDataset dataset,
-            string[] extraSettings = null, string extraTag = "", bool expectFailure = false, bool summary = false, bool saveAsIni = false)
+            string[] extraSettings = null, string extraTag = "", bool expectFailure = false, bool summary = false, bool saveAsIni = false, decimal precision = Tolerance)
         {
             RunContext ctx = new RunContext(this, Cmd.TrainTest, predictor, dataset, extraSettings, extraTag, expectFailure: expectFailure, summary: summary, saveAsIni: saveAsIni);
-            Run(ctx);
+            Run(ctx, precision);
         }

         // REVIEW: Remove TrainSaveTest and supporting code.
@@ -421,7 +421,7 @@
         /// <paramref name="useTest"/> is set.
         /// </summary>
         protected void Run_CV(PredictorAndArgs predictor, TestDataset dataset,
-            string[] extraSettings = null, string extraTag = "", bool useTest = false)
+            string[] extraSettings = null, string extraTag = "", bool useTest = false, decimal precision = Tolerance)
         {
             if (useTest)
             {
@@ -431,7 +431,7 @@
                 dataset.trainFilename = dataset.testFilename;
             }
             RunContext cvCtx = new RunContext(this, Cmd.CV, predictor, dataset, extraSettings, extraTag);
-            Run(cvCtx);
+            Run(cvCtx, precision);
         }

         /// <summary>
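
Taken together, these hunks thread a single optional precision value from the test entry point down to every baseline comparison in a run. A compact, hypothetical sketch of that fan-out (method bodies are placeholders, not the test framework's code):

using System;

class FanOutSketch
{
    const decimal Tolerance = 10_000_000;

    // RunOneAllTests -> Run_TrainTest / Run_CV -> Run -> per-file checks:
    // one precision argument reaches every comparison of the run.
    void RunOneAllTests(decimal precision = Tolerance)
    {
        Run_TrainTest(precision);
        Run_CV(precision);
    }

    void Run_TrainTest(decimal precision = Tolerance) => Run(precision);
    void Run_CV(decimal precision = Tolerance) => Run(precision);

    void Run(decimal precision = Tolerance)
        => Console.WriteLine($"CheckEqualityNormalized(precision: {precision})");

    static void Main() => new FanOutSketch().RunOneAllTests(precision: 10_000);
}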

View file

@@ -70,10 +70,10 @@ namespace Microsoft.ML.Runtime.RunTests
                 return _testCmd.CheckEquality(_dir, _name);
             }

-            public bool CheckEqualityNormalized()
+            public bool CheckEqualityNormalized(decimal precision = Tolerance)
             {
                 Contracts.Assert(CanBeBaselined);
-                return _testCmd.CheckEqualityNormalized(_dir, _name);
+                return _testCmd.CheckEqualityNormalized(_dir, _name, precision: precision);
             }

             public string ArgStr(string name)