Support multiclass classification (#53)

Support multiclass classification (#53)
This commit is contained in:
wxchan 2016-11-01 13:31:44 +08:00 коммит произвёл Guolin Ke
Родитель 90ffe1c9b7
Коммит aa796a8598
21 изменённых файлов: 8034 добавлений и 67 удалений

Просмотреть файл

@ -0,0 +1,500 @@
1 0.109 1.261 -0.274 2.605 0.472 -0.429 -0.983 1.000 -0.095 -1.219 -0.369 -0.312 -0.840 1.281 -0.618 -0.532 -0.132 0.443 0.028 2.201 0.044 1.671 0.660 -0.114 0.574 0.276 0.680 -0.670
4 -0.294 -0.659 -0.569 2.351 0.775 -0.339 -2.436 -1.704 0.086 -0.291 -1.152 -0.132 -0.745 -0.641 -0.173 -1.016 -1.386 -1.354 -0.405 -1.583 2.190 0.933 0.720 -0.885 -0.848 -1.416 1.838 1.343
2 0.194 1.849 -1.486 0.190 0.331 -1.080 -2.175 1.073 0.371 -1.200 0.022 0.784 -1.400 -0.130 -1.631 -0.166 0.156 0.944 0.360 -0.081 -0.736 0.534 -1.664 0.267 -0.813 0.871 0.924 -0.935
0 0.261 -0.391 -0.138 -0.822 0.774 -0.146 -0.738 -0.121 1.735 0.335 1.076 -1.612 -0.197 -1.000 -0.519 -0.767 1.289 0.717 -0.755 -0.465 0.363 0.891 1.041 0.635 1.814 0.427 -0.445 -0.815
2 0.547 -0.093 -0.077 -2.957 -0.713 0.933 0.970 -0.735 0.165 -0.189 0.690 2.209 0.708 -0.670 -0.078 1.402 -1.409 -0.615 0.710 1.294 0.609 -0.607 0.537 0.414 0.351 -0.097 0.053 -0.907
4 -0.544 1.656 0.213 2.566 -1.318 -0.301 -0.304 -0.777 -0.351 -0.480 -0.979 1.795 -0.083 0.927 0.990 1.428 -0.982 1.266 -0.074 2.036 0.525 -1.153 1.258 1.159 -1.292 0.745 1.604 0.479
2 0.499 0.546 -0.268 0.686 -0.923 -2.454 0.707 0.166 -0.610 -1.471 -2.037 0.052 1.649 -0.139 -0.361 -0.024 2.179 0.752 -0.934 0.028 0.332 -1.056 0.759 -0.818 -1.511 0.076 -0.082 -0.001
0 -0.651 -0.487 -0.592 -0.864 0.049 -0.831 0.270 -0.050 -0.239 -0.908 -0.577 0.755 0.501 -0.978 0.099 0.751 -1.669 0.543 -0.663 0.571 -0.763 -1.805 -1.628 0.048 0.260 -0.904 0.639 -1.662
4 -1.463 0.326 1.539 0.616 0.623 -0.292 1.767 0.728 0.818 0.100 0.842 -0.257 1.059 0.309 -0.462 1.705 -0.238 -0.841 2.439 0.076 -0.003 0.519 0.449 -0.589 -2.275 1.666 -0.145 1.938
2 0.837 0.724 -0.514 0.910 -1.161 1.077 -0.261 1.684 -1.521 1.763 -0.361 -0.379 0.195 -0.697 -0.072 -0.738 -0.701 0.487 -0.902 -0.768 -1.373 -0.504 1.312 -0.846 -0.732 0.194 -0.371 2.228
1 -0.350 -0.612 0.361 -0.562 -0.930 -1.303 0.705 -0.769 0.447 -0.075 1.981 0.363 -1.274 -0.208 0.410 -0.552 0.526 -0.348 0.143 0.604 0.265 -0.976 -1.465 2.297 -1.661 0.787 0.096 -0.769
2 -0.852 0.442 0.536 1.362 -0.641 -0.848 1.189 -0.225 -0.899 -0.495 -1.386 -0.028 -2.881 0.266 -1.542 1.269 -0.706 -0.828 0.042 0.393 0.035 -1.621 0.118 -0.079 -0.117 0.579 -0.773 -1.133
0 -1.667 0.223 -1.121 0.194 0.025 0.773 -1.064 -0.295 -1.464 0.098 -0.681 -0.536 0.098 -1.381 0.109 -1.370 -0.347 -1.482 0.666 0.646 -0.322 0.344 0.915 0.802 -1.206 0.740 -1.173 0.267
1 0.068 0.818 -1.738 0.085 -0.070 0.725 -0.188 -1.039 0.007 -1.083 -1.925 0.727 0.579 0.467 1.303 -0.665 -1.431 -0.070 -0.348 -0.842 -0.080 -0.612 -1.836 -0.441 -1.136 0.232 -0.656 1.112
4 0.009 -0.265 -1.209 0.797 -2.475 -0.172 1.974 -0.278 1.191 -1.676 -0.027 -0.851 -1.736 0.834 -0.205 0.103 -1.528 -0.716 0.647 -1.237 -1.428 1.648 -1.814 0.095 1.461 -0.233 -1.034 -0.994
2 0.726 1.481 1.267 0.788 -1.491 -2.417 0.117 0.229 -2.124 -0.887 1.691 0.393 -0.295 -0.137 0.296 -0.308 0.281 -0.229 0.286 -0.140 -2.183 0.244 1.006 0.018 -0.536 1.063 0.160 -0.036
4 -0.374 -0.465 -1.333 -0.949 0.981 0.504 0.273 -1.335 0.101 -2.304 0.843 0.259 -1.345 1.616 -1.796 0.530 0.555 -1.240 -0.038 -1.637 -2.846 -1.068 -0.617 0.644 0.067 0.094 -1.014 -0.530
0 1.231 -0.064 0.033 0.833 -0.892 -0.583 -0.094 -0.657 -1.489 -0.029 0.136 -0.243 1.020 0.129 0.756 0.265 0.854 1.836 0.166 0.881 0.361 1.008 0.637 -0.645 0.166 -0.404 0.274 0.939
4 -0.647 -0.659 -1.209 -0.148 1.131 1.019 -0.026 1.784 -0.055 -0.341 -1.977 0.199 -0.896 1.452 -1.243 1.287 0.205 -1.149 -1.661 0.370 -0.921 -1.971 1.979 0.868 -1.145 -1.817 1.181 -1.354
2 0.314 -0.396 -1.002 1.413 0.361 0.356 -0.549 -0.580 -0.445 -0.045 2.310 -0.581 0.389 -2.498 -0.284 -0.588 -0.132 0.580 0.916 1.235 2.289 -1.210 -0.520 0.079 0.005 0.951 0.216 -1.186
3 -0.653 -0.269 0.167 -1.778 -0.499 0.294 0.597 1.236 0.016 0.421 1.926 0.748 -0.621 -0.607 0.991 -2.071 -0.566 -0.159 -0.548 -0.582 0.196 0.978 0.174 0.773 -1.765 1.876 1.908 -0.310
3 -0.033 -1.985 -2.104 -0.444 -1.038 -1.375 0.260 -0.999 -1.046 1.118 0.341 -0.548 0.095 0.233 -0.446 -0.502 1.835 -0.209 1.280 -0.305 0.140 -0.627 -0.211 0.256 2.191 0.856 0.963 -1.432
3 -0.127 -0.265 -0.637 1.229 -1.362 -0.085 -0.844 0.567 0.134 1.182 0.735 0.082 0.744 0.836 -0.913 0.773 0.297 0.489 -0.408 -0.854 -1.034 -3.408 -0.709 2.439 0.822 0.318 0.961 -0.260
4 -2.755 0.924 -1.861 0.345 0.034 -1.552 1.405 -2.208 1.390 0.034 -0.175 -0.780 -1.042 -1.789 0.008 -0.425 0.499 0.297 0.941 0.205 0.910 0.567 0.908 -1.248 0.338 -1.810 0.097 -0.171
2 -0.202 0.177 -1.019 0.758 -1.104 -1.135 0.949 1.259 0.287 -0.281 -1.108 -1.077 -0.438 -0.440 -2.299 0.380 -1.768 -1.403 -0.553 0.245 0.105 -0.044 -1.096 -0.056 1.254 0.007 0.647 -2.126
4 0.175 0.528 0.853 -1.185 -0.018 0.140 3.120 0.901 0.122 -1.231 -0.183 2.311 -1.496 -0.209 1.321 2.220 -0.193 0.623 0.985 0.698 0.314 1.398 1.208 0.499 1.745 0.190 0.233 -0.202
4 -0.052 -0.478 0.395 -0.274 -2.571 -1.746 -0.255 -0.102 0.447 -1.724 -0.401 -0.156 0.292 1.451 0.229 -1.350 1.062 1.287 -1.010 0.417 2.362 1.323 -0.552 1.960 -0.098 0.590 2.763 0.381
0 -0.607 0.753 0.541 -0.371 1.879 -0.843 0.458 -1.308 0.302 0.515 -0.197 0.045 -0.477 0.931 0.875 0.438 0.700 0.553 -0.410 0.356 0.754 -0.204 0.570 -1.322 -1.185 0.083 -0.312 1.101
2 -0.369 -1.453 1.285 -0.066 -0.155 -0.311 0.256 -1.681 -1.971 0.452 0.560 1.672 -0.159 0.085 0.035 -0.307 -0.558 -2.198 0.301 -0.725 1.913 0.997 -0.588 0.871 -0.643 -0.672 -0.416 0.290
4 -1.068 1.667 -0.093 -1.823 0.985 -1.302 1.507 -1.712 -1.619 -1.039 -0.824 -0.102 -2.205 0.194 -0.502 -1.238 0.153 1.491 1.334 0.329 -0.399 -1.305 -1.170 1.561 0.425 -0.069 0.284 -1.353
4 -1.824 0.659 0.617 0.814 2.506 -1.299 -1.488 0.706 -0.337 -0.986 -0.283 -0.618 1.725 2.262 -1.636 -1.477 -0.759 -0.690 -0.775 -0.194 -0.447 -1.354 -1.027 1.546 -0.357 -0.409 -1.520 -0.463
0 -1.368 0.589 -0.055 -0.378 -1.288 -0.300 -0.415 0.318 0.167 0.792 -0.485 -1.860 0.991 -0.936 -1.107 -0.043 0.302 -0.531 -0.539 -0.391 1.417 -0.003 -0.968 -0.958 -0.251 0.238 0.484 0.124
0 -0.965 -1.354 -0.158 -0.337 -0.611 -1.336 -1.668 -1.137 0.871 0.029 -0.928 -0.098 0.258 0.103 -0.543 -0.539 -0.470 1.209 0.592 1.290 0.117 -0.988 1.295 -0.330 -1.114 -0.967 0.690 0.138
1 -0.290 -1.035 -0.820 -1.467 0.141 -0.862 -0.495 -0.128 1.044 0.929 -0.055 1.266 -2.012 1.877 0.692 0.103 -0.891 -0.438 0.609 0.632 0.668 -0.728 -0.930 0.195 0.770 0.003 1.481 0.033
1 -1.215 2.249 1.060 -0.160 1.110 -0.347 0.475 0.592 0.862 -0.460 0.883 -0.442 0.731 -0.018 -1.824 -0.737 0.848 -0.304 1.214 -1.213 0.696 -0.048 -0.247 -1.178 0.210 -1.009 -0.636 -0.141
1 -1.617 -0.229 -0.249 0.627 0.046 0.623 0.703 2.155 -0.315 -0.941 0.246 -0.802 1.482 -0.283 -0.566 -0.471 0.718 -0.158 -0.642 1.665 -0.856 0.036 0.652 -1.631 0.480 -1.055 0.823 -0.963
2 1.022 -0.268 0.245 -2.234 0.225 -1.043 -0.351 -1.567 -0.882 -0.164 -0.272 0.885 -1.868 -0.051 -0.099 -0.458 -0.913 1.102 2.048 -1.714 -0.924 -1.004 -0.282 1.256 0.528 -0.154 0.016 -0.319
3 -0.153 -0.452 0.570 0.579 -0.310 1.099 -1.020 0.408 1.547 1.248 -1.612 1.060 -1.243 -2.433 0.479 0.931 -1.346 1.494 -0.244 -0.874 0.362 0.725 0.883 -0.750 0.366 0.165 -1.291 -1.055
1 -0.803 1.518 1.136 2.244 0.580 -0.334 -1.220 0.130 0.103 1.263 -0.188 -0.724 -1.792 -0.099 -0.711 -0.397 -0.203 0.096 -0.910 -0.686 1.451 -0.261 0.716 -0.386 -0.257 1.023 -0.531 0.872
1 -2.040 -0.141 -0.711 0.734 -0.479 0.467 0.625 -0.335 0.490 -0.255 0.343 -0.949 -0.944 -1.166 0.263 1.207 0.961 -0.241 -0.162 0.820 1.318 -2.157 -0.507 0.156 -0.042 -0.072 1.906 -0.838
3 1.358 0.042 0.877 -0.775 -0.615 -1.600 1.109 -0.576 -1.328 0.501 1.343 -0.371 -1.771 -0.324 0.239 0.595 0.777 0.076 -0.259 0.686 -1.323 -0.969 -0.727 -0.442 -1.383 0.972 -1.212 -2.688
0 -0.174 -0.956 0.877 -0.937 0.865 0.054 0.284 0.040 0.238 -0.603 1.559 1.015 -0.592 -0.263 0.450 -0.356 -0.501 0.597 0.028 -0.081 -0.037 -1.521 1.920 -0.712 -0.832 -1.960 -0.520 0.920
4 -0.969 -0.131 -0.226 2.224 -0.489 0.449 -0.080 2.200 0.299 1.428 1.351 -1.764 -0.067 -0.233 -1.742 -1.553 0.529 1.406 -0.126 -1.170 0.624 -0.069 -0.241 -0.352 -2.241 0.544 1.581 -0.561
4 1.007 0.023 -0.864 1.590 -0.750 -2.162 0.458 -1.105 0.989 -0.381 0.604 1.402 -1.209 -0.566 -0.076 1.538 -0.658 -0.595 0.478 -0.577 0.153 -1.552 2.214 -1.348 -0.322 -0.032 1.913 -1.325
4 -0.451 2.086 -1.295 1.459 1.215 1.901 -1.588 -0.869 1.492 -1.877 -1.606 -0.488 0.192 1.592 -0.185 -0.608 0.311 0.470 0.441 -0.178 -1.955 -0.918 1.420 0.187 -0.323 1.118 -1.130 -0.225
4 0.893 1.292 0.102 -0.997 -0.715 0.629 -1.135 -0.582 0.513 -0.651 1.421 -1.038 -3.288 -0.778 0.310 0.863 -0.408 -0.277 -0.031 1.742 0.379 -0.493 2.052 0.011 -2.075 -1.387 0.558 -1.777
4 0.711 -1.612 0.990 -1.148 -0.725 -0.018 2.103 2.309 -0.713 -0.173 0.996 -0.361 0.703 -0.812 1.110 -1.219 -1.618 1.175 -0.115 -0.129 -0.659 0.937 -0.484 1.344 -2.643 -2.446 1.094 0.657
4 -1.149 0.533 -0.579 -0.930 0.311 -1.577 -0.037 -0.311 -1.064 0.541 -0.565 0.739 0.314 -0.479 1.761 0.181 -0.320 1.808 -0.348 2.572 0.609 1.802 1.153 -0.581 0.539 1.552 2.205 0.240
1 -0.943 0.450 -0.116 -1.006 -0.286 -1.356 -0.610 0.605 1.217 -0.690 1.447 -1.912 0.388 -0.305 -0.404 -1.976 -0.577 -0.596 -1.351 -0.276 0.019 0.150 -0.641 -0.054 0.853 1.015 0.805 0.815
1 0.317 1.162 -0.190 -0.512 1.780 -1.521 0.152 0.321 -0.766 0.305 2.563 -0.297 -0.620 1.460 0.786 0.979 0.276 -0.005 -0.454 0.047 -0.435 -0.575 -0.660 1.185 -0.794 -1.201 -1.058 0.074
2 1.513 0.658 -0.687 -1.551 -0.822 0.218 -0.471 1.002 -1.489 -0.129 -0.767 0.991 0.543 -0.831 0.854 -1.623 -1.786 -0.885 -1.983 -0.113 -0.081 -0.406 0.539 -0.723 -1.298 -0.797 -1.220 -0.612
4 2.724 -1.607 1.359 0.200 -0.425 0.996 1.627 0.072 0.284 0.167 -0.074 -1.667 -0.870 1.244 -0.523 -1.031 -0.355 -0.247 -1.431 -0.164 0.093 -0.719 0.320 2.788 -0.311 -1.230 1.053 0.970
4 0.210 -0.202 -2.281 0.259 0.680 1.205 -0.545 -1.052 -0.390 -1.206 0.548 0.111 -1.198 -0.423 1.561 -0.062 -0.725 -0.527 0.175 1.822 -1.050 1.428 -0.309 1.287 -1.156 1.552 1.781 1.865
1 0.581 0.369 0.820 -0.318 -0.010 -0.181 0.954 0.717 0.488 -1.349 1.997 -0.472 -0.297 1.673 0.096 -0.373 0.726 -1.012 -1.694 -0.732 1.760 -0.876 -0.356 0.172 0.571 0.366 -0.909 1.561
3 -0.916 -0.054 -0.935 0.870 1.537 -0.845 -0.031 0.917 0.024 0.562 0.052 -1.057 0.119 1.127 0.314 1.624 -1.772 0.379 -0.648 -0.876 1.334 -0.255 -0.352 0.344 -2.153 1.355 1.904 0.915
2 1.169 -0.985 -1.313 -2.164 1.301 0.772 2.109 0.483 -0.277 0.238 0.422 -1.071 -1.037 -0.986 0.009 -1.215 1.358 1.521 -0.779 0.083 0.963 0.441 0.090 0.661 0.860 0.505 -0.477 0.763
0 -0.464 -1.177 1.994 -0.673 -1.187 0.443 0.054 -1.076 0.155 -0.587 -1.245 0.471 0.106 0.634 -0.807 0.852 -0.967 0.045 -0.449 -0.865 0.599 0.515 -0.228 -0.198 0.041 0.083 -0.886 -0.261
0 -1.167 -1.565 1.211 -0.225 0.120 -0.637 1.098 -0.184 1.276 -0.629 -0.849 -1.016 0.569 0.978 -0.537 0.178 -0.293 1.209 -0.391 0.290 1.182 -0.119 -0.593 -0.257 -0.039 0.425 0.115 0.944
2 -1.306 -0.606 -1.189 0.098 -0.336 1.372 0.605 -1.201 1.564 -0.857 0.285 -1.133 -2.060 0.924 2.044 -1.211 -0.017 0.032 -0.778 0.807 -0.846 0.396 -1.447 0.144 -0.166 -0.184 0.973 0.053
4 1.498 0.223 -0.410 -0.960 -0.353 0.013 -1.118 0.640 -0.848 -1.458 -1.320 -0.480 0.548 1.640 -0.240 0.333 -0.071 -2.130 0.378 -0.022 -1.577 1.690 0.600 -1.666 0.524 3.026 0.007 0.691
1 -0.558 0.511 2.162 0.737 0.548 0.240 -1.228 1.428 -0.039 0.343 1.179 -0.358 -1.330 -0.632 -0.522 0.568 -0.428 -1.935 -1.613 -0.763 -0.346 -0.605 -0.550 0.017 0.377 -1.561 0.994 0.110
3 1.343 -0.375 0.460 0.207 0.706 1.673 0.676 1.743 0.702 0.132 0.321 -2.625 -0.455 -0.144 0.438 -0.026 0.334 -1.323 -1.581 -1.183 1.571 -1.268 -0.789 0.105 -1.592 -0.872 0.677 -0.438
0 -0.422 -1.600 -0.317 -0.217 -0.305 -0.305 -0.173 0.508 1.343 -0.169 -1.202 0.082 0.634 1.018 0.406 -1.046 0.358 -0.410 0.476 -0.027 0.531 -0.670 -0.699 -1.242 -1.757 -0.346 -0.751 1.292
4 -1.489 0.394 0.958 0.051 1.384 0.746 -1.301 1.184 -0.417 -1.163 -0.670 1.780 1.750 -0.351 0.995 0.444 -0.323 0.303 -1.805 -0.538 -0.080 0.334 1.280 2.277 -0.746 0.453 0.530 -2.800
3 0.580 0.075 -0.623 0.201 1.046 -0.240 -0.955 0.207 -0.642 0.328 1.267 -1.142 -1.846 2.293 0.133 -1.246 -1.001 -0.475 0.302 -2.417 0.699 -0.687 1.483 0.386 0.894 -0.234 0.245 1.384
4 -1.445 1.318 -0.490 0.708 -0.614 1.684 -0.031 -1.329 0.470 1.301 1.188 0.094 0.812 -0.422 0.591 0.363 -2.895 -1.001 0.208 0.165 -0.029 0.089 1.208 0.354 -2.333 1.948 -1.907 0.958
1 -0.700 0.155 -0.624 0.155 0.162 -0.578 0.203 -0.458 -0.502 0.302 0.599 0.043 -1.081 -0.948 1.293 -0.227 -0.679 1.975 1.317 1.076 0.008 -0.230 -0.217 -0.319 0.444 0.859 -2.607 -0.921
3 -0.135 -0.569 1.305 -1.329 -0.483 0.257 2.419 1.955 -0.045 0.349 0.602 -1.012 0.010 1.356 0.142 1.950 -0.446 -0.089 -0.081 -1.301 1.106 -0.334 0.258 -0.598 1.725 1.233 0.058 -1.224
2 0.218 0.335 0.453 -0.629 1.158 -1.004 1.201 -0.959 0.129 0.677 0.470 -0.935 -0.983 -1.461 -1.294 -1.416 1.288 1.816 0.462 0.576 0.716 -0.223 -0.574 -0.178 -1.730 0.060 2.006 -0.170
0 -1.154 0.354 -0.134 -0.033 -0.552 0.593 -1.273 0.723 -0.008 -0.108 1.719 0.072 0.016 -0.495 -0.401 1.544 1.352 -0.181 2.132 -0.527 0.518 0.386 -0.677 0.398 -0.759 1.269 0.264 -0.156
3 0.619 0.044 0.937 -0.276 -0.724 1.190 0.318 -0.684 -0.418 -0.736 2.022 0.004 0.158 0.721 -1.215 1.097 -0.608 1.830 -0.313 1.038 -2.497 -1.175 -0.294 -2.031 0.601 0.313 -0.991 -0.016
0 0.799 0.006 -0.558 0.994 0.528 -0.768 0.049 -1.352 0.686 -0.262 -0.436 -0.033 0.040 0.895 0.591 -0.156 0.764 0.401 -0.427 -0.125 -0.148 0.505 -0.367 -0.390 1.969 0.940 -0.761 0.571
2 0.484 0.298 1.362 1.081 1.075 0.475 1.042 -0.300 -0.567 -0.339 2.272 -1.044 0.943 1.980 0.397 -0.318 0.556 -0.493 -0.910 1.346 1.282 -0.319 -0.488 0.216 -0.096 -0.818 -1.524 -1.458
3 0.378 0.183 -0.012 1.110 -1.286 0.034 -0.403 0.716 0.854 1.470 0.420 -0.215 0.965 -0.037 -0.703 -0.562 -0.443 2.518 1.612 -0.975 1.248 0.540 -1.020 -1.066 0.505 -1.914 2.140 0.026
1 -0.035 -1.066 -0.423 0.416 -0.065 -0.934 -1.637 -0.302 1.781 -0.458 -0.364 0.110 0.229 0.166 0.418 0.394 -0.060 -0.721 1.270 -1.028 -1.460 0.079 1.066 0.932 0.598 1.899 -1.400 0.435
3 1.453 1.077 0.711 0.094 1.661 1.259 1.548 0.696 0.123 -0.399 -1.269 0.502 0.585 -0.350 0.614 -0.332 0.867 -0.882 -1.055 -0.241 0.829 -1.159 1.050 -1.643 -0.247 -1.413 -2.610 0.366
3 0.561 -0.954 -1.195 0.054 0.064 -0.147 -0.289 1.711 0.426 1.971 1.269 1.703 -1.074 0.593 -0.432 -1.663 1.496 -0.294 -1.055 -0.415 -0.796 -0.694 -0.222 1.731 -0.666 1.659 0.849 0.396
4 -0.772 1.336 1.106 -0.645 -0.987 -0.620 -1.633 -1.216 -1.261 0.322 0.588 2.078 0.956 -0.007 -0.415 1.332 -1.218 0.036 -1.231 0.571 1.337 -1.551 1.903 -0.959 -0.729 0.165 -1.110 2.042
3 -0.250 -0.677 -0.481 1.093 0.602 0.995 -1.247 0.336 1.728 0.272 0.671 0.769 0.716 -0.187 0.448 -0.785 1.327 0.215 -1.001 1.637 2.163 0.079 -1.510 0.455 -1.857 -1.521 0.975 0.212
4 1.045 -1.233 1.673 -0.715 0.795 1.235 0.041 -0.080 0.967 1.193 2.945 0.297 1.375 0.925 -2.850 -0.144 -1.271 -1.202 -1.025 0.318 -0.815 0.721 0.382 0.589 0.069 -1.018 -0.525 1.785
2 0.052 1.354 -2.074 1.193 1.272 0.402 -0.841 -0.668 1.198 0.826 -1.396 0.556 0.912 0.514 -0.052 1.267 0.349 1.343 -2.181 -0.721 -1.196 0.284 -0.541 0.230 1.382 0.178 -0.558 -0.120
1 0.310 -0.112 -0.420 0.510 0.035 -0.788 0.550 -0.233 -0.829 -0.844 -0.666 -0.287 -1.713 0.183 1.669 -1.482 -1.711 0.765 0.442 -0.859 -1.775 1.745 0.530 -0.429 -0.531 -0.579 0.486 -1.337
0 -0.188 -0.253 -0.145 -1.117 1.147 -0.263 1.259 1.080 0.470 1.234 -0.207 -0.556 -0.648 0.072 0.697 0.384 1.917 0.506 -0.551 0.161 0.901 -0.473 -0.633 1.148 0.775 0.038 -0.516 -0.288
0 0.669 0.034 -1.125 -0.072 0.698 -2.146 -0.391 -1.275 -0.055 0.578 -0.203 -0.506 -0.774 -0.766 0.625 0.552 1.691 0.482 0.985 -1.077 -0.250 -1.188 -0.266 -0.573 0.289 -0.061 0.607 1.181
4 -0.153 -0.458 -0.287 0.978 0.235 -1.311 -1.859 1.466 -1.469 -0.602 1.339 -0.565 -0.453 -0.245 -0.180 -0.579 -0.542 1.140 0.293 -0.945 1.952 1.452 0.592 0.001 -0.407 0.910 2.413 2.252
4 0.545 -0.071 0.763 0.940 -2.912 -0.434 -1.857 0.239 -0.301 1.356 0.207 -0.486 0.939 -0.318 -0.498 0.394 1.613 0.854 0.567 2.751 -0.732 -0.755 0.146 1.515 -0.942 1.755 -1.439 -0.121
3 -2.016 -0.390 -0.935 -0.134 -0.120 -1.073 -0.810 -0.916 2.335 -1.307 0.425 -2.114 -0.220 1.253 1.149 -0.045 0.677 -0.064 -0.020 -1.344 -1.210 -0.198 1.179 1.050 -1.173 0.037 1.607 0.789
0 -0.258 0.055 -1.244 1.200 0.613 -0.085 -1.219 -0.213 -0.273 -1.080 -1.162 0.766 -0.324 0.886 -0.013 0.752 0.718 0.467 1.183 0.100 -1.495 -1.335 -0.653 0.742 -0.400 0.205 0.536 0.164
3 -3.274 0.889 -0.047 -0.011 -0.267 0.608 0.063 0.804 0.015 0.541 0.779 -0.128 0.766 0.492 0.186 -0.696 -0.842 0.385 0.640 -0.327 0.721 -0.700 -0.120 -0.580 -0.653 -0.809 3.428 1.125
2 0.370 -1.431 0.804 -1.111 -2.099 0.824 1.011 1.363 0.272 1.475 -1.617 -0.362 0.910 -0.005 0.503 0.479 0.893 1.932 0.494 0.613 -0.909 -0.440 0.163 -0.155 -1.157 1.001 -0.508 -0.022
0 0.085 0.589 0.199 -2.147 -0.158 0.996 0.882 1.507 -1.779 0.763 -0.697 0.050 0.155 -0.270 0.067 1.197 -0.498 0.078 -0.859 0.594 0.179 0.368 0.923 0.872 0.249 -0.588 0.813 -0.567
4 0.187 1.742 1.962 -0.832 3.442 0.169 0.632 -2.652 1.008 -2.154 -0.144 -0.553 0.466 0.179 0.587 1.246 -1.004 -0.204 -0.845 -0.921 1.933 -1.269 -0.822 2.194 -0.127 -0.064 0.401 -1.205
0 -0.752 0.262 -1.275 -0.436 -0.418 -0.522 0.219 0.434 0.014 0.901 0.390 -0.494 -0.108 -0.081 -0.383 0.510 -2.131 0.032 0.309 0.599 1.108 -0.141 0.832 0.611 0.264 -1.391 1.090 0.096
0 -0.272 -2.046 -0.449 1.161 0.183 0.531 -1.095 -0.900 -0.124 0.350 0.608 0.035 -0.561 -0.518 -0.685 -0.737 -0.861 0.539 0.052 0.531 -0.293 -0.085 -1.840 0.755 0.387 0.127 0.916 1.702
4 2.101 2.078 -2.316 0.629 -0.282 -1.064 1.455 -0.138 0.923 1.016 0.674 -0.334 -0.324 0.095 1.455 1.037 -0.245 -0.086 1.785 -1.232 -1.875 -0.865 2.116 -1.195 -0.926 0.307 0.182 1.160
0 -0.553 0.680 0.948 0.586 -0.652 0.643 1.208 0.402 0.835 1.790 0.898 -0.727 0.140 -0.332 0.173 1.705 -0.424 -1.267 -0.128 -0.884 -0.147 -0.149 0.620 -0.942 1.031 -1.169 -1.141 -0.244
3 1.302 0.276 0.815 0.393 -0.847 0.248 -0.332 -1.063 0.920 -1.305 -0.659 2.217 0.701 0.245 -2.173 -0.118 -1.165 -0.078 -0.190 -0.857 1.172 0.955 -1.126 1.574 -1.150 1.664 -0.211 -0.400
3 1.898 -2.365 0.365 0.166 0.355 -0.558 0.003 -1.094 1.038 1.007 1.641 -0.774 0.270 2.043 -0.675 -1.009 0.300 1.165 -0.667 1.933 -0.916 0.658 -0.677 -0.466 0.823 -0.209 -1.519 -0.799
4 -0.415 -2.426 0.880 2.158 1.974 -0.476 -0.287 0.874 0.092 0.129 -2.197 -0.817 -0.204 1.778 1.764 -1.111 -1.231 1.682 1.117 -0.474 -0.333 -1.144 -1.214 -1.605 -0.136 -0.417 -2.040 -0.365
4 -0.772 -0.327 1.185 -0.448 0.400 0.072 -0.274 0.650 0.375 0.437 1.485 -2.901 0.143 0.372 -1.317 -1.662 -1.742 3.620 -2.126 -1.778 -0.169 1.344 0.730 -0.153 0.789 0.805 -0.096 -1.300
3 -0.732 0.735 1.397 -0.673 1.475 -0.378 0.225 -0.363 0.250 -1.050 1.029 -1.635 -0.934 1.297 2.263 -0.528 1.534 1.293 0.408 -0.480 1.403 -0.188 0.465 -2.162 -0.403 -0.611 0.126 -0.314
4 -0.458 0.142 -0.771 0.017 0.079 -0.182 -1.644 -1.051 -0.162 -2.136 1.025 1.300 0.852 0.282 -1.233 0.307 1.405 -1.133 2.554 0.818 -1.061 -1.010 -0.505 -0.054 -0.377 -2.801 -1.302 -0.007
3 0.777 0.503 -0.640 0.811 -1.283 -2.228 -1.562 -0.922 -0.755 -0.435 -0.390 -0.650 -2.116 0.192 -0.265 -0.696 1.398 -0.242 0.059 0.796 -1.490 -0.023 0.793 -0.091 -1.303 -0.482 0.189 -2.107
1 1.429 0.321 -0.218 0.096 1.137 0.057 -0.020 0.128 0.872 -0.467 0.189 0.215 0.578 0.993 0.078 -0.471 0.679 -1.569 -1.077 -1.013 -0.716 -1.391 0.957 -0.342 -1.724 -1.203 0.314 -1.634
3 0.886 -1.530 0.249 -0.936 0.655 1.079 -1.891 0.265 -1.558 -1.717 0.763 -0.098 -0.143 1.559 -0.271 -0.789 -0.506 0.406 -1.060 -0.253 0.964 -1.597 -1.641 -0.339 0.679 1.481 -0.791 -0.476
2 -0.247 0.291 -0.750 1.254 0.817 0.281 0.714 1.157 -0.525 0.047 -1.185 2.254 -0.510 -2.002 -0.424 -0.455 0.714 0.656 0.839 -0.118 0.436 -2.222 -0.313 1.628 1.647 -0.770 -0.076 -0.251
0 -1.384 -1.904 0.874 0.258 1.549 0.971 -0.623 0.247 -0.792 -1.613 1.196 -0.194 1.348 0.109 0.278 0.577 -0.184 -0.237 0.242 1.246 0.505 0.017 -0.411 -0.118 -0.864 -0.982 -0.238 0.006
2 -0.966 -0.107 -0.665 -1.543 -0.182 0.106 1.268 0.937 1.220 0.720 0.333 -1.229 0.150 0.597 1.457 0.686 1.240 -1.032 -0.326 0.429 1.161 -0.145 -0.082 0.687 -1.126 1.607 -0.879 -1.919
2 -1.283 0.717 -0.453 0.330 -0.624 1.474 -1.434 1.034 -1.913 0.097 -1.387 0.713 0.342 0.525 -0.270 -0.265 -0.135 -1.174 2.576 -0.797 -0.400 0.321 -0.448 0.286 -1.851 -0.036 -0.940 0.402
0 1.013 -0.161 1.329 -1.660 0.446 0.138 0.834 -0.076 -0.984 -1.092 -0.012 -0.260 0.857 0.954 0.048 0.026 1.065 -0.245 -0.204 -1.323 0.879 -0.090 1.137 -1.207 -0.491 0.946 0.326 -1.183
4 -1.772 0.941 -0.361 -0.467 1.478 0.170 0.904 -0.623 -0.725 -0.365 0.670 0.051 0.468 -2.603 -1.882 -2.154 -0.379 -1.398 1.690 -0.021 0.217 -1.090 -1.121 1.551 1.782 0.416 -2.032 -0.425
1 -0.952 -1.032 0.604 0.771 -0.250 -0.597 0.569 -1.482 1.950 0.452 -0.159 0.830 0.631 0.235 -1.890 0.411 1.456 -0.555 0.959 0.849 0.294 0.170 0.560 0.416 1.891 0.967 0.707 1.057
2 1.566 0.434 1.083 0.387 0.379 0.753 -0.567 -0.556 0.350 1.941 -1.541 -2.029 -0.264 1.031 0.051 -1.169 0.079 -1.733 0.629 -0.309 -0.082 0.427 0.206 -0.541 1.034 0.797 -1.774 -0.230
3 0.633 -1.243 -2.269 -0.977 -0.243 2.580 0.543 -1.808 -0.690 -0.053 0.254 -1.315 -0.135 -0.718 -1.104 0.260 0.774 0.841 0.574 -0.497 0.715 0.309 0.134 -0.612 -0.497 0.074 2.440 0.301
0 0.148 -0.643 0.262 -0.331 0.880 0.333 -0.315 0.232 1.315 -0.570 0.403 -0.070 0.427 -0.655 0.110 1.108 -0.937 -1.329 -1.291 -1.171 0.595 -1.406 -0.298 0.618 0.417 -0.386 0.191 0.107
0 -0.719 -0.234 1.263 -0.775 0.483 0.747 0.077 0.040 -1.025 0.731 -1.254 0.391 -0.518 -0.395 -0.201 0.499 1.562 0.111 -0.720 -1.440 1.160 0.175 -0.232 0.059 0.767 -0.410 -0.666 -1.310
3 0.265 -0.096 0.818 -1.546 -0.614 -0.791 0.123 -1.861 -1.498 1.854 -0.448 -1.560 0.247 1.735 0.266 -0.284 0.287 0.635 -0.347 -0.244 0.348 0.150 -2.634 0.734 0.967 -0.010 0.872 -0.477
2 1.473 2.152 -0.709 0.140 -0.400 -0.182 0.930 -0.046 1.316 -0.516 0.741 -0.443 -1.837 -1.021 0.124 1.921 -0.460 -0.169 0.490 -0.599 -0.484 0.072 -0.437 -1.783 -0.650 0.615 0.646 1.336
0 -1.292 -0.696 -1.591 -0.046 -0.534 -0.960 -0.006 -1.009 0.498 -0.086 -0.285 0.134 0.518 -0.089 -0.320 -0.197 0.765 -1.561 -0.054 1.318 0.849 0.278 0.560 2.260 0.169 -0.446 1.260 0.177
3 1.464 0.352 -0.991 -0.893 1.218 -1.343 0.237 1.947 -1.703 1.175 0.620 0.328 0.249 0.289 0.459 -0.599 0.438 -0.492 -0.833 -0.407 -0.268 1.740 1.117 0.157 -1.392 -0.842 1.656 -1.280
3 0.823 -0.220 1.427 -0.086 0.348 -1.596 -0.139 -2.060 -0.117 -0.731 1.831 0.903 -0.306 0.022 1.382 -2.009 0.235 -1.025 1.312 -0.484 0.102 0.682 1.380 0.589 2.116 -0.655 0.186 0.030
1 0.338 0.107 -0.414 0.721 0.010 -0.087 -0.087 -0.298 -1.037 0.172 0.003 -0.419 -0.691 0.730 0.331 -1.333 -0.739 2.111 -1.194 -0.825 -0.499 1.248 1.471 -1.325 -1.542 -0.523 0.002 1.798
3 0.255 0.957 1.120 0.412 -2.233 0.148 0.212 -1.087 -1.790 0.462 -1.249 2.061 0.263 1.352 0.031 0.752 0.053 1.006 -2.038 0.888 0.669 -0.154 -0.409 -0.299 0.283 1.214 -0.534 -1.702
3 -0.292 0.090 -0.670 0.636 0.752 1.028 1.425 -0.143 -1.227 -1.131 1.735 1.615 1.338 -0.586 1.688 -0.170 0.343 0.826 1.261 0.541 1.375 0.013 -0.203 -0.320 -1.149 -2.798 1.041 -0.234
4 0.382 0.506 0.034 2.083 0.645 1.839 0.293 -0.605 -2.070 0.371 0.641 -1.255 0.042 -1.402 0.831 0.673 -1.836 -1.468 -2.054 0.369 0.381 0.870 -1.096 -0.453 -0.770 -0.920 -0.942 -1.288
3 -0.450 0.904 1.121 1.078 -2.689 -1.009 -0.282 1.845 0.445 -0.457 -0.555 -0.522 -0.864 0.763 0.732 -1.116 -1.727 -0.498 -0.693 0.240 1.660 -0.850 0.714 0.693 -1.807 -0.652 -1.129 0.787
1 0.084 -0.634 1.757 0.527 -0.000 -1.106 -1.364 0.496 1.799 0.409 0.175 0.190 0.103 0.203 -0.685 -0.974 0.109 1.493 0.777 -1.233 -0.255 0.729 1.517 1.791 -0.211 0.824 0.249 -0.120
0 0.107 -0.345 0.688 -0.613 -0.057 1.184 0.342 -0.060 -0.602 -0.633 -0.851 -0.541 0.756 -0.222 -1.350 1.768 0.320 -1.407 -0.442 0.111 1.705 -1.108 0.976 1.399 -0.629 -0.278 -1.036 -0.203
2 0.185 -0.838 1.456 -0.158 -1.370 -0.174 -0.691 0.938 -0.406 -0.712 -1.245 -0.692 -0.284 0.628 -1.513 -2.158 0.599 -0.714 1.433 -0.786 0.178 -0.572 -0.474 -1.309 1.377 0.112 0.326 1.387
3 0.572 0.651 -0.118 -1.157 -0.682 1.152 -0.485 0.143 -1.076 0.337 -0.458 1.047 0.774 -0.496 -0.127 -0.551 -1.419 -0.857 0.198 0.325 0.149 -0.302 0.933 0.064 0.703 -1.774 -3.922 0.284
2 -0.401 0.734 -0.989 -0.473 -0.325 0.808 1.056 -0.972 -0.404 -1.934 -0.001 0.045 1.417 -0.175 -0.121 -0.446 0.529 0.257 0.430 2.141 -1.488 -2.737 0.713 -0.047 -0.573 1.013 0.170 -0.407
4 -1.013 0.519 -2.715 -0.043 0.613 1.073 -1.502 0.322 1.662 2.279 0.003 -0.952 1.510 0.670 1.980 1.823 -1.650 0.481 0.292 0.933 0.029 2.006 -0.193 -0.200 1.075 -0.116 0.008 -1.053
1 -0.651 -0.763 -0.956 -1.799 1.047 -0.057 -0.063 1.107 -0.908 1.576 -0.285 -1.556 -0.690 0.357 0.110 -1.227 0.157 -0.101 0.115 1.278 0.533 0.009 -0.589 -0.682 -1.088 0.766 0.722 1.488
3 1.970 0.176 -1.312 0.783 -1.192 0.546 -0.542 0.209 0.671 0.520 -1.705 0.986 -0.904 -1.145 -1.063 -1.623 2.340 -0.483 -1.467 1.070 -1.099 -0.279 0.729 -0.382 0.442 -0.320 -0.600 0.042
3 -0.573 0.759 1.786 -1.168 0.858 -0.599 -1.871 0.927 1.819 2.111 0.301 -1.444 -0.207 -0.109 1.096 -0.024 -2.076 0.630 0.072 0.904 0.248 0.689 0.015 0.284 -0.921 0.354 -0.185 0.490
4 -3.236 0.475 1.848 -0.080 0.386 -1.081 -0.022 -0.548 0.616 2.215 -1.101 -1.121 0.147 -0.735 -0.396 -0.540 -1.354 -0.293 -1.260 0.815 0.999 -1.592 0.416 -1.789 -0.668 0.459 1.209 1.399
4 -0.904 1.844 0.939 -0.312 2.036 -2.341 1.047 0.043 1.213 -0.483 -1.136 0.238 -0.496 0.271 0.356 -1.594 0.901 -2.835 2.991 0.263 -0.711 -1.636 0.594 0.392 -0.082 -0.366 0.671 -2.259
1 -1.060 0.042 -0.124 0.954 0.357 -1.058 0.369 -1.109 -1.016 -0.109 -0.663 -1.336 0.981 -1.411 1.087 0.281 -1.286 0.374 0.562 0.016 0.406 -1.440 0.010 -1.647 -0.941 -1.205 -0.090 0.436
4 -0.291 0.522 1.170 -0.131 -0.057 0.022 -0.261 -0.542 1.764 0.138 2.629 0.273 0.210 0.223 0.932 -0.339 1.191 2.093 -0.519 -0.519 -1.467 -1.086 -1.071 -1.349 -1.943 -0.140 2.413 1.458
2 -0.365 -1.466 0.778 -0.108 -0.057 -1.018 -0.391 0.237 -1.205 -0.168 -0.435 -0.104 -0.180 -1.346 0.849 -2.465 0.214 1.040 -0.084 -1.378 0.789 -1.613 -0.337 1.138 -0.111 0.021 1.232 -1.519
2 0.489 0.778 1.150 0.490 0.073 -1.080 0.273 -0.376 1.376 0.091 -0.160 1.416 -0.482 -0.003 2.009 0.266 1.125 1.504 -0.552 0.909 -0.333 2.040 1.444 0.743 1.045 -0.696 1.718 -0.904
2 0.149 -0.631 0.646 -1.122 -1.457 0.361 -0.042 0.702 1.293 -0.479 0.744 -1.640 -1.255 -0.006 -0.379 -2.216 1.761 -2.163 -0.471 0.925 -0.228 0.587 -0.786 0.120 1.373 -0.484 0.465 -0.270
4 -0.845 0.892 -1.551 -0.323 -1.359 0.425 1.354 0.851 -0.424 -1.156 3.002 2.757 -1.254 0.639 -1.180 -1.338 0.052 0.054 0.939 -2.148 -0.049 0.307 2.770 -0.879 0.409 1.529 2.173 1.730
0 1.049 -1.089 -1.805 -0.264 -0.250 0.615 0.253 -0.858 -0.562 -0.023 -0.556 -0.759 -0.139 -0.272 0.298 -0.029 -0.532 0.650 -0.949 1.022 -0.872 -0.621 -1.756 1.133 0.583 -0.696 0.749 -0.456
0 0.532 1.084 -0.341 -0.269 -0.041 -0.214 -0.482 -0.160 0.431 -1.446 -0.262 -0.182 0.812 1.262 0.551 1.055 -1.049 0.906 0.734 -1.189 -1.041 0.511 -0.254 -0.796 0.955 -0.407 0.520 0.972
4 0.037 1.815 2.613 -0.464 -0.833 1.130 0.335 1.136 0.399 -0.194 -0.189 0.433 -0.211 1.220 -1.302 1.214 -1.873 0.145 -0.331 -1.516 -2.060 -1.098 0.046 -0.602 -1.041 1.352 -1.232 0.679
1 0.440 -0.075 1.093 0.970 0.413 0.055 0.657 0.314 1.061 0.900 -1.018 -1.086 -0.701 0.976 -1.576 0.395 0.363 1.163 0.180 -1.343 -0.959 -1.182 -0.538 -0.183 -1.165 1.471 -1.325 1.346
0 1.090 -0.106 -0.229 -0.751 1.105 -0.227 0.621 0.383 0.231 -0.557 -0.246 0.647 -0.782 -0.512 -0.731 -0.026 -0.319 -0.871 0.650 -0.606 0.061 -1.168 -1.638 -0.691 -1.944 1.400 1.401 -0.073
2 -0.717 -2.274 0.053 0.290 -0.140 1.137 -0.904 -0.005 0.464 0.778 -0.760 -0.160 0.595 -0.328 1.470 -1.311 0.154 0.184 -1.287 0.012 -0.283 -0.363 -1.247 -0.595 0.947 0.311 -2.764 -0.012
3 0.841 -1.052 -0.699 -1.477 1.283 -0.766 -0.314 -0.567 1.013 -0.940 1.513 -1.470 -1.749 -0.707 2.047 -1.905 0.396 -0.054 0.021 1.783 -0.586 0.435 0.011 -0.524 -0.639 0.312 -0.367 -0.351
2 2.074 -0.283 -1.497 -0.536 1.214 -0.339 0.394 0.051 0.572 0.501 -0.297 1.453 0.509 1.182 -0.245 -0.166 2.681 -0.037 1.253 -1.456 0.358 -0.408 1.539 -0.676 -0.429 0.580 -1.051 0.127
3 -0.567 0.157 -1.000 0.637 -0.546 -0.115 -1.128 -1.240 0.291 1.251 -1.035 1.925 0.678 0.276 -0.045 -0.640 0.866 0.089 -0.625 -1.245 2.704 1.784 0.035 -0.263 -0.163 1.957 0.062 -1.285
4 0.363 0.343 -0.112 0.301 -0.991 1.263 0.071 0.975 -0.504 1.956 -0.233 -0.794 -2.077 0.162 0.759 0.869 0.037 -1.028 1.615 -0.258 -2.001 -0.150 -1.986 -2.491 -0.491 -1.097 -0.617 -0.119
3 0.566 -1.067 -1.063 -1.676 -0.546 3.493 0.696 -1.208 1.020 -0.623 -1.035 -0.308 -0.164 0.215 0.683 -0.883 0.051 -0.585 -1.444 -0.173 -1.442 0.658 -1.514 -0.803 -0.572 0.430 0.623 0.401
3 -0.538 0.027 -1.118 0.342 0.216 -0.463 1.311 0.406 1.418 -2.304 0.059 0.943 -0.204 1.597 -1.826 -0.493 0.908 0.311 -0.927 -1.045 0.451 0.374 -0.253 0.496 -1.837 0.262 -2.087 1.255
4 1.006 -0.042 0.820 -2.478 -0.529 -0.815 -2.285 0.103 -0.100 -0.467 -0.126 0.103 1.013 0.643 0.329 2.307 -0.708 -1.360 1.174 1.867 -0.635 1.378 -0.533 0.585 0.640 -0.631 1.847 1.962
4 -0.892 1.578 0.574 0.300 1.013 0.894 -1.959 -1.705 -0.143 -1.074 1.660 -0.991 0.092 0.583 2.444 1.149 -0.661 0.525 0.698 0.622 -0.981 2.032 -1.224 0.680 -0.882 0.391 -0.196 -1.303
3 1.543 0.143 -2.307 -0.772 -0.552 0.720 -1.476 0.061 0.122 0.437 1.113 -1.330 -0.338 -0.478 -0.322 -0.417 0.101 1.031 -0.961 -0.842 -0.782 1.297 -0.395 -0.381 2.726 -0.533 -1.044 -1.825
4 -0.187 -0.851 0.922 -0.221 0.608 -1.194 1.734 1.277 -1.910 -0.802 0.478 -1.320 -0.132 0.427 -0.185 0.539 -0.693 -2.244 0.982 0.976 2.504 -2.207 0.751 -1.969 1.756 0.488 -0.952 0.780
4 -1.750 -0.279 0.514 -0.920 1.158 1.692 0.171 -0.192 -0.313 1.772 1.203 -1.506 -0.031 1.797 0.859 -0.959 -0.244 -0.296 -1.082 2.203 1.230 0.431 0.374 1.349 -0.396 0.836 2.176 0.386
3 0.518 -0.529 -0.869 -0.011 0.137 2.918 -0.184 -1.130 0.273 -2.075 -0.092 0.016 0.795 0.974 -0.504 -0.151 -0.766 -1.128 0.310 -1.274 -0.415 -0.995 0.550 1.801 -1.302 -1.853 -0.125 0.016
2 0.179 0.032 -0.778 0.806 0.015 0.065 -0.004 -1.046 -0.817 -1.582 1.139 0.002 0.995 -0.450 1.148 -1.617 0.972 -0.078 -1.386 -2.823 0.502 0.990 0.062 0.603 0.446 0.056 0.959 -1.477
4 2.589 0.145 0.576 1.400 1.278 0.511 -0.141 0.514 -0.816 0.152 -1.109 -1.781 0.050 1.262 0.926 -0.082 1.747 -0.569 -1.918 -2.182 -1.181 0.817 -0.635 0.461 1.646 -1.100 0.478 0.539
1 -0.356 1.544 -0.747 0.558 2.328 -0.982 0.949 0.678 -0.198 0.817 -0.111 0.879 0.249 0.397 0.332 -1.242 -1.051 -1.057 -1.199 -0.414 -1.802 0.221 -0.841 0.301 0.187 -0.902 0.203 -0.755
1 -0.969 -1.248 0.889 -0.935 1.427 -0.746 0.605 1.912 0.603 1.930 -0.945 1.195 -0.707 0.987 -0.057 0.241 -0.022 -0.308 0.128 0.435 -1.501 -0.279 0.655 0.158 0.968 -1.475 0.311 0.765
4 -0.913 1.858 0.251 0.372 0.424 -0.072 -0.618 -1.621 -0.236 -1.113 -0.435 0.520 -1.182 -2.330 -1.115 -0.146 1.613 1.527 2.460 -1.046 -1.821 0.054 0.002 2.159 -1.217 -0.036 1.673 0.944
4 -0.446 -2.462 1.944 -1.343 0.241 0.377 2.178 2.313 0.408 -1.025 -0.994 -0.622 -0.029 -0.465 -0.970 0.089 -0.842 1.383 -0.834 -0.684 0.112 -1.306 -0.371 -1.503 -0.060 -0.828 -0.495 0.139
3 0.983 0.036 -1.637 -0.122 0.676 -0.303 -0.928 0.604 -0.266 -0.085 -1.314 1.460 1.576 -0.432 -0.709 2.396 0.267 -0.815 0.260 0.030 1.182 2.309 -0.033 0.706 -0.446 -1.258 0.065 -1.865
1 -0.137 -0.309 1.254 -0.581 0.459 -2.130 0.316 -0.331 -1.826 -0.006 -0.473 0.149 -1.130 -0.446 -0.988 -0.356 1.026 -0.319 -1.688 1.294 0.818 -0.187 0.428 -0.590 0.703 0.695 -0.688 -0.475
0 0.813 1.117 -0.176 0.056 -0.343 0.996 0.392 -0.558 -0.513 -0.015 -0.332 1.442 0.808 0.806 0.234 -0.827 0.696 -2.098 0.174 -0.812 -0.889 -0.463 -0.084 -0.023 0.790 -0.174 -0.102 -1.396
3 0.244 -0.855 -0.103 0.798 -0.578 0.011 1.035 -0.032 -1.537 -2.274 -1.142 1.183 -0.261 0.347 1.297 -1.246 2.664 -0.105 0.536 -0.705 -0.966 0.025 -0.610 0.091 -2.129 -1.374 -0.517 -1.138
4 -0.011 0.733 0.531 -0.180 0.558 1.424 -1.787 -2.029 1.298 0.899 1.430 -0.108 1.754 2.893 1.153 0.582 0.734 0.169 0.322 0.449 2.273 -0.761 -0.222 0.003 0.627 0.246 -1.287 0.074
0 0.864 1.221 -0.126 -0.028 0.607 0.725 -0.892 -1.326 -0.880 -1.655 -0.040 0.223 -0.365 1.268 -0.230 -1.589 -0.173 -1.458 -0.491 0.334 0.367 -0.379 -0.177 -0.766 -0.166 0.555 0.466 -0.599
0 0.413 0.838 0.127 -0.514 -0.436 0.220 0.049 0.905 -0.480 -1.218 0.359 -0.478 0.784 0.008 0.655 -1.014 0.143 -0.699 -1.220 -0.861 -0.544 1.245 -0.571 -1.272 0.131 0.029 -0.492 -0.161
2 -1.164 -0.612 -0.260 0.038 -1.052 0.304 -1.683 -0.164 0.143 -0.604 -1.022 2.212 -0.575 -1.317 1.971 0.302 0.146 1.801 0.214 -0.890 -0.859 0.697 -0.071 -0.836 0.809 -0.505 -1.583 -0.353
0 0.286 -1.688 -0.349 -0.016 -0.055 0.715 0.165 0.445 -0.599 1.707 -0.289 0.329 0.114 0.451 1.200 -0.841 0.826 0.737 0.173 0.227 -1.156 -0.340 -1.907 -0.306 -0.063 -1.399 0.299 1.207
3 1.092 -0.962 -1.554 1.162 -0.744 -1.188 0.740 0.617 0.928 -1.809 -0.145 -0.450 0.814 0.064 -1.626 0.821 -0.621 0.408 -0.770 -0.889 -0.210 -0.377 1.039 -3.112 0.214 0.133 0.878 1.088
1 0.029 -1.010 -0.942 1.298 0.045 0.970 -0.165 0.420 -1.744 0.861 -0.029 -0.470 0.629 0.994 -0.406 -1.196 1.666 -0.216 -1.919 -0.422 0.450 -0.082 -0.129 -0.352 -0.421 -1.278 -1.298 0.364
2 -0.049 -1.292 -0.391 1.243 -0.456 -1.001 -1.657 1.329 0.085 0.676 0.781 -1.049 -0.462 1.476 1.353 0.175 1.396 1.080 1.762 0.539 -1.372 -0.421 1.590 -0.166 -0.915 -0.640 -0.968 0.225
3 -0.728 -0.054 0.931 2.046 -1.460 0.022 -3.130 -0.429 0.117 0.163 1.638 -0.227 1.562 0.247 -0.510 0.191 0.129 0.199 -0.698 -0.710 1.136 1.055 -0.829 -0.135 0.383 0.815 0.188 1.154
2 -0.239 0.915 -1.645 0.360 0.440 -0.960 1.862 -1.257 -0.243 2.455 -0.951 -0.621 0.156 -1.328 0.517 -1.781 0.001 1.447 0.641 -0.041 -0.322 -0.085 -0.287 0.398 0.645 0.144 0.701 0.514
2 -1.489 0.559 0.073 0.091 0.337 -0.350 0.070 0.241 0.746 -1.527 1.291 -1.155 0.762 0.079 -1.281 0.528 -0.945 1.069 -0.711 -1.495 1.534 0.181 0.867 -0.851 -1.892 1.675 -0.006 0.491
4 1.731 0.573 0.122 0.045 1.283 1.105 -2.333 -2.116 0.784 0.606 -1.329 -0.452 0.735 -0.325 1.125 -2.748 0.346 0.349 0.404 0.527 0.819 0.540 0.128 -0.945 -1.558 1.072 -0.126 -0.848
2 -1.986 -0.674 1.023 0.251 0.339 -0.655 1.062 0.733 -1.579 0.143 0.110 -0.608 1.052 -0.971 0.594 -1.564 -1.212 -0.494 1.219 -1.214 0.005 -0.301 0.340 -1.220 0.094 1.355 1.670 -1.590
4 -0.159 1.271 -1.309 1.081 -0.115 0.390 1.597 0.622 0.281 0.420 -0.873 2.010 -0.421 0.400 0.201 0.487 0.631 -0.214 -1.148 0.921 -2.085 0.310 -2.415 -1.228 2.470 -1.816 0.063 -0.679
3 -0.052 0.769 0.530 -0.276 0.135 0.939 -1.530 0.405 -0.836 0.119 2.106 1.836 -0.166 0.991 -1.073 0.352 -0.372 0.651 -0.937 0.177 0.935 -1.923 1.577 -0.454 -1.823 0.593 -1.139 -1.475
1 -0.611 -0.782 0.116 -1.516 -0.216 0.382 -0.725 0.041 0.265 -1.762 0.206 -0.459 1.847 0.198 -0.919 -1.817 -0.050 1.306 -0.605 0.315 0.740 0.688 -0.004 -0.658 -0.260 -1.070 -0.943 0.990
1 0.151 -0.113 0.857 0.526 -0.628 1.745 1.204 -0.867 -0.344 -0.158 0.816 -0.809 -0.669 0.878 0.559 -1.948 0.658 -1.474 -0.635 -0.365 -0.469 -0.649 -1.038 0.512 -1.613 -0.532 0.791 0.397
2 -1.429 1.286 0.121 -0.203 0.431 0.237 0.767 0.538 0.718 1.351 0.884 -0.160 -1.123 1.668 -0.238 -0.473 -1.737 0.338 0.149 -1.515 0.453 -0.498 2.690 -0.569 -0.435 -0.875 1.278 -0.373
2 -0.699 1.247 0.314 -1.193 1.339 0.108 0.286 -1.216 1.878 1.293 0.527 0.508 0.599 0.050 -0.563 2.102 0.100 -1.871 0.311 -0.585 -0.159 -1.166 1.209 -0.466 -0.375 -0.557 0.964 0.777
2 0.347 -0.184 1.339 0.658 -0.317 -1.351 1.346 0.782 1.288 0.964 -1.219 0.461 0.953 0.111 -0.324 -0.465 -0.764 -0.107 -1.252 -1.826 -0.550 -0.585 2.238 1.364 -0.204 -0.030 1.052 -0.014
0 0.698 1.842 -0.340 -0.117 0.949 0.611 0.944 -0.602 -0.560 -0.839 -1.064 -0.254 0.032 -1.757 0.037 -0.908 0.283 0.520 -0.365 0.543 -0.919 0.058 -1.105 0.706 -0.463 -0.509 -0.886 1.832
4 -1.450 -1.283 -0.922 1.434 0.541 1.966 -1.912 0.607 0.463 -1.634 0.449 0.493 -0.645 0.527 -1.090 0.733 1.188 -0.836 0.926 -1.460 0.118 0.045 -0.355 -0.375 1.646 -2.578 1.555 -1.306
1 0.138 -1.072 -1.138 -2.150 0.842 0.490 0.199 0.254 1.126 0.691 0.596 1.034 0.373 0.137 -0.731 -1.910 -0.892 0.655 -0.494 -0.309 0.709 -1.793 0.757 0.807 -0.072 -0.046 -0.184 0.662
4 1.327 -0.327 -0.250 -0.162 0.275 -0.581 -2.894 1.193 1.795 -0.612 -1.067 -0.137 -0.188 0.996 0.055 -0.793 -1.318 0.866 0.600 1.061 -0.968 1.838 -1.744 0.710 -1.910 -0.790 1.561 0.831
2 0.366 -0.507 -0.189 0.161 -0.007 0.736 0.487 -1.508 -1.358 -0.370 1.829 -0.852 -0.208 -0.536 -0.554 0.178 1.401 0.456 -0.804 2.753 0.079 -0.577 1.412 -0.469 1.663 -0.871 1.430 0.201
3 -1.313 0.624 1.626 0.497 -1.879 -0.325 0.890 -0.392 0.682 -1.426 -1.990 0.022 -2.174 1.214 -0.757 0.350 1.265 0.042 -0.837 -0.684 0.282 -0.880 -0.162 0.063 -0.397 -2.039 0.275 0.554
1 1.155 0.571 -0.154 -0.544 0.771 0.275 -0.861 1.302 0.262 0.653 -0.392 1.131 -1.585 1.675 -1.939 -1.377 0.252 -0.531 -0.328 -0.531 -0.783 0.317 -0.049 0.885 0.080 -0.417 0.774 -1.832
4 1.116 0.766 -1.191 -0.179 0.540 -2.216 -1.627 -1.394 -1.241 1.364 -0.761 1.809 0.578 -0.726 0.127 -1.594 -0.897 0.344 1.192 -1.437 -1.020 -0.293 2.070 0.593 0.002 -0.619 -0.201 -0.024
1 1.479 0.664 2.139 -1.457 0.119 -0.612 -0.091 -0.270 -0.086 0.852 0.662 0.177 0.086 0.109 -0.434 1.240 0.273 1.276 1.505 -0.249 -0.563 -0.550 1.856 -0.640 -0.545 0.246 -0.187 -0.047
0 0.028 -1.116 -0.366 0.203 -1.551 1.923 1.126 -1.264 1.276 0.658 -0.915 -0.478 -1.186 -0.563 0.669 -0.433 0.052 0.228 0.931 0.060 1.666 0.548 -0.244 -0.611 -0.898 0.007 -0.003 -0.430
3 -0.282 0.325 -2.009 1.822 -0.670 -0.296 0.260 -0.196 0.961 -0.370 -0.580 0.933 -2.732 -1.275 0.243 0.838 0.847 -2.723 -0.484 0.860 -1.027 -0.233 0.451 0.065 0.489 -0.173 0.570 0.593
1 -1.716 -0.486 0.862 1.005 -1.143 -0.860 0.231 0.248 0.018 -1.470 -0.688 -0.993 -0.831 -0.948 -0.545 0.881 1.075 -0.490 0.097 -0.322 1.239 1.151 -0.477 0.577 1.343 -0.241 -1.088 1.270
0 0.716 0.805 -1.525 -0.641 -0.801 -0.699 1.255 0.088 -0.862 -0.843 -0.603 0.093 -0.942 -0.509 -0.593 0.117 0.902 0.725 0.375 -0.387 -0.905 -0.380 0.385 0.700 -0.400 0.488 0.178 0.032
0 0.672 -0.096 0.529 -0.126 0.217 0.575 -0.398 -1.234 0.276 1.189 0.299 0.318 -0.042 0.739 -0.186 -0.454 1.105 -0.531 0.889 1.106 0.567 -1.249 0.027 -0.202 -0.496 0.143 0.769 -0.459
3 0.497 0.791 1.134 0.659 -0.407 -1.810 0.991 -0.525 -1.447 1.872 -0.061 0.489 0.228 -0.014 -0.387 -0.584 2.225 -1.458 -0.433 -0.890 0.518 -0.414 -0.499 0.642 1.935 -0.830 -1.551 0.063
1 -0.005 0.140 0.260 -0.162 0.363 -0.375 0.292 -1.201 0.740 -0.944 0.770 0.874 1.145 1.723 0.336 -1.014 1.691 0.944 0.280 -2.281 -0.678 1.068 -0.440 1.824 -0.884 0.183 -0.572 0.182
3 1.125 -0.957 -2.429 1.806 0.808 -0.305 1.265 0.080 0.162 1.593 0.332 1.110 -0.512 -1.308 -0.314 0.696 1.547 0.116 2.162 -0.678 -0.860 0.185 0.275 -0.285 -0.381 -0.706 0.758 0.186
3 -1.992 -0.652 2.554 -0.205 1.139 -0.028 -0.330 0.933 1.197 -0.892 -1.098 -1.368 -0.878 1.136 0.031 1.184 -1.033 -1.003 -0.392 0.297 -0.140 0.698 0.414 0.488 0.685 1.096 -2.460 -0.012
1 0.543 0.374 0.927 1.441 -1.064 0.300 -0.722 1.070 0.724 -0.970 -0.007 -1.501 0.950 -0.536 -1.130 -0.346 0.584 0.287 -0.489 -0.105 -0.292 -0.934 -1.866 -0.006 0.585 -0.016 2.050 -0.596
3 -0.312 1.000 2.085 -0.398 0.746 0.791 -2.211 0.479 -0.610 -1.343 1.161 0.397 2.040 0.758 -0.109 -0.550 -1.531 1.120 0.776 0.268 1.265 -0.087 -0.454 0.048 0.327 1.500 -0.942 -0.441
1 0.722 0.397 0.726 -0.603 -1.060 -0.819 -0.750 -0.047 -1.184 1.065 0.516 -0.865 -0.199 -0.887 -0.149 -0.835 2.142 -1.591 -1.270 -0.238 0.094 0.100 0.918 0.443 -0.922 1.062 0.628 1.387
0 -1.131 -0.498 1.009 0.259 -1.518 -0.040 0.583 0.714 1.046 1.198 0.570 0.487 0.233 0.842 -0.788 -0.204 -0.699 0.434 -0.259 1.013 0.052 1.064 -2.111 -0.720 0.938 0.020 1.056 0.251
3 -0.436 1.048 -1.069 0.345 -0.384 -0.762 2.586 0.419 0.613 1.138 -1.421 1.752 -2.509 -0.769 -0.459 0.582 0.219 -0.167 -0.012 -0.978 -1.497 0.727 0.694 1.584 -0.790 0.693 -0.284 1.190
3 -2.405 1.667 -1.732 0.492 -1.516 1.371 0.616 0.513 -0.982 0.400 -0.449 -1.538 -0.357 -0.843 -0.973 0.229 -1.497 1.457 0.826 -0.272 1.543 -0.817 0.200 0.347 -1.251 0.881 -0.866 0.673
1 1.791 0.555 0.599 0.304 1.028 -1.300 -1.776 0.931 -0.267 -0.466 -0.167 0.359 1.275 0.653 -0.315 0.378 -0.064 -1.013 -1.459 1.522 0.476 -0.539 1.415 0.472 0.602 0.727 0.104 -0.111
1 0.074 1.183 -0.857 0.213 -0.593 1.477 -0.055 -0.211 -1.136 0.828 0.442 -0.018 0.603 1.071 0.202 0.445 0.542 -1.761 0.471 -1.637 0.007 0.563 -1.827 0.052 -0.354 -0.249 0.777 1.900
1 -1.096 0.680 2.348 0.543 -0.414 -0.349 1.909 0.307 -0.766 -0.062 0.215 0.469 1.512 1.276 0.198 -0.041 0.396 -0.608 0.980 -0.203 0.491 1.713 0.553 0.231 -0.517 -0.078 -1.136 0.533
2 0.544 -0.400 -0.081 0.017 -0.542 0.132 0.690 -0.129 1.372 1.563 -0.845 0.409 -0.239 -0.763 -0.037 -2.136 0.778 0.868 0.230 0.966 -0.699 -2.784 -0.481 -1.217 0.011 1.440 0.295 -0.258
2 0.482 2.405 0.256 -0.911 0.835 0.135 0.040 0.296 -0.396 1.869 -1.628 0.816 0.133 0.455 -0.380 1.514 0.884 -0.770 1.120 0.300 -0.584 -1.693 0.769 -0.724 0.527 1.462 -0.811 1.345
1 -1.381 -2.147 0.623 0.190 -0.477 -1.506 -0.135 0.374 -0.022 -1.567 0.852 -0.530 -2.097 0.309 0.369 -1.057 -0.979 0.866 0.175 0.673 -0.209 0.413 1.005 0.180 -0.207 -0.263 1.081 -0.028
1 -1.228 1.040 0.218 0.939 0.109 1.622 -0.877 0.536 -0.694 1.410 -0.165 -0.335 -0.135 1.214 0.589 1.901 -0.946 -0.159 0.487 -0.255 -0.174 0.457 0.172 1.128 1.314 1.719 -0.125 -0.535
2 2.111 -1.228 0.076 0.155 0.115 0.882 -0.377 -0.297 0.094 -1.672 -0.991 1.242 -0.529 -1.946 -1.564 -0.851 -0.544 -0.485 1.382 0.276 1.174 0.312 0.427 -0.314 1.147 -1.061 -0.173 -1.327
3 -0.443 -0.249 0.726 -0.144 -0.435 -0.495 -1.986 0.763 -1.007 1.858 -0.288 1.309 -1.809 0.731 0.989 1.304 -1.528 -0.703 0.577 0.148 0.293 -0.360 1.551 2.608 0.104 0.453 0.056 1.284
3 1.666 -0.246 1.137 -0.912 0.331 1.016 2.029 -0.997 -1.046 1.761 1.675 0.219 0.502 -0.382 -0.148 -1.700 0.028 0.140 2.071 1.504 0.552 -0.939 -0.481 -0.217 1.033 -1.651 -0.198 -0.051
3 -0.020 -1.327 0.667 -0.245 -2.780 0.778 1.164 1.339 -1.277 -1.917 -0.410 0.518 -0.943 -1.591 0.725 -0.345 0.623 0.016 -0.166 -0.064 -0.358 -0.459 -1.101 0.483 1.248 -0.500 1.866 0.377
3 2.196 0.976 0.004 2.401 0.811 -0.012 2.281 0.659 0.615 -0.386 0.049 -0.411 1.440 -1.609 1.366 -0.877 -0.058 -0.295 -0.520 0.690 -0.005 -0.668 0.154 -0.696 -0.538 -1.497 -0.450 0.494
4 1.145 -1.352 -0.410 -0.123 1.048 1.869 0.710 -0.570 -0.371 -2.411 -0.167 1.568 0.456 0.543 1.784 2.361 0.864 1.308 -1.954 0.685 0.186 -0.862 1.090 0.655 -0.706 -0.641 1.166 0.941
0 0.801 1.137 0.171 0.003 -1.053 0.947 0.123 1.140 0.562 1.417 0.879 0.964 0.283 -0.318 -0.037 -0.404 0.536 -1.529 0.344 0.200 0.155 -0.789 -1.289 -0.758 -0.058 -0.089 -0.181 -0.014
3 -0.775 -0.066 0.894 -0.635 -0.417 -0.655 -0.321 -1.012 -0.330 0.373 -0.703 -1.807 1.091 1.049 2.107 -0.399 0.588 -1.136 1.180 -0.185 -0.422 1.703 -1.293 -0.214 -1.590 0.469 -1.140 -2.061
4 -0.610 -1.331 0.663 0.045 1.376 0.317 -0.647 -1.372 0.256 -1.045 -0.714 0.783 0.805 -0.379 2.149 0.257 -1.749 -0.364 -1.825 -0.301 -0.323 -2.602 1.065 1.344 1.746 -0.487 -0.274 1.011
2 -0.661 -0.905 0.259 -0.895 -0.878 -0.832 2.205 -0.117 0.353 -0.569 -0.334 -1.776 0.024 1.486 0.966 -0.155 0.980 1.435 -1.081 0.742 -1.560 -1.045 1.114 0.887 -0.160 0.286 -0.879 0.545
3 -0.406 0.808 -0.089 -0.030 0.903 0.764 0.648 -0.529 -3.019 1.169 1.776 -1.546 1.699 0.443 -1.587 -0.141 -0.329 -0.388 0.502 -0.178 -0.684 1.722 0.486 -0.714 1.468 -0.408 0.334 1.237
4 0.502 -0.583 -1.647 0.159 1.248 1.331 0.319 -0.456 0.923 -1.098 -0.617 -1.431 0.998 -1.636 0.408 2.326 0.402 -0.969 -0.418 -1.510 -0.895 -0.715 -0.833 0.137 -2.781 -1.243 0.574 2.099
0 0.853 0.029 -0.296 0.300 0.506 -0.871 -0.819 0.094 1.038 -0.256 0.778 0.974 1.060 -1.157 1.104 1.020 0.484 -0.247 -0.096 0.189 0.475 -0.850 -0.918 1.188 0.482 -0.042 0.007 0.736
1 0.955 -0.656 0.624 -1.226 0.486 -1.190 -1.718 -0.645 -0.255 1.275 1.096 0.119 -0.945 -0.511 1.388 -0.518 0.839 1.637 -0.840 0.263 0.761 0.784 0.536 0.444 0.982 -0.020 -0.183 0.791
0 0.321 -1.125 -0.138 1.692 -0.507 -0.674 0.773 -0.560 -0.899 -0.688 0.970 -0.191 0.810 1.182 -0.434 -0.159 -0.235 -0.797 1.003 -0.423 -1.038 1.708 -0.228 -0.062 -0.576 -1.150 -1.493 0.865
4 -1.087 1.813 0.527 1.922 0.864 -1.324 0.645 -0.984 0.007 -0.076 -1.618 -0.813 2.205 0.238 -0.176 1.813 -0.396 -0.091 0.991 -0.401 -0.747 -2.254 -0.936 -0.946 0.159 -0.610 2.158 -1.058
4 0.749 -0.766 -0.088 -0.306 0.149 -0.888 0.835 -1.453 0.441 2.589 -0.204 1.176 -0.442 1.537 -0.186 0.100 0.107 -1.228 0.557 -0.057 -0.163 -2.000 1.658 2.279 -2.551 0.126 0.150 1.789
2 0.865 0.236 0.340 -1.398 1.560 -0.114 -2.108 -0.137 0.080 0.537 -0.072 -0.577 -0.529 0.002 -0.183 1.993 -0.327 -0.769 1.086 0.002 0.911 -0.087 0.717 1.263 1.427 0.761 -2.075 -1.514
1 -1.079 -0.345 1.075 -0.508 0.261 1.125 0.382 1.974 0.068 -0.157 -0.382 1.992 1.219 -1.459 2.110 0.498 0.129 0.518 -0.959 0.197 -0.309 -0.360 0.959 -0.264 0.100 0.328 0.944 1.268
1 1.694 0.915 2.268 1.801 0.192 1.064 -0.231 1.347 -1.146 -0.432 -0.688 -0.369 -0.329 -0.001 0.657 0.127 0.458 -0.980 -0.650 -0.441 -0.100 1.244 0.072 -0.741 0.614 0.077 -0.239 1.378
0 -0.343 -0.371 -1.408 -0.778 -1.111 1.752 0.936 1.272 0.722 -1.129 -0.525 0.489 -1.222 0.713 -0.240 -0.375 0.711 0.444 -0.361 1.159 -1.081 0.616 0.593 -0.310 0.326 -1.251 0.924 -0.185
1 2.413 -0.598 -1.363 -0.205 0.299 0.050 -0.157 -0.129 -1.182 -0.924 1.222 0.738 -1.442 -0.877 -1.188 0.402 1.170 -0.773 0.449 -0.668 -0.741 -0.415 0.697 -0.141 0.068 0.084 -0.361 1.385
0 -0.237 0.177 0.673 -0.114 -0.122 0.267 -0.399 -0.350 -0.899 -0.042 0.448 -0.269 -0.774 -0.096 0.397 0.128 2.230 -0.656 -0.150 0.779 -0.695 1.928 0.005 0.561 0.019 1.019 -1.063 1.572
0 1.626 -0.205 -0.792 -1.253 0.181 0.997 0.024 -0.177 -2.095 0.146 -0.321 -0.076 -0.447 1.417 0.187 0.099 1.252 0.252 0.960 0.814 0.526 -0.291 0.283 0.308 1.955 -0.094 0.394 -0.356
3 0.770 -1.022 0.456 0.215 -1.135 1.156 1.607 0.060 -0.133 -0.367 2.534 0.939 -0.715 -1.592 -0.427 -0.420 0.255 0.045 1.145 -2.708 0.538 0.894 -1.797 -0.867 -0.484 0.506 -0.414 -0.034
1 -0.931 -0.962 1.229 0.661 -0.130 0.832 0.301 -1.186 -2.398 -0.567 1.085 -0.428 0.460 0.184 -1.694 -0.406 0.359 -0.787 -0.363 0.392 0.073 1.047 0.384 -0.956 -0.478 0.020 1.338 -0.474
3 -1.099 0.513 0.126 0.689 0.451 -0.668 0.572 1.294 0.556 -0.571 -1.364 -2.152 -1.247 1.795 -0.790 0.620 -0.899 -0.367 -1.178 -0.083 0.347 -0.968 -0.113 1.116 1.301 1.264 0.492 2.126
1 0.473 -2.150 0.112 -0.792 0.052 0.384 0.669 0.227 -0.258 1.431 -0.853 -1.277 -0.270 -0.352 -0.835 0.545 -0.518 0.701 -1.642 -0.758 1.018 -0.565 -0.758 1.487 1.396 -1.145 -0.229 0.096
0 -0.325 -0.505 -0.035 -0.110 0.275 -0.201 -0.434 0.778 0.603 -0.032 1.195 0.083 -0.161 -0.069 0.165 -0.917 -1.874 0.281 0.834 -1.001 1.073 -1.646 -0.559 1.706 0.356 -0.302 0.252 1.196
4 0.639 -0.708 -0.435 -1.478 -2.291 -1.108 -1.570 0.337 -0.518 -0.538 -0.984 -3.233 1.508 -1.000 -0.412 -1.455 0.972 0.143 -0.826 -0.392 1.678 -0.073 -0.182 -0.941 -1.895 -0.160 0.325 1.100
1 0.496 1.483 0.705 0.810 0.294 -1.503 -1.045 0.455 -0.011 0.440 1.676 0.726 1.259 0.095 -0.807 -0.058 -0.805 1.558 0.790 0.521 1.236 -2.020 0.588 -1.311 -0.007 -0.312 0.662 0.228
4 0.672 1.168 -1.575 0.452 0.632 -0.012 0.476 -1.548 0.221 -0.638 -1.008 1.585 0.908 -1.654 2.767 0.519 -1.204 -0.194 1.077 0.991 -0.879 -0.107 1.325 0.689 0.623 0.109 -2.913 0.312
0 -0.200 1.765 -0.296 0.099 -0.076 0.691 0.226 0.718 -1.081 0.573 -0.532 0.040 -0.201 0.999 -1.162 -0.050 0.399 0.898 0.134 -0.524 -0.044 -1.065 0.238 1.851 -0.922 -0.069 -0.786 0.029
1 1.195 1.920 1.271 0.362 1.120 0.447 -0.196 -0.362 0.082 -0.909 1.128 0.374 -0.595 -0.973 0.956 0.200 -0.465 0.262 0.009 -0.618 1.955 -0.788 -0.825 -1.495 -0.766 -0.424 -0.824 -0.381
4 -0.285 1.179 -0.262 0.362 0.468 0.161 -1.104 -0.342 -2.077 1.643 -3.360 0.842 0.109 -1.737 0.261 -1.107 0.432 -0.058 0.229 -0.369 1.330 -1.016 -0.674 -0.171 -0.102 -1.132 -1.995 0.356
4 -0.051 -0.297 -1.806 -0.662 -0.829 -0.679 0.617 0.140 -0.891 0.936 1.791 0.637 2.600 0.053 1.681 0.724 1.674 -0.481 -0.360 0.633 -0.946 -1.479 -1.948 -0.095 1.383 0.499 -0.980 -0.525
4 -0.483 0.087 -2.005 -0.145 -0.971 0.947 0.175 -1.274 0.652 -1.027 1.203 1.109 -0.810 0.606 -0.649 -0.040 1.218 1.623 -0.072 -2.552 0.664 1.998 -2.004 -0.601 -1.111 -0.269 1.695 0.154
0 1.048 -0.573 -0.216 -0.069 0.920 -0.428 0.603 -0.001 -1.213 -0.003 1.104 0.006 -0.329 1.424 0.390 -1.382 -0.289 0.268 0.235 0.511 -0.161 -0.212 0.014 0.794 -0.684 -0.072 0.248 -0.538
4 -0.546 -0.519 0.101 0.514 1.252 -0.456 -1.612 0.157 -0.457 -1.301 0.298 -0.213 0.307 -0.894 0.487 -1.317 -0.828 -0.243 1.742 0.785 1.077 1.417 3.603 0.042 1.420 1.013 0.996 1.774
2 0.902 -1.393 -0.063 1.367 -1.922 0.614 -0.181 0.554 -1.024 2.348 0.495 0.543 0.853 0.574 -0.573 1.246 -0.414 1.737 0.198 0.737 -0.710 -0.313 1.446 1.035 -0.243 0.703 1.043 -0.327
1 -1.012 0.789 0.217 0.075 -1.488 1.086 -2.110 0.195 1.226 0.114 -0.247 -1.037 -0.572 2.193 -0.548 0.456 -0.834 -0.303 -1.629 -0.050 -0.362 1.350 -0.845 -0.429 0.905 -0.033 -0.164 -0.173
4 0.414 1.070 0.386 -1.345 0.892 -1.111 -0.356 0.423 0.743 -0.050 2.047 -1.197 2.038 -1.779 -0.043 -1.543 0.330 -1.543 0.455 -0.624 -1.740 1.512 0.686 0.502 1.827 -0.448 1.208 -1.018
1 -0.339 0.292 0.446 0.621 -0.878 1.821 0.927 0.953 1.203 0.534 -1.027 -0.139 -1.651 0.415 -0.169 -0.041 1.204 -0.457 -0.252 0.022 -1.840 -0.266 -2.486 -0.586 0.855 0.447 -0.595 0.041
4 -1.564 1.423 -0.278 0.384 -1.986 0.730 0.416 0.617 0.950 0.179 -0.211 -0.811 -0.443 1.206 -0.838 -1.843 -0.988 0.606 -0.303 0.604 -2.000 -2.983 0.264 1.005 -1.711 0.526 1.190 -0.004
3 1.168 -1.114 0.352 1.361 0.589 -1.698 -0.037 0.834 1.681 -0.136 -1.666 2.188 -0.122 1.444 -0.258 0.550 -0.685 -0.666 1.810 0.940 -0.028 0.935 -0.678 0.002 0.102 -0.758 -0.889 -1.889
0 -0.306 0.749 0.383 -0.810 -1.124 -0.483 -1.031 1.354 -1.304 0.345 -0.659 -1.139 0.033 -1.041 0.242 -1.191 0.936 -0.102 -0.574 0.635 0.249 -0.793 0.647 -0.865 -0.133 -1.640 0.307 -0.349
2 -0.134 -0.735 -0.871 -2.209 -0.167 -0.752 -0.832 1.131 -1.613 0.037 -0.006 0.155 2.701 -2.000 0.344 0.140 0.627 -0.696 1.017 -0.209 0.474 1.160 -0.694 0.508 -0.102 0.594 -0.381 0.022
1 -1.269 -1.560 -0.289 0.605 -1.449 0.697 0.523 1.103 -0.633 1.373 -0.835 -1.926 0.075 0.827 -0.658 0.254 1.052 1.418 0.792 -0.226 -0.198 -0.369 0.055 0.765 -0.930 -0.583 0.717 -0.153
2 -0.789 1.300 -0.731 -0.057 -1.705 1.153 -1.913 -1.643 1.051 1.920 -0.291 -0.047 0.908 0.289 -0.373 -0.485 -1.567 0.233 -0.092 -0.438 -1.002 -0.901 0.618 0.882 -0.327 -0.432 -0.854 0.937
0 -0.343 -1.581 0.587 0.198 0.050 -0.619 -0.945 -0.933 0.177 0.204 -0.350 -1.362 0.747 -0.498 0.125 0.382 0.125 -0.399 0.720 0.636 0.242 -0.175 0.745 -0.699 0.349 0.418 0.917 0.512
1 1.564 -0.477 0.415 -1.228 -1.278 -0.364 0.677 -0.202 1.313 0.208 -1.047 -0.391 0.722 0.384 0.706 1.198 1.577 -0.302 1.366 -1.345 1.249 -0.306 0.358 0.979 -0.711 0.977 0.014 0.295
4 -0.653 -1.837 -0.451 -1.149 -0.197 0.617 -0.668 -0.832 -3.434 0.442 0.796 -0.898 -1.021 -0.132 -1.711 1.064 -0.836 0.820 -0.098 -1.772 -0.949 -2.108 0.103 -0.763 -0.355 1.346 0.908 -0.384
3 -0.315 -0.536 -0.240 0.908 -0.789 -0.507 0.099 1.067 0.280 -1.675 2.170 0.964 0.367 -0.776 0.804 0.830 0.450 -1.076 -1.479 0.476 0.626 0.933 -0.337 -1.990 -0.549 0.115 -2.649 0.909
4 -0.047 -1.083 0.640 -0.948 1.616 -1.179 0.762 -1.908 -2.258 -0.555 -2.260 0.730 -0.022 1.401 -0.167 0.243 -0.268 -0.874 -0.407 -1.204 0.387 -0.308 -0.100 2.824 -0.403 0.314 -0.054 1.158
4 -1.868 -1.487 -1.478 -0.112 0.298 -0.185 -0.613 -2.111 0.214 0.626 -0.751 0.513 -0.540 -0.601 -1.532 -2.356 -1.139 -0.974 0.642 0.382 0.517 -1.616 0.013 -0.687 0.900 -0.393 1.401 -1.688
0 -0.469 1.023 -1.140 1.032 -0.160 -0.652 0.076 -0.514 1.069 1.193 -0.637 -0.267 -0.519 0.037 1.005 -0.811 -0.662 0.224 -0.026 0.369 1.009 0.362 -1.794 0.397 -0.568 0.487 0.902 0.370
1 0.992 1.067 -0.933 -1.964 1.047 -0.462 2.407 0.864 0.730 -0.222 0.632 -0.650 0.072 0.588 0.088 -0.010 0.296 1.062 -1.432 0.723 0.418 0.012 -0.234 -1.138 -0.505 -0.819 -0.169 1.183
0 -2.236 0.679 0.648 -0.088 -0.337 0.605 0.132 -0.229 2.172 0.511 -0.531 0.747 0.438 0.380 0.869 -0.473 0.264 -0.847 0.805 0.017 -0.811 -0.049 0.235 0.903 1.221 0.934 0.539 0.740
1 0.466 0.439 1.380 -1.002 -0.294 -0.069 0.866 0.434 0.519 1.219 -0.222 1.463 -0.088 0.895 -0.496 0.595 1.158 -1.082 0.944 0.179 2.038 -0.264 0.293 -0.156 0.663 -2.183 -1.410 0.669
0 1.707 1.011 0.398 1.216 -0.476 0.545 -0.129 0.322 0.857 0.602 -0.389 0.101 -0.125 -0.643 1.044 1.311 2.367 -0.726 1.082 0.382 -0.267 0.878 0.062 -0.086 -0.052 -0.703 1.269 0.161
3 1.253 -0.448 -0.692 -1.969 0.103 2.042 0.277 -0.022 0.322 -0.011 -0.813 0.823 0.226 0.018 -0.975 1.304 0.665 -0.553 1.002 -0.498 -0.782 -0.760 -1.771 0.472 -1.831 -1.228 -2.076 -0.086
1 1.032 0.517 -0.210 -0.430 -0.225 1.010 0.055 0.195 -0.366 -0.210 0.479 1.297 0.741 1.298 1.464 -0.186 0.878 -0.130 0.781 -2.383 0.137 -1.120 0.132 1.114 -1.633 -1.369 0.761 -0.651
4 0.258 -1.242 0.334 -0.155 -1.908 -0.860 -0.414 1.888 0.557 -1.335 0.486 -1.547 1.083 -0.471 -0.094 1.326 -1.287 -1.397 -0.584 1.038 -1.519 -2.832 -0.451 0.552 1.200 -0.463 -0.411 1.154
0 -1.578 -0.209 1.588 -0.135 -0.662 0.024 0.081 0.726 -0.296 -1.134 -0.368 0.419 -1.157 -0.894 1.654 0.098 0.845 -1.886 0.371 -0.769 -0.332 -0.967 -0.571 0.339 1.329 0.412 0.546 -0.498
4 -0.630 -0.281 -0.852 0.248 1.912 1.518 0.244 0.779 -0.729 1.464 -1.562 0.913 0.496 -0.756 -0.209 -1.134 -1.092 -1.591 0.768 1.205 -2.162 1.838 -0.012 0.290 -1.472 1.321 0.200 1.139
3 0.845 -1.805 0.731 -0.639 -0.736 0.991 1.162 1.035 2.039 1.267 -0.795 0.142 -0.360 2.029 1.105 -0.189 0.878 -1.232 0.250 -0.938 -1.689 0.785 0.909 0.457 -0.183 -1.266 -1.161 -0.969
4 -0.606 -0.089 0.181 -0.529 0.523 -1.232 -2.524 -1.818 -0.209 0.258 -0.799 0.305 1.469 0.373 0.679 -0.591 0.718 -0.112 0.052 -1.906 0.081 1.315 -1.891 0.005 1.888 -1.930 2.250 0.827
4 2.076 -0.754 1.164 0.706 -0.387 0.234 -2.553 -1.345 0.363 -0.900 -0.553 -1.946 3.230 -1.685 0.935 -1.678 0.429 -0.223 -1.029 -0.046 1.682 -0.076 -0.405 -0.143 -0.297 -0.412 2.061 1.197
2 1.224 -1.265 0.722 -0.116 0.119 0.729 0.743 0.839 0.352 0.542 -0.222 -0.644 0.167 -2.145 0.419 0.870 0.620 -0.611 -2.045 0.656 -0.789 -1.287 0.948 -1.355 -0.172 1.777 -0.667 -1.066
4 1.325 0.422 -2.051 -1.483 -1.008 -0.398 1.199 2.126 0.343 0.218 -0.661 0.286 1.437 -0.721 -0.286 -2.548 0.905 -0.614 -1.097 -1.562 -0.447 0.965 1.035 -0.168 -1.587 -0.151 1.034 -2.078
3 -1.752 -0.965 -0.015 0.220 -0.693 -2.752 0.194 0.728 0.766 -0.206 -0.135 0.247 0.409 -0.093 -2.323 1.397 -0.464 -1.224 -0.393 0.112 -0.609 -0.540 -0.325 0.362 -0.799 0.337 -1.380 2.006
0 1.680 -0.191 -0.643 0.192 0.089 -0.569 0.033 -0.253 -0.481 -0.494 0.534 -0.624 -1.481 -0.135 -0.090 -0.706 2.442 0.755 -0.223 -0.734 -1.301 -1.185 -0.293 0.829 0.858 0.308 0.689 0.253
1 -0.305 -0.515 0.426 -0.030 -1.368 -0.290 -1.515 -0.620 -0.507 0.127 -0.268 0.824 0.970 0.370 0.899 -0.845 1.546 -1.338 -1.189 0.559 1.710 0.060 1.584 -0.284 -0.396 0.497 0.484 2.182
4 -1.832 -1.051 1.497 1.858 -0.103 -1.239 2.096 1.594 0.679 -0.812 -0.049 -0.160 0.331 1.451 0.879 -1.077 1.376 0.313 0.687 1.467 -1.112 -0.036 -0.531 -1.568 0.347 2.512 -1.840 -0.032
3 -1.624 0.368 -0.611 -2.293 0.426 2.628 -0.033 -1.066 0.634 -0.376 0.451 -0.994 -1.535 0.330 -0.571 -0.688 0.965 -0.468 0.436 -0.561 1.203 1.748 0.679 -0.954 0.822 -0.006 0.436 -0.338
3 -0.028 -0.977 -1.448 -0.292 -0.938 -0.692 2.006 0.425 0.931 0.825 -0.101 -0.440 0.249 0.168 -0.569 0.130 -0.706 -0.171 -0.335 -1.269 -2.351 -0.939 1.804 -0.310 0.776 -2.124 -1.008 -1.354
3 0.059 -2.216 -2.154 -0.839 -0.698 1.466 -0.041 1.292 -0.380 0.606 -0.725 -0.793 -1.821 0.336 0.262 0.777 0.215 2.026 0.176 0.992 -0.324 -0.908 0.718 -0.089 0.832 0.353 -0.793 1.074
3 0.975 -0.604 -1.172 -0.145 -1.363 -0.277 -0.281 -2.464 0.531 -0.183 1.182 0.216 0.736 1.190 -0.132 -0.001 -1.574 1.240 -0.146 -1.006 0.740 -2.180 -0.429 0.908 -0.438 -0.718 -0.416 -2.010
3 0.127 -0.678 -1.095 1.698 2.201 0.713 -0.691 1.031 0.624 -0.844 0.926 0.708 1.656 -1.328 -0.418 -0.526 0.084 -0.046 -1.561 1.687 -0.529 0.256 -1.427 1.195 -0.809 0.244 -0.773 0.337
0 0.809 0.259 -0.584 0.095 -1.963 -1.572 0.423 0.942 0.905 -1.053 0.882 0.662 0.458 -0.141 -0.524 0.734 0.797 -0.208 0.428 0.464 -0.103 -0.694 0.070 0.866 -0.867 1.158 -0.736 1.818
0 0.523 -1.024 0.384 -0.200 -0.972 -0.335 0.220 1.453 0.328 0.848 0.205 0.559 -0.005 0.239 -0.882 -0.108 -1.106 0.851 0.959 0.421 -0.629 -0.109 2.332 1.320 -0.082 -0.643 0.148 0.142
1 -0.683 -0.614 0.055 -0.310 0.296 -0.234 1.147 2.111 -1.501 0.025 1.086 -0.703 0.280 -0.380 -1.003 -1.017 -2.471 0.166 0.232 -0.952 -0.280 -1.442 0.533 -0.401 0.771 -0.322 -0.988 -0.536
1 -0.854 -0.033 -1.061 0.976 -0.543 -1.712 0.114 1.125 -1.377 -0.125 -0.555 0.991 -2.047 1.135 -0.705 -0.160 0.859 1.479 0.051 -0.725 -0.532 1.166 -0.884 0.329 0.007 0.759 0.034 -0.434
0 -1.472 -0.599 -0.164 0.195 1.446 0.073 0.320 0.350 -0.328 -0.192 -0.107 0.626 -0.913 -0.507 0.356 -1.076 0.114 -0.546 0.505 1.374 -0.545 0.758 0.201 -0.384 0.007 1.298 -0.172 1.309
1 1.259 -0.139 -0.607 -0.262 -0.644 1.341 -0.335 0.456 0.790 -0.661 -1.804 -0.481 0.637 -1.695 0.253 0.840 1.392 0.678 0.293 0.186 -1.208 -0.947 -1.732 0.072 0.441 0.118 0.284 0.760
2 -1.486 -1.459 0.751 0.786 0.380 1.098 -0.914 -1.581 0.433 1.680 -0.920 -1.741 0.046 0.974 0.221 0.945 0.488 0.047 -0.111 -0.805 0.309 -0.460 -1.764 1.255 -1.037 -0.306 0.873 -0.769
0 0.191 0.206 -0.017 0.441 1.021 -1.064 0.378 1.014 1.080 -1.650 0.820 0.818 0.112 -0.473 -0.987 -0.136 1.195 1.137 0.578 0.497 0.543 -1.753 -0.905 0.521 0.250 0.456 -0.688 -0.579
1 0.909 -0.522 0.253 -0.577 -0.232 0.092 -2.234 1.608 0.532 0.418 1.032 0.863 1.318 0.040 -0.360 -0.572 -2.087 0.113 0.090 -0.341 1.066 -0.973 -0.779 0.109 1.132 0.301 0.223 0.063
0 0.935 0.649 -1.080 -0.600 1.333 0.188 0.442 -0.186 1.493 -0.844 0.906 0.976 1.360 0.125 0.934 0.806 -0.239 0.643 0.280 1.514 -0.104 0.812 0.718 -0.839 -0.548 -0.955 0.417 0.917
4 2.332 0.727 -0.653 -0.779 -0.446 -2.064 0.278 0.694 -0.967 -1.552 -0.911 0.997 0.803 0.215 1.642 -1.689 -0.208 0.560 0.790 0.804 0.724 1.319 0.761 -1.154 -0.455 -1.695 0.996 1.195
2 0.577 -0.129 1.407 -0.484 0.955 -0.039 -0.608 0.593 0.410 0.357 0.081 -0.038 2.388 1.184 0.723 0.112 1.007 0.518 0.026 -0.304 -0.081 -3.170 0.211 -0.488 1.593 0.019 -1.189 -0.050
4 -0.454 1.726 -0.887 0.772 0.356 1.262 -0.667 -0.733 1.096 0.425 -1.545 -0.539 0.386 0.838 -0.679 -2.914 0.523 -1.220 -2.017 -2.832 0.814 0.129 -0.226 1.389 0.637 1.616 -1.393 -0.035
0 -0.324 0.117 0.648 -1.262 1.752 -1.062 -0.940 -0.257 0.552 0.045 0.050 0.760 0.135 0.402 -0.269 0.925 -1.057 0.570 -0.357 0.057 0.159 0.871 -0.082 -1.754 -1.005 -0.098 0.837 0.205
2 -0.849 -1.309 -0.154 -0.866 0.907 0.406 -0.070 -0.797 0.381 0.691 0.143 1.604 0.460 0.914 0.134 0.676 -0.797 -0.458 0.349 0.342 0.508 1.100 -1.425 -0.974 0.230 -0.724 3.378 0.516
2 1.118 1.003 0.147 -0.919 -0.483 -0.243 -0.915 1.081 1.028 -1.396 -0.322 2.062 1.163 0.188 1.096 1.649 0.095 -1.357 0.860 0.911 -1.006 0.261 -0.791 -1.645 0.221 1.452 0.321 0.177
3 -0.911 1.576 0.084 -1.499 -0.308 0.439 0.732 0.199 0.528 -0.140 -0.137 3.293 -0.602 -1.259 -0.438 -0.109 -1.566 0.501 -0.610 -2.313 -0.021 -0.812 1.014 -1.010 -0.541 0.345 -0.369 -0.945
2 -0.374 -1.496 -0.494 -0.346 -1.837 -0.320 -0.171 -0.851 0.158 1.139 2.159 -0.234 -0.769 -0.862 0.443 -1.860 1.328 -0.949 0.823 -1.358 1.068 0.467 -0.103 0.045 -0.272 0.311 -1.744 0.300
0 -0.805 0.727 0.083 -1.749 -0.606 -0.268 1.198 1.744 0.771 -0.365 -0.151 0.993 1.576 0.278 0.587 -0.040 1.159 0.159 -0.105 0.838 -1.165 0.579 0.178 -0.343 0.068 0.098 -1.113 -1.560
1 -1.327 0.406 -0.396 1.087 -0.329 -1.594 -0.583 -0.091 1.246 0.120 -0.194 0.008 1.348 0.704 0.955 0.599 -0.040 0.301 -0.110 -0.060 -1.058 -2.342 -1.239 -0.328 0.198 -0.349 1.663 -0.049
0 0.404 0.625 0.476 -0.424 -1.091 -1.308 0.114 -0.221 -0.265 -1.507 0.409 -0.519 0.719 -0.018 0.395 -2.069 1.221 0.306 -1.181 0.222 -1.190 0.376 -0.288 -1.127 0.261 -1.432 0.900 1.020
3 0.590 -1.385 0.661 -1.555 -0.772 1.033 0.984 -0.944 -1.140 0.026 2.003 0.561 2.353 -0.165 0.883 -0.316 -0.800 -0.952 0.354 1.295 -0.327 1.421 1.831 0.868 0.327 -0.670 1.303 -1.141
2 -0.021 0.323 1.446 0.009 0.510 -3.200 -0.511 0.575 -0.608 -0.955 2.165 0.715 -0.546 1.127 -0.413 -1.364 -0.546 -0.777 -0.631 -1.094 -0.022 0.690 -0.197 -0.234 0.419 -0.249 0.584 -0.183
2 0.567 0.994 -0.127 0.976 -2.038 -1.056 0.164 0.330 -0.912 0.259 -0.624 -0.277 1.025 0.909 -0.997 -1.057 -0.411 -0.027 -0.658 1.041 0.347 0.406 -1.267 1.167 -2.820 0.834 0.934 -0.432
1 0.108 0.046 1.468 -1.058 -0.284 -0.870 -0.549 -1.546 -1.195 0.670 -1.420 -2.068 0.775 -0.848 1.242 -0.116 0.018 -1.497 -0.605 -0.415 0.905 0.262 0.174 -0.606 -0.056 1.176 0.488 -0.047
0 0.422 1.073 0.264 0.638 -0.647 -1.283 -0.819 1.201 -0.289 -0.488 0.007 -0.199 -1.804 0.633 0.080 0.707 0.013 0.066 0.013 1.449 -0.160 0.055 1.630 -0.367 0.233 -0.901 -1.266 -0.576
2 0.136 -0.970 -0.516 -1.078 0.603 1.513 -0.623 -1.102 -2.147 -1.083 -1.162 1.863 -1.503 0.520 0.395 -0.089 -0.403 1.806 0.383 -1.382 -0.326 -0.117 -0.673 0.053 0.836 0.172 0.679 0.568
1 0.384 0.518 -0.209 1.203 0.081 -0.550 1.304 0.157 -0.129 -0.616 0.757 0.664 -0.564 0.681 1.392 0.228 0.302 -0.644 -0.826 -1.197 -0.370 -0.467 -0.488 2.411 -0.785 -1.808 0.465 1.059
3 1.551 2.221 0.102 0.239 2.394 -1.093 -0.522 0.581 1.148 0.038 -0.814 1.288 -0.008 0.001 0.930 1.506 0.240 0.478 1.046 -0.226 -0.548 -0.368 -0.920 -1.088 2.221 -0.895 0.271 -0.086
2 -0.119 -1.571 -0.745 2.019 -1.508 -0.251 -0.016 0.320 -1.497 0.174 -1.686 0.097 -0.652 -0.866 0.835 -0.236 0.474 0.507 1.270 -1.208 0.919 0.016 -0.378 -1.149 0.607 -0.922 0.518 1.928
2 0.661 0.173 -1.456 0.259 -0.230 1.137 -1.059 -0.640 0.017 -0.663 -1.531 -0.060 -1.493 -0.855 1.253 -0.392 0.083 -0.237 -1.146 0.723 0.234 -0.511 1.419 -0.261 -1.967 1.528 0.356 -2.046
3 -0.459 -0.260 1.045 0.524 -1.866 -1.289 -0.135 0.536 -0.229 -0.503 -0.984 0.913 0.100 1.680 -1.300 0.691 0.548 0.847 0.827 -0.507 -0.181 0.166 -1.003 1.032 -2.475 -1.162 -1.678 1.100
0 -1.947 -0.821 0.820 0.593 -0.160 1.069 0.608 0.344 -0.017 0.896 0.755 -0.273 0.813 -0.654 0.235 0.400 -0.335 0.134 0.608 -1.082 0.184 2.215 -0.826 1.030 -0.096 1.183 -0.723 0.006
4 1.851 -0.095 -0.931 0.133 -0.622 -0.410 1.629 0.126 -1.279 -0.440 -1.851 -0.759 1.003 -0.723 2.642 -2.613 -0.443 0.113 0.061 0.563 -0.275 1.253 -1.230 -0.238 -1.114 -0.495 -0.257 0.619
3 -1.171 0.254 -2.459 0.414 -1.409 -0.096 -1.195 0.003 0.578 1.503 -1.541 0.975 0.131 -0.451 0.760 -0.236 0.718 -0.992 -0.633 0.944 1.485 -0.035 -2.787 -0.199 0.032 -0.233 0.336 -0.477
4 2.025 -0.668 -0.099 1.892 -1.012 0.578 2.092 0.315 2.121 0.094 1.062 -0.119 -0.762 0.218 -2.254 -0.956 -0.177 0.378 1.763 -0.270 -0.731 0.286 -0.648 -1.642 -0.680 -1.103 0.764 -0.719
0 1.330 -0.121 -1.340 -0.486 -1.488 -1.125 0.389 -1.174 1.113 -0.071 0.086 -0.278 0.773 0.783 0.335 0.565 -0.212 0.542 -0.338 0.002 -0.275 -0.457 -0.689 0.468 1.311 1.171 -1.257 1.289
0 -0.125 0.235 -0.211 -1.040 0.297 -1.566 0.138 -0.995 -0.081 0.191 -0.353 -1.641 -1.188 0.458 -0.557 0.561 0.010 -0.198 0.348 -0.675 0.400 -0.420 2.536 -1.134 -0.357 -0.317 -1.352 0.726
0 1.220 0.389 -0.502 -0.606 -0.590 -1.016 1.112 -0.518 1.353 -0.829 -0.599 -0.452 -0.899 -0.586 0.551 1.202 0.299 -1.191 -0.600 -0.579 1.701 0.986 1.214 -0.709 -1.273 0.573 -0.460 0.292
2 0.548 1.883 -0.804 0.255 0.031 0.788 -0.452 -0.566 -0.064 0.475 -0.457 -2.655 -0.952 1.414 1.547 -0.496 1.797 -0.327 -1.535 -0.306 0.725 -0.565 0.603 0.267 -0.286 0.776 1.229 0.182
4 0.857 1.525 -2.802 0.434 -1.427 0.499 0.128 0.636 -0.499 -1.873 -0.450 0.407 1.015 -1.172 1.027 -0.648 0.362 -0.510 0.182 0.892 -1.227 -0.680 0.185 2.402 -0.863 2.040 2.004 -0.097
4 -1.475 -0.866 0.012 -2.449 -0.380 -1.165 -1.499 -0.802 -1.069 0.469 1.197 -0.364 -0.596 -0.414 -1.108 -1.289 -1.646 -1.330 1.632 -0.690 -2.942 -0.587 -1.590 0.059 -0.048 0.056 -0.041 -0.373
0 -0.993 0.458 -0.038 -0.444 -0.889 1.204 -0.186 -0.648 2.455 -1.334 -0.945 -0.171 -0.147 -0.961 -0.200 -0.550 -0.278 -0.194 0.897 -0.505 -0.789 1.144 -0.479 -0.810 -0.905 1.322 0.547 0.021
3 -0.197 -1.532 0.016 -0.250 0.512 0.081 -0.549 0.926 -2.221 -0.064 -1.763 -0.912 0.525 -1.274 -1.424 2.382 -1.359 1.651 -0.172 -0.525 0.567 1.337 -1.146 -0.510 0.847 0.288 1.102 0.515
2 -0.121 -0.434 -1.526 -0.654 -1.748 -2.103 0.166 0.112 -1.681 0.192 -1.309 0.146 0.499 -2.119 -0.043 0.628 -1.160 -0.355 0.799 -0.381 1.024 1.420 1.367 -0.538 -1.074 0.604 -0.151 -0.113
1 -0.183 0.270 0.003 0.327 0.831 -1.441 0.536 1.804 -1.607 -0.802 -0.621 0.703 -0.293 0.432 0.618 -0.931 0.166 -1.256 0.166 1.289 -0.731 -0.741 0.433 1.125 -0.667 1.510 1.527 0.151
2 -1.866 0.028 0.979 -0.643 -1.227 -0.186 -0.061 -0.909 2.096 -0.479 -1.154 -0.094 0.908 0.017 0.486 -0.872 1.488 0.562 1.048 0.365 0.825 -0.873 -0.491 1.346 1.560 -1.147 0.250 -0.431
1 0.056 -0.380 0.346 -1.126 -0.366 1.683 -1.048 1.233 -0.832 -1.053 0.107 0.807 -0.742 -2.019 1.290 -0.747 -1.237 0.253 -0.575 0.227 -0.901 -0.678 -1.065 -0.976 -0.193 1.793 0.159 -0.367
3 1.474 0.180 0.143 -0.028 -0.596 -0.311 0.102 -0.560 -0.387 0.408 -1.431 0.559 2.352 2.014 0.295 -1.249 1.383 -0.190 1.423 -0.764 -0.588 0.481 -0.687 1.877 0.564 1.773 -1.136 -0.873
4 1.051 2.023 1.384 0.203 -0.258 3.016 0.487 -1.107 0.278 0.038 0.306 1.413 0.010 -0.452 1.768 1.453 1.511 -0.822 -0.274 1.590 -1.351 -0.192 -1.119 -0.819 1.651 -1.733 1.274 0.751
3 -1.694 -0.456 -0.737 -0.145 1.161 -0.838 0.089 1.568 -0.751 -0.951 -0.332 -2.227 -1.043 -0.951 -2.232 -0.295 0.079 -0.512 -0.054 -1.514 2.014 -0.285 0.792 1.028 -0.418 -0.142 0.884 -0.579
1 1.273 -0.063 0.062 0.340 3.187 1.344 0.701 -0.913 0.144 -0.813 0.822 0.140 -1.125 0.081 -0.849 0.151 -0.859 0.027 0.750 -0.053 -0.391 0.203 0.697 -1.107 0.261 1.318 0.574 0.058
4 -0.456 1.301 0.568 -0.803 0.106 1.749 1.016 -1.383 1.765 1.136 0.602 1.449 0.299 -0.586 1.543 -1.143 -0.731 1.972 -0.795 0.480 0.331 -1.303 -0.444 -1.273 0.166 -0.620 0.945 2.148
2 0.176 0.679 -0.443 1.029 0.386 -0.880 1.466 -0.106 0.703 0.442 0.044 0.193 -0.145 0.829 -2.284 1.765 0.335 -0.257 -0.520 0.595 1.377 1.751 -1.234 2.076 0.883 -1.035 -0.483 0.571
2 0.803 -0.702 -0.056 -1.873 -0.825 0.510 -2.571 -0.593 0.106 0.196 0.256 0.273 0.879 0.307 1.591 -0.395 1.256 -0.079 0.914 0.585 -0.363 -0.510 0.307 1.706 0.322 0.885 1.610 0.577
2 -1.042 -0.640 0.880 -0.312 0.859 -0.157 -0.963 0.480 0.560 -1.230 1.136 1.521 0.627 0.077 2.012 0.539 0.197 1.700 1.603 -1.439 -0.504 0.586 1.227 -1.166 0.908 -0.405 0.717 -0.567
3 0.042 -0.613 -0.075 -1.052 1.050 -0.781 1.199 -0.061 0.856 -0.767 -1.967 -1.649 2.503 -0.598 1.096 -1.298 0.179 0.348 2.836 -0.318 -0.711 0.351 -0.925 0.410 -0.053 -0.754 0.494 0.108
4 -1.315 2.003 0.121 -1.236 -0.202 -0.483 -0.795 0.425 2.230 1.869 0.201 -0.781 0.280 0.738 0.204 0.745 0.444 0.285 -1.216 -0.130 1.911 0.490 1.168 2.011 -1.179 2.599 0.973 1.162
4 -0.279 -1.437 -0.082 1.295 -2.067 0.736 0.158 2.593 2.295 -1.764 1.381 -0.390 -0.194 -0.576 -0.699 1.318 -2.354 -0.665 0.676 -2.039 -0.230 -1.208 0.234 0.050 0.933 -2.319 0.430 -0.750
0 1.683 -1.699 -0.346 -0.438 -0.009 -1.227 -0.580 0.789 -0.269 -1.070 2.080 0.049 0.613 0.764 -0.071 -0.790 0.199 -0.700 0.372 -0.164 1.029 -1.142 0.719 0.614 -0.239 0.730 0.419 0.667
1 1.116 -0.430 -0.334 0.958 0.165 0.095 2.135 -1.639 -0.797 0.580 -0.413 -0.660 -2.036 -0.974 -0.830 -0.240 0.664 0.301 1.321 -0.228 0.507 -0.168 -1.089 1.105 -0.958 0.894 -0.032 0.569
2 0.262 0.110 -0.592 -0.937 -0.697 -0.202 0.591 0.574 0.211 -1.335 -1.695 -0.814 0.815 -0.980 0.162 0.028 0.015 0.243 -1.306 0.830 -1.076 -1.025 -2.107 1.811 0.198 -0.477 -1.215 -1.443
3 0.970 -0.419 -0.556 1.233 0.243 0.983 -1.411 -0.806 -0.523 1.298 1.260 0.088 0.481 0.168 -0.997 0.060 -0.903 -0.381 -0.225 1.651 0.158 -0.407 0.614 -0.661 2.657 0.430 -2.472 -0.804
3 2.301 0.739 -0.856 1.212 -1.152 -0.137 -0.247 0.182 -0.942 0.128 -1.748 -0.377 1.335 1.025 -0.696 -1.526 -2.183 0.617 0.278 0.386 1.691 0.153 0.228 -1.397 1.624 -0.164 -0.817 0.208
1 0.479 0.444 0.060 -2.084 0.440 -0.145 0.547 1.120 0.385 -0.260 -1.776 -0.872 0.696 0.283 0.928 0.470 0.270 -0.346 0.207 -0.192 0.095 -1.162 1.425 -0.293 -2.270 -1.011 -0.317 0.961
1 1.086 0.708 -1.581 0.187 0.144 0.714 -0.335 -1.429 -0.867 1.080 0.156 -0.261 -1.016 -0.951 -1.340 -0.364 0.791 -2.246 -0.575 -1.397 -0.751 0.866 1.093 0.863 -0.049 -0.636 0.544 -0.734
0 1.755 -0.331 1.391 -0.316 -0.966 1.746 0.521 0.004 0.687 0.274 -0.069 -1.326 -0.271 0.623 -0.123 0.902 0.384 -0.878 -1.319 -0.157 -0.524 0.336 -0.718 -0.615 1.053 0.148 -0.703 0.018
1 -0.417 0.425 -0.924 -0.020 2.081 1.126 -0.685 1.866 0.794 0.965 0.466 -0.132 1.584 0.690 0.986 -0.389 -0.028 -0.635 0.331 0.355 0.386 0.018 0.002 1.015 0.750 1.562 0.221 -1.388
1 -0.356 -0.064 0.370 1.473 -0.039 -1.132 0.626 -0.493 -0.190 1.612 -0.579 -0.525 -0.714 -0.961 -0.615 0.241 -2.332 -0.804 -0.305 0.982 1.968 -0.962 0.581 0.548 -0.070 1.756 -0.123 0.665
1 -1.096 0.685 -0.491 -2.104 0.261 -0.010 0.886 0.766 -2.511 0.454 -1.142 0.304 0.085 -0.055 -0.965 0.612 1.844 -0.268 0.096 0.898 -0.753 0.585 0.104 1.187 0.922 0.015 0.239 -0.364
0 0.968 -0.498 -1.309 -0.051 0.223 -0.472 0.278 0.330 -0.910 0.395 -0.267 -0.700 0.787 -0.777 -0.457 0.072 1.374 0.193 0.528 -0.298 -0.078 -0.049 0.293 -0.296 1.340 -0.373 -1.707 2.743
4 1.286 0.057 0.002 0.635 1.576 1.512 0.593 0.517 0.696 0.986 1.299 -0.237 1.318 0.343 0.651 1.382 0.747 0.916 -0.389 1.498 0.696 0.126 1.101 2.588 -1.169 -0.804 2.685 0.427
2 0.084 1.327 -1.119 -1.038 0.033 -1.636 0.244 0.016 0.545 2.238 0.059 -0.540 0.962 -0.012 -0.164 1.787 0.751 -0.952 -0.922 -1.335 -1.100 1.128 -0.508 -1.786 0.458 0.169 -0.448 -0.637
1 -0.596 -0.872 -0.671 1.605 -0.026 1.862 -0.547 1.256 -0.854 1.295 1.000 -0.029 -1.446 0.699 0.158 -0.831 1.241 0.335 0.182 0.010 -0.827 0.147 -0.110 1.143 -0.815 0.678 -0.880 -0.114
3 1.290 1.299 -1.300 -0.545 0.724 -0.627 0.105 -0.716 -1.089 -0.896 0.956 0.014 1.272 0.520 1.606 0.818 0.759 -0.120 3.009 0.113 -1.973 0.268 -0.982 1.017 0.726 -0.192 -0.888 0.146
3 -0.661 0.862 -0.040 0.385 2.056 1.173 -2.476 0.309 -0.583 -1.207 -0.648 0.756 0.275 0.184 2.527 -0.137 0.039 -1.805 0.531 0.540 1.108 -1.095 -0.384 -0.490 0.437 0.243 -0.091 -0.145
0 -0.014 0.433 0.939 -0.741 0.376 0.979 0.976 -0.043 -0.348 -0.012 -0.707 0.536 0.134 -2.442 0.364 -0.489 0.164 1.518 -0.911 -0.395 -0.232 -1.303 0.100 0.434 -0.385 0.551 1.489 -0.690
2 0.630 -1.334 1.093 0.558 0.734 0.180 -1.066 0.795 0.588 -0.762 -1.238 0.069 0.148 0.731 -0.625 0.385 0.981 -2.079 -2.329 0.178 0.285 -0.630 -1.479 1.822 0.067 0.111 -0.499 -0.151
4 0.340 -1.662 0.167 0.408 -1.078 -0.127 -2.901 -0.334 -0.436 1.402 -0.603 -0.266 -0.079 -0.590 0.720 -1.992 1.137 -0.012 1.302 -2.747 -2.436 -0.292 0.506 -0.555 0.584 0.981 0.666 0.103
4 2.334 1.195 -0.594 0.039 -0.540 -1.167 -0.401 0.219 0.187 -1.453 -0.393 -2.240 -0.129 0.475 0.133 1.439 -2.717 -0.132 0.540 2.200 0.220 -0.476 1.149 -1.410 0.587 2.751 0.253 -0.011
4 -1.058 0.628 -0.458 0.112 0.299 0.853 2.632 1.220 -0.978 1.428 -0.056 -2.512 1.173 -1.608 -0.122 -0.382 2.354 0.739 -0.299 -0.705 -0.018 -1.435 -0.212 -1.517 0.241 0.428 0.775 0.301
1 1.285 -0.498 0.245 -1.663 -1.112 1.059 0.441 0.138 -1.296 0.174 0.114 0.033 -0.777 0.725 -0.784 -0.251 -2.052 -0.057 -1.166 0.181 0.042 0.001 -1.172 -1.471 -0.066 -1.724 1.237 0.282
2 1.738 2.027 -0.330 0.615 1.487 1.179 0.180 0.668 -0.593 -0.446 0.490 -0.353 2.014 -0.284 -0.289 -0.438 -0.830 0.813 -1.216 -1.473 -0.950 0.104 -0.074 -1.558 -0.668 -0.297 -0.206 -0.179
4 0.735 0.025 -1.346 1.368 -0.163 -1.686 -1.156 -1.269 -1.114 1.014 0.198 0.541 -2.801 0.768 0.630 -1.791 -1.245 1.138 -0.596 1.009 0.010 -0.513 -0.140 -1.991 0.381 -1.199 -1.830 -1.482
4 -2.016 0.872 0.700 0.570 -3.170 -0.267 2.034 1.344 -3.300 0.413 1.085 0.090 0.078 0.430 1.564 0.293 -0.176 1.441 -0.792 0.706 1.482 -0.715 0.482 1.234 1.164 -0.315 0.583 0.005
0 0.293 -1.045 0.473 0.456 -1.484 -0.322 1.409 -0.633 0.018 0.160 -0.026 0.689 0.935 1.265 -0.313 0.524 0.840 -0.118 -0.719 0.144 -0.606 -0.704 0.220 -0.188 0.176 -1.551 -0.176 -2.260
0 0.483 -0.167 -0.017 -0.980 0.474 0.287 -0.840 1.051 0.147 1.229 -1.027 1.043 0.046 0.194 1.602 0.182 -0.114 0.201 0.807 -0.437 -1.114 -0.745 -0.133 -0.549 -0.364 0.589 0.421 -1.483
4 0.506 1.447 0.568 -1.050 1.363 1.641 3.152 -1.123 0.243 -2.082 0.553 -0.548 1.923 -0.775 -1.689 -0.471 -1.975 0.751 -2.065 0.028 -2.078 -0.320 1.643 0.361 -0.863 -0.031 0.018 0.473
2 0.765 -0.575 0.151 0.223 0.736 1.660 0.607 -0.238 0.797 0.304 -2.238 -1.199 -0.225 -0.585 -0.123 -2.427 0.841 -0.050 1.216 0.026 0.201 -1.036 -1.205 0.133 0.002 -1.642 0.940 0.050
4 0.390 -1.534 1.230 2.416 -1.071 -0.488 -1.125 0.243 0.534 -0.071 1.181 1.429 -1.370 0.189 0.413 0.298 -0.335 0.307 -1.509 1.484 -0.279 -0.574 -1.653 1.739 1.994 0.142 -1.334 -0.297
3 -0.429 -0.126 -0.974 0.557 0.957 -1.851 1.784 -0.528 -0.295 0.308 -0.838 -0.591 -0.831 -0.002 -0.581 0.377 -1.316 0.444 -1.127 1.011 0.813 -2.139 0.842 -2.610 -1.172 1.033 -0.541 1.561
4 0.901 0.190 -0.263 0.821 1.913 -0.558 -0.088 -0.991 0.155 -0.156 1.158 1.420 0.788 1.047 -0.705 -0.955 0.437 0.020 1.999 -1.922 0.620 0.603 -0.762 -0.968 -0.339 -1.642 1.457 2.701
3 -1.354 1.610 -0.225 0.157 -0.537 1.817 -0.260 1.597 0.212 0.800 0.665 -0.638 1.023 -0.210 0.615 -2.405 -0.341 -0.664 0.604 -1.620 -1.408 -0.191 1.786 1.488 0.145 0.559 0.437 -0.256
3 -0.495 -0.746 -0.670 1.284 0.000 -0.554 -0.908 -0.321 -0.512 -0.937 -0.353 0.324 3.233 -0.396 0.115 -0.907 1.954 0.125 0.484 0.427 2.759 0.856 0.252 0.072 0.100 -0.769 -0.327 1.133
1 0.352 -1.473 -0.484 -1.213 -1.426 0.853 -1.479 -1.321 1.978 -0.448 0.658 1.125 0.774 0.150 -0.037 0.538 -0.225 0.695 -0.259 -0.146 1.882 -0.480 0.019 0.431 0.495 0.293 -1.250 0.162
2 -1.000 0.508 0.006 -0.455 2.043 -0.315 0.169 0.086 1.575 -1.499 2.244 1.519 0.398 0.618 -0.228 0.150 0.420 -0.909 1.519 0.224 0.142 -0.867 1.170 -0.046 -0.525 -1.086 -0.162 -1.170
0 0.547 -0.202 -0.218 1.099 0.825 0.814 1.305 0.021 0.682 -0.310 0.324 -0.130 0.097 0.595 -0.818 2.092 -1.006 -1.214 1.158 0.792 0.624 0.628 -0.012 -0.897 0.076 -0.677 0.975 -0.147
3 0.166 -0.734 0.518 0.736 0.726 0.935 -0.568 -0.271 0.247 -0.425 -1.158 -0.502 1.635 -0.743 -0.794 -0.547 0.557 0.348 0.706 2.533 0.821 0.900 2.860 0.490 -0.392 -0.303 1.057 -1.252
3 -0.119 -1.539 -2.092 0.628 0.790 0.699 0.435 -1.072 0.243 1.574 -2.169 -1.281 -0.477 1.070 -1.000 1.626 0.412 -0.821 0.056 0.382 0.097 0.916 0.499 -2.436 0.518 -0.477 0.283 0.356
3 -0.565 0.355 -1.179 0.754 -0.429 0.808 1.238 -0.840 1.835 0.671 0.061 -0.068 0.587 -0.117 -0.137 -1.152 -0.729 -0.269 -2.916 -1.555 -0.826 -0.497 -1.221 -0.926 0.696 -1.410 -1.323 -1.318
2 1.634 -0.333 -0.076 0.073 0.479 0.827 -0.375 1.857 -1.829 -1.194 0.608 -0.164 -0.566 0.959 0.813 1.249 -1.036 0.813 0.802 -1.007 0.039 0.969 1.342 -0.147 -1.099 1.532 0.040 -1.889
0 0.842 0.959 0.329 -0.079 -0.110 1.276 -0.558 -0.141 -0.963 -0.620 -0.449 -0.177 -0.148 0.100 0.292 0.146 1.498 -0.052 0.059 0.679 -1.161 -0.222 0.370 0.449 -0.439 0.253 -1.350 1.060
0 0.270 0.308 -0.759 1.058 -0.442 0.450 -0.164 0.376 -0.214 0.394 -0.007 0.814 -0.819 -1.770 -0.973 1.069 -0.155 0.092 -1.054 0.066 -1.023 -0.876 -0.402 0.445 0.552 0.579 -1.588 -0.007
3 0.352 -1.620 -0.757 -0.126 -1.182 -1.167 -0.962 0.508 -2.526 -0.169 1.520 1.062 1.124 -0.349 1.948 -0.570 0.451 0.528 -1.097 -1.966 0.882 -0.922 -0.092 0.399 -0.022 0.677 0.466 0.540
4 0.437 -2.170 1.192 -1.287 -0.634 0.105 -0.385 0.430 0.552 -1.337 -0.036 -0.823 0.948 1.845 0.302 -2.070 0.124 -1.358 -0.094 0.958 -1.365 0.507 0.731 -0.864 -3.150 0.858 -0.477 -0.032
4 0.568 1.086 0.333 -2.493 -0.499 1.197 -0.808 -0.223 -1.045 1.784 1.681 -0.884 0.147 -0.680 0.615 0.510 0.812 0.332 2.935 -1.028 -1.711 -0.393 -1.568 -1.492 -1.001 -0.922 -0.955 0.518
2 -0.362 -0.124 -0.378 0.317 -0.758 -0.509 -0.826 -0.900 -0.341 0.263 -1.521 0.659 -1.837 -0.142 1.079 1.384 0.574 -1.625 -0.196 1.908 1.331 -0.105 1.264 -0.179 0.592 -1.277 -2.003 -0.835
4 -0.524 -0.390 0.399 -0.672 -1.256 0.015 -0.777 0.339 -1.454 0.397 0.000 0.607 -1.801 -1.846 -0.071 -1.139 -0.544 1.446 0.194 -0.975 0.211 -0.112 0.046 2.515 0.770 -1.748 2.619 -0.576
2 -0.526 1.559 -0.415 2.083 0.067 -0.234 -0.504 0.422 0.894 -2.538 0.005 -1.988 0.982 1.000 0.041 0.208 -1.430 -0.547 -0.612 -0.449 0.872 1.282 0.611 -0.323 -0.746 0.091 0.297 -0.221
1 1.046 1.380 0.059 -0.095 0.705 0.412 -1.145 1.647 0.238 0.817 0.355 0.514 -0.165 0.697 0.678 0.853 -1.368 -1.218 0.123 -0.400 1.237 -0.876 0.892 -1.668 -1.518 -0.023 0.354 0.645
2 0.393 -1.523 -0.679 0.819 -0.131 -1.465 0.394 -0.582 0.436 2.688 0.382 0.612 -0.254 -1.120 1.024 -0.606 -0.229 1.793 -0.338 1.215 0.387 -0.553 0.341 -0.716 0.905 1.247 0.049 1.342
4 -0.324 0.103 -1.107 0.163 0.902 1.211 -0.451 -0.963 -1.467 1.217 -0.589 -0.541 -2.243 -0.320 2.784 1.897 0.049 1.359 -0.665 1.530 0.216 0.687 -0.539 0.519 3.062 1.038 -0.550 -2.165
2 -0.324 -1.541 1.405 1.371 -0.672 0.921 -0.332 1.501 -0.515 1.979 0.786 1.798 -0.464 0.405 -0.969 0.844 0.899 2.397 0.482 0.724 -0.471 0.541 -0.380 0.116 0.175 -0.352 0.063 -0.375
0 0.601 1.142 -0.438 0.090 0.007 1.544 0.006 0.146 0.939 0.508 0.219 -1.325 -0.424 0.361 0.127 -0.139 0.692 2.162 0.075 0.656 -0.200 -1.123 -0.304 0.194 -0.060 0.094 -1.726 -0.465
0 1.013 -0.122 -1.042 0.171 0.622 -0.274 0.766 -0.384 -0.013 0.024 -0.849 -0.471 0.142 0.333 1.604 -0.652 0.869 0.567 0.823 -0.637 -0.362 -0.585 -0.245 0.542 0.056 0.870 -0.780 -1.954
4 0.197 1.667 -0.447 -0.523 -2.958 -1.033 1.509 2.252 0.831 0.559 -1.918 0.510 1.250 -1.058 1.583 -0.539 0.908 0.160 -1.177 -0.200 -0.672 0.033 0.796 -0.140 0.556 -0.539 0.118 1.027
3 -1.673 0.328 -0.870 0.469 -1.645 -0.088 -0.727 0.488 -1.167 0.536 1.066 0.455 0.309 0.957 0.829 -1.861 1.078 1.535 0.582 -1.379 -1.237 -2.234 1.045 1.298 0.444 0.790 -0.311 0.529
1 -0.773 -0.072 1.095 0.368 1.944 1.398 -1.039 0.593 0.153 1.187 0.523 -0.500 0.476 -0.369 -0.646 -0.111 0.249 1.300 -0.497 -1.917 -0.197 0.256 0.182 -0.254 0.950 0.312 0.603 -1.837
0 -0.124 -0.273 -0.260 0.920 -0.512 1.012 2.012 -0.650 0.376 0.242 0.052 -1.617 0.888 -0.375 1.163 -0.015 1.135 0.513 0.764 0.626 -0.138 -0.362 0.450 0.698 -0.007 0.778 -0.738 0.255
3 -0.756 -1.258 -2.202 0.612 -0.065 0.584 1.599 -0.908 -0.249 -0.861 -1.694 -0.275 1.167 -1.062 0.238 -1.225 -0.757 0.279 1.744 1.329 -0.026 -0.844 0.091 -1.611 0.634 -0.245 1.466 0.568
2 -0.398 0.003 0.728 2.328 0.820 0.914 -0.545 0.157 -0.096 1.408 -0.146 1.329 -1.302 1.399 -0.861 0.854 -0.864 0.618 -0.963 1.036 0.845 -0.601 0.578 0.947 -0.923 -1.034 -1.071 -1.213
3 -1.382 1.042 0.224 -0.452 2.272 -1.648 0.976 -0.111 0.785 0.109 -1.872 0.095 0.336 -2.192 1.944 0.311 0.672 -1.166 0.691 1.090 0.254 -0.009 -0.894 -0.216 -0.512 0.572 -0.850 0.565
0 -0.448 0.264 -0.665 1.627 1.401 0.565 -0.525 -0.848 1.226 -1.159 0.269 -0.113 -0.900 -0.022 -0.446 -0.113 -1.249 -1.652 1.109 0.095 -0.414 0.769 -0.008 -1.290 -1.677 -0.290 0.264 -0.197
2 1.173 0.674 -1.120 -0.809 -1.434 -0.010 -0.980 -0.806 0.917 -0.808 -0.753 1.141 1.977 0.028 -0.227 -0.223 0.471 -2.221 0.644 -0.305 -0.255 0.024 -1.071 1.481 -1.032 1.394 1.437 0.582
2 1.030 1.982 0.954 2.172 -0.922 -0.819 -1.189 0.442 0.176 -0.152 0.809 1.232 -1.526 -0.597 0.820 -0.468 -0.798 1.019 -1.290 -0.302 0.329 -0.574 0.783 -0.569 1.409 -0.126 0.671 0.746
3 -0.911 -1.351 -0.373 1.121 -1.588 0.084 0.441 1.588 0.679 -0.270 0.494 0.415 0.143 0.043 -1.084 -0.832 -0.200 2.109 -0.196 -0.033 -0.878 0.893 1.018 -2.145 -1.939 1.033 1.838 -0.744
4 -0.845 0.909 -1.261 0.181 0.004 0.060 0.617 -1.262 0.912 -1.200 -0.482 -0.353 0.422 -1.574 1.618 -0.282 0.791 -1.231 -0.531 -2.263 0.836 -1.024 -1.161 0.541 -3.132 -0.224 1.782 -1.006
1 1.171 -0.014 0.490 -1.808 0.511 -0.348 -0.081 1.480 0.379 0.402 -0.482 -0.189 1.360 0.440 1.664 -0.869 -1.510 -0.231 0.970 0.243 -1.251 -0.486 0.601 0.979 -0.703 1.310 0.697 -0.693
3 0.493 -0.806 -0.145 1.090 -2.567 -0.407 -1.638 -0.222 -0.033 -0.263 0.884 0.443 -0.506 -0.738 0.735 2.056 1.654 0.452 0.009 0.378 1.600 0.758 -0.321 1.563 -0.358 1.170 -1.095 0.728
0 -0.811 0.547 0.163 0.308 -0.718 0.151 1.222 0.646 1.384 0.149 0.090 -0.975 -0.270 0.362 -1.330 0.383 -0.804 0.868 -0.590 -1.265 0.566 0.160 0.856 -1.065 1.562 -0.094 -1.330 -1.389
1 1.004 0.483 -0.162 0.895 0.239 0.404 -1.000 -1.283 -0.331 -0.990 1.349 -1.475 -0.708 -1.730 0.397 -0.632 1.670 0.382 -0.287 0.249 0.441 0.789 0.998 -0.386 -0.853 0.379 1.719 1.272
0 -0.865 0.892 -0.842 -0.300 -0.254 1.235 -1.972 0.409 0.792 0.441 -0.276 0.400 -0.534 -0.543 -0.361 -0.153 -1.287 -0.490 -0.293 -0.681 -0.514 -1.234 -0.407 -0.019 0.366 0.149 -1.771 -0.410
1 0.766 -0.478 -0.528 -1.112 0.754 -0.191 -0.318 0.110 -0.170 -2.001 -0.491 0.895 -1.507 -1.090 -0.427 -0.726 -0.028 -1.093 -0.388 -0.048 0.649 1.245 -0.262 1.888 -0.184 0.829 1.932 0.139
1 1.269 -1.387 -0.817 -0.106 0.098 -0.226 0.434 0.996 1.723 0.579 -0.852 -0.147 -0.075 0.515 0.388 2.050 -0.462 0.385 2.355 -0.585 0.145 -0.945 0.082 -0.780 1.047 0.147 -0.935 0.841
0 0.851 -0.391 0.107 0.846 0.464 -0.585 0.300 -1.984 0.000 -0.618 1.519 -0.977 0.117 -0.429 -0.558 0.377 -0.418 1.852 -1.975 0.631 -0.085 -0.050 0.302 0.998 0.265 1.001 0.046 -1.034
2 -1.030 -0.669 -0.304 -0.929 -0.778 0.075 0.215 -1.424 0.396 -0.382 0.720 0.780 -0.545 0.614 -0.837 1.324 -0.799 0.353 -0.188 -1.017 0.901 -0.614 -1.700 -2.882 -0.875 1.376 1.258 -0.719
4 -0.397 1.512 -0.993 -0.017 1.189 0.846 2.216 0.003 -0.442 -1.094 -0.368 -1.104 -1.796 1.963 0.049 2.591 -0.839 -0.429 -0.775 -0.641 0.874 0.566 0.875 1.342 1.107 -1.008 -0.524 0.121
3 -0.860 -1.097 -0.389 -0.017 -0.242 -1.877 1.722 -0.274 0.173 1.245 -2.345 -0.512 -0.387 -0.336 -0.173 1.447 0.673 0.958 0.047 -1.399 -2.080 -1.529 0.360 0.032 -0.210 -0.369 2.012 -0.823
1 -0.553 0.300 0.206 1.744 1.815 -0.597 1.058 0.355 -1.413 -0.696 2.336 0.358 1.000 -0.212 -0.540 0.027 -0.266 -1.493 -1.003 0.708 -0.381 0.026 0.235 0.313 1.147 -1.099 0.570 0.755
0 -1.446 0.362 -0.797 1.075 0.225 0.607 1.411 -0.310 -0.574 0.872 0.510 -0.187 1.391 -0.212 0.033 0.021 -0.935 0.078 1.397 0.445 0.031 -0.162 0.331 0.750 0.609 -0.332 0.325 0.480
2 -0.951 -0.065 0.261 0.588 0.961 0.768 1.838 0.106 -0.159 -1.467 0.970 0.610 -0.566 0.243 -0.010 0.175 -0.274 0.113 -1.519 -0.406 0.308 -0.705 0.389 2.065 -1.155 -1.833 1.206 2.093
1 -0.745 -0.427 0.138 -0.595 -1.376 -1.007 0.940 2.072 0.305 1.079 0.354 0.698 0.741 0.678 0.355 0.143 -0.406 0.526 0.310 -0.907 1.189 0.992 0.193 1.178 0.727 -0.440 -1.815 -1.849
2 -1.576 0.889 0.578 -0.392 0.844 0.478 1.639 1.012 -0.132 -0.881 1.090 0.041 -0.168 -0.723 1.850 -0.894 0.704 1.556 -0.329 0.084 -1.831 -1.174 1.768 -1.405 0.019 0.564 -0.724 -0.513
4 -2.259 -0.599 -1.019 0.150 -2.016 -2.046 2.170 0.618 0.121 0.364 0.146 0.997 -0.632 0.187 0.227 -1.047 1.277 -0.446 1.017 0.115 1.634 -1.041 -1.085 -0.906 0.133 1.541 1.593 -2.310
2 0.200 -0.231 1.418 -1.411 3.602 -0.174 0.738 1.387 -0.196 -0.115 -0.503 -0.885 0.272 -0.135 -0.509 -0.451 0.576 -0.798 -0.247 0.899 0.247 -1.050 -0.024 0.437 0.989 -0.755 -0.559 -0.695
4 -0.468 1.003 -0.290 -0.307 -1.326 -0.760 0.879 0.597 -0.132 1.370 -1.266 -1.143 -0.169 0.993 1.632 -2.390 -0.655 1.270 -1.348 -2.181 -0.287 1.169 0.200 1.240 1.680 1.214 1.448 -1.581
3 0.225 -1.277 -0.556 -0.080 -0.328 -0.206 1.621 0.100 0.382 -2.009 0.746 2.852 1.346 -0.440 2.281 0.027 0.307 -0.267 -0.293 -2.248 0.787 -0.297 -0.188 0.273 -0.415 0.264 1.203 0.405
3 2.569 1.179 1.172 0.632 0.961 0.945 -0.263 -0.279 -0.785 0.710 -1.695 0.393 -0.329 -1.635 0.279 -0.270 0.408 -0.515 -1.444 -0.153 -0.869 -1.863 0.850 -0.607 -0.140 -2.208 -0.425 0.701
2 -0.077 -0.942 -0.081 2.092 0.999 0.551 0.963 1.541 -1.153 -0.392 -0.529 0.587 0.160 0.756 -0.507 1.372 -0.069 0.695 1.195 1.115 0.361 0.819 1.742 0.514 -0.730 0.583 -0.037 -2.348
3 0.934 0.022 0.714 -0.820 -0.333 -0.024 -0.631 1.249 -0.764 -0.577 1.359 1.165 1.588 -1.908 0.201 0.090 -0.562 0.324 -0.321 0.162 3.402 0.317 0.359 0.821 0.181 -1.098 -1.647 -0.029
3 -0.329 0.255 -1.519 0.897 -1.078 1.913 0.374 0.618 -0.673 -0.482 1.326 -1.421 -0.007 0.016 0.456 0.235 0.463 -0.561 2.545 0.011 1.041 1.045 -2.639 -1.565 -0.820 -0.507 0.273 0.295
3 -0.710 0.642 -1.734 0.579 -0.004 0.038 0.255 -0.856 0.424 1.710 -1.375 0.376 -0.621 -1.629 -1.184 -0.103 0.056 0.153 -0.776 0.410 0.408 1.912 -1.201 1.435 -1.357 0.371 2.126 -1.151
1 -0.042 -0.570 -0.714 -0.260 -1.256 -0.595 0.308 -1.155 -2.068 -0.462 -1.197 -0.530 0.848 0.840 -0.057 0.578 1.968 -0.254 -0.970 0.670 -0.504 -1.084 -0.249 0.394 0.363 -1.384 0.432 -1.460
0 0.545 0.887 0.211 -0.418 -0.350 0.026 0.250 -0.147 0.019 -1.517 0.667 0.181 1.002 0.000 -1.738 -0.358 -0.200 0.743 -2.070 0.041 0.514 -0.661 0.075 -0.627 -0.730 -0.408 -0.711 0.684
1 0.536 -0.572 0.087 -1.804 0.608 -0.537 -0.139 -0.153 -0.348 0.065 0.013 0.219 -1.258 0.772 -0.124 0.340 -0.129 -1.066 2.531 0.126 -1.578 -0.092 -0.392 -0.162 -1.636 0.030 -0.873 1.476
4 0.838 -0.215 0.633 -0.548 2.392 0.614 -0.484 -0.752 0.270 0.674 -1.308 0.011 0.743 0.334 0.624 1.491 -3.109 0.218 -2.351 -0.045 1.299 -0.129 -0.934 0.826 0.140 0.189 2.157 -1.507
0 -0.522 -0.747 -1.507 0.992 -0.175 -1.246 -0.826 -0.867 0.043 1.067 -0.348 -2.253 -0.189 -0.156 -0.225 -0.627 0.217 0.066 -0.025 0.413 -0.430 -0.487 1.380 0.559 0.827 1.226 0.526 0.928
1 -1.245 0.233 -0.608 -1.014 -1.340 0.432 -0.681 0.968 0.994 1.374 -0.852 0.475 0.632 -0.474 -0.772 1.610 0.227 0.636 -0.822 -1.219 -0.569 -0.124 -1.064 -0.010 -1.689 -1.120 0.671 0.604
2 -0.434 -0.066 0.195 -1.333 0.223 -0.053 0.217 -0.031 1.327 -1.374 0.632 -0.319 0.028 -1.506 0.006 -0.073 -2.104 0.099 0.248 -3.023 0.872 -0.298 0.729 0.074 2.087 -0.034 0.601 0.644
3 -0.175 -1.863 -0.183 1.162 0.215 0.406 -2.446 -0.610 -1.694 -2.428 0.273 0.058 -0.245 -1.572 -0.286 -0.763 -0.018 0.140 -0.449 0.737 0.094 1.058 -1.888 -0.571 1.352 -0.629 0.288 1.075
1 -0.232 -0.565 -1.591 -1.889 0.367 -0.228 -0.364 -1.542 -0.966 0.045 0.285 -0.164 1.379 -0.575 1.287 0.217 0.108 0.933 -0.624 -0.329 -1.224 0.935 0.336 1.098 0.205 1.103 -0.747 0.987
2 1.619 -0.162 0.339 -0.056 0.676 0.334 1.557 1.312 -0.012 -0.472 1.760 -1.064 0.264 0.669 0.050 -0.331 -0.020 0.089 1.809 0.425 2.391 -0.235 -1.628 -0.765 0.213 0.103 -1.509 0.164
0 0.370 -0.699 -0.101 -0.816 1.551 -1.002 -0.607 0.622 0.016 -0.129 1.228 0.358 -0.193 -0.425 -0.777 -0.698 -0.409 -0.433 -0.742 0.810 0.842 -0.905 1.065 -0.606 -1.555 -1.053 0.957 1.191
3 0.205 -1.623 1.888 -0.164 0.698 -0.836 0.308 -1.303 0.452 -0.356 0.029 0.698 -1.233 -0.684 0.457 2.098 -0.138 0.591 -0.107 1.942 -1.747 0.095 -1.626 -2.507 0.297 0.139 -0.285 -0.106
3 -0.416 0.312 1.420 0.813 -1.880 -0.988 -0.558 0.457 0.056 2.853 0.654 0.459 0.572 0.191 1.177 -0.226 -1.119 0.453 1.063 0.170 -0.338 2.583 -1.220 0.003 0.702 -0.107 0.093 -0.286
4 -0.921 -0.777 -1.711 0.134 -1.785 0.394 1.358 0.530 0.052 1.534 0.327 -0.863 0.125 1.960 -1.093 -1.380 0.615 -0.224 -0.532 -1.259 -1.950 -1.456 1.615 -0.009 -1.962 1.184 1.235 1.229
3 -0.829 0.322 0.559 0.453 -1.094 0.334 0.359 1.486 1.974 1.240 -0.458 -0.772 -0.201 -0.572 2.094 0.955 0.623 0.803 1.371 0.618 -0.191 0.742 0.734 -1.117 2.401 -0.614 1.542 -0.161
3 1.442 1.780 0.259 0.071 0.935 -0.483 0.247 -0.652 1.893 1.086 1.783 -0.491 0.908 0.423 0.306 1.656 -0.912 -0.606 -0.534 -1.444 -1.992 -0.170 0.744 -0.223 -0.818 1.214 -1.135 -1.849
1 1.325 0.748 0.994 -0.890 -0.252 -0.105 -0.447 1.784 -0.699 -0.562 -0.396 1.061 -0.023 -0.849 -0.144 -0.132 -1.515 0.114 0.249 -1.548 0.147 1.011 -0.748 0.310 1.565 1.681 0.672 1.508
0 -1.505 0.015 -0.808 -0.722 0.700 0.578 1.314 -0.298 -0.795 0.422 0.679 -0.324 -0.754 -0.137 0.392 -0.789 -0.324 0.097 -1.352 -0.607 -0.256 1.365 -0.513 -2.273 -0.531 1.089 -0.936 0.286
3 0.260 -0.371 2.266 0.695 -0.011 -0.260 1.020 1.328 -0.616 -0.834 0.164 -0.187 -0.998 -0.422 -0.638 -0.509 0.827 0.768 2.668 -1.461 0.729 1.168 -1.226 -2.096 -0.898 -1.047 -0.414 1.231
3 -0.866 0.564 1.425 0.734 1.879 -1.057 -0.227 -1.525 -0.472 0.512 0.263 -0.551 -0.126 -1.980 0.038 1.555 -1.544 -0.025 1.367 -1.022 -0.199 1.165 2.598 0.262 0.346 -1.172 0.364 -0.316
0 -0.974 1.010 0.665 -0.231 -0.460 0.661 -0.911 0.202 1.199 1.520 -0.525 -1.587 -0.707 0.816 -0.741 1.083 -0.743 -0.557 -0.173 0.945 -1.584 0.280 0.262 -0.991 0.356 -0.180 -1.131 -0.074
4 -0.291 1.698 -0.270 1.101 -1.821 -0.350 0.277 -1.081 -1.754 0.442 -0.826 0.350 -1.444 1.406 1.944 -2.082 -1.751 0.934 -1.213 1.036 -0.942 -0.020 -0.645 0.151 -0.848 -0.679 -0.426 -0.015
2 -1.142 -1.586 -0.808 -0.189 -0.517 0.844 -0.682 0.215 0.175 0.372 -0.433 1.340 -0.059 0.660 2.386 -0.840 -0.442 -1.162 -0.058 0.776 -1.145 0.318 1.413 0.969 1.873 -1.523 0.736 -0.315
0 0.253 -0.190 -0.797 1.564 -0.516 0.781 -2.029 -1.382 0.889 0.682 -0.187 0.814 -1.414 -0.787 1.108 0.040 0.553 0.226 -0.560 0.126 0.036 -0.177 0.197 0.942 1.057 -0.990 -0.163 -0.777
2 -1.338 -0.873 -1.331 0.375 1.038 1.136 -0.356 0.179 0.380 -0.569 -0.564 -0.826 -0.139 0.786 -0.938 0.178 -0.985 -1.254 -0.208 -1.031 0.785 -0.157 -0.623 -0.648 -0.156 0.385 -3.086 -1.114
0 -1.289 0.015 -0.487 0.119 1.136 -0.075 -0.551 0.029 0.913 -0.965 -1.784 -0.623 -0.490 -0.103 -0.456 -0.559 -1.031 -0.913 1.190 -0.486 -1.673 -0.668 1.015 -0.503 -0.078 0.225 -0.178 -0.910
2 0.201 0.607 -2.182 -1.625 0.151 -0.196 0.503 -0.394 0.343 0.422 0.350 0.893 0.399 -1.689 0.979 3.082 -0.518 -0.802 -0.498 -0.582 -0.315 0.311 1.139 -1.461 -0.100 0.067 -0.895 0.593
0 0.777 0.507 1.466 -0.544 -0.663 -0.368 -0.930 0.054 0.156 -0.333 0.547 0.576 -0.730 0.575 -1.154 -0.928 1.431 -0.227 -0.202 1.667 0.175 -0.259 -0.900 0.465 1.449 -0.067 1.289 -0.145
3 -1.876 0.351 1.424 -0.787 0.851 -0.336 1.364 -0.415 0.180 -0.497 0.991 0.606 -0.551 -0.427 -0.051 2.607 0.469 0.306 -1.440 -1.563 -0.164 0.945 2.076 -0.995 -1.076 -0.288 -0.981 1.142
2 1.061 0.520 0.511 -0.223 0.388 -2.143 0.398 1.658 0.781 0.664 0.773 1.209 -0.199 0.985 0.457 0.064 1.351 -0.688 -0.650 -2.357 -0.929 -0.440 -0.050 0.449 0.881 -1.329 0.110 -0.983
2 1.180 0.454 1.130 -1.472 -0.552 -0.260 0.543 -1.163 -0.439 0.155 0.182 0.341 1.583 1.291 -0.106 1.614 1.657 -0.995 1.940 -0.903 -0.205 0.363 0.877 1.189 0.277 -0.173 -1.655 0.421
0 0.236 0.083 0.749 0.570 -0.104 1.005 -0.676 -1.289 0.914 0.414 0.565 -0.691 -0.362 0.695 0.109 1.432 0.663 1.154 1.462 1.590 0.764 1.155 1.350 -0.192 0.423 -1.149 -0.279 -0.089
4 0.658 2.246 0.702 -0.745 -0.157 -0.507 0.008 1.098 -0.324 -0.569 0.516 -0.815 -0.874 0.382 -1.792 -0.483 0.856 -1.834 -1.761 -1.434 -2.731 1.897 0.120 -0.670 -1.290 2.201 0.655 -0.128
0 -1.052 0.224 0.280 0.521 -0.069 -0.542 -1.798 0.044 -0.232 -0.832 1.250 0.759 -0.427 -0.488 0.260 0.895 -1.341 -0.860 0.592 -1.029 0.788 -0.852 0.458 -0.103 0.693 -0.054 -1.045 -0.718
3 0.138 1.292 -0.173 -0.829 -0.892 0.478 -0.213 -0.379 0.815 1.254 -0.275 -1.102 1.148 1.875 -1.218 -0.565 1.919 -0.241 2.028 -0.149 0.388 -1.283 -2.092 -0.249 0.225 0.651 1.466 -0.339
0 -0.967 -0.161 -0.347 0.410 0.864 -1.448 -0.549 -0.429 -1.154 -0.856 0.741 0.401 -0.673 -0.573 1.188 0.919 0.041 -0.567 1.096 -0.241 -0.667 -0.981 -1.206 -1.037 0.321 0.315 1.866 1.170
4 -1.707 -1.098 -0.422 -0.491 -0.295 -0.560 -0.959 0.655 -0.988 -0.958 -0.921 1.196 -0.298 -2.335 -1.180 0.628 1.920 0.326 -0.210 -0.542 0.149 -0.962 2.833 -1.278 -1.122 -0.860 -1.374 0.042
1 0.258 0.532 0.505 -0.672 0.238 -1.295 -0.959 -0.087 -1.061 -1.273 -0.712 1.146 1.557 -0.033 0.582 -0.563 -2.066 -0.894 -0.308 0.141 -0.101 -0.282 -0.575 0.344 -1.364 0.802 -0.586 2.067
3 0.332 -0.110 0.581 -0.502 -1.101 0.406 0.268 -2.547 -1.181 -0.672 0.525 -1.523 -0.320 1.938 0.067 0.129 -0.061 0.982 0.042 -0.220 0.152 1.326 0.402 0.057 1.178 -0.522 -3.202 0.448
1 1.600 1.122 0.891 0.269 -0.628 -0.515 0.235 0.439 -0.969 1.246 1.408 0.794 1.504 0.638 0.274 0.051 -0.317 -0.005 -1.041 -0.084 -1.168 -0.801 1.229 1.182 -0.837 0.870 -0.588 -0.664
0 -0.011 0.936 0.701 0.123 -1.090 -1.435 0.107 -0.543 0.919 0.746 -0.185 -1.158 0.511 -1.018 -0.758 -0.979 -0.139 1.170 -0.005 -0.443 -1.526 0.552 -0.780 0.118 -0.354 0.838 -0.157 -0.059
4 -0.311 0.154 2.081 -0.282 -0.247 0.150 -0.183 0.791 1.469 -0.247 -0.857 1.975 0.120 -1.705 1.862 1.126 1.601 1.434 2.645 -1.037 -0.796 -0.492 -0.521 0.123 -0.490 -1.190 -0.874 -0.294
4 -0.942 -1.023 2.026 -0.379 -1.015 1.190 -0.147 -2.027 2.083 0.584 -1.512 1.396 -1.048 -1.108 -0.892 1.940 2.045 -0.548 1.706 -2.287 0.914 1.364 1.506 0.077 0.566 -0.717 0.025 -0.164
2 -0.360 -1.173 0.666 0.814 -0.539 -0.512 -0.852 0.705 1.295 -0.967 -0.927 -0.148 0.830 0.504 1.183 -2.353 -0.468 1.076 1.681 -0.197 -0.516 -0.856 0.727 1.197 0.339 -2.218 0.408 -0.500
1 0.453 1.017 1.868 -1.376 0.597 1.034 1.105 -1.139 1.256 0.156 -0.211 0.287 -0.266 -0.259 -0.451 0.107 0.702 -0.297 -1.193 -0.939 0.874 -1.344 -0.801 -0.278 0.265 -1.082 1.048 1.836
4 0.702 -0.454 -2.021 0.657 1.248 -0.903 0.433 1.218 -0.679 -1.910 1.307 -1.562 0.169 0.553 -0.261 2.156 0.116 0.523 -0.373 0.364 0.432 -0.234 -0.911 -1.357 -0.717 1.679 -1.711 1.624
3 -0.730 1.274 -0.622 0.031 1.119 -0.737 -0.527 1.498 -0.446 1.457 -1.048 -0.463 0.045 -1.769 -0.424 0.207 -0.157 -0.243 2.189 -1.105 1.988 1.169 0.416 -0.601 -2.108 -0.792 1.057 -1.369

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -0,0 +1,5 @@
task = predict
data = multiclass.test
input_model = LightGBM_model.txt

Просмотреть файл

@ -0,0 +1,56 @@
# task type, support train and predict
task = train
# boosting type, support gbdt for now, alias: boosting, boost
boosting_type = gbdt
# application type, support following application
# regression , regression task
# binary , binary classification task
# lambdarank , lambdarank task
# multiclass
# alias: application, app
objective = multiclass
# eval metrics, support multi metric, delimited by ',', support following metrics
# l1
# l2 , default metric for regression
# ndcg , default metric for lambdarank
# auc
# binary_logloss , default metric for binary
# binary_error
# multi_logloss
# multi_error
metric = multi_logloss
# number of class, for multiclass classification
num_class = 5
# frequency for metric output
metric_freq = 1
# true if need output metric for training data, alias: tranining_metric, train_metric
is_training_metric = true
# number of bins for feature bucket, 255 is a recommended setting; it can save memory, and also has good accuracy.
max_bin = 255
# training data
# if a weight file exists, it should be named "multiclass.train.weight"
# alias: train_data, train
data = multiclass.train
# valid data
valid_data = multiclass.test
# round for early stopping
early_stopping = 10
# number of trees(iterations), alias: num_tree, num_iteration, num_iterations, num_round, num_rounds
num_trees = 100
# shrinkage rate , alias: shrinkage_rate
learning_rate = 0.05
# number of leaves for one tree, alias: num_leaf
num_leaves = 31

Просмотреть файл

@ -80,6 +80,13 @@ public:
const float* feature_values,
int num_used_model) const = 0;
/*!
* \brief Prediction for multiclass classification
* \param value Feature values of this record
* \param num_used_model Number of sub-models used for prediction
* \return Prediction result, num_class numbers per line
*/
virtual std::vector<float> PredictMulticlass(const float* value, int num_used_model) const = 0;
/*!
* \brief save model to file
*/
@ -108,7 +115,13 @@ public:
* \return Number of weak sub-models
*/
virtual int NumberOfSubModels() const = 0;
/*!
* \brief Get number of classes
* \return Number of classes
*/
virtual int NumberOfClass() const = 0;
/*!
* \brief Get Type name of this boosting object
*/

Просмотреть файл

@ -128,6 +128,8 @@ public:
int max_position = 20;
// for binary
bool is_unbalance = false;
// for multiclass
int num_class = 1;
void Set(const std::unordered_map<std::string, std::string>& params) override;
};
@ -135,6 +137,7 @@ public:
struct MetricConfig: public ConfigBase {
public:
virtual ~MetricConfig() {}
int num_class = 1;
float sigmoid = 1.0f;
std::vector<float> label_gain;
std::vector<int> eval_at;
@ -179,6 +182,7 @@ public:
int bagging_seed = 3;
int bagging_freq = 0;
int early_stopping_round = 0;
int num_class = 1;
void Set(const std::unordered_map<std::string, std::string>& params) override;
};

Просмотреть файл

@ -336,6 +336,26 @@ static inline int64_t Pow2RoundUp(int64_t x) {
return 0;
}
/*!
* \brief Do inplace softmax transformation on p_rec
* \param p_rec The input/output vector of the values; an empty vector is a no-op.
*/
inline void Softmax(std::vector<float>* p_rec) {
std::vector<float> &rec = *p_rec;
// Guard: the original indexed rec[0] unconditionally, crashing on empty input.
if (rec.empty()) { return; }
// Subtract the maximum before exponentiating for numerical stability:
// every exponent is <= 0, so std::exp cannot overflow.
const float wmax = *std::max_element(rec.begin(), rec.end());
float wsum = 0.0f;
for (size_t i = 0; i < rec.size(); ++i) {
rec[i] = std::exp(rec[i] - wmax);
wsum += rec[i];
}
// wsum >= exp(0) = 1 (the max element contributes exactly 1), so division is safe.
for (size_t i = 0; i < rec.size(); ++i) {
rec[i] /= wsum;
}
}
} // namespace Common
} // namespace LightGBM

Просмотреть файл

@ -33,6 +33,7 @@ public:
num_used_model_(num_used_model) {
boosting_ = boosting;
num_features_ = boosting_->MaxFeatureIdx() + 1;
num_class_ = boosting_->NumberOfClass();
#pragma omp parallel
#pragma omp master
{
@ -87,6 +88,18 @@ public:
// get result with sigmoid transform if needed
return boosting_->Predict(features_[tid], num_used_model_);
}
/*!
* \brief Prediction for multiclass classification on one record
* \param features Sparse (feature_index, value) pairs of this record
* \return Prediction result, one score per class (any transform, e.g. softmax,
*         is applied by the boosting object — confirm in its PredictMulticlass)
*/
std::vector<float> PredictMulticlassOneLine(const std::vector<std::pair<int, float>>& features) {
// Copy the sparse features into this thread's dense buffer; tid selects the slot in features_.
const int tid = PutFeatureValuesToBuffer(features);
// Delegate to the boosting object for the per-class scores.
return boosting_->PredictMulticlass(features_[tid], num_used_model_);
}
/*!
* \brief predicting on data, then saving result to disk
* \param data_filename Filename of data
@ -120,17 +133,30 @@ public:
};
std::function<std::string(const std::vector<std::pair<int, float>>&)> predict_fun;
if (is_predict_leaf_index_) {
if (num_class_ > 1) {
predict_fun = [this](const std::vector<std::pair<int, float>>& features){
std::vector<float> prediction = PredictMulticlassOneLine(features);
std::stringstream result_stream_buf;
for (size_t i = 0; i < prediction.size(); ++i){
if (i > 0) {
result_stream_buf << '\t';
}
result_stream_buf << prediction[i];
}
return result_stream_buf.str();
};
}
else if (is_predict_leaf_index_) {
predict_fun = [this](const std::vector<std::pair<int, float>>& features){
std::vector<int> predicted_leaf_index = PredictLeafIndexOneLine(features);
std::stringstream result_ss;
std::stringstream result_stream_buf;
for (size_t i = 0; i < predicted_leaf_index.size(); ++i){
if (i > 0) {
result_ss << '\t';
result_stream_buf << '\t';
}
result_ss << predicted_leaf_index[i];
result_stream_buf << predicted_leaf_index[i];
}
return result_ss.str();
return result_stream_buf.str();
};
}
else {
@ -189,6 +215,8 @@ private:
float** features_;
/*! \brief Number of features */
int num_features_;
/*! \brief Number of classes */
int num_class_;
/*! \brief True if need to predict result with sigmoid transform */
bool is_simgoid_;
/*! \brief Number of threads */

Просмотреть файл

@ -17,13 +17,15 @@
namespace LightGBM {
GBDT::GBDT()
: tree_learner_(nullptr), train_score_updater_(nullptr),
: train_score_updater_(nullptr),
gradients_(nullptr), hessians_(nullptr),
out_of_bag_data_indices_(nullptr), bag_data_indices_(nullptr) {
}
GBDT::~GBDT() {
if (tree_learner_ != nullptr) { delete tree_learner_; }
for (auto& tree_learner: tree_learner_){
if (tree_learner != nullptr) { delete tree_learner; }
}
if (gradients_ != nullptr) { delete[] gradients_; }
if (hessians_ != nullptr) { delete[] hessians_; }
if (out_of_bag_data_indices_ != nullptr) { delete[] out_of_bag_data_indices_; }
@ -44,23 +46,27 @@ void GBDT::Init(const BoostingConfig* config, const Dataset* train_data, const O
max_feature_idx_ = 0;
early_stopping_round_ = gbdt_config_->early_stopping_round;
train_data_ = train_data;
num_class_ = config->num_class;
tree_learner_ = std::vector<TreeLearner*>(num_class_, nullptr);
// create tree learner
tree_learner_ =
TreeLearner::CreateTreeLearner(gbdt_config_->tree_learner_type, gbdt_config_->tree_config);
// init tree learner
tree_learner_->Init(train_data_);
for (int i = 0; i < num_class_; ++i){
tree_learner_[i] =
TreeLearner::CreateTreeLearner(gbdt_config_->tree_learner_type, gbdt_config_->tree_config);
// init tree learner
tree_learner_[i]->Init(train_data_);
}
object_function_ = object_function;
// push training metrics
for (const auto& metric : training_metrics) {
training_metrics_.push_back(metric);
}
// create score tracker
train_score_updater_ = new ScoreUpdater(train_data_);
train_score_updater_ = new ScoreUpdater(train_data_, num_class_);
num_data_ = train_data_->num_data();
// create buffer for gradients and hessians
if (object_function_ != nullptr) {
gradients_ = new score_t[num_data_];
hessians_ = new score_t[num_data_];
gradients_ = new score_t[num_data_ * num_class_];
hessians_ = new score_t[num_data_ * num_class_];
}
// get max feature index
@ -85,7 +91,7 @@ void GBDT::Init(const BoostingConfig* config, const Dataset* train_data, const O
void GBDT::AddDataset(const Dataset* valid_data,
const std::vector<const Metric*>& valid_metrics) {
// for a validation dataset, we need its score and metric
valid_score_updater_.push_back(new ScoreUpdater(valid_data));
valid_score_updater_.push_back(new ScoreUpdater(valid_data, num_class_));
valid_metrics_.emplace_back();
best_iter_.emplace_back();
best_score_.emplace_back();
@ -97,7 +103,7 @@ void GBDT::AddDataset(const Dataset* valid_data,
}
void GBDT::Bagging(int iter) {
void GBDT::Bagging(int iter, const int curr_class) {
// if need bagging
if (out_of_bag_data_indices_ != nullptr && iter % gbdt_config_->bagging_freq == 0) {
// if doesn't have query data
@ -146,52 +152,59 @@ void GBDT::Bagging(int iter) {
}
Log::Info("re-bagging, using %d data to train", bag_data_cnt_);
// set bagging data to tree learner
tree_learner_->SetBaggingData(bag_data_indices_, bag_data_cnt_);
tree_learner_[curr_class]->SetBaggingData(bag_data_indices_, bag_data_cnt_);
}
}
void GBDT::UpdateScoreOutOfBag(const Tree* tree) {
void GBDT::UpdateScoreOutOfBag(const Tree* tree, const int curr_class) {
// we need to predict out-of-bag scores of data for boosting
if (out_of_bag_data_indices_ != nullptr) {
train_score_updater_->
AddScore(tree, out_of_bag_data_indices_, out_of_bag_data_cnt_);
AddScore(tree, out_of_bag_data_indices_, out_of_bag_data_cnt_, curr_class);
}
}
bool GBDT::TrainOneIter(const score_t* gradient, const score_t* hessian, bool is_eval) {
// boosting first
if (gradient == nullptr || hessian == nullptr) {
Boosting();
gradient = gradients_;
hessian = hessians_;
}
// bagging logic
Bagging(iter_);
// train a new tree
Tree * new_tree = tree_learner_->Train(gradient, hessian);
// if cannot learn a new tree, then stop
if (new_tree->num_leaves() <= 1) {
Log::Info("Can't training anymore, there isn't any leaf meets split requirements.");
return true;
}
// shrinkage by learning rate
new_tree->Shrinkage(gbdt_config_->learning_rate);
// update score
UpdateScore(new_tree);
UpdateScoreOutOfBag(new_tree);
// boosting first
if (gradient == nullptr || hessian == nullptr) {
Boosting();
gradient = gradients_;
hessian = hessians_;
}
for (int curr_class = 0; curr_class < num_class_; ++curr_class){
// bagging logic
Bagging(iter_, curr_class);
// train a new tree
Tree * new_tree = tree_learner_[curr_class]->Train(gradient + curr_class * num_data_, hessian+ curr_class * num_data_);
// if cannot learn a new tree, then stop
if (new_tree->num_leaves() <= 1) {
Log::Info("Can't training anymore, there isn't any leaf meets split requirements.");
return true;
}
// shrinkage by learning rate
new_tree->Shrinkage(gbdt_config_->learning_rate);
// update score
UpdateScore(new_tree, curr_class);
UpdateScoreOutOfBag(new_tree, curr_class);
// add model
models_.push_back(new_tree);
}
bool is_met_early_stopping = false;
// print message for metric
if (is_eval) {
is_met_early_stopping = OutputMetric(iter_ + 1);
}
// add model
models_.push_back(new_tree);
++iter_;
if (is_met_early_stopping) {
Log::Info("Early stopping at iteration %d, the best iteration round is %d",
iter_, iter_ - early_stopping_round_);
// pop last early_stopping_round_ models
for (int i = 0; i < early_stopping_round_; ++i) {
for (int i = 0; i < early_stopping_round_ * num_class_; ++i) {
delete models_.back();
models_.pop_back();
}
@ -200,12 +213,12 @@ bool GBDT::TrainOneIter(const score_t* gradient, const score_t* hessian, bool is
}
void GBDT::UpdateScore(const Tree* tree) {
void GBDT::UpdateScore(const Tree* tree, const int curr_class) {
// update training score
train_score_updater_->AddScore(tree_learner_);
train_score_updater_->AddScore(tree_learner_[curr_class], curr_class);
// update validation score
for (auto& score_tracker : valid_score_updater_) {
score_tracker->AddScore(tree);
score_tracker->AddScore(tree, curr_class);
}
}
@ -298,6 +311,8 @@ void GBDT::SaveModelToFile(bool is_finish, const char* filename) {
model_output_file_.open(filename);
// output model type
model_output_file_ << "gbdt" << std::endl;
// output number of class
model_output_file_ << "num_class=" << num_class_ << std::endl;
// output label index
model_output_file_ << "label_index=" << label_idx_ << std::endl;
// output max_feature_idx
@ -311,7 +326,7 @@ void GBDT::SaveModelToFile(bool is_finish, const char* filename) {
if (!model_output_file_.is_open()) {
return;
}
int rest = static_cast<int>(models_.size()) - early_stopping_round_;
int rest = static_cast<int>(models_.size()) - early_stopping_round_ * num_class_;
// output tree models
for (int i = saved_model_size_; i < rest; ++i) {
model_output_file_ << "Tree=" << i << std::endl;
@ -337,8 +352,26 @@ void GBDT::ModelsFromString(const std::string& model_str) {
models_.clear();
std::vector<std::string> lines = Common::Split(model_str.c_str(), '\n');
size_t i = 0;
// get number of class
while (i < lines.size()) {
size_t find_pos = lines[i].find("num_class=");
if (find_pos != std::string::npos) {
std::vector<std::string> strs = Common::Split(lines[i].c_str(), '=');
Common::Atoi(strs[1].c_str(), &num_class_);
++i;
break;
} else {
++i;
}
}
if (i == lines.size()) {
Log::Fatal("Model file doesn't contain number of class");
return;
}
// get index of label
i = 0;
while (i < lines.size()) {
size_t find_pos = lines[i].find("label_index=");
if (find_pos != std::string::npos) {
@ -460,6 +493,20 @@ float GBDT::Predict(const float* value, int num_used_model) const {
return ret;
}
std::vector<float> GBDT::PredictMulticlass(const float* value, int num_used_model) const {
  // A negative count means "use every stored iteration"; each boosting
  // iteration stores num_class_ trees, one per class, back to back.
  if (num_used_model < 0) {
    num_used_model = static_cast<int>(models_.size()) / num_class_;
  }
  std::vector<float> raw_score(num_class_, 0.0f);
  for (int iter = 0; iter < num_used_model; ++iter) {
    const int offset = iter * num_class_;
    for (int cls = 0; cls < num_class_; ++cls) {
      raw_score[cls] += models_[offset + cls]->Predict(value);
    }
  }
  // convert accumulated per-class raw scores into probabilities
  Common::Softmax(&raw_score);
  return raw_score;
}
std::vector<int> GBDT::PredictLeafIndex(const float* value, int num_used_model) const {
if (num_used_model < 0) {
num_used_model = static_cast<int>(models_.size());

Просмотреть файл

@ -68,13 +68,20 @@ public:
*/
float Predict(const float* feature_values, int num_used_model) const override;
/*!
* \brief Prediction for multiclass classification
* \param feature_values Feature value on this record
* \return Prediction result, num_class numbers per line
*/
std::vector<float> PredictMulticlass(const float* value, int num_used_model) const override;
/*!
* \brief Prediction for one record with leaf index
* \param feature_values Feature value on this record
* \param num_used_model Number of used model
* \return Predicted leaf index for this record
*/
std::vector<int> PredictLeafIndex(const float* value, int num_used_model) const override;
std::vector<int> PredictLeafIndex(const float* value, int num_used_model) const override;
/*!
* \brief Serialize models by string
@ -103,6 +110,12 @@ public:
*/
inline int NumberOfSubModels() const override { return static_cast<int>(models_.size()); }
/*!
* \brief Get number of classes
* \return Number of classes
*/
inline int NumberOfClass() const override { return num_class_; }
/*!
* \brief Get Type name of this boosting object
*/
@ -112,14 +125,16 @@ private:
/*!
* \brief Implement bagging logic
* \param iter Current iteration
* \param curr_class Current class for multiclass training
*/
void Bagging(int iter);
void Bagging(int iter, const int curr_class);
/*!
* \brief updating score for out-of-bag data.
* Data should be update since we may re-bagging data on training
* \param tree Trained tree of this iteration
* \param curr_class Current class for multiclass training
*/
void UpdateScoreOutOfBag(const Tree* tree);
void UpdateScoreOutOfBag(const Tree* tree, const int curr_class);
/*!
* \brief calculate the object function
*/
@ -127,8 +142,9 @@ private:
/*!
* \brief updating score after tree was trained
* \param tree Trained tree of this iteration
* \param curr_class Current class for multiclass training
*/
void UpdateScore(const Tree* tree);
void UpdateScore(const Tree* tree, const int curr_class);
/*!
* \brief Print metric result of current iteration
* \param iter Current iteration
@ -146,7 +162,7 @@ private:
/*! \brief Config of gbdt */
const GBDTConfig* gbdt_config_;
/*! \brief Tree learner, will use this class to learn trees */
TreeLearner* tree_learner_;
std::vector<TreeLearner*> tree_learner_;
/*! \brief Objective function */
const ObjectiveFunction* object_function_;
/*! \brief Store and update training data's score */
@ -180,6 +196,8 @@ private:
data_size_t bag_data_cnt_;
/*! \brief Number of traning data */
data_size_t num_data_;
/*! \brief Number of classes */
int num_class_;
/*! \brief Random generator, used for bagging */
Random random_;
/*!

Просмотреть файл

@ -18,12 +18,12 @@ public:
* \brief Constructor, will pass a const pointer of dataset
* \param data This class will bind with this data set
*/
explicit ScoreUpdater(const Dataset* data)
explicit ScoreUpdater(const Dataset* data, int num_class)
:data_(data) {
num_data_ = data->num_data();
score_ = new score_t[num_data_];
score_ = new score_t[num_data_ * num_class];
// default start score is zero
std::memset(score_, 0, sizeof(score_t)*num_data_);
std::memset(score_, 0, sizeof(score_t) * num_data_ * num_class);
const score_t* init_score = data->metadata().init_score();
// if exists initial score, will start from it
if (init_score != nullptr) {
@ -41,8 +41,8 @@ public:
* Note: this function generally will be used on validation data too.
* \param tree Trained tree model
*/
inline void AddScore(const Tree* tree) {
tree->AddPredictionToScore(data_, num_data_, score_);
inline void AddScore(const Tree* tree, int curr_class) {
tree->AddPredictionToScore(data_, num_data_, score_ + curr_class * num_data_);
}
/*!
* \brief Adding prediction score, only used for training data.
@ -50,8 +50,8 @@ public:
* Based on which we can get predictions quickly.
* \param tree_learner
*/
inline void AddScore(const TreeLearner* tree_learner) {
tree_learner->AddPredictionToScore(score_);
inline void AddScore(const TreeLearner* tree_learner, int curr_class) {
tree_learner->AddPredictionToScore(score_ + curr_class * num_data_);
}
/*!
* \brief Using tree model to get prediction number, then adding to scores for parts of data
@ -61,8 +61,8 @@ public:
* \param data_cnt Number of data that will be proccessed
*/
inline void AddScore(const Tree* tree, const data_size_t* data_indices,
data_size_t data_cnt) {
tree->AddPredictionToScore(data_, data_indices, data_cnt, score_);
data_size_t data_cnt, int curr_class) {
tree->AddPredictionToScore(data_, data_indices, data_cnt, score_ + curr_class * num_data_);
}
/*! \brief Pointer of score */
inline const score_t * score() { return score_; }
@ -72,7 +72,7 @@ private:
data_size_t num_data_;
/*! \brief Pointer of data set */
const Dataset* data_;
/*! \brief scores for data set */
/*! \brief Scores for data set */
score_t* score_;
};

Просмотреть файл

@ -46,7 +46,6 @@ void OverallConfig::Set(const std::unordered_map<std::string, std::string>& para
boosting_config = new GBDTConfig();
}
// sub-config setup
network_config.Set(params);
io_config.Set(params);
@ -132,7 +131,29 @@ void OverallConfig::GetTaskType(const std::unordered_map<std::string, std::strin
}
void OverallConfig::CheckParamConflict() {
GBDTConfig* gbdt_config = dynamic_cast<GBDTConfig*>(boosting_config);
GBDTConfig* gbdt_config = dynamic_cast<GBDTConfig*>(boosting_config);
// check if objective_type, metric_type, and num_class match
bool objective_type_multiclass = (objective_type == std::string("multiclass"));
int num_class_check = gbdt_config->num_class;
if (objective_type_multiclass){
if (num_class_check <= 1){
Log::Fatal("You should specify number of class(>=2) for multiclass training.");
}
}
else {
if (task_type == TaskType::kTrain && num_class_check != 1){
Log::Fatal("Number of class must be 1 for non-multiclass training.");
}
}
for (std::string metric_type : metric_types){
bool metric_type_multiclass = ( metric_type == std::string("multi_logloss") || metric_type == std::string("multi_error"));
if ((objective_type_multiclass && !metric_type_multiclass)
|| (!objective_type_multiclass && metric_type_multiclass)){
Log::Fatal("Objective and metrics don't match.");
}
}
if (network_config.num_machines > 1) {
is_parallel = true;
} else {
@ -196,6 +217,8 @@ void ObjectiveConfig::Set(const std::unordered_map<std::string, std::string>& pa
GetFloat(params, "sigmoid", &sigmoid);
GetInt(params, "max_position", &max_position);
CHECK(max_position > 0);
GetInt(params, "num_class", &num_class);
CHECK(num_class >= 1);
std::string tmp_str = "";
if (GetString(params, "label_gain", &tmp_str)) {
label_gain = Common::StringToFloatArray(tmp_str, ',');
@ -212,6 +235,8 @@ void ObjectiveConfig::Set(const std::unordered_map<std::string, std::string>& pa
void MetricConfig::Set(const std::unordered_map<std::string, std::string>& params) {
GetFloat(params, "sigmoid", &sigmoid);
GetInt(params, "num_class", &num_class);
CHECK(num_class >= 1);
std::string tmp_str = "";
if (GetString(params, "label_gain", &tmp_str)) {
label_gain = Common::StringToFloatArray(tmp_str, ',');
@ -268,6 +293,8 @@ void BoostingConfig::Set(const std::unordered_map<std::string, std::string>& par
GetInt(params, "metric_freq", &output_freq);
CHECK(output_freq >= 0);
GetBool(params, "is_training_metric", &is_provide_training_metric);
GetInt(params, "num_class", &num_class);
CHECK(num_class >= 1);
}
void GBDTConfig::GetTreeLearnerType(const std::unordered_map<std::string, std::string>& params) {

Просмотреть файл

@ -2,6 +2,7 @@
#include "regression_metric.hpp"
#include "binary_metric.hpp"
#include "rank_metric.hpp"
#include "multiclass_metric.hpp"
namespace LightGBM {
@ -18,6 +19,10 @@ Metric* Metric::CreateMetric(const std::string& type, const MetricConfig& config
return new AUCMetric(config);
} else if (type == "ndcg") {
return new NDCGMetric(config);
} else if (type == "multi_logloss"){
return new MultiLoglossMetric(config);
} else if (type == "multi_error"){
return new MultiErrorMetric(config);
}
return nullptr;
}

Просмотреть файл

@ -0,0 +1,138 @@
#ifndef LIGHTGBM_METRIC_MULTICLASS_METRIC_HPP_
#define LIGHTGBM_METRIC_MULTICLASS_METRIC_HPP_
#include <LightGBM/utils/log.h>
#include <LightGBM/metric.h>
#include <cmath>
namespace LightGBM {
/*!
* \brief Metric for multiclass task.
* Use static class "PointWiseLossCalculator" to calculate loss point-wise
*/
template<typename PointWiseLossCalculator>
class MulticlassMetric: public Metric {
public:
  /*!
  * \brief Constructor
  * \param config Metric config; only num_class is read here
  */
  explicit MulticlassMetric(const MetricConfig& config) {
    num_class_ = config.num_class;
  }
  virtual ~MulticlassMetric() {
  }
  /*!
  * \brief Bind this metric to one dataset
  * \param test_name Name of the test set, used to build the display name
  * \param metadata Provider of labels and (optional) per-record weights
  * \param num_data Number of records in the dataset
  */
  void Init(const char* test_name, const Metadata& metadata, data_size_t num_data) override {
    std::stringstream str_buf;
    str_buf << test_name << "'s " << PointWiseLossCalculator::Name();
    name_ = str_buf.str();
    num_data_ = num_data;
    // get label
    label_ = metadata.label();
    // get weights
    weights_ = metadata.weights();
    if (weights_ == nullptr) {
      // unweighted: every record contributes 1 to the average's denominator
      sum_weights_ = static_cast<float>(num_data_);
    } else {
      sum_weights_ = 0.0f;
      for (data_size_t i = 0; i < num_data_; ++i) {
        sum_weights_ += weights_[i];
      }
    }
  }
  const char* GetName() const override {
    return name_.c_str();
  }
  bool is_bigger_better() const override {
    // multiclass metrics here are losses: smaller is better
    return false;
  }
  /*!
  * \brief Average point-wise loss over all records
  * \param score Raw scores laid out class-major: score[k * num_data_ + i]
  * \return One-element vector holding the (weight-)averaged loss
  */
  std::vector<score_t> Eval(const score_t* score) const override {
    score_t sum_loss = 0.0;
    if (weights_ == nullptr) {
      #pragma omp parallel for schedule(static) reduction(+:sum_loss)
      for (data_size_t i = 0; i < num_data_; ++i) {
        // gather this record's per-class scores from the class-major layout
        std::vector<score_t> rec(num_class_);
        for (int k = 0; k < num_class_; ++k) {
          rec[k] = score[k * num_data_ + i];
        }
        // add loss
        sum_loss += PointWiseLossCalculator::LossOnPoint(label_[i], rec);
      }
    } else {
      #pragma omp parallel for schedule(static) reduction(+:sum_loss)
      for (data_size_t i = 0; i < num_data_; ++i) {
        std::vector<score_t> rec(num_class_);
        for (int k = 0; k < num_class_; ++k) {
          rec[k] = score[k * num_data_ + i];
        }
        // add loss, scaled by this record's weight
        sum_loss += PointWiseLossCalculator::LossOnPoint(label_[i], rec) * weights_[i];
      }
    }
    score_t loss = sum_loss / sum_weights_;
    return std::vector<score_t>(1, loss);
  }

private:
  /*! \brief Number of data */
  data_size_t num_data_;
  /*! \brief Number of classes */
  int num_class_;
  /*! \brief Pointer of label */
  const float* label_;
  /*! \brief Pointer of weights */
  const float* weights_;
  /*! \brief Sum weights */
  float sum_weights_;
  /*! \brief Name of this test set */
  std::string name_;
};
/*! \brief 0-1 classification error for multiclass task */
class MultiErrorMetric: public MulticlassMetric<MultiErrorMetric> {
public:
  explicit MultiErrorMetric(const MetricConfig& config) :MulticlassMetric<MultiErrorMetric>(config) {}
  /*!
  * \brief 0-1 loss on one record: 1 when misclassified, 0 when correct.
  *        Lower is better, consistent with is_bigger_better() == false.
  *        (The previous version returned 1 on a correct prediction, i.e.
  *        accuracy, which is inverted for a loss.)
  * \param label True class label (non-negative integer stored as float)
  * \param score Per-class raw scores; ties are resolved in favor of the label
  */
  inline static score_t LossOnPoint(float label, std::vector<score_t> score) {
    size_t k = static_cast<size_t>(label);
    for (size_t i = 0; i < score.size(); ++i) {
      if (i != k && score[i] > score[k]) {
        // some other class strictly outranks the true label -> an error
        return 1.0f;
      }
    }
    // the true class has the (tied-)highest score -> no error
    return 0.0f;
  }
  inline static const char* Name() {
    return "multi error";
  }
};
/*! \brief Logloss for multiclass task */
class MultiLoglossMetric: public MulticlassMetric<MultiLoglossMetric> {
public:
  explicit MultiLoglossMetric(const MetricConfig& config) :MulticlassMetric<MultiLoglossMetric>(config) {}
  /*!
  * \brief Negative log-likelihood of the true class on one record
  * \param label True class label (non-negative integer stored as float)
  * \param score Per-class raw scores; converted to probabilities in place
  */
  inline static score_t LossOnPoint(float label, std::vector<score_t> score) {
    // turn the raw scores into a probability distribution
    Common::Softmax(&score);
    const size_t true_class = static_cast<size_t>(label);
    const score_t prob = score[true_class];
    // clip at kEpsilon so the logarithm never blows up near zero
    return prob > kEpsilon ? -std::log(prob) : -std::log(kEpsilon);
  }
  inline static const char* Name() {
    return "multi logloss";
  }
};
} // namespace LightGBM
#endif   // LIGHTGBM_METRIC_MULTICLASS_METRIC_HPP_

Просмотреть файл

@ -8,7 +8,7 @@
namespace LightGBM {
/*!
* \brief Objective funtion for binary classification
* \brief Objective function for binary classification
*/
class BinaryLogloss: public ObjectiveFunction {
public:

Просмотреть файл

@ -0,0 +1,95 @@
#ifndef LIGHTGBM_OBJECTIVE_MULTICLASS_OBJECTIVE_HPP_
#define LIGHTGBM_OBJECTIVE_MULTICLASS_OBJECTIVE_HPP_
#include <LightGBM/objective_function.h>
#include <cstring>
#include <cmath>
namespace LightGBM {
/*!
* \brief Objective function for multiclass classification
*/
class MulticlassLogloss: public ObjectiveFunction {
public:
  /*!
  * \brief Constructor
  * \param config Objective config; only num_class is read here
  */
  explicit MulticlassLogloss(const ObjectiveConfig& config) {
    num_class_ = config.num_class;
  }
  ~MulticlassLogloss() {
  }
  /*!
  * \brief Bind this objective to training data and cache integer labels
  * \param metadata Provider of labels and (optional) per-record weights
  * \param num_data Number of training records
  */
  void Init(const Metadata& metadata, data_size_t num_data) override {
    num_data_ = num_data;
    label_ = metadata.label();
    weights_ = metadata.weights();
    // std::vector owns the buffer (Rule of Zero): no manual new[]/delete[],
    // and copying the objective can no longer double-free
    label_int_.resize(num_data_);
    for (data_size_t i = 0; i < num_data_; ++i) {
      label_int_[i] = static_cast<int>(label_[i]);
      if (label_int_[i] < 0 || label_int_[i] >= num_class_) {
        Log::Fatal("Label must be in [0, %d), but find %d in label", num_class_, label_int_[i]);
      }
    }
  }
  /*!
  * \brief Softmax cross-entropy gradients and hessians
  * \param score Raw scores laid out class-major: score[k * num_data_ + i]
  * \param gradients Output gradients, same class-major layout as score
  * \param hessians Output hessians, same class-major layout as score
  */
  void GetGradients(const score_t* score, score_t* gradients, score_t* hessians) const override {
    if (weights_ == nullptr) {
      #pragma omp parallel for schedule(static)
      for (data_size_t i = 0; i < num_data_; ++i) {
        std::vector<score_t> rec(num_class_);
        for (int k = 0; k < num_class_; ++k) {
          rec[k] = score[k * num_data_ + i];
        }
        Common::Softmax(&rec);
        for (int k = 0; k < num_class_; ++k) {
          const score_t p = rec[k];
          // gradient of -log softmax: p - 1 for the true class, p otherwise
          if (label_int_[i] == k) {
            gradients[k * num_data_ + i] = p - 1.0f;
          } else {
            gradients[k * num_data_ + i] = p;
          }
          hessians[k * num_data_ + i] = 2.0f * p * (1.0f - p);
        }
      }
    } else {
      #pragma omp parallel for schedule(static)
      for (data_size_t i = 0; i < num_data_; ++i) {
        std::vector<score_t> rec(num_class_);
        for (int k = 0; k < num_class_; ++k) {
          rec[k] = score[k * num_data_ + i];
        }
        Common::Softmax(&rec);
        for (int k = 0; k < num_class_; ++k) {
          // use score_t consistently with the unweighted branch (was float)
          const score_t p = rec[k];
          if (label_int_[i] == k) {
            gradients[k * num_data_ + i] = (p - 1.0f) * weights_[i];
          } else {
            gradients[k * num_data_ + i] = p * weights_[i];
          }
          hessians[k * num_data_ + i] = 2.0f * p * (1.0f - p) * weights_[i];
        }
      }
    }
  }
  float GetSigmoid() const override {
    // -1 appears to signal "no sigmoid transform needed" — TODO confirm against callers
    return -1.0f;
  }

private:
  /*! \brief Number of data */
  data_size_t num_data_;
  /*! \brief Number of classes */
  int num_class_;
  /*! \brief Pointer of label */
  const float* label_;
  /*! \brief Labels converted to integer class ids (owned) */
  std::vector<int> label_int_;
  /*! \brief Weights for data */
  const float* weights_;
};
} // namespace LightGBM
#endif   // LIGHTGBM_OBJECTIVE_MULTICLASS_OBJECTIVE_HPP_

Просмотреть файл

@ -2,6 +2,7 @@
#include "regression_objective.hpp"
#include "binary_objective.hpp"
#include "rank_objective.hpp"
#include "multiclass_objective.hpp"
namespace LightGBM {
@ -12,6 +13,8 @@ ObjectiveFunction* ObjectiveFunction::CreateObjectiveFunction(const std::string&
return new BinaryLogloss(config);
} else if (type == "lambdarank") {
return new LambdarankNDCG(config);
} else if (type == "multiclass") {
return new MulticlassLogloss(config);
}
return nullptr;
}

Просмотреть файл

@ -14,7 +14,7 @@
namespace LightGBM {
/*!
* \brief Objective funtion for Lambdrank with NDCG
* \brief Objective function for Lambdrank with NDCG
*/
class LambdarankNDCG: public ObjectiveFunction {
public:

Просмотреть файл

@ -5,7 +5,7 @@
namespace LightGBM {
/*!
* \brief Objective funtion for regression
* \brief Objective function for regression
*/
class RegressionL2loss: public ObjectiveFunction {
public:

Просмотреть файл

@ -185,11 +185,13 @@
<ClInclude Include="..\src\metric\binary_metric.hpp" />
<ClInclude Include="..\src\metric\rank_metric.hpp" />
<ClInclude Include="..\src\metric\regression_metric.hpp" />
<ClInclude Include="..\src\metric\multiclass_metric.hpp" />
<ClInclude Include="..\src\network\linkers.h" />
<ClInclude Include="..\src\network\socket_wrapper.hpp" />
<ClInclude Include="..\src\objective\binary_objective.hpp" />
<ClInclude Include="..\src\objective\rank_objective.hpp" />
<ClInclude Include="..\src\objective\regression_objective.hpp" />
<ClInclude Include="..\src\objective\multiclass_objective.hpp" />
<ClInclude Include="..\src\treelearner\data_partition.hpp" />
<ClInclude Include="..\src\treelearner\feature_histogram.hpp" />
<ClInclude Include="..\src\treelearner\leaf_splits.hpp" />

Просмотреть файл

@ -75,6 +75,9 @@
<ClInclude Include="..\src\metric\regression_metric.hpp">
<Filter>src\metric</Filter>
</ClInclude>
<ClInclude Include="..\src\metric\multiclass_metric.hpp">
<Filter>src\metric</Filter>
</ClInclude>
<ClInclude Include="..\src\network\socket_wrapper.hpp">
<Filter>src\network</Filter>
</ClInclude>
@ -87,6 +90,9 @@
<ClInclude Include="..\src\objective\regression_objective.hpp">
<Filter>src\objective</Filter>
</ClInclude>
<ClInclude Include="..\src\objective\multiclass_objective.hpp">
<Filter>src\objective</Filter>
</ClInclude>
<ClInclude Include="..\src\treelearner\data_partition.hpp">
<Filter>src\treelearner</Filter>
</ClInclude>