Server run: 28472
Creator: wcukierski
Program: logreg-disnb-python
Dataset: SyntheticDataProblem
Task type: MulticlassClassification
Created: 1y 260d ago
Status: Done!
Total time: 34s
Memory: 80M

Results (BinaryClassification):
  Learn time:          21s
  Train error:         0.090
  Train predict time:  2s
  Test error:          0.232
  Test predict time:   9s

Log file

===== MAIN: learn based on training data =====
=== START program1: ./run learn ../dataset3/train
=== START program2: ./run learn ../program1/data
d=40, n=1000 nd=40000
datamatrix original dimension: (40, 1000)
using k=10
(40, 1000)
(360, 1000)
datamatrix encoded dimension: (271, 1000)
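
The encoding step above appears to discretize each of the 40 raw features into k = 10 bins and stack the resulting indicator rows, then drop rows that carry no information, going from 40 to 360 to the final 271 encoded dimensions. A minimal sketch of that kind of encoder, assuming quantile bins and a drop-one indicator per feature (the exact binning and pruning rule used by logreg-disnb-python is not shown in the log):

import numpy as np

def encode_features(X, k=10):
    # X is (d, n): d raw features in rows, n examples in columns, as in the log.
    d, n = X.shape
    blocks = []
    for j in range(d):
        # interior quantiles of feature j serve as bin edges
        edges = np.quantile(X[j], np.linspace(0.0, 1.0, k + 1)[1:-1])
        bins = np.digitize(X[j], edges)      # bin index 0..k-1 for each example
        onehot = np.eye(k)[bins]             # (n, k) indicator matrix
        blocks.append(onehot[:, 1:].T)       # keep k-1 indicators per feature
    E = np.vstack(blocks)                    # (d*(k-1), n), cf. "(360, 1000)" above
    keep = E.std(axis=1) > 0                 # drop indicators that never vary
    return E[keep]                           # remaining rows, cf. "(271, 1000)" above

# X = np.random.randn(40, 1000)
# print(encode_features(X, k=10).shape)
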
[ 0.11234746  0.19988547  0.46807146  0.02623871  0.02623871  1.124851
  0.13628022  0.24938226  0.60160285  0.12154889  0.02623871  0.02623871
  0.13628022  0.10729269  0.60160285  1.00706796  0.01821306  0.32095749
  0.49624234  0.19690484  0.71938589  0.71938589  0.11585087  0.45926911
  0.11139651  1.75025329  0.37251494  0.17142072  0.19690484  0.19690484
  1.07237358  0.02710727  0.08507921  0.60975006  0.07912181  0.71938589
  0.66690847  0.27618045  0.53706433  0.76771317  1.07237358  0.17577044
  0.50316278  1.07237358  2.05320284  1.76552076  0.23845385  0.0947139
  0.06624404  0.0589191   0.14686669  0.28391622  0.65484737  0.22507572
  0.71938589  0.13884104  0.09926384  0.26144337  0.12154889  0.71938589
  0.71938589  0.03091971  0.01888173  0.11686214  0.26740076  0.3792264
  0.10214246  0.19690484  0.02623871  0.43170381  0.43170381  0.66690847
  0.07775101  0.26618938  0.45926911  0.71938589  0.71938589  0.71938589
  0.38291365  1.76552076  0.8313038   0.15200952  0.73708546  1.124851
  1.850788    1.3384251   2.51114536  0.05831868  0.03174855  0.53337708
  0.24938226  0.02623871  0.66690847  0.90458013  0.91783683  1.43403927
  1.47197357  1.30717255  1.33188478  2.74635002  2.51114536  2.42413398
  0.0409006   0.04174289  0.66690847  0.11325008  0.71938589  0.20333574
  0.10034668  0.02623871  0.38291365  0.43170381  0.14209661  0.02623871
  0.15140185  0.12791197  0.66690847  0.4380669   0.61561518  0.66674215
  0.66690847  1.39572595  1.60668908  0.29918369  1.7309868   0.44376492
  0.18055571  0.07384475  1.06769258  0.48458692  0.02623871  0.66690847
  0.1673173   0.06072488  0.71938589  0.42574642  0.66690847  0.1390342
  0.16987617  0.18038939  0.10729269  0.02623871  0.09390561  0.33197352
  0.18342429  0.07386676  0.23946446  0.39780226  0.11325008  0.3792264
  1.00706796  0.11218114  0.2308064   0.72247833  0.14081538  0.50316278
  0.07170108  0.02255146  0.77345311  0.66222747  0.0571429   0.05654406
  0.02623871  0.28391622  0.02623871  0.2375478   0.19457402  0.60160285
  0.20856026  0.02623871  0.30015788  0.24938226  0.33162036  0.26144337
  0.02623871  0.71938589  0.17617885  0.20669285  0.02623871  0.15608285
  1.124851    0.02623871  0.46155678  0.6934104   0.39426428  0.02623871
  1.01521517  1.36005565  0.08339712  1.28594768  0.19690484  0.053804
  0.08983346  0.15947844  0.27136116  0.03829981  0.25804032  0.01458329
  0.94252944  0.47822383  0.0635101   0.33043624  0.36271094  0.06077267
  0.02623871  0.04573479  0.00274883  0.60236995  0.43170381  0.71938589
  0.44376492  0.49985439  0.59263418  0.68460805  0.65484737  1.09094944
  0.47019818  0.55686696  0.66690847  0.16987617  0.2391607   0.14402174
  0.18038939  1.41253307  0.24938226  0.40353294  0.57767734  0.86106449
  1.124851    1.47783869  0.53706433  0.71938589  0.02623871  0.02623871
  0.23168268  0.60160285  0.60236995  0.3792264   0.35599199  0.64391896
  0.7447037   0.59544951  0.26740076  0.63237451  0.55357979  1.07606083
  1.67850939  0.27744371  0.00406664  0.36271094  0.26144337  1.124851
  0.66690847  0.25849786  0.14719132  0.08339712  0.28113096  0.01821306
  0.4511219   0.19329279  0.06077267  0.71938589  0.33159835  0.39603352
  0.47355092  0.66690847  0.89527655  1.70021514  2.42413398  0.31023353
  0.66690847]
lambs, mean: 0.00164620776332, var:5.71418864198e-06
mean lambda: 0.00164620776332
(2, 271)
b is [[ 0.46202683]
 [-0.46202683]]
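
The (2, 271) shape printed above is presumably the class-by-feature weight matrix, and the symmetric pair in b looks like a bias initialized from centered log class priors. A minimal sketch of that initialization, under the assumption that this is indeed where b comes from (the class counts in the usage note are invented to roughly reproduce the printed value):

import numpy as np

def init_bias_from_priors(y, n_classes=2):
    # Empirical class priors, log-transformed and centered to zero mean.
    counts = np.bincount(y, minlength=n_classes).astype(float)
    log_prior = np.log(counts / counts.sum())
    return (log_prior - log_prior.mean()).reshape(-1, 1)

# y = np.array([0] * 716 + [1] * 284)
# init_bias_from_priors(y)   # approximately [[ 0.462], [-0.462]]
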
RUNNING THE L-BFGS-B CODE

           * * *

Machine precision = 1.084D-19
 N =          544     M =           10
 This problem is unconstrained.

At X0         0 variables are exactly at the bounds

At iterate    0    f=  6.92353D-01    |proj g|=  5.01307D-02

At iterate   10    f=  2.87474D-01    |proj g|=  5.87625D-03

At iterate   20    f=  2.72878D-01    |proj g|=  1.75144D-03

At iterate   30    f=  2.71023D-01    |proj g|=  6.81030D-04

At iterate   40    f=  2.70692D-01    |proj g|=  6.95212D-05

At iterate   50    f=  2.70636D-01    |proj g|=  1.20526D-04

At iterate   60    f=  2.70628D-01    |proj g|=  4.48617D-05

           * * *

Tit   = total number of iterations
Tnf   = total number of function evaluations
Tnint = total number of segments explored during Cauchy searches
Skip  = number of BFGS updates skipped
Nact  = number of active bounds at final generalized Cauchy point
Projg = norm of the final projected gradient
F     = final function value

           * * *

   N   Tit  Tnf  Tnint  Skip  Nact     Projg        F
  544   69   77      1     0     0   9.103D-06   2.706D-01
  F =  0.27062605519967631     

CONVERGENCE: NORM OF PROJECTED GRADIENT <= PGTOL            

 Cauchy                time 0.000E+00 seconds.
 Subspace minimization time 1.200E-02 seconds.
 Line search           time 1.100E+00 seconds.

 Total User time 1.120E+00 seconds.

=== END program2: ./run learn ../program1/data --- OK [21s]
=== END program1: ./run learn ../dataset3/train --- OK [21s]
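
The starred block above is the standard trace emitted by the Fortran L-BFGS-B routine behind scipy.optimize.fmin_l_bfgs_b when verbose printing is on: N = 544 variables is consistent with a weight matrix plus bias for 2 classes over the 271 encoded features, i.e. 2 * (271 + 1), and M = 10 is the limited-memory history size. As a rough sketch of the kind of objective being minimized here, assuming a softmax logistic-regression loss with an L2 penalty (the loss form, the single lam value, and the variable packing are guesses; the actual program may instead apply the per-feature lambs printed earlier):

import numpy as np
from scipy.optimize import fmin_l_bfgs_b

def nll_and_grad(theta, X, Y, lam):
    # X: (d, n) encoded features; Y: (c, n) one-hot labels.
    # theta packs a (c, d) weight matrix plus one bias per class.
    c, n = Y.shape
    d = X.shape[0]
    W = theta[:c * d].reshape(c, d)
    b = theta[c * d:].reshape(c, 1)
    S = W @ X + b
    S -= S.max(axis=0, keepdims=True)        # stabilize the softmax
    P = np.exp(S)
    P /= P.sum(axis=0, keepdims=True)
    loss = -np.sum(Y * np.log(P + 1e-12)) / n + 0.5 * lam * np.sum(W ** 2)
    gW = (P - Y) @ X.T / n + lam * W
    gb = np.sum(P - Y, axis=1, keepdims=True) / n
    return loss, np.concatenate([gW.ravel(), gb.ravel()])

# theta0 = np.zeros(2 * (271 + 1))
# theta, fmin, info = fmin_l_bfgs_b(nll_and_grad, theta0,
#                                   args=(Xenc, Y, 1.65e-3),  # cf. "mean lambda" above
#                                   m=10, pgtol=1e-5, iprint=10)

With m=10 and a moderate iprint, the wrapper produces exactly this style of "At iterate ..." output and stops on the projected-gradient tolerance, as reported in the convergence line above.
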

===== MAIN: predict/evaluate on train data =====
=== START program4: ./run stripLabels ../dataset3/train ../program0/evalTrain.in
=== END program4: ./run stripLabels ../dataset3/train ../program0/evalTrain.in --- OK [0s]
=== START program1: ./run predict ../program0/evalTrain.in ../program0/evalTrain.out
=== START program2: ./run predict ../program0/evalTrain.in ../program0/evalTrain.out.multiclass-output
d=40, n=1000 nd=40000
datamatrix original dimension: (40, 1000)
datamatrix encoded dimension: (271, 1000)
=== END program2: ./run predict ../program0/evalTrain.in ../program0/evalTrain.out.multiclass-output --- OK [2s]
=== END program1: ./run predict ../program0/evalTrain.in ../program0/evalTrain.out --- OK [2s]
=== START program5: ./run evaluate ../dataset3/train ../program0/evalTrain.out
=== END program5: ./run evaluate ../dataset3/train ../program0/evalTrain.out --- OK [0s]
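
For orientation, the predict and evaluate stages reduce to an argmax over the learned linear scores followed by a plain 0/1 error rate; a minimal sketch, assuming that this error rate is the metric behind the 0.090 train and 0.232 test figures in the results above (the harness itself only reports OK and the totals):

import numpy as np

def predict_labels(W, b, X_encoded):
    # Highest-scoring class under the learned linear model.
    return np.argmax(W @ X_encoded + b, axis=0)

def error_rate(y_true, y_pred):
    # Fraction of misclassified examples.
    return float(np.mean(np.asarray(y_true) != np.asarray(y_pred)))

# error_rate([0, 1, 1, 0], [0, 1, 0, 0])   # -> 0.25
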

===== MAIN: predict/evaluate on test data =====
=== START program4: ./run stripLabels ../dataset3/test ../program0/evalTest.in
=== END program4: ./run stripLabels ../dataset3/test ../program0/evalTest.in --- OK [1s]
=== START program1: ./run predict ../program0/evalTest.in ../program0/evalTest.out
=== START program2: ./run predict ../program0/evalTest.in ../program0/evalTest.out.multiclass-output
d=40, n=9000 nd=360000
datamatrix original dimension: (40, 9000)
datamatrix encoded dimension: (271, 9000)
=== END program2: ./run predict ../program0/evalTest.in ../program0/evalTest.out.multiclass-output --- OK [8s]
=== END program1: ./run predict ../program0/evalTest.in ../program0/evalTest.out --- OK [9s]
=== START program5: ./run evaluate ../dataset3/test ../program0/evalTest.out
=== END program5: ./run evaluate ../dataset3/test ../program0/evalTest.out --- OK [1s]


real	0m35.135s
user	0m11.333s
sys	0m3.404s
