
Please consider the following basic reproducible example:

library(h2o)
h2o.init()
data("iris")
iris.hex = as.h2o(iris, "iris.hex")
mod = h2o.glm(y = "Sepal.Length", x = setdiff(colnames(iris), "Sepal.Length"), 
              training_frame = iris.hex, nfolds = 2, seed = 100,
              lambda_search = T, early_stopping = F, 
              family = "gamma", nlambdas = 100)

When I run the above, I expect that h2o will iterate over 100 different values of lambda. However, running length(mod@allparameters$lambda) will show that only 79 values of lambda were actually tested. These 79 values are the first 79 values in the sequence:

maxLambda = max(mod@allparameters$lambda)
lambdaMinRatio = mod@allparameters$lambda_min_ratio
exp(seq(log(maxLambda), log(maxLambda*lambdaMinRatio), length.out = 100))
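
A quick sanity check (illustrative only; it reuses the objects defined above) is to compare the lambdas H2O actually used against the head of this sequence:

fullSeq = exp(seq(log(maxLambda), log(maxLambda*lambdaMinRatio), length.out = 100))
# the 79 lambdas H2O used should match the first 79 values of the full sequence
# (a small tolerance is used since H2O builds the grid on the server side)
all.equal(mod@allparameters$lambda, fullSeq[1:79], tolerance = 1e-6)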

Could you please let me know how I can get the function to iterate over all 100 values of lambda? (I tried setting early_stopping = F to see if that would fix the issue but it does not.)

Here is my cluster info if it helps:

R is connected to the H2O cluster: 
    H2O cluster uptime:         11 hours 21 minutes 
    H2O cluster version:        3.10.5.3 
    H2O cluster version age:    1 month and 26 days  
    H2O cluster name:           H2O_started_from_R_xaq943 
    H2O cluster total nodes:    1 
    H2O cluster total memory:   6.75 GB 
    H2O cluster total cores:    8 
    H2O cluster allowed cores:  4 
    H2O cluster healthy:        TRUE 
    H2O Connection ip:          localhost 
    H2O Connection port:        54321 
    H2O Connection proxy:       NA 
    H2O Internal Security:      FALSE 
    R Version:                  R version 3.3.3 (2017-03-06) 

Thank you!

EDIT: As requested, here is the output of h2o.getGLMFullRegularizationPath(mod):

$`__meta`
$`__meta`$schema_version
[1] 3

$`__meta`$schema_name
[1] "GLMRegularizationPathV3"

$`__meta`$schema_type
[1] "RegularizationPath"


$model
NULL

$lambdas
 [1] 1.434114617 1.306711827 1.190627150 1.084855115 0.988479577 0.900665776 0.820653111 0.747748550 0.681320630 0.620793983 0.565644356
[12] 0.515394071 0.469607882 0.427889212 0.389876714 0.355241141 0.323682497 0.294927436 0.268726896 0.244853939 0.223101790 0.203282042
[23] 0.185223025 0.168768322 0.153775410 0.140114426 0.127667047 0.116325458 0.105991425 0.096575439 0.087995943 0.080178626 0.073055778
[34] 0.066565704 0.060652190 0.055264017 0.050354514 0.045881158 0.041805202 0.038091343 0.034707413 0.031624102 0.028814704 0.026254885
[45] 0.023922474 0.021797267 0.019860858 0.018096474 0.016488833 0.015024011 0.013689319 0.012473198 0.011365113 0.010355468 0.009435517
[56] 0.008597291 0.007833532 0.007137622 0.006503536 0.005925779 0.005399349 0.004919686 0.004482635 0.004084410 0.003721562 0.003390949
[67] 0.003089706 0.002815225 0.002565128 0.002337249 0.002129615 0.001940426 0.001768044 0.001610975 0.001467861 0.001337460 0.001218644
[78] 0.001110383 0.001011740

$explained_deviance_train
 [1] -3.294962e-08  1.278780e-01  2.352402e-01  3.253159e-01  4.008369e-01  4.641126e-01  5.170944e-01  5.614293e-01  5.985067e-01
[10]  6.294974e-01  6.553869e-01  6.770044e-01  6.950464e-01  7.100979e-01  7.226495e-01  7.331127e-01  7.418320e-01  7.490957e-01
[19]  7.551451e-01  7.687710e-01  7.815713e-01  7.921910e-01  8.010014e-01  8.083105e-01  8.143741e-01  8.194045e-01  8.235584e-01
[28]  8.270239e-01  8.298991e-01  8.322847e-01  8.342640e-01  8.359064e-01  8.372692e-01  8.384000e-01  8.393384e-01  8.401172e-01
[37]  8.407634e-01  8.411713e-01  8.420553e-01  8.434391e-01  8.445680e-01  8.454431e-01  8.462240e-01  8.468835e-01  8.476350e-01
[46]  8.481135e-01  8.497288e-01  8.513965e-01  8.528687e-01  8.541499e-01  8.551259e-01  8.560063e-01  8.566711e-01  8.572853e-01
[55]  8.578407e-01  8.583362e-01  8.586877e-01  8.590151e-01  8.593148e-01  8.595864e-01  8.596849e-01  8.599377e-01  8.600233e-01
[64]  8.602430e-01  8.603153e-01  8.605097e-01  8.605776e-01  8.608212e-01  8.608821e-01  8.610499e-01  8.611065e-01  8.611627e-01
[73]  8.612156e-01  8.616241e-01  8.616940e-01  8.617575e-01  8.617782e-01  8.617988e-01  8.618557e-01

$explained_deviance_valid
NULL

$coefficients
      Species.setosa Species.versicolor Species.virginica  Sepal.Width Petal.Length  Petal.Width Intercept
 [1,]   0.000000e+00       0.0000000000      0.000000e+00  0.000000000  0.000000000 0.000000e+00 0.1711352
 [2,]   0.000000e+00       0.0000000000      0.000000e+00  0.000000000 -0.001046643 0.000000e+00 0.1750882
 [3,]   0.000000e+00       0.0000000000      0.000000e+00  0.000000000 -0.002009314 0.000000e+00 0.1787588
 [4,]   0.000000e+00       0.0000000000      0.000000e+00  0.000000000 -0.002894275 0.000000e+00 0.1821621
 [5,]   0.000000e+00       0.0000000000      0.000000e+00  0.000000000 -0.003707356 0.000000e+00 0.1853133
 [6,]   0.000000e+00       0.0000000000      0.000000e+00  0.000000000 -0.004453990 0.000000e+00 0.1882274
 [7,]   0.000000e+00       0.0000000000      0.000000e+00  0.000000000 -0.005139245 0.000000e+00 0.1909189
 [8,]   0.000000e+00       0.0000000000      0.000000e+00  0.000000000 -0.005767843 0.000000e+00 0.1934021
 [9,]   0.000000e+00       0.0000000000      0.000000e+00  0.000000000 -0.006344186 0.000000e+00 0.1956907
[10,]   0.000000e+00       0.0000000000      0.000000e+00  0.000000000 -0.006872371 0.000000e+00 0.1977980
[11,]   0.000000e+00       0.0000000000      0.000000e+00  0.000000000 -0.007356208 0.000000e+00 0.1997366
[12,]   0.000000e+00       0.0000000000      0.000000e+00  0.000000000 -0.007799235 0.000000e+00 0.2015187
[13,]   0.000000e+00       0.0000000000      0.000000e+00  0.000000000 -0.008204738 0.000000e+00 0.2031555
[14,]   0.000000e+00       0.0000000000      0.000000e+00  0.000000000 -0.008575759 0.000000e+00 0.2046579
[15,]   0.000000e+00       0.0000000000      0.000000e+00  0.000000000 -0.008915116 0.000000e+00 0.2060361
[16,]   0.000000e+00       0.0000000000      0.000000e+00  0.000000000 -0.009225414 0.000000e+00 0.2072996
[17,]   0.000000e+00       0.0000000000      0.000000e+00  0.000000000 -0.009509059 0.000000e+00 0.2084574
[18,]   0.000000e+00       0.0000000000      0.000000e+00  0.000000000 -0.009768269 0.000000e+00 0.2095177
[19,]   0.000000e+00       0.0000000000      0.000000e+00  0.000000000 -0.010005092 0.000000e+00 0.2104884
[20,]   0.000000e+00       0.0000000000      0.000000e+00 -0.001127417 -0.010319589 0.000000e+00 0.2151915
[21,]   0.000000e+00       0.0000000000      0.000000e+00 -0.002359216 -0.010623694 0.000000e+00 0.2201719
[22,]   0.000000e+00       0.0000000000      0.000000e+00 -0.003480564 -0.010900376 0.000000e+00 0.2247086
[23,]   0.000000e+00       0.0000000000      0.000000e+00 -0.004501465 -0.011152087 0.000000e+00 0.2288412
[24,]   0.000000e+00       0.0000000000      0.000000e+00 -0.005430894 -0.011381098 0.000000e+00 0.2326054
[25,]   0.000000e+00       0.0000000000      0.000000e+00 -0.006277042 -0.011589469 0.000000e+00 0.2360339
[26,]   0.000000e+00       0.0000000000      0.000000e+00 -0.007047377 -0.011779076 0.000000e+00 0.2391565
[27,]   0.000000e+00       0.0000000000      0.000000e+00 -0.007743794 -0.011951139 0.000000e+00 0.2419836
[28,]   0.000000e+00       0.0000000000      0.000000e+00 -0.008382717 -0.012108203 0.000000e+00 0.2445752
[29,]   0.000000e+00       0.0000000000      0.000000e+00 -0.008964450 -0.012251159 0.000000e+00 0.2469356
[30,]   0.000000e+00       0.0000000000      0.000000e+00 -0.009494120 -0.012381280 0.000000e+00 0.2490854
[31,]   0.000000e+00       0.0000000000      0.000000e+00 -0.009976404 -0.012499729 0.000000e+00 0.2510434
[32,]   0.000000e+00       0.0000000000      0.000000e+00 -0.010415558 -0.012607559 0.000000e+00 0.2528268
[33,]   0.000000e+00       0.0000000000      0.000000e+00 -0.010815455 -0.012705728 0.000000e+00 0.2544511
[34,]   0.000000e+00       0.0000000000      0.000000e+00 -0.011179617 -0.012795108 0.000000e+00 0.2559306
[35,]   0.000000e+00       0.0000000000      0.000000e+00 -0.011511250 -0.012876490 0.000000e+00 0.2572783
[36,]   0.000000e+00       0.0000000000      0.000000e+00 -0.011813271 -0.012950594 0.000000e+00 0.2585058
[37,]   0.000000e+00       0.0000000000      0.000000e+00 -0.012088333 -0.013018075 0.000000e+00 0.2596239
[38,]   0.000000e+00       0.0000000000      0.000000e+00 -0.012254270 -0.013069805 0.000000e+00 0.2603445
[39,]   0.000000e+00      -0.0001175922      0.000000e+00 -0.012623025 -0.013136288 0.000000e+00 0.2617830
[40,]   0.000000e+00      -0.0005066170      0.000000e+00 -0.013031762 -0.013198821 0.000000e+00 0.2634171
[41,]   0.000000e+00      -0.0008532154      0.000000e+00 -0.013400532 -0.013255288 0.000000e+00 0.2648907
[42,]   0.000000e+00      -0.0011428955      0.000000e+00 -0.013718316 -0.013304258 0.000000e+00 0.2661590
[43,]   0.000000e+00      -0.0014293556      0.000000e+00 -0.014023516 -0.013351005 0.000000e+00 0.2673789
[44,]   0.000000e+00      -0.0016797073      1.243120e-05 -0.014304179 -0.013396541 0.000000e+00 0.2685020
[45,]   0.000000e+00      -0.0018706468      9.790433e-05 -0.014536882 -0.013478186 8.361933e-05 0.2694643
[46,]   0.000000e+00      -0.0019698629      1.717337e-04 -0.014665554 -0.013530772 1.814935e-04 0.2699431
[47,]   0.000000e+00      -0.0021078477      2.246836e-04 -0.014925921 -0.013849890 8.489923e-04 0.2711751
[48,]   0.000000e+00      -0.0021556371      3.034315e-04 -0.015150706 -0.014237748 1.656453e-03 0.2723528
[49,]   0.000000e+00      -0.0021453273      4.458210e-04 -0.015348300 -0.014616464 2.413512e-03 0.2734328
[50,]   0.000000e+00      -0.0020839569      6.461732e-04 -0.015520020 -0.014980050 3.109852e-03 0.2744131
[51,]   0.000000e+00      -0.0020107174      8.597081e-04 -0.015660412 -0.015278421 3.659515e-03 0.2752178
[52,]   0.000000e+00      -0.0019078474      1.101906e-03 -0.015786052 -0.015572930 4.186424e-03 0.2759708
[53,]   0.000000e+00      -0.0018175109      1.323132e-03 -0.015890456 -0.015809763 4.599076e-03 0.2765883
[54,]   0.000000e+00      -0.0017094991      1.558056e-03 -0.015986195 -0.016047486 5.006251e-03 0.2771791
[55,]   0.000000e+00      -0.0015842081      1.807162e-03 -0.016071634 -0.016281094 5.397220e-03 0.2777320
[56,]   0.000000e+00      -0.0014430021      2.070103e-03 -0.016146458 -0.016507391 5.765349e-03 0.2782422
[57,]   0.000000e+00      -0.0013372850      2.282679e-03 -0.016207766 -0.016676301 6.033973e-03 0.2786413
[58,]   0.000000e+00      -0.0012235170      2.499826e-03 -0.016264638 -0.016845566 6.300372e-03 0.2790268
[59,]   0.000000e+00      -0.0011012638      2.721871e-03 -0.016315538 -0.017012360 6.558645e-03 0.2793901
[60,]   0.000000e+00      -0.0009710435      2.949010e-03 -0.016360197 -0.017174819 6.804753e-03 0.2797282
[61,]   0.000000e+00      -0.0009387214      3.037293e-03 -0.016389637 -0.017231436 6.890089e-03 0.2798895
[62,]   0.000000e+00      -0.0008039241      3.270133e-03 -0.016434978 -0.017400280 7.145560e-03 0.2802397
[63,]   0.000000e+00      -0.0007660395      3.357739e-03 -0.016459753 -0.017456898 7.230221e-03 0.2803861
[64,]   0.000000e+00      -0.0006199118      3.595215e-03 -0.016496474 -0.017622417 7.475811e-03 0.2807040
[65,]   0.000000e+00      -0.0005768718      3.683036e-03 -0.016516321 -0.017676868 7.554952e-03 0.2808322
[66,]  -3.476645e-05      -0.0004234267      3.926329e-03 -0.016530611 -0.017839966 7.778584e-03 0.2811053
[67,]  -6.891206e-05      -0.0003785185      4.015034e-03 -0.016531687 -0.017896456 7.848434e-03 0.2812048
[68,]  -2.509409e-04      -0.0001852860      4.347389e-03 -0.016506532 -0.018118743 8.088206e-03 0.2815679
[69,]  -3.133552e-04      -0.0001451602      4.438172e-03 -0.016500367 -0.018173876 8.139393e-03 0.2816729
[70,]  -5.214018e-04      -0.0000198928      4.695164e-03 -0.016468891 -0.018337882 8.275330e-03 0.2819765
[71,]  -6.024159e-04       0.0000000000      4.785344e-03 -0.016466374 -0.018391875 8.314552e-03 0.2821158
[72,]  -6.921978e-04       0.0000000000      4.869432e-03 -0.016471686 -0.018446669 8.353211e-03 0.2822946
[73,]  -7.920269e-04       0.0000000000      4.942796e-03 -0.016472703 -0.018501428 8.391136e-03 0.2824681
[74,]  -2.055117e-03       0.0000000000      5.491157e-03 -0.016310937 -0.018964797 8.523048e-03 0.2838078
[75,]  -2.353043e-03       0.0000000000      5.606834e-03 -0.016260884 -0.019047344 8.505333e-03 0.2840483
[76,]  -2.644396e-03       0.0000000000      5.720820e-03 -0.016211592 -0.019126493 8.483952e-03 0.2842812
[77,]  -2.743107e-03       0.0000000000      5.760265e-03 -0.016195310 -0.019153151 8.477521e-03 0.2843592
[78,]  -2.843096e-03       0.0000000000      5.800458e-03 -0.016179171 -0.019181275 8.473083e-03 0.2844411
[79,]  -3.135365e-03       0.0000000000      5.915736e-03 -0.016130870 -0.019263792 8.457283e-03 0.2846831

$coefficients_std
      Species.setosa Species.versicolor Species.virginica   Sepal.Width Petal.Length  Petal.Width Intercept
 [1,]   0.000000e+00       0.0000000000      0.000000e+00  0.0000000000  0.000000000 0.0000000000 0.1711352
 [2,]   0.000000e+00       0.0000000000      0.000000e+00  0.0000000000 -0.001847636 0.0000000000 0.1711550
 [3,]   0.000000e+00       0.0000000000      0.000000e+00  0.0000000000 -0.003547039 0.0000000000 0.1712078
 [4,]   0.000000e+00       0.0000000000      0.000000e+00  0.0000000000 -0.005109259 0.0000000000 0.1712854
 [5,]   0.000000e+00       0.0000000000      0.000000e+00  0.0000000000 -0.006544589 0.0000000000 0.1713811
 [6,]   0.000000e+00       0.0000000000      0.000000e+00  0.0000000000 -0.007862621 0.0000000000 0.1714893
 [7,]   0.000000e+00       0.0000000000      0.000000e+00  0.0000000000 -0.009072299 0.0000000000 0.1716056
 [8,]   0.000000e+00       0.0000000000      0.000000e+00  0.0000000000 -0.010181963 0.0000000000 0.1717265
 [9,]   0.000000e+00       0.0000000000      0.000000e+00  0.0000000000 -0.011199381 0.0000000000 0.1718492
[10,]   0.000000e+00       0.0000000000      0.000000e+00  0.0000000000 -0.012131784 0.0000000000 0.1719716
[11,]   0.000000e+00       0.0000000000      0.000000e+00  0.0000000000 -0.012985900 0.0000000000 0.1720920
[12,]   0.000000e+00       0.0000000000      0.000000e+00  0.0000000000 -0.013767976 0.0000000000 0.1722091
[13,]   0.000000e+00       0.0000000000      0.000000e+00  0.0000000000 -0.014483810 0.0000000000 0.1723221
[14,]   0.000000e+00       0.0000000000      0.000000e+00  0.0000000000 -0.015138773 0.0000000000 0.1724302
[15,]   0.000000e+00       0.0000000000      0.000000e+00  0.0000000000 -0.015737839 0.0000000000 0.1725331
[16,]   0.000000e+00       0.0000000000      0.000000e+00  0.0000000000 -0.016285607 0.0000000000 0.1726305
[17,]   0.000000e+00       0.0000000000      0.000000e+00  0.0000000000 -0.016786324 0.0000000000 0.1727224
[18,]   0.000000e+00       0.0000000000      0.000000e+00  0.0000000000 -0.017243908 0.0000000000 0.1728086
[19,]   0.000000e+00       0.0000000000      0.000000e+00  0.0000000000 -0.017661971 0.0000000000 0.1728892
[20,]   0.000000e+00       0.0000000000      0.000000e+00 -0.0004914029 -0.018217153 0.0000000000 0.1729636
[21,]   0.000000e+00       0.0000000000      0.000000e+00 -0.0010283025 -0.018753989 0.0000000000 0.1730351
[22,]   0.000000e+00       0.0000000000      0.000000e+00 -0.0015170607 -0.019242414 0.0000000000 0.1731038
[23,]   0.000000e+00       0.0000000000      0.000000e+00 -0.0019620369 -0.019686760 0.0000000000 0.1731692
[24,]   0.000000e+00       0.0000000000      0.000000e+00 -0.0023671437 -0.020091032 0.0000000000 0.1732312
[25,]   0.000000e+00       0.0000000000      0.000000e+00 -0.0027359511 -0.020458869 0.0000000000 0.1732897
[26,]   0.000000e+00       0.0000000000      0.000000e+00 -0.0030717141 -0.020793582 0.0000000000 0.1733446
[27,]   0.000000e+00       0.0000000000      0.000000e+00 -0.0033752589 -0.021097325 0.0000000000 0.1733959
[28,]   0.000000e+00       0.0000000000      0.000000e+00 -0.0036537438 -0.021374590 0.0000000000 0.1734438
[29,]   0.000000e+00       0.0000000000      0.000000e+00 -0.0039073015 -0.021626949 0.0000000000 0.1734884
[30,]   0.000000e+00       0.0000000000      0.000000e+00 -0.0041381667 -0.021856652 0.0000000000 0.1735299
[31,]   0.000000e+00       0.0000000000      0.000000e+00 -0.0043483781 -0.022065749 0.0000000000 0.1735682
[32,]   0.000000e+00       0.0000000000      0.000000e+00 -0.0045397906 -0.022256101 0.0000000000 0.1736038
[33,]   0.000000e+00       0.0000000000      0.000000e+00 -0.0047140920 -0.022429399 0.0000000000 0.1736366
[34,]   0.000000e+00       0.0000000000      0.000000e+00 -0.0048728180 -0.022587182 0.0000000000 0.1736668
[35,]   0.000000e+00       0.0000000000      0.000000e+00 -0.0050173658 -0.022730846 0.0000000000 0.1736947
[36,]   0.000000e+00       0.0000000000      0.000000e+00 -0.0051490065 -0.022861661 0.0000000000 0.1737203
[37,]   0.000000e+00       0.0000000000      0.000000e+00 -0.0052688970 -0.022980784 0.0000000000 0.1737439
[38,]   0.000000e+00       0.0000000000      0.000000e+00 -0.0053412234 -0.023072104 0.0000000000 0.1737627
[39,]   0.000000e+00      -0.0001175922      0.000000e+00 -0.0055019511 -0.023189466 0.0000000000 0.1738240
[40,]   0.000000e+00      -0.0005066170      0.000000e+00 -0.0056801058 -0.023299856 0.0000000000 0.1739735
[41,]   0.000000e+00      -0.0008532154      0.000000e+00 -0.0058408402 -0.023399537 0.0000000000 0.1741074
[42,]   0.000000e+00      -0.0011428955      0.000000e+00 -0.0059793516 -0.023485982 0.0000000000 0.1742201
[43,]   0.000000e+00      -0.0014293556      0.000000e+00 -0.0061123779 -0.023568506 0.0000000000 0.1743313
[44,]   0.000000e+00      -0.0016797073      1.243120e-05 -0.0062347093 -0.023648890 0.0000000000 0.1744252
[45,]   0.000000e+00      -0.0018706468      9.790433e-05 -0.0063361369 -0.023793017 0.0000637378 0.1744695
[46,]   0.000000e+00      -0.0019698629      1.717337e-04 -0.0063922205 -0.023885848 0.0001383412 0.1744746
[47,]   0.000000e+00      -0.0021078477      2.246836e-04 -0.0065057056 -0.024449186 0.0006471339 0.1745119
[48,]   0.000000e+00      -0.0021556371      3.034315e-04 -0.0066036820 -0.025133872 0.0012626110 0.1745132
[49,]   0.000000e+00      -0.0021453273      4.458210e-04 -0.0066898067 -0.025802418 0.0018396700 0.1744739
[50,]   0.000000e+00      -0.0020839569      6.461732e-04 -0.0067646533 -0.026444255 0.0023704464 0.1743980
[51,]   0.000000e+00      -0.0020107174      8.597081e-04 -0.0068258458 -0.026970969 0.0027894199 0.1743114
[52,]   0.000000e+00      -0.0019078474      1.101906e-03 -0.0068806078 -0.027490866 0.0031910501 0.1742055
[53,]   0.000000e+00      -0.0018175109      1.323132e-03 -0.0069261140 -0.027908946 0.0035055892 0.1741087
[54,]   0.000000e+00      -0.0017094991      1.558056e-03 -0.0069678433 -0.028328598 0.0038159527 0.1740017
[55,]   0.000000e+00      -0.0015842081      1.807162e-03 -0.0070050834 -0.028740986 0.0041139641 0.1738844
[56,]   0.000000e+00      -0.0014430021      2.070103e-03 -0.0070376966 -0.029140468 0.0043945661 0.1737569
[57,]   0.000000e+00      -0.0013372850      2.282679e-03 -0.0070644186 -0.029438645 0.0045993214 0.1736560
[58,]   0.000000e+00      -0.0012235170      2.499826e-03 -0.0070892073 -0.029737447 0.0048023811 0.1735510
[59,]   0.000000e+00      -0.0011012638      2.721871e-03 -0.0071113928 -0.030031888 0.0049992462 0.1734416
[60,]   0.000000e+00      -0.0009710435      2.949010e-03 -0.0071308582 -0.030318677 0.0051868390 0.1733278
[61,]   0.000000e+00      -0.0009387214      3.037293e-03 -0.0071436900 -0.030418624 0.0052518855 0.1732887
[62,]   0.000000e+00      -0.0008039241      3.270133e-03 -0.0071634528 -0.030716683 0.0054466147 0.1731721
[63,]   0.000000e+00      -0.0007660395      3.357739e-03 -0.0071742512 -0.030816631 0.0055111469 0.1731316
[64,]   0.000000e+00      -0.0006199118      3.595215e-03 -0.0071902568 -0.031108822 0.0056983444 0.1730097
[65,]   0.000000e+00      -0.0005768718      3.683036e-03 -0.0071989073 -0.031204944 0.0057586692 0.1729675
[66,]  -3.476645e-05      -0.0004234267      3.926329e-03 -0.0072051358 -0.031492861 0.0059291294 0.1728522
[67,]  -6.891206e-05      -0.0003785185      4.015034e-03 -0.0072056052 -0.031592582 0.0059823720 0.1728199
[68,]  -2.509409e-04      -0.0001852860      4.347389e-03 -0.0071946407 -0.031984985 0.0061651351 0.1727122
[69,]  -3.133552e-04      -0.0001451602      4.438172e-03 -0.0071919537 -0.032082312 0.0062041522 0.1726902
[70,]  -5.214018e-04      -0.0000198928      4.695164e-03 -0.0071782342 -0.032371830 0.0063077682 0.1726367
[71,]  -6.024159e-04       0.0000000000      4.785344e-03 -0.0071771375 -0.032467144 0.0063376645 0.1726279
[72,]  -6.921978e-04       0.0000000000      4.869432e-03 -0.0071794528 -0.032563872 0.0063671323 0.1726309
[73,]  -7.920269e-04       0.0000000000      4.942796e-03 -0.0071798959 -0.032660538 0.0063960401 0.1726409
[74,]  -2.055117e-03       0.0000000000      5.491157e-03 -0.0071093875 -0.033478523 0.0064965885 0.1728921
[75,]  -2.353043e-03       0.0000000000      5.606834e-03 -0.0070875711 -0.033624243 0.0064830849 0.1729542
[76,]  -2.644396e-03       0.0000000000      5.720820e-03 -0.0070660864 -0.033763964 0.0064667874 0.1730147
[77,]  -2.743107e-03       0.0000000000      5.760265e-03 -0.0070589896 -0.033811024 0.0064618858 0.1730346
[78,]  -2.843096e-03       0.0000000000      5.800458e-03 -0.0070519551 -0.033860671 0.0064585028 0.1730548
[79,]  -3.135365e-03       0.0000000000      5.915736e-03 -0.0070309023 -0.034006338 0.0064464596 0.1731155

$coefficient_names
[1] "Species.setosa"     "Species.versicolor" "Species.virginica"  "Sepal.Width"        "Petal.Length"       "Petal.Width"       
[7] "Intercept"

EDIT #2: In response to @Darren's answer: I am now seeing the following on my actual (confidential) dataset. The cross-validated models have selected much smaller lambdas, yet the main model stops at a very large lambda.

> tail(mx@allparameters$lambda)
[1] 0.1536665 0.1400152 0.1275767 0.1162431 0.1059164
> mx@model$lambda_best
[1] 0.1059164
> 
> lapply(mx@model$cross_validation_models, function(m_cv){
+     m <- h2o.getModel(m_cv$name)
+     list( tail(m@allparameters$lambda), m@model$lambda_best )
+ })

[[1]]
[[1]][[1]]
[1] 2.446806e-05 2.229438e-05 2.031381e-05 1.850919e-05 1.686488e-05 1.536665e-05

[[1]][[2]]
[1] 0.01135707


[[2]]
[[2]][[1]]
[1] 2.446806e-05 2.229438e-05 2.031381e-05 1.850919e-05 1.686488e-05 1.536665e-05

[[2]][[2]]
[1] 0.01808366


[[3]]
[[3]][[1]]
[1] 2.446806e-05 2.229438e-05 2.031381e-05 1.850919e-05 1.686488e-05 1.536665e-05

[[3]][[2]]
[1] 0.01647716
  • Try running it again with lambda search over the values of lambda the previous run skipped. Does anything look unusual? As in, do all the parameters zero out? – C8H10N4O2 Aug 26 '17 at 01:05
  • Also, what is the output of `h2o.getGLMFullRegularizationPath(mod)`? I turned my H2O server off for the weekend... – C8H10N4O2 Aug 26 '17 at 01:08
  • I have tried some of the lambdas that were skipped and none of the parameters are zeroed out. These are the smaller lambdas that were skipped, so it should be moving closer to an unpenalized GLM. I have also edited my post to include the output of the full regularization path. Thank you. – jav Aug 26 '17 at 01:17
  • It's interesting that `lambdas[79]` is just over 0.001, wondering if the next one would be < 0.001 -- maybe there's some sort of stopping threshold for very small lambda? Or maybe the parameter change between lambdas 78-79 is small enough that some other stopping criterion is getting triggered? I have no idea. If this question is still open on Monday, I'll take a look. – C8H10N4O2 Aug 26 '17 at 01:24
  • That is interesting. The next one is actually below 0.001. However, I just went back to my actual dataset and checked to see if this was also the case. When the number of lambdas was set to 30, it stopped just before the lambda went below 0.01. Then when the number of lambdas was set to 100, it stopped just before the lambda went below 0.1. Thanks for taking the time to look at this. Very much appreciated. – jav Aug 26 '17 at 01:41
  • @C8H10N4O2, I've added a response to Darren's post above. I am seeing different behaviour on my actual dataset. If you still have any thoughts, they are appreciated. – jav Aug 28 '17 at 05:51

2 Answers


Short answer: you've found a bug, and we've opened a ticket here. The early_stopping flag is not being honored when nfolds > 0. In the meantime, if you don't set nfolds, you should get all 100 lambdas.
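
For example, something like this (a sketch only; mod_nocv is just an illustrative name) should produce the full path on the iris example from the question:

# same GLM as in the question, but without cross-validation,
# so the early-stopping bug is not triggered
mod_nocv = h2o.glm(y = "Sepal.Length", x = setdiff(colnames(iris), "Sepal.Length"),
                   training_frame = iris.hex, seed = 100,
                   lambda_search = T, early_stopping = F,
                   family = "gamma", nlambdas = 100)
length(mod_nocv@allparameters$lambda)  # should now report 100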

Erin LeDell
  • Erin, is the only bug that `early_stopping = F` is being ignored? If you want early stopping, is the current behaviour correct? It seemed to be stopping a bit early here, especially on the OP's unseen real data; but is that part of guarding against over-fitting, when using CV? – Darren Cook Aug 29 '17 at 08:37
  • @DarrenCook Yes, that's the bug. When `early_stopping = F`, it computes all lambdas in the CV models, but the bug is that the main model will still do early stopping (by computing up to the best lambda instead of all lambdas). – Erin LeDell Aug 29 '17 at 20:41
  • Erin, my comment got a bit long, so I made it into a full question: https://stackoverflow.com/q/45948642/841830 – Darren Cook Aug 29 '17 at 21:34

What is happening is that H2O learns from the cross-validation models in order to optimize the parameters used for the final run. (BTW, you are using nfolds = 2, which is fairly unusual for a small data set: it learns on just 75 records, then tests on the other 75, so there is going to be a lot of noise in what it learns from CV.)

Following on from your code:

tail(mod@allparameters$lambda)
mod@model$lambda_best

I'm using 3.14.0.1, so here is what I get:

[1] 0.002129615 0.001940426 0.001768044 0.001610975 0.001467861 0.001337460

and:

[1] 0.001610975

Then if we go look at the same for the 2 CV models:

lapply(mod@model$cross_validation_models, function(m_cv){
  m <- h2o.getModel(m_cv$name)
  list( tail(m@allparameters$lambda), m@model$lambda_best )
  })

I get:

[[1]]
[[1]][[1]]
[1] 0.0002283516 0.0002080655 0.0001895815 0.0001727396 0.0001573939 0.0001434115

[[1]][[2]]
[1] 0.002337249


[[2]]
[[2]][[1]]
[1] 0.0002283516 0.0002080655 0.0001895815 0.0001727396 0.0001573939 0.0001434115

[[2]][[2]]
[1] 0.00133746

I.e. it seems the lowest best lambda found in the CV models was 0.00133, so it has used that as the early-stopping point for the final model.

BTW, if you poke around in those cv models you will see they both tried 100 values for lambda. It is only the final model that does the extra optimization.
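
For example, this quick check (not part of the original run above) counts the lambdas each CV model tried:

# count the lambdas tried by each cross-validation model; both should be 100
sapply(mod@model$cross_validation_models, function(m_cv){
  length(h2o.getModel(m_cv$name)@allparameters$lambda)
})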

(I'm thinking of it as a time optimization, but reading p.26/27 of the Generalized Linear Models booklet (free download from https://www.h2o.ai/resources/), I think it is mainly about using the cv data to avoid over-fitting.)

You can explicitly specify a set of lambda values to try. BUT the cross-validation learning will still take priority for the final model. E.g. in the following, the final model only tried the first 4 of the 6 lambda values I suggested, because both CV models liked 0.001 best.

mx = h2o.glm(y = "Sepal.Length", x = setdiff(colnames(iris), "Sepal.Length"), 
            training_frame = iris.hex, nfolds = 2, seed = 100,
            lambda = c(1.0, 0.1, 0.01, 0.001, 0.0001, 0), lambda_search = T,
            family = "gamma")

tail(mx@allparameters$lambda)
mx@model$lambda_best

lapply(mx@model$cross_validation_models, function(m_cv){
  m <- h2o.getModel(m_cv$name)
  list( tail(m@allparameters$lambda), m@model$lambda_best )
})
Darren Cook
  • Thank you very much for your detailed response. This makes sense now. :) – jav Aug 27 '17 at 14:00
  • Could you please clarify what booklet you are referencing? – C8H10N4O2 Aug 28 '17 at 01:30
  • @Darren, I am actually still seeing different behaviour on my actual dataset. I have edited my post to show the results from my actual model. The cross-validated models have selected smaller lambdas, yet the main model stops at a very large lambda. Please let me know if you have any further thoughts. – jav Aug 28 '17 at 05:46
    @C8H10N4O2 Sorry, added the link to the download page, where you can find a booklet on GLM, describing the h2o implementation. – Darren Cook Aug 28 '17 at 08:25
  • @jav My first two thoughts on your real results were: 1. the cv models tried down to 1e-5, but chose a lambda 3 orders of magnitude higher, so it wants to regularize a lot, suggesting a lot of noise; 2. nfolds of 3 is low - try 10, and see if you get better parameter estimates? Though, I do agree it is strange that the main model didn't try a few more lower lambdas, given that it chose the last lambda it tried. – Darren Cook Aug 28 '17 at 08:31