CRAN Package Check Results for Package mboost

Last updated on 2024-11-12 15:49:56 CET.

Flavor                             Version  Tinstall  Tcheck  Ttotal  Status  Flags
r-devel-linux-x86_64-debian-clang  2.9-11      16.35  276.36  292.71  OK
r-devel-linux-x86_64-debian-gcc    2.9-11      10.96  172.15  183.11  ERROR
r-devel-linux-x86_64-fedora-clang  2.9-11                     467.18  OK
r-devel-linux-x86_64-fedora-gcc    2.9-11                     454.83  OK
r-devel-windows-x86_64             2.9-11      18.00  136.00  154.00  OK      --no-vignettes
r-patched-linux-x86_64             2.9-11      14.64  259.58  274.22  OK
r-release-linux-x86_64             2.9-11      15.78  263.52  279.30  OK
r-release-macos-arm64              2.9-11                     141.00  OK
r-release-macos-x86_64             2.9-11                     464.00  OK
r-release-windows-x86_64           2.9-11      17.00  137.00  154.00  OK      --no-vignettes
r-oldrel-macos-arm64               2.9-11                     198.00  OK
r-oldrel-macos-x86_64              2.9-11                     313.00  OK
r-oldrel-windows-x86_64            2.9-11      24.00  185.00  209.00  OK      --no-vignettes
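
The Windows flavors are checked with the extra flag --no-vignettes. As a rough illustration (not part of the CRAN report), the same flag can be forwarded when checking the released source tarball locally; the sketch below uses the rcmdcheck package and a hypothetical local file name, both of which are assumptions rather than anything CRAN prescribes.

## Hedged sketch: reproduce a flavor's check settings locally.
## 'rcmdcheck' and the tarball path are assumptions, not part of this report.
# install.packages("rcmdcheck")
library("rcmdcheck")

tarball <- "mboost_2.9-11.tar.gz"   # hypothetical local path to the release tarball

## forward the same extra flag used by the Windows flavors,
## i.e. skip running vignette code and building vignette outputs
res <- rcmdcheck(tarball, args = "--no-vignettes")
print(res)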

Check Details

Version: 2.9-11
Check: examples
Result: ERROR
Running examples in ‘mboost-Ex.R’ failed
The error most likely occurred in:

> base::assign(".ptime", proc.time(), pos = "CheckExEnv")
> ### Name: baselearners
> ### Title: Base-learners for Gradient Boosting
> ### Aliases: baselearners baselearner base-learner bols bbs bspatial brad
> ###   bkernel brandom btree bmono bmrf buser bns bss %+% %X% %O%
> ### Keywords: models
> 
> ### ** Examples
> 
> 
> set.seed(290875)
> 
> n <- 100
> x1 <- rnorm(n)
> x2 <- rnorm(n) + 0.25 * x1
> x3 <- as.factor(sample(0:1, 100, replace = TRUE))
> x4 <- gl(4, 25)
> y <- 3 * sin(x1) + x2^2 + rnorm(n)
> weights <- drop(rmultinom(1, n, rep.int(1, n) / n))
> 
> ### set up base-learners
> spline1 <- bbs(x1, knots = 20, df = 4)
> extract(spline1, "design")[1:10, 1:10]
      1 2 3 4          5          6            7           8           9
 [1,] 0 0 0 0 0.00000000 0.00000000 0.000000e+00 0.000000000 0.000000000
 [2,] 0 0 0 0 0.00000000 0.00000000 0.000000e+00 0.000000000 0.000000000
 [3,] 0 0 0 0 0.00000000 0.00000000 0.000000e+00 0.000000000 0.000000000
 [4,] 0 0 0 0 0.00000000 0.00000000 0.000000e+00 0.000000000 0.000000000
 [5,] 0 0 0 0 0.01490533 0.44554054 5.113987e-01 0.028155480 0.000000000
 [6,] 0 0 0 0 0.00000000 0.00000000 0.000000e+00 0.000000000 0.000000000
 [7,] 0 0 0 0 0.00000000 0.06481227 6.035695e-01 0.328334430 0.003283771
 [8,] 0 0 0 0 0.00000000 0.00000000 0.000000e+00 0.000000000 0.000000000
 [9,] 0 0 0 0 0.00000000 0.00000000 1.551197e-09 0.167720617 0.666662247
[10,] 0 0 0 0 0.00000000 0.00000000 0.000000e+00 0.009196839 0.401902997
             10
 [1,] 0.0000000
 [2,] 0.0000000
 [3,] 0.0000000
 [4,] 0.0000000
 [5,] 0.0000000
 [6,] 0.0000000
 [7,] 0.0000000
 [8,] 0.0000000
 [9,] 0.1656171
[10,] 0.5493155
> extract(spline1, "penalty")
      [,1] [,2] [,3] [,4] [,5] [,6] [,7] [,8] [,9] [,10] [,11] [,12] [,13]
 [1,]    1   -2    1    0    0    0    0    0    0     0     0     0     0
 [2,]   -2    5   -4    1    0    0    0    0    0     0     0     0     0
 [3,]    1   -4    6   -4    1    0    0    0    0     0     0     0     0
 [4,]    0    1   -4    6   -4    1    0    0    0     0     0     0     0
 [5,]    0    0    1   -4    6   -4    1    0    0     0     0     0     0
 [6,]    0    0    0    1   -4    6   -4    1    0     0     0     0     0
 [7,]    0    0    0    0    1   -4    6   -4    1     0     0     0     0
 [8,]    0    0    0    0    0    1   -4    6   -4     1     0     0     0
 [9,]    0    0    0    0    0    0    1   -4    6    -4     1     0     0
[10,]    0    0    0    0    0    0    0    1   -4     6    -4     1     0
[11,]    0    0    0    0    0    0    0    0    1    -4     6    -4     1
[12,]    0    0    0    0    0    0    0    0    0     1    -4     6    -4
[13,]    0    0    0    0    0    0    0    0    0     0     1    -4     6
[14,]    0    0    0    0    0    0    0    0    0     0     0     1    -4
[15,]    0    0    0    0    0    0    0    0    0     0     0     0     1
[16,]    0    0    0    0    0    0    0    0    0     0     0     0     0
[17,]    0    0    0    0    0    0    0    0    0     0     0     0     0
[18,]    0    0    0    0    0    0    0    0    0     0     0     0     0
[19,]    0    0    0    0    0    0    0    0    0     0     0     0     0
[20,]    0    0    0    0    0    0    0    0    0     0     0     0     0
[21,]    0    0    0    0    0    0    0    0    0     0     0     0     0
[22,]    0    0    0    0    0    0    0    0    0     0     0     0     0
[23,]    0    0    0    0    0    0    0    0    0     0     0     0     0
[24,]    0    0    0    0    0    0    0    0    0     0     0     0     0
      [,14] [,15] [,16] [,17] [,18] [,19] [,20] [,21] [,22] [,23] [,24]
 [1,]     0     0     0     0     0     0     0     0     0     0     0
 [2,]     0     0     0     0     0     0     0     0     0     0     0
 [3,]     0     0     0     0     0     0     0     0     0     0     0
 [4,]     0     0     0     0     0     0     0     0     0     0     0
 [5,]     0     0     0     0     0     0     0     0     0     0     0
 [6,]     0     0     0     0     0     0     0     0     0     0     0
 [7,]     0     0     0     0     0     0     0     0     0     0     0
 [8,]     0     0     0     0     0     0     0     0     0     0     0
 [9,]     0     0     0     0     0     0     0     0     0     0     0
[10,]     0     0     0     0     0     0     0     0     0     0     0
[11,]     0     0     0     0     0     0     0     0     0     0     0
[12,]     1     0     0     0     0     0     0     0     0     0     0
[13,]    -4     1     0     0     0     0     0     0     0     0     0
[14,]     6    -4     1     0     0     0     0     0     0     0     0
[15,]    -4     6    -4     1     0     0     0     0     0     0     0
[16,]     1    -4     6    -4     1     0     0     0     0     0     0
[17,]     0     1    -4     6    -4     1     0     0     0     0     0
[18,]     0     0     1    -4     6    -4     1     0     0     0     0
[19,]     0     0     0     1    -4     6    -4     1     0     0     0
[20,]     0     0     0     0     1    -4     6    -4     1     0     0
[21,]     0     0     0     0     0     1    -4     6    -4     1     0
[22,]     0     0     0     0     0     0     1    -4     6    -4     1
[23,]     0     0     0     0     0     0     0     1    -4     5    -2
[24,]     0     0     0     0     0     0     0     0     1    -2     1
> knots.x2 <- quantile(x2, c(0.25, 0.5, 0.75))
> spline2 <- bbs(x2, knots = knots.x2, df = 5)
> ols3 <- bols(x3)
> extract(ols3)
  (Intercept) x31
1           1   1
3           1   0
attr(,"assign")
[1] 0 1
attr(,"contrasts")
attr(,"contrasts")$x3
[1] "contr.treatment"

> ols4 <- bols(x4)
> 
> ### compute base-models
> drop(ols3$dpp(weights)$fit(y)$model) ## same as:
(Intercept)         x31 
  1.3842061   0.2083405
> coef(lm(y ~ x3, weights = weights))
(Intercept)         x31 
  1.3842061   0.2083405
> 
> drop(ols4$dpp(weights)$fit(y)$model) ## same as:
(Intercept)         x42         x43         x44 
  0.9162875   0.3180593   0.8982705   0.8162401
> coef(lm(y ~ x4, weights = weights))
(Intercept)         x42         x43         x44 
  0.9162875   0.3180593   0.8982705   0.8162401
> 
> ### fit model, component-wise
> mod1 <- mboost_fit(list(spline1, spline2, ols3, ols4), y, weights)
> 
> ### more convenient formula interface
> mod2 <- mboost(y ~ bbs(x1, knots = 20, df = 4) +
+     bbs(x2, knots = knots.x2, df = 5) +
+     bols(x3) + bols(x4), weights = weights)
> all.equal(coef(mod1), coef(mod2))
[1] TRUE
> 
> 
> ### grouped linear effects
> # center x1 and x2 first
> x1 <- scale(x1, center = TRUE, scale = FALSE)
> x2 <- scale(x2, center = TRUE, scale = FALSE)
> model <- gamboost(y ~ bols(x1, x2, intercept = FALSE) +
+     bols(x1, intercept = FALSE) +
+     bols(x2, intercept = FALSE),
+     control = boost_control(mstop = 50))
> coef(model, which = 1)   # one base-learner for x1 and x2
$`bols(x1, x2, intercept = FALSE)`
         x1          x2 
 1.81077137 -0.02249335 

attr(,"offset")
[1] 1.334042
> coef(model, which = 2:3) # two separate base-learners for x1 and x2
$`bols(x1, intercept = FALSE)`
x1 
 0 

$`bols(x2, intercept = FALSE)`
x2 
 0 

attr(,"offset")
[1] 1.334042
> # zero because they were (not yet) selected.
> 
> ### example for bspatial
> x1 <- runif(250, -pi, pi)
> x2 <- runif(250, -pi, pi)
> 
> y <- sin(x1) * sin(x2) + rnorm(250, sd = 0.4)
> 
> spline3 <- bspatial(x1, x2, knots = 12)
> Xmat <- extract(spline3, "design")
> ## 12 inner knots + 4 boundary knots = 16 knots per direction
> ## THUS: 16 * 16 = 256 columns
> dim(Xmat)
[1] 250 256
> extract(spline3, "penalty")[1:10, 1:10]
10 x 10 sparse Matrix of class "dsCMatrix"
 [1,]  2 -2  1  .  .  .  .  .  .  .
 [2,] -2  6 -4  1  .  .  .  .  .  .
 [3,]  1 -4  7 -4  1  .  .  .  .  .
 [4,]  .  1 -4  7 -4  1  .  .  .  .
 [5,]  .  .  1 -4  7 -4  1  .  .  .
 [6,]  .  .  .  1 -4  7 -4  1  .  .
 [7,]  .  .  .  .  1 -4  7 -4  1  .
 [8,]  .  .  .  .  .  1 -4  7 -4  1
 [9,]  .  .  .  .  .  .  1 -4  7 -4
[10,]  .  .  .  .  .  .  .  1 -4  7
> 
> ## specify number of knots separately
> form1 <- y ~ bspatial(x1, x2, knots = list(x1 = 12, x2 = 14))
> 
> ## decompose spatial effect into parametric part and
> ## deviation with one df
> form2 <- y ~ bols(x1) + bols(x2) + bols(x1, by = x2, intercept = FALSE) +
+     bspatial(x1, x2, knots = 12, center = TRUE, df = 1)
> 
> mod1 <- gamboost(form1)
> ## Not run: 
> ##D plot(mod1)
> ##D 
> ## End(Not run)
> 
> mod2 <- gamboost(form2)
> ## automated plot function:
> ## Not run: 
> ##D plot(mod2)
> ##D 
> ## End(Not run)
> ## plot sum of linear and smooth effects:
> library("lattice")
> df <- expand.grid(x1 = unique(x1), x2 = unique(x2))
> df$pred <- predict(mod2, newdata = df)
> ## Not run: 
> ##D levelplot(pred ~ x1 * x2, data = df)
> ##D 
> ## End(Not run)
> 
> ## specify radial basis function base-learner for spatial effect
> ## and use data-adaptive effective range (theta = NULL, see 'args')
> form3 <- y ~ brad(x1, x2)
> ## Now use different settings, e.g. 50 knots and theta fixed to 0.4
> ## (not really a good setting)
> form4 <- y ~ brad(x1, x2, knots = 50, args = list(theta = 0.4))
> 
> mod3 <- gamboost(form3)
Loading required namespace: fields
Failed with error:  ‘there is no package called ‘fields’’
Error in hyper_brad(mf, vary, knots = knots, df = df, lambda = lambda,  : 
  Cannot load package‘fields’, which is needed for the automatic knot placement
Calls: gamboost ... eval -> eval -> brad -> bl_lin -> newX -> hyper_brad
Execution halted
Examples with CPU (user + system) or elapsed time > 5s
         user system elapsed
Family  5.144  0.048   5.509
Flavor: r-devel-linux-x86_64-debian-gcc
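
The failure is confined to the brad() part of the example: the suggested package 'fields' was not installed on the r-devel-linux-x86_64-debian-gcc machine, and brad() needs it for automatic knot placement. Below is a minimal sketch of a guard around that call; it is an assumption for illustration, not code taken from mboost-Ex.R, and it reuses form3 from the transcript above.

## Hedged sketch: run the radial-basis part of the example only when the
## suggested package 'fields' can be loaded; otherwise skip it gracefully.
if (requireNamespace("fields", quietly = TRUE)) {
    mod3 <- gamboost(form3)   # brad() with data-adaptive effective range
} else {
    message("Package 'fields' not available; skipping the brad() example.")
}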