Aliases: nlminb
Keywords: optimize
### ** Examples
## No test:
x <- rnbinom(100, mu = 10, size = 10)
hdev <- function(par)
    -sum(dnbinom(x, mu = par[1], size = par[2], log = TRUE))
nlminb(c(9, 12), hdev)
$par
[1]  9.950003 13.495939

$objective
[1] 280.5877

$convergence
[1] 0

$iterations
[1] 10

$evaluations
function gradient 
      12       30 

$message
[1] "relative convergence (4)"
nlminb(c(20, 20), hdev, lower = 0, upper = Inf)
$par
[1]  9.95000 13.49592

$objective
[1] 280.5877

$convergence
[1] 0

$iterations
[1] 16

$evaluations
function gradient 
      20       40 

$message
[1] "relative convergence (4)"
nlminb(c(20, 20), hdev, lower = 0.001, upper = Inf)
$par
[1]  9.95000 13.49592

$objective
[1] 280.5877

$convergence
[1] 0

$iterations
[1] 16

$evaluations
function gradient 
      20       40 

$message
[1] "relative convergence (4)"
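
## Added sanity check (illustrative; 'fit' is made up for this sketch): the
## ML estimate of mu for a negative binomial equals the sample mean, so the
## first fitted parameter should be close to mean(x).
fit <- nlminb(c(9, 12), hdev)
c(fitted_mu = fit$par[1], sample_mean = mean(x))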
## slightly modified from the S-PLUS help page for nlminb
# this example minimizes a sum of squares with known solution y
sumsq <- function(x, y) { sum((x - y)^2) }
y <- rep(1, 5)
x0 <- rnorm(length(y))
nlminb(start = x0, sumsq, y = y)
$par
[1] 1 1 1 1 1

$objective
[1] 1.056396e-18

$convergence
[1] 0

$iterations
[1] 5

$evaluations
function gradient 
       7       33 

$message
[1] "X-convergence (3)"
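
## Added check (illustrative, not part of the original example): the
## unconstrained fit should recover the known solution y, up to the
## convergence tolerance.
all.equal(nlminb(start = x0, sumsq, y = y)$par, y)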
# now use bounds with a y that has some components outside the bounds
y <- c(0, 2, 0, -2, 0)
nlminb(start = x0, sumsq, lower = -1, upper = 1, y = y)
$par
[1] -4.307913e-08  1.000000e+00  1.436043e-06 -1.000000e+00  9.380389e-07

$objective
[1] 2

$convergence
[1] 0

$iterations
[1] 4

$evaluations
function gradient 
       6       20 

$message
[1] "relative convergence (4)"
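
## Added note (illustrative): for this separable sum of squares, the
## box-constrained minimizer is just y clipped to the bounds, so the
## result above can be checked directly.
pmin(pmax(y, -1), 1)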
# try using the gradient
sumsq.g <- function(x, y) 2 * (x - y)
nlminb(start = x0, sumsq, sumsq.g, lower = -1, upper = 1, y = y)
$par
[1] -1.110223e-16  1.000000e+00 -2.428613e-17 -1.000000e+00 -1.387779e-16

$objective
[1] 2

$convergence
[1] 0

$iterations
[1] 4

$evaluations
function gradient 
       6        4 

$message
[1] "both X-convergence and relative convergence (5)"
# now use the hessian, too
sumsq.h <- function(x, y) diag(2, nrow = length(x))
nlminb(start = x0, sumsq, sumsq.g, sumsq.h, lower = -1, upper = 1, y = y)
$par
[1]  2.220446e-16  1.000000e+00  0.000000e+00 -1.000000e+00  5.551115e-17

$objective
[1] 2

$convergence
[1] 0

$iterations
[1] 2

$evaluations
function gradient 
       4        2 

$message
[1] "both X-convergence and relative convergence (5)"
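
## Added comparison (illustrative; 'fits' is made up for this sketch):
## supplying the analytic gradient and Hessian reduces the number of
## objective/gradient evaluations -- compare the $evaluations counts.
fits <- list(
    numeric  = nlminb(start = x0, sumsq, lower = -1, upper = 1, y = y),
    gradient = nlminb(start = x0, sumsq, sumsq.g, lower = -1, upper = 1, y = y),
    hessian  = nlminb(start = x0, sumsq, sumsq.g, sumsq.h, lower = -1, upper = 1, y = y))
sapply(fits, `[[`, "evaluations")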
## Rest lifted from optim help page
fr <- function(x) {   ## Rosenbrock Banana function
    x1 <- x[1]
    x2 <- x[2]
    100 * (x2 - x1 * x1)^2 + (1 - x1)^2
}
grr <- function(x) {  ## Gradient of 'fr'
    x1 <- x[1]
    x2 <- x[2]
    c(-400 * x1 * (x2 - x1 * x1) - 2 * (1 - x1),
       200 * (x2 - x1 * x1))
}
nlminb(c(-1.2,1), fr)
$par
[1] 1 1

$objective
[1] 2.768493e-20

$convergence
[1] 0

$iterations
[1] 35

$evaluations
function gradient 
      44       74 

$message
[1] "X-convergence (3)"
nlminb(c(-1.2,1), fr, grr)
$par
[1] 1 1

$objective
[1] 4.291816e-22

$convergence
[1] 0

$iterations
[1] 35

$evaluations
function gradient 
      43       36 

$message
[1] "X-convergence (3)"
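
## Added cross-check (illustrative): the same problem solved with optim(),
## whose help page these two examples were lifted from; both optimizers
## should find the minimum at (1, 1).
optim(c(-1.2, 1), fr, grr, method = "BFGS")$par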
flb <- function(x) {
    p <- length(x)
    sum(c(1, rep(4, p-1)) * (x - c(1, x[-p])^2)^2)
}
## 25-dimensional box constrained
## par[24] is *not* at boundary
nlminb(rep(3, 25), flb, lower = rep(2, 25), upper = rep(4, 25))
$par
 [1] 2.000000 2.000000 2.000000 2.000000 2.000000 2.000000 2.000000 2.000000
 [9] 2.000000 2.000000 2.000000 2.000000 2.000000 2.000000 2.000000 2.000000
[17] 2.000000 2.000000 2.000000 2.000000 2.000000 2.000000 2.000000 2.109093
[25] 4.000000

$objective
[1] 368.1059

$convergence
[1] 0

$iterations
[1] 6

$evaluations
function gradient 
      10      177 

$message
[1] "relative convergence (4)"
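
## Added check (illustrative; 'fit25' and the 1e-6 tolerance are made up for
## this sketch): only component 24 should lie strictly inside the bounds.
fit25 <- nlminb(rep(3, 25), flb, lower = rep(2, 25), upper = rep(4, 25))
which(fit25$par > 2 + 1e-6 & fit25$par < 4 - 1e-6)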
## trying to use a too small tolerance:
r <- nlminb(rep(3, 25), flb, control = list(rel.tol = 1e-16))
stopifnot(grepl("rel.tol", r$message))
## End(No test)