diff --git a/src/library/stats/R/constrOptim.R b/src/library/stats/R/constrOptim.R index 0c1e2a4ea7..b31b6e59e2 100644 --- a/src/library/stats/R/constrOptim.R +++ b/src/library/stats/R/constrOptim.R @@ -54,11 +54,14 @@ constrOptim <- } else function(theta, ...) dR(theta, theta.old, ...) totCounts <- 0 s.mu <- sign(mu) + a <- NULL + optim_failure <- FALSE for(i in seq_len(outer.iterations)) { obj.old <- obj r.old <- r theta.old <- theta + a.old <- a a <- optim(theta.old, fun, gradient, control = control, method = method, hessian = hessian, ...) @@ -66,20 +69,32 @@ constrOptim <- if (is.finite(r) && is.finite(r.old) && abs(r - r.old) < (1e-3 + abs(r)) * outer.eps) break theta <- a$par - totCounts <- totCounts + a$counts + + if (any(ui%*%theta-ci<0) || !is.finite(r) || any(!is.finite(theta))) { + i <- i - 1 + a <- a.old + optim_failure <- TRUE + break + } + + totCounts <- totCounts + a$counts obj <- f(theta, ...) if (s.mu * obj > s.mu * obj.old) break } + if (optim_failure) { + a$convergence <- 21L # See https://github.com/nashjc/optimx/blob/main/man/optimx.Rd + a$message <- gettextf("Returning solution from outer iteration %d, either because the solution is not in the feasible region or optim() provided non-finite outputs. 
Consider either checking the gradient implementation or using a derivative-free optimizer or reducing outer.eps.", i) + } if (i == outer.iterations) { - a$convergence <- 7 + a$convergence <- 7L a$message <- gettext("Barrier algorithm ran out of iterations and did not converge") } if (mu > 0 && obj > obj.old) { - a$convergence <- 11 + a$convergence <- 11L a$message <- gettextf("Objective function increased at outer iteration %d", i) } if (mu < 0 && obj < obj.old) { - a$convergence <- 11 + a$convergence <- 11L a$message <- gettextf("Objective function decreased at outer iteration %d", i) } a$outer.iterations <- i diff --git a/src/library/stats/man/constrOptim.Rd b/src/library/stats/man/constrOptim.Rd index d7f955e309..1443651c20 100644 --- a/src/library/stats/man/constrOptim.Rd +++ b/src/library/stats/man/constrOptim.Rd @@ -66,6 +66,13 @@ constrOptim(theta, f, grad, ui, ci, mu = 1e-04, control = list(), \code{method = "Nelder-Mead"}. It should take arguments matching those of \code{f} and return a vector containing the gradient. + In some rare corner cases, the call to \code{optim} inside the outer + loop might lead to a non-finite function value, non-finite parameter + values, or parameters outside the feasible region. In these cases, the outer + loop is stopped early and the last useful result is returned. The + \code{message} item in the output list warns the user of this + behaviour. 
+ } \value{ As for \code{\link{optim}}, but with two extra components: diff --git a/src/library/stats/tests/constrOptim.R b/src/library/stats/tests/constrOptim.R new file mode 100644 index 0000000000..66982d1635 --- /dev/null +++ b/src/library/stats/tests/constrOptim.R @@ -0,0 +1,33 @@ +objective <- function(p){ + out <- -log(p[1] + p[2]) + log(p[2]) + log(1 - p[1] - p[2]) + out +} + +gradfun <- function(p){ + out <- c( + -1/(p[1] + p[2]) - 1/(1 - p[1] - p[2]), + -1/(p[1] + p[2]) + 1/p[2] - 1/(1 - p[1] - p[2]) + ) + out +} +startp <- c(1/3, 1/3) +small <- 1e-6 +CI <- c(small, small, small - 1) +UI <- rbind( + c(1, 0), # p1>small + c(0, 1), # p2>small + c(-1, -1) # p1+p2 < 1-small +) + +out <- constrOptim( + theta = startp, + f = objective, + grad = gradfun, + ui = UI, + ci = CI, + outer.iterations = 100, + method = "BFGS" +) + +# Convergence code 0 (set by optim) +stopifnot(identical(out$convergence, 0L))