R Under development (unstable) (2022-03-19 r81942) -- "Unsuffered Consequences"
Copyright (C) 2022 The R Foundation for Statistical Computing
Platform: x86_64-pc-linux-gnu (64-bit)

R is free software and comes with ABSOLUTELY NO WARRANTY.
You are welcome to redistribute it under certain conditions.
Type 'license()' or 'licence()' for distribution details.

R is a collaborative project with many contributors.
Type 'contributors()' for more information and
'citation()' on how to cite R or R packages in publications.

Type 'demo()' for some demos, 'help()' for on-line help, or
'help.start()' for an HTML browser interface to help.
Type 'q()' to quit R.

> # File src/library/stats/tests/nls.R
> # Part of the R package, https://www.R-project.org
> #
> # This program is free software; you can redistribute it and/or modify
> # it under the terms of the GNU General Public License as published by
> # the Free Software Foundation; either version 2 of the License, or
> # (at your option) any later version.
> #
> # This program is distributed in the hope that it will be useful,
> # but WITHOUT ANY WARRANTY; without even the implied warranty of
> # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
> # GNU General Public License for more details.
> #
> # A copy of the GNU General Public License is available at
> # https://www.R-project.org/Licenses/
>
> ## tests of nls, especially of weighted fits
>
> library(stats)
> options(digits = 5) # to avoid trivial printed differences
> options(useFancyQuotes = FALSE) # avoid fancy quotes in o/p
> options(show.nls.convergence = FALSE) # avoid non-diffable output
> options(warn = 1)
>
> have_MASS <- requireNamespace('MASS', quietly = TRUE)
>
> pdf("nls-test.pdf")
>
> ## utility for comparing nls() results: [TODO: use more often below]
> .n <- function(r) r[names(r) != "call"]
>
> ## selfStart.default() w/ no parameters:
> logist <- deriv( ~Asym/(1+exp(-(x-xmid)/scal)), c("Asym", "xmid", "scal"),
+ function(x, Asym, xmid, scal){} )
> logistInit <- function(mCall, LHS, data) {
+ xy <- sortedXyData(mCall[["x"]], LHS, data)
+ if(nrow(xy) < 3) stop("Too few distinct input values to fit a logistic")
+ Asym <- max(abs(xy[,"y"]))
+ if (Asym != max(xy[,"y"])) Asym <- -Asym # negative asymptote
+ xmid <- NLSstClosestX(xy, 0.5 * Asym)
+ scal <- NLSstClosestX(xy, 0.75 * Asym) - xmid
+ setNames(c(Asym, xmid, scal),
+ mCall[c("Asym", "xmid", "scal")])
+ }
> logist <- selfStart(logist, initial = logistInit) ##-> Error in R 1.5.0
> str(logist)
function (x, Asym, xmid, scal)
 - attr(*, "initial")=function (mCall, LHS, data)
 - attr(*, "class")= chr "selfStart"
> ## with parameters and getInitial():
> logist <- selfStart(logist, initial = logistInit,
+ parameters = c("Asym", "xmid", "scal"))
> tools::assertWarning(verbose = TRUE,
+ in1 <- getInitial(circumference ~ logist(age, Asym, xmid, scal), Orange)
+ ) # no warning previously
Asserted warning: selfStart initializing functions should have a final '...' argument since R 4.1.0
> ## but this then failed, now gives the same warning:
> tools::assertWarning(verbose = TRUE,
+ fm <- nls(circumference ~ logist(age, Asym, xmid, scal), Orange)
+ )
Asserted warning: selfStart initializing functions should have a final '...' argument since R 4.1.0
> ## in R 4.1.{0,1,2} gave
> ## Error in (attr(object, "initial"))(mCall = mCall, data = data, LHS = LHS, :
> ## unused arguments (control = list(.......), trace = FALSE)
> ## IGNORE_RDIFF_BEGIN
> coef(summary(fm))
Estimate Std. Error t value Pr(>|t|)
Asym 192.69 20.244 9.5182 7.4824e-11
xmid 728.76 107.299 6.7919 1.1202e-07
scal 353.53 81.472 4.3393 1.3382e-04
> ## IGNORE_RDIFF_END
>
> ## getInitial.formula() gets selfStart function from environment(formula)
> plogis <- stats::SSlogis
> in2 <- getInitial(circumference ~ plogis(age, Asym, xmid, scal), Orange)
> ## in R <= 4.2.x gave Error:
> ## no 'getInitial' method found for "function" objects
> rm(plogis)
>
>
> ## lower and upper in algorithm="port"
> set.seed(123)
> x <- runif(200)
> a <- b <- 1; c <- -0.1
> y <- a+b*x+c*x^2+rnorm(200, sd=0.05)
> plot(x,y)
> curve(a+b*x+c*x^2, add = TRUE)
> ## IGNORE_RDIFF_BEGIN
> nls(y ~ a+b*x+c*I(x^2), start = c(a=1, b=1, c=0.1), algorithm = "port")
Nonlinear regression model
model: y ~ a + b * x + c * I(x^2)
data: parent.frame()
a b c
1.0058 0.9824 -0.0897
residual sum-of-squares: 0.46

Algorithm "port", convergence message: relative convergence (4)
> (fm <- nls(y ~ a+b*x+c*I(x^2), start = c(a=1, b=1, c=0.1),
+ algorithm = "port", lower = c(0, 0, 0)))
Nonlinear regression model
model: y ~ a + b * x + c * I(x^2)
data: parent.frame()
a b c
1.02 0.89 0.00
residual sum-of-squares: 0.468

Algorithm "port", convergence message: both X-convergence and relative convergence (5)
> ## IGNORE_RDIFF_END
> if(have_MASS) {
+ print(confint(fm))
+ } else message("skipping tests requiring the MASS package")
Waiting for profiling to be done...
2.5% 97.5%
a 1.00875 1.037847
b 0.84138 0.914645
c NA 0.042807
>
> ## weighted nls fit
> set.seed(123)
> y <- x <- 1:10
> yeps <- y + rnorm(length(y), sd = 0.01)
> wts <- rep(c(1, 2), length = 10); wts[5] <- 0
> fit0 <- lm(yeps ~ x, weights = wts)
> ## IGNORE_RDIFF_BEGIN
> summary(fit0, cor = TRUE)

Call:
lm(formula = yeps ~ x, weights = wts)

Weighted Residuals:
Min 1Q Median 3Q Max
-0.01562 -0.00723 -0.00158 0.00403 0.02413

Coefficients:
Estimate Std. Error t value Pr(>|t|)
(Intercept) 0.00517 0.00764 0.68 0.52
x 0.99915 0.00119 841.38 <2e-16 ***
---
Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Residual standard error: 0.0132 on 7 degrees of freedom
Multiple R-squared: 1, Adjusted R-squared: 1
F-statistic: 7.08e+05 on 1 and 7 DF, p-value: <2e-16

Correlation of Coefficients:
(Intercept)
x -0.89

> cf0 <- coef(summary(fit0))[, 1:2]
> fit <- nls(yeps ~ a + b*x, start = list(a = 0.12345, b = 0.54321),
+ weights = wts, trace = TRUE)
112.14 (3.04e+02): par = (0.12345 0.54321)
0.0012128 (2.75e-06): par = (0.0051705 0.99915)
> summary(fit, cor = TRUE)

Formula: yeps ~ a + b * x

Parameters:
Estimate Std. Error t value Pr(>|t|)
a 0.00517 0.00764 0.68 0.52
b 0.99915 0.00119 841.37 <2e-16 ***
---
Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Residual standard error: 0.0132 on 7 degrees of freedom

Correlation of Parameter Estimates:
a
b -0.89

> ## IGNORE_RDIFF_END
> stopifnot(all.equal(residuals(fit), residuals(fit0), tolerance = 1e-5,
+ check.attributes = FALSE))
> stopifnot(df.residual(fit) == df.residual(fit0))
> stopifnot(all.equal(logLik(fit), logLik(fit0), tolerance = 1e-8))
> cf1 <- coef(summary(fit))[, 1:2]
> ## IGNORE_RDIFF_BEGIN
> fit2 <- nls(yeps ~ a + b*x, start = list(a = 0.12345, b = 0.54321),
+ weights = wts, trace = TRUE, algorithm = "port")
0: 56.070572: 0.123450 0.543210
1: 6.3964587: 1.34546 0.700840
2: 0.00060639084: 0.00517053 0.999153
3: 0.00060639084: 0.00517051 0.999153
> summary(fit2, cor = TRUE)

Formula: yeps ~ a + b * x

Parameters:
Estimate Std. Error t value Pr(>|t|)
a 0.00517 0.00764 0.68 0.52
b 0.99915 0.00119 841.38 <2e-16 ***
---
Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Residual standard error: 0.0132 on 7 degrees of freedom

Correlation of Parameter Estimates:
a
b -0.89

Algorithm "port", convergence message: both X-convergence and relative convergence (5)

> ## IGNORE_RDIFF_END
> cf2 <- coef(summary(fit2))[, 1:2]
> rownames(cf0) <- c("a", "b")
> # expect relative errors ca 2e-08
> stopifnot(all.equal(cf1, cf0, tolerance = 1e-6),
+ all.equal(cf1, cf0, tolerance = 1e-6))
> stopifnot(all.equal(residuals(fit2), residuals(fit0), tolerance = 1e5,
+ check.attributes = FALSE))
> stopifnot(all.equal(logLik(fit2), logLik(fit0), tolerance = 1e-8))
>
>
> DNase1 <- subset(DNase, Run == 1)
> DNase1$wts <- rep(8:1, each = 2)
> fm1 <- nls(density ~ SSlogis(log(conc), Asym, xmid, scal),
+ data = DNase1, weights = wts)
> summary(fm1)

Formula: density ~ SSlogis(log(conc), Asym, xmid, scal)

Parameters:
Estimate Std. Error t value Pr(>|t|)
Asym 2.3350 0.0966 24.2 3.5e-12 ***
xmid 1.4731 0.0947 15.6 8.8e-10 ***
scal 1.0385 0.0304 34.1 4.2e-14 ***
---
Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Residual standard error: 0.0355 on 13 degrees of freedom

>
> ## directly
> fm2 <- nls(density ~ Asym/(1 + exp((xmid - log(conc))/scal)),
+ data = DNase1, weights = wts,
+ start = list(Asym = 3, xmid = 0, scal = 1))
> summary(fm2)

Formula: density ~ Asym/(1 + exp((xmid - log(conc))/scal))

Parameters:
Estimate Std. Error t value Pr(>|t|)
Asym 2.3350 0.0966 24.2 3.5e-12 ***
xmid 1.4731 0.0947 15.6 8.8e-10 ***
scal 1.0385 0.0304 34.1 4.2e-14 ***
---
Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Residual standard error: 0.0355 on 13 degrees of freedom

> stopifnot(all.equal(coef(summary(fm2)), coef(summary(fm1)), tolerance = 1e-6))
> stopifnot(all.equal(residuals(fm2), residuals(fm1), tolerance = 1e-5))
> stopifnot(all.equal(fitted(fm2), fitted(fm1), tolerance = 1e-6))
> fm2a <- nls(density ~ Asym/(1 + exp((xmid - log(conc)))),
+ data = DNase1, weights = wts,
+ start = list(Asym = 3, xmid = 0))
> anova(fm2a, fm2)
Analysis of Variance Table

Model 1: density ~ Asym/(1 + exp((xmid - log(conc))))
Model 2: density ~ Asym/(1 + exp((xmid - log(conc))/scal))
Res.Df Res.Sum Sq Df Sum Sq F value Pr(>F)
1 14 0.0186
2 13 0.0164 1 0.00212 1.68 0.22
>
> ## and without using weights
> fm3 <- nls(~ sqrt(wts) * (density - Asym/(1 + exp((xmid - log(conc))/scal))),
+ data = DNase1, start = list(Asym = 3, xmid = 0, scal = 1))
> summary(fm3)

Formula: 0 ~ sqrt(wts) * (density - Asym/(1 + exp((xmid - log(conc))/scal)))

Parameters:
Estimate Std. Error t value Pr(>|t|)
Asym 2.3350 0.0966 24.2 3.5e-12 ***
xmid 1.4731 0.0947 15.6 8.8e-10 ***
scal 1.0385 0.0304 34.1 4.2e-14 ***
---
Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Residual standard error: 0.0355 on 13 degrees of freedom

> stopifnot(all.equal(coef(summary(fm3)), coef(summary(fm1)), tolerance = 1e-6))
> ft <- with(DNase1, density - fitted(fm3)/sqrt(wts))
> stopifnot(all.equal(ft, fitted(fm1), tolerance = 1e-6))
> # sign of residuals is reversed
> r <- with(DNase1, -residuals(fm3)/sqrt(wts))
> all.equal(r, residuals(fm1), tolerance = 1e-5)
[1] TRUE
> fm3a <- nls(~ sqrt(wts) * (density - Asym/(1 + exp((xmid - log(conc))))),
+ data = DNase1, start = list(Asym = 3, xmid = 0))
> anova(fm3a, fm3)
Analysis of Variance Table

Model 1: 0 ~ sqrt(wts) * (density - Asym/(1 + exp((xmid - log(conc)))))
Model 2: 0 ~ sqrt(wts) * (density - Asym/(1 + exp((xmid - log(conc))/scal)))
Res.Df Res.Sum Sq Df Sum Sq F value Pr(>F)
1 14 0.0186
2 13 0.0164 1 0.00212 1.68 0.22
>
> ## using conditional linearity
> fm4 <- nls(density ~ 1/(1 + exp((xmid - log(conc))/scal)),
+ data = DNase1, weights = wts,
+ start = list(xmid = 0, scal = 1), algorithm = "plinear")
> summary(fm4)

Formula: density ~ 1/(1 + exp((xmid - log(conc))/scal))

Parameters:
Estimate Std. Error t value Pr(>|t|)
xmid 1.4731 0.0947 15.6 8.8e-10 ***
scal 1.0385 0.0304 34.1 4.2e-14 ***
.lin 2.3350 0.0966 24.2 3.5e-12 ***
---
Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Residual standard error: 0.0355 on 13 degrees of freedom

> cf <- coef(summary(fm4))[c(3,1,2), ]
> rownames(cf)[2] <- "Asym"
> stopifnot(all.equal(cf, coef(summary(fm1)), tolerance = 1e-6,
+ check.attributes = FALSE))
> stopifnot(all.equal(residuals(fm4), residuals(fm1), tolerance = 1e-5))
> stopifnot(all.equal(fitted(fm4), fitted(fm1), tolerance = 1e-6))
> fm4a <- nls(density ~ 1/(1 + exp((xmid - log(conc)))),
+ data = DNase1, weights = wts,
+ start = list(xmid = 0), algorithm = "plinear")
> anova(fm4a, fm4)
Analysis of Variance Table

Model 1: density ~ 1/(1 + exp((xmid - log(conc))))
Model 2: density ~ 1/(1 + exp((xmid - log(conc))/scal))
Res.Df Res.Sum Sq Df Sum Sq F value Pr(>F)
1 14 0.0186
2 13 0.0164 1 0.00212 1.68 0.22
>
> ## using 'port'
> fm5 <- nls(density ~ Asym/(1 + exp((xmid - log(conc))/scal)),
+ data = DNase1, weights = wts,
+ start = list(Asym = 3, xmid = 0, scal = 1),
+ algorithm = "port")
> summary(fm5)

Formula: density ~ Asym/(1 + exp((xmid - log(conc))/scal))

Parameters:
Estimate Std. Error t value Pr(>|t|)
Asym 2.3350 0.0966 24.2 3.5e-12 ***
xmid 1.4731 0.0947 15.6 8.8e-10 ***
scal 1.0385 0.0304 34.1 4.2e-14 ***
---
Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Residual standard error: 0.0355 on 13 degrees of freedom

Algorithm "port", convergence message: relative convergence (4)

> stopifnot(all.equal(coef(summary(fm5)), coef(summary(fm1)), tolerance = 1e-6))
> stopifnot(all.equal(residuals(fm5), residuals(fm1), tolerance = 1e-5))
> stopifnot(all.equal(fitted(fm5), fitted(fm1), tolerance = 1e-6))
>
> ## check profiling
> pfm1 <- profile(fm1)
> pfm3 <- profile(fm3)
> for(m in names(pfm1))
+ stopifnot(all.equal(pfm1[[m]], pfm3[[m]], tolerance = 1e-5))
> pfm5 <- profile(fm5)
> for(m in names(pfm1))
+ stopifnot(all.equal(pfm1[[m]], pfm5[[m]], tolerance = 1e-5))
> if(have_MASS) {
+ print(c1 <- confint(fm1))
+ print(c4 <- confint(fm4, 1:2))
+ stopifnot(all.equal(c1[2:3, ], c4, tolerance = 1e-3))
+ }
Waiting for profiling to be done...
2.5% 97.5%
Asym 2.14936 2.5724
xmid 1.28535 1.6966
scal 0.97526 1.1068
Waiting for profiling to be done...
2.5% 97.5%
xmid 1.2866 1.6949
scal 0.9757 1.1063
>
> ## some low-dimensional examples
> npts <- 1000
> set.seed(1001)
> x <- runif(npts)
> b <- 0.7
> y <- x^b+rnorm(npts, sd=0.05)
> a <- 0.5
> y2 <- a*x^b+rnorm(npts, sd=0.05)
> c <- 1.0
> y3 <- a*(x+c)^b+rnorm(npts, sd=0.05)
> d <- 0.5
> y4 <- a*(x^d+c)^b+rnorm(npts, sd=0.05)
> m1 <- c(y ~ x^b, y2 ~ a*x^b, y3 ~ a*(x+exp(logc))^b)
> s1 <- list(c(b=1), c(a=1,b=1), c(a=1,b=1,logc=0))
> for(p in 1:3) {
+ fm <- nls(m1[[p]], start = s1[[p]])
+ print(fm)
+ if(have_MASS) print(confint(fm))
+ fm <- nls(m1[[p]], start = s1[[p]], algorithm = "port")
+ print(fm)
+ if(have_MASS) print(confint(fm))
+ }
Nonlinear regression model
model: y ~ x^b
data: parent.frame()
b
0.695
residual sum-of-squares: 2.39
Waiting for profiling to be done...
2.5% 97.5%
0.68704 0.70281
Nonlinear regression model
model: y ~ x^b
data: parent.frame()
b
0.695
residual sum-of-squares: 2.39

Algorithm "port", convergence message: relative convergence (4)
Waiting for profiling to be done...
2.5% 97.5%
0.68704 0.70281
Nonlinear regression model
model: y2 ~ a * x^b
data: parent.frame()
a b
0.502 0.724
residual sum-of-squares: 2.51
Waiting for profiling to be done...
2.5% 97.5%
a 0.49494 0.50893
b 0.70019 0.74767
Nonlinear regression model
model: y2 ~ a * x^b
data: parent.frame()
a b
0.502 0.724
residual sum-of-squares: 2.51

Algorithm "port", convergence message: relative convergence (4)
Waiting for profiling to be done...
2.5% 97.5%
a 0.49494 0.50893
b 0.70019 0.74767
Nonlinear regression model
model: y3 ~ a * (x + exp(logc))^b
data: parent.frame()
a b logc
0.558 0.603 -0.176
residual sum-of-squares: 2.44
Waiting for profiling to be done...
2.5% 97.5%
a 0.35006 0.66057
b 0.45107 0.91473
logc -0.64627 0.40946
Nonlinear regression model
model: y3 ~ a * (x + exp(logc))^b
data: parent.frame()
a b logc
0.558 0.603 -0.176
residual sum-of-squares: 2.44

Algorithm "port", convergence message: relative convergence (4)
Waiting for profiling to be done...
2.5% 97.5%
a 0.35006 0.66057
b 0.45107 0.91473
logc -0.64627 0.40946
>
> if(have_MASS) {
+ fm <- nls(y2~x^b, start=c(b=1), algorithm="plinear")
+ print(confint(profile(fm)))
+ fm <- nls(y3 ~ (x+exp(logc))^b, start=c(b=1, logc=0), algorithm="plinear")
+ print(confint(profile(fm)))
+ }
2.5% 97.5%
0.70019 0.74767
2.5% 97.5%
b 0.45105 0.91471
logc -0.64625 0.40933
>
>
> ## more profiling with bounds
> op <- options(digits=3)
> npts <- 10
> set.seed(1001)
> a <- 2
> b <- 0.5
> x <- runif(npts)
> y <- a*x/(1+a*b*x) + rnorm(npts, sd=0.2)
> gfun <- function(a,b,x) {
+ if(a < 0 || b < 0) stop("bounds violated")
+ a*x/(1+a*b*x)
+ }
> m1 <- nls(y ~ gfun(a,b,x), algorithm = "port",
+ lower = c(0,0), start = c(a=1, b=1))
> (pr1 <- profile(m1))
$a
tau par.vals.a par.vals.b
1 -3.869 0.706 0.000
2 -3.114 0.802 0.000
3 -0.863 1.124 0.000
4 0.000 1.538 0.263
5 0.590 1.952 0.446
6 1.070 2.423 0.592
7 1.534 3.082 0.737
8 1.969 4.034 0.878
9 2.376 5.502 1.014
10 2.751 7.929 1.144
11 3.090 12.263 1.264
12 3.375 20.845 1.373

$b
tau par.vals.a par.vals.b
1 -0.673 1.2087 0.0272
2 0.000 1.5381 0.2633
3 0.707 2.0026 0.4994
4 1.365 2.6295 0.7236
5 1.994 3.5762 0.9522
6 2.611 5.1820 1.1962
7 3.225 8.2162 1.4614
8 3.820 17.3946 1.7512

attr(,"original.fit")
Nonlinear regression model
model: y ~ gfun(a, b, x)
data: parent.frame()
a b
1.538 0.263
residual sum-of-squares: 0.389

Algorithm "port", convergence message: relative convergence (4)
attr(,"summary")

Formula: y ~ gfun(a, b, x)

Parameters:
Estimate Std. Error t value Pr(>|t|)
a 1.538 0.617 2.49 0.037 *
b 0.263 0.352 0.75 0.476
---
Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Residual standard error: 0.221 on 8 degrees of freedom

Algorithm "port", convergence message: relative convergence (4)

attr(,"class")
[1] "profile.nls" "profile"
> if(have_MASS) print(confint(pr1))
2.5% 97.5%
a 0.96 5.20
b NA 1.07
>
> gfun <- function(a,b,x) {
+ if(a < 0 || b < 0 || a > 1.5 || b > 1) stop("bounds violated")
+ a*x/(1+a*b*x)
+ }
> m2 <- nls(y ~ gfun(a,b,x), algorithm = "port",
+ lower = c(0, 0), upper=c(1.5, 1), start = c(a=1, b=1))
> profile(m2)
$a
tau par.vals.a par.vals.b
1 -3.681 0.729 0.000
2 -2.945 0.823 0.000
3 -0.977 1.099 0.000
4 0.000 1.500 0.243

$b
tau par.vals.a par.vals.b
1 -0.733 1.18200 0.00395
2 0.000 1.50000 0.24263
3 1.645 1.50000 0.48132
4 2.154 1.50000 0.57869
5 2.727 1.50000 0.70706
6 3.288 1.50000 0.85748

attr(,"original.fit")
Nonlinear regression model
model: y ~ gfun(a, b, x)
data: parent.frame()
a b
1.500 0.243
residual sum-of-squares: 0.39

Algorithm "port", convergence message: relative convergence (4)
attr(,"summary")

Formula: y ~ gfun(a, b, x)

Parameters:
Estimate Std. Error t value Pr(>|t|)
a 1.500 0.598 2.51 0.036 *
b 0.243 0.356 0.68 0.514
---
Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Residual standard error: 0.221 on 8 degrees of freedom

Algorithm "port", convergence message: relative convergence (4)

attr(,"class")
[1] "profile.nls" "profile"
> if(have_MASS) print(confint(m2))
Waiting for profiling to be done...
2.5% 97.5%
a 0.907 NA
b NA 0.611
> options(op)
>
> ## scoping problems
> test <- function(trace=TRUE)
+ {
+ x <- seq(0,5,len=20)
+ n <- 1
+ y <- 2*x^2 + n + rnorm(x)
+ xy <- data.frame(x=x,y=y)
+ myf <- function(x,a,b,c) a*x^b+c
+ list(with.start=
+ nls(y ~ myf(x,a,b,n), data=xy, start=c(a=1,b=1), trace=trace),
+ no.start= ## cheap auto-init to 1
+ suppressWarnings(
+ nls(y ~ myf(x,A,B,n), data=xy)))
+ }
> ## IGNORE_RDIFF_BEGIN
> t1 <- test()
8291.9 (1.06e+01): par = (1 1)
726.02 (5.04e+00): par = (0.80544 2.4297)
552.85 (4.45e+00): par = (1.29 2.129)
70.431 (1.29e+00): par = (1.9565 1.967)
26.555 (4.44e-02): par = (1.9788 2.0064)
26.503 (9.35e-05): par = (1.9798 2.0046)
26.503 (7.17e-07): par = (1.9799 2.0046)
> ## IGNORE_RDIFF_END
> t1$with.start
Nonlinear regression model
model: y ~ myf(x, a, b, n)
data: xy
a b
1.98 2.00
residual sum-of-squares: 26.5
> ##__with.start:
> ## failed to find n in 2.2.x
> ## found wrong n in 2.3.x
> ## finally worked in 2.4.0
> ##__no.start: failed in 3.0.2
> ## 2018-09 fails on macOS with Accelerate framework.
> stopifnot(all.equal(.n(t1[[1]]), .n(t1[[2]]), check.environment = FALSE))
> rm(a,b)
> t2 <- test(FALSE)
> stopifnot(all.equal(lapply(t1, .n),
+ lapply(t2, .n), tolerance = 0.16, # different random error
+ check.environment = FALSE))
>
>
> ## list 'start'
> set.seed(101)# (remain independent of above)
> getExpmat <- function(theta, t)
+ {
+ conc <- matrix(nrow = length(t), ncol = length(theta))
+ for(i in 1:length(theta)) conc[, i] <- exp(-theta[i] * t)
+ conc
+ }
> expsum <- as.vector(getExpmat(c(.05,.005), 1:100) %*% c(1,1))
> expsumNoisy <- expsum + max(expsum) *.001 * rnorm(100)
> expsum.df <-data.frame(expsumNoisy)
>
> ## estimate decay rates, amplitudes with default Gauss-Newton
> summary (nls(expsumNoisy ~ getExpmat(k, 1:100) %*% sp, expsum.df,
+ start = list(k = c(.6,.02), sp = c(1,2))))

Formula: expsumNoisy ~ getExpmat(k, 1:100) %*% sp

Parameters:
Estimate Std. Error t value Pr(>|t|)
k1 5.00e-02 2.73e-04 183 <2e-16 ***
k2 4.97e-03 4.77e-05 104 <2e-16 ***
sp1 1.00e+00 3.96e-03 253 <2e-16 ***
sp2 9.98e-01 4.43e-03 225 <2e-16 ***
---
Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Residual standard error: 0.00182 on 96 degrees of freedom

>
> ## didn't work with port in 2.4.1
> summary (nls(expsumNoisy ~ getExpmat(k, 1:100) %*% sp, expsum.df,
+ start = list(k = c(.6,.02), sp = c(1,2)),
+ algorithm = "port"))

Formula: expsumNoisy ~ getExpmat(k, 1:100) %*% sp

Parameters:
Estimate Std. Error t value Pr(>|t|)
k1 5.00e-02 2.73e-04 183 <2e-16 ***
k2 4.97e-03 4.77e-05 104 <2e-16 ***
sp1 1.00e+00 3.96e-03 253 <2e-16 ***
sp2 9.98e-01 4.43e-03 225 <2e-16 ***
---
Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Residual standard error: 0.00182 on 96 degrees of freedom

Algorithm "port", convergence message: both X-convergence and relative convergence (5)

>
>
> ## PR13540
>
> x <- runif(200)
> b0 <- c(rep(0,100),runif(100))
> b1 <- 1
> fac <- as.factor(rep(c(0,1), each = 100))
> y <- b0 + b1*x + rnorm(200, sd=0.05)
> # next failed in 2.8.1
> fit <- nls(y~b0[fac] + b1*x, start = list(b0=c(1,1), b1=1),
+ algorithm ="port", upper = c(100, 100, 100))
> # next did not "fail" in proposed fix:
> fiB <- nls(y~b0[fac] + b1*x, start = list(b0=c(1,1), b1=101),
+ algorithm ="port", upper = c(100, 100, 100),
+ control = list(warnOnly=TRUE))# warning ..
Warning in nls(y ~ b0[fac] + b1 * x, start = list(b0 = c(1, 1), b1 = 101), :
Convergence failure: initial par violates constraints
> with(fiB$convInfo, ## start par. violates constraints
+ stopifnot(isConv == FALSE, stopCode == 300))
>
>
> ## PR#17367 -- nls() quoting non-syntactical variable names
> ##
> op <- options(warn = 2)# no warnings allowed from here
> ##
> dN <- data.frame('NO [µmol/l]' = c(1,3,8,17), t = 1:4, check.names=FALSE)
> fnN <- `NO [µmol/l]` ~ a + k* exp(t)
> ## lm() works, nls() should too
> lm.N <- lm(`NO [µmol/l]` ~ exp(t) , data = dN)
> summary(lm.N) -> slmN
> nm. <- nls(`NO [µmol/l]` ~ a + k*exp(t), start=list(a=0,k=1), data = dN)
> ## In R <= 3.4.x : Error in eval(predvars, data, env) : object 'NO' not found
> nmf <- nls(fnN, start=list(a=0,k=1), data = dN)
> ## (ditto; gave identical error)
> noC <- function(L) L[-match("call", names(L))]
> stopifnot(all.equal(noC (nm.), noC (nmf)))
> ##
> ## with list for which as.data.frame() does not work [-> different branch, not using model.frame!]
> ## list version (has been valid "forever", still doubtful, rather give error [FIXME] ?)
> lsN <- c(as.list(dN), list(foo="bar")); lsN[["t"]] <- 1:8
> nmL <- nls(`NO [µmol/l]` ~ a + k*exp(t), start=list(a=0,k=1), data = lsN)
> stopifnot(all.equal(coef(nmL), c(a = 5.069866, k = 0.003699669), tol = 4e-7))# seen 4.2e-8
>
> ## trivial RHS -- should work even w/o 'start='
> fi1 <- nls(y ~ a, start = list(a=1))
> ## -> 2 deprecation warnings "length 1 in vector-arithmetic" from nlsModel() in R 3.4.x ..
> options(op) # warnings about missing 'start' ok:
> f.1 <- nls(y ~ a) # failed in R 3.4.x
Warning in nls(y ~ a) :
No starting values specified for some parameters.
Initializing 'a' to '1.'.
Consider specifying 'start' or using a selfStart model
> stopifnot(all.equal(noC(f.1), noC(fi1)),
+ all.equal(coef(f.1), c(a = mean(y))))
>
>
> ##--- New option 'central' for numericDeriv() :
>
> ## Continuing the pnorm() example from example(numericDeriv):
>
> mkEnv <- function(n, from = -3, to = 3) {
+ stopifnot(is.numeric(n), n >= 2)
+ E <- new.env()
+ E$mean <- 0.
+ E$sd <- 1.
+ E$x <- seq(from, to, length.out = n)
+ E
+ }
>
> pnEnv <- mkEnv(65) # is used inside errE() :
>
> ## varying eps (very platform dependent?):
> errE <- Vectorize(function(eps, central=FALSE) {
+ grad <- attr(numericDeriv(quote(pnorm(x, mean, sd)), c("mean", "sd"),
+ pnEnv, eps=eps, central=central), "gradient")
+ target <- with(pnEnv, -dnorm(x) * cbind(1, x, deparse.level=0L))
+ ## return relative error {in the same sense as in all.equal()} :
+ sum(abs(target - grad)) / sum(abs(target))
+ })
>
> curve(errE(x), 1e-9, 1e-4, log="xy", n=512, ylim = c(1.5e-11, 5e-7),
+ xlab = quote(epsilon), ylab=quote(errE(epsilon))) -> rex
> axis(1, at = 2^-(52/2), label = quote(sqrt(epsilon[c])), col=4, col.axis=4, line=-1/2)
> axis(1, at = 2^-(52/3), label = quote(epsilon[c]^{1/3}), col=4, col.axis=4, line=-1/2)
> curve(errE(x, central=TRUE), n=512, col=2, add = TRUE) -> rexC
> ## IGNORE_RDIFF_BEGIN
> str(xy1 <- approx(rex , xout= sqrt(2^-52)) )
List of 2
$ x: num 1.49e-08
$ y: num 1.56e-08
> str(xy2 <- approx(rexC, xout=(2^-52)^(1/3)))
List of 2
$ x: num 6.06e-06
$ y: num 2.4e-11
> ## IGNORE_RDIFF_END
> lines(xy1, type="h", col=4)
> lines(xy2, type="h", col=4)
>
> proc.time()
user system elapsed
3.481 0.135 3.613