This function computes the AIC of a univariate plsR model.
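Numerically, the criterion agrees with the Gaussian log-likelihood of the model residuals, with the error variance estimated by maximum likelihood (the mean of the squared residuals), as the examples below verify against logLik() and AIC() for a least-squares fit. The following is a minimal sketch of that computation, not the package source; it assumes, as in the calls AICpls(10, ...) in the examples, that the first argument counts the extracted components, so that the intercept and the variance estimate add two further degrees of freedom.

## Minimal sketch under the assumptions stated above, consistent with the example output below.
loglik_sketch <- function(residY) {
  sigma2 <- mean(residY^2)                      # ML estimate of the error variance
  sum(dnorm(residY, mean = 0, sd = sqrt(sigma2), log = TRUE))
}
aic_sketch <- function(ncomp, residY) {
  -2 * loglik_sketch(residY) + 2 * (ncomp + 2)  # ncomp components + intercept + variance
}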
References
Baibing Li, Julian Morris, Elaine B. Martin, Model selection for partial least squares regression, Chemometrics and Intelligent Laboratory Systems 64 (2002) 79-89, doi:10.1016/S0169-7439(02)00051-5.
Author
Frédéric Bertrand
frederic.bertrand@lecnam.net
https://fbertran.github.io/homepage/
Examples
data(pine)
ypine <- pine[,11]
Xpine <- pine[,1:10]
(Pinscaled <- as.data.frame(cbind(scale(ypine),scale(as.matrix(Xpine)))))
#> V1 x1 x2 x3 x4 x5
#> 1 1.93343137 -0.893800639 -0.962578437 -1.09627645 -0.43382827 -0.10494125
#> 2 0.81712282 0.206659107 -0.141067529 -0.36224787 -0.04949719 0.63880345
#> 3 0.39540626 -0.653559427 -0.141067529 -0.67683154 -1.97115262 -1.73188279
#> 4 0.04811027 -0.475315947 -0.141067529 0.68636438 -1.39465599 -1.40649448
#> 5 -0.70849886 0.322904855 0.406606409 -0.46710909 -0.72207659 -1.05786415
#> 6 0.84192968 -0.506314813 -0.277986014 -1.09627645 -0.04949719 -0.10494125
#> 7 -0.63407829 0.826636429 1.091198831 1.10580928 -1.39465599 -1.66215672
#> 8 -0.91935714 -0.049081538 2.323465192 -0.46710909 1.19957885 1.01067580
#> 9 2.71484735 -1.459529946 -0.688741468 -0.99141522 -0.91424213 -0.61626573
#> 10 0.49463369 -1.862515205 0.680443378 -0.25738664 -0.14557996 -0.75571786
#> 11 -0.53485086 -1.157291001 -0.688741468 0.58150316 1.00741330 0.33665717
#> 12 -0.13794116 -1.033295537 1.638872769 2.15442154 0.91133053 1.47551624
#> 13 2.26832393 -1.056544686 -1.921007828 -1.20113767 -1.20249045 -1.10434819
#> 14 1.53652166 -0.459816514 -1.099496921 -1.20113767 0.62308222 0.98743378
#> 15 1.16441881 -0.498565096 -0.414904499 -0.99141522 -0.24166273 0.26693110
#> 16 -0.93176057 1.710104113 1.228117316 2.04956031 1.19957885 0.59231941
#> 17 -0.84493657 1.849599011 -0.277986014 0.89608683 1.10349607 1.15012793
#> 18 0.23416169 -0.080080404 0.132769440 -0.57197032 -0.62599382 0.10423695
#> 19 -0.49764058 0.005166478 0.680443378 -0.36224787 -1.29857322 -0.89517000
#> 20 -0.11313430 0.865385012 1.365035800 0.79122561 0.14266836 -0.01197316
#> 21 -0.17515144 2.012343058 -1.236415406 2.15442154 0.71916499 0.84798165
#> 22 -0.85734000 0.632893516 -0.414904499 0.47664193 -0.24166273 -0.10494125
#> 23 0.19695141 0.477899186 -0.004149045 -0.78169277 0.81524776 1.05715985
#> 24 -0.91935714 2.004593341 -0.688741468 1.21067051 0.71916499 0.59231941
#> 25 -0.88214685 0.625143800 2.186546707 0.16205826 0.23875113 -0.01197316
#> 26 -0.16274801 0.601894650 -0.277986014 -0.67683154 0.23875113 0.70852952
#> 27 -0.84493657 0.911883311 -0.825659952 0.68636438 1.96824102 1.33606411
#> 28 -0.75811257 0.260907123 -0.688741468 -1.09627645 -1.68290430 -2.19672323
#> 29 0.34579255 -0.831802907 -0.825659952 -0.99141522 -0.91424213 -0.87192797
#> 30 -0.78291943 -0.909300072 -0.141067529 0.37178071 -0.52991104 -0.91841202
#> 31 -0.57206115 -0.676808577 0.269687924 -0.57197032 0.91133053 1.52200029
#> 32 -0.74570914 -0.669058860 -1.099496921 -0.04766419 1.29566162 0.33665717
#> 33 -0.96897085 -0.041331821 0.954280347 0.58150316 0.71916499 0.59231941
#> x6 x7 x8 x9 x10
#> 1 -1.1025308 -2.9795617 -0.69706483 -1.02706110 -1.3713833
#> 2 -0.4055286 -0.8420501 -0.48446650 -0.49748272 -0.2009786
#> 3 -0.6843294 -0.3076721 -1.37737948 -0.85053497 -1.3713833
#> 4 0.7096750 0.2267058 -0.27186817 0.56167404 -0.5911135
#> 5 -0.5449290 0.2267058 -0.39942716 -0.32095659 -1.7615182
#> 6 -1.1025308 0.2267058 -0.73958449 -1.20358722 -1.3713833
#> 7 1.2672767 1.2954616 0.32340716 0.91472629 0.9694261
#> 8 -0.4055286 -1.9108059 0.11080883 -0.32095659 -0.5911135
#> 9 -1.1025308 0.2267058 -1.12226148 -0.85053497 0.9694261
#> 10 -0.2661281 0.7610837 -0.31438783 0.03209566 0.9694261
#> 11 0.8490754 -0.8420501 1.68403646 1.62083079 -0.2009786
#> 12 2.1036794 -1.3764280 1.59899713 1.44430467 0.9694261
#> 13 -1.1025308 0.2267058 -1.50493848 -1.55663947 -0.5911135
#> 14 -1.1025308 0.7610837 -0.73958449 -1.55663947 -1.3713833
#> 15 -0.9631303 0.2267058 -0.56950583 -1.20358722 0.1891563
#> 16 1.8248785 0.2267058 1.64151680 1.44430467 0.5792912
#> 17 1.4066772 1.2954616 0.70608415 1.26777854 -1.7615182
#> 18 -0.5449290 -2.4451838 -0.14430917 0.20862179 0.5792912
#> 19 -0.4055286 0.7610837 -1.07974182 -0.67400884 0.9694261
#> 20 0.8490754 -0.3076721 0.66356448 0.73820016 0.5792912
#> 21 1.6854781 0.2267058 0.79112348 0.91472629 0.1891563
#> 22 0.5702745 -0.3076721 0.06828916 0.38514791 0.1891563
#> 23 -0.8237299 0.7610837 -0.31438783 -0.67400884 0.5792912
#> 24 0.8490754 0.7610837 0.11080883 0.38514791 0.9694261
#> 25 -0.1267277 -0.3076721 0.11080883 0.20862179 -1.3713833
#> 26 -0.8237299 0.2267058 -0.01675017 -0.49748272 0.9694261
#> 27 1.2672767 0.7610837 2.61946911 1.26777854 -1.7615182
#> 28 -1.1025308 0.2267058 -1.67501714 -1.20358722 0.1891563
#> 29 -0.9631303 0.2267058 -0.90966315 -1.20358722 0.9694261
#> 30 0.2914737 -0.3076721 -0.05926984 1.44430467 0.9694261
#> 31 -0.6843294 0.2267058 -0.22934850 -0.85053497 0.5792912
#> 32 -0.1267277 0.7610837 1.04624148 0.56167404 0.9694261
#> 33 0.7096750 1.2954616 1.17380047 1.09125241 0.9694261
colnames(Pinscaled)[1] <- "yy"
lm(yy~x1+x2+x3+x4+x5+x6+x7+x8+x9+x10,data=Pinscaled)
#>
#> Call:
#> lm(formula = yy ~ x1 + x2 + x3 + x4 + x5 + x6 + x7 + x8 + x9 +
#> x10, data = Pinscaled)
#>
#> Coefficients:
#> (Intercept) x1 x2 x3 x4 x5
#> 1.119e-16 -4.601e-01 -3.175e-01 3.298e-01 -6.020e-01 5.539e-01
#> x6 x7 x8 x9 x10
#> 6.720e-02 -6.054e-02 6.865e-02 -6.231e-01 -1.058e-01
#>
modpls <- plsR(ypine,Xpine,10)
#> ____************************************************____
#> ____Component____ 1 ____
#> ____Component____ 2 ____
#> ____Component____ 3 ____
#> ____Component____ 4 ____
#> ____Component____ 5 ____
#> ____Component____ 6 ____
#> ____Component____ 7 ____
#> ____Component____ 8 ____
#> ____Component____ 9 ____
#> ____Component____ 10 ____
#> ____Predicting X without NA neither in X nor in Y____
#> Loading required namespace: plsdof
#> ****________________________________________________****
#>
modpls$Std.Coeffs
#> [,1]
#> Intercept 0.00000000
#> x1 -0.46014550
#> x2 -0.31750091
#> x3 0.32980012
#> x4 -0.60199305
#> x5 0.55393816
#> x6 0.06720419
#> x7 -0.06054179
#> x8 0.06864663
#> x9 -0.62312421
#> x10 -0.10578863
lm(yy~x1+x2+x3+x4+x5+x6+x7+x8+x9+x10,data=Pinscaled)
#>
#> Call:
#> lm(formula = yy ~ x1 + x2 + x3 + x4 + x5 + x6 + x7 + x8 + x9 +
#> x10, data = Pinscaled)
#>
#> Coefficients:
#> (Intercept) x1 x2 x3 x4 x5
#> 1.119e-16 -4.601e-01 -3.175e-01 3.298e-01 -6.020e-01 5.539e-01
#> x6 x7 x8 x9 x10
#> 6.720e-02 -6.054e-02 6.865e-02 -6.231e-01 -1.058e-01
#>
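As a hedged aside (not part of the original example): with as many components as predictors, the PLS standardized coefficients should reproduce the least-squares fit on the scaled data, which the following check makes explicit.

## Hypothetical check, not in the original example: the 10-component PLS coefficients
## should match the OLS coefficients on the scaled data up to numerical error.
all.equal(as.vector(modpls$Std.Coeffs[-1]),
          unname(coef(lm(yy~x1+x2+x3+x4+x5+x6+x7+x8+x9+x10,data=Pinscaled))[-1]),
          tolerance = 1e-6)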
AIC(lm(yy~x1+x2+x3+x4+x5+x6+x7+x8+x9+x10,data=Pinscaled))
#> [1] 79.37542
print(logLik(lm(yy~x1+x2+x3+x4+x5+x6+x7+x8+x9+x10,data=Pinscaled)))
#> 'log Lik.' -27.68771 (df=12)
sum(dnorm(modpls$RepY, modpls$Std.ValsPredictY, sqrt(mean(modpls$residY^2)), log=TRUE))
#> [1] -27.68771
sum(dnorm(Pinscaled$yy,fitted(lm(yy~x1+x2+x3+x4+x5+x6+x7+x8+x9+x10,data=Pinscaled)),
sqrt(mean(residuals(lm(yy~x1+x2+x3+x4+x5+x6+x7+x8+x9+x10,data=Pinscaled))^2)), log=TRUE))
#> [1] -27.68771
loglikpls(modpls$residY)
#> [1] -27.68771
loglikpls(residuals(lm(yy~x1+x2+x3+x4+x5+x6+x7+x8+x9+x10,data=Pinscaled)))
#> [1] -27.68771
AICpls(10,residuals(lm(yy~x1+x2+x3+x4+x5+x6+x7+x8+x9+x10,data=Pinscaled)))
#> [1] 79.37542
AICpls(10,modpls$residY)
#> [1] 79.37542
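Beyond reproducing the saturated fit, AICpls can be used to compare models with different numbers of components, in the spirit of the selection criterion of Li et al. (2002). A hedged sketch, assuming (as in the calls above) that the first argument of AICpls is the number of extracted components and that plsR takes that number as its third argument:

## Hedged sketch, not from the original page: AIC for 1 to 10 components.
## plsR() prints its progress messages for each refit.
aics <- sapply(1:10, function(nt) {
  mod <- plsR(ypine, Xpine, nt)
  AICpls(nt, mod$residY)
})
aics
which.min(aics)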