R/kfolds2Chisq.R
kfolds2Chisq.Rd
This function computes Predicted Chisquare for k-fold cross validated partial least squares regression models.
kfolds2Chisq(pls_kfolds)
a k-fold cross validated partial least squares regression glm model
Total Predicted Chisquare vs number of components for the first group partition
...
Total Predicted Chisquare vs number of components for the last group partition
Use cv.plsRglm
to create k-fold cross validated partial
least squares regression glm models.
Nicolas Meyer, Myriam Maumy-Bertrand and Frédéric Bertrand (2010). Comparing the linear and the logistic PLS regression with qualitative predictors: application to allelotyping data. Journal de la Societe Francaise de Statistique, 151(2), pages 1-18. http://publications-sfds.math.cnrs.fr/index.php/J-SFdS/article/view/47
kfolds2coeff
, kfolds2Press
,
kfolds2Pressind
, kfolds2Chisqind
,
kfolds2Mclassedind
and kfolds2Mclassed
to
extract and transform results from k-fold cross validation.
# \donttest{
# Example: Cornell data — total predicted Chisquare per number of components
# for k-fold cross-validated gaussian PLS-GLM fits with two fold counts.
data(Cornell)
XCornell<-Cornell[,1:7]
yCornell<-Cornell[,8]
# Cross-validated fits with nt = 3 components: K = 16 folds, then K = 5 folds.
bbb <- cv.plsRglm(object=yCornell,dataX=XCornell,nt=3,modele="pls-glm-gaussian",K=16,verbose=FALSE)
bbb2 <- cv.plsRglm(object=yCornell,dataX=XCornell,nt=3,modele="pls-glm-gaussian",K=5,verbose=FALSE)
# One predicted Chisquare value per number of components (1..nt).
# NOTE: fold assignment is random, so the recorded "#>" output below is from
# one particular run; your values may differ.
kfolds2Chisq(bbb)
#> [[1]]
#> [1] 55.70774 24.52966 20.84377
#>
kfolds2Chisq(bbb2)
#> [[1]]
#> [1] 62.75624 13.00385 18.78492
#>
# Clean up the example objects.
rm(list=c("XCornell","yCornell","bbb","bbb2"))
# Example: pine data — same comparison with more components, then with a
# deliberately introduced missing value to show NA handling.
data(pine)
Xpine<-pine[,1:10]
ypine<-pine[,11]
# Default K (leave-one-out style folds) with nt = 4, then K = 10 with nt = 10.
bbb <- cv.plsRglm(object=ypine,dataX=Xpine,nt=4,modele="pls-glm-gaussian",verbose=FALSE)
bbb2 <- cv.plsRglm(object=ypine,dataX=Xpine,nt=10,modele="pls-glm-gaussian",K=10,verbose=FALSE)
# Recorded "#>" outputs below come from one run; fold assignment is random.
kfolds2Chisq(bbb)
#> [[1]]
#> [1] 12.79433 11.62440 10.71400 11.97592
#>
kfolds2Chisq(bbb2)
#> [[1]]
#> [1] 13.76087 13.37113 11.86735 13.20128 14.46109 14.94359 15.07131 15.18588
#> [9] 15.20106 15.24767
#>
# Introduce a single missing predictor value at row 1, column 2.
XpineNAX21 <- Xpine
XpineNAX21[1,2] <- NA
bbbNA <- cv.plsRglm(object=ypine,dataX=XpineNAX21,nt=10,modele="pls",K=10,verbose=FALSE)
# For modele="pls" the predicted Chisquare coincides with the PRESS, as the
# identical outputs of the next two calls illustrate.
kfolds2Press(bbbNA)
#> [[1]]
#> [1] 14.92628 14.37553 12.28649 12.12053 11.55847 13.28866 14.14043 20.95684
#> [9] 14.33606
#>
kfolds2Chisq(bbbNA)
#> [[1]]
#> [1] 14.92628 14.37553 12.28649 12.12053 11.55847 13.28866 14.14043 20.95684
#> [9] 14.33606
#>
# Gaussian PLS-GLM fits on the data with the missing value.
bbbNA2 <- cv.plsRglm(object=ypine,dataX=XpineNAX21,nt=4,modele="pls-glm-gaussian",verbose=FALSE)
bbbNA3 <- cv.plsRglm(object=ypine,dataX=XpineNAX21,nt=10,modele="pls-glm-gaussian",K=10,
verbose=FALSE)
kfolds2Chisq(bbbNA2)
#> [[1]]
#> [1] 15.23969 16.92180 14.45210 14.74561
#>
kfolds2Chisq(bbbNA3)
#> [[1]]
#> [1] 14.97800 14.65451 11.61113 13.15281 12.67912 19.23716 20.71347 20.58365
#> [9] 21.56557
#>
# Clean up the example objects.
rm(list=c("Xpine","XpineNAX21","ypine","bbb","bbb2","bbbNA","bbbNA2","bbbNA3"))
# Example: aze_compl data (binary response) — predicted Chisquare for
# logistic-type PLS-GLM fits, specified two equivalent ways:
# modele="pls-glm-family" with family="binomial", and modele="pls-glm-logistic".
data(aze_compl)
Xaze_compl<-aze_compl[,2:34]
yaze_compl<-aze_compl$y
# Recorded "#>" outputs below come from one run; fold assignment is random.
kfolds2Chisq(cv.plsRglm(object=yaze_compl,dataX=Xaze_compl,nt=4,modele="pls-glm-family",
family="binomial",verbose=FALSE))
#> [[1]]
#> [1] 239.1685 645.5158 10328.4832 55210.5573
#>
kfolds2Chisq(cv.plsRglm(object=yaze_compl,dataX=Xaze_compl,nt=4,modele="pls-glm-logistic",
verbose=FALSE))
#> [[1]]
#> [1] 318.2194 714.1583 3236.3585 12028.9993
#>
# Same comparison with K = 10 folds and nt = 10 components; here the family
# is passed as a family object, binomial(), rather than a string.
kfolds2Chisq(cv.plsRglm(object=yaze_compl,dataX=Xaze_compl,nt=10,modele="pls-glm-family",
family=binomial(),K=10,verbose=FALSE))
#> [[1]]
#> [1] 227.7316 452.1654 2059.5759 7188.8215 17946.5448 23486.1508
#> [7] 26944.1036 28156.8907 27708.5382 28325.5904
#>
kfolds2Chisq(cv.plsRglm(object=yaze_compl,dataX=Xaze_compl,nt=10,modele="pls-glm-logistic",
K=10,verbose=FALSE))
#> [[1]]
#> [1] 206.0769 348.2690 1839.9036 4398.6389 8850.9623
#> [6] 35977.6672 384288.7368 908110.3227 1296856.4896 1390966.2763
#>
# Clean up the example objects.
rm(list=c("Xaze_compl","yaze_compl"))
# }