#activate libraries
library(psych)
library(mdatools)
##
## Attaching package: 'mdatools'
## The following object is masked from 'package:psych':
##
## pca
library(outliers)
##
## Attaching package: 'outliers'
## The following object is masked from 'package:psych':
##
## outlier
library(ggpubr)
## Loading required package: ggplot2
##
## Attaching package: 'ggplot2'
## The following objects are masked from 'package:psych':
##
## %+%, alpha
library(PerformanceAnalytics)
## Loading required package: xts
## Loading required package: zoo
##
## Attaching package: 'zoo'
## The following objects are masked from 'package:base':
##
## as.Date, as.Date.numeric
##
## Attaching package: 'PerformanceAnalytics'
## The following object is masked from 'package:graphics':
##
## legend
library(Hmisc)
## Loading required package: lattice
## Loading required package: survival
## Loading required package: Formula
##
## Attaching package: 'Hmisc'
## The following object is masked from 'package:mdatools':
##
## capitalize
## The following object is masked from 'package:psych':
##
## describe
## The following objects are masked from 'package:base':
##
## format.pval, units
library(pca3d)
#IMPORT DATA: CSV files with headers in the first row, stored in the project folder
X <- read.csv(file = "X.csv", header = TRUE)
XY<- read.csv(file = "XY.csv", header = TRUE)
Y <- read.csv(file = "Y.csv", header = TRUE)
XN <- prep.autoscale(X, center = TRUE, scale = TRUE)
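# A quick sanity check (not part of the original workflow): after autoscaling, the
# column means should be ~0 and the standard deviations ~1.
round(colMeans(XN), 10)
round(apply(XN, 2, sd), 10)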
#UNIVARIATE ANALYSIS
str(X)
## 'data.frame': 999 obs. of 11 variables:
## $ V1 : num 7.4 7.8 7.8 11.2 7.4 7.4 7.9 7.3 7.8 7.5 ...
## $ V2 : num 0.7 0.88 0.76 0.28 0.7 0.66 0.6 0.65 0.58 0.5 ...
## $ V3 : num 0 0 0.04 0.56 0 0 0.06 0 0.02 0.36 ...
## $ V4 : num 1.9 2.6 2.3 1.9 1.9 1.8 1.6 1.2 2 6.1 ...
## $ V5 : num 0.076 0.098 0.092 0.075 0.076 0.075 0.069 0.065 0.073 0.071 ...
## $ V6 : num 11 25 15 17 11 13 15 15 9 17 ...
## $ V7 : num 34 67 54 60 34 40 59 21 18 102 ...
## $ V8 : num 0.998 0.997 0.997 0.998 0.998 ...
## $ V9 : num 3.51 3.2 3.26 3.16 3.51 3.51 3.3 3.39 3.36 3.35 ...
## $ V10: num 0.56 0.68 0.65 0.58 0.56 0.56 0.46 0.47 0.57 0.8 ...
## $ V11: num 9.4 9.8 9.8 9.8 9.4 9.4 9.4 10 9.5 10.5 ...
summary(X)
## V1 V2 V3 V4
## Min. : 4.600 Min. :0.1000 Min. :0.0000 Min. : 0.800
## 1st Qu.: 6.700 1st Qu.:0.2600 1st Qu.:0.2100 1st Qu.: 1.800
## Median : 7.300 Median :0.3700 Median :0.3200 Median : 2.500
## Mean : 7.746 Mean :0.4112 Mean :0.3206 Mean : 4.631
## 3rd Qu.: 8.300 3rd Qu.:0.5500 3rd Qu.:0.4200 3rd Qu.: 6.200
## Max. :15.600 Max. :1.3300 Max. :1.0000 Max. :22.000
## V5 V6 V7 V8
## Min. :0.02000 Min. : 3.00 Min. : 8.00 Min. :0.9892
## 1st Qu.:0.04500 1st Qu.: 12.00 1st Qu.: 42.00 1st Qu.:0.9939
## Median :0.06200 Median : 22.00 Median : 96.00 Median :0.9965
## Mean :0.07089 Mean : 26.17 Mean : 99.04 Mean :0.9959
## 3rd Qu.:0.08300 3rd Qu.: 37.00 3rd Qu.:147.50 3rd Qu.:0.9978
## Max. :0.61100 Max. :131.00 Max. :313.00 Max. :1.0032
## V9 V10 V11
## Min. :2.740 Min. :0.2700 Min. : 8.50
## 1st Qu.:3.150 1st Qu.:0.4700 1st Qu.: 9.40
## Median :3.250 Median :0.5600 Median : 9.80
## Mean :3.251 Mean :0.5916 Mean :10.12
## 3rd Qu.:3.350 3rd Qu.:0.6600 3rd Qu.:10.60
## Max. :3.900 Max. :2.0000 Max. :14.00
describe(X)
## X
##
## 11 Variables 999 Observations
## --------------------------------------------------------------------------------
## V1
## n missing distinct Info Mean Gmd .05 .10
## 999 0 88 0.999 7.746 1.769 5.8 6.1
## .25 .50 .75 .90 .95
## 6.7 7.3 8.3 10.3 11.5
##
## lowest : 4.6 4.7 5.0 5.1 5.2, highest: 13.7 13.8 14.0 15.0 15.6
## --------------------------------------------------------------------------------
## V2
## n missing distinct Info Mean Gmd .05 .10
## 999 0 127 1 0.4112 0.2088 0.180 0.210
## .25 .50 .75 .90 .95
## 0.260 0.370 0.550 0.670 0.735
##
## lowest : 0.100 0.115 0.120 0.125 0.130, highest: 1.040 1.070 1.090 1.130 1.330
## --------------------------------------------------------------------------------
## V3
## n missing distinct Info Mean Gmd .05 .10
## 999 0 79 1 0.3206 0.1995 0.020 0.060
## .25 .50 .75 .90 .95
## 0.210 0.320 0.420 0.560 0.631
##
## lowest : 0.00 0.01 0.02 0.03 0.04, highest: 0.74 0.76 0.79 0.88 1.00
## --------------------------------------------------------------------------------
## V4
## n missing distinct Info Mean Gmd .05 .10
## 999 0 167 0.999 4.631 4.242 1.20 1.40
## .25 .50 .75 .90 .95
## 1.80 2.50 6.20 11.94 14.30
##
## lowest : 0.80 0.90 1.00 1.10 1.20, highest: 19.45 19.80 20.70 20.80 22.00
## --------------------------------------------------------------------------------
## V5
## n missing distinct Info Mean Gmd .05 .10
## 999 0 138 1 0.07089 0.03901 0.031 0.036
## .25 .50 .75 .90 .95
## 0.045 0.062 0.083 0.100 0.118
##
## lowest : 0.020 0.021 0.022 0.023 0.025, highest: 0.413 0.464 0.467 0.610 0.611
## --------------------------------------------------------------------------------
## V6
## n missing distinct Info Mean Gmd .05 .10
## 999 0 83 0.999 26.17 19.5 5 6
## .25 .50 .75 .90 .95
## 12 22 37 52 60
##
## lowest : 3.0 4.0 5.0 6.0 7.0, highest: 82.0 82.5 83.0 87.0 131.0
## --------------------------------------------------------------------------------
## V7
## n missing distinct Info Mean Gmd .05 .10
## 999 0 225 1 99.04 70.64 17.9 23.0
## .25 .50 .75 .90 .95
## 42.0 96.0 147.5 186.0 203.0
##
## lowest : 8 10 11 12 13, highest: 252 255 260 272 313
## --------------------------------------------------------------------------------
## V8
## n missing distinct Info Mean Gmd .05 .10
## 999 0 144 1 0.9959 0.003052 0.9910 0.9918
## .25 .50 .75 .90 .95
## 0.9939 0.9965 0.9978 0.9992 0.9999
##
## lowest : 0.9892 0.9893 0.9894 0.9896 0.9898, highest: 1.0015 1.0018 1.0022 1.0026 1.0032
## --------------------------------------------------------------------------------
## V9
## n missing distinct Info Mean Gmd .05 .10
## 999 0 82 1 3.251 0.175 3.00 3.05
## .25 .50 .75 .90 .95
## 3.15 3.25 3.35 3.45 3.52
##
## lowest : 2.74 2.87 2.88 2.89 2.93, highest: 3.69 3.72 3.75 3.85 3.90
## --------------------------------------------------------------------------------
## V10
## n missing distinct Info Mean Gmd .05 .10
## 999 0 97 0.999 0.5916 0.1957 0.37 0.39
## .25 .50 .75 .90 .95
## 0.47 0.56 0.66 0.82 0.92
##
## lowest : 0.27 0.28 0.29 0.30 0.32, highest: 1.59 1.61 1.95 1.98 2.00
## --------------------------------------------------------------------------------
## V11
## n missing distinct Info Mean Gmd .05 .10
## 999 0 50 0.998 10.12 1.128 8.9 9.1
## .25 .50 .75 .90 .95
## 9.4 9.8 10.6 11.7 12.5
##
## lowest : 8.5 8.6 8.7 8.8 8.9, highest: 13.0 13.1 13.3 13.4 14.0
## --------------------------------------------------------------------------------
boxplot(X)
summary(XN)
## V1 V2 V3 V4
## Min. :-1.8396 Min. :-1.6410 Min. :-1.817058 Min. :-0.8730
## 1st Qu.:-0.6118 1st Qu.:-0.7972 1st Qu.:-0.626885 1st Qu.:-0.6451
## Median :-0.2610 Median :-0.2171 Median :-0.003461 Median :-0.4856
## Mean : 0.0000 Mean : 0.0000 Mean : 0.000000 Mean : 0.0000
## 3rd Qu.: 0.3236 3rd Qu.: 0.7322 3rd Qu.: 0.563289 3rd Qu.: 0.3576
## Max. : 4.5916 Max. : 4.8457 Max. : 3.850434 Max. : 3.9585
## V5 V6 V7 V8
## Min. :-0.9944 Min. :-1.3077 Min. :-1.47070 Min. :-2.4788
## 1st Qu.:-0.5059 1st Qu.:-0.7998 1st Qu.:-0.92147 1st Qu.:-0.7479
## Median :-0.1737 Median :-0.2354 Median :-0.04915 Median : 0.2096
## Mean : 0.0000 Mean : 0.0000 Mean : 0.00000 Mean : 0.0000
## 3rd Qu.: 0.2366 3rd Qu.: 0.6111 3rd Qu.: 0.78278 3rd Qu.: 0.6884
## Max. :10.5539 Max. : 5.9160 Max. : 3.45627 Max. : 2.6771
## V9 V10 V11
## Min. :-3.270357 Min. :-1.6309 Min. :-1.5463
## 1st Qu.:-0.647219 1st Qu.:-0.6167 1st Qu.:-0.6862
## Median :-0.007429 Median :-0.1603 Median :-0.3040
## Mean : 0.000000 Mean : 0.0000 Mean : 0.0000
## 3rd Qu.: 0.632361 3rd Qu.: 0.3467 3rd Qu.: 0.4605
## Max. : 4.151204 Max. : 7.1417 Max. : 3.7096
boxplot(XN)
#Outliers
head(outlier(X, logical = TRUE))
## V1 V2 V3 V4 V5 V6 V7 V8 V9 V10 V11
## [1,] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [2,] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [3,] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [4,] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [5,] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [6,] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
outX<- outlier(X, logical = TRUE)
write.csv(outX, file = "outX.csv")
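# A short addition (a sketch): outlier() with logical = TRUE flags, for each variable,
# the value furthest from that variable's mean; count the flags per observation and
# list the rows with at least one flagged value.
nflag <- rowSums(outX)
which(nflag > 0)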
#ANOVA univariate analysis
anX_multi<- aov(formula = cbind(V1, V2, V3, V4, V5, V6, V7, V8, V9, V10, V11) ~ C, data = XY)
summary(anX_multi)
## Response V1 :
## Df Sum Sq Mean Sq F value Pr(>F)
## C 3 16.28 5.4251 1.8592 0.1348
## Residuals 995 2903.35 2.9179
##
## Response V2 :
## Df Sum Sq Mean Sq F value Pr(>F)
## C 3 0.106 0.035422 0.9851 0.399
## Residuals 995 35.778 0.035958
##
## Response V3 :
## Df Sum Sq Mean Sq F value Pr(>F)
## C 3 0.2193 0.073109 2.3579 0.07025 .
## Residuals 995 30.8512 0.031006
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Response V4 :
## Df Sum Sq Mean Sq F value Pr(>F)
## C 3 58.2 19.407 1.008 0.3884
## Residuals 995 19156.0 19.252
##
## Response V5 :
## Df Sum Sq Mean Sq F value Pr(>F)
## C 3 0.00292 0.00097185 0.3704 0.7744
## Residuals 995 2.61085 0.00262397
##
## Response V6 :
## Df Sum Sq Mean Sq F value Pr(>F)
## C 3 772 257.49 0.8196 0.4831
## Residuals 995 312582 314.15
##
## Response V7 :
## Df Sum Sq Mean Sq F value Pr(>F)
## C 3 12093 4031.1 1.0521 0.3687
## Residuals 995 3812360 3831.5
##
## Response V8 :
## Df Sum Sq Mean Sq F value Pr(>F)
## C 3 0.0000166 5.5368e-06 0.7504 0.5222
## Residuals 995 0.0073417 7.3786e-06
##
## Response V9 :
## Df Sum Sq Mean Sq F value Pr(>F)
## C 3 0.0909 0.030298 1.2411 0.2935
## Residuals 995 24.2904 0.024412
##
## Response V10 :
## Df Sum Sq Mean Sq F value Pr(>F)
## C 3 0.177 0.058937 1.5179 0.2083
## Residuals 995 38.635 0.038829
##
## Response V11 :
## Df Sum Sq Mean Sq F value Pr(>F)
## C 3 3.4 1.1340 1.0357 0.3759
## Residuals 995 1089.5 1.0949
anV1X <- aov(V1 ~ C, data = XY)
summary(anV1X)
## Df Sum Sq Mean Sq F value Pr(>F)
## C 3 16.3 5.425 1.859 0.135
## Residuals 995 2903.3 2.918
TukeyHSD(anV1X, which = "C")
## Tukey multiple comparisons of means
## 95% family-wise confidence level
##
## Fit: aov(formula = V1 ~ C, data = XY)
##
## $C
## diff lwr upr p adj
## C2-C1 -0.01339112 -0.4020265 0.37524429 0.9997501
## C3-C1 -0.32262255 -0.7247480 0.07950290 0.1655258
## C4-C1 -0.12512314 -0.5235532 0.27330691 0.8506314
## C3-C2 -0.30923143 -0.6987363 0.08027347 0.1730013
## C4-C2 -0.11173203 -0.4974206 0.27395658 0.8785937
## C4-C3 0.19749940 -0.2017788 0.59677762 0.5804551
anV2X <- aov(V2 ~ C, data = XY)
summary(anV2X)
## Df Sum Sq Mean Sq F value Pr(>F)
## C 3 0.11 0.03542 0.985 0.399
## Residuals 995 35.78 0.03596
TukeyHSD(anV2X, which = "C")
## Tukey multiple comparisons of means
## 95% family-wise confidence level
##
## Fit: aov(formula = V2 ~ C, data = XY)
##
## $C
## diff lwr upr p adj
## C2-C1 -0.003546837 -0.04668890 0.03959523 0.9966529
## C3-C1 -0.009612745 -0.05425233 0.03502684 0.9454253
## C4-C1 -0.026933536 -0.07116290 0.01729583 0.3980273
## C3-C2 -0.006065908 -0.04930450 0.03717268 0.9838921
## C4-C2 -0.023386699 -0.06620164 0.01942825 0.4960573
## C4-C3 -0.017320791 -0.06164431 0.02700273 0.7461597
anV3X <- aov(V3 ~ C, data = XY)
summary(anV3X)
## Df Sum Sq Mean Sq F value Pr(>F)
## C 3 0.219 0.07311 2.358 0.0703 .
## Residuals 995 30.851 0.03101
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
TukeyHSD(anV3X, which = "C")
## Tukey multiple comparisons of means
## 95% family-wise confidence level
##
## Fit: aov(formula = V3 ~ C, data = XY)
##
## $C
## diff lwr upr p adj
## C2-C1 -0.012446168 -0.052507808 0.02761547 0.8546625
## C3-C1 -0.030973739 -0.072425971 0.01047849 0.2190402
## C4-C1 0.009215081 -0.031856219 0.05028638 0.9388622
## C3-C2 -0.018527572 -0.058678842 0.02162370 0.6349242
## C4-C2 0.021661249 -0.018096627 0.06141912 0.4983166
## C4-C3 0.040188820 -0.000969911 0.08134755 0.0585629
anV4X <- aov(V4 ~ C, data = XY)
summary(anV4X)
## Df Sum Sq Mean Sq F value Pr(>F)
## C 3 58 19.41 1.008 0.388
## Residuals 995 19156 19.25
TukeyHSD(anV4X, which = "C")
## Tukey multiple comparisons of means
## 95% family-wise confidence level
##
## Fit: aov(formula = V4 ~ C, data = XY)
##
## $C
## diff lwr upr p adj
## C2-C1 -0.58469586 -1.5829598 0.4135681 0.4334669
## C3-C1 -0.09851541 -1.1314304 0.9343996 0.9948043
## C4-C1 -0.04893387 -1.0723568 0.9744890 0.9993342
## C3-C2 0.48618046 -0.5143169 1.4866779 0.5947777
## C4-C2 0.53576199 -0.4549327 1.5264567 0.5048488
## C4-C3 0.04958153 -0.9760200 1.0751831 0.9993119
anV5X <- aov(V5 ~ C, data = XY)
summary(anV5X)
## Df Sum Sq Mean Sq F value Pr(>F)
## C 3 0.0029 0.0009718 0.37 0.774
## Residuals 995 2.6108 0.0026240
TukeyHSD(anV5X, which = "C")
## Tukey multiple comparisons of means
## 95% family-wise confidence level
##
## Fit: aov(formula = V5 ~ C, data = XY)
##
## $C
## diff lwr upr p adj
## C2-C1 0.004349453 -0.007304776 0.016003681 0.7719694
## C3-C1 0.001181723 -0.010877039 0.013240484 0.9943711
## C4-C1 0.003196862 -0.008751083 0.015144808 0.9014879
## C3-C2 -0.003167730 -0.014848032 0.008512572 0.8978958
## C4-C2 -0.001152590 -0.012718451 0.010413271 0.9940855
## C4-C3 0.002015140 -0.009958241 0.013988520 0.9727729
anV6X <- aov(V6 ~ C, data = XY)
summary(anV6X)
## Df Sum Sq Mean Sq F value Pr(>F)
## C 3 772 257.5 0.82 0.483
## Residuals 995 312582 314.1
TukeyHSD(anV6X, which = "C")
## Tukey multiple comparisons of means
## 95% family-wise confidence level
##
## Fit: aov(formula = V6 ~ C, data = XY)
##
## $C
## diff lwr upr p adj
## C2-C1 -1.9968370 -6.029339 2.035665 0.5795586
## C3-C1 -0.4429972 -4.615472 3.729478 0.9928713
## C4-C1 0.1680837 -3.966048 4.302215 0.9995900
## C3-C2 1.5538398 -2.487684 5.595363 0.7555398
## C4-C2 2.1649207 -1.837005 6.166846 0.5045643
## C4-C3 0.6110809 -3.531851 4.754013 0.9813740
anV7X <- aov(V7 ~ C, data = XY)
summary(anV7X)
## Df Sum Sq Mean Sq F value Pr(>F)
## C 3 12093 4031 1.052 0.369
## Residuals 995 3812360 3832
TukeyHSD(anV7X, which = "C")
## Tukey multiple comparisons of means
## 95% family-wise confidence level
##
## Fit: aov(formula = V7 ~ C, data = XY)
##
## $C
## diff lwr upr p adj
## C2-C1 -9.650289 -23.733110 4.432532 0.2917270
## C3-C1 -6.078799 -20.650453 8.492855 0.7057908
## C4-C1 -4.996398 -19.434144 9.441347 0.8097800
## C3-C2 3.571490 -10.542839 17.685818 0.9151512
## C4-C2 4.653890 -9.322148 18.629929 0.8269843
## C4-C3 1.082401 -13.386080 15.550881 0.9974707
anV8X <- aov(V8 ~ C, data = XY)
summary(anV8X)
## Df Sum Sq Mean Sq F value Pr(>F)
## C 3 0.000017 5.537e-06 0.75 0.522
## Residuals 995 0.007342 7.379e-06
TukeyHSD(anV8X, which = "C")
## Tukey multiple comparisons of means
## 95% family-wise confidence level
##
## Fit: aov(formula = V8 ~ C, data = XY)
##
## $C
## diff lwr upr p adj
## C2-C1 -5.573297e-06 -0.0006235754 0.0006124288 0.9999955
## C3-C1 -2.993260e-04 -0.0009387798 0.0003401278 0.6240601
## C4-C1 -2.084472e-04 -0.0008420246 0.0004251302 0.8320938
## C3-C2 -2.937527e-04 -0.0009131375 0.0003256321 0.6139551
## C4-C2 -2.028739e-04 -0.0008161901 0.0004104423 0.8298209
## C4-C3 9.087878e-05 -0.0005440474 0.0007258050 0.9829265
anV9X <- aov(V9 ~ C, data = XY)
summary(anV9X)
## Df Sum Sq Mean Sq F value Pr(>F)
## C 3 0.091 0.03030 1.241 0.294
## Residuals 995 24.290 0.02441
TukeyHSD(anV9X, which = "C")
## Tukey multiple comparisons of means
## 95% family-wise confidence level
##
## Fit: aov(formula = V9 ~ C, data = XY)
##
## $C
## diff lwr upr p adj
## C2-C1 0.011908455 -0.02363909 0.047456003 0.8243850
## C3-C1 0.004707633 -0.03207382 0.041489082 0.9876683
## C4-C1 -0.013892544 -0.05033598 0.022550897 0.7603380
## C3-C2 -0.007200822 -0.04282790 0.028426257 0.9542653
## C4-C2 -0.025800999 -0.06107901 0.009477012 0.2363869
## C4-C3 -0.018600177 -0.05512120 0.017920844 0.5563301
anV10X <- aov(V10 ~ C, data = XY)
summary(anV10X)
## Df Sum Sq Mean Sq F value Pr(>F)
## C 3 0.18 0.05894 1.518 0.208
## Residuals 995 38.64 0.03883
TukeyHSD(anV10X, which = "C")
## Tukey multiple comparisons of means
## 95% family-wise confidence level
##
## Fit: aov(formula = V10 ~ C, data = XY)
##
## $C
## diff lwr upr p adj
## C2-C1 -0.006304136 -0.05113572 0.03852745 0.9837822
## C3-C1 -0.035635854 -0.08202360 0.01075189 0.1974018
## C4-C1 -0.010082321 -0.05604378 0.03587914 0.9425532
## C3-C2 -0.029331718 -0.07426360 0.01560017 0.3348468
## C4-C2 -0.003778185 -0.04826983 0.04071346 0.9963156
## C4-C3 0.025553533 -0.02050577 0.07161283 0.4822667
anV11X <- aov(V11 ~ C, data = XY)
summary(anV11X)
## Df Sum Sq Mean Sq F value Pr(>F)
## C 3 3.4 1.134 1.036 0.376
## Residuals 995 1089.5 1.095
TukeyHSD(anV11X, which = "C")
## Tukey multiple comparisons of means
## 95% family-wise confidence level
##
## Fit: aov(formula = V11 ~ C, data = XY)
##
## $C
## diff lwr upr p adj
## C2-C1 -0.072031630 -0.3100982 0.1660349 0.8641575
## C3-C1 -0.142675070 -0.3890052 0.1036551 0.4436005
## C4-C1 0.000802969 -0.2432635 0.2448694 0.9999998
## C3-C2 -0.070643440 -0.3092426 0.1679557 0.8715390
## C4-C2 0.072834599 -0.1634268 0.3090960 0.8575007
## C4-C3 0.143478039 -0.1011080 0.3880640 0.4320676
dat<- XY
x <- which(names(dat) == "C") # name of grouping variable
y <- which(names(dat) %in% paste0("V", 1:11)) # indices of the variables to test (V1-V11)
method1 <- "anova" # one of "anova" or "kruskal.test"
method2 <- "t.test" # one of "wilcox.test" or "t.test"
my_comparisons <- list(c("C1", "C2"), c("C1", "C3"), c("C1", "C4"), c("C2", "C3"), c("C2", "C4"), c("C3", "C4")) # comparisons for post-hoc tests
for (i in y) {
for (j in x) {
p <- ggboxplot(dat,
x = colnames(dat[j]), y = colnames(dat[i]),
color = colnames(dat[j]),
legend = "none",
palette = "npg",
add = "jitter"
)
print(
p + stat_compare_means(aes(label = paste0(..method.., ", p-value = ", ..p.format..)),
method = method1, label.y = max(dat[, i], na.rm = TRUE)
)
+ stat_compare_means(comparisons = my_comparisons, method = method2, label = "p.format") # remove if p-value of ANOVA or Kruskal-Wallis test >= alpha
)
}
}
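# A base-R cross-check (a sketch, not in the original script): Bonferroni-adjusted
# pairwise t-tests for a single variable, complementing the boxplot annotations above.
pairwise.t.test(XY$V1, XY$C, p.adjust.method = "bonferroni")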
#BIVARIATE ANALYSIS
chart.Correlation(X, histogram=TRUE, pch=19)
corX<- rcorr(as.matrix(X),type="pearson")
corX
## V1 V2 V3 V4 V5 V6 V7 V8 V9 V10 V11
## V1 1.00 0.16 0.48 -0.19 0.26 -0.40 -0.50 0.61 -0.33 0.42 0.04
## V2 0.16 1.00 -0.43 -0.30 0.27 -0.42 -0.45 0.31 0.28 0.23 -0.07
## V3 0.48 -0.43 1.00 0.16 0.12 0.08 0.09 0.20 -0.48 0.15 0.09
## V4 -0.19 -0.30 0.16 1.00 -0.21 0.53 0.57 0.33 -0.32 -0.26 -0.31
## V5 0.26 0.27 0.12 -0.21 1.00 -0.21 -0.30 0.32 -0.10 0.54 -0.16
## V6 -0.40 -0.42 0.08 0.53 -0.21 1.00 0.82 -0.15 -0.21 -0.31 -0.19
## V7 -0.50 -0.45 0.09 0.57 -0.30 0.82 1.00 -0.25 -0.26 -0.35 -0.20
## V8 0.61 0.31 0.20 0.33 0.32 -0.15 -0.25 1.00 -0.13 0.38 -0.48
## V9 -0.33 0.28 -0.48 -0.32 -0.10 -0.21 -0.26 -0.13 1.00 -0.02 0.22
## V10 0.42 0.23 0.15 -0.26 0.54 -0.31 -0.35 0.38 -0.02 1.00 0.03
## V11 0.04 -0.07 0.09 -0.31 -0.16 -0.19 -0.20 -0.48 0.22 0.03 1.00
##
## n= 999
##
##
## P
## V1 V2 V3 V4 V5 V6 V7 V8 V9 V10
## V1 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000
## V2 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000
## V3 0.0000 0.0000 0.0000 0.0001 0.0131 0.0057 0.0000 0.0000 0.0000
## V4 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000
## V5 0.0000 0.0000 0.0001 0.0000 0.0000 0.0000 0.0000 0.0012 0.0000
## V6 0.0000 0.0000 0.0131 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000
## V7 0.0000 0.0000 0.0057 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000
## V8 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000
## V9 0.0000 0.0000 0.0000 0.0000 0.0012 0.0000 0.0000 0.0000 0.6059
## V10 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.6059
## V11 0.2374 0.0355 0.0037 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.3496
## V11
## V1 0.2374
## V2 0.0355
## V3 0.0037
## V4 0.0000
## V5 0.0000
## V6 0.0000
## V7 0.0000
## V8 0.0000
## V9 0.0000
## V10 0.3496
## V11
write.csv(corX$P, file = "corXp.csv")
write.csv(corX$r, file = "corXr.csv")
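# A short addition (a sketch): list the variable pairs with |r| above an arbitrary
# threshold of 0.5, taken from the correlation matrix written above.
rmat <- corX$r
strong <- which(abs(rmat) > 0.5 & upper.tri(rmat), arr.ind = TRUE)
data.frame(var1 = rownames(rmat)[strong[, 1]],
           var2 = colnames(rmat)[strong[, 2]],
           r = round(rmat[strong], 2))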
#MULTIVARIATE ANALYSIS with the mdatools library
#PCA
XPCA = pca(X, 11, scale = TRUE, info = "X PCA model")
summary(XPCA)
##
## Summary for PCA model (class pca)
## Type of limits: ddmoments
## Alpha: 0.05
## Gamma: 0.01
##
## Eigenvals Expvar Cumexpvar Nq Nh
## Comp 1 3.459 31.44 31.44 2 2
## Comp 2 2.492 22.65 54.10 1 2
## Comp 3 1.605 14.59 68.69 1 3
## Comp 4 0.981 8.91 77.60 2 1
## Comp 5 0.689 6.27 83.87 1 1
## Comp 6 0.532 4.84 88.71 1 2
## Comp 7 0.425 3.87 92.58 2 1
## Comp 8 0.391 3.55 96.13 1 1
## Comp 9 0.259 2.36 98.48 1 2
## Comp 10 0.131 1.19 99.68 1 2
## Comp 11 0.036 0.32 100.00 2 2
plot(XPCA, show.labels = TRUE, cgroup = Y$C)
plotBiplot(XPCA, show.labels = TRUE)
# categorize() labels each observation as regular, extreme or outlier from its distances
pcat = categorize(XPCA)
plotResiduals(XPCA, show.labels = TRUE, cgroup = pcat)
XPCA1 = setDistanceLimits(XPCA, alpha = 0.01, gamma = 0.01)
plotResiduals(XPCA1, show.labels = TRUE, cgroup = pcat)
#PCA 3D graph
pcaX <- prcomp(X, scale.=TRUE)
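# A quick scree plot (a sketch, not in the original script): explained variance per
# component from the prcomp fit, as a cross-check of the Expvar column reported above.
expvar <- 100 * pcaX$sdev^2 / sum(pcaX$sdev^2)
barplot(expvar, names.arg = paste0("PC", seq_along(expvar)),
        ylab = "Explained variance (%)")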
XC <- factor(Y[,3])
pca3d(XPCA$loadings, show.labels = TRUE)
## [1] 0.009390596 0.008972050 0.011068359
## Creating new device
pca3d(XPCA$calres$scores, show.labels = TRUE, group=XC)
## [1] 0.12153868 0.14654408 0.07320205
pca3d(pcaX, group=XC, show.labels = TRUE, show.ellipses=TRUE, ellipse.ci=0.75, show.plane=FALSE)
## [1] 0.12153868 0.14654408 0.07320205
snapshotPCA3d(file="ellipses.png")
#Hierarchical agglomerative clustering (XN was autoscaled beforehand, here with mdatools prep.autoscale)
disXN = dist(XN)
hcXN = hclust(disXN)
plot(hcXN)
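# A small addition (a sketch): outline four clusters directly on the dendrogram,
# matching the cutree(hcXN, 4) call below.
rect.hclust(hcXN, k = 4, border = "red")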
head(hcXN$order)
## [1] 152 259 82 107 825 684
member = cutree(hcXN,4)
table(member)
## member
## 1 2 3 4
## 660 148 4 187
HCA<- member
head(HCA)
## 1 2 3 4 5 6
## 1 1 1 2 1 1
write.csv(HCA, file = "HCA.csv")
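# A short addition (a sketch, assuming the class labels of interest are in Y$C, as used
# for the PCA plots above): cross-tabulate the four clusters against the class.
table(HCA, Y$C)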
#PLS-DA with the mdatools library (plsda can autoscale the data via the scale = TRUE argument)
daX = plsda(X, Y$A, ncomp.selcrit = "min", scale = TRUE, cv = 1)
daX$ncomp
## [1] 11
plot(daX, ncomp = 11)
summary(daX, ncomp = 11)
##
## PLS-DA model (class plsda) summary
## ------------------------------------
## Info:
## Number of selected components: 11
## Cross-validation: full (leave one out)
##
## Class #1 (A3)
## X cumexpvar Y cumexpvar TP FP TN FN Spec. Sens. Accuracy
## Cal 100 9.91 0 0 994 5 1 0 0.995
## Cv NA NA 0 0 994 5 1 0 0.995
##
## Class #2 (A4)
## X cumexpvar Y cumexpvar TP FP TN FN Spec. Sens. Accuracy
## Cal 100 9.91 0 0 965 34 1 0 0.966
## Cv NA NA 0 0 965 34 1 0 0.966
##
## Class #3 (A5)
## X cumexpvar Y cumexpvar TP FP TN FN Spec. Sens. Accuracy
## Cal 100 9.91 244 141 455 159 0.763 0.605 0.700
## Cv NA NA 242 144 452 161 0.758 0.600 0.695
##
## Class #4 (A6)
## X cumexpvar Y cumexpvar TP FP TN FN Spec. Sens. Accuracy
## Cal 100 9.91 94 82 515 308 0.863 0.234 0.610
## Cv NA NA 91 90 507 311 0.849 0.226 0.599
##
## Class #5 (A7)
## X cumexpvar Y cumexpvar TP FP TN FN Spec. Sens. Accuracy
## Cal 100 9.91 4 4 864 127 0.995 0.031 0.869
## Cv NA NA 3 5 863 128 0.994 0.023 0.867
##
## Class #6 (A8)
## X cumexpvar Y cumexpvar TP FP TN FN Spec. Sens. Accuracy
## Cal 100 9.91 0 0 975 24 1 0 0.976
## Cv NA NA 0 0 975 24 1 0 0.976
summary(daX$calres, ncomp = 11)
##
## PLS-DA results (class plsdares) summary:
## Number of selected components: 1
##
## Class #1 (A3):
## X expvar X cumexpvar Y expvar Y cumexpvar TP FP TN FN Spec. Sens.
## Comp 1 16.121 16.121 6.181 6.181 0 0 994 5 1 0
## Comp 2 20.536 36.657 0.957 7.138 0 0 994 5 1 0
## Comp 3 25.516 62.173 0.322 7.460 0 0 994 5 1 0
## Comp 4 6.532 68.705 0.928 8.388 0 0 994 5 1 0
## Comp 5 10.046 78.751 0.328 8.716 0 0 994 5 1 0
## Comp 6 7.993 86.745 0.133 8.850 0 0 994 5 1 0
## Comp 7 2.751 89.495 0.179 9.029 0 0 994 5 1 0
## Comp 8 2.779 92.275 0.177 9.206 0 0 994 5 1 0
## Comp 9 1.431 93.705 0.540 9.745 0 0 994 5 1 0
## Comp 10 3.010 96.715 0.150 9.895 0 0 994 5 1 0
## Comp 11 3.285 100.000 0.015 9.910 0 0 994 5 1 0
## Accuracy
## Comp 1 0.995
## Comp 2 0.995
## Comp 3 0.995
## Comp 4 0.995
## Comp 5 0.995
## Comp 6 0.995
## Comp 7 0.995
## Comp 8 0.995
## Comp 9 0.995
## Comp 10 0.995
## Comp 11 0.995
##
##
## Class #2 (A4):
## X expvar X cumexpvar Y expvar Y cumexpvar TP FP TN FN Spec. Sens.
## Comp 1 16.121 16.121 6.181 6.181 0 0 965 34 1 0
## Comp 2 20.536 36.657 0.957 7.138 0 0 965 34 1 0
## Comp 3 25.516 62.173 0.322 7.460 0 0 965 34 1 0
## Comp 4 6.532 68.705 0.928 8.388 0 0 965 34 1 0
## Comp 5 10.046 78.751 0.328 8.716 0 0 965 34 1 0
## Comp 6 7.993 86.745 0.133 8.850 0 0 965 34 1 0
## Comp 7 2.751 89.495 0.179 9.029 0 0 965 34 1 0
## Comp 8 2.779 92.275 0.177 9.206 0 0 965 34 1 0
## Comp 9 1.431 93.705 0.540 9.745 0 0 965 34 1 0
## Comp 10 3.010 96.715 0.150 9.895 0 0 965 34 1 0
## Comp 11 3.285 100.000 0.015 9.910 0 0 965 34 1 0
## Accuracy
## Comp 1 0.966
## Comp 2 0.966
## Comp 3 0.966
## Comp 4 0.966
## Comp 5 0.966
## Comp 6 0.966
## Comp 7 0.966
## Comp 8 0.966
## Comp 9 0.966
## Comp 10 0.966
## Comp 11 0.966
##
##
## Class #3 (A5):
## X expvar X cumexpvar Y expvar Y cumexpvar TP FP TN FN Spec. Sens.
## Comp 1 16.121 16.121 6.181 6.181 219 146 450 184 0.755 0.543
## Comp 2 20.536 36.657 0.957 7.138 222 142 454 181 0.762 0.551
## Comp 3 25.516 62.173 0.322 7.460 236 148 448 167 0.752 0.586
## Comp 4 6.532 68.705 0.928 8.388 230 148 448 173 0.752 0.571
## Comp 5 10.046 78.751 0.328 8.716 227 135 461 176 0.773 0.563
## Comp 6 7.993 86.745 0.133 8.850 227 135 461 176 0.773 0.563
## Comp 7 2.751 89.495 0.179 9.029 226 138 458 177 0.768 0.561
## Comp 8 2.779 92.275 0.177 9.206 229 137 459 174 0.770 0.568
## Comp 9 1.431 93.705 0.540 9.745 241 138 458 162 0.768 0.598
## Comp 10 3.010 96.715 0.150 9.895 241 140 456 162 0.765 0.598
## Comp 11 3.285 100.000 0.015 9.910 244 141 455 159 0.763 0.605
## Accuracy
## Comp 1 0.670
## Comp 2 0.677
## Comp 3 0.685
## Comp 4 0.679
## Comp 5 0.689
## Comp 6 0.689
## Comp 7 0.685
## Comp 8 0.689
## Comp 9 0.700
## Comp 10 0.698
## Comp 11 0.700
##
##
## Class #4 (A6):
## X expvar X cumexpvar Y expvar Y cumexpvar TP FP TN FN Spec. Sens.
## Comp 1 16.121 16.121 6.181 6.181 0 0 597 402 1.000 0.000
## Comp 2 20.536 36.657 0.957 7.138 30 33 564 372 0.945 0.075
## Comp 3 25.516 62.173 0.322 7.460 26 24 573 376 0.960 0.065
## Comp 4 6.532 68.705 0.928 8.388 82 69 528 320 0.884 0.204
## Comp 5 10.046 78.751 0.328 8.716 85 64 533 317 0.893 0.211
## Comp 6 7.993 86.745 0.133 8.850 87 72 525 315 0.879 0.216
## Comp 7 2.751 89.495 0.179 9.029 87 75 522 315 0.874 0.216
## Comp 8 2.779 92.275 0.177 9.206 92 73 524 310 0.878 0.229
## Comp 9 1.431 93.705 0.540 9.745 93 76 521 309 0.873 0.231
## Comp 10 3.010 96.715 0.150 9.895 94 82 515 308 0.863 0.234
## Comp 11 3.285 100.000 0.015 9.910 94 82 515 308 0.863 0.234
## Accuracy
## Comp 1 0.598
## Comp 2 0.595
## Comp 3 0.600
## Comp 4 0.611
## Comp 5 0.619
## Comp 6 0.613
## Comp 7 0.610
## Comp 8 0.617
## Comp 9 0.615
## Comp 10 0.610
## Comp 11 0.610
##
##
## Class #5 (A7):
## X expvar X cumexpvar Y expvar Y cumexpvar TP FP TN FN Spec. Sens.
## Comp 1 16.121 16.121 6.181 6.181 1 0 868 130 1.000 0.008
## Comp 2 20.536 36.657 0.957 7.138 0 0 868 131 1.000 0.000
## Comp 3 25.516 62.173 0.322 7.460 2 3 865 129 0.997 0.015
## Comp 4 6.532 68.705 0.928 8.388 2 3 865 129 0.997 0.015
## Comp 5 10.046 78.751 0.328 8.716 2 4 864 129 0.995 0.015
## Comp 6 7.993 86.745 0.133 8.850 2 4 864 129 0.995 0.015
## Comp 7 2.751 89.495 0.179 9.029 2 4 864 129 0.995 0.015
## Comp 8 2.779 92.275 0.177 9.206 2 4 864 129 0.995 0.015
## Comp 9 1.431 93.705 0.540 9.745 2 4 864 129 0.995 0.015
## Comp 10 3.010 96.715 0.150 9.895 3 4 864 128 0.995 0.023
## Comp 11 3.285 100.000 0.015 9.910 4 4 864 127 0.995 0.031
## Accuracy
## Comp 1 0.870
## Comp 2 0.869
## Comp 3 0.868
## Comp 4 0.868
## Comp 5 0.867
## Comp 6 0.867
## Comp 7 0.867
## Comp 8 0.867
## Comp 9 0.867
## Comp 10 0.868
## Comp 11 0.869
##
##
## Class #6 (A8):
## X expvar X cumexpvar Y expvar Y cumexpvar TP FP TN FN Spec. Sens.
## Comp 1 16.121 16.121 6.181 6.181 0 0 975 24 1 0
## Comp 2 20.536 36.657 0.957 7.138 0 0 975 24 1 0
## Comp 3 25.516 62.173 0.322 7.460 0 0 975 24 1 0
## Comp 4 6.532 68.705 0.928 8.388 0 0 975 24 1 0
## Comp 5 10.046 78.751 0.328 8.716 0 0 975 24 1 0
## Comp 6 7.993 86.745 0.133 8.850 0 0 975 24 1 0
## Comp 7 2.751 89.495 0.179 9.029 0 0 975 24 1 0
## Comp 8 2.779 92.275 0.177 9.206 0 0 975 24 1 0
## Comp 9 1.431 93.705 0.540 9.745 0 0 975 24 1 0
## Comp 10 3.010 96.715 0.150 9.895 0 0 975 24 1 0
## Comp 11 3.285 100.000 0.015 9.910 0 0 975 24 1 0
## Accuracy
## Comp 1 0.976
## Comp 2 0.976
## Comp 3 0.976
## Comp 4 0.976
## Comp 5 0.976
## Comp 6 0.976
## Comp 7 0.976
## Comp 8 0.976
## Comp 9 0.976
## Comp 10 0.976
## Comp 11 0.976
plot(daX$calres, ncomp = 11)
plotMisclassified(daX$calres, ncomp = 11)
summary(daX$cvres, ncomp = 11)
##
## PLS-DA results (class plsdares) summary:
## Number of selected components: 1
##
## Class #1 (A3):
## X expvar X cumexpvar Y expvar Y cumexpvar TP FP TN FN Spec. Sens.
## Comp 1 NA NA NA NA 0 0 994 5 1 0
## Comp 2 NA NA NA NA 0 0 994 5 1 0
## Comp 3 NA NA NA NA 0 0 994 5 1 0
## Comp 4 NA NA NA NA 0 0 994 5 1 0
## Comp 5 NA NA NA NA 0 0 994 5 1 0
## Comp 6 NA NA NA NA 0 0 994 5 1 0
## Comp 7 NA NA NA NA 0 0 994 5 1 0
## Comp 8 NA NA NA NA 0 0 994 5 1 0
## Comp 9 NA NA NA NA 0 0 994 5 1 0
## Comp 10 NA NA NA NA 0 0 994 5 1 0
## Comp 11 NA NA NA NA 0 0 994 5 1 0
## Accuracy
## Comp 1 0.995
## Comp 2 0.995
## Comp 3 0.995
## Comp 4 0.995
## Comp 5 0.995
## Comp 6 0.995
## Comp 7 0.995
## Comp 8 0.995
## Comp 9 0.995
## Comp 10 0.995
## Comp 11 0.995
##
##
## Class #2 (A4):
## X expvar X cumexpvar Y expvar Y cumexpvar TP FP TN FN Spec. Sens.
## Comp 1 NA NA NA NA 0 0 965 34 1 0
## Comp 2 NA NA NA NA 0 0 965 34 1 0
## Comp 3 NA NA NA NA 0 0 965 34 1 0
## Comp 4 NA NA NA NA 0 0 965 34 1 0
## Comp 5 NA NA NA NA 0 0 965 34 1 0
## Comp 6 NA NA NA NA 0 0 965 34 1 0
## Comp 7 NA NA NA NA 0 0 965 34 1 0
## Comp 8 NA NA NA NA 0 0 965 34 1 0
## Comp 9 NA NA NA NA 0 0 965 34 1 0
## Comp 10 NA NA NA NA 0 0 965 34 1 0
## Comp 11 NA NA NA NA 0 0 965 34 1 0
## Accuracy
## Comp 1 0.966
## Comp 2 0.966
## Comp 3 0.966
## Comp 4 0.966
## Comp 5 0.966
## Comp 6 0.966
## Comp 7 0.966
## Comp 8 0.966
## Comp 9 0.966
## Comp 10 0.966
## Comp 11 0.966
##
##
## Class #3 (A5):
## X expvar X cumexpvar Y expvar Y cumexpvar TP FP TN FN Spec. Sens.
## Comp 1 NA NA NA NA 217 150 446 186 0.748 0.538
## Comp 2 NA NA NA NA 219 146 450 184 0.755 0.543
## Comp 3 NA NA NA NA 235 150 446 168 0.748 0.583
## Comp 4 NA NA NA NA 228 150 446 175 0.748 0.566
## Comp 5 NA NA NA NA 222 139 457 181 0.767 0.551
## Comp 6 NA NA NA NA 222 139 457 181 0.767 0.551
## Comp 7 NA NA NA NA 220 140 456 183 0.765 0.546
## Comp 8 NA NA NA NA 224 142 454 179 0.762 0.556
## Comp 9 NA NA NA NA 237 142 454 166 0.762 0.588
## Comp 10 NA NA NA NA 241 144 452 162 0.758 0.598
## Comp 11 NA NA NA NA 242 144 452 161 0.758 0.600
## Accuracy
## Comp 1 0.664
## Comp 2 0.670
## Comp 3 0.682
## Comp 4 0.675
## Comp 5 0.680
## Comp 6 0.680
## Comp 7 0.677
## Comp 8 0.679
## Comp 9 0.692
## Comp 10 0.694
## Comp 11 0.695
##
##
## Class #4 (A6):
## X expvar X cumexpvar Y expvar Y cumexpvar TP FP TN FN Spec. Sens.
## Comp 1 NA NA NA NA 0 0 597 402 1.000 0.000
## Comp 2 NA NA NA NA 27 37 560 375 0.938 0.067
## Comp 3 NA NA NA NA 24 31 566 378 0.948 0.060
## Comp 4 NA NA NA NA 73 74 523 329 0.876 0.182
## Comp 5 NA NA NA NA 82 76 521 320 0.873 0.204
## Comp 6 NA NA NA NA 82 78 519 320 0.869 0.204
## Comp 7 NA NA NA NA 83 81 516 319 0.864 0.206
## Comp 8 NA NA NA NA 88 81 516 314 0.864 0.219
## Comp 9 NA NA NA NA 84 83 514 318 0.861 0.209
## Comp 10 NA NA NA NA 91 88 509 311 0.853 0.226
## Comp 11 NA NA NA NA 91 90 507 311 0.849 0.226
## Accuracy
## Comp 1 0.598
## Comp 2 0.588
## Comp 3 0.591
## Comp 4 0.597
## Comp 5 0.604
## Comp 6 0.602
## Comp 7 0.600
## Comp 8 0.605
## Comp 9 0.599
## Comp 10 0.601
## Comp 11 0.599
##
##
## Class #5 (A7):
## X expvar X cumexpvar Y expvar Y cumexpvar TP FP TN FN Spec. Sens.
## Comp 1 NA NA NA NA 0 0 868 131 1.000 0.000
## Comp 2 NA NA NA NA 0 0 868 131 1.000 0.000
## Comp 3 NA NA NA NA 2 3 865 129 0.997 0.015
## Comp 4 NA NA NA NA 2 3 865 129 0.997 0.015
## Comp 5 NA NA NA NA 2 4 864 129 0.995 0.015
## Comp 6 NA NA NA NA 2 4 864 129 0.995 0.015
## Comp 7 NA NA NA NA 2 4 864 129 0.995 0.015
## Comp 8 NA NA NA NA 2 4 864 129 0.995 0.015
## Comp 9 NA NA NA NA 2 4 864 129 0.995 0.015
## Comp 10 NA NA NA NA 3 5 863 128 0.994 0.023
## Comp 11 NA NA NA NA 3 5 863 128 0.994 0.023
## Accuracy
## Comp 1 0.869
## Comp 2 0.869
## Comp 3 0.868
## Comp 4 0.868
## Comp 5 0.867
## Comp 6 0.867
## Comp 7 0.867
## Comp 8 0.867
## Comp 9 0.867
## Comp 10 0.867
## Comp 11 0.867
##
##
## Class #6 (A8):
## X expvar X cumexpvar Y expvar Y cumexpvar TP FP TN FN Spec. Sens.
## Comp 1 NA NA NA NA 0 0 975 24 1 0
## Comp 2 NA NA NA NA 0 0 975 24 1 0
## Comp 3 NA NA NA NA 0 0 975 24 1 0
## Comp 4 NA NA NA NA 0 0 975 24 1 0
## Comp 5 NA NA NA NA 0 0 975 24 1 0
## Comp 6 NA NA NA NA 0 0 975 24 1 0
## Comp 7 NA NA NA NA 0 0 975 24 1 0
## Comp 8 NA NA NA NA 0 0 975 24 1 0
## Comp 9 NA NA NA NA 0 0 975 24 1 0
## Comp 10 NA NA NA NA 0 0 975 24 1 0
## Comp 11 NA NA NA NA 0 0 975 24 1 0
## Accuracy
## Comp 1 0.976
## Comp 2 0.976
## Comp 3 0.976
## Comp 4 0.976
## Comp 5 0.976
## Comp 6 0.976
## Comp 7 0.976
## Comp 8 0.976
## Comp 9 0.976
## Comp 10 0.976
## Comp 11 0.976
plot(daX$cvres, ncomp = 11)
plotMisclassified(daX$cvres, ncomp = 11)
summary(daX, ncomp = 11)
##
## PLS-DA model (class plsda) summary
## ------------------------------------
## Info:
## Number of selected components: 11
## Cross-validation: full (leave one out)
##
## Class #1 (A3)
## X cumexpvar Y cumexpvar TP FP TN FN Spec. Sens. Accuracy
## Cal 100 9.91 0 0 994 5 1 0 0.995
## Cv NA NA 0 0 994 5 1 0 0.995
##
## Class #2 (A4)
## X cumexpvar Y cumexpvar TP FP TN FN Spec. Sens. Accuracy
## Cal 100 9.91 0 0 965 34 1 0 0.966
## Cv NA NA 0 0 965 34 1 0 0.966
##
## Class #3 (A5)
## X cumexpvar Y cumexpvar TP FP TN FN Spec. Sens. Accuracy
## Cal 100 9.91 244 141 455 159 0.763 0.605 0.700
## Cv NA NA 242 144 452 161 0.758 0.600 0.695
##
## Class #4 (A6)
## X cumexpvar Y cumexpvar TP FP TN FN Spec. Sens. Accuracy
## Cal 100 9.91 94 82 515 308 0.863 0.234 0.610
## Cv NA NA 91 90 507 311 0.849 0.226 0.599
##
## Class #5 (A7)
## X cumexpvar Y cumexpvar TP FP TN FN Spec. Sens. Accuracy
## Cal 100 9.91 4 4 864 127 0.995 0.031 0.869
## Cv NA NA 3 5 863 128 0.994 0.023 0.867
##
## Class #6 (A8)
## X cumexpvar Y cumexpvar TP FP TN FN Spec. Sens. Accuracy
## Cal 100 9.91 0 0 975 24 1 0 0.976
## Cv NA NA 0 0 975 24 1 0 0.976
getConfusionMatrix(daX$res$cal, ncomp = 11)
## A3 A4 A5 A6 A7 A8 None
## A3 0 0 2 1 0 0 2
## A4 0 0 21 1 0 0 12
## A5 0 0 244 44 0 0 122
## A6 0 0 112 94 3 0 194
## A7 0 0 6 33 4 0 88
## A8 0 0 0 3 1 0 20
getConfusionMatrix(daX$res$cv, ncomp = 11)
## A3 A4 A5 A6 A7 A8 None
## A3 0 0 2 1 0 0 2
## A4 0 0 21 1 0 0 12
## A5 0 0 242 51 0 0 118
## A6 0 0 115 91 3 0 194
## A7 0 0 6 34 3 0 88
## A8 0 0 0 3 2 0 19
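# A short addition (a sketch): the fraction of samples assigned to their own class,
# computed from the confusion matrices above (diagonal counts over the 999 samples).
cmCal <- getConfusionMatrix(daX$res$cal, ncomp = 11)
sum(diag(cmCal[, rownames(cmCal)])) / nrow(X)
cmCv <- getConfusionMatrix(daX$res$cv, ncomp = 11)
sum(diag(cmCv[, rownames(cmCv)])) / nrow(X)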
par(mfrow = c(3, 2))
plotMisclassified(daX, ncomp = 11)
plotSensitivity(daX, ncomp = 11)
plotSpecificity(daX, ncomp = 11)
head(daX$res$cal$misclassified)
## [,1] [,2] [,3] [,4] [,5] [,6]
## A3 0.005005005 0.005005005 0.005005005 0.005005005 0.005005005 0.005005005
## A4 0.034034034 0.034034034 0.034034034 0.034034034 0.034034034 0.034034034
## A5 0.330330330 0.323323323 0.315315315 0.321321321 0.311311311 0.311311311
## A6 0.402402402 0.405405405 0.400400400 0.389389389 0.381381381 0.387387387
## A7 0.130130130 0.131131131 0.132132132 0.132132132 0.133133133 0.133133133
## A8 0.024024024 0.024024024 0.024024024 0.024024024 0.024024024 0.024024024
## [,7] [,8] [,9] [,10] [,11]
## A3 0.005005005 0.005005005 0.005005005 0.005005005 0.005005005
## A4 0.034034034 0.034034034 0.034034034 0.034034034 0.034034034
## A5 0.315315315 0.311311311 0.300300300 0.302302302 0.300300300
## A6 0.390390390 0.383383383 0.385385385 0.390390390 0.390390390
## A7 0.133133133 0.133133133 0.133133133 0.132132132 0.131131131
## A8 0.024024024 0.024024024 0.024024024 0.024024024 0.024024024
head(daX$res$cal$y.pred)
## , , A3
##
## Comp 1 Comp 2 Comp 3 Comp 4 Comp 5 Comp 6 Comp 7
## 1 -0.9904361 -0.9949083 -0.9954794 -0.9986075 -1.0000644 -0.9917087 -0.9908053
## 2 -0.9905179 -0.9955137 -0.9966088 -0.9936480 -0.9944053 -1.0015984 -0.9984933
## 3 -0.9904066 -0.9945547 -0.9955293 -0.9949419 -0.9948515 -0.9973462 -0.9945303
## 4 -0.9898999 -0.9861447 -0.9873828 -0.9900161 -0.9842715 -0.9783521 -0.9761465
## 5 -0.9904361 -0.9949083 -0.9954794 -0.9986075 -1.0000644 -0.9917087 -0.9908053
## 6 -0.9904122 -0.9944862 -0.9949413 -0.9985652 -1.0003974 -0.9934330 -0.9920736
## Comp 8 Comp 9 Comp 10 Comp 11
## 1 -0.9905562 -0.9923754 -0.9925204 -0.9929916
## 2 -1.0155008 -1.0078320 -1.0108844 -1.0111722
## 3 -1.0049508 -1.0032851 -1.0059720 -1.0056855
## 4 -0.9704901 -0.9653457 -0.9651054 -0.9664834
## 5 -0.9905562 -0.9923754 -0.9925204 -0.9929916
## 6 -0.9914034 -0.9948991 -0.9954968 -0.9960364
##
## , , A4
##
## Comp 1 Comp 2 Comp 3 Comp 4 Comp 5 Comp 6 Comp 7
## 1 -0.9021881 -0.8191273 -0.8213773 -0.8682590 -0.8775607 -0.8751814 -0.8807747
## 2 -0.8967345 -0.8039493 -0.8082632 -0.7638884 -0.7687239 -0.7707720 -0.7899961
## 3 -0.9041530 -0.8271120 -0.8309513 -0.8221480 -0.8215708 -0.8222812 -0.8397153
## 4 -0.9379389 -1.0076832 -1.0125608 -1.0520267 -1.0153488 -1.0136633 -1.0273188
## 5 -0.9021881 -0.8191273 -0.8213773 -0.8682590 -0.8775607 -0.8751814 -0.8807747
## 6 -0.9037797 -0.8281151 -0.8299077 -0.8842208 -0.8959194 -0.8939363 -0.9023525
## Comp 8 Comp 9 Comp 10 Comp 11
## 1 -0.8805588 -0.8866272 -0.8869660 -0.8849921
## 2 -0.8047351 -0.7791539 -0.7862840 -0.7850786
## 3 -0.8487459 -0.8431895 -0.8494658 -0.8506661
## 4 -1.0224168 -1.0052566 -1.0046951 -0.9989225
## 5 -0.8805588 -0.8866272 -0.8869660 -0.8849921
## 6 -0.9017716 -0.9134326 -0.9148287 -0.9125682
##
## , , A5
##
## Comp 1 Comp 2 Comp 3 Comp 4 Comp 5 Comp 6 Comp 7
## 1 0.2315246 0.3912832 0.3766589 0.3083608 0.2872432 0.2864754 0.2790533
## 2 0.3093983 0.4878608 0.4598209 0.5244670 0.5134891 0.5141501 0.4886401
## 3 0.2034677 0.3516479 0.3266932 0.3395181 0.3408284 0.3410576 0.3179230
## 4 -0.2789681 -0.4131139 -0.4448167 -0.5023115 -0.4190416 -0.4195855 -0.4377061
## 5 0.2315246 0.3912832 0.3766589 0.3083608 0.2872432 0.2864754 0.2790533
## 6 0.2087978 0.3543308 0.3426790 0.2635547 0.2369953 0.2363553 0.2251873
## Comp 8 Comp 9 Comp 10 Comp 11
## 1 0.2786740 0.2962212 0.2974515 0.2917826
## 2 0.5145341 0.4405642 0.4664573 0.4629955
## 3 0.3337882 0.3177215 0.3405143 0.3439613
## 4 -0.4463180 -0.4959381 -0.4979770 -0.5145556
## 5 0.2786740 0.2962212 0.2974515 0.2917826
## 6 0.2241668 0.2578855 0.2629555 0.2564632
##
## , , A6
##
## Comp 1 Comp 2 Comp 3 Comp 4 Comp 5 Comp 6
## 1 -0.2652657 -0.49280129 -0.496351762 -0.3218860 -0.3138709 -0.3319673
## 2 -0.2781135 -0.53228796 -0.539095437 -0.7042322 -0.7000656 -0.6844872
## 3 -0.2606369 -0.47168186 -0.477740305 -0.5105012 -0.5109985 -0.5055957
## 4 -0.1810439 0.01001265 0.002315901 0.1491849 0.1175801 0.1047603
## 5 -0.2652657 -0.49280129 -0.496351762 -0.3218860 -0.3138709 -0.3319673
## 6 -0.2615162 -0.46879096 -0.471619751 -0.2694986 -0.2594180 -0.2745013
## Comp 7 Comp 8 Comp 9 Comp 10 Comp 11
## 1 -0.3173294 -0.3170705 -0.3146044 -0.3137132 -0.3138607
## 2 -0.6341766 -0.6518543 -0.6622501 -0.6434947 -0.6435848
## 3 -0.4599695 -0.4708006 -0.4730587 -0.4565489 -0.4564593
## 4 0.1404975 0.1463769 0.1394031 0.1379264 0.1374952
## 5 -0.3173294 -0.3170705 -0.3146044 -0.3137132 -0.3138607
## 6 -0.2524757 -0.2517791 -0.2470402 -0.2433678 -0.2435367
##
## , , A7
##
## Comp 1 Comp 2 Comp 3 Comp 4 Comp 5 Comp 6 Comp 7
## 1 -1.0354424 -1.070967 -1.0543934 -1.0664155 -1.0494056 -1.0454066 -1.0444948
## 2 -1.0900277 -1.129711 -1.0979343 -1.0865550 -1.0777125 -1.0811550 -1.0780213
## 3 -1.0157760 -1.048726 -1.0204453 -1.0181878 -1.0192432 -1.0204372 -1.0175952
## 4 -0.6776141 -0.647785 -0.6118571 -0.6219776 -0.6890503 -0.6862173 -0.6839913
## 5 -1.0354424 -1.070967 -1.0543934 -1.0664155 -1.0494056 -1.0454066 -1.0444948
## 6 -1.0195121 -1.051873 -1.0386686 -1.0525964 -1.0312032 -1.0278701 -1.0264981
## Comp 8 Comp 9 Comp 10 Comp 11
## 1 -1.0445965 -1.0537303 -1.0548919 -1.052926
## 2 -1.0710820 -1.0325785 -1.0570266 -1.055826
## 3 -1.0133435 -1.0049803 -1.0265011 -1.027696
## 4 -0.6862993 -0.6604705 -0.6585455 -0.652797
## 5 -1.0445965 -1.0537303 -1.0548919 -1.052926
## 6 -1.0267716 -1.0443231 -1.0491102 -1.046859
##
## , , A8
##
## Comp 1 Comp 2 Comp 3 Comp 4 Comp 5 Comp 6 Comp 7
## 1 -1.0381922 -1.0134796 -1.0090570 -1.0531928 -1.0463417 -1.0422114 -1.0456491
## 2 -1.0540047 -1.0263988 -1.0179192 -0.9761433 -0.9725818 -0.9761374 -0.9879528
## 3 -1.0324952 -1.0095735 -1.0020270 -0.9937392 -0.9941643 -0.9953974 -1.0061127
## 4 -0.9345351 -0.9552858 -0.9456985 -0.9828530 -1.0098680 -1.0069420 -1.0153349
## 5 -1.0381922 -1.0134796 -1.0090570 -1.0531928 -1.0463417 -1.0422114 -1.0456491
## 6 -1.0335775 -1.0110653 -1.0075417 -1.0586737 -1.0500571 -1.0466146 -1.0517873
## Comp 8 Comp 9 Comp 10 Comp 11
## 1 -1.0458921 -1.0488840 -1.0493600 -1.0470120
## 2 -0.9713620 -0.9587496 -0.9687675 -0.9673337
## 3 -0.9959475 -0.9932080 -1.0020264 -1.0034541
## 4 -1.0208527 -1.0123922 -1.0116033 -1.0047366
## 5 -1.0458921 -1.0488840 -1.0493600 -1.0470120
## 6 -1.0524411 -1.0581904 -1.0601519 -1.0574629
head(daX$res$cv$misclassified)
## [,1] [,2] [,3] [,4] [,5] [,6]
## A3 0.005005005 0.005005005 0.005005005 0.005005005 0.005005005 0.005005005
## A4 0.034034034 0.034034034 0.034034034 0.034034034 0.034034034 0.034034034
## A5 0.336336336 0.330330330 0.318318318 0.325325325 0.320320320 0.320320320
## A6 0.402402402 0.412412412 0.409409409 0.403403403 0.396396396 0.398398398
## A7 0.131131131 0.131131131 0.132132132 0.132132132 0.133133133 0.133133133
## A8 0.024024024 0.024024024 0.024024024 0.024024024 0.024024024 0.024024024
## [,7] [,8] [,9] [,10] [,11]
## A3 0.005005005 0.005005005 0.005005005 0.005005005 0.005005005
## A4 0.034034034 0.034034034 0.034034034 0.034034034 0.034034034
## A5 0.323323323 0.321321321 0.308308308 0.306306306 0.305305305
## A6 0.400400400 0.395395395 0.401401401 0.399399399 0.401401401
## A7 0.133133133 0.133133133 0.133133133 0.133133133 0.133133133
## A8 0.024024024 0.024024024 0.024024024 0.024024024 0.024024024
head(daX$res$cv$y.pred)
## , , A3
##
## Comp 1 Comp 2 Comp 3 Comp 4 Comp 5 Comp 6 Comp 7
## 1 -0.9903944 -0.9948657 -0.9953878 -0.9985981 -1.0000785 -0.9916369 -0.9907174
## 2 -0.9904641 -0.9954294 -0.9965617 -0.9935440 -0.9942858 -1.0015592 -0.9983470
## 3 -0.9903674 -0.9945014 -0.9954973 -0.9949221 -0.9948236 -0.9973154 -0.9943982
## 4 -0.9898908 -0.9861613 -0.9874240 -0.9900623 -0.9841961 -0.9781232 -0.9760387
## 5 -0.9903944 -0.9948657 -0.9953878 -0.9985981 -1.0000785 -0.9916369 -0.9907174
## 6 -0.9903730 -0.9944452 -0.9948447 -0.9985512 -1.0004158 -0.9933808 -0.9919924
## Comp 8 Comp 9 Comp 10 Comp 11
## 1 -0.9904381 -0.9922765 -0.9924424 -0.9929417
## 2 -1.0156735 -1.0077520 -1.0110623 -1.0113022
## 3 -1.0049156 -1.0031726 -1.0060041 -1.0057216
## 4 -0.9702254 -0.9652635 -0.9649063 -0.9661989
## 5 -0.9904381 -0.9922765 -0.9924424 -0.9929417
## 6 -0.9912700 -0.9947945 -0.9954301 -0.9960081
##
## , , A4
##
## Comp 1 Comp 2 Comp 3 Comp 4 Comp 5 Comp 6 Comp 7
## 1 -0.9024277 -0.8182402 -0.8200448 -0.8674530 -0.8767430 -0.8743100 -0.8798793
## 2 -0.8967311 -0.8028999 -0.8071398 -0.7623450 -0.7671011 -0.7690208 -0.7873923
## 3 -0.9042187 -0.8264035 -0.8300649 -0.8215404 -0.8209112 -0.8215931 -0.8386051
## 4 -0.9378040 -1.0071301 -1.0122818 -1.0513910 -1.0151786 -1.0135773 -1.0273534
## 5 -0.9024277 -0.8182402 -0.8200448 -0.8674530 -0.8767430 -0.8743100 -0.8798793
## 6 -0.9039984 -0.8273783 -0.8287725 -0.8835464 -0.8952570 -0.8932336 -0.9016166
## Comp 8 Comp 9 Comp 10 Comp 11
## 1 -0.8796399 -0.8857623 -0.8861457 -0.8841736
## 2 -0.8027265 -0.7759740 -0.7836268 -0.7825765
## 3 -0.8476756 -0.8418492 -0.8484075 -0.8497171
## 4 -1.0223862 -1.0058579 -1.0050154 -0.9989133
## 5 -0.8796399 -0.8857623 -0.8861457 -0.8841736
## 6 -0.9009980 -0.9127272 -0.9141911 -0.9119429
##
## , , A5
##
## Comp 1 Comp 2 Comp 3 Comp 4 Comp 5 Comp 6 Comp 7
## 1 0.2252938 0.3875094 0.3745702 0.3037307 0.2825925 0.2813223 0.2737902
## 2 0.3050570 0.4843293 0.4555945 0.5202190 0.5093901 0.5099474 0.4847298
## 3 0.1994652 0.3485960 0.3235507 0.3359968 0.3374227 0.3376553 0.3141350
## 4 -0.2773113 -0.4098327 -0.4428772 -0.4993324 -0.4149897 -0.4155489 -0.4333525
## 5 0.2252938 0.3875094 0.3745702 0.3037307 0.2825925 0.2813223 0.2737902
## 6 0.2029412 0.3506497 0.3407260 0.2586814 0.2319867 0.2309940 0.2195952
## Comp 8 Comp 9 Comp 10 Comp 11
## 1 0.2733790 0.2910137 0.2924100 0.2867422
## 2 0.5096446 0.4323757 0.4597375 0.4567436
## 3 0.3291326 0.3123776 0.3360886 0.3397923
## 4 -0.4425082 -0.4901908 -0.4932692 -0.5104358
## 5 0.2733790 0.2910137 0.2924100 0.2867422
## 6 0.2185356 0.2522713 0.2576009 0.2511460
##
## , , A6
##
## Comp 1 Comp 2 Comp 3 Comp 4 Comp 5 Comp 6
## 1 -0.2624260 -0.490001520 -0.494182982 -0.3174039 -0.3091525 -0.32727760
## 2 -0.2753503 -0.528895813 -0.536707797 -0.7012415 -0.6971777 -0.68158063
## 3 -0.2583772 -0.469081392 -0.476190400 -0.5077273 -0.5082756 -0.50289339
## 4 -0.1824548 0.005411248 -0.002436561 0.1415959 0.1111939 0.09830535
## 5 -0.2624260 -0.490001520 -0.494182982 -0.3174039 -0.3091525 -0.32727760
## 6 -0.2588639 -0.466213510 -0.469371540 -0.2647701 -0.2543742 -0.26952502
## Comp 7 Comp 8 Comp 9 Comp 10 Comp 11
## 1 -0.3125516 -0.3122595 -0.3097729 -0.3087647 -0.3089774
## 2 -0.6322333 -0.6482422 -0.6582357 -0.6392185 -0.6394353
## 3 -0.4572523 -0.4675850 -0.4698763 -0.4532029 -0.4530052
## 4 0.1337988 0.1398673 0.1327497 0.1304925 0.1301753
## 5 -0.3125516 -0.3122595 -0.3097729 -0.3087647 -0.3089774
## 6 -0.2473030 -0.2465451 -0.2417297 -0.2378616 -0.2381270
##
## , , A7
##
## Comp 1 Comp 2 Comp 3 Comp 4 Comp 5 Comp 6 Comp 7
## 1 -1.0325800 -1.0709673 -1.0557862 -1.0667184 -1.0498430 -1.0456075 -1.044695
## 2 -1.0887575 -1.1304704 -1.0975363 -1.0869963 -1.0782732 -1.0817359 -1.078985
## 3 -1.0143377 -1.0489619 -1.0200918 -1.0180610 -1.0192052 -1.0204168 -1.017702
## 4 -0.6778979 -0.6473325 -0.6098772 -0.6192753 -0.6871201 -0.6842085 -0.681870
## 5 -1.0325800 -1.0709673 -1.0557862 -1.0667184 -1.0498430 -1.0456075 -1.044695
## 6 -1.0168144 -1.0516462 -1.0400252 -1.0527599 -1.0314541 -1.0279367 -1.026557
## Comp 8 Comp 9 Comp 10 Comp 11
## 1 -1.0448185 -1.0539823 -1.0553002 -1.0533029
## 2 -1.0719227 -1.0323119 -1.0575942 -1.0564762
## 3 -1.0131142 -1.0044624 -1.0265344 -1.0278723
## 4 -0.6839283 -0.6588244 -0.6558988 -0.6498504
## 5 -1.0448185 -1.0539823 -1.0553002 -1.0533029
## 6 -1.0268806 -1.0444342 -1.0494774 -1.0471942
##
## , , A8
##
## Comp 1 Comp 2 Comp 3 Comp 4 Comp 5 Comp 6 Comp 7
## 1 -1.0374656 -1.0134347 -1.0091685 -1.0535573 -1.0467754 -1.0424903 -1.0459469
## 2 -1.0537538 -1.0266337 -1.0176489 -0.9760921 -0.9725523 -0.9760509 -0.9877724
## 3 -1.0321642 -1.0096478 -1.0017063 -0.9937460 -0.9942071 -0.9954368 -1.0061771
## 4 -0.9346413 -0.9549546 -0.9451033 -0.9815350 -1.0097093 -1.0068475 -1.0151842
## 5 -1.0374656 -1.0134347 -1.0091685 -1.0535573 -1.0467754 -1.0424903 -1.0459469
## 6 -1.0328915 -1.0109665 -1.0077121 -1.0590538 -1.0504857 -1.0469178 -1.0521262
## Comp 8 Comp 9 Comp 10 Comp 11
## 1 -1.0462230 -1.0492197 -1.0497569 -1.0473466
## 2 -0.9710797 -0.9581020 -0.9682357 -0.9669534
## 3 -0.9958423 -0.9930172 -1.0019398 -1.0034761
## 4 -1.0208192 -1.0126132 -1.0114028 -1.0047768
## 5 -1.0462230 -1.0492197 -1.0497569 -1.0473466
## 6 -1.0528420 -1.0585858 -1.0606406 -1.0578739
#Article CULTIVAR: nested k-fold cross-validation with PLS-DA
#activate libraries
library(mdatools)
library(psych)
library(rms)
library(ggpubr)
library(PerformanceAnalytics)
library(Hmisc)
library(pca3d)
library(rpart)
library(e1071)
#split the dataset into training (2/3) and test (1/3) sets, repeated 10 times
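# For reproducible splits one could fix the random seed first (an optional addition;
# the seed value is arbitrary).
set.seed(1)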
index <- 1:nrow(h144N)
testindex <- sample(index, trunc(length(index)/3))
testset1 <- h144N[testindex, ]
trainset1 <- h144N[-testindex, ]
index <- 1:nrow(h144N)
testindex <- sample(index, trunc(length(index)/3))
testset2 <- h144N[testindex, ]
trainset2 <- h144N[-testindex, ]
index <- 1:nrow(h144N)
testindex <- sample(index, trunc(length(index)/3))
testset3 <- h144N[testindex, ]
trainset3 <- h144N[-testindex, ]
index <- 1:nrow(h144N)
testindex <- sample(index, trunc(length(index)/3))
testset4 <- h144N[testindex, ]
trainset4 <- h144N[-testindex, ]
index <- 1:nrow(h144N)
testindex <- sample(index, trunc(length(index)/3))
testset5 <- h144N[testindex, ]
trainset5 <- h144N[-testindex, ]
index <- 1:nrow(h144N)
testindex <- sample(index, trunc(length(index)/3))
testset6 <- h144N[testindex, ]
trainset6 <- h144N[-testindex, ]
index <- 1:nrow(h144N)
testindex <- sample(index, trunc(length(index)/3))
testset7 <- h144N[testindex, ]
trainset7 <- h144N[-testindex, ]
index <- 1:nrow(h144N)
testindex <- sample(index, trunc(length(index)/3))
testset8 <- h144N[testindex, ]
trainset8 <- h144N[-testindex, ]
index <- 1:nrow(h144N)
testindex <- sample(index, trunc(length(index)/3))
testset9 <- h144N[testindex, ]
trainset9 <- h144N[-testindex, ]
index <- 1:nrow(h144N)
testindex <- sample(index, trunc(length(index)/3))
testset10 <- h144N[testindex, ]
trainset10 <- h144N[-testindex, ]
#PLS-DA model on each training set (4-segment random cross-validation, 10 repetitions)
daT1 = plsda(trainset1[,-1], trainset1[,1], scale = TRUE, cv = list("rand", nseg = 4, nrep = 10))
daT2 = plsda(trainset2[,-1], trainset2[,1], scale = TRUE, cv = list("rand", nseg = 4, nrep = 10))
daT3 = plsda(trainset3[,-1], trainset3[,1], scale = TRUE, cv = list("rand", nseg = 4, nrep = 10))
daT4 = plsda(trainset4[,-1], trainset4[,1], scale = TRUE, cv = list("rand", nseg = 4, nrep = 10))
daT5 = plsda(trainset5[,-1], trainset5[,1], scale = TRUE, cv = list("rand", nseg = 4, nrep = 10))
daT6 = plsda(trainset6[,-1], trainset6[,1], scale = TRUE, cv = list("rand", nseg = 4, nrep = 10))
daT7 = plsda(trainset7[,-1], trainset7[,1], scale = TRUE, cv = list("rand", nseg = 4, nrep = 10))
daT8 = plsda(trainset8[,-1], trainset8[,1], scale = TRUE, cv = list("rand", nseg = 4, nrep = 10))
daT9 = plsda(trainset9[,-1], trainset9[,1], scale = TRUE, cv = list("rand", nseg = 4, nrep = 10))
daT10 = plsda(trainset10[,-1], trainset10[,1], scale = TRUE, cv = list("rand", nseg = 4, nrep = 10))
#prediction on each test set (each model is evaluated against its own test labels)
T1 = predict(daT1, testset1[,-1], testset1[,1])
T2 = predict(daT2, testset2[,-1], testset2[,1])
T3 = predict(daT3, testset3[,-1], testset3[,1])
T4 = predict(daT4, testset4[,-1], testset4[,1])
T5 = predict(daT5, testset5[,-1], testset5[,1])
T6 = predict(daT6, testset6[,-1], testset6[,1])
T7 = predict(daT7, testset7[,-1], testset7[,1])
T8 = predict(daT8, testset8[,-1], testset8[,1])
T9 = predict(daT9, testset9[,-1], testset9[,1])
T10 = predict(daT10, testset10[,-1], testset10[,1])
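# A closing sketch (assuming each Tk above is an mdatools plsdares object, which carries
# a per-class misclassification matrix as shown earlier for daX$res$cal): inspect one
# repetition and average the per-class misclassification rate over the ten repetitions
# at the last component.
summary(T1)
tests <- list(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10)
mis <- sapply(tests, function(r) mean(r$misclassified[, ncol(r$misclassified)]))
mean(mis)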