##################################
# Loading R libraries
##################################
library(AppliedPredictiveModeling)
library(tidyr)
library(caret)
library(lattice)
library(dplyr)
library(moments)
library(skimr)
library(RANN)
library(pls)
library(corrplot)
library(lares)
library(DMwR2)
library(gridExtra)
library(rattle)
library(RColorBrewer)
library(stats)
library(caretEnsemble)
library(pROC)
library(adabag)
library(gbm)
library(xgboost)
library(randomForest)
library(kernlab)
library(klaR)
library(rpart)
library(rpart.plot)
##################################
# Defining file paths
##################################
# Relative path (below the project root) holding the original datasets
DATASETS_ORIGINAL_PATH <- file.path("datasets", "original")
##################################
# Loading source and
# formulating the analysis set
##################################
# Read the Wisconsin Breast Cancer source file; treat common empty/NaN
# markers as missing and keep character columns as-is (diagnosis is
# converted to a factor explicitly later)
BreastCancer <- read.csv(file.path("..", DATASETS_ORIGINAL_PATH, "WisconsinBreastCancer.csv"),
                         na.strings = c("NA", "NaN", " ", ""),
                         stringsAsFactors = FALSE)
BreastCancer <- as.data.frame(BreastCancer)
##################################
# Performing a general exploration of the data set
##################################
# Confirm the analysis set dimensions (observations x variables)
dim(BreastCancer)
## [1] 1138 32
# Review each column's storage type and a preview of its values
str(BreastCancer)
## 'data.frame': 1138 obs. of 32 variables:
## $ id : int 842302 842517 84300903 84348301 84358402 843786 844359 84458202 844981 84501001 ...
## $ diagnosis : chr "M" "M" "M" "M" ...
## $ radius_mean : num 18 20.6 19.7 11.4 20.3 ...
## $ texture_mean : num 10.4 17.8 21.2 20.4 14.3 ...
## $ perimeter_mean : num 122.8 132.9 130 77.6 135.1 ...
## $ area_mean : num 1001 1326 1203 386 1297 ...
## $ smoothness_mean : num 0.1184 0.0847 0.1096 0.1425 0.1003 ...
## $ compactness_mean : num 0.2776 0.0786 0.1599 0.2839 0.1328 ...
## $ concavity_mean : num 0.3001 0.0869 0.1974 0.2414 0.198 ...
## $ concave.points_mean : num 0.1471 0.0702 0.1279 0.1052 0.1043 ...
## $ symmetry_mean : num 0.242 0.181 0.207 0.26 0.181 ...
## $ fractal_dimension_mean : num 0.0787 0.0567 0.06 0.0974 0.0588 ...
## $ radius_se : num 1.095 0.543 0.746 0.496 0.757 ...
## $ texture_se : num 0.905 0.734 0.787 1.156 0.781 ...
## $ perimeter_se : num 8.59 3.4 4.58 3.44 5.44 ...
## $ area_se : num 153.4 74.1 94 27.2 94.4 ...
## $ smoothness_se : num 0.0064 0.00522 0.00615 0.00911 0.01149 ...
## $ compactness_se : num 0.049 0.0131 0.0401 0.0746 0.0246 ...
## $ concavity_se : num 0.0537 0.0186 0.0383 0.0566 0.0569 ...
## $ concave.points_se : num 0.0159 0.0134 0.0206 0.0187 0.0188 ...
## $ symmetry_se : num 0.03 0.0139 0.0225 0.0596 0.0176 ...
## $ fractal_dimension_se : num 0.00619 0.00353 0.00457 0.00921 0.00511 ...
## $ radius_worst : num 25.4 25 23.6 14.9 22.5 ...
## $ texture_worst : num 17.3 23.4 25.5 26.5 16.7 ...
## $ perimeter_worst : num 184.6 158.8 152.5 98.9 152.2 ...
## $ area_worst : num 2019 1956 1709 568 1575 ...
## $ smoothness_worst : num 0.162 0.124 0.144 0.21 0.137 ...
## $ compactness_worst : num 0.666 0.187 0.424 0.866 0.205 ...
## $ concavity_worst : num 0.712 0.242 0.45 0.687 0.4 ...
## $ concave.points_worst : num 0.265 0.186 0.243 0.258 0.163 ...
## $ symmetry_worst : num 0.46 0.275 0.361 0.664 0.236 ...
## $ fractal_dimension_worst: num 0.1189 0.089 0.0876 0.173 0.0768 ...
# Per-column distribution summaries (min/quartiles/mean/max)
summary(BreastCancer)
## id diagnosis radius_mean texture_mean
## Min. : 8670 Length:1138 Min. : 6.981 Min. : 9.71
## 1st Qu.: 869218 Class :character 1st Qu.:11.700 1st Qu.:16.17
## Median : 906024 Mode :character Median :13.370 Median :18.84
## Mean : 30371831 Mean :14.127 Mean :19.29
## 3rd Qu.: 8813129 3rd Qu.:15.780 3rd Qu.:21.80
## Max. :911320502 Max. :28.110 Max. :39.28
## perimeter_mean area_mean smoothness_mean compactness_mean
## Min. : 43.79 Min. : 143.5 Min. :0.05263 Min. :0.01938
## 1st Qu.: 75.17 1st Qu.: 420.3 1st Qu.:0.08637 1st Qu.:0.06492
## Median : 86.24 Median : 551.1 Median :0.09587 Median :0.09263
## Mean : 91.97 Mean : 654.9 Mean :0.09636 Mean :0.10434
## 3rd Qu.:104.10 3rd Qu.: 782.7 3rd Qu.:0.10530 3rd Qu.:0.13040
## Max. :188.50 Max. :2501.0 Max. :0.16340 Max. :0.34540
## concavity_mean concave.points_mean symmetry_mean fractal_dimension_mean
## Min. :0.00000 Min. :0.00000 Min. :0.1060 Min. :0.04996
## 1st Qu.:0.02956 1st Qu.:0.02031 1st Qu.:0.1619 1st Qu.:0.05770
## Median :0.06154 Median :0.03350 Median :0.1792 Median :0.06154
## Mean :0.08880 Mean :0.04892 Mean :0.1812 Mean :0.06280
## 3rd Qu.:0.13070 3rd Qu.:0.07400 3rd Qu.:0.1957 3rd Qu.:0.06612
## Max. :0.42680 Max. :0.20120 Max. :0.3040 Max. :0.09744
## radius_se texture_se perimeter_se area_se
## Min. :0.1115 Min. :0.3602 Min. : 0.757 Min. : 6.802
## 1st Qu.:0.2324 1st Qu.:0.8339 1st Qu.: 1.606 1st Qu.: 17.850
## Median :0.3242 Median :1.1080 Median : 2.287 Median : 24.530
## Mean :0.4052 Mean :1.2169 Mean : 2.866 Mean : 40.337
## 3rd Qu.:0.4789 3rd Qu.:1.4740 3rd Qu.: 3.357 3rd Qu.: 45.190
## Max. :2.8730 Max. :4.8850 Max. :21.980 Max. :542.200
## smoothness_se compactness_se concavity_se concave.points_se
## Min. :0.001713 Min. :0.002252 Min. :0.00000 Min. :0.000000
## 1st Qu.:0.005169 1st Qu.:0.013080 1st Qu.:0.01509 1st Qu.:0.007638
## Median :0.006380 Median :0.020450 Median :0.02589 Median :0.010930
## Mean :0.007041 Mean :0.025478 Mean :0.03189 Mean :0.011796
## 3rd Qu.:0.008146 3rd Qu.:0.032450 3rd Qu.:0.04205 3rd Qu.:0.014710
## Max. :0.031130 Max. :0.135400 Max. :0.39600 Max. :0.052790
## symmetry_se fractal_dimension_se radius_worst texture_worst
## Min. :0.007882 Min. :0.0008948 Min. : 7.93 Min. :12.02
## 1st Qu.:0.015160 1st Qu.:0.0022480 1st Qu.:13.01 1st Qu.:21.08
## Median :0.018730 Median :0.0031870 Median :14.97 Median :25.41
## Mean :0.020542 Mean :0.0037949 Mean :16.27 Mean :25.68
## 3rd Qu.:0.023480 3rd Qu.:0.0045580 3rd Qu.:18.79 3rd Qu.:29.72
## Max. :0.078950 Max. :0.0298400 Max. :36.04 Max. :49.54
## perimeter_worst area_worst smoothness_worst compactness_worst
## Min. : 50.41 Min. : 185.2 Min. :0.07117 Min. :0.02729
## 1st Qu.: 84.11 1st Qu.: 515.3 1st Qu.:0.11660 1st Qu.:0.14720
## Median : 97.66 Median : 686.5 Median :0.13130 Median :0.21190
## Mean :107.26 Mean : 880.6 Mean :0.13237 Mean :0.25427
## 3rd Qu.:125.40 3rd Qu.:1084.0 3rd Qu.:0.14600 3rd Qu.:0.33910
## Max. :251.20 Max. :4254.0 Max. :0.22260 Max. :1.05800
## concavity_worst concave.points_worst symmetry_worst fractal_dimension_worst
## Min. :0.0000 Min. :0.00000 Min. :0.1565 Min. :0.05504
## 1st Qu.:0.1145 1st Qu.:0.06493 1st Qu.:0.2504 1st Qu.:0.07146
## Median :0.2267 Median :0.09993 Median :0.2822 Median :0.08004
## Mean :0.2722 Mean :0.11461 Mean :0.2901 Mean :0.08395
## 3rd Qu.:0.3829 3rd Qu.:0.16140 3rd Qu.:0.3179 3rd Qu.:0.09208
## Max. :1.2520 Max. :0.29100 Max. :0.6638 Max. :0.20750
##################################
# Setting the data type
# for the response variable
##################################
# Encode the response as a factor with benign ("B") as the reference level
# so "M" (malignant) is treated as the positive/second level downstream
BreastCancer$diagnosis <- factor(BreastCancer$diagnosis,
                                 levels = c("B", "M"))
##################################
# Formulating a data type assessment summary
##################################
# Working copy for the preliminary data assessment (PDA)
PDA <- BreastCancer
# Tabulate each column's index, name and storage class
# (outer parentheses print the result while assigning)
(PDA.Summary <- data.frame(
  Column.Index = seq_along(names(PDA)),
  Column.Name = names(PDA),
  Column.Type = sapply(PDA, function(x) class(x)),
  row.names = NULL)
)
## Column.Index Column.Name Column.Type
## 1 1 id integer
## 2 2 diagnosis factor
## 3 3 radius_mean numeric
## 4 4 texture_mean numeric
## 5 5 perimeter_mean numeric
## 6 6 area_mean numeric
## 7 7 smoothness_mean numeric
## 8 8 compactness_mean numeric
## 9 9 concavity_mean numeric
## 10 10 concave.points_mean numeric
## 11 11 symmetry_mean numeric
## 12 12 fractal_dimension_mean numeric
## 13 13 radius_se numeric
## 14 14 texture_se numeric
## 15 15 perimeter_se numeric
## 16 16 area_se numeric
## 17 17 smoothness_se numeric
## 18 18 compactness_se numeric
## 19 19 concavity_se numeric
## 20 20 concave.points_se numeric
## 21 21 symmetry_se numeric
## 22 22 fractal_dimension_se numeric
## 23 23 radius_worst numeric
## 24 24 texture_worst numeric
## 25 25 perimeter_worst numeric
## 26 26 area_worst numeric
## 27 27 smoothness_worst numeric
## 28 28 compactness_worst numeric
## 29 29 concavity_worst numeric
## 30 30 concave.points_worst numeric
## 31 31 symmetry_worst numeric
## 32 32 fractal_dimension_worst numeric
##################################
# Loading dataset
##################################
# Working copy for the data quality assessment (DQA)
DQA <- BreastCancer
##################################
# Formulating an overall data quality assessment summary
##################################
# Per-column row count, NA count and fill rate (share of non-missing rows)
(DQA.Summary <- data.frame(
  Column.Name = names(DQA),
  Column.Type = sapply(DQA, function(x) class(x)),
  Row.Count = sapply(DQA, function(x) nrow(DQA)),
  NA.Count = sapply(DQA, function(x) sum(is.na(x))),
  Fill.Rate = sapply(DQA, function(x) format(round(sum(!is.na(x)) / nrow(DQA), 3), nsmall = 3)),
  row.names = NULL)
)
## Column.Name Column.Type Row.Count NA.Count Fill.Rate
## 1 id integer 1138 0 1.000
## 2 diagnosis factor 1138 0 1.000
## 3 radius_mean numeric 1138 0 1.000
## 4 texture_mean numeric 1138 0 1.000
## 5 perimeter_mean numeric 1138 0 1.000
## 6 area_mean numeric 1138 0 1.000
## 7 smoothness_mean numeric 1138 0 1.000
## 8 compactness_mean numeric 1138 0 1.000
## 9 concavity_mean numeric 1138 0 1.000
## 10 concave.points_mean numeric 1138 0 1.000
## 11 symmetry_mean numeric 1138 0 1.000
## 12 fractal_dimension_mean numeric 1138 0 1.000
## 13 radius_se numeric 1138 0 1.000
## 14 texture_se numeric 1138 0 1.000
## 15 perimeter_se numeric 1138 0 1.000
## 16 area_se numeric 1138 0 1.000
## 17 smoothness_se numeric 1138 0 1.000
## 18 compactness_se numeric 1138 0 1.000
## 19 concavity_se numeric 1138 0 1.000
## 20 concave.points_se numeric 1138 0 1.000
## 21 symmetry_se numeric 1138 0 1.000
## 22 fractal_dimension_se numeric 1138 0 1.000
## 23 radius_worst numeric 1138 0 1.000
## 24 texture_worst numeric 1138 0 1.000
## 25 perimeter_worst numeric 1138 0 1.000
## 26 area_worst numeric 1138 0 1.000
## 27 smoothness_worst numeric 1138 0 1.000
## 28 compactness_worst numeric 1138 0 1.000
## 29 concavity_worst numeric 1138 0 1.000
## 30 concave.points_worst numeric 1138 0 1.000
## 31 symmetry_worst numeric 1138 0 1.000
## 32 fractal_dimension_worst numeric 1138 0 1.000
##################################
# Listing all Predictors
##################################
# Drop the identifier and response columns to leave only predictors
DQA.Predictors <- DQA[, !names(DQA) %in% c("id", "diagnosis")]
##################################
# Listing all numeric Predictors
##################################
DQA.Predictors.Numeric <- DQA.Predictors[, sapply(DQA.Predictors, is.numeric)]
# Report how many numeric predictors were found
if (length(names(DQA.Predictors.Numeric)) > 0) {
  print(paste0("There are ",
               length(names(DQA.Predictors.Numeric)),
               " numeric predictor variable(s)."))
} else {
  print("There are no numeric predictor variables.")
}
## [1] "There are 30 numeric predictor variable(s)."
##################################
# Listing all factor Predictors
##################################
# Keep only the factor-typed predictor columns
DQA.Predictors.Factor <- DQA.Predictors[, sapply(DQA.Predictors, is.factor)]
# Report how many factor predictors were found
if (length(names(DQA.Predictors.Factor)) > 0) {
  print(paste0("There are ",
               length(names(DQA.Predictors.Factor)),
               " factor predictor variable(s)."))
} else {
  print("There are no factor predictor variables.")
}
## [1] "There are no factor predictor variables."
##################################
# Formulating a data quality assessment summary for factor Predictors
##################################
if (length(names(DQA.Predictors.Factor)) > 0) {

  ##################################
  # Formulating a function to determine the first mode
  ##################################
  # Most frequent non-missing value(s) of x (ties all returned)
  FirstModes <- function(x) {
    ux <- unique(na.omit(x))
    tab <- tabulate(match(x, ux))
    ux[tab == max(tab)]
  }

  ##################################
  # Formulating a function to determine the second mode
  ##################################
  # Mode of x after removing every first-mode observation;
  # returns the sentinel "x" when no second mode exists
  SecondModes <- function(x) {
    ux <- unique(na.omit(x))
    tab <- tabulate(match(x, ux))
    fm <- ux[tab == max(tab)]
    sm <- x[!(x %in% fm)]
    # Guard: all observations belong to the first mode
    if (length(sm) == 0) {
      return("x")
    }
    usm <- unique(sm)
    tabsm <- tabulate(match(sm, usm))
    second <- usm[tabsm == max(tabsm)]
    if (length(second) == 0 || is.na(second[1])) {
      return("x")
    }
    second
  }

  # Mode-based variance diagnostics for each factor predictor
  (DQA.Predictors.Factor.Summary <- data.frame(
    Column.Name = names(DQA.Predictors.Factor),
    Column.Type = sapply(DQA.Predictors.Factor, function(x) class(x)),
    Unique.Count = sapply(DQA.Predictors.Factor, function(x) length(unique(x))),
    First.Mode.Value = sapply(DQA.Predictors.Factor, function(x) as.character(FirstModes(x)[1])),
    Second.Mode.Value = sapply(DQA.Predictors.Factor, function(x) as.character(SecondModes(x)[1])),
    First.Mode.Count = sapply(DQA.Predictors.Factor, function(x) sum(na.omit(x) == FirstModes(x)[1])),
    Second.Mode.Count = sapply(DQA.Predictors.Factor, function(x) sum(na.omit(x) == SecondModes(x)[1])),
    Unique.Count.Ratio = sapply(DQA.Predictors.Factor, function(x) format(round(length(unique(x)) / nrow(DQA.Predictors.Factor), 3), nsmall = 3)),
    First.Second.Mode.Ratio = sapply(DQA.Predictors.Factor, function(x) format(round(sum(na.omit(x) == FirstModes(x)[1]) / sum(na.omit(x) == SecondModes(x)[1]), 3), nsmall = 3)),
    row.names = NULL)
  )
}
##################################
# Formulating a data quality assessment summary for numeric Predictors
##################################
if (length(names(DQA.Predictors.Numeric)) > 0) {

  ##################################
  # Formulating a function to determine the first mode
  ##################################
  # Most frequent non-missing value(s) of x (ties all returned)
  FirstModes <- function(x) {
    ux <- unique(na.omit(x))
    tab <- tabulate(match(x, ux))
    ux[tab == max(tab)]
  }

  ##################################
  # Formulating a function to determine the second mode
  ##################################
  # Mode of x after removing every first-mode observation; returns the
  # small sentinel 0.00001 when no second mode exists (keeps the
  # downstream First.Second.Mode.Ratio finite and large)
  SecondModes <- function(x) {
    ux <- unique(na.omit(x))
    tab <- tabulate(match(x, ux))
    fm <- ux[tab == max(tab)]
    sm <- na.omit(x)[!(na.omit(x) %in% fm)]
    # Guard: all observations belong to the first mode
    if (length(sm) == 0) {
      return(0.00001)
    }
    usm <- unique(sm)
    tabsm <- tabulate(match(sm, usm))
    second <- usm[tabsm == max(tabsm)]
    if (length(second) == 0 || is.na(second[1])) {
      return(0.00001)
    }
    second
  }

  # Distribution and mode-based variance diagnostics per numeric predictor
  (DQA.Predictors.Numeric.Summary <- data.frame(
    Column.Name = names(DQA.Predictors.Numeric),
    Column.Type = sapply(DQA.Predictors.Numeric, function(x) class(x)),
    Unique.Count = sapply(DQA.Predictors.Numeric, function(x) length(unique(x))),
    Unique.Count.Ratio = sapply(DQA.Predictors.Numeric, function(x) format(round(length(unique(x)) / nrow(DQA.Predictors.Numeric), 3), nsmall = 3)),
    First.Mode.Value = sapply(DQA.Predictors.Numeric, function(x) format(round(FirstModes(x)[1], 3), nsmall = 3)),
    Second.Mode.Value = sapply(DQA.Predictors.Numeric, function(x) format(round(SecondModes(x)[1], 3), nsmall = 3)),
    First.Mode.Count = sapply(DQA.Predictors.Numeric, function(x) sum(na.omit(x) == FirstModes(x)[1])),
    Second.Mode.Count = sapply(DQA.Predictors.Numeric, function(x) sum(na.omit(x) == SecondModes(x)[1])),
    First.Second.Mode.Ratio = sapply(DQA.Predictors.Numeric, function(x) format(round(sum(na.omit(x) == FirstModes(x)[1]) / sum(na.omit(x) == SecondModes(x)[1]), 3), nsmall = 3)),
    Minimum = sapply(DQA.Predictors.Numeric, function(x) format(round(min(x, na.rm = TRUE), 3), nsmall = 3)),
    Mean = sapply(DQA.Predictors.Numeric, function(x) format(round(mean(x, na.rm = TRUE), 3), nsmall = 3)),
    Median = sapply(DQA.Predictors.Numeric, function(x) format(round(median(x, na.rm = TRUE), 3), nsmall = 3)),
    Maximum = sapply(DQA.Predictors.Numeric, function(x) format(round(max(x, na.rm = TRUE), 3), nsmall = 3)),
    Skewness = sapply(DQA.Predictors.Numeric, function(x) format(round(skewness(x, na.rm = TRUE), 3), nsmall = 3)),
    Kurtosis = sapply(DQA.Predictors.Numeric, function(x) format(round(kurtosis(x, na.rm = TRUE), 3), nsmall = 3)),
    Percentile25th = sapply(DQA.Predictors.Numeric, function(x) format(round(quantile(x, probs = 0.25, na.rm = TRUE), 3), nsmall = 3)),
    Percentile75th = sapply(DQA.Predictors.Numeric, function(x) format(round(quantile(x, probs = 0.75, na.rm = TRUE), 3), nsmall = 3)),
    row.names = NULL)
  )
}
## Column.Name Column.Type Unique.Count Unique.Count.Ratio
## 1 radius_mean numeric 456 0.401
## 2 texture_mean numeric 479 0.421
## 3 perimeter_mean numeric 522 0.459
## 4 area_mean numeric 539 0.474
## 5 smoothness_mean numeric 474 0.417
## 6 compactness_mean numeric 537 0.472
## 7 concavity_mean numeric 537 0.472
## 8 concave.points_mean numeric 542 0.476
## 9 symmetry_mean numeric 432 0.380
## 10 fractal_dimension_mean numeric 499 0.438
## 11 radius_se numeric 540 0.475
## 12 texture_se numeric 519 0.456
## 13 perimeter_se numeric 533 0.468
## 14 area_se numeric 528 0.464
## 15 smoothness_se numeric 547 0.481
## 16 compactness_se numeric 541 0.475
## 17 concavity_se numeric 533 0.468
## 18 concave.points_se numeric 507 0.446
## 19 symmetry_se numeric 498 0.438
## 20 fractal_dimension_se numeric 545 0.479
## 21 radius_worst numeric 457 0.402
## 22 texture_worst numeric 511 0.449
## 23 perimeter_worst numeric 514 0.452
## 24 area_worst numeric 544 0.478
## 25 smoothness_worst numeric 411 0.361
## 26 compactness_worst numeric 529 0.465
## 27 concavity_worst numeric 539 0.474
## 28 concave.points_worst numeric 492 0.432
## 29 symmetry_worst numeric 500 0.439
## 30 fractal_dimension_worst numeric 535 0.470
## First.Mode.Value Second.Mode.Value First.Mode.Count Second.Mode.Count
## 1 12.340 13.000 8 6
## 2 15.700 21.250 6 4
## 3 82.610 132.900 6 4
## 4 512.200 658.800 6 4
## 5 0.101 0.108 10 8
## 6 0.121 0.160 6 4
## 7 0.000 0.120 26 6
## 8 0.000 0.029 26 6
## 9 0.177 0.181 8 6
## 10 0.057 0.059 6 4
## 11 0.286 0.298 6 4
## 12 1.150 0.734 6 4
## 13 1.778 2.406 8 4
## 14 16.970 74.080 6 4
## 15 0.006 0.005 4 2
## 16 0.023 0.014 6 4
## 17 0.000 0.017 26 4
## 18 0.000 0.012 26 6
## 19 0.013 0.015 8 6
## 20 0.003 0.006 4 2
## 21 12.360 13.340 10 8
## 22 27.260 27.660 6 4
## 23 117.700 184.600 6 4
## 24 1269.000 2019.000 4 2
## 25 0.131 0.149 8 6
## 26 0.342 0.177 6 4
## 27 0.000 0.450 26 6
## 28 0.000 0.026 26 6
## 29 0.320 0.361 6 4
## 30 0.074 0.084 6 4
## First.Second.Mode.Ratio Minimum Mean Median Maximum Skewness Kurtosis
## 1 1.333 6.981 14.127 13.370 28.110 0.940 3.828
## 2 1.500 9.710 19.290 18.840 39.280 0.649 3.741
## 3 1.500 43.790 91.969 86.240 188.500 0.988 3.953
## 4 1.500 143.500 654.889 551.100 2501.000 1.641 6.610
## 5 1.250 0.053 0.096 0.096 0.163 0.455 3.838
## 6 1.500 0.019 0.104 0.093 0.345 1.187 4.625
## 7 4.333 0.000 0.089 0.062 0.427 1.397 4.971
## 8 4.333 0.000 0.049 0.034 0.201 1.168 4.047
## 9 1.333 0.106 0.181 0.179 0.304 0.724 4.266
## 10 1.500 0.050 0.063 0.062 0.097 1.301 5.969
## 11 1.500 0.112 0.405 0.324 2.873 3.080 20.521
## 12 1.500 0.360 1.217 1.108 4.885 1.642 8.292
## 13 2.000 0.757 2.866 2.287 21.980 3.435 24.204
## 14 1.500 6.802 40.337 24.530 542.200 5.433 51.767
## 15 2.000 0.002 0.007 0.006 0.031 2.308 13.368
## 16 1.500 0.002 0.025 0.020 0.135 1.897 8.051
## 17 6.500 0.000 0.032 0.026 0.396 5.097 51.423
## 18 4.333 0.000 0.012 0.011 0.053 1.441 8.071
## 19 1.333 0.008 0.021 0.019 0.079 2.189 10.816
## 20 2.000 0.001 0.004 0.003 0.030 3.914 29.040
## 21 1.250 7.930 16.269 14.970 36.040 1.100 3.925
## 22 1.500 12.020 25.677 25.410 49.540 0.497 3.212
## 23 1.500 50.410 107.261 97.660 251.200 1.125 4.050
## 24 2.000 185.200 880.583 686.500 4254.000 1.854 7.347
## 25 1.333 0.071 0.132 0.131 0.223 0.414 3.503
## 26 1.500 0.027 0.254 0.212 1.058 1.470 6.002
## 27 4.333 0.000 0.272 0.227 1.252 1.147 4.591
## 28 4.333 0.000 0.115 0.100 0.291 0.491 2.459
## 29 1.500 0.156 0.290 0.282 0.664 1.430 7.395
## 30 1.500 0.055 0.084 0.080 0.208 1.658 8.188
## Percentile25th Percentile75th
## 1 11.700 15.780
## 2 16.170 21.800
## 3 75.170 104.100
## 4 420.300 782.700
## 5 0.086 0.105
## 6 0.065 0.130
## 7 0.030 0.131
## 8 0.020 0.074
## 9 0.162 0.196
## 10 0.058 0.066
## 11 0.232 0.479
## 12 0.834 1.474
## 13 1.606 3.357
## 14 17.850 45.190
## 15 0.005 0.008
## 16 0.013 0.032
## 17 0.015 0.042
## 18 0.008 0.015
## 19 0.015 0.023
## 20 0.002 0.005
## 21 13.010 18.790
## 22 21.080 29.720
## 23 84.110 125.400
## 24 515.300 1084.000
## 25 0.117 0.146
## 26 0.147 0.339
## 27 0.114 0.383
## 28 0.065 0.161
## 29 0.250 0.318
## 30 0.071 0.092
##################################
# Identifying potential data quality issues
##################################
##################################
# Checking for missing observations
##################################
# Flag variables with any missing observations (NA.Count > 0)
if ((nrow(DQA.Summary[DQA.Summary$NA.Count > 0, ])) > 0) {
  print(paste0("Missing observations noted for ",
               nrow(DQA.Summary[DQA.Summary$NA.Count > 0, ]),
               " variable(s) with NA.Count>0 and Fill.Rate<1.0."))
  DQA.Summary[DQA.Summary$NA.Count > 0, ]
} else {
  print("No missing observations noted.")
}
## [1] "No missing observations noted."
##################################
# Checking for zero or near-zero variance Predictors
##################################
# Flag low-variance factor predictors (dominant mode > 5x the second mode);
# the ratio column is stored as formatted text, so coerce back to numeric
if (length(names(DQA.Predictors.Factor)) == 0) {
  print("No factor predictors noted.")
} else {
  Factor.Mode.Ratio <- as.numeric(as.character(DQA.Predictors.Factor.Summary$First.Second.Mode.Ratio))
  if (nrow(DQA.Predictors.Factor.Summary[Factor.Mode.Ratio > 5, ]) > 0) {
    print(paste0("Low variance observed for ",
                 nrow(DQA.Predictors.Factor.Summary[Factor.Mode.Ratio > 5, ]),
                 " factor variable(s) with First.Second.Mode.Ratio>5."))
    DQA.Predictors.Factor.Summary[Factor.Mode.Ratio > 5, ]
  } else {
    print("No low variance factor predictors due to high first-second mode ratio noted.")
  }
}
## [1] "No factor predictors noted."
# Flag low-variance numeric predictors (dominant mode > 5x the second mode)
if (length(names(DQA.Predictors.Numeric)) == 0) {
  print("No numeric predictors noted.")
} else {
  Numeric.Mode.Ratio <- as.numeric(as.character(DQA.Predictors.Numeric.Summary$First.Second.Mode.Ratio))
  if (nrow(DQA.Predictors.Numeric.Summary[Numeric.Mode.Ratio > 5, ]) > 0) {
    print(paste0("Low variance observed for ",
                 nrow(DQA.Predictors.Numeric.Summary[Numeric.Mode.Ratio > 5, ]),
                 " numeric variable(s) with First.Second.Mode.Ratio>5."))
    DQA.Predictors.Numeric.Summary[Numeric.Mode.Ratio > 5, ]
  } else {
    print("No low variance numeric predictors due to high first-second mode ratio noted.")
  }
}
## [1] "Low variance observed for 1 numeric variable(s) with First.Second.Mode.Ratio>5."
## Column.Name Column.Type Unique.Count Unique.Count.Ratio First.Mode.Value
## 17 concavity_se numeric 533 0.468 0.000
## Second.Mode.Value First.Mode.Count Second.Mode.Count First.Second.Mode.Ratio
## 17 0.017 26 4 6.500
## Minimum Mean Median Maximum Skewness Kurtosis Percentile25th Percentile75th
## 17 0.000 0.032 0.026 0.396 5.097 51.423 0.015 0.042
# Flag low-variance numeric predictors (under 1% unique values per row)
if (length(names(DQA.Predictors.Numeric)) == 0) {
  print("No numeric predictors noted.")
} else {
  Numeric.Unique.Ratio <- as.numeric(as.character(DQA.Predictors.Numeric.Summary$Unique.Count.Ratio))
  if (nrow(DQA.Predictors.Numeric.Summary[Numeric.Unique.Ratio < 0.01, ]) > 0) {
    print(paste0("Low variance observed for ",
                 nrow(DQA.Predictors.Numeric.Summary[Numeric.Unique.Ratio < 0.01, ]),
                 " numeric variable(s) with Unique.Count.Ratio<0.01."))
    DQA.Predictors.Numeric.Summary[Numeric.Unique.Ratio < 0.01, ]
  } else {
    print("No low variance numeric predictors due to low unique count ratio noted.")
  }
}
## [1] "No low variance numeric predictors due to low unique count ratio noted."
##################################
# Checking for skewed Predictors
##################################
# Flag heavily skewed numeric predictors (|Skewness| > 3)
if (length(names(DQA.Predictors.Numeric)) == 0) {
  print("No numeric predictors noted.")
} else {
  Numeric.Skewness <- as.numeric(as.character(DQA.Predictors.Numeric.Summary$Skewness))
  if (nrow(DQA.Predictors.Numeric.Summary[Numeric.Skewness > 3 | Numeric.Skewness < (-3), ]) > 0) {
    print(paste0("High skewness observed for ",
                 nrow(DQA.Predictors.Numeric.Summary[Numeric.Skewness > 3 | Numeric.Skewness < (-3), ]),
                 " numeric variable(s) with Skewness>3 or Skewness<(-3)."))
    DQA.Predictors.Numeric.Summary[Numeric.Skewness > 3 | Numeric.Skewness < (-3), ]
  } else {
    print("No skewed numeric predictors noted.")
  }
}
## [1] "High skewness observed for 5 numeric variable(s) with Skewness>3 or Skewness<(-3)."
## Column.Name Column.Type Unique.Count Unique.Count.Ratio
## 11 radius_se numeric 540 0.475
## 13 perimeter_se numeric 533 0.468
## 14 area_se numeric 528 0.464
## 17 concavity_se numeric 533 0.468
## 20 fractal_dimension_se numeric 545 0.479
## First.Mode.Value Second.Mode.Value First.Mode.Count Second.Mode.Count
## 11 0.286 0.298 6 4
## 13 1.778 2.406 8 4
## 14 16.970 74.080 6 4
## 17 0.000 0.017 26 4
## 20 0.003 0.006 4 2
## First.Second.Mode.Ratio Minimum Mean Median Maximum Skewness Kurtosis
## 11 1.500 0.112 0.405 0.324 2.873 3.080 20.521
## 13 2.000 0.757 2.866 2.287 21.980 3.435 24.204
## 14 1.500 6.802 40.337 24.530 542.200 5.433 51.767
## 17 6.500 0.000 0.032 0.026 0.396 5.097 51.423
## 20 2.000 0.001 0.004 0.003 0.030 3.914 29.040
## Percentile25th Percentile75th
## 11 0.232 0.479
## 13 1.606 3.357
## 14 17.850 45.190
## 17 0.015 0.042
## 20 0.002 0.005
##################################
# Loading dataset
##################################
# Working copy for the data preprocessing analysis (DPA): drop the id column
DPA <- DQA[, !names(DQA) %in% c("id")]
##################################
# Gathering descriptive statistics
##################################
# Descriptive statistics via skimr (outer parentheses print while assigning)
(DPA_Skimmed <- skim(DPA))
Name | DPA |
Number of rows | 1138 |
Number of columns | 31 |
_______________________ | |
Column type frequency: | |
factor | 1 |
numeric | 30 |
________________________ | |
Group variables | None |
Variable type: factor
skim_variable | n_missing | complete_rate | ordered | n_unique | top_counts |
---|---|---|---|---|---|
diagnosis | 0 | 1 | FALSE | 2 | B: 714, M: 424 |
Variable type: numeric
skim_variable | n_missing | complete_rate | mean | sd | p0 | p25 | p50 | p75 | p100 | hist |
---|---|---|---|---|---|---|---|---|---|---|
radius_mean | 0 | 1 | 14.13 | 3.52 | 6.98 | 11.70 | 13.37 | 15.78 | 28.11 | ▂▇▃▁▁ |
texture_mean | 0 | 1 | 19.29 | 4.30 | 9.71 | 16.17 | 18.84 | 21.80 | 39.28 | ▃▇▃▁▁ |
perimeter_mean | 0 | 1 | 91.97 | 24.29 | 43.79 | 75.17 | 86.24 | 104.10 | 188.50 | ▃▇▃▁▁ |
area_mean | 0 | 1 | 654.89 | 351.76 | 143.50 | 420.30 | 551.10 | 782.70 | 2501.00 | ▇▃▂▁▁ |
smoothness_mean | 0 | 1 | 0.10 | 0.01 | 0.05 | 0.09 | 0.10 | 0.11 | 0.16 | ▁▇▇▁▁ |
compactness_mean | 0 | 1 | 0.10 | 0.05 | 0.02 | 0.06 | 0.09 | 0.13 | 0.35 | ▇▇▂▁▁ |
concavity_mean | 0 | 1 | 0.09 | 0.08 | 0.00 | 0.03 | 0.06 | 0.13 | 0.43 | ▇▃▂▁▁ |
concave.points_mean | 0 | 1 | 0.05 | 0.04 | 0.00 | 0.02 | 0.03 | 0.07 | 0.20 | ▇▃▂▁▁ |
symmetry_mean | 0 | 1 | 0.18 | 0.03 | 0.11 | 0.16 | 0.18 | 0.20 | 0.30 | ▁▇▅▁▁ |
fractal_dimension_mean | 0 | 1 | 0.06 | 0.01 | 0.05 | 0.06 | 0.06 | 0.07 | 0.10 | ▆▇▂▁▁ |
radius_se | 0 | 1 | 0.41 | 0.28 | 0.11 | 0.23 | 0.32 | 0.48 | 2.87 | ▇▁▁▁▁ |
texture_se | 0 | 1 | 1.22 | 0.55 | 0.36 | 0.83 | 1.11 | 1.47 | 4.88 | ▇▅▁▁▁ |
perimeter_se | 0 | 1 | 2.87 | 2.02 | 0.76 | 1.61 | 2.29 | 3.36 | 21.98 | ▇▁▁▁▁ |
area_se | 0 | 1 | 40.34 | 45.47 | 6.80 | 17.85 | 24.53 | 45.19 | 542.20 | ▇▁▁▁▁ |
smoothness_se | 0 | 1 | 0.01 | 0.00 | 0.00 | 0.01 | 0.01 | 0.01 | 0.03 | ▇▃▁▁▁ |
compactness_se | 0 | 1 | 0.03 | 0.02 | 0.00 | 0.01 | 0.02 | 0.03 | 0.14 | ▇▃▁▁▁ |
concavity_se | 0 | 1 | 0.03 | 0.03 | 0.00 | 0.02 | 0.03 | 0.04 | 0.40 | ▇▁▁▁▁ |
concave.points_se | 0 | 1 | 0.01 | 0.01 | 0.00 | 0.01 | 0.01 | 0.01 | 0.05 | ▇▇▁▁▁ |
symmetry_se | 0 | 1 | 0.02 | 0.01 | 0.01 | 0.02 | 0.02 | 0.02 | 0.08 | ▇▃▁▁▁ |
fractal_dimension_se | 0 | 1 | 0.00 | 0.00 | 0.00 | 0.00 | 0.00 | 0.00 | 0.03 | ▇▁▁▁▁ |
radius_worst | 0 | 1 | 16.27 | 4.83 | 7.93 | 13.01 | 14.97 | 18.79 | 36.04 | ▆▇▃▁▁ |
texture_worst | 0 | 1 | 25.68 | 6.14 | 12.02 | 21.08 | 25.41 | 29.72 | 49.54 | ▃▇▆▁▁ |
perimeter_worst | 0 | 1 | 107.26 | 33.59 | 50.41 | 84.11 | 97.66 | 125.40 | 251.20 | ▇▇▃▁▁ |
area_worst | 0 | 1 | 880.58 | 569.11 | 185.20 | 515.30 | 686.50 | 1084.00 | 4254.00 | ▇▂▁▁▁ |
smoothness_worst | 0 | 1 | 0.13 | 0.02 | 0.07 | 0.12 | 0.13 | 0.15 | 0.22 | ▂▇▇▂▁ |
compactness_worst | 0 | 1 | 0.25 | 0.16 | 0.03 | 0.15 | 0.21 | 0.34 | 1.06 | ▇▅▁▁▁ |
concavity_worst | 0 | 1 | 0.27 | 0.21 | 0.00 | 0.11 | 0.23 | 0.38 | 1.25 | ▇▅▂▁▁ |
concave.points_worst | 0 | 1 | 0.11 | 0.07 | 0.00 | 0.06 | 0.10 | 0.16 | 0.29 | ▅▇▅▃▁ |
symmetry_worst | 0 | 1 | 0.29 | 0.06 | 0.16 | 0.25 | 0.28 | 0.32 | 0.66 | ▅▇▁▁▁ |
fractal_dimension_worst | 0 | 1 | 0.08 | 0.02 | 0.06 | 0.07 | 0.08 | 0.09 | 0.21 | ▇▃▁▁▁ |
##################################
# Outlier Detection
##################################
##################################
# Listing all Predictors
##################################
# Drop the response to leave only predictor columns
DPA.Predictors <- DPA[, !names(DPA) %in% c("diagnosis")]
##################################
# Listing all numeric Predictors
##################################
DPA.Predictors.Numeric <- DPA.Predictors[, sapply(DPA.Predictors, is.numeric)]
##################################
# Identifying outliers for the numeric Predictors
##################################
# Per-predictor outlier counts via the boxplot rule (points beyond the whiskers)
OutlierCountList <- c()

for (i in seq_len(ncol(DPA.Predictors.Numeric))) {
  Outliers <- boxplot.stats(DPA.Predictors.Numeric[, i])$out
  OutlierCount <- length(Outliers)
  OutlierCountList <- append(OutlierCountList, OutlierCount)
  # Row positions of the outlying observations for this predictor
  OutlierIndices <- which(DPA.Predictors.Numeric[, i] %in% c(Outliers))
  # Horizontal boxplot annotated with the outlier count
  print(
    ggplot(DPA.Predictors.Numeric, aes(x = DPA.Predictors.Numeric[, i])) +
      geom_boxplot() +
      theme_bw() +
      theme(axis.text.y = element_blank(),
            axis.ticks.y = element_blank()) +
      xlab(names(DPA.Predictors.Numeric)[i]) +
      labs(title = names(DPA.Predictors.Numeric)[i],
           subtitle = paste0(OutlierCount, " Outlier(s) Detected")))
}
##################################
# Zero and Near-Zero Variance
##################################
##################################
# Identifying columns with low variance
###################################
# Near-zero-variance screen: flags columns whose dominant value is >4x the
# runner-up (freqCut = 80/20) AND which have <10% unique values (uniqueCut)
DPA_LowVariance <- nearZeroVar(DPA,
                               freqCut = 80/20,
                               uniqueCut = 10,
                               saveMetrics = TRUE)
(DPA_LowVariance[DPA_LowVariance$nzv, ])
## [1] freqRatio percentUnique zeroVar nzv
## <0 rows> (or 0-length row.names)
# Report (and list for removal) any near-zero-variance descriptors
if ((nrow(DPA_LowVariance[DPA_LowVariance$nzv, ])) == 0) {
  print("No low variance descriptors noted.")
} else {
  print(paste0("Low variance observed for ",
               nrow(DPA_LowVariance[DPA_LowVariance$nzv, ]),
               " numeric variable(s) with First.Second.Mode.Ratio>4 and Unique.Count.Ratio<0.10."))

  DPA_LowVarianceForRemoval <- (nrow(DPA_LowVariance[DPA_LowVariance$nzv, ]))

  print(paste0("Low variance can be resolved by removing ",
               nrow(DPA_LowVariance[DPA_LowVariance$nzv, ]),
               " numeric variable(s)."))

  # Name each flagged variable
  for (j in seq_len(DPA_LowVarianceForRemoval)) {
    DPA_LowVarianceRemovedVariable <- rownames(DPA_LowVariance[DPA_LowVariance$nzv, ])[j]
    print(paste0("Variable ",
                 j, " for removal: ",
                 DPA_LowVarianceRemovedVariable))
  }

  # Show descriptive statistics for the flagged variables only
  DPA %>%
    skim() %>%
    dplyr::filter(skim_variable %in% rownames(DPA_LowVariance[DPA_LowVariance$nzv, ]))
}
## [1] "No low variance descriptors noted."
##################################
# Visualizing pairwise correlation between Predictor
##################################
# Pairwise Pearson correlation matrix across the numeric predictors;
# pairwise.complete.obs uses all available pairs despite any missing values
(DPA_Correlation <- cor(DPA.Predictors.Numeric,
                        method = "pearson",
                        use = "pairwise.complete.obs"))
## radius_mean texture_mean perimeter_mean area_mean
## radius_mean 1.000000000 0.323781891 0.997855281 0.987357170
## texture_mean 0.323781891 1.000000000 0.329533059 0.321085696
## perimeter_mean 0.997855281 0.329533059 1.000000000 0.986506804
## area_mean 0.987357170 0.321085696 0.986506804 1.000000000
## smoothness_mean 0.170581187 -0.023388516 0.207278164 0.177028377
## compactness_mean 0.506123578 0.236702222 0.556936211 0.498501682
## concavity_mean 0.676763550 0.302417828 0.716135650 0.685982829
## concave.points_mean 0.822528522 0.293464051 0.850977041 0.823268869
## symmetry_mean 0.147741242 0.071400980 0.183027212 0.151293079
## fractal_dimension_mean -0.311630826 -0.076437183 -0.261476908 -0.283109812
## radius_se 0.679090388 0.275868676 0.691765014 0.732562227
## texture_se -0.097317443 0.386357623 -0.086761078 -0.066280214
## perimeter_se 0.674171616 0.281673115 0.693134890 0.726628328
## area_se 0.735863663 0.259844987 0.744982694 0.800085921
## smoothness_se -0.222600125 0.006613777 -0.202694026 -0.166776667
## compactness_se 0.205999980 0.191974611 0.250743681 0.212582551
## concavity_se 0.194203623 0.143293077 0.228082345 0.207660060
## concave.points_se 0.376168956 0.163851025 0.407216916 0.372320282
## symmetry_se -0.104320881 0.009127168 -0.081629327 -0.072496588
## fractal_dimension_se -0.042641269 0.054457520 -0.005523391 -0.019886963
## radius_worst 0.969538973 0.352572947 0.969476363 0.962746086
## texture_worst 0.297007644 0.912044589 0.303038372 0.287488627
## perimeter_worst 0.965136514 0.358039575 0.970386887 0.959119574
## area_worst 0.941082460 0.343545947 0.941549808 0.959213326
## smoothness_worst 0.119616140 0.077503359 0.150549404 0.123522939
## compactness_worst 0.413462823 0.277829592 0.455774228 0.390410309
## concavity_worst 0.526911462 0.301025224 0.563879263 0.512605920
## concave.points_worst 0.744214198 0.295315843 0.771240789 0.722016626
## symmetry_worst 0.163953335 0.105007910 0.189115040 0.143569914
## fractal_dimension_worst 0.007065886 0.119205351 0.051018530 0.003737597
## smoothness_mean compactness_mean concavity_mean
## radius_mean 0.17058119 0.50612358 0.67676355
## texture_mean -0.02338852 0.23670222 0.30241783
## perimeter_mean 0.20727816 0.55693621 0.71613565
## area_mean 0.17702838 0.49850168 0.68598283
## smoothness_mean 1.00000000 0.65912322 0.52198377
## compactness_mean 0.65912322 1.00000000 0.88312067
## concavity_mean 0.52198377 0.88312067 1.00000000
## concave.points_mean 0.55369517 0.83113504 0.92139103
## symmetry_mean 0.55777479 0.60264105 0.50066662
## fractal_dimension_mean 0.58479200 0.56536866 0.33678336
## radius_se 0.30146710 0.49747345 0.63192482
## texture_se 0.06840645 0.04620483 0.07621835
## perimeter_se 0.29609193 0.54890526 0.66039079
## area_se 0.24655243 0.45565285 0.61742681
## smoothness_se 0.33237544 0.13529927 0.09856375
## compactness_se 0.31894330 0.73872179 0.67027882
## concavity_se 0.24839568 0.57051687 0.69127021
## concave.points_se 0.38067569 0.64226185 0.68325992
## symmetry_se 0.20077438 0.22997659 0.17800921
## fractal_dimension_se 0.28360670 0.50731813 0.44930075
## radius_worst 0.21312014 0.53531540 0.68823641
## texture_worst 0.03607180 0.24813283 0.29987889
## perimeter_worst 0.23885263 0.59021043 0.72956492
## area_worst 0.20671836 0.50960381 0.67598723
## smoothness_worst 0.80532420 0.56554117 0.44882204
## compactness_worst 0.47246844 0.86580904 0.75496802
## concavity_worst 0.43492571 0.81627525 0.88410264
## concave.points_worst 0.50305335 0.81557322 0.86132303
## symmetry_worst 0.39430948 0.51022343 0.40946413
## fractal_dimension_worst 0.49931637 0.68738232 0.51492989
## concave.points_mean symmetry_mean
## radius_mean 0.82252852 0.14774124
## texture_mean 0.29346405 0.07140098
## perimeter_mean 0.85097704 0.18302721
## area_mean 0.82326887 0.15129308
## smoothness_mean 0.55369517 0.55777479
## compactness_mean 0.83113504 0.60264105
## concavity_mean 0.92139103 0.50066662
## concave.points_mean 1.00000000 0.46249739
## symmetry_mean 0.46249739 1.00000000
## fractal_dimension_mean 0.16691738 0.47992133
## radius_se 0.69804983 0.30337926
## texture_se 0.02147958 0.12805293
## perimeter_se 0.71064987 0.31389276
## area_se 0.69029854 0.22397022
## smoothness_se 0.02765331 0.18732117
## compactness_se 0.49042425 0.42165915
## concavity_se 0.43916707 0.34262702
## concave.points_se 0.61563413 0.39329787
## symmetry_se 0.09535079 0.44913654
## fractal_dimension_se 0.25758375 0.33178615
## radius_worst 0.83031763 0.18572775
## texture_worst 0.29275171 0.09065069
## perimeter_worst 0.85592313 0.21916856
## area_worst 0.80962962 0.17719338
## smoothness_worst 0.45275305 0.42667503
## compactness_worst 0.66745368 0.47320001
## concavity_worst 0.75239950 0.43372101
## concave.points_worst 0.91015531 0.43029661
## symmetry_worst 0.37574415 0.69982580
## fractal_dimension_worst 0.36866113 0.43841350
## fractal_dimension_mean radius_se texture_se
## radius_mean -0.3116308263 0.6790903880 -0.09731744
## texture_mean -0.0764371834 0.2758686762 0.38635762
## perimeter_mean -0.2614769081 0.6917650135 -0.08676108
## area_mean -0.2831098117 0.7325622270 -0.06628021
## smoothness_mean 0.5847920019 0.3014670983 0.06840645
## compactness_mean 0.5653686634 0.4974734461 0.04620483
## concavity_mean 0.3367833594 0.6319248221 0.07621835
## concave.points_mean 0.1669173832 0.6980498336 0.02147958
## symmetry_mean 0.4799213301 0.3033792632 0.12805293
## fractal_dimension_mean 1.0000000000 0.0001109951 0.16417397
## radius_se 0.0001109951 1.0000000000 0.21324734
## texture_se 0.1641739659 0.2132473373 1.00000000
## perimeter_se 0.0398299316 0.9727936770 0.22317073
## area_se -0.0901702475 0.9518301121 0.11156725
## smoothness_se 0.4019644254 0.1645142198 0.39724285
## compactness_se 0.5598366906 0.3560645755 0.23169970
## concavity_se 0.4466303217 0.3323575376 0.19499846
## concave.points_se 0.3411980444 0.5133464414 0.23028340
## symmetry_se 0.3450073971 0.2405673625 0.41162068
## fractal_dimension_se 0.6881315775 0.2277535327 0.27972275
## radius_worst -0.2536914949 0.7150651951 -0.11169031
## texture_worst -0.0512692020 0.1947985568 0.40900277
## perimeter_worst -0.2051512113 0.7196838037 -0.10224192
## area_worst -0.2318544512 0.7515484761 -0.08319499
## smoothness_worst 0.5049420754 0.1419185529 -0.07365766
## compactness_worst 0.4587981567 0.2871031656 -0.09243935
## concavity_worst 0.3462338763 0.3805846346 -0.06895622
## concave.points_worst 0.1753254492 0.5310623278 -0.11963752
## symmetry_worst 0.3340186839 0.0945428304 -0.12821476
## fractal_dimension_worst 0.7672967792 0.0495594325 -0.04565457
## perimeter_se area_se smoothness_se compactness_se
## radius_mean 0.67417162 0.73586366 -0.222600125 0.2060000
## texture_mean 0.28167311 0.25984499 0.006613777 0.1919746
## perimeter_mean 0.69313489 0.74498269 -0.202694026 0.2507437
## area_mean 0.72662833 0.80008592 -0.166776667 0.2125826
## smoothness_mean 0.29609193 0.24655243 0.332375443 0.3189433
## compactness_mean 0.54890526 0.45565285 0.135299268 0.7387218
## concavity_mean 0.66039079 0.61742681 0.098563746 0.6702788
## concave.points_mean 0.71064987 0.69029854 0.027653308 0.4904242
## symmetry_mean 0.31389276 0.22397022 0.187321165 0.4216591
## fractal_dimension_mean 0.03982993 -0.09017025 0.401964425 0.5598367
## radius_se 0.97279368 0.95183011 0.164514220 0.3560646
## texture_se 0.22317073 0.11156725 0.397242853 0.2316997
## perimeter_se 1.00000000 0.93765541 0.151075331 0.4163224
## area_se 0.93765541 1.00000000 0.075150338 0.2848401
## smoothness_se 0.15107533 0.07515034 1.000000000 0.3366961
## compactness_se 0.41632237 0.28484006 0.336696081 1.0000000
## concavity_se 0.36248158 0.27089473 0.268684760 0.8012683
## concave.points_se 0.55626408 0.41572957 0.328429499 0.7440827
## symmetry_se 0.26648709 0.13410898 0.413506125 0.3947128
## fractal_dimension_se 0.24414277 0.12707090 0.427374207 0.8032688
## radius_worst 0.69720059 0.75737319 -0.230690710 0.2046072
## texture_worst 0.20037085 0.19649665 -0.074742965 0.1430026
## perimeter_worst 0.72103131 0.76121264 -0.217303755 0.2605158
## area_worst 0.73071297 0.81140796 -0.182195478 0.1993713
## smoothness_worst 0.13005439 0.12538943 0.314457456 0.2273942
## compactness_worst 0.34191945 0.28325654 -0.055558139 0.6787804
## concavity_worst 0.41889882 0.38510014 -0.058298387 0.6391467
## concave.points_worst 0.55489723 0.53816631 -0.102006796 0.4832083
## symmetry_worst 0.10993043 0.07412629 -0.107342098 0.2778784
## fractal_dimension_worst 0.08543257 0.01753930 0.101480315 0.5909728
## concavity_se concave.points_se symmetry_se
## radius_mean 0.1942036 0.37616896 -0.104320881
## texture_mean 0.1432931 0.16385103 0.009127168
## perimeter_mean 0.2280823 0.40721692 -0.081629327
## area_mean 0.2076601 0.37232028 -0.072496588
## smoothness_mean 0.2483957 0.38067569 0.200774376
## compactness_mean 0.5705169 0.64226185 0.229976591
## concavity_mean 0.6912702 0.68325992 0.178009208
## concave.points_mean 0.4391671 0.61563413 0.095350787
## symmetry_mean 0.3426270 0.39329787 0.449136542
## fractal_dimension_mean 0.4466303 0.34119804 0.345007397
## radius_se 0.3323575 0.51334644 0.240567362
## texture_se 0.1949985 0.23028340 0.411620680
## perimeter_se 0.3624816 0.55626408 0.266487092
## area_se 0.2708947 0.41572957 0.134108980
## smoothness_se 0.2686848 0.32842950 0.413506125
## compactness_se 0.8012683 0.74408267 0.394712835
## concavity_se 1.0000000 0.77180399 0.309428578
## concave.points_se 0.7718040 1.00000000 0.312780223
## symmetry_se 0.3094286 0.31278022 1.000000000
## fractal_dimension_se 0.7273722 0.61104414 0.369078083
## radius_worst 0.1869035 0.35812667 -0.128120769
## texture_worst 0.1002410 0.08674121 -0.077473420
## perimeter_worst 0.2266804 0.39499925 -0.103753044
## area_worst 0.1883527 0.34227116 -0.110342743
## smoothness_worst 0.1684813 0.21535060 -0.012661800
## compactness_worst 0.4848578 0.45288838 0.060254879
## concavity_worst 0.6625641 0.54959238 0.037119049
## concave.points_worst 0.4404723 0.60244961 -0.030413396
## symmetry_worst 0.1977878 0.14311567 0.389402485
## fractal_dimension_worst 0.4393293 0.31065455 0.078079476
## fractal_dimension_se radius_worst texture_worst
## radius_mean -0.042641269 0.96953897 0.297007644
## texture_mean 0.054457520 0.35257295 0.912044589
## perimeter_mean -0.005523391 0.96947636 0.303038372
## area_mean -0.019886963 0.96274609 0.287488627
## smoothness_mean 0.283606699 0.21312014 0.036071799
## compactness_mean 0.507318127 0.53531540 0.248132833
## concavity_mean 0.449300749 0.68823641 0.299878889
## concave.points_mean 0.257583746 0.83031763 0.292751713
## symmetry_mean 0.331786146 0.18572775 0.090650688
## fractal_dimension_mean 0.688131577 -0.25369149 -0.051269202
## radius_se 0.227753533 0.71506520 0.194798557
## texture_se 0.279722748 -0.11169031 0.409002766
## perimeter_se 0.244142773 0.69720059 0.200370854
## area_se 0.127070903 0.75737319 0.196496649
## smoothness_se 0.427374207 -0.23069071 -0.074742965
## compactness_se 0.803268818 0.20460717 0.143002583
## concavity_se 0.727372184 0.18690352 0.100240984
## concave.points_se 0.611044139 0.35812667 0.086741210
## symmetry_se 0.369078083 -0.12812077 -0.077473420
## fractal_dimension_se 1.000000000 -0.03748762 -0.003195029
## radius_worst -0.037487618 1.00000000 0.359920754
## texture_worst -0.003195029 0.35992075 1.000000000
## perimeter_worst -0.001000398 0.99370792 0.365098245
## area_worst -0.022736147 0.98401456 0.345842283
## smoothness_worst 0.170568316 0.21657443 0.225429415
## compactness_worst 0.390158842 0.47582004 0.360832339
## concavity_worst 0.379974661 0.57397471 0.368365607
## concave.points_worst 0.215204013 0.78742385 0.359754610
## symmetry_worst 0.111093956 0.24352920 0.233027461
## fractal_dimension_worst 0.591328066 0.09349198 0.219122425
## perimeter_worst area_worst smoothness_worst
## radius_mean 0.965136514 0.94108246 0.11961614
## texture_mean 0.358039575 0.34354595 0.07750336
## perimeter_mean 0.970386887 0.94154981 0.15054940
## area_mean 0.959119574 0.95921333 0.12352294
## smoothness_mean 0.238852626 0.20671836 0.80532420
## compactness_mean 0.590210428 0.50960381 0.56554117
## concavity_mean 0.729564917 0.67598723 0.44882204
## concave.points_mean 0.855923128 0.80962962 0.45275305
## symmetry_mean 0.219168559 0.17719338 0.42667503
## fractal_dimension_mean -0.205151211 -0.23185445 0.50494208
## radius_se 0.719683804 0.75154848 0.14191855
## texture_se -0.102241922 -0.08319499 -0.07365766
## perimeter_se 0.721031310 0.73071297 0.13005439
## area_se 0.761212636 0.81140796 0.12538943
## smoothness_se -0.217303755 -0.18219548 0.31445746
## compactness_se 0.260515840 0.19937133 0.22739423
## concavity_se 0.226680426 0.18835265 0.16848132
## concave.points_se 0.394999252 0.34227116 0.21535060
## symmetry_se -0.103753044 -0.11034274 -0.01266180
## fractal_dimension_se -0.001000398 -0.02273615 0.17056832
## radius_worst 0.993707916 0.98401456 0.21657443
## texture_worst 0.365098245 0.34584228 0.22542941
## perimeter_worst 1.000000000 0.97757809 0.23677460
## area_worst 0.977578091 1.00000000 0.20914533
## smoothness_worst 0.236774604 0.20914533 1.00000000
## compactness_worst 0.529407690 0.43829628 0.56818652
## concavity_worst 0.618344080 0.54333053 0.51852329
## concave.points_worst 0.816322102 0.74741880 0.54769090
## symmetry_worst 0.269492769 0.20914551 0.49383833
## fractal_dimension_worst 0.138956862 0.07964703 0.61762419
## compactness_worst concavity_worst concave.points_worst
## radius_mean 0.41346282 0.52691146 0.7442142
## texture_mean 0.27782959 0.30102522 0.2953158
## perimeter_mean 0.45577423 0.56387926 0.7712408
## area_mean 0.39041031 0.51260592 0.7220166
## smoothness_mean 0.47246844 0.43492571 0.5030534
## compactness_mean 0.86580904 0.81627525 0.8155732
## concavity_mean 0.75496802 0.88410264 0.8613230
## concave.points_mean 0.66745368 0.75239950 0.9101553
## symmetry_mean 0.47320001 0.43372101 0.4302966
## fractal_dimension_mean 0.45879816 0.34623388 0.1753254
## radius_se 0.28710317 0.38058463 0.5310623
## texture_se -0.09243935 -0.06895622 -0.1196375
## perimeter_se 0.34191945 0.41889882 0.5548972
## area_se 0.28325654 0.38510014 0.5381663
## smoothness_se -0.05555814 -0.05829839 -0.1020068
## compactness_se 0.67878035 0.63914670 0.4832083
## concavity_se 0.48485780 0.66256413 0.4404723
## concave.points_se 0.45288838 0.54959238 0.6024496
## symmetry_se 0.06025488 0.03711905 -0.0304134
## fractal_dimension_se 0.39015884 0.37997466 0.2152040
## radius_worst 0.47582004 0.57397471 0.7874239
## texture_worst 0.36083234 0.36836561 0.3597546
## perimeter_worst 0.52940769 0.61834408 0.8163221
## area_worst 0.43829628 0.54333053 0.7474188
## smoothness_worst 0.56818652 0.51852329 0.5476909
## compactness_worst 1.00000000 0.89226090 0.8010804
## concavity_worst 0.89226090 1.00000000 0.8554339
## concave.points_worst 0.80108036 0.85543386 1.0000000
## symmetry_worst 0.61444050 0.53251973 0.5025285
## fractal_dimension_worst 0.81045486 0.68651092 0.5111141
## symmetry_worst fractal_dimension_worst
## radius_mean 0.16395333 0.007065886
## texture_mean 0.10500791 0.119205351
## perimeter_mean 0.18911504 0.051018530
## area_mean 0.14356991 0.003737597
## smoothness_mean 0.39430948 0.499316369
## compactness_mean 0.51022343 0.687382323
## concavity_mean 0.40946413 0.514929891
## concave.points_mean 0.37574415 0.368661134
## symmetry_mean 0.69982580 0.438413498
## fractal_dimension_mean 0.33401868 0.767296779
## radius_se 0.09454283 0.049559432
## texture_se -0.12821476 -0.045654569
## perimeter_se 0.10993043 0.085432572
## area_se 0.07412629 0.017539295
## smoothness_se -0.10734210 0.101480315
## compactness_se 0.27787843 0.590972763
## concavity_se 0.19778782 0.439329269
## concave.points_se 0.14311567 0.310654551
## symmetry_se 0.38940248 0.078079476
## fractal_dimension_se 0.11109396 0.591328066
## radius_worst 0.24352920 0.093491979
## texture_worst 0.23302746 0.219122425
## perimeter_worst 0.26949277 0.138956862
## area_worst 0.20914551 0.079647034
## smoothness_worst 0.49383833 0.617624192
## compactness_worst 0.61444050 0.810454856
## concavity_worst 0.53251973 0.686510921
## concave.points_worst 0.50252849 0.511114146
## symmetry_worst 1.00000000 0.537848206
## fractal_dimension_worst 0.53784821 1.000000000
# Significance test (p-values and confidence intervals) for each pairwise
# correlation at the 95% confidence level; the p-value matrix is passed to
# corrplot so insignificant cells can be blanked out.
DPA_CorrelationTest <- cor.mtest(DPA.Predictors.Numeric,
                                 method = "pearson",
                                 conf.level = 0.95)

# Correlation plot (circle glyphs): upper triangle only, variables in
# original order, correlations with p >= 0.05 left blank.
corrplot(cor(DPA.Predictors.Numeric,
             method = "pearson",
             use = "pairwise.complete.obs"),
         method = "circle",
         type = "upper",
         order = "original",
         tl.col = "black",
         tl.cex = 0.75,
         tl.srt = 90,
         sig.level = 0.05,
         p.mat = DPA_CorrelationTest$p,
         insig = "blank")

# Same plot with numeric correlation coefficients instead of circles.
corrplot(cor(DPA.Predictors.Numeric,
             method = "pearson",
             use = "pairwise.complete.obs"),
         method = "number",
         type = "upper",
         order = "original",
         tl.col = "black",
         tl.cex = 0.75,
         tl.srt = 90,
         sig.level = 0.05,
         number.cex = 0.65,
         p.mat = DPA_CorrelationTest$p,
         insig = "blank")
##################################
# Identifying the highly correlated variables
##################################
# Count predictor pairs with absolute pairwise correlation above 0.95
# (upper triangle only, so each pair is counted exactly once).
(DPA_HighlyCorrelatedCount <- sum(abs(DPA_Correlation[upper.tri(DPA_Correlation)]) > 0.95))
## [1] 15
if (DPA_HighlyCorrelatedCount == 0) {
  print("No highly correlated predictors noted.")
} else {
  print(paste0("High correlation observed for ",
               (DPA_HighlyCorrelatedCount)," pairs of numeric variable(s) with Correlation.Coefficient>0.95."))
  # Visualize the significant highly correlated pairs (ranked cross-correlations).
  (DPA_HighlyCorrelatedPairs <- corr_cross(DPA.Predictors.Numeric,
                                           max_pvalue = 0.05,
                                           top = DPA_HighlyCorrelatedCount,
                                           rm.na = TRUE,
                                           grid = FALSE
  ))
}
## [1] "High correlation observed for 15 pairs of numeric variable(s) with Correlation.Coefficient>0.95."
if (DPA_HighlyCorrelatedCount > 0) {
  # findCorrelation flags a minimal set of columns to drop so that no
  # remaining pairwise correlation exceeds the 0.95 cutoff.
  DPA_HighlyCorrelated <- findCorrelation(DPA_Correlation, cutoff = 0.95)

  (DPA_HighlyCorrelatedForRemoval <- length(DPA_HighlyCorrelated))

  print(paste0("High correlation can be resolved by removing ",
               (DPA_HighlyCorrelatedForRemoval)," numeric variable(s)."))
  # seq_len() is safe even when the removal set is empty.
  for (j in seq_len(DPA_HighlyCorrelatedForRemoval)) {
    DPA_HighlyCorrelatedRemovedVariable <- colnames(DPA.Predictors.Numeric)[DPA_HighlyCorrelated[j]]
    print(paste0("Variable ",
                 j," for removal: ",
                 DPA_HighlyCorrelatedRemovedVariable))
  }
}
## [1] "High correlation can be resolved by removing 7 numeric variable(s)."
## [1] "Variable 1 for removal: perimeter_worst"
## [1] "Variable 2 for removal: radius_worst"
## [1] "Variable 3 for removal: perimeter_mean"
## [1] "Variable 4 for removal: area_worst"
## [1] "Variable 5 for removal: radius_mean"
## [1] "Variable 6 for removal: perimeter_se"
## [1] "Variable 7 for removal: area_se"
##################################
# Linear Dependencies
##################################
##################################
# Finding linear dependencies
##################################
# Detect exact linear combinations among the numeric predictors
# (returns the combos and the columns to remove to break them).
DPA_LinearlyDependent <- findLinearCombos(DPA.Predictors.Numeric)
##################################
# Identifying the linearly dependent variables
##################################
# Re-run the linear-combination scan and report each dependent subset.
DPA_LinearlyDependent <- findLinearCombos(DPA.Predictors.Numeric)

(DPA_LinearlyDependentCount <- length(DPA_LinearlyDependent$linearCombos))
## [1] 0
if (DPA_LinearlyDependentCount == 0) {
  print("No linearly dependent predictors noted.")
} else {
  print(paste0("Linear dependency observed for ",
               (DPA_LinearlyDependentCount)," subset(s) of numeric variable(s)."))
  for (i in seq_len(DPA_LinearlyDependentCount)) {
    DPA_LinearlyDependentSubset <- colnames(DPA.Predictors.Numeric)[DPA_LinearlyDependent$linearCombos[[i]]]
    print(paste0("Linear dependent variable(s) for subset ",
                 i," include: ",
                 DPA_LinearlyDependentSubset))
  }
}
## [1] "No linearly dependent predictors noted."
##################################
# Identifying the linearly dependent variables for removal
##################################
if (DPA_LinearlyDependentCount > 0) {
  # $remove lists the minimal set of columns whose removal breaks all
  # detected linear combinations.
  DPA_LinearlyDependent <- findLinearCombos(DPA.Predictors.Numeric)

  DPA_LinearlyDependentForRemoval <- length(DPA_LinearlyDependent$remove)

  print(paste0("Linear dependency can be resolved by removing ",
               (DPA_LinearlyDependentForRemoval)," numeric variable(s)."))
  for (j in seq_len(DPA_LinearlyDependentForRemoval)) {
    DPA_LinearlyDependentRemovedVariable <- colnames(DPA.Predictors.Numeric)[DPA_LinearlyDependent$remove[j]]
    print(paste0("Variable ",
                 j," for removal: ",
                 DPA_LinearlyDependentRemovedVariable))
  }
}
##################################
# Shape Transformation
##################################
##################################
# Applying a Box-Cox transformation
##################################
# Estimate per-column Box-Cox transformation parameters on the numeric
# predictors, then apply them to reduce skewness.
DPA_BoxCox <- preProcess(DPA.Predictors.Numeric, method = c("BoxCox"))
DPA_BoxCoxTransformed <- predict(DPA_BoxCox, DPA.Predictors.Numeric)

# Histogram of each transformed predictor, annotated with its median
# (red line), mean (blue line), and skewness.
for (i in seq_len(ncol(DPA_BoxCoxTransformed))) {
  Median <- format(round(median(DPA_BoxCoxTransformed[,i],na.rm = TRUE),2), nsmall=2)
  Mean <- format(round(mean(DPA_BoxCoxTransformed[,i],na.rm = TRUE),2), nsmall=2)
  Skewness <- format(round(skewness(DPA_BoxCoxTransformed[,i],na.rm = TRUE),2), nsmall=2)
  print(
    ggplot(DPA_BoxCoxTransformed, aes(x=DPA_BoxCoxTransformed[,i])) +
      geom_histogram(binwidth=1,color="black", fill="white") +
      geom_vline(aes(xintercept=mean(DPA_BoxCoxTransformed[,i])),
                 color="blue", size=1) +
      geom_vline(aes(xintercept=median(DPA_BoxCoxTransformed[,i])),
                 color="red", size=1) +
      theme_bw() +
      ylab("Count") +
      xlab(names(DPA_BoxCoxTransformed)[i]) +
      labs(title=names(DPA_BoxCoxTransformed)[i],
           subtitle=paste0("Median = ", Median,
                           ", Mean = ", Mean,
                           ", Skewness = ", Skewness)))
}
##################################
# Identifying outliers for the numeric predictors
##################################
# Count outliers per transformed predictor using Tukey's boxplot rule
# (boxplot.stats flags points beyond 1.5 * IQR from the hinges).
OutlierCountList <- c()

for (i in seq_len(ncol(DPA_BoxCoxTransformed))) {
  Outliers <- boxplot.stats(DPA_BoxCoxTransformed[,i])$out
  OutlierCount <- length(Outliers)
  OutlierCountList <- append(OutlierCountList,OutlierCount)
  # NOTE(review): OutlierIndices is not used inside this loop; kept in case
  # later code inspects the last iteration's value — confirm before removing.
  OutlierIndices <- which(DPA_BoxCoxTransformed[,i] %in% c(Outliers))
  print(
    ggplot(DPA_BoxCoxTransformed, aes(x=DPA_BoxCoxTransformed[,i])) +
      geom_boxplot() +
      theme_bw() +
      theme(axis.text.y=element_blank(),
            axis.ticks.y=element_blank()) +
      xlab(names(DPA_BoxCoxTransformed)[i]) +
      labs(title=names(DPA_BoxCoxTransformed)[i],
           subtitle=paste0(OutlierCount, " Outlier(s) Detected")))
}

# Reattach the response variable to the transformed predictor set.
DPA_BoxCoxTransformed$diagnosis <- DPA[,c("diagnosis")]
##################################
# Creating the pre-modelling
# train set
##################################
# Pre-modelling analysis set: drop the predictors flagged earlier as
# highly correlated or otherwise redundant, keeping the rest plus diagnosis.
PMA <- DPA_BoxCoxTransformed[,!names(DPA_BoxCoxTransformed) %in% c("concavity_se",
                                                                   "perimeter_worst",
                                                                   "radius_worst",
                                                                   "perimeter_mean",
                                                                   "area_worst",
                                                                   "radius_mean",
                                                                   "perimeter_se",
                                                                   "area_se",
                                                                   "concavity_mean",
                                                                   "concave.points_mean",
                                                                   "concave.points_se",
                                                                   "concavity_worst")]
##################################
# Gathering descriptive statistics
##################################
<- skim(PMA)) (PMA_Skimmed
## Name | PMA |
## Number of rows | 1138 |
## Number of columns | 19 |
## _______________________ | |
## Column type frequency: | |
## factor | 1 |
## numeric | 18 |
## ________________________ | |
## Group variables | None |
## Variable type: factor
## skim_variable | n_missing | complete_rate | ordered | n_unique | top_counts |
## ---|---|---|---|---|---|
## diagnosis | 0 | 1 | FALSE | 2 | B: 714, M: 424 |
## Variable type: numeric
## skim_variable | n_missing | complete_rate | mean | sd | p0 | p25 | p50 | p75 | p100 | hist |
## ---|---|---|---|---|---|---|---|---|---|---|
## texture_mean | 0 | 1 | 2.94 | 0.22 | 2.27 | 2.78 | 2.94 | 3.08 | 3.67 | ▁▅▇▃▁ |
## area_mean | 0 | 1 | 6.36 | 0.48 | 4.97 | 6.04 | 6.31 | 6.66 | 7.82 | ▁▅▇▃▁ |
## smoothness_mean | 0 | 1 | -2.35 | 0.15 | -2.94 | -2.45 | -2.34 | -2.25 | -1.81 | ▁▂▇▃▁ |
## compactness_mean | 0 | 1 | -2.38 | 0.49 | -3.94 | -2.73 | -2.38 | -2.04 | -1.06 | ▁▅▇▇▂ |
## symmetry_mean | 0 | 1 | -2.26 | 0.25 | -3.20 | -2.42 | -2.25 | -2.10 | -1.43 | ▁▂▇▅▁ |
## fractal_dimension_mean | 0 | 1 | -130.58 | 26.03 | -199.82 | -149.68 | -131.52 | -113.87 | -52.16 | ▁▆▇▃▁ |
## radius_se | 0 | 1 | -1.42 | 0.81 | -3.51 | -1.98 | -1.42 | -0.86 | 0.86 | ▁▆▇▅▁ |
## texture_se | 0 | 1 | 0.10 | 0.43 | -1.02 | -0.18 | 0.10 | 0.39 | 1.59 | ▂▆▇▂▁ |
## smoothness_se | 0 | 1 | -11.83 | 1.66 | -19.20 | -12.84 | -11.85 | -10.78 | -6.11 | ▁▂▇▅▁ |
## compactness_se | 0 | 1 | -3.88 | 0.65 | -6.10 | -4.34 | -3.89 | -3.43 | -2.00 | ▁▃▇▆▁ |
## symmetry_se | 0 | 1 | -16.51 | 3.52 | -28.80 | -18.91 | -16.46 | -14.16 | -5.98 | ▁▃▇▅▁ |
## fractal_dimension_se | 0 | 1 | -15.48 | 2.88 | -24.04 | -17.43 | -15.37 | -13.46 | -6.23 | ▁▅▇▃▁ |
## texture_worst | 0 | 1 | 4.53 | 0.46 | 3.22 | 4.20 | 4.55 | 4.85 | 5.91 | ▁▅▇▅▁ |
## smoothness_worst | 0 | 1 | -1.52 | 0.09 | -1.82 | -1.58 | -1.52 | -1.46 | -1.21 | ▁▃▇▃▁ |
## compactness_worst | 0 | 1 | -1.55 | 0.62 | -3.60 | -1.92 | -1.55 | -1.08 | 0.06 | ▁▃▇▆▁ |
## concave.points_worst | 0 | 1 | 0.11 | 0.07 | 0.00 | 0.06 | 0.10 | 0.16 | 0.29 | ▅▇▅▃▁ |
## symmetry_worst | 0 | 1 | -1.77 | 0.37 | -3.06 | -2.00 | -1.76 | -1.55 | -0.45 | ▁▃▇▂▁ |
## fractal_dimension_worst | 0 | 1 | -19.62 | 4.79 | -32.59 | -22.99 | -19.73 | -16.32 | -5.17 | ▁▅▇▃▁ |
##################################
# Loading dataset
##################################
# Use the pre-modelling set as the active data set for the next stage.
DPA <- PMA
##################################
# Listing all predictors
##################################
# All predictor columns (everything except the response).
DPA.Predictors <- DPA[,!names(DPA) %in% c("diagnosis")]
##################################
# Listing all numeric predictors
##################################
# Numeric predictor columns only.
DPA.Predictors.Numeric <- DPA.Predictors[,sapply(DPA.Predictors, is.numeric)]
ncol(DPA.Predictors.Numeric)
## [1] 18
##################################
# Converting response variable data type to factor
##################################
# Convert the response to a factor (two levels: B and M).
DPA$diagnosis <- as.factor(DPA$diagnosis)
length(levels(DPA$diagnosis))
## [1] 2
##################################
# Formulating the box plots
##################################
# Box plots of every numeric predictor grouped by diagnosis class, one
# free-scaled panel per predictor, arranged on a 6 x 3 grid.
featurePlot(x = DPA.Predictors.Numeric,
            y = DPA$diagnosis,
            plot = "box",
            pch = "|",
            adjust = 1.5,
            layout = c(6, 3),
            scales = list(x = list(relation = "free", rot = 90),
                          y = list(relation = "free")))
##################################
# Obtaining the AUROC
##################################
# Univariate predictor screening: for a two-class response, filterVarImp
# returns the area under the ROC curve for each predictor taken alone.
AUROC <- filterVarImp(x = DPA.Predictors.Numeric,
                      y = DPA$diagnosis)

# Build a tidy summary table: predictor name, its AUROC, and a metric label.
AUROC_Summary <- AUROC

AUROC_Summary$Predictor <- rownames(AUROC)
names(AUROC_Summary)[1] <- "AUROC"
AUROC_Summary$Metric <- rep("AUROC",nrow(AUROC))

# Print predictors ranked from most to least discriminating.
AUROC_Summary[order(AUROC_Summary$AUROC, decreasing=TRUE),]
## AUROC M Predictor Metric
## concave.points_worst 0.9667037 0.9667037 concave.points_worst AUROC
## area_mean 0.9383159 0.9383159 area_mean AUROC
## radius_se 0.8683341 0.8683341 radius_se AUROC
## compactness_mean 0.8637823 0.8637823 compactness_mean AUROC
## compactness_worst 0.8623025 0.8623025 compactness_worst AUROC
## texture_worst 0.7846308 0.7846308 texture_worst AUROC
## texture_mean 0.7758245 0.7758245 texture_mean AUROC
## smoothness_worst 0.7540563 0.7540563 smoothness_worst AUROC
## symmetry_worst 0.7369391 0.7369391 symmetry_worst AUROC
## compactness_se 0.7272805 0.7272805 compactness_se AUROC
## smoothness_mean 0.7220416 0.7220416 smoothness_mean AUROC
## symmetry_mean 0.6985624 0.6985624 symmetry_mean AUROC
## fractal_dimension_worst 0.6859706 0.6859706 fractal_dimension_worst AUROC
## fractal_dimension_se 0.6203028 0.6203028 fractal_dimension_se AUROC
## symmetry_se 0.5551107 0.5551107 symmetry_se AUROC
## smoothness_se 0.5311625 0.5311625 smoothness_se AUROC
## fractal_dimension_mean 0.5154656 0.5154656 fractal_dimension_mean AUROC
## texture_se 0.5115943 0.5115943 texture_se AUROC
##################################
# Exploring predictor performance
##################################
# Dot plot of each predictor's AUROC, panelled by metric label.
# prepanel orders the y-axis by increasing AUROC (reorder), so the most
# discriminating predictors appear at the top; the panel function applies
# the same reordering when drawing the points and drop lines.
dotplot(Predictor ~ AUROC | Metric,
AUROC_Summary,origin = 0,
type = c("p", "h"),
pch = 16,
cex = 2,
alpha = 0.45,
prepanel = function(x, y) {
list(ylim = levels(reorder(y, x)))
},panel = function(x, y, ...) {
panel.dotplot(x, reorder(y, x), ...)
})
##################################
# Creating the pre-modelling dataset
# into the train and test sets
##################################
# Retain the response plus the six predictors selected by the
# preceding screening steps.
# (Reconstructed: the extracted text had the assignment targets
# displaced onto the following lines.)
DPA <- DPA[, colnames(DPA) %in% c("diagnosis",
                                  "texture_worst",
                                  "texture_mean",
                                  "smoothness_worst",
                                  "symmetry_worst",
                                  "compactness_se",
                                  "smoothness_mean")]

# Stratified 80/20 split on the diagnosis outcome
set.seed(12345678)
MA_Train_Index <- createDataPartition(DPA$diagnosis, p = 0.8)[[1]]
MA_Train <- DPA[ MA_Train_Index, ]
MA_Test  <- DPA[-MA_Train_Index, ]
dim(MA_Train)
## [1] 912 7
dim(MA_Test)
## [1] 226 7
##################################
# Setting the cross validation process
# using the Repeated K-Fold
##################################
set.seed(12345678)
# 5-fold cross-validation repeated 5 times; class probabilities are
# required so that twoClassSummary can report ROC / Sens / Spec.
# (Reconstructed: the extracted text had the assignment target
# displaced onto the following line.)
RKFold_Control <- trainControl(method = "repeatedcv",
                               number = 5,
                               repeats = 5,
                               summaryFunction = twoClassSummary,
                               classProbs = TRUE)
##################################
# Setting the conditions
# for hyperparameter tuning
##################################
# Tuning grid for AdaBoost.M1. The original grid listed
# mfinal = c(50, 100, 100), which trains the mfinal = 100 models
# twice; the de-duplicated grid below covers the same unique
# combinations and matches the resampling table reported for the
# train set (mfinal in {50, 100} x maxdepth in {4, 5, 6}).
AB_Grid <- expand.grid(mfinal = c(50, 100),
                       maxdepth = c(4, 5, 6),
                       coeflearn = "Breiman")
##################################
# Running the adaptive boosting model
# by setting the caret method to 'AdaBoost.M1'
##################################
set.seed(12345678)
# Fit AdaBoost.M1 over the tuning grid, selecting the model with the
# largest cross-validated ROC.
# (Reconstructed: the extracted text had the assignment target
# displaced onto the following line.)
MBS_AB_Tune <- train(x = MA_Train[, !names(MA_Train) %in% c("diagnosis")],
                     y = MA_Train$diagnosis,
                     method = "AdaBoost.M1",
                     tuneGrid = AB_Grid,
                     metric = "ROC",
                     trControl = RKFold_Control)
##################################
# Reporting the cross-validation results
# for the train set
##################################
MBS_AB_Tune
## AdaBoost.M1
##
## 912 samples
## 6 predictor
## 2 classes: 'B', 'M'
##
## No pre-processing
## Resampling: Cross-Validated (5 fold, repeated 5 times)
## Summary of sample sizes: 729, 729, 730, 730, 730, 730, ...
## Resampling results across tuning parameters:
##
## maxdepth mfinal ROC Sens Spec
## 4 50 0.9539212 0.9454523 0.8682353
## 4 100 0.9600671 0.9506972 0.9017647
## 5 50 0.9624131 0.9531137 0.9011765
## 5 100 0.9671294 0.9506636 0.9000000
## 6 50 0.9666688 0.9496323 0.8982353
## 6 100 0.9710985 0.9527750 0.8964706
##
## Tuning parameter 'coeflearn' was held constant at a value of Breiman
## ROC was used to select the optimal model using the largest value.
## The final values used for the model were mfinal = 100, maxdepth = 6
## and coeflearn = Breiman.
# Inspect the final fitted adabag model chosen by resampling.
# (Reconstructed: the extracted text had the object name displaced
# after the `$finalModel` accessor.)
MBS_AB_Tune$finalModel
## $formula
## .outcome ~ .
## <environment: 0x0000024d60f9b4c0>
##
## $trees
## $trees[[1]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 331 B (0.637061404 0.362938596)
## 2) smoothness_worst< -1.499656 536 100 B (0.813432836 0.186567164)
## 4) texture_mean< 2.963467 314 23 B (0.926751592 0.073248408)
## 8) symmetry_worst>=-2.923662 311 20 B (0.935691318 0.064308682)
## 16) compactness_se< -3.711798 242 7 B (0.971074380 0.028925620)
## 32) texture_mean< 2.874407 167 1 B (0.994011976 0.005988024)
## 64) compactness_se< -4.173143 126 0 B (1.000000000 0.000000000) *
## 65) compactness_se>=-4.173143 41 1 B (0.975609756 0.024390244) *
## 33) texture_mean>=2.874407 75 6 B (0.920000000 0.080000000)
## 66) texture_mean>=2.879477 73 4 B (0.945205479 0.054794521) *
## 67) texture_mean< 2.879477 2 0 M (0.000000000 1.000000000) *
## 17) compactness_se>=-3.711798 69 13 B (0.811594203 0.188405797)
## 34) compactness_se>=-3.48221 50 0 B (1.000000000 0.000000000) *
## 35) compactness_se< -3.48221 19 6 M (0.315789474 0.684210526)
## 70) texture_worst< 3.888609 3 0 B (1.000000000 0.000000000) *
## 71) texture_worst>=3.888609 16 3 M (0.187500000 0.812500000) *
## 9) symmetry_worst< -2.923662 3 0 M (0.000000000 1.000000000) *
## 5) texture_mean>=2.963467 222 77 B (0.653153153 0.346846847)
## 10) smoothness_mean< -2.488676 72 8 B (0.888888889 0.111111111)
## 20) texture_mean>=2.971159 68 4 B (0.941176471 0.058823529)
## 40) symmetry_worst< -1.667161 62 1 B (0.983870968 0.016129032)
## 80) symmetry_worst< -1.695215 56 0 B (1.000000000 0.000000000) *
## 81) symmetry_worst>=-1.695215 6 1 B (0.833333333 0.166666667) *
## 41) symmetry_worst>=-1.667161 6 3 B (0.500000000 0.500000000)
## 82) texture_mean>=3.135016 3 0 B (1.000000000 0.000000000) *
## 83) texture_mean< 3.135016 3 0 M (0.000000000 1.000000000) *
## 21) texture_mean< 2.971159 4 0 M (0.000000000 1.000000000) *
## 11) smoothness_mean>=-2.488676 150 69 B (0.540000000 0.460000000)
## 22) symmetry_worst< -1.527595 128 50 B (0.609375000 0.390625000)
## 44) texture_mean< 3.096482 64 13 B (0.796875000 0.203125000)
## 88) smoothness_mean< -2.411294 23 0 B (1.000000000 0.000000000) *
## 89) smoothness_mean>=-2.411294 41 13 B (0.682926829 0.317073171) *
## 45) texture_mean>=3.096482 64 27 M (0.421875000 0.578125000)
## 90) smoothness_mean< -2.471478 6 0 B (1.000000000 0.000000000) *
## 91) smoothness_mean>=-2.471478 58 21 M (0.362068966 0.637931034) *
## 23) symmetry_worst>=-1.527595 22 3 M (0.136363636 0.863636364)
## 46) smoothness_worst>=-1.506135 3 0 B (1.000000000 0.000000000) *
## 47) smoothness_worst< -1.506135 19 0 M (0.000000000 1.000000000) *
## 3) smoothness_worst>=-1.499656 376 145 M (0.385638298 0.614361702)
## 6) texture_mean< 2.927988 168 55 B (0.672619048 0.327380952)
## 12) symmetry_worst< -1.611674 88 4 B (0.954545455 0.045454545)
## 24) smoothness_worst< -1.427424 78 1 B (0.987179487 0.012820513)
## 48) smoothness_worst>=-1.480531 54 0 B (1.000000000 0.000000000) *
## 49) smoothness_worst< -1.480531 24 1 B (0.958333333 0.041666667)
## 98) smoothness_worst< -1.482701 23 0 B (1.000000000 0.000000000) *
## 99) smoothness_worst>=-1.482701 1 0 M (0.000000000 1.000000000) *
## 25) smoothness_worst>=-1.427424 10 3 B (0.700000000 0.300000000)
## 50) texture_mean< 2.84692 7 0 B (1.000000000 0.000000000) *
## 51) texture_mean>=2.84692 3 0 M (0.000000000 1.000000000) *
## 13) symmetry_worst>=-1.611674 80 29 M (0.362500000 0.637500000)
## 26) compactness_se< -3.646366 36 12 B (0.666666667 0.333333333)
## 52) texture_worst< 4.517878 26 3 B (0.884615385 0.115384615)
## 104) smoothness_mean>=-2.402211 23 0 B (1.000000000 0.000000000) *
## 105) smoothness_mean< -2.402211 3 0 M (0.000000000 1.000000000) *
## 53) texture_worst>=4.517878 10 1 M (0.100000000 0.900000000)
## 106) smoothness_mean< -2.397771 1 0 B (1.000000000 0.000000000) *
## 107) smoothness_mean>=-2.397771 9 0 M (0.000000000 1.000000000) *
## 27) compactness_se>=-3.646366 44 5 M (0.113636364 0.886363636)
## 54) compactness_se>=-2.646661 4 0 B (1.000000000 0.000000000) *
## 55) compactness_se< -2.646661 40 1 M (0.025000000 0.975000000)
## 110) texture_mean< 2.77286 10 1 M (0.100000000 0.900000000) *
## 111) texture_mean>=2.77286 30 0 M (0.000000000 1.000000000) *
## 7) texture_mean>=2.927988 208 32 M (0.153846154 0.846153846)
## 14) compactness_se< -4.025757 40 15 M (0.375000000 0.625000000)
## 28) smoothness_mean>=-2.30109 17 4 B (0.764705882 0.235294118)
## 56) smoothness_mean< -2.222419 13 0 B (1.000000000 0.000000000) *
## 57) smoothness_mean>=-2.222419 4 0 M (0.000000000 1.000000000) *
## 29) smoothness_mean< -2.30109 23 2 M (0.086956522 0.913043478)
## 58) compactness_se>=-4.064037 1 0 B (1.000000000 0.000000000) *
## 59) compactness_se< -4.064037 22 1 M (0.045454545 0.954545455)
## 118) texture_mean>=3.182435 2 1 B (0.500000000 0.500000000) *
## 119) texture_mean< 3.182435 20 0 M (0.000000000 1.000000000) *
## 15) compactness_se>=-4.025757 168 17 M (0.101190476 0.898809524)
## 30) symmetry_worst< -2.179978 12 5 B (0.583333333 0.416666667)
## 60) smoothness_mean< -2.272702 8 1 B (0.875000000 0.125000000)
## 120) texture_mean>=3.061196 7 0 B (1.000000000 0.000000000) *
## 121) texture_mean< 3.061196 1 0 M (0.000000000 1.000000000) *
## 61) smoothness_mean>=-2.272702 4 0 M (0.000000000 1.000000000) *
## 31) symmetry_worst>=-2.179978 156 10 M (0.064102564 0.935897436)
## 62) texture_worst< 4.414433 1 0 B (1.000000000 0.000000000) *
## 63) texture_worst>=4.414433 155 9 M (0.058064516 0.941935484)
## 126) smoothness_mean>=-2.099273 18 4 M (0.222222222 0.777777778) *
## 127) smoothness_mean< -2.099273 137 5 M (0.036496350 0.963503650) *
##
## $trees[[2]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 338 B (0.629385965 0.370614035)
## 2) texture_worst< 4.577679 482 91 B (0.811203320 0.188796680)
## 4) texture_mean< 2.96681 410 58 B (0.858536585 0.141463415)
## 8) symmetry_worst< -1.294443 393 46 B (0.882951654 0.117048346)
## 16) texture_worst< 4.262771 222 9 B (0.959459459 0.040540541)
## 32) texture_mean< 2.909882 221 8 B (0.963800905 0.036199095)
## 64) compactness_se< -3.957552 133 0 B (1.000000000 0.000000000) *
## 65) compactness_se>=-3.957552 88 8 B (0.909090909 0.090909091) *
## 33) texture_mean>=2.909882 1 0 M (0.000000000 1.000000000) *
## 17) texture_worst>=4.262771 171 37 B (0.783625731 0.216374269)
## 34) symmetry_worst>=-2.49184 164 31 B (0.810975610 0.189024390)
## 68) smoothness_worst< -1.55307 59 2 B (0.966101695 0.033898305) *
## 69) smoothness_worst>=-1.55307 105 29 B (0.723809524 0.276190476) *
## 35) symmetry_worst< -2.49184 7 1 M (0.142857143 0.857142857)
## 70) texture_mean< 2.855865 1 0 B (1.000000000 0.000000000) *
## 71) texture_mean>=2.855865 6 0 M (0.000000000 1.000000000) *
## 9) symmetry_worst>=-1.294443 17 5 M (0.294117647 0.705882353)
## 18) compactness_se>=-2.588521 4 0 B (1.000000000 0.000000000) *
## 19) compactness_se< -2.588521 13 1 M (0.076923077 0.923076923)
## 38) smoothness_mean< -2.302887 1 0 B (1.000000000 0.000000000) *
## 39) smoothness_mean>=-2.302887 12 0 M (0.000000000 1.000000000) *
## 5) texture_mean>=2.96681 72 33 B (0.541666667 0.458333333)
## 10) smoothness_worst< -1.481325 56 18 B (0.678571429 0.321428571)
## 20) texture_mean>=2.996231 31 4 B (0.870967742 0.129032258)
## 40) smoothness_worst>=-1.678162 29 2 B (0.931034483 0.068965517)
## 80) symmetry_worst< -1.516281 28 1 B (0.964285714 0.035714286) *
## 81) symmetry_worst>=-1.516281 1 0 M (0.000000000 1.000000000) *
## 41) smoothness_worst< -1.678162 2 0 M (0.000000000 1.000000000) *
## 21) texture_mean< 2.996231 25 11 M (0.440000000 0.560000000)
## 42) texture_worst< 4.357182 7 0 B (1.000000000 0.000000000) *
## 43) texture_worst>=4.357182 18 4 M (0.222222222 0.777777778)
## 86) texture_worst>=4.467472 2 0 B (1.000000000 0.000000000) *
## 87) texture_worst< 4.467472 16 2 M (0.125000000 0.875000000) *
## 11) smoothness_worst>=-1.481325 16 1 M (0.062500000 0.937500000)
## 22) compactness_se< -4.16079 1 0 B (1.000000000 0.000000000) *
## 23) compactness_se>=-4.16079 15 0 M (0.000000000 1.000000000) *
## 3) texture_worst>=4.577679 430 183 M (0.425581395 0.574418605)
## 6) smoothness_mean< -2.362601 195 64 B (0.671794872 0.328205128)
## 12) smoothness_worst< -1.60101 65 4 B (0.938461538 0.061538462)
## 24) symmetry_worst< -1.180749 63 2 B (0.968253968 0.031746032)
## 48) smoothness_mean< -2.4008 62 1 B (0.983870968 0.016129032)
## 96) symmetry_worst< -1.681012 57 0 B (1.000000000 0.000000000) *
## 97) symmetry_worst>=-1.681012 5 1 B (0.800000000 0.200000000) *
## 49) smoothness_mean>=-2.4008 1 0 M (0.000000000 1.000000000) *
## 25) symmetry_worst>=-1.180749 2 0 M (0.000000000 1.000000000) *
## 13) smoothness_worst>=-1.60101 130 60 B (0.538461538 0.461538462)
## 26) texture_worst>=4.646117 105 36 B (0.657142857 0.342857143)
## 52) symmetry_worst< -1.39888 96 27 B (0.718750000 0.281250000)
## 104) compactness_se>=-4.567426 89 20 B (0.775280899 0.224719101) *
## 105) compactness_se< -4.567426 7 0 M (0.000000000 1.000000000) *
## 53) symmetry_worst>=-1.39888 9 0 M (0.000000000 1.000000000) *
## 27) texture_worst< 4.646117 25 1 M (0.040000000 0.960000000)
## 54) compactness_se< -4.694501 1 0 B (1.000000000 0.000000000) *
## 55) compactness_se>=-4.694501 24 0 M (0.000000000 1.000000000) *
## 7) smoothness_mean>=-2.362601 235 52 M (0.221276596 0.778723404)
## 14) symmetry_worst< -1.659152 88 38 M (0.431818182 0.568181818)
## 28) texture_mean< 3.081899 42 12 B (0.714285714 0.285714286)
## 56) compactness_se< -3.644943 32 5 B (0.843750000 0.156250000)
## 112) smoothness_mean< -2.22335 25 1 B (0.960000000 0.040000000) *
## 113) smoothness_mean>=-2.22335 7 3 M (0.428571429 0.571428571) *
## 57) compactness_se>=-3.644943 10 3 M (0.300000000 0.700000000)
## 114) smoothness_mean>=-2.12394 3 0 B (1.000000000 0.000000000) *
## 115) smoothness_mean< -2.12394 7 0 M (0.000000000 1.000000000) *
## 29) texture_mean>=3.081899 46 8 M (0.173913043 0.826086957)
## 58) smoothness_mean>=-2.099273 4 0 B (1.000000000 0.000000000) *
## 59) smoothness_mean< -2.099273 42 4 M (0.095238095 0.904761905)
## 118) smoothness_worst< -1.550482 3 1 B (0.666666667 0.333333333) *
## 119) smoothness_worst>=-1.550482 39 2 M (0.051282051 0.948717949) *
## 15) symmetry_worst>=-1.659152 147 14 M (0.095238095 0.904761905)
## 30) smoothness_worst< -1.500466 34 11 M (0.323529412 0.676470588)
## 60) smoothness_worst>=-1.506135 7 0 B (1.000000000 0.000000000) *
## 61) smoothness_worst< -1.506135 27 4 M (0.148148148 0.851851852)
## 122) smoothness_mean< -2.336585 9 4 M (0.444444444 0.555555556) *
## 123) smoothness_mean>=-2.336585 18 0 M (0.000000000 1.000000000) *
## 31) smoothness_worst>=-1.500466 113 3 M (0.026548673 0.973451327)
## 62) texture_worst< 4.599763 2 0 B (1.000000000 0.000000000) *
## 63) texture_worst>=4.599763 111 1 M (0.009009009 0.990990991)
## 126) texture_worst< 4.682677 7 1 M (0.142857143 0.857142857) *
## 127) texture_worst>=4.682677 104 0 M (0.000000000 1.000000000) *
##
## $trees[[3]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 392 B (0.570175439 0.429824561)
## 2) smoothness_worst< -1.603315 161 10 B (0.937888199 0.062111801)
## 4) symmetry_worst< -1.165944 160 9 B (0.943750000 0.056250000)
## 8) texture_worst< 4.850312 133 4 B (0.969924812 0.030075188)
## 16) smoothness_worst>=-1.723213 119 1 B (0.991596639 0.008403361)
## 32) symmetry_worst< -1.804928 83 0 B (1.000000000 0.000000000) *
## 33) symmetry_worst>=-1.804928 36 1 B (0.972222222 0.027777778)
## 66) symmetry_worst>=-1.748651 35 0 B (1.000000000 0.000000000) *
## 67) symmetry_worst< -1.748651 1 0 M (0.000000000 1.000000000) *
## 17) smoothness_worst< -1.723213 14 3 B (0.785714286 0.214285714)
## 34) compactness_se< -3.013033 11 0 B (1.000000000 0.000000000) *
## 35) compactness_se>=-3.013033 3 0 M (0.000000000 1.000000000) *
## 9) texture_worst>=4.850312 27 5 B (0.814814815 0.185185185)
## 18) texture_mean>=2.992821 24 2 B (0.916666667 0.083333333)
## 36) texture_worst>=4.929933 22 0 B (1.000000000 0.000000000) *
## 37) texture_worst< 4.929933 2 0 M (0.000000000 1.000000000) *
## 19) texture_mean< 2.992821 3 0 M (0.000000000 1.000000000) *
## 5) symmetry_worst>=-1.165944 1 0 M (0.000000000 1.000000000) *
## 3) smoothness_worst>=-1.603315 751 369 M (0.491344874 0.508655126)
## 6) texture_worst< 4.275472 155 32 B (0.793548387 0.206451613)
## 12) symmetry_worst< -1.42974 132 15 B (0.886363636 0.113636364)
## 24) smoothness_mean< -2.074653 126 10 B (0.920634921 0.079365079)
## 48) smoothness_worst>=-1.602623 124 8 B (0.935483871 0.064516129)
## 96) compactness_se< -3.892047 80 0 B (1.000000000 0.000000000) *
## 97) compactness_se>=-3.892047 44 8 B (0.818181818 0.181818182) *
## 49) smoothness_worst< -1.602623 2 0 M (0.000000000 1.000000000) *
## 25) smoothness_mean>=-2.074653 6 1 M (0.166666667 0.833333333)
## 50) texture_mean>=2.515298 1 0 B (1.000000000 0.000000000) *
## 51) texture_mean< 2.515298 5 0 M (0.000000000 1.000000000) *
## 13) symmetry_worst>=-1.42974 23 6 M (0.260869565 0.739130435)
## 26) smoothness_worst< -1.505076 5 0 B (1.000000000 0.000000000) *
## 27) smoothness_worst>=-1.505076 18 1 M (0.055555556 0.944444444)
## 54) texture_mean< 2.622235 3 1 M (0.333333333 0.666666667)
## 108) texture_mean>=2.463446 1 0 B (1.000000000 0.000000000) *
## 109) texture_mean< 2.463446 2 0 M (0.000000000 1.000000000) *
## 55) texture_mean>=2.622235 15 0 M (0.000000000 1.000000000) *
## 7) texture_worst>=4.275472 596 246 M (0.412751678 0.587248322)
## 14) smoothness_mean< -2.416986 117 39 B (0.666666667 0.333333333)
## 28) smoothness_mean>=-2.467991 71 12 B (0.830985915 0.169014085)
## 56) symmetry_worst>=-1.984547 58 3 B (0.948275862 0.051724138)
## 112) compactness_se>=-4.650552 54 1 B (0.981481481 0.018518519) *
## 113) compactness_se< -4.650552 4 2 B (0.500000000 0.500000000) *
## 57) symmetry_worst< -1.984547 13 4 M (0.307692308 0.692307692)
## 114) texture_mean< 2.991795 4 0 B (1.000000000 0.000000000) *
## 115) texture_mean>=2.991795 9 0 M (0.000000000 1.000000000) *
## 29) smoothness_mean< -2.467991 46 19 M (0.413043478 0.586956522)
## 58) compactness_se< -4.356557 18 4 B (0.777777778 0.222222222)
## 116) texture_worst< 5.05366 14 0 B (1.000000000 0.000000000) *
## 117) texture_worst>=5.05366 4 0 M (0.000000000 1.000000000) *
## 59) compactness_se>=-4.356557 28 5 M (0.178571429 0.821428571)
## 118) texture_mean< 2.868712 2 0 B (1.000000000 0.000000000) *
## 119) texture_mean>=2.868712 26 3 M (0.115384615 0.884615385) *
## 15) smoothness_mean>=-2.416986 479 168 M (0.350730689 0.649269311)
## 30) symmetry_worst< -1.652093 263 126 M (0.479087452 0.520912548)
## 60) smoothness_mean>=-2.355934 155 56 B (0.638709677 0.361290323)
## 120) texture_mean< 3.214868 142 44 B (0.690140845 0.309859155) *
## 121) texture_mean>=3.214868 13 1 M (0.076923077 0.923076923) *
## 61) smoothness_mean< -2.355934 108 27 M (0.250000000 0.750000000)
## 122) smoothness_worst< -1.579002 11 0 B (1.000000000 0.000000000) *
## 123) smoothness_worst>=-1.579002 97 16 M (0.164948454 0.835051546) *
## 31) symmetry_worst>=-1.652093 216 42 M (0.194444444 0.805555556)
## 62) compactness_se< -4.002529 43 21 B (0.511627907 0.488372093)
## 124) symmetry_worst>=-1.539708 23 4 B (0.826086957 0.173913043) *
## 125) symmetry_worst< -1.539708 20 3 M (0.150000000 0.850000000) *
## 63) compactness_se>=-4.002529 173 20 M (0.115606936 0.884393064)
## 126) smoothness_mean< -2.216408 92 20 M (0.217391304 0.782608696) *
## 127) smoothness_mean>=-2.216408 81 0 M (0.000000000 1.000000000) *
##
## $trees[[4]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 414 B (0.546052632 0.453947368)
## 2) smoothness_worst< -1.556752 261 57 B (0.781609195 0.218390805)
## 4) compactness_se< -3.489046 203 31 B (0.847290640 0.152709360)
## 8) symmetry_worst>=-2.896033 198 26 B (0.868686869 0.131313131)
## 16) symmetry_worst< -1.863339 120 6 B (0.950000000 0.050000000)
## 32) smoothness_mean< -2.332092 108 1 B (0.990740741 0.009259259)
## 64) smoothness_worst< -1.558926 107 0 B (1.000000000 0.000000000) *
## 65) smoothness_worst>=-1.558926 1 0 M (0.000000000 1.000000000) *
## 33) smoothness_mean>=-2.332092 12 5 B (0.583333333 0.416666667)
## 66) texture_mean< 2.884144 5 0 B (1.000000000 0.000000000) *
## 67) texture_mean>=2.884144 7 2 M (0.285714286 0.714285714) *
## 17) symmetry_worst>=-1.863339 78 20 B (0.743589744 0.256410256)
## 34) symmetry_worst>=-1.859739 73 15 B (0.794520548 0.205479452)
## 68) texture_worst< 5.110945 65 10 B (0.846153846 0.153846154) *
## 69) texture_worst>=5.110945 8 3 M (0.375000000 0.625000000) *
## 35) symmetry_worst< -1.859739 5 0 M (0.000000000 1.000000000) *
## 9) symmetry_worst< -2.896033 5 0 M (0.000000000 1.000000000) *
## 5) compactness_se>=-3.489046 58 26 B (0.551724138 0.448275862)
## 10) texture_worst< 4.425081 25 3 B (0.880000000 0.120000000)
## 20) compactness_se>=-3.439472 22 0 B (1.000000000 0.000000000) *
## 21) compactness_se< -3.439472 3 0 M (0.000000000 1.000000000) *
## 11) texture_worst>=4.425081 33 10 M (0.303030303 0.696969697)
## 22) smoothness_worst< -1.647098 13 4 B (0.692307692 0.307692308)
## 44) compactness_se< -2.979429 8 0 B (1.000000000 0.000000000) *
## 45) compactness_se>=-2.979429 5 1 M (0.200000000 0.800000000)
## 90) texture_mean< 3.076827 1 0 B (1.000000000 0.000000000) *
## 91) texture_mean>=3.076827 4 0 M (0.000000000 1.000000000) *
## 23) smoothness_worst>=-1.647098 20 1 M (0.050000000 0.950000000)
## 46) smoothness_worst< -1.603323 3 1 M (0.333333333 0.666666667)
## 92) smoothness_mean< -2.421794 1 0 B (1.000000000 0.000000000) *
## 93) smoothness_mean>=-2.421794 2 0 M (0.000000000 1.000000000) *
## 47) smoothness_worst>=-1.603323 17 0 M (0.000000000 1.000000000) *
## 3) smoothness_worst>=-1.556752 651 294 M (0.451612903 0.548387097)
## 6) texture_mean< 2.810904 136 37 B (0.727941176 0.272058824)
## 12) smoothness_mean< -2.074653 119 23 B (0.806722689 0.193277311)
## 24) symmetry_worst< -1.180109 113 17 B (0.849557522 0.150442478)
## 48) smoothness_worst>=-1.54469 106 12 B (0.886792453 0.113207547)
## 96) texture_mean< 2.739547 61 0 B (1.000000000 0.000000000) *
## 97) texture_mean>=2.739547 45 12 B (0.733333333 0.266666667) *
## 49) smoothness_worst< -1.54469 7 2 M (0.285714286 0.714285714)
## 98) texture_mean< 2.679131 2 0 B (1.000000000 0.000000000) *
## 99) texture_mean>=2.679131 5 0 M (0.000000000 1.000000000) *
## 25) symmetry_worst>=-1.180109 6 0 M (0.000000000 1.000000000) *
## 13) smoothness_mean>=-2.074653 17 3 M (0.176470588 0.823529412)
## 26) symmetry_worst>=-1.411591 6 3 B (0.500000000 0.500000000)
## 52) texture_mean< 2.692775 3 0 B (1.000000000 0.000000000) *
## 53) texture_mean>=2.692775 3 0 M (0.000000000 1.000000000) *
## 27) symmetry_worst< -1.411591 11 0 M (0.000000000 1.000000000) *
## 7) texture_mean>=2.810904 515 195 M (0.378640777 0.621359223)
## 14) smoothness_mean< -2.486388 17 0 B (1.000000000 0.000000000) *
## 15) smoothness_mean>=-2.486388 498 178 M (0.357429719 0.642570281)
## 30) symmetry_worst< -2.202388 31 7 B (0.774193548 0.225806452)
## 60) symmetry_worst>=-2.379234 27 3 B (0.888888889 0.111111111)
## 120) smoothness_mean>=-2.443464 25 1 B (0.960000000 0.040000000) *
## 121) smoothness_mean< -2.443464 2 0 M (0.000000000 1.000000000) *
## 61) symmetry_worst< -2.379234 4 0 M (0.000000000 1.000000000) *
## 31) symmetry_worst>=-2.202388 467 154 M (0.329764454 0.670235546)
## 62) symmetry_worst< -1.424186 391 146 M (0.373401535 0.626598465)
## 124) smoothness_worst>=-1.536189 342 142 M (0.415204678 0.584795322) *
## 125) smoothness_worst< -1.536189 49 4 M (0.081632653 0.918367347) *
## 63) symmetry_worst>=-1.424186 76 8 M (0.105263158 0.894736842)
## 126) smoothness_worst< -1.49649 6 2 B (0.666666667 0.333333333) *
## 127) smoothness_worst>=-1.49649 70 4 M (0.057142857 0.942857143) *
##
## $trees[[5]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 374 B (0.58991228 0.41008772)
## 2) symmetry_worst< -1.294443 861 327 B (0.62020906 0.37979094)
## 4) compactness_se< -4.05215 321 79 B (0.75389408 0.24610592)
## 8) smoothness_mean>=-2.294121 82 1 B (0.98780488 0.01219512)
## 16) smoothness_worst< -1.433156 61 0 B (1.00000000 0.00000000) *
## 17) smoothness_worst>=-1.433156 21 1 B (0.95238095 0.04761905)
## 34) smoothness_worst>=-1.428706 20 0 B (1.00000000 0.00000000) *
## 35) smoothness_worst< -1.428706 1 0 M (0.00000000 1.00000000) *
## 9) smoothness_mean< -2.294121 239 78 B (0.67364017 0.32635983)
## 18) texture_mean< 2.897016 115 15 B (0.86956522 0.13043478)
## 36) smoothness_mean< -2.295113 112 12 B (0.89285714 0.10714286)
## 72) compactness_se< -4.17052 92 4 B (0.95652174 0.04347826) *
## 73) compactness_se>=-4.17052 20 8 B (0.60000000 0.40000000) *
## 37) smoothness_mean>=-2.295113 3 0 M (0.00000000 1.00000000) *
## 19) texture_mean>=2.897016 124 61 M (0.49193548 0.50806452)
## 38) compactness_se< -4.706178 21 0 B (1.00000000 0.00000000) *
## 39) compactness_se>=-4.706178 103 40 M (0.38834951 0.61165049)
## 78) smoothness_worst>=-1.452317 15 1 B (0.93333333 0.06666667) *
## 79) smoothness_worst< -1.452317 88 26 M (0.29545455 0.70454545) *
## 5) compactness_se>=-4.05215 540 248 B (0.54074074 0.45925926)
## 10) smoothness_mean< -2.332015 250 74 B (0.70400000 0.29600000)
## 20) smoothness_worst< -1.602165 61 6 B (0.90163934 0.09836066)
## 40) smoothness_worst>=-1.723213 58 4 B (0.93103448 0.06896552)
## 80) symmetry_worst< -1.806005 37 0 B (1.00000000 0.00000000) *
## 81) symmetry_worst>=-1.806005 21 4 B (0.80952381 0.19047619) *
## 41) smoothness_worst< -1.723213 3 1 M (0.33333333 0.66666667)
## 82) texture_mean< 3.026052 1 0 B (1.00000000 0.00000000) *
## 83) texture_mean>=3.026052 2 0 M (0.00000000 1.00000000) *
## 21) smoothness_worst>=-1.602165 189 68 B (0.64021164 0.35978836)
## 42) texture_mean< 3.038055 116 31 B (0.73275862 0.26724138)
## 84) texture_worst>=4.450993 72 8 B (0.88888889 0.11111111) *
## 85) texture_worst< 4.450993 44 21 M (0.47727273 0.52272727) *
## 43) texture_mean>=3.038055 73 36 M (0.49315068 0.50684932)
## 86) texture_worst>=4.803681 52 16 B (0.69230769 0.30769231) *
## 87) texture_worst< 4.803681 21 0 M (0.00000000 1.00000000) *
## 11) smoothness_mean>=-2.332015 290 116 M (0.40000000 0.60000000)
## 22) smoothness_worst>=-1.479941 152 73 B (0.51973684 0.48026316)
## 44) compactness_se< -3.294139 124 46 B (0.62903226 0.37096774)
## 88) compactness_se>=-3.492992 49 4 B (0.91836735 0.08163265) *
## 89) compactness_se< -3.492992 75 33 M (0.44000000 0.56000000) *
## 45) compactness_se>=-3.294139 28 1 M (0.03571429 0.96428571)
## 90) texture_mean< 2.701935 1 0 B (1.00000000 0.00000000) *
## 91) texture_mean>=2.701935 27 0 M (0.00000000 1.00000000) *
## 23) smoothness_worst< -1.479941 138 37 M (0.26811594 0.73188406)
## 46) symmetry_worst< -2.182761 13 3 B (0.76923077 0.23076923)
## 92) smoothness_mean< -2.27667 10 0 B (1.00000000 0.00000000) *
## 93) smoothness_mean>=-2.27667 3 0 M (0.00000000 1.00000000) *
## 47) symmetry_worst>=-2.182761 125 27 M (0.21600000 0.78400000)
## 94) smoothness_mean>=-2.274972 53 18 M (0.33962264 0.66037736) *
## 95) smoothness_mean< -2.274972 72 9 M (0.12500000 0.87500000) *
## 3) symmetry_worst>=-1.294443 51 4 M (0.07843137 0.92156863)
## 6) smoothness_mean< -2.28924 13 4 M (0.30769231 0.69230769)
## 12) compactness_se>=-4.00428 6 2 B (0.66666667 0.33333333)
## 24) compactness_se< -3.441917 4 0 B (1.00000000 0.00000000) *
## 25) compactness_se>=-3.441917 2 0 M (0.00000000 1.00000000) *
## 13) compactness_se< -4.00428 7 0 M (0.00000000 1.00000000) *
## 7) smoothness_mean>=-2.28924 38 0 M (0.00000000 1.00000000) *
##
## $trees[[6]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 378 B (0.58552632 0.41447368)
## 2) texture_worst< 4.178472 132 14 B (0.89393939 0.10606061)
## 4) symmetry_worst< -1.086115 127 9 B (0.92913386 0.07086614)
## 8) texture_mean< 2.767575 100 2 B (0.98000000 0.02000000)
## 16) texture_mean>=2.479051 85 0 B (1.00000000 0.00000000) *
## 17) texture_mean< 2.479051 15 2 B (0.86666667 0.13333333)
## 34) texture_mean< 2.449364 13 0 B (1.00000000 0.00000000) *
## 35) texture_mean>=2.449364 2 0 M (0.00000000 1.00000000) *
## 9) texture_mean>=2.767575 27 7 B (0.74074074 0.25925926)
## 18) symmetry_worst< -1.431268 23 3 B (0.86956522 0.13043478)
## 36) texture_mean>=2.771335 20 0 B (1.00000000 0.00000000) *
## 37) texture_mean< 2.771335 3 0 M (0.00000000 1.00000000) *
## 19) symmetry_worst>=-1.431268 4 0 M (0.00000000 1.00000000) *
## 5) symmetry_worst>=-1.086115 5 0 M (0.00000000 1.00000000) *
## 3) texture_worst>=4.178472 780 364 B (0.53333333 0.46666667)
## 6) smoothness_worst< -1.520292 366 122 B (0.66666667 0.33333333)
## 12) symmetry_worst< -1.861897 175 30 B (0.82857143 0.17142857)
## 24) symmetry_worst>=-3.054794 172 27 B (0.84302326 0.15697674)
## 48) smoothness_worst>=-1.543427 50 1 B (0.98000000 0.02000000)
## 96) smoothness_worst< -1.52112 49 0 B (1.00000000 0.00000000) *
## 97) smoothness_worst>=-1.52112 1 0 M (0.00000000 1.00000000) *
## 49) smoothness_worst< -1.543427 122 26 B (0.78688525 0.21311475)
## 98) smoothness_worst< -1.550482 115 19 B (0.83478261 0.16521739) *
## 99) smoothness_worst>=-1.550482 7 0 M (0.00000000 1.00000000) *
## 25) symmetry_worst< -3.054794 3 0 M (0.00000000 1.00000000) *
## 13) symmetry_worst>=-1.861897 191 92 B (0.51832461 0.48167539)
## 26) symmetry_worst>=-1.750623 128 48 B (0.62500000 0.37500000)
## 52) texture_mean< 2.984348 62 9 B (0.85483871 0.14516129)
## 104) compactness_se>=-4.62493 53 1 B (0.98113208 0.01886792) *
## 105) compactness_se< -4.62493 9 1 M (0.11111111 0.88888889) *
## 53) texture_mean>=2.984348 66 27 M (0.40909091 0.59090909)
## 106) symmetry_worst< -1.720387 12 0 B (1.00000000 0.00000000) *
## 107) symmetry_worst>=-1.720387 54 15 M (0.27777778 0.72222222) *
## 27) symmetry_worst< -1.750623 63 19 M (0.30158730 0.69841270)
## 54) compactness_se< -3.737913 33 16 M (0.48484848 0.51515152)
## 108) compactness_se>=-4.157608 10 0 B (1.00000000 0.00000000) *
## 109) compactness_se< -4.157608 23 6 M (0.26086957 0.73913043) *
## 55) compactness_se>=-3.737913 30 3 M (0.10000000 0.90000000)
## 110) symmetry_worst< -1.843767 2 0 B (1.00000000 0.00000000) *
## 111) symmetry_worst>=-1.843767 28 1 M (0.03571429 0.96428571) *
## 7) smoothness_worst>=-1.520292 414 172 M (0.41545894 0.58454106)
## 14) symmetry_worst< -1.424186 346 162 M (0.46820809 0.53179191)
## 28) smoothness_worst>=-1.51308 325 162 M (0.49846154 0.50153846)
## 56) texture_worst< 4.858219 230 99 B (0.56956522 0.43043478)
## 112) texture_worst>=4.352293 186 65 B (0.65053763 0.34946237) *
## 113) texture_worst< 4.352293 44 10 M (0.22727273 0.77272727) *
## 57) texture_worst>=4.858219 95 31 M (0.32631579 0.67368421)
## 114) smoothness_mean< -2.336091 26 8 B (0.69230769 0.30769231) *
## 115) smoothness_mean>=-2.336091 69 13 M (0.18840580 0.81159420) *
## 29) smoothness_worst< -1.51308 21 0 M (0.00000000 1.00000000) *
## 15) symmetry_worst>=-1.424186 68 10 M (0.14705882 0.85294118)
## 30) texture_worst< 4.391935 10 4 B (0.60000000 0.40000000)
## 60) smoothness_mean< -2.187813 6 0 B (1.00000000 0.00000000) *
## 61) smoothness_mean>=-2.187813 4 0 M (0.00000000 1.00000000) *
## 31) texture_worst>=4.391935 58 4 M (0.06896552 0.93103448)
## 62) smoothness_worst< -1.501886 3 0 B (1.00000000 0.00000000) *
## 63) smoothness_worst>=-1.501886 55 1 M (0.01818182 0.98181818)
## 126) texture_worst< 4.575764 4 1 M (0.25000000 0.75000000) *
## 127) texture_worst>=4.575764 51 0 M (0.00000000 1.00000000) *
##
## $trees[[7]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 413 B (0.54714912 0.45285088)
## 2) smoothness_worst< -1.603315 113 14 B (0.87610619 0.12389381)
## 4) smoothness_worst>=-1.723213 110 11 B (0.90000000 0.10000000)
## 8) symmetry_worst< -1.777195 69 0 B (1.00000000 0.00000000) *
## 9) symmetry_worst>=-1.777195 41 11 B (0.73170732 0.26829268)
## 18) symmetry_worst>=-1.748651 37 7 B (0.81081081 0.18918919)
## 36) compactness_se>=-4.279133 26 2 B (0.92307692 0.07692308)
## 72) texture_mean< 3.160844 25 1 B (0.96000000 0.04000000) *
## 73) texture_mean>=3.160844 1 0 M (0.00000000 1.00000000) *
## 37) compactness_se< -4.279133 11 5 B (0.54545455 0.45454545)
## 74) smoothness_mean< -2.529127 5 0 B (1.00000000 0.00000000) *
## 75) smoothness_mean>=-2.529127 6 1 M (0.16666667 0.83333333) *
## 19) symmetry_worst< -1.748651 4 0 M (0.00000000 1.00000000) *
## 5) smoothness_worst< -1.723213 3 0 M (0.00000000 1.00000000) *
## 3) smoothness_worst>=-1.603315 799 399 B (0.50062578 0.49937422)
## 6) texture_mean< 2.927988 339 121 B (0.64306785 0.35693215)
## 12) texture_mean>=2.89867 71 5 B (0.92957746 0.07042254)
## 24) compactness_se>=-4.62493 65 1 B (0.98461538 0.01538462)
## 48) texture_worst< 4.739939 64 0 B (1.00000000 0.00000000) *
## 49) texture_worst>=4.739939 1 0 M (0.00000000 1.00000000) *
## 25) compactness_se< -4.62493 6 2 M (0.33333333 0.66666667)
## 50) texture_mean< 2.912851 2 0 B (1.00000000 0.00000000) *
## 51) texture_mean>=2.912851 4 0 M (0.00000000 1.00000000) *
## 13) texture_mean< 2.89867 268 116 B (0.56716418 0.43283582)
## 26) texture_mean< 2.892591 239 88 B (0.63179916 0.36820084)
## 52) texture_mean< 2.708379 62 6 B (0.90322581 0.09677419)
## 104) texture_mean>=2.496294 51 0 B (1.00000000 0.00000000) *
## 105) texture_mean< 2.496294 11 5 M (0.45454545 0.54545455) *
## 53) texture_mean>=2.708379 177 82 B (0.53672316 0.46327684)
## 106) compactness_se< -4.50262 20 0 B (1.00000000 0.00000000) *
## 107) compactness_se>=-4.50262 157 75 M (0.47770701 0.52229299) *
## 27) texture_mean>=2.892591 29 1 M (0.03448276 0.96551724)
## 54) smoothness_mean< -2.409236 1 0 B (1.00000000 0.00000000) *
## 55) smoothness_mean>=-2.409236 28 0 M (0.00000000 1.00000000) *
## 7) texture_mean>=2.927988 460 182 M (0.39565217 0.60434783)
## 14) compactness_se< -3.721197 212 98 B (0.53773585 0.46226415)
## 28) compactness_se>=-3.865662 44 7 B (0.84090909 0.15909091)
## 56) smoothness_worst< -1.48132 33 0 B (1.00000000 0.00000000) *
## 57) smoothness_worst>=-1.48132 11 4 M (0.36363636 0.63636364)
## 114) texture_mean< 2.971675 4 0 B (1.00000000 0.00000000) *
## 115) texture_mean>=2.971675 7 0 M (0.00000000 1.00000000) *
## 29) compactness_se< -3.865662 168 77 M (0.45833333 0.54166667)
## 58) symmetry_worst< -2.218846 17 0 B (1.00000000 0.00000000) *
## 59) symmetry_worst>=-2.218846 151 60 M (0.39735099 0.60264901)
## 118) compactness_se< -4.557422 19 3 B (0.84210526 0.15789474) *
## 119) compactness_se>=-4.557422 132 44 M (0.33333333 0.66666667) *
## 15) compactness_se>=-3.721197 248 68 M (0.27419355 0.72580645)
## 30) texture_worst< 4.411908 18 3 B (0.83333333 0.16666667)
## 60) smoothness_worst< -1.510166 15 0 B (1.00000000 0.00000000) *
## 61) smoothness_worst>=-1.510166 3 0 M (0.00000000 1.00000000) *
## 31) texture_worst>=4.411908 230 53 M (0.23043478 0.76956522)
## 62) smoothness_mean< -2.289177 124 47 M (0.37903226 0.62096774)
## 124) smoothness_worst>=-1.476409 37 11 B (0.70270270 0.29729730) *
## 125) smoothness_worst< -1.476409 87 21 M (0.24137931 0.75862069) *
## 63) smoothness_mean>=-2.289177 106 6 M (0.05660377 0.94339623)
## 126) smoothness_mean>=-2.093138 15 6 M (0.40000000 0.60000000) *
## 127) smoothness_mean< -2.093138 91 0 M (0.00000000 1.00000000) *
##
## $trees[[8]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 327 B (0.64144737 0.35855263)
## 2) compactness_se< -3.678758 549 147 B (0.73224044 0.26775956)
## 4) symmetry_worst< -1.329407 535 134 B (0.74953271 0.25046729)
## 8) texture_worst< 4.389172 160 17 B (0.89375000 0.10625000)
## 16) compactness_se< -4.166611 77 0 B (1.00000000 0.00000000) *
## 17) compactness_se>=-4.166611 83 17 B (0.79518072 0.20481928)
## 34) compactness_se>=-4.160164 76 10 B (0.86842105 0.13157895)
## 68) smoothness_mean< -2.099567 74 8 B (0.89189189 0.10810811) *
## 69) smoothness_mean>=-2.099567 2 0 M (0.00000000 1.00000000) *
## 35) compactness_se< -4.160164 7 0 M (0.00000000 1.00000000) *
## 9) texture_worst>=4.389172 375 117 B (0.68800000 0.31200000)
## 18) symmetry_worst>=-2.164014 342 95 B (0.72222222 0.27777778)
## 36) texture_worst>=4.642157 187 31 B (0.83422460 0.16577540)
## 72) smoothness_mean< -2.203647 183 27 B (0.85245902 0.14754098) *
## 73) smoothness_mean>=-2.203647 4 0 M (0.00000000 1.00000000) *
## 37) texture_worst< 4.642157 155 64 B (0.58709677 0.41290323)
## 74) compactness_se< -4.198706 61 12 B (0.80327869 0.19672131) *
## 75) compactness_se>=-4.198706 94 42 M (0.44680851 0.55319149) *
## 19) symmetry_worst< -2.164014 33 11 M (0.33333333 0.66666667)
## 38) smoothness_mean< -2.392268 5 0 B (1.00000000 0.00000000) *
## 39) smoothness_mean>=-2.392268 28 6 M (0.21428571 0.78571429)
## 78) smoothness_mean>=-2.302192 5 1 B (0.80000000 0.20000000) *
## 79) smoothness_mean< -2.302192 23 2 M (0.08695652 0.91304348) *
## 5) symmetry_worst>=-1.329407 14 1 M (0.07142857 0.92857143)
## 10) texture_mean< 2.814567 1 0 B (1.00000000 0.00000000) *
## 11) texture_mean>=2.814567 13 0 M (0.00000000 1.00000000) *
## 3) compactness_se>=-3.678758 363 180 B (0.50413223 0.49586777)
## 6) smoothness_mean< -2.296611 228 86 B (0.62280702 0.37719298)
## 12) texture_mean< 3.058688 146 39 B (0.73287671 0.26712329)
## 24) compactness_se>=-3.593774 129 24 B (0.81395349 0.18604651)
## 48) texture_mean>=2.782752 96 8 B (0.91666667 0.08333333)
## 96) texture_worst< 4.674843 72 1 B (0.98611111 0.01388889) *
## 97) texture_worst>=4.674843 24 7 B (0.70833333 0.29166667) *
## 49) texture_mean< 2.782752 33 16 B (0.51515152 0.48484848)
## 98) smoothness_mean< -2.461467 15 0 B (1.00000000 0.00000000) *
## 99) smoothness_mean>=-2.461467 18 2 M (0.11111111 0.88888889) *
## 25) compactness_se< -3.593774 17 2 M (0.11764706 0.88235294)
## 50) texture_worst< 4.254671 2 0 B (1.00000000 0.00000000) *
## 51) texture_worst>=4.254671 15 0 M (0.00000000 1.00000000) *
## 13) texture_mean>=3.058688 82 35 M (0.42682927 0.57317073)
## 26) compactness_se< -3.477558 42 11 B (0.73809524 0.26190476)
## 52) texture_mean< 3.410351 36 5 B (0.86111111 0.13888889)
## 104) texture_mean>=3.101852 30 0 B (1.00000000 0.00000000) *
## 105) texture_mean< 3.101852 6 1 M (0.16666667 0.83333333) *
## 53) texture_mean>=3.410351 6 0 M (0.00000000 1.00000000) *
## 27) compactness_se>=-3.477558 40 4 M (0.10000000 0.90000000)
## 54) smoothness_mean< -2.638103 3 0 B (1.00000000 0.00000000) *
## 55) smoothness_mean>=-2.638103 37 1 M (0.02702703 0.97297297)
## 110) smoothness_worst>=-1.417917 1 0 B (1.00000000 0.00000000) *
## 111) smoothness_worst< -1.417917 36 0 M (0.00000000 1.00000000) *
## 7) smoothness_mean>=-2.296611 135 41 M (0.30370370 0.69629630)
## 14) texture_worst< 4.391935 49 19 B (0.61224490 0.38775510)
## 28) compactness_se>=-3.57366 43 13 B (0.69767442 0.30232558)
## 56) texture_mean< 2.857891 30 5 B (0.83333333 0.16666667)
## 112) symmetry_worst< -1.001713 28 3 B (0.89285714 0.10714286) *
## 113) symmetry_worst>=-1.001713 2 0 M (0.00000000 1.00000000) *
## 57) texture_mean>=2.857891 13 5 M (0.38461538 0.61538462)
## 114) texture_mean>=2.870166 5 0 B (1.00000000 0.00000000) *
## 115) texture_mean< 2.870166 8 0 M (0.00000000 1.00000000) *
## 29) compactness_se< -3.57366 6 0 M (0.00000000 1.00000000) *
## 15) texture_worst>=4.391935 86 11 M (0.12790698 0.87209302)
## 30) smoothness_mean>=-2.093138 12 3 B (0.75000000 0.25000000)
## 60) compactness_se< -3.039084 9 0 B (1.00000000 0.00000000) *
## 61) compactness_se>=-3.039084 3 0 M (0.00000000 1.00000000) *
## 31) smoothness_mean< -2.093138 74 2 M (0.02702703 0.97297297)
## 62) texture_worst< 4.541747 10 2 M (0.20000000 0.80000000)
## 124) texture_worst>=4.530419 1 0 B (1.00000000 0.00000000) *
## 125) texture_worst< 4.530419 9 1 M (0.11111111 0.88888889) *
## 63) texture_worst>=4.541747 64 0 M (0.00000000 1.00000000) *
##
## $trees[[9]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 396 B (0.56578947 0.43421053)
## 2) texture_mean< 2.960623 438 139 B (0.68264840 0.31735160)
## 4) smoothness_worst< -1.500665 225 45 B (0.80000000 0.20000000)
## 8) symmetry_worst>=-1.748321 81 5 B (0.93827160 0.06172840)
## 16) smoothness_mean< -2.171581 78 2 B (0.97435897 0.02564103)
## 32) compactness_se>=-4.602061 70 0 B (1.00000000 0.00000000) *
## 33) compactness_se< -4.602061 8 2 B (0.75000000 0.25000000)
## 66) compactness_se< -4.691273 6 0 B (1.00000000 0.00000000) *
## 67) compactness_se>=-4.691273 2 0 M (0.00000000 1.00000000) *
## 17) smoothness_mean>=-2.171581 3 0 M (0.00000000 1.00000000) *
## 9) symmetry_worst< -1.748321 144 40 B (0.72222222 0.27777778)
## 18) symmetry_worst< -1.815934 97 9 B (0.90721649 0.09278351)
## 36) symmetry_worst>=-2.179403 78 3 B (0.96153846 0.03846154)
## 72) compactness_se< -3.49316 69 1 B (0.98550725 0.01449275) *
## 73) compactness_se>=-3.49316 9 2 B (0.77777778 0.22222222) *
## 37) symmetry_worst< -2.179403 19 6 B (0.68421053 0.31578947)
## 74) texture_mean< 2.876144 10 0 B (1.00000000 0.00000000) *
## 75) texture_mean>=2.876144 9 3 M (0.33333333 0.66666667) *
## 19) symmetry_worst>=-1.815934 47 16 M (0.34042553 0.65957447)
## 38) compactness_se< -4.354176 6 0 B (1.00000000 0.00000000) *
## 39) compactness_se>=-4.354176 41 10 M (0.24390244 0.75609756)
## 78) smoothness_mean>=-2.313605 4 0 B (1.00000000 0.00000000) *
## 79) smoothness_mean< -2.313605 37 6 M (0.16216216 0.83783784) *
## 5) smoothness_worst>=-1.500665 213 94 B (0.55868545 0.44131455)
## 10) smoothness_mean>=-2.290163 107 27 B (0.74766355 0.25233645)
## 20) symmetry_worst< -1.511499 76 7 B (0.90789474 0.09210526)
## 40) smoothness_worst>=-1.498447 74 5 B (0.93243243 0.06756757)
## 80) smoothness_mean< -2.007355 73 4 B (0.94520548 0.05479452) *
## 81) smoothness_mean>=-2.007355 1 0 M (0.00000000 1.00000000) *
## 41) smoothness_worst< -1.498447 2 0 M (0.00000000 1.00000000) *
## 21) symmetry_worst>=-1.511499 31 11 M (0.35483871 0.64516129)
## 42) smoothness_mean< -2.222401 13 4 B (0.69230769 0.30769231)
## 84) texture_worst>=4.174841 10 1 B (0.90000000 0.10000000) *
## 85) texture_worst< 4.174841 3 0 M (0.00000000 1.00000000) *
## 43) smoothness_mean>=-2.222401 18 2 M (0.11111111 0.88888889)
## 86) smoothness_mean>=-2.000349 1 0 B (1.00000000 0.00000000) *
## 87) smoothness_mean< -2.000349 17 1 M (0.05882353 0.94117647) *
## 11) smoothness_mean< -2.290163 106 39 M (0.36792453 0.63207547)
## 22) symmetry_worst>=-1.759286 47 16 B (0.65957447 0.34042553)
## 44) smoothness_mean< -2.296107 42 11 B (0.73809524 0.26190476)
## 88) texture_worst>=4.382736 24 2 B (0.91666667 0.08333333) *
## 89) texture_worst< 4.382736 18 9 B (0.50000000 0.50000000) *
## 45) smoothness_mean>=-2.296107 5 0 M (0.00000000 1.00000000) *
## 23) symmetry_worst< -1.759286 59 8 M (0.13559322 0.86440678)
## 46) smoothness_mean< -2.3918 6 0 B (1.00000000 0.00000000) *
## 47) smoothness_mean>=-2.3918 53 2 M (0.03773585 0.96226415)
## 94) texture_mean< 2.755881 2 0 B (1.00000000 0.00000000) *
## 95) texture_mean>=2.755881 51 0 M (0.00000000 1.00000000) *
## 3) texture_mean>=2.960623 474 217 M (0.45780591 0.54219409)
## 6) symmetry_worst< -2.01934 124 44 B (0.64516129 0.35483871)
## 12) compactness_se< -3.037823 112 33 B (0.70535714 0.29464286)
## 24) texture_mean>=3.336125 21 0 B (1.00000000 0.00000000) *
## 25) texture_mean< 3.336125 91 33 B (0.63736264 0.36263736)
## 50) smoothness_mean>=-2.330377 36 4 B (0.88888889 0.11111111)
## 100) smoothness_worst< -1.44137 34 2 B (0.94117647 0.05882353) *
## 101) smoothness_worst>=-1.44137 2 0 M (0.00000000 1.00000000) *
## 51) smoothness_mean< -2.330377 55 26 M (0.47272727 0.52727273)
## 102) smoothness_worst< -1.604518 21 2 B (0.90476190 0.09523810) *
## 103) smoothness_worst>=-1.604518 34 7 M (0.20588235 0.79411765) *
## 13) compactness_se>=-3.037823 12 1 M (0.08333333 0.91666667)
## 26) texture_mean< 3.005071 1 0 B (1.00000000 0.00000000) *
## 27) texture_mean>=3.005071 11 0 M (0.00000000 1.00000000) *
## 7) symmetry_worst>=-2.01934 350 137 M (0.39142857 0.60857143)
## 14) texture_worst< 5.034396 276 131 M (0.47463768 0.52536232)
## 28) texture_mean>=3.192731 36 3 B (0.91666667 0.08333333)
## 56) symmetry_worst< -1.345645 34 1 B (0.97058824 0.02941176)
## 112) texture_worst>=4.863973 33 0 B (1.00000000 0.00000000) *
## 113) texture_worst< 4.863973 1 0 M (0.00000000 1.00000000) *
## 57) symmetry_worst>=-1.345645 2 0 M (0.00000000 1.00000000) *
## 29) texture_mean< 3.192731 240 98 M (0.40833333 0.59166667)
## 58) smoothness_worst>=-1.443422 76 30 B (0.60526316 0.39473684)
## 116) texture_worst>=4.641775 58 12 B (0.79310345 0.20689655) *
## 117) texture_worst< 4.641775 18 0 M (0.00000000 1.00000000) *
## 59) smoothness_worst< -1.443422 164 52 M (0.31707317 0.68292683)
## 118) smoothness_worst< -1.556752 52 25 B (0.51923077 0.48076923) *
## 119) smoothness_worst>=-1.556752 112 25 M (0.22321429 0.77678571) *
## 15) texture_worst>=5.034396 74 6 M (0.08108108 0.91891892)
## 30) smoothness_mean< -2.526959 4 0 B (1.00000000 0.00000000) *
## 31) smoothness_mean>=-2.526959 70 2 M (0.02857143 0.97142857)
## 62) texture_mean>=3.35917 21 2 M (0.09523810 0.90476190)
## 124) texture_mean< 3.386045 2 0 B (1.00000000 0.00000000) *
## 125) texture_mean>=3.386045 19 0 M (0.00000000 1.00000000) *
## 63) texture_mean< 3.35917 49 0 M (0.00000000 1.00000000) *
##
## $trees[[10]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 403 B (0.55811404 0.44188596)
## 2) texture_worst< 4.260219 133 21 B (0.84210526 0.15789474)
## 4) symmetry_worst< -1.429489 121 13 B (0.89256198 0.10743802)
## 8) texture_mean< 2.909334 118 10 B (0.91525424 0.08474576)
## 16) compactness_se< -3.894783 62 0 B (1.00000000 0.00000000) *
## 17) compactness_se>=-3.894783 56 10 B (0.82142857 0.17857143)
## 34) compactness_se>=-3.878107 50 4 B (0.92000000 0.08000000)
## 68) texture_mean>=2.534356 46 2 B (0.95652174 0.04347826) *
## 69) texture_mean< 2.534356 4 2 B (0.50000000 0.50000000) *
## 35) compactness_se< -3.878107 6 0 M (0.00000000 1.00000000) *
## 9) texture_mean>=2.909334 3 0 M (0.00000000 1.00000000) *
## 5) symmetry_worst>=-1.429489 12 4 M (0.33333333 0.66666667)
## 10) texture_mean< 2.706904 4 0 B (1.00000000 0.00000000) *
## 11) texture_mean>=2.706904 8 0 M (0.00000000 1.00000000) *
## 3) texture_worst>=4.260219 779 382 B (0.50962773 0.49037227)
## 6) symmetry_worst< -1.52865 651 291 B (0.55299539 0.44700461)
## 12) compactness_se< -3.671151 404 151 B (0.62623762 0.37376238)
## 24) smoothness_mean>=-2.283768 83 13 B (0.84337349 0.15662651)
## 48) smoothness_worst< -1.419369 79 9 B (0.88607595 0.11392405)
## 96) texture_mean< 3.133277 67 3 B (0.95522388 0.04477612) *
## 97) texture_mean>=3.133277 12 6 B (0.50000000 0.50000000) *
## 49) smoothness_worst>=-1.419369 4 0 M (0.00000000 1.00000000) *
## 25) smoothness_mean< -2.283768 321 138 B (0.57009346 0.42990654)
## 50) texture_worst>=4.3976 278 107 B (0.61510791 0.38489209)
## 100) texture_worst< 4.517889 42 2 B (0.95238095 0.04761905) *
## 101) texture_worst>=4.517889 236 105 B (0.55508475 0.44491525) *
## 51) texture_worst< 4.3976 43 12 M (0.27906977 0.72093023)
## 102) texture_mean< 2.808677 8 0 B (1.00000000 0.00000000) *
## 103) texture_mean>=2.808677 35 4 M (0.11428571 0.88571429) *
## 13) compactness_se>=-3.671151 247 107 M (0.43319838 0.56680162)
## 26) symmetry_worst>=-1.608735 35 6 B (0.82857143 0.17142857)
## 52) smoothness_mean< -2.3007 29 1 B (0.96551724 0.03448276)
## 104) texture_worst< 4.993407 28 0 B (1.00000000 0.00000000) *
## 105) texture_worst>=4.993407 1 0 M (0.00000000 1.00000000) *
## 53) smoothness_mean>=-2.3007 6 1 M (0.16666667 0.83333333)
## 106) compactness_se>=-3.239083 1 0 B (1.00000000 0.00000000) *
## 107) compactness_se< -3.239083 5 0 M (0.00000000 1.00000000) *
## 27) symmetry_worst< -1.608735 212 78 M (0.36792453 0.63207547)
## 54) symmetry_worst< -1.762226 137 68 B (0.50364964 0.49635036)
## 108) smoothness_worst>=-1.468425 20 2 B (0.90000000 0.10000000) *
## 109) smoothness_worst< -1.468425 117 51 M (0.43589744 0.56410256) *
## 55) symmetry_worst>=-1.762226 75 9 M (0.12000000 0.88000000)
## 110) smoothness_mean>=-2.109794 4 0 B (1.00000000 0.00000000) *
## 111) smoothness_mean< -2.109794 71 5 M (0.07042254 0.92957746) *
## 7) symmetry_worst>=-1.52865 128 37 M (0.28906250 0.71093750)
## 14) texture_worst< 4.433296 26 3 B (0.88461538 0.11538462)
## 28) smoothness_mean< -2.195585 23 0 B (1.00000000 0.00000000) *
## 29) smoothness_mean>=-2.195585 3 0 M (0.00000000 1.00000000) *
## 15) texture_worst>=4.433296 102 14 M (0.13725490 0.86274510)
## 30) texture_mean< 2.947292 21 10 M (0.47619048 0.52380952)
## 60) symmetry_worst< -1.367423 10 0 B (1.00000000 0.00000000) *
## 61) symmetry_worst>=-1.367423 11 0 M (0.00000000 1.00000000) *
## 31) texture_mean>=2.947292 81 4 M (0.04938272 0.95061728)
## 62) compactness_se< -4.507761 3 1 B (0.66666667 0.33333333)
## 124) texture_mean>=3.111935 2 0 B (1.00000000 0.00000000) *
## 125) texture_mean< 3.111935 1 0 M (0.00000000 1.00000000) *
## 63) compactness_se>=-4.507761 78 2 M (0.02564103 0.97435897)
## 126) compactness_se>=-2.983317 9 1 M (0.11111111 0.88888889) *
## 127) compactness_se< -2.983317 69 1 M (0.01449275 0.98550725) *
##
## $trees[[11]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 410 B (0.55043860 0.44956140)
## 2) texture_mean< 2.931727 329 105 B (0.68085106 0.31914894)
## 4) smoothness_mean< -2.469112 36 0 B (1.00000000 0.00000000) *
## 5) smoothness_mean>=-2.469112 293 105 B (0.64163823 0.35836177)
## 10) compactness_se>=-3.427747 56 8 B (0.85714286 0.14285714)
## 20) smoothness_worst< -1.431481 48 2 B (0.95833333 0.04166667)
## 40) symmetry_worst< -1.330042 41 0 B (1.00000000 0.00000000) *
## 41) symmetry_worst>=-1.330042 7 2 B (0.71428571 0.28571429)
## 82) compactness_se>=-2.646661 5 0 B (1.00000000 0.00000000) *
## 83) compactness_se< -2.646661 2 0 M (0.00000000 1.00000000) *
## 21) smoothness_worst>=-1.431481 8 2 M (0.25000000 0.75000000)
## 42) texture_mean< 2.67609 2 0 B (1.00000000 0.00000000) *
## 43) texture_mean>=2.67609 6 0 M (0.00000000 1.00000000) *
## 11) compactness_se< -3.427747 237 97 B (0.59071730 0.40928270)
## 22) compactness_se< -3.4389 221 81 B (0.63348416 0.36651584)
## 44) texture_worst< 4.522453 152 44 B (0.71052632 0.28947368)
## 88) smoothness_mean< -2.081877 140 34 B (0.75714286 0.24285714) *
## 89) smoothness_mean>=-2.081877 12 2 M (0.16666667 0.83333333) *
## 45) texture_worst>=4.522453 69 32 M (0.46376812 0.53623188)
## 90) texture_worst>=4.543638 51 20 B (0.60784314 0.39215686) *
## 91) texture_worst< 4.543638 18 1 M (0.05555556 0.94444444) *
## 23) compactness_se>=-3.4389 16 0 M (0.00000000 1.00000000) *
## 3) texture_mean>=2.931727 583 278 M (0.47684391 0.52315609)
## 6) symmetry_worst< -2.20425 53 12 B (0.77358491 0.22641509)
## 12) texture_worst>=4.653864 43 4 B (0.90697674 0.09302326)
## 24) smoothness_mean< -2.282229 40 1 B (0.97500000 0.02500000)
## 48) texture_mean< 3.330945 35 0 B (1.00000000 0.00000000) *
## 49) texture_mean>=3.330945 5 1 B (0.80000000 0.20000000)
## 98) texture_mean>=3.357516 4 0 B (1.00000000 0.00000000) *
## 99) texture_mean< 3.357516 1 0 M (0.00000000 1.00000000) *
## 25) smoothness_mean>=-2.282229 3 0 M (0.00000000 1.00000000) *
## 13) texture_worst< 4.653864 10 2 M (0.20000000 0.80000000)
## 26) smoothness_mean>=-2.337576 3 1 B (0.66666667 0.33333333)
## 52) smoothness_mean< -2.242961 2 0 B (1.00000000 0.00000000) *
## 53) smoothness_mean>=-2.242961 1 0 M (0.00000000 1.00000000) *
## 27) smoothness_mean< -2.337576 7 0 M (0.00000000 1.00000000) *
## 7) symmetry_worst>=-2.20425 530 237 M (0.44716981 0.55283019)
## 14) texture_worst< 5.030101 444 215 M (0.48423423 0.51576577)
## 28) compactness_se>=-4.094455 315 139 B (0.55873016 0.44126984)
## 56) compactness_se< -4.05446 20 0 B (1.00000000 0.00000000) *
## 57) compactness_se>=-4.05446 295 139 B (0.52881356 0.47118644)
## 114) smoothness_worst< -1.462821 208 81 B (0.61057692 0.38942308) *
## 115) smoothness_worst>=-1.462821 87 29 M (0.33333333 0.66666667) *
## 29) compactness_se< -4.094455 129 39 M (0.30232558 0.69767442)
## 58) texture_mean>=3.181081 7 0 B (1.00000000 0.00000000) *
## 59) texture_mean< 3.181081 122 32 M (0.26229508 0.73770492)
## 118) texture_worst< 4.849569 81 32 M (0.39506173 0.60493827) *
## 119) texture_worst>=4.849569 41 0 M (0.00000000 1.00000000) *
## 15) texture_worst>=5.030101 86 22 M (0.25581395 0.74418605)
## 30) smoothness_mean< -2.505388 9 0 B (1.00000000 0.00000000) *
## 31) smoothness_mean>=-2.505388 77 13 M (0.16883117 0.83116883)
## 62) compactness_se< -4.509895 12 5 B (0.58333333 0.41666667)
## 124) texture_mean>=3.186756 7 0 B (1.00000000 0.00000000) *
## 125) texture_mean< 3.186756 5 0 M (0.00000000 1.00000000) *
## 63) compactness_se>=-4.509895 65 6 M (0.09230769 0.90769231)
## 126) smoothness_worst< -1.563077 4 1 B (0.75000000 0.25000000) *
## 127) smoothness_worst>=-1.563077 61 3 M (0.04918033 0.95081967) *
##
## $trees[[12]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 453 B (0.50328947 0.49671053)
## 2) symmetry_worst< -2.048468 145 29 B (0.80000000 0.20000000)
## 4) symmetry_worst>=-2.923662 139 23 B (0.83453237 0.16546763)
## 8) compactness_se< -3.351361 114 13 B (0.88596491 0.11403509)
## 16) symmetry_worst>=-2.379234 102 7 B (0.93137255 0.06862745)
## 32) texture_worst< 5.309594 94 3 B (0.96808511 0.03191489)
## 64) texture_mean>=3.067819 59 0 B (1.00000000 0.00000000) *
## 65) texture_mean< 3.067819 35 3 B (0.91428571 0.08571429) *
## 33) texture_worst>=5.309594 8 4 B (0.50000000 0.50000000)
## 66) texture_mean>=3.375155 4 0 B (1.00000000 0.00000000) *
## 67) texture_mean< 3.375155 4 0 M (0.00000000 1.00000000) *
## 17) symmetry_worst< -2.379234 12 6 B (0.50000000 0.50000000)
## 34) texture_mean< 2.865666 4 0 B (1.00000000 0.00000000) *
## 35) texture_mean>=2.865666 8 2 M (0.25000000 0.75000000)
## 70) symmetry_worst< -2.522371 2 0 B (1.00000000 0.00000000) *
## 71) symmetry_worst>=-2.522371 6 0 M (0.00000000 1.00000000) *
## 9) compactness_se>=-3.351361 25 10 B (0.60000000 0.40000000)
## 18) texture_mean< 3.076827 14 0 B (1.00000000 0.00000000) *
## 19) texture_mean>=3.076827 11 1 M (0.09090909 0.90909091)
## 38) smoothness_mean< -2.638103 1 0 B (1.00000000 0.00000000) *
## 39) smoothness_mean>=-2.638103 10 0 M (0.00000000 1.00000000) *
## 5) symmetry_worst< -2.923662 6 0 M (0.00000000 1.00000000) *
## 3) symmetry_worst>=-2.048468 767 343 M (0.44719687 0.55280313)
## 6) texture_worst< 4.592857 350 132 B (0.62285714 0.37714286)
## 12) texture_mean< 2.708379 36 1 B (0.97222222 0.02777778)
## 24) texture_mean>=2.496294 30 0 B (1.00000000 0.00000000) *
## 25) texture_mean< 2.496294 6 1 B (0.83333333 0.16666667)
## 50) texture_mean< 2.434062 5 0 B (1.00000000 0.00000000) *
## 51) texture_mean>=2.434062 1 0 M (0.00000000 1.00000000) *
## 13) texture_mean>=2.708379 314 131 B (0.58280255 0.41719745)
## 26) texture_mean>=2.771335 256 93 B (0.63671875 0.36328125)
## 52) texture_mean< 2.927988 146 34 B (0.76712329 0.23287671)
## 104) texture_mean>=2.893423 40 0 B (1.00000000 0.00000000) *
## 105) texture_mean< 2.893423 106 34 B (0.67924528 0.32075472) *
## 53) texture_mean>=2.927988 110 51 M (0.46363636 0.53636364)
## 106) texture_worst>=4.528527 38 9 B (0.76315789 0.23684211) *
## 107) texture_worst< 4.528527 72 22 M (0.30555556 0.69444444) *
## 27) texture_mean< 2.771335 58 20 M (0.34482759 0.65517241)
## 54) smoothness_mean< -2.443516 7 0 B (1.00000000 0.00000000) *
## 55) smoothness_mean>=-2.443516 51 13 M (0.25490196 0.74509804)
## 110) compactness_se>=-3.364454 5 0 B (1.00000000 0.00000000) *
## 111) compactness_se< -3.364454 46 8 M (0.17391304 0.82608696) *
## 7) texture_worst>=4.592857 417 125 M (0.29976019 0.70023981)
## 14) smoothness_mean>=-2.093138 19 3 B (0.84210526 0.15789474)
## 28) smoothness_mean< -2.073133 16 0 B (1.00000000 0.00000000) *
## 29) smoothness_mean>=-2.073133 3 0 M (0.00000000 1.00000000) *
## 15) smoothness_mean< -2.093138 398 109 M (0.27386935 0.72613065)
## 30) smoothness_mean< -2.549773 8 0 B (1.00000000 0.00000000) *
## 31) smoothness_mean>=-2.549773 390 101 M (0.25897436 0.74102564)
## 62) compactness_se>=-4.096569 233 77 M (0.33047210 0.66952790)
## 124) compactness_se< -3.721197 85 42 M (0.49411765 0.50588235) *
## 125) compactness_se>=-3.721197 148 35 M (0.23648649 0.76351351) *
## 63) compactness_se< -4.096569 157 24 M (0.15286624 0.84713376)
## 126) smoothness_worst>=-1.399898 4 0 B (1.00000000 0.00000000) *
## 127) smoothness_worst< -1.399898 153 20 M (0.13071895 0.86928105) *
##
## $trees[[13]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 435 B (0.52302632 0.47697368)
## 2) texture_mean< 2.711046 40 7 B (0.82500000 0.17500000)
## 4) compactness_se< -3.039458 37 4 B (0.89189189 0.10810811)
## 8) texture_mean>=2.479051 26 0 B (1.00000000 0.00000000) *
## 9) texture_mean< 2.479051 11 4 B (0.63636364 0.36363636)
## 18) texture_mean< 2.471475 7 0 B (1.00000000 0.00000000) *
## 19) texture_mean>=2.471475 4 0 M (0.00000000 1.00000000) *
## 5) compactness_se>=-3.039458 3 0 M (0.00000000 1.00000000) *
## 3) texture_mean>=2.711046 872 428 B (0.50917431 0.49082569)
## 6) texture_mean>=2.856753 728 334 B (0.54120879 0.45879121)
## 12) symmetry_worst>=-2.491275 713 319 B (0.55259467 0.44740533)
## 24) texture_worst< 4.458511 92 22 B (0.76086957 0.23913043)
## 48) symmetry_worst< -1.735506 51 2 B (0.96078431 0.03921569)
## 96) compactness_se>=-4.327955 45 0 B (1.00000000 0.00000000) *
## 97) compactness_se< -4.327955 6 2 B (0.66666667 0.33333333) *
## 49) symmetry_worst>=-1.735506 41 20 B (0.51219512 0.48780488)
## 98) smoothness_worst>=-1.566151 30 9 B (0.70000000 0.30000000) *
## 99) smoothness_worst< -1.566151 11 0 M (0.00000000 1.00000000) *
## 25) texture_worst>=4.458511 621 297 B (0.52173913 0.47826087)
## 50) compactness_se>=-4.671834 576 263 B (0.54340278 0.45659722)
## 100) smoothness_mean< -2.335108 312 114 B (0.63461538 0.36538462) *
## 101) smoothness_mean>=-2.335108 264 115 M (0.43560606 0.56439394) *
## 51) compactness_se< -4.671834 45 11 M (0.24444444 0.75555556)
## 102) compactness_se< -4.803674 8 0 B (1.00000000 0.00000000) *
## 103) compactness_se>=-4.803674 37 3 M (0.08108108 0.91891892) *
## 13) symmetry_worst< -2.491275 15 0 M (0.00000000 1.00000000) *
## 7) texture_mean< 2.856753 144 50 M (0.34722222 0.65277778)
## 14) smoothness_mean>=-2.271585 33 12 B (0.63636364 0.36363636)
## 28) symmetry_worst< -1.524164 22 2 B (0.90909091 0.09090909)
## 56) smoothness_mean< -2.061717 21 1 B (0.95238095 0.04761905)
## 112) texture_mean< 2.834388 18 0 B (1.00000000 0.00000000) *
## 113) texture_mean>=2.834388 3 1 B (0.66666667 0.33333333) *
## 57) smoothness_mean>=-2.061717 1 0 M (0.00000000 1.00000000) *
## 29) symmetry_worst>=-1.524164 11 1 M (0.09090909 0.90909091)
## 58) texture_mean>=2.850705 1 0 B (1.00000000 0.00000000) *
## 59) texture_mean< 2.850705 10 0 M (0.00000000 1.00000000) *
## 15) smoothness_mean< -2.271585 111 29 M (0.26126126 0.73873874)
## 30) smoothness_mean< -2.447973 7 0 B (1.00000000 0.00000000) *
## 31) smoothness_mean>=-2.447973 104 22 M (0.21153846 0.78846154)
## 62) texture_mean< 2.758426 33 13 M (0.39393939 0.60606061)
## 124) compactness_se< -3.697394 10 0 B (1.00000000 0.00000000) *
## 125) compactness_se>=-3.697394 23 3 M (0.13043478 0.86956522) *
## 63) texture_mean>=2.758426 71 9 M (0.12676056 0.87323944)
## 126) compactness_se>=-3.483667 4 1 B (0.75000000 0.25000000) *
## 127) compactness_se< -3.483667 67 6 M (0.08955224 0.91044776) *
##
## $trees[[14]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 454 M (0.49780702 0.50219298)
## 2) smoothness_mean>=-2.275457 235 83 B (0.64680851 0.35319149)
## 4) compactness_se< -3.646366 137 26 B (0.81021898 0.18978102)
## 8) smoothness_worst< -1.459555 60 0 B (1.00000000 0.00000000) *
## 9) smoothness_worst>=-1.459555 77 26 B (0.66233766 0.33766234)
## 18) compactness_se< -4.030558 42 4 B (0.90476190 0.09523810)
## 36) texture_mean>=2.979048 33 0 B (1.00000000 0.00000000) *
## 37) texture_mean< 2.979048 9 4 B (0.55555556 0.44444444)
## 74) texture_mean< 2.950291 5 0 B (1.00000000 0.00000000) *
## 75) texture_mean>=2.950291 4 0 M (0.00000000 1.00000000) *
## 19) compactness_se>=-4.030558 35 13 M (0.37142857 0.62857143)
## 38) texture_mean< 2.900047 15 2 B (0.86666667 0.13333333)
## 76) texture_mean>=2.532482 13 0 B (1.00000000 0.00000000) *
## 77) texture_mean< 2.532482 2 0 M (0.00000000 1.00000000) *
## 39) texture_mean>=2.900047 20 0 M (0.00000000 1.00000000) *
## 5) compactness_se>=-3.646366 98 41 M (0.41836735 0.58163265)
## 10) smoothness_mean>=-2.093138 20 3 B (0.85000000 0.15000000)
## 20) symmetry_worst< -1.566249 17 0 B (1.00000000 0.00000000) *
## 21) symmetry_worst>=-1.566249 3 0 M (0.00000000 1.00000000) *
## 11) smoothness_mean< -2.093138 78 24 M (0.30769231 0.69230769)
## 22) texture_worst< 4.548911 44 20 B (0.54545455 0.45454545)
## 44) symmetry_worst< -1.814978 14 1 B (0.92857143 0.07142857)
## 88) compactness_se>=-3.557543 13 0 B (1.00000000 0.00000000) *
## 89) compactness_se< -3.557543 1 0 M (0.00000000 1.00000000) *
## 45) symmetry_worst>=-1.814978 30 11 M (0.36666667 0.63333333)
## 90) compactness_se>=-3.344528 18 7 B (0.61111111 0.38888889) *
## 91) compactness_se< -3.344528 12 0 M (0.00000000 1.00000000) *
## 23) texture_worst>=4.548911 34 0 M (0.00000000 1.00000000) *
## 3) smoothness_mean< -2.275457 677 302 M (0.44608567 0.55391433)
## 6) compactness_se< -4.705732 27 2 B (0.92592593 0.07407407)
## 12) symmetry_worst< -1.170399 25 0 B (1.00000000 0.00000000) *
## 13) symmetry_worst>=-1.170399 2 0 M (0.00000000 1.00000000) *
## 7) compactness_se>=-4.705732 650 277 M (0.42615385 0.57384615)
## 14) symmetry_worst>=-1.681676 237 103 B (0.56540084 0.43459916)
## 28) compactness_se>=-4.089478 159 51 B (0.67924528 0.32075472)
## 56) texture_worst< 5.003123 149 41 B (0.72483221 0.27516779)
## 112) compactness_se< -3.483184 88 12 B (0.86363636 0.13636364) *
## 113) compactness_se>=-3.483184 61 29 B (0.52459016 0.47540984) *
## 57) texture_worst>=5.003123 10 0 M (0.00000000 1.00000000) *
## 29) compactness_se< -4.089478 78 26 M (0.33333333 0.66666667)
## 58) compactness_se< -4.539406 22 7 B (0.68181818 0.31818182)
## 116) texture_worst>=4.62656 15 0 B (1.00000000 0.00000000) *
## 117) texture_worst< 4.62656 7 0 M (0.00000000 1.00000000) *
## 59) compactness_se>=-4.539406 56 11 M (0.19642857 0.80357143)
## 118) texture_worst< 4.496329 15 5 B (0.66666667 0.33333333) *
## 119) texture_worst>=4.496329 41 1 M (0.02439024 0.97560976) *
## 15) symmetry_worst< -1.681676 413 143 M (0.34624697 0.65375303)
## 30) smoothness_worst< -1.604009 63 22 B (0.65079365 0.34920635)
## 60) symmetry_worst< -1.895532 43 7 B (0.83720930 0.16279070)
## 120) smoothness_worst>=-1.694089 29 0 B (1.00000000 0.00000000) *
## 121) smoothness_worst< -1.694089 14 7 B (0.50000000 0.50000000) *
## 61) symmetry_worst>=-1.895532 20 5 M (0.25000000 0.75000000)
## 122) texture_mean< 2.923842 3 0 B (1.00000000 0.00000000) *
## 123) texture_mean>=2.923842 17 2 M (0.11764706 0.88235294) *
## 31) smoothness_worst>=-1.604009 350 102 M (0.29142857 0.70857143)
## 62) smoothness_mean< -2.488015 18 3 B (0.83333333 0.16666667)
## 124) smoothness_worst>=-1.572781 15 0 B (1.00000000 0.00000000) *
## 125) smoothness_worst< -1.572781 3 0 M (0.00000000 1.00000000) *
## 63) smoothness_mean>=-2.488015 332 87 M (0.26204819 0.73795181)
## 126) symmetry_worst< -2.25148 23 8 B (0.65217391 0.34782609) *
## 127) symmetry_worst>=-2.25148 309 72 M (0.23300971 0.76699029) *
##
## $trees[[15]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 397 B (0.56469298 0.43530702)
## 2) symmetry_worst< -2.048053 139 37 B (0.73381295 0.26618705)
## 4) smoothness_worst>=-1.493231 40 0 B (1.00000000 0.00000000) *
## 5) smoothness_worst< -1.493231 99 37 B (0.62626263 0.37373737)
## 10) smoothness_worst< -1.499051 91 29 B (0.68131868 0.31868132)
## 20) texture_worst< 4.371728 16 0 B (1.00000000 0.00000000) *
## 21) texture_worst>=4.371728 75 29 B (0.61333333 0.38666667)
## 42) texture_worst>=4.755481 42 9 B (0.78571429 0.21428571)
## 84) symmetry_worst< -2.063958 38 5 B (0.86842105 0.13157895) *
## 85) symmetry_worst>=-2.063958 4 0 M (0.00000000 1.00000000) *
## 43) texture_worst< 4.755481 33 13 M (0.39393939 0.60606061)
## 86) symmetry_worst>=-2.096646 5 0 B (1.00000000 0.00000000) *
## 87) symmetry_worst< -2.096646 28 8 M (0.28571429 0.71428571) *
## 11) smoothness_worst>=-1.499051 8 0 M (0.00000000 1.00000000) *
## 3) symmetry_worst>=-2.048053 773 360 B (0.53428202 0.46571798)
## 6) texture_mean< 3.058002 556 220 B (0.60431655 0.39568345)
## 12) texture_mean>=2.987952 162 37 B (0.77160494 0.22839506)
## 24) smoothness_worst>=-1.60795 148 25 B (0.83108108 0.16891892)
## 48) smoothness_mean< -2.203647 134 18 B (0.86567164 0.13432836)
## 96) texture_worst>=4.779866 57 1 B (0.98245614 0.01754386) *
## 97) texture_worst< 4.779866 77 17 B (0.77922078 0.22077922) *
## 49) smoothness_mean>=-2.203647 14 7 B (0.50000000 0.50000000)
## 98) texture_worst< 4.69039 7 0 B (1.00000000 0.00000000) *
## 99) texture_worst>=4.69039 7 0 M (0.00000000 1.00000000) *
## 25) smoothness_worst< -1.60795 14 2 M (0.14285714 0.85714286)
## 50) texture_mean< 2.999433 2 0 B (1.00000000 0.00000000) *
## 51) texture_mean>=2.999433 12 0 M (0.00000000 1.00000000) *
## 13) texture_mean< 2.987952 394 183 B (0.53553299 0.46446701)
## 26) texture_mean< 2.976294 378 167 B (0.55820106 0.44179894)
## 52) symmetry_worst>=-1.990832 346 142 B (0.58959538 0.41040462)
## 104) symmetry_worst< -1.861897 53 5 B (0.90566038 0.09433962) *
## 105) symmetry_worst>=-1.861897 293 137 B (0.53242321 0.46757679) *
## 53) symmetry_worst< -1.990832 32 7 M (0.21875000 0.78125000)
## 106) smoothness_mean< -2.445594 4 0 B (1.00000000 0.00000000) *
## 107) smoothness_mean>=-2.445594 28 3 M (0.10714286 0.89285714) *
## 27) texture_mean>=2.976294 16 0 M (0.00000000 1.00000000) *
## 7) texture_mean>=3.058002 217 77 M (0.35483871 0.64516129)
## 14) texture_worst>=4.753106 161 76 M (0.47204969 0.52795031)
## 28) compactness_se>=-3.902076 90 31 B (0.65555556 0.34444444)
## 56) compactness_se< -3.721197 30 1 B (0.96666667 0.03333333)
## 112) smoothness_worst< -1.446315 29 0 B (1.00000000 0.00000000) *
## 113) smoothness_worst>=-1.446315 1 0 M (0.00000000 1.00000000) *
## 57) compactness_se>=-3.721197 60 30 B (0.50000000 0.50000000)
## 114) texture_worst< 5.051039 45 15 B (0.66666667 0.33333333) *
## 115) texture_worst>=5.051039 15 0 M (0.00000000 1.00000000) *
## 29) compactness_se< -3.902076 71 17 M (0.23943662 0.76056338)
## 58) smoothness_worst< -1.622284 6 0 B (1.00000000 0.00000000) *
## 59) smoothness_worst>=-1.622284 65 11 M (0.16923077 0.83076923)
## 118) symmetry_worst>=-1.733593 36 11 M (0.30555556 0.69444444) *
## 119) symmetry_worst< -1.733593 29 0 M (0.00000000 1.00000000) *
## 15) texture_worst< 4.753106 56 1 M (0.01785714 0.98214286)
## 30) smoothness_mean< -2.498594 1 0 B (1.00000000 0.00000000) *
## 31) smoothness_mean>=-2.498594 55 0 M (0.00000000 1.00000000) *
##
## $trees[[16]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 451 M (0.49451754 0.50548246)
## 2) symmetry_worst< -1.424186 809 378 B (0.53275649 0.46724351)
## 4) smoothness_worst>=-1.49223 347 131 B (0.62247839 0.37752161)
## 8) texture_worst>=4.599485 199 53 B (0.73366834 0.26633166)
## 16) texture_mean< 3.083495 124 17 B (0.86290323 0.13709677)
## 32) compactness_se>=-4.280193 118 12 B (0.89830508 0.10169492)
## 64) smoothness_mean< -2.184141 108 6 B (0.94444444 0.05555556) *
## 65) smoothness_mean>=-2.184141 10 4 M (0.40000000 0.60000000) *
## 33) compactness_se< -4.280193 6 1 M (0.16666667 0.83333333)
## 66) texture_mean< 2.921626 1 0 B (1.00000000 0.00000000) *
## 67) texture_mean>=2.921626 5 0 M (0.00000000 1.00000000) *
## 17) texture_mean>=3.083495 75 36 B (0.52000000 0.48000000)
## 34) texture_mean>=3.192731 43 10 B (0.76744186 0.23255814)
## 68) texture_worst< 5.402766 37 4 B (0.89189189 0.10810811) *
## 69) texture_worst>=5.402766 6 0 M (0.00000000 1.00000000) *
## 35) texture_mean< 3.192731 32 6 M (0.18750000 0.81250000)
## 70) symmetry_worst< -2.063609 10 4 B (0.60000000 0.40000000) *
## 71) symmetry_worst>=-2.063609 22 0 M (0.00000000 1.00000000) *
## 9) texture_worst< 4.599485 148 70 M (0.47297297 0.52702703)
## 18) smoothness_worst< -1.450406 101 37 B (0.63366337 0.36633663)
## 36) compactness_se< -3.458298 70 11 B (0.84285714 0.15714286)
## 72) texture_worst>=4.180672 60 6 B (0.90000000 0.10000000) *
## 73) texture_worst< 4.180672 10 5 B (0.50000000 0.50000000) *
## 37) compactness_se>=-3.458298 31 5 M (0.16129032 0.83870968)
## 74) smoothness_mean>=-2.27605 10 5 B (0.50000000 0.50000000) *
## 75) smoothness_mean< -2.27605 21 0 M (0.00000000 1.00000000) *
## 19) smoothness_worst>=-1.450406 47 6 M (0.12765957 0.87234043)
## 38) texture_worst< 4.30106 10 4 B (0.60000000 0.40000000)
## 76) smoothness_worst>=-1.434633 6 0 B (1.00000000 0.00000000) *
## 77) smoothness_worst< -1.434633 4 0 M (0.00000000 1.00000000) *
## 39) texture_worst>=4.30106 37 0 M (0.00000000 1.00000000) *
## 5) smoothness_worst< -1.49223 462 215 M (0.46536797 0.53463203)
## 10) symmetry_worst< -1.815934 238 97 B (0.59243697 0.40756303)
## 20) smoothness_worst< -1.500665 222 82 B (0.63063063 0.36936937)
## 40) smoothness_worst>=-1.539367 63 10 B (0.84126984 0.15873016)
## 80) texture_worst< 4.566482 31 0 B (1.00000000 0.00000000) *
## 81) texture_worst>=4.566482 32 10 B (0.68750000 0.31250000) *
## 41) smoothness_worst< -1.539367 159 72 B (0.54716981 0.45283019)
## 82) smoothness_worst< -1.559798 126 47 B (0.62698413 0.37301587) *
## 83) smoothness_worst>=-1.559798 33 8 M (0.24242424 0.75757576) *
## 21) smoothness_worst>=-1.500665 16 1 M (0.06250000 0.93750000)
## 42) texture_worst< 4.593754 1 0 B (1.00000000 0.00000000) *
## 43) texture_worst>=4.593754 15 0 M (0.00000000 1.00000000) *
## 11) symmetry_worst>=-1.815934 224 74 M (0.33035714 0.66964286)
## 22) texture_mean< 2.824054 23 2 B (0.91304348 0.08695652)
## 44) symmetry_worst>=-1.792649 21 0 B (1.00000000 0.00000000) *
## 45) symmetry_worst< -1.792649 2 0 M (0.00000000 1.00000000) *
## 23) texture_mean>=2.824054 201 53 M (0.26368159 0.73631841)
## 46) texture_worst>=4.751723 50 25 B (0.50000000 0.50000000)
## 92) texture_worst< 4.996236 36 11 B (0.69444444 0.30555556) *
## 93) texture_worst>=4.996236 14 0 M (0.00000000 1.00000000) *
## 47) texture_worst< 4.751723 151 28 M (0.18543046 0.81456954)
## 94) smoothness_mean>=-2.231223 11 3 B (0.72727273 0.27272727) *
## 95) smoothness_mean< -2.231223 140 20 M (0.14285714 0.85714286) *
## 3) symmetry_worst>=-1.424186 103 20 M (0.19417476 0.80582524)
## 6) smoothness_worst< -1.501886 16 4 B (0.75000000 0.25000000)
## 12) texture_mean< 3.126045 12 0 B (1.00000000 0.00000000) *
## 13) texture_mean>=3.126045 4 0 M (0.00000000 1.00000000) *
## 7) smoothness_worst>=-1.501886 87 8 M (0.09195402 0.90804598)
## 14) texture_mean< 2.77286 11 5 M (0.45454545 0.54545455)
## 28) compactness_se< -3.173162 5 0 B (1.00000000 0.00000000) *
## 29) compactness_se>=-3.173162 6 0 M (0.00000000 1.00000000) *
## 15) texture_mean>=2.77286 76 3 M (0.03947368 0.96052632)
## 30) compactness_se>=-2.524297 3 1 B (0.66666667 0.33333333)
## 60) texture_mean< 2.915767 2 0 B (1.00000000 0.00000000) *
## 61) texture_mean>=2.915767 1 0 M (0.00000000 1.00000000) *
## 31) compactness_se< -2.524297 73 1 M (0.01369863 0.98630137)
## 62) compactness_se< -4.171724 2 1 B (0.50000000 0.50000000)
## 124) texture_mean< 3.068796 1 0 B (1.00000000 0.00000000) *
## 125) texture_mean>=3.068796 1 0 M (0.00000000 1.00000000) *
## 63) compactness_se>=-4.171724 71 0 M (0.00000000 1.00000000) *
##
## $trees[[17]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 403 M (0.44188596 0.55811404)
## 2) compactness_se< -4.706178 28 2 B (0.92857143 0.07142857)
## 4) symmetry_worst< -1.170399 26 0 B (1.00000000 0.00000000) *
## 5) symmetry_worst>=-1.170399 2 0 M (0.00000000 1.00000000) *
## 3) compactness_se>=-4.706178 884 377 M (0.42647059 0.57352941)
## 6) compactness_se>=-4.687525 855 375 M (0.43859649 0.56140351)
## 12) smoothness_worst< -1.500665 467 233 B (0.50107066 0.49892934)
## 24) texture_mean< 3.025285 237 89 B (0.62447257 0.37552743)
## 48) compactness_se>=-4.327955 200 61 B (0.69500000 0.30500000)
## 96) smoothness_worst>=-1.568787 143 29 B (0.79720280 0.20279720) *
## 97) smoothness_worst< -1.568787 57 25 M (0.43859649 0.56140351) *
## 49) compactness_se< -4.327955 37 9 M (0.24324324 0.75675676)
## 98) texture_mean< 2.85595 6 0 B (1.00000000 0.00000000) *
## 99) texture_mean>=2.85595 31 3 M (0.09677419 0.90322581) *
## 25) texture_mean>=3.025285 230 86 M (0.37391304 0.62608696)
## 50) smoothness_mean>=-2.317271 29 8 B (0.72413793 0.27586207)
## 100) smoothness_mean< -2.266276 19 0 B (1.00000000 0.00000000) *
## 101) smoothness_mean>=-2.266276 10 2 M (0.20000000 0.80000000) *
## 51) smoothness_mean< -2.317271 201 65 M (0.32338308 0.67661692)
## 102) smoothness_mean< -2.409448 104 51 M (0.49038462 0.50961538) *
## 103) smoothness_mean>=-2.409448 97 14 M (0.14432990 0.85567010) *
## 13) smoothness_worst>=-1.500665 388 141 M (0.36340206 0.63659794)
## 26) texture_worst>=4.628023 152 76 B (0.50000000 0.50000000)
## 52) texture_worst< 4.682677 25 2 B (0.92000000 0.08000000)
## 104) texture_mean>=2.836998 23 0 B (1.00000000 0.00000000) *
## 105) texture_mean< 2.836998 2 0 M (0.00000000 1.00000000) *
## 53) texture_worst>=4.682677 127 53 M (0.41732283 0.58267717)
## 106) smoothness_worst>=-1.430927 50 16 B (0.68000000 0.32000000) *
## 107) smoothness_worst< -1.430927 77 19 M (0.24675325 0.75324675) *
## 27) texture_worst< 4.628023 236 65 M (0.27542373 0.72457627)
## 54) compactness_se< -4.50262 10 0 B (1.00000000 0.00000000) *
## 55) compactness_se>=-4.50262 226 55 M (0.24336283 0.75663717)
## 110) symmetry_worst< -1.995212 21 7 B (0.66666667 0.33333333) *
## 111) symmetry_worst>=-1.995212 205 41 M (0.20000000 0.80000000) *
## 7) compactness_se< -4.687525 29 2 M (0.06896552 0.93103448)
## 14) smoothness_mean>=-2.441817 2 0 B (1.00000000 0.00000000) *
## 15) smoothness_mean< -2.441817 27 0 M (0.00000000 1.00000000) *
##
## $trees[[18]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 450 B (0.50657895 0.49342105)
## 2) texture_worst>=5.16917 76 18 B (0.76315789 0.23684211)
## 4) texture_worst< 5.636459 71 13 B (0.81690141 0.18309859)
## 8) smoothness_mean< -2.331159 67 9 B (0.86567164 0.13432836)
## 16) compactness_se< -3.328636 63 5 B (0.92063492 0.07936508)
## 32) symmetry_worst< -1.41032 62 4 B (0.93548387 0.06451613)
## 64) compactness_se>=-3.859901 36 0 B (1.00000000 0.00000000) *
## 65) compactness_se< -3.859901 26 4 B (0.84615385 0.15384615) *
## 33) symmetry_worst>=-1.41032 1 0 M (0.00000000 1.00000000) *
## 17) compactness_se>=-3.328636 4 0 M (0.00000000 1.00000000) *
## 9) smoothness_mean>=-2.331159 4 0 M (0.00000000 1.00000000) *
## 5) texture_worst>=5.636459 5 0 M (0.00000000 1.00000000) *
## 3) texture_worst< 5.16917 836 404 M (0.48325359 0.51674641)
## 6) texture_mean< 2.708379 31 3 B (0.90322581 0.09677419)
## 12) symmetry_worst< -1.112025 29 1 B (0.96551724 0.03448276)
## 24) smoothness_mean< -2.114874 24 0 B (1.00000000 0.00000000) *
## 25) smoothness_mean>=-2.114874 5 1 B (0.80000000 0.20000000)
## 50) smoothness_mean>=-2.060513 4 0 B (1.00000000 0.00000000) *
## 51) smoothness_mean< -2.060513 1 0 M (0.00000000 1.00000000) *
## 13) symmetry_worst>=-1.112025 2 0 M (0.00000000 1.00000000) *
## 7) texture_mean>=2.708379 805 376 M (0.46708075 0.53291925)
## 14) symmetry_worst< -1.427209 735 359 M (0.48843537 0.51156463)
## 28) compactness_se< -4.706178 13 0 B (1.00000000 0.00000000) *
## 29) compactness_se>=-4.706178 722 346 M (0.47922438 0.52077562)
## 58) compactness_se>=-4.681232 704 346 M (0.49147727 0.50852273)
## 116) texture_worst>=4.543638 408 180 B (0.55882353 0.44117647) *
## 117) texture_worst< 4.543638 296 118 M (0.39864865 0.60135135) *
## 59) compactness_se< -4.681232 18 0 M (0.00000000 1.00000000) *
## 15) symmetry_worst>=-1.427209 70 17 M (0.24285714 0.75714286)
## 30) texture_worst< 4.595658 31 14 M (0.45161290 0.54838710)
## 60) texture_worst>=4.251602 15 1 B (0.93333333 0.06666667)
## 120) smoothness_mean< -2.156267 14 0 B (1.00000000 0.00000000) *
## 121) smoothness_mean>=-2.156267 1 0 M (0.00000000 1.00000000) *
## 61) texture_worst< 4.251602 16 0 M (0.00000000 1.00000000) *
## 31) texture_worst>=4.595658 39 3 M (0.07692308 0.92307692)
## 62) texture_mean>=3.10949 11 3 M (0.27272727 0.72727273)
## 124) texture_mean< 3.116842 3 0 B (1.00000000 0.00000000) *
## 125) texture_mean>=3.116842 8 0 M (0.00000000 1.00000000) *
## 63) texture_mean< 3.10949 28 0 M (0.00000000 1.00000000) *
##
## $trees[[19]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 426 M (0.46710526 0.53289474)
## 2) symmetry_worst< -1.423936 816 407 M (0.49877451 0.50122549)
## 4) compactness_se< -3.648711 496 208 B (0.58064516 0.41935484)
## 8) symmetry_worst>=-1.749307 181 47 B (0.74033149 0.25966851)
## 16) smoothness_mean>=-2.483393 162 34 B (0.79012346 0.20987654)
## 32) compactness_se>=-4.676603 157 29 B (0.81528662 0.18471338)
## 64) texture_worst< 5.402766 152 24 B (0.84210526 0.15789474) *
## 65) texture_worst>=5.402766 5 0 M (0.00000000 1.00000000) *
## 33) compactness_se< -4.676603 5 0 M (0.00000000 1.00000000) *
## 17) smoothness_mean< -2.483393 19 6 M (0.31578947 0.68421053)
## 34) smoothness_mean< -2.536306 5 0 B (1.00000000 0.00000000) *
## 35) smoothness_mean>=-2.536306 14 1 M (0.07142857 0.92857143)
## 70) smoothness_worst>=-1.560846 1 0 B (1.00000000 0.00000000) *
## 71) smoothness_worst< -1.560846 13 0 M (0.00000000 1.00000000) *
## 9) symmetry_worst< -1.749307 315 154 M (0.48888889 0.51111111)
## 18) symmetry_worst< -1.789136 274 125 B (0.54379562 0.45620438)
## 36) symmetry_worst>=-1.855076 52 8 B (0.84615385 0.15384615)
## 72) smoothness_mean>=-2.433246 41 2 B (0.95121951 0.04878049) *
## 73) smoothness_mean< -2.433246 11 5 M (0.45454545 0.54545455) *
## 37) symmetry_worst< -1.855076 222 105 M (0.47297297 0.52702703)
## 74) smoothness_mean< -2.444437 39 7 B (0.82051282 0.17948718) *
## 75) smoothness_mean>=-2.444437 183 73 M (0.39890710 0.60109290) *
## 19) symmetry_worst>=-1.789136 41 5 M (0.12195122 0.87804878)
## 38) smoothness_mean< -2.429074 2 0 B (1.00000000 0.00000000) *
## 39) smoothness_mean>=-2.429074 39 3 M (0.07692308 0.92307692)
## 78) smoothness_worst< -1.550971 5 2 B (0.60000000 0.40000000) *
## 79) smoothness_worst>=-1.550971 34 0 M (0.00000000 1.00000000) *
## 5) compactness_se>=-3.648711 320 119 M (0.37187500 0.62812500)
## 10) symmetry_worst< -1.840831 128 58 B (0.54687500 0.45312500)
## 20) symmetry_worst>=-1.982941 43 0 B (1.00000000 0.00000000) *
## 21) symmetry_worst< -1.982941 85 27 M (0.31764706 0.68235294)
## 42) texture_worst>=5.255485 6 0 B (1.00000000 0.00000000) *
## 43) texture_worst< 5.255485 79 21 M (0.26582278 0.73417722)
## 86) texture_mean< 2.763153 4 0 B (1.00000000 0.00000000) *
## 87) texture_mean>=2.763153 75 17 M (0.22666667 0.77333333) *
## 11) symmetry_worst>=-1.840831 192 49 M (0.25520833 0.74479167)
## 22) smoothness_mean< -2.503795 10 0 B (1.00000000 0.00000000) *
## 23) smoothness_mean>=-2.503795 182 39 M (0.21428571 0.78571429)
## 46) symmetry_worst>=-1.471051 10 1 B (0.90000000 0.10000000)
## 92) texture_mean< 3.100749 9 0 B (1.00000000 0.00000000) *
## 93) texture_mean>=3.100749 1 0 M (0.00000000 1.00000000) *
## 47) symmetry_worst< -1.471051 172 30 M (0.17441860 0.82558140)
## 94) compactness_se>=-2.853699 19 9 B (0.52631579 0.47368421) *
## 95) compactness_se< -2.853699 153 20 M (0.13071895 0.86928105) *
## 3) symmetry_worst>=-1.423936 96 19 M (0.19791667 0.80208333)
## 6) texture_mean< 2.77286 10 2 B (0.80000000 0.20000000)
## 12) compactness_se< -3.173162 8 0 B (1.00000000 0.00000000) *
## 13) compactness_se>=-3.173162 2 0 M (0.00000000 1.00000000) *
## 7) texture_mean>=2.77286 86 11 M (0.12790698 0.87209302)
## 14) smoothness_worst< -1.501886 15 7 B (0.53333333 0.46666667)
## 28) texture_mean< 3.021535 6 0 B (1.00000000 0.00000000) *
## 29) texture_mean>=3.021535 9 2 M (0.22222222 0.77777778)
## 58) smoothness_mean>=-2.349952 2 0 B (1.00000000 0.00000000) *
## 59) smoothness_mean< -2.349952 7 0 M (0.00000000 1.00000000) *
## 15) smoothness_worst>=-1.501886 71 3 M (0.04225352 0.95774648)
## 30) compactness_se>=-2.567912 2 0 B (1.00000000 0.00000000) *
## 31) compactness_se< -2.567912 69 1 M (0.01449275 0.98550725)
## 62) compactness_se< -4.171724 5 1 M (0.20000000 0.80000000)
## 124) texture_mean< 3.006781 1 0 B (1.00000000 0.00000000) *
## 125) texture_mean>=3.006781 4 0 M (0.00000000 1.00000000) *
## 63) compactness_se>=-4.171724 64 0 M (0.00000000 1.00000000) *
##
## $trees[[20]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 450 B (0.50657895 0.49342105)
## 2) symmetry_worst< -1.840831 390 150 B (0.61538462 0.38461538)
## 4) compactness_se>=-4.434687 325 102 B (0.68615385 0.31384615)
## 8) symmetry_worst>=-1.9261 88 10 B (0.88636364 0.11363636)
## 16) texture_worst< 4.927821 82 6 B (0.92682927 0.07317073)
## 32) smoothness_worst< -1.424105 77 3 B (0.96103896 0.03896104)
## 64) smoothness_mean>=-2.390216 59 0 B (1.00000000 0.00000000) *
## 65) smoothness_mean< -2.390216 18 3 B (0.83333333 0.16666667) *
## 33) smoothness_worst>=-1.424105 5 2 M (0.40000000 0.60000000)
## 66) texture_mean>=2.876957 2 0 B (1.00000000 0.00000000) *
## 67) texture_mean< 2.876957 3 0 M (0.00000000 1.00000000) *
## 17) texture_worst>=4.927821 6 2 M (0.33333333 0.66666667)
## 34) texture_mean>=3.304052 2 0 B (1.00000000 0.00000000) *
## 35) texture_mean< 3.304052 4 0 M (0.00000000 1.00000000) *
## 9) symmetry_worst< -1.9261 237 92 B (0.61181435 0.38818565)
## 18) symmetry_worst< -1.964096 202 67 B (0.66831683 0.33168317)
## 36) smoothness_mean< -2.242666 189 55 B (0.70899471 0.29100529)
## 72) smoothness_mean>=-2.33454 44 3 B (0.93181818 0.06818182) *
## 73) smoothness_mean< -2.33454 145 52 B (0.64137931 0.35862069) *
## 37) smoothness_mean>=-2.242666 13 1 M (0.07692308 0.92307692)
## 74) texture_mean< 2.885158 1 0 B (1.00000000 0.00000000) *
## 75) texture_mean>=2.885158 12 0 M (0.00000000 1.00000000) *
## 19) symmetry_worst>=-1.964096 35 10 M (0.28571429 0.71428571)
## 38) smoothness_mean>=-2.225218 8 0 B (1.00000000 0.00000000) *
## 39) smoothness_mean< -2.225218 27 2 M (0.07407407 0.92592593)
## 78) smoothness_mean< -2.425835 2 0 B (1.00000000 0.00000000) *
## 79) smoothness_mean>=-2.425835 25 0 M (0.00000000 1.00000000) *
## 5) compactness_se< -4.434687 65 17 M (0.26153846 0.73846154)
## 10) compactness_se< -4.706178 11 0 B (1.00000000 0.00000000) *
## 11) compactness_se>=-4.706178 54 6 M (0.11111111 0.88888889)
## 22) texture_mean< 2.846651 2 0 B (1.00000000 0.00000000) *
## 23) texture_mean>=2.846651 52 4 M (0.07692308 0.92307692)
## 46) smoothness_mean>=-2.271294 2 0 B (1.00000000 0.00000000) *
## 47) smoothness_mean< -2.271294 50 2 M (0.04000000 0.96000000)
## 94) compactness_se>=-4.514873 18 2 M (0.11111111 0.88888889) *
## 95) compactness_se< -4.514873 32 0 M (0.00000000 1.00000000) *
## 3) symmetry_worst>=-1.840831 522 222 M (0.42528736 0.57471264)
## 6) compactness_se< -4.510773 39 9 B (0.76923077 0.23076923)
## 12) texture_worst>=4.622562 26 1 B (0.96153846 0.03846154)
## 24) smoothness_worst< -1.491257 19 0 B (1.00000000 0.00000000) *
## 25) smoothness_worst>=-1.491257 7 1 B (0.85714286 0.14285714)
## 50) compactness_se< -4.557422 6 0 B (1.00000000 0.00000000) *
## 51) compactness_se>=-4.557422 1 0 M (0.00000000 1.00000000) *
## 13) texture_worst< 4.622562 13 5 M (0.38461538 0.61538462)
## 26) texture_worst< 4.468479 5 0 B (1.00000000 0.00000000) *
## 27) texture_worst>=4.468479 8 0 M (0.00000000 1.00000000) *
## 7) compactness_se>=-4.510773 483 192 M (0.39751553 0.60248447)
## 14) smoothness_worst< -1.472307 322 149 M (0.46273292 0.53726708)
## 28) smoothness_worst>=-1.4768 36 6 B (0.83333333 0.16666667)
## 56) symmetry_worst>=-1.811141 30 0 B (1.00000000 0.00000000) *
## 57) symmetry_worst< -1.811141 6 0 M (0.00000000 1.00000000) *
## 29) smoothness_worst< -1.4768 286 119 M (0.41608392 0.58391608)
## 58) compactness_se< -3.711591 134 62 B (0.53731343 0.46268657)
## 116) compactness_se>=-4.159844 63 16 B (0.74603175 0.25396825) *
## 117) compactness_se< -4.159844 71 25 M (0.35211268 0.64788732) *
## 59) compactness_se>=-3.711591 152 47 M (0.30921053 0.69078947)
## 118) smoothness_mean< -2.424641 27 10 B (0.62962963 0.37037037) *
## 119) smoothness_mean>=-2.424641 125 30 M (0.24000000 0.76000000) *
## 15) smoothness_worst>=-1.472307 161 43 M (0.26708075 0.73291925)
## 30) smoothness_mean>=-2.093138 20 5 B (0.75000000 0.25000000)
## 60) texture_mean>=2.515298 16 1 B (0.93750000 0.06250000)
## 120) compactness_se< -2.887458 15 0 B (1.00000000 0.00000000) *
## 121) compactness_se>=-2.887458 1 0 M (0.00000000 1.00000000) *
## 61) texture_mean< 2.515298 4 0 M (0.00000000 1.00000000) *
## 31) smoothness_mean< -2.093138 141 28 M (0.19858156 0.80141844)
## 62) symmetry_worst< -1.780671 8 2 B (0.75000000 0.25000000)
## 124) smoothness_mean>=-2.223945 5 0 B (1.00000000 0.00000000) *
## 125) smoothness_mean< -2.223945 3 1 M (0.33333333 0.66666667) *
## 63) symmetry_worst>=-1.780671 133 22 M (0.16541353 0.83458647)
## 126) compactness_se>=-2.540721 3 0 B (1.00000000 0.00000000) *
## 127) compactness_se< -2.540721 130 19 M (0.14615385 0.85384615) *
##
## $trees[[21]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 435 B (0.52302632 0.47697368)
## 2) texture_worst< 4.609772 471 190 B (0.59660297 0.40339703)
## 4) compactness_se< -3.66733 304 95 B (0.68750000 0.31250000)
## 8) symmetry_worst< -1.786753 149 28 B (0.81208054 0.18791946)
## 16) symmetry_worst>=-2.49184 137 18 B (0.86861314 0.13138686)
## 32) symmetry_worst< -1.957488 57 0 B (1.00000000 0.00000000) *
## 33) symmetry_worst>=-1.957488 80 18 B (0.77500000 0.22500000)
## 66) symmetry_worst>=-1.919731 59 8 B (0.86440678 0.13559322) *
## 67) symmetry_worst< -1.919731 21 10 B (0.52380952 0.47619048) *
## 17) symmetry_worst< -2.49184 12 2 M (0.16666667 0.83333333)
## 34) texture_mean< 2.855865 2 0 B (1.00000000 0.00000000) *
## 35) texture_mean>=2.855865 10 0 M (0.00000000 1.00000000) *
## 9) symmetry_worst>=-1.786753 155 67 B (0.56774194 0.43225806)
## 18) symmetry_worst>=-1.749307 118 33 B (0.72033898 0.27966102)
## 36) smoothness_mean>=-2.418898 88 14 B (0.84090909 0.15909091)
## 72) smoothness_worst< -1.480927 39 0 B (1.00000000 0.00000000) *
## 73) smoothness_worst>=-1.480927 49 14 B (0.71428571 0.28571429) *
## 37) smoothness_mean< -2.418898 30 11 M (0.36666667 0.63333333)
## 74) smoothness_mean< -2.43698 14 3 B (0.78571429 0.21428571) *
## 75) smoothness_mean>=-2.43698 16 0 M (0.00000000 1.00000000) *
## 19) symmetry_worst< -1.749307 37 3 M (0.08108108 0.91891892)
## 38) texture_mean< 2.803754 2 0 B (1.00000000 0.00000000) *
## 39) texture_mean>=2.803754 35 1 M (0.02857143 0.97142857)
## 78) smoothness_worst< -1.550971 1 0 B (1.00000000 0.00000000) *
## 79) smoothness_worst>=-1.550971 34 0 M (0.00000000 1.00000000) *
## 5) compactness_se>=-3.66733 167 72 M (0.43113772 0.56886228)
## 10) compactness_se>=-3.483667 106 47 B (0.55660377 0.44339623)
## 20) smoothness_worst< -1.496438 60 12 B (0.80000000 0.20000000)
## 40) texture_mean< 3.049609 50 2 B (0.96000000 0.04000000)
## 80) compactness_se>=-3.392487 41 0 B (1.00000000 0.00000000) *
## 81) compactness_se< -3.392487 9 2 B (0.77777778 0.22222222) *
## 41) texture_mean>=3.049609 10 0 M (0.00000000 1.00000000) *
## 21) smoothness_worst>=-1.496438 46 11 M (0.23913043 0.76086957)
## 42) smoothness_worst>=-1.4665 15 5 B (0.66666667 0.33333333)
## 84) texture_worst>=4.250651 12 2 B (0.83333333 0.16666667) *
## 85) texture_worst< 4.250651 3 0 M (0.00000000 1.00000000) *
## 43) smoothness_worst< -1.4665 31 1 M (0.03225806 0.96774194)
## 86) texture_mean< 2.8622 7 1 M (0.14285714 0.85714286) *
## 87) texture_mean>=2.8622 24 0 M (0.00000000 1.00000000) *
## 11) compactness_se< -3.483667 61 13 M (0.21311475 0.78688525)
## 22) smoothness_mean< -2.322902 31 12 M (0.38709677 0.61290323)
## 44) smoothness_mean>=-2.40657 12 0 B (1.00000000 0.00000000) *
## 45) smoothness_mean< -2.40657 19 0 M (0.00000000 1.00000000) *
## 23) smoothness_mean>=-2.322902 30 1 M (0.03333333 0.96666667)
## 46) symmetry_worst< -1.900827 2 1 B (0.50000000 0.50000000)
## 92) texture_mean< 2.874386 1 0 B (1.00000000 0.00000000) *
## 93) texture_mean>=2.874386 1 0 M (0.00000000 1.00000000) *
## 47) symmetry_worst>=-1.900827 28 0 M (0.00000000 1.00000000) *
## 3) texture_worst>=4.609772 441 196 M (0.44444444 0.55555556)
## 6) compactness_se>=-4.671834 412 195 M (0.47330097 0.52669903)
## 12) texture_worst>=4.628023 381 189 B (0.50393701 0.49606299)
## 24) texture_mean< 2.91424 28 3 B (0.89285714 0.10714286)
## 48) symmetry_worst< -1.384729 25 0 B (1.00000000 0.00000000) *
## 49) symmetry_worst>=-1.384729 3 0 M (0.00000000 1.00000000) *
## 25) texture_mean>=2.91424 353 167 M (0.47308782 0.52691218)
## 50) texture_mean>=3.029409 277 129 B (0.53429603 0.46570397)
## 100) texture_mean< 3.058002 38 0 B (1.00000000 0.00000000) *
## 101) texture_mean>=3.058002 239 110 M (0.46025105 0.53974895) *
## 51) texture_mean< 3.029409 76 19 M (0.25000000 0.75000000)
## 102) smoothness_mean< -2.423157 8 0 B (1.00000000 0.00000000) *
## 103) smoothness_mean>=-2.423157 68 11 M (0.16176471 0.83823529) *
## 13) texture_worst< 4.628023 31 3 M (0.09677419 0.90322581)
## 26) smoothness_mean< -2.471714 2 0 B (1.00000000 0.00000000) *
## 27) smoothness_mean>=-2.471714 29 1 M (0.03448276 0.96551724)
## 54) symmetry_worst< -2.477165 1 0 B (1.00000000 0.00000000) *
## 55) symmetry_worst>=-2.477165 28 0 M (0.00000000 1.00000000) *
## 7) compactness_se< -4.671834 29 1 M (0.03448276 0.96551724)
## 14) compactness_se< -4.938351 1 0 B (1.00000000 0.00000000) *
## 15) compactness_se>=-4.938351 28 0 M (0.00000000 1.00000000) *
##
## $trees[[22]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 409 B (0.55153509 0.44846491)
## 2) symmetry_worst< -2.193154 105 24 B (0.77142857 0.22857143)
## 4) smoothness_mean< -2.217511 100 19 B (0.81000000 0.19000000)
## 8) symmetry_worst>=-2.957999 97 16 B (0.83505155 0.16494845)
## 16) texture_mean< 3.326618 89 11 B (0.87640449 0.12359551)
## 32) compactness_se>=-3.861191 63 2 B (0.96825397 0.03174603)
## 64) smoothness_mean>=-2.481712 57 0 B (1.00000000 0.00000000) *
## 65) smoothness_mean< -2.481712 6 2 B (0.66666667 0.33333333) *
## 33) compactness_se< -3.861191 26 9 B (0.65384615 0.34615385)
## 66) compactness_se< -3.941046 21 4 B (0.80952381 0.19047619) *
## 67) compactness_se>=-3.941046 5 0 M (0.00000000 1.00000000) *
## 17) texture_mean>=3.326618 8 3 M (0.37500000 0.62500000)
## 34) texture_mean>=3.379986 3 0 B (1.00000000 0.00000000) *
## 35) texture_mean< 3.379986 5 0 M (0.00000000 1.00000000) *
## 9) symmetry_worst< -2.957999 3 0 M (0.00000000 1.00000000) *
## 5) smoothness_mean>=-2.217511 5 0 M (0.00000000 1.00000000) *
## 3) symmetry_worst>=-2.193154 807 385 B (0.52292441 0.47707559)
## 6) texture_mean< 2.708379 30 2 B (0.93333333 0.06666667)
## 12) compactness_se< -2.990558 29 1 B (0.96551724 0.03448276)
## 24) symmetry_worst< -1.556816 22 0 B (1.00000000 0.00000000) *
## 25) symmetry_worst>=-1.556816 7 1 B (0.85714286 0.14285714)
## 50) smoothness_mean< -2.081877 6 0 B (1.00000000 0.00000000) *
## 51) smoothness_mean>=-2.081877 1 0 M (0.00000000 1.00000000) *
## 13) compactness_se>=-2.990558 1 0 M (0.00000000 1.00000000) *
## 7) texture_mean>=2.708379 777 383 B (0.50707851 0.49292149)
## 14) texture_worst>=4.982438 116 36 B (0.68965517 0.31034483)
## 28) smoothness_mean< -2.425205 69 7 B (0.89855072 0.10144928)
## 56) smoothness_worst< -1.490267 64 3 B (0.95312500 0.04687500)
## 112) symmetry_worst< -1.530091 63 2 B (0.96825397 0.03174603) *
## 113) symmetry_worst>=-1.530091 1 0 M (0.00000000 1.00000000) *
## 57) smoothness_worst>=-1.490267 5 1 M (0.20000000 0.80000000)
## 114) texture_mean>=3.23593 1 0 B (1.00000000 0.00000000) *
## 115) texture_mean< 3.23593 4 0 M (0.00000000 1.00000000) *
## 29) smoothness_mean>=-2.425205 47 18 M (0.38297872 0.61702128)
## 58) smoothness_worst>=-1.483884 29 11 B (0.62068966 0.37931034)
## 116) symmetry_worst< -1.65672 19 3 B (0.84210526 0.15789474) *
## 117) symmetry_worst>=-1.65672 10 2 M (0.20000000 0.80000000) *
## 59) smoothness_worst< -1.483884 18 0 M (0.00000000 1.00000000) *
## 15) texture_worst< 4.982438 661 314 M (0.47503782 0.52496218)
## 30) texture_worst< 4.976767 633 314 M (0.49605055 0.50394945)
## 60) smoothness_worst< -1.374083 614 301 B (0.50977199 0.49022801)
## 120) smoothness_mean>=-2.354774 327 135 B (0.58715596 0.41284404) *
## 121) smoothness_mean< -2.354774 287 121 M (0.42160279 0.57839721) *
## 61) smoothness_worst>=-1.374083 19 1 M (0.05263158 0.94736842)
## 122) symmetry_worst< -1.846189 1 0 B (1.00000000 0.00000000) *
## 123) symmetry_worst>=-1.846189 18 0 M (0.00000000 1.00000000) *
## 31) texture_worst>=4.976767 28 0 M (0.00000000 1.00000000) *
##
## $trees[[23]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 390 B (0.57236842 0.42763158)
## 2) smoothness_mean< -2.392182 336 110 B (0.67261905 0.32738095)
## 4) smoothness_mean>=-2.401687 39 0 B (1.00000000 0.00000000) *
## 5) smoothness_mean< -2.401687 297 110 B (0.62962963 0.37037037)
## 10) texture_mean>=3.198061 61 9 B (0.85245902 0.14754098)
## 20) symmetry_worst>=-2.242858 55 4 B (0.92727273 0.07272727)
## 40) texture_mean< 3.440257 52 1 B (0.98076923 0.01923077)
## 80) symmetry_worst< -1.345645 51 0 B (1.00000000 0.00000000) *
## 81) symmetry_worst>=-1.345645 1 0 M (0.00000000 1.00000000) *
## 41) texture_mean>=3.440257 3 0 M (0.00000000 1.00000000) *
## 21) symmetry_worst< -2.242858 6 1 M (0.16666667 0.83333333)
## 42) texture_mean>=3.357516 1 0 B (1.00000000 0.00000000) *
## 43) texture_mean< 3.357516 5 0 M (0.00000000 1.00000000) *
## 11) texture_mean< 3.198061 236 101 B (0.57203390 0.42796610)
## 22) texture_mean< 3.130673 216 82 B (0.62037037 0.37962963)
## 44) smoothness_mean< -2.408446 201 67 B (0.66666667 0.33333333)
## 88) smoothness_mean>=-2.495871 144 33 B (0.77083333 0.22916667) *
## 89) smoothness_mean< -2.495871 57 23 M (0.40350877 0.59649123) *
## 45) smoothness_mean>=-2.408446 15 0 M (0.00000000 1.00000000) *
## 23) texture_mean>=3.130673 20 1 M (0.05000000 0.95000000)
## 46) smoothness_mean>=-2.417513 1 0 B (1.00000000 0.00000000) *
## 47) smoothness_mean< -2.417513 19 0 M (0.00000000 1.00000000) *
## 3) smoothness_mean>=-2.392182 576 280 B (0.51388889 0.48611111)
## 6) texture_worst< 4.1745 38 5 B (0.86842105 0.13157895)
## 12) compactness_se< -3.032021 35 3 B (0.91428571 0.08571429)
## 24) smoothness_mean< -2.07745 29 1 B (0.96551724 0.03448276)
## 48) smoothness_worst>=-1.53208 26 0 B (1.00000000 0.00000000) *
## 49) smoothness_worst< -1.53208 3 1 B (0.66666667 0.33333333)
## 98) texture_mean>=2.744166 2 0 B (1.00000000 0.00000000) *
## 99) texture_mean< 2.744166 1 0 M (0.00000000 1.00000000) *
## 25) smoothness_mean>=-2.07745 6 2 B (0.66666667 0.33333333)
## 50) smoothness_mean>=-2.060513 4 0 B (1.00000000 0.00000000) *
## 51) smoothness_mean< -2.060513 2 0 M (0.00000000 1.00000000) *
## 13) compactness_se>=-3.032021 3 1 M (0.33333333 0.66666667)
## 26) smoothness_mean< -2.298748 1 0 B (1.00000000 0.00000000) *
## 27) smoothness_mean>=-2.298748 2 0 M (0.00000000 1.00000000) *
## 7) texture_worst>=4.1745 538 263 M (0.48884758 0.51115242)
## 14) texture_worst>=4.751723 202 74 B (0.63366337 0.36633663)
## 28) compactness_se< -3.352836 167 47 B (0.71856287 0.28143713)
## 56) texture_worst< 4.818867 52 2 B (0.96153846 0.03846154)
## 112) smoothness_worst< -1.425578 50 0 B (1.00000000 0.00000000) *
## 113) smoothness_worst>=-1.425578 2 0 M (0.00000000 1.00000000) *
## 57) texture_worst>=4.818867 115 45 B (0.60869565 0.39130435)
## 114) texture_mean>=3.032246 96 29 B (0.69791667 0.30208333) *
## 115) texture_mean< 3.032246 19 3 M (0.15789474 0.84210526) *
## 29) compactness_se>=-3.352836 35 8 M (0.22857143 0.77142857)
## 58) symmetry_worst>=-1.477364 10 2 B (0.80000000 0.20000000)
## 116) texture_mean< 3.05648 8 0 B (1.00000000 0.00000000) *
## 117) texture_mean>=3.05648 2 0 M (0.00000000 1.00000000) *
## 59) symmetry_worst< -1.477364 25 0 M (0.00000000 1.00000000) *
## 15) texture_worst< 4.751723 336 135 M (0.40178571 0.59821429)
## 30) texture_worst< 4.682677 287 132 M (0.45993031 0.54006969)
## 60) texture_worst>=4.626933 42 6 B (0.85714286 0.14285714)
## 120) smoothness_worst>=-1.505734 36 0 B (1.00000000 0.00000000) *
## 121) smoothness_worst< -1.505734 6 0 M (0.00000000 1.00000000) *
## 61) texture_worst< 4.626933 245 96 M (0.39183673 0.60816327)
## 122) texture_worst< 4.50835 143 70 B (0.51048951 0.48951049) *
## 123) texture_worst>=4.50835 102 23 M (0.22549020 0.77450980) *
## 31) texture_worst>=4.682677 49 3 M (0.06122449 0.93877551)
## 62) texture_mean< 2.909862 4 1 B (0.75000000 0.25000000)
## 124) texture_worst>=4.709072 3 0 B (1.00000000 0.00000000) *
## 125) texture_worst< 4.709072 1 0 M (0.00000000 1.00000000) *
## 63) texture_mean>=2.909862 45 0 M (0.00000000 1.00000000) *
##
## $trees[[24]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 432 B (0.52631579 0.47368421)
## 2) smoothness_worst< -1.482699 593 231 B (0.61045531 0.38954469)
## 4) smoothness_mean< -2.506908 68 13 B (0.80882353 0.19117647)
## 8) compactness_se>=-4.692873 59 5 B (0.91525424 0.08474576)
## 16) smoothness_worst>=-1.71076 52 2 B (0.96153846 0.03846154)
## 32) symmetry_worst< -1.627715 43 0 B (1.00000000 0.00000000) *
## 33) symmetry_worst>=-1.627715 9 2 B (0.77777778 0.22222222)
## 66) symmetry_worst>=-1.617577 7 0 B (1.00000000 0.00000000) *
## 67) symmetry_worst< -1.617577 2 0 M (0.00000000 1.00000000) *
## 17) smoothness_worst< -1.71076 7 3 B (0.57142857 0.42857143)
## 34) compactness_se< -3.013033 4 0 B (1.00000000 0.00000000) *
## 35) compactness_se>=-3.013033 3 0 M (0.00000000 1.00000000) *
## 9) compactness_se< -4.692873 9 1 M (0.11111111 0.88888889)
## 18) texture_mean>=3.172108 1 0 B (1.00000000 0.00000000) *
## 19) texture_mean< 3.172108 8 0 M (0.00000000 1.00000000) *
## 5) smoothness_mean>=-2.506908 525 218 B (0.58476190 0.41523810)
## 10) smoothness_mean>=-2.4986 511 204 B (0.60078278 0.39921722)
## 20) smoothness_worst< -1.618016 40 3 B (0.92500000 0.07500000)
## 40) smoothness_mean< -2.337942 38 1 B (0.97368421 0.02631579)
## 80) smoothness_worst>=-1.694089 37 0 B (1.00000000 0.00000000) *
## 81) smoothness_worst< -1.694089 1 0 M (0.00000000 1.00000000) *
## 41) smoothness_mean>=-2.337942 2 0 M (0.00000000 1.00000000) *
## 21) smoothness_worst>=-1.618016 471 201 B (0.57324841 0.42675159)
## 42) smoothness_worst>=-1.59596 423 168 B (0.60283688 0.39716312)
## 84) smoothness_worst< -1.584838 47 4 B (0.91489362 0.08510638) *
## 85) smoothness_worst>=-1.584838 376 164 B (0.56382979 0.43617021) *
## 43) smoothness_worst< -1.59596 48 15 M (0.31250000 0.68750000)
## 86) symmetry_worst>=-2.037346 22 9 B (0.59090909 0.40909091) *
## 87) symmetry_worst< -2.037346 26 2 M (0.07692308 0.92307692) *
## 11) smoothness_mean< -2.4986 14 0 M (0.00000000 1.00000000) *
## 3) smoothness_worst>=-1.482699 319 118 M (0.36990596 0.63009404)
## 6) symmetry_worst< -1.658843 165 79 B (0.52121212 0.47878788)
## 12) smoothness_worst>=-1.477976 132 47 B (0.64393939 0.35606061)
## 24) compactness_se< -3.294139 114 32 B (0.71929825 0.28070175)
## 48) texture_worst< 5.041355 108 27 B (0.75000000 0.25000000)
## 96) texture_worst< 4.373034 19 0 B (1.00000000 0.00000000) *
## 97) texture_worst>=4.373034 89 27 B (0.69662921 0.30337079) *
## 49) texture_worst>=5.041355 6 1 M (0.16666667 0.83333333)
## 98) texture_mean< 2.955358 1 0 B (1.00000000 0.00000000) *
## 99) texture_mean>=2.955358 5 0 M (0.00000000 1.00000000) *
## 25) compactness_se>=-3.294139 18 3 M (0.16666667 0.83333333)
## 50) texture_mean>=3.23593 3 0 B (1.00000000 0.00000000) *
## 51) texture_mean< 3.23593 15 0 M (0.00000000 1.00000000) *
## 13) smoothness_worst< -1.477976 33 1 M (0.03030303 0.96969697)
## 26) compactness_se< -3.967101 1 0 B (1.00000000 0.00000000) *
## 27) compactness_se>=-3.967101 32 0 M (0.00000000 1.00000000) *
## 7) symmetry_worst>=-1.658843 154 32 M (0.20779221 0.79220779)
## 14) smoothness_mean< -2.219198 101 31 M (0.30693069 0.69306931)
## 28) smoothness_mean>=-2.233531 11 0 B (1.00000000 0.00000000) *
## 29) smoothness_mean< -2.233531 90 20 M (0.22222222 0.77777778)
## 58) texture_mean< 2.735974 4 0 B (1.00000000 0.00000000) *
## 59) texture_mean>=2.735974 86 16 M (0.18604651 0.81395349)
## 118) compactness_se>=-3.05573 7 2 B (0.71428571 0.28571429) *
## 119) compactness_se< -3.05573 79 11 M (0.13924051 0.86075949) *
## 15) smoothness_mean>=-2.219198 53 1 M (0.01886792 0.98113208)
## 30) compactness_se< -4.032019 1 0 B (1.00000000 0.00000000) *
## 31) compactness_se>=-4.032019 52 0 M (0.00000000 1.00000000) *
##
## $trees[[25]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 411 M (0.45065789 0.54934211)
## 2) smoothness_worst< -1.604472 107 29 B (0.72897196 0.27102804)
## 4) compactness_se>=-4.507137 84 16 B (0.80952381 0.19047619)
## 8) texture_worst>=4.680896 41 1 B (0.97560976 0.02439024)
## 16) smoothness_mean< -2.337942 40 0 B (1.00000000 0.00000000) *
## 17) smoothness_mean>=-2.337942 1 0 M (0.00000000 1.00000000) *
## 9) texture_worst< 4.680896 43 15 B (0.65116279 0.34883721)
## 18) texture_worst< 4.491851 20 1 B (0.95000000 0.05000000)
## 36) smoothness_worst< -1.637109 18 0 B (1.00000000 0.00000000) *
## 37) smoothness_worst>=-1.637109 2 1 B (0.50000000 0.50000000)
## 74) texture_mean< 2.675349 1 0 B (1.00000000 0.00000000) *
## 75) texture_mean>=2.675349 1 0 M (0.00000000 1.00000000) *
## 19) texture_worst>=4.491851 23 9 M (0.39130435 0.60869565)
## 38) compactness_se< -4.234991 8 0 B (1.00000000 0.00000000) *
## 39) compactness_se>=-4.234991 15 1 M (0.06666667 0.93333333)
## 78) texture_worst>=4.619922 4 1 M (0.25000000 0.75000000) *
## 79) texture_worst< 4.619922 11 0 M (0.00000000 1.00000000) *
## 5) compactness_se< -4.507137 23 10 M (0.43478261 0.56521739)
## 10) smoothness_mean< -2.549773 7 0 B (1.00000000 0.00000000) *
## 11) smoothness_mean>=-2.549773 16 3 M (0.18750000 0.81250000)
## 22) symmetry_worst< -1.925408 6 3 B (0.50000000 0.50000000)
## 44) texture_mean< 3.149769 3 0 B (1.00000000 0.00000000) *
## 45) texture_mean>=3.149769 3 0 M (0.00000000 1.00000000) *
## 23) symmetry_worst>=-1.925408 10 0 M (0.00000000 1.00000000) *
## 3) smoothness_worst>=-1.604472 805 333 M (0.41366460 0.58633540)
## 6) texture_mean< 2.707375 28 3 B (0.89285714 0.10714286)
## 12) symmetry_worst< -1.577652 23 0 B (1.00000000 0.00000000) *
## 13) symmetry_worst>=-1.577652 5 2 M (0.40000000 0.60000000)
## 26) texture_mean>=2.553793 2 0 B (1.00000000 0.00000000) *
## 27) texture_mean< 2.553793 3 0 M (0.00000000 1.00000000) *
## 7) texture_mean>=2.707375 777 308 M (0.39639640 0.60360360)
## 14) compactness_se< -4.224388 116 48 B (0.58620690 0.41379310)
## 28) smoothness_mean>=-2.3007 25 0 B (1.00000000 0.00000000) *
## 29) smoothness_mean< -2.3007 91 43 M (0.47252747 0.52747253)
## 58) symmetry_worst>=-1.49608 11 0 B (1.00000000 0.00000000) *
## 59) symmetry_worst< -1.49608 80 32 M (0.40000000 0.60000000)
## 118) texture_worst>=4.876647 29 10 B (0.65517241 0.34482759) *
## 119) texture_worst< 4.876647 51 13 M (0.25490196 0.74509804) *
## 15) compactness_se>=-4.224388 661 240 M (0.36308623 0.63691377)
## 30) smoothness_mean< -2.2971 432 180 M (0.41666667 0.58333333)
## 60) compactness_se>=-3.93685 283 140 B (0.50530035 0.49469965)
## 120) smoothness_worst>=-1.565486 215 89 B (0.58604651 0.41395349) *
## 121) smoothness_worst< -1.565486 68 17 M (0.25000000 0.75000000) *
## 61) compactness_se< -3.93685 149 37 M (0.24832215 0.75167785)
## 122) smoothness_mean>=-2.312236 10 1 B (0.90000000 0.10000000) *
## 123) smoothness_mean< -2.312236 139 28 M (0.20143885 0.79856115) *
## 31) smoothness_mean>=-2.2971 229 60 M (0.26200873 0.73799127)
## 62) compactness_se< -4.032549 23 7 B (0.69565217 0.30434783)
## 124) compactness_se>=-4.156842 15 0 B (1.00000000 0.00000000) *
## 125) compactness_se< -4.156842 8 1 M (0.12500000 0.87500000) *
## 63) compactness_se>=-4.032549 206 44 M (0.21359223 0.78640777)
## 126) symmetry_worst< -1.660064 114 38 M (0.33333333 0.66666667) *
## 127) symmetry_worst>=-1.660064 92 6 M (0.06521739 0.93478261) *
##
## $trees[[26]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 412 M (0.45175439 0.54824561)
## 2) symmetry_worst< -1.816281 409 182 B (0.55501222 0.44498778)
## 4) smoothness_worst< -1.52112 245 85 B (0.65306122 0.34693878)
## 8) symmetry_worst>=-2.491275 229 71 B (0.68995633 0.31004367)
## 16) symmetry_worst>=-1.934101 75 11 B (0.85333333 0.14666667)
## 32) texture_mean>=2.718324 71 7 B (0.90140845 0.09859155)
## 64) smoothness_mean< -2.257258 68 4 B (0.94117647 0.05882353) *
## 65) smoothness_mean>=-2.257258 3 0 M (0.00000000 1.00000000) *
## 33) texture_mean< 2.718324 4 0 M (0.00000000 1.00000000) *
## 17) symmetry_worst< -1.934101 154 60 B (0.61038961 0.38961039)
## 34) smoothness_worst< -1.604936 33 1 B (0.96969697 0.03030303)
## 68) compactness_se< -2.951614 31 0 B (1.00000000 0.00000000) *
## 69) compactness_se>=-2.951614 2 1 B (0.50000000 0.50000000) *
## 35) smoothness_worst>=-1.604936 121 59 B (0.51239669 0.48760331)
## 70) smoothness_worst>=-1.540014 22 0 B (1.00000000 0.00000000) *
## 71) smoothness_worst< -1.540014 99 40 M (0.40404040 0.59595960) *
## 9) symmetry_worst< -2.491275 16 2 M (0.12500000 0.87500000)
## 18) compactness_se>=-3.572604 2 0 B (1.00000000 0.00000000) *
## 19) compactness_se< -3.572604 14 0 M (0.00000000 1.00000000) *
## 5) smoothness_worst>=-1.52112 164 67 M (0.40853659 0.59146341)
## 10) smoothness_mean>=-2.293133 58 21 B (0.63793103 0.36206897)
## 20) texture_mean< 3.104804 47 10 B (0.78723404 0.21276596)
## 40) compactness_se< -3.4389 35 2 B (0.94285714 0.05714286)
## 80) texture_worst< 4.85229 33 0 B (1.00000000 0.00000000) *
## 81) texture_worst>=4.85229 2 0 M (0.00000000 1.00000000) *
## 41) compactness_se>=-3.4389 12 4 M (0.33333333 0.66666667)
## 82) smoothness_worst< -1.495985 4 0 B (1.00000000 0.00000000) *
## 83) smoothness_worst>=-1.495985 8 0 M (0.00000000 1.00000000) *
## 21) texture_mean>=3.104804 11 0 M (0.00000000 1.00000000) *
## 11) smoothness_mean< -2.293133 106 30 M (0.28301887 0.71698113)
## 22) compactness_se>=-3.601238 20 6 B (0.70000000 0.30000000)
## 44) smoothness_worst< -1.473672 12 0 B (1.00000000 0.00000000) *
## 45) smoothness_worst>=-1.473672 8 2 M (0.25000000 0.75000000)
## 90) texture_mean< 2.896181 2 0 B (1.00000000 0.00000000) *
## 91) texture_mean>=2.896181 6 0 M (0.00000000 1.00000000) *
## 23) compactness_se< -3.601238 86 16 M (0.18604651 0.81395349)
## 46) smoothness_worst>=-1.479352 23 10 B (0.56521739 0.43478261)
## 92) smoothness_mean>=-2.35715 13 0 B (1.00000000 0.00000000) *
## 93) smoothness_mean< -2.35715 10 0 M (0.00000000 1.00000000) *
## 47) smoothness_worst< -1.479352 63 3 M (0.04761905 0.95238095)
## 94) texture_mean< 2.739678 1 0 B (1.00000000 0.00000000) *
## 95) texture_mean>=2.739678 62 2 M (0.03225806 0.96774194) *
## 3) symmetry_worst>=-1.816281 503 185 M (0.36779324 0.63220676)
## 6) compactness_se< -3.71586 269 120 M (0.44609665 0.55390335)
## 12) symmetry_worst>=-1.733593 179 81 B (0.54748603 0.45251397)
## 24) symmetry_worst< -1.688251 26 0 B (1.00000000 0.00000000) *
## 25) symmetry_worst>=-1.688251 153 72 M (0.47058824 0.52941176)
## 50) smoothness_worst< -1.488048 69 23 B (0.66666667 0.33333333)
## 100) smoothness_worst>=-1.576547 51 7 B (0.86274510 0.13725490) *
## 101) smoothness_worst< -1.576547 18 2 M (0.11111111 0.88888889) *
## 51) smoothness_worst>=-1.488048 84 26 M (0.30952381 0.69047619)
## 102) compactness_se< -4.480629 8 0 B (1.00000000 0.00000000) *
## 103) compactness_se>=-4.480629 76 18 M (0.23684211 0.76315789) *
## 13) symmetry_worst< -1.733593 90 22 M (0.24444444 0.75555556)
## 26) smoothness_worst>=-1.427418 12 1 B (0.91666667 0.08333333)
## 52) texture_mean>=3.039503 10 0 B (1.00000000 0.00000000) *
## 53) texture_mean< 3.039503 2 1 B (0.50000000 0.50000000)
## 106) texture_mean< 2.938081 1 0 B (1.00000000 0.00000000) *
## 107) texture_mean>=2.938081 1 0 M (0.00000000 1.00000000) *
## 27) smoothness_worst< -1.427418 78 11 M (0.14102564 0.85897436)
## 54) compactness_se>=-3.93685 8 0 B (1.00000000 0.00000000) *
## 55) compactness_se< -3.93685 70 3 M (0.04285714 0.95714286)
## 110) texture_mean< 2.803754 2 0 B (1.00000000 0.00000000) *
## 111) texture_mean>=2.803754 68 1 M (0.01470588 0.98529412) *
## 7) compactness_se>=-3.71586 234 65 M (0.27777778 0.72222222)
## 14) compactness_se>=-3.494301 160 57 M (0.35625000 0.64375000)
## 28) compactness_se< -3.484318 13 0 B (1.00000000 0.00000000) *
## 29) compactness_se>=-3.484318 147 44 M (0.29931973 0.70068027)
## 58) texture_mean< 2.96681 51 25 B (0.50980392 0.49019608)
## 116) smoothness_worst< -1.473088 24 2 B (0.91666667 0.08333333) *
## 117) smoothness_worst>=-1.473088 27 4 M (0.14814815 0.85185185) *
## 59) texture_mean>=2.96681 96 18 M (0.18750000 0.81250000)
## 118) symmetry_worst< -1.775603 11 2 B (0.81818182 0.18181818) *
## 119) symmetry_worst>=-1.775603 85 9 M (0.10588235 0.89411765) *
## 15) compactness_se< -3.494301 74 8 M (0.10810811 0.89189189)
## 30) texture_worst>=4.900864 8 2 B (0.75000000 0.25000000)
## 60) smoothness_mean< -2.358194 6 0 B (1.00000000 0.00000000) *
## 61) smoothness_mean>=-2.358194 2 0 M (0.00000000 1.00000000) *
## 31) texture_worst< 4.900864 66 2 M (0.03030303 0.96969697)
## 62) texture_mean< 2.671633 2 0 B (1.00000000 0.00000000) *
## 63) texture_mean>=2.671633 64 0 M (0.00000000 1.00000000) *
##
## $trees[[27]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 445 B (0.51206140 0.48793860)
## 2) compactness_se< -3.969125 357 134 B (0.62464986 0.37535014)
## 4) texture_mean< 2.81988 42 0 B (1.00000000 0.00000000) *
## 5) texture_mean>=2.81988 315 134 B (0.57460317 0.42539683)
## 10) smoothness_mean>=-2.294121 66 13 B (0.80303030 0.19696970)
## 20) texture_worst< 5.021606 59 6 B (0.89830508 0.10169492)
## 40) symmetry_worst< -1.463197 52 2 B (0.96153846 0.03846154)
## 80) smoothness_mean< -2.222419 47 1 B (0.97872340 0.02127660) *
## 81) smoothness_mean>=-2.222419 5 1 B (0.80000000 0.20000000) *
## 41) symmetry_worst>=-1.463197 7 3 M (0.42857143 0.57142857)
## 82) texture_mean< 2.946426 3 0 B (1.00000000 0.00000000) *
## 83) texture_mean>=2.946426 4 0 M (0.00000000 1.00000000) *
## 21) texture_worst>=5.021606 7 0 M (0.00000000 1.00000000) *
## 11) smoothness_mean< -2.294121 249 121 B (0.51405622 0.48594378)
## 22) smoothness_worst< -1.556321 124 37 B (0.70161290 0.29838710)
## 44) compactness_se>=-4.563271 84 10 B (0.88095238 0.11904762)
## 88) texture_mean< 2.977058 38 0 B (1.00000000 0.00000000) *
## 89) texture_mean>=2.977058 46 10 B (0.78260870 0.21739130) *
## 45) compactness_se< -4.563271 40 13 M (0.32500000 0.67500000)
## 90) smoothness_worst>=-1.595505 9 0 B (1.00000000 0.00000000) *
## 91) smoothness_worst< -1.595505 31 4 M (0.12903226 0.87096774) *
## 23) smoothness_worst>=-1.556321 125 41 M (0.32800000 0.67200000)
## 46) smoothness_mean< -2.473387 11 0 B (1.00000000 0.00000000) *
## 47) smoothness_mean>=-2.473387 114 30 M (0.26315789 0.73684211)
## 94) symmetry_worst>=-1.52618 20 7 B (0.65000000 0.35000000) *
## 95) symmetry_worst< -1.52618 94 17 M (0.18085106 0.81914894) *
## 3) compactness_se>=-3.969125 555 244 M (0.43963964 0.56036036)
## 6) smoothness_worst< -1.586874 97 34 B (0.64948454 0.35051546)
## 12) smoothness_worst>=-1.59459 25 0 B (1.00000000 0.00000000) *
## 13) smoothness_worst< -1.59459 72 34 B (0.52777778 0.47222222)
## 26) smoothness_worst< -1.616129 47 12 B (0.74468085 0.25531915)
## 52) smoothness_worst>=-1.720903 38 5 B (0.86842105 0.13157895)
## 104) compactness_se>=-3.570653 28 1 B (0.96428571 0.03571429) *
## 105) compactness_se< -3.570653 10 4 B (0.60000000 0.40000000) *
## 53) smoothness_worst< -1.720903 9 2 M (0.22222222 0.77777778)
## 106) texture_mean>=3.103494 2 0 B (1.00000000 0.00000000) *
## 107) texture_mean< 3.103494 7 0 M (0.00000000 1.00000000) *
## 27) smoothness_worst>=-1.616129 25 3 M (0.12000000 0.88000000)
## 54) texture_mean< 2.755158 2 0 B (1.00000000 0.00000000) *
## 55) texture_mean>=2.755158 23 1 M (0.04347826 0.95652174)
## 110) compactness_se>=-3.126751 4 1 M (0.25000000 0.75000000) *
## 111) compactness_se< -3.126751 19 0 M (0.00000000 1.00000000) *
## 7) smoothness_worst>=-1.586874 458 181 M (0.39519651 0.60480349)
## 14) texture_mean>=3.192731 83 29 B (0.65060241 0.34939759)
## 28) compactness_se< -3.055765 76 22 B (0.71052632 0.28947368)
## 56) compactness_se>=-3.859901 57 10 B (0.82456140 0.17543860)
## 112) smoothness_mean>=-2.473552 50 5 B (0.90000000 0.10000000) *
## 113) smoothness_mean< -2.473552 7 2 M (0.28571429 0.71428571) *
## 57) compactness_se< -3.859901 19 7 M (0.36842105 0.63157895)
## 114) texture_mean< 3.216671 7 0 B (1.00000000 0.00000000) *
## 115) texture_mean>=3.216671 12 0 M (0.00000000 1.00000000) *
## 29) compactness_se>=-3.055765 7 0 M (0.00000000 1.00000000) *
## 15) texture_mean< 3.192731 375 127 M (0.33866667 0.66133333)
## 30) smoothness_worst>=-1.477976 133 63 B (0.52631579 0.47368421)
## 60) texture_worst< 4.879822 117 47 B (0.59829060 0.40170940)
## 120) symmetry_worst< -1.895488 25 0 B (1.00000000 0.00000000) *
## 121) symmetry_worst>=-1.895488 92 45 M (0.48913043 0.51086957) *
## 61) texture_worst>=4.879822 16 0 M (0.00000000 1.00000000) *
## 31) smoothness_worst< -1.477976 242 57 M (0.23553719 0.76446281)
## 62) compactness_se< -3.714286 57 26 M (0.45614035 0.54385965)
## 124) compactness_se>=-3.869459 24 1 B (0.95833333 0.04166667) *
## 125) compactness_se< -3.869459 33 3 M (0.09090909 0.90909091) *
## 63) compactness_se>=-3.714286 185 31 M (0.16756757 0.83243243)
## 126) smoothness_mean< -2.486577 6 0 B (1.00000000 0.00000000) *
## 127) smoothness_mean>=-2.486577 179 25 M (0.13966480 0.86033520) *
##
## $trees[[28]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 423 B (0.53618421 0.46381579)
## 2) symmetry_worst< -1.966052 223 68 B (0.69506726 0.30493274)
## 4) smoothness_worst< -1.523408 163 34 B (0.79141104 0.20858896)
## 8) compactness_se< -3.004445 149 25 B (0.83221477 0.16778523)
## 16) smoothness_mean< -2.394379 90 8 B (0.91111111 0.08888889)
## 32) compactness_se< -3.554993 65 1 B (0.98461538 0.01538462)
## 64) smoothness_worst< -1.552639 54 0 B (1.00000000 0.00000000) *
## 65) smoothness_worst>=-1.552639 11 1 B (0.90909091 0.09090909) *
## 33) compactness_se>=-3.554993 25 7 B (0.72000000 0.28000000)
## 66) texture_mean< 3.228271 21 3 B (0.85714286 0.14285714) *
## 67) texture_mean>=3.228271 4 0 M (0.00000000 1.00000000) *
## 17) smoothness_mean>=-2.394379 59 17 B (0.71186441 0.28813559)
## 34) smoothness_mean>=-2.382983 48 6 B (0.87500000 0.12500000)
## 68) symmetry_worst>=-2.424439 42 0 B (1.00000000 0.00000000) *
## 69) symmetry_worst< -2.424439 6 0 M (0.00000000 1.00000000) *
## 35) smoothness_mean< -2.382983 11 0 M (0.00000000 1.00000000) *
## 9) compactness_se>=-3.004445 14 5 M (0.35714286 0.64285714)
## 18) texture_mean< 3.076827 5 0 B (1.00000000 0.00000000) *
## 19) texture_mean>=3.076827 9 0 M (0.00000000 1.00000000) *
## 5) smoothness_worst>=-1.523408 60 26 M (0.43333333 0.56666667)
## 10) smoothness_worst>=-1.493231 26 5 B (0.80769231 0.19230769)
## 20) smoothness_mean< -2.243629 22 1 B (0.95454545 0.04545455)
## 40) compactness_se< -3.443758 20 0 B (1.00000000 0.00000000) *
## 41) compactness_se>=-3.443758 2 1 B (0.50000000 0.50000000)
## 82) texture_mean>=2.866189 1 0 B (1.00000000 0.00000000) *
## 83) texture_mean< 2.866189 1 0 M (0.00000000 1.00000000) *
## 21) smoothness_mean>=-2.243629 4 0 M (0.00000000 1.00000000) *
## 11) smoothness_worst< -1.493231 34 5 M (0.14705882 0.85294118)
## 22) texture_mean< 2.965389 7 2 B (0.71428571 0.28571429)
## 44) texture_mean>=2.856718 5 0 B (1.00000000 0.00000000) *
## 45) texture_mean< 2.856718 2 0 M (0.00000000 1.00000000) *
## 23) texture_mean>=2.965389 27 0 M (0.00000000 1.00000000) *
## 3) symmetry_worst>=-1.966052 689 334 M (0.48476052 0.51523948)
## 6) texture_mean< 3.058002 520 231 B (0.55576923 0.44423077)
## 12) texture_worst< 4.858219 490 204 B (0.58367347 0.41632653)
## 24) smoothness_worst>=-1.477389 184 51 B (0.72282609 0.27717391)
## 48) smoothness_worst< -1.472307 42 0 B (1.00000000 0.00000000) *
## 49) smoothness_worst>=-1.472307 142 51 B (0.64084507 0.35915493)
## 98) smoothness_worst>=-1.468619 133 42 B (0.68421053 0.31578947) *
## 99) smoothness_worst< -1.468619 9 0 M (0.00000000 1.00000000) *
## 25) smoothness_worst< -1.477389 306 153 B (0.50000000 0.50000000)
## 50) smoothness_worst< -1.496036 235 99 B (0.57872340 0.42127660)
## 100) smoothness_worst>=-1.519671 34 1 B (0.97058824 0.02941176) *
## 101) smoothness_worst< -1.519671 201 98 B (0.51243781 0.48756219) *
## 51) smoothness_worst>=-1.496036 71 17 M (0.23943662 0.76056338)
## 102) compactness_se< -4.216002 8 0 B (1.00000000 0.00000000) *
## 103) compactness_se>=-4.216002 63 9 M (0.14285714 0.85714286) *
## 13) texture_worst>=4.858219 30 3 M (0.10000000 0.90000000)
## 26) texture_mean>=3.04476 3 0 B (1.00000000 0.00000000) *
## 27) texture_mean< 3.04476 27 0 M (0.00000000 1.00000000) *
## 7) texture_mean>=3.058002 169 45 M (0.26627219 0.73372781)
## 14) smoothness_worst< -1.618721 11 1 B (0.90909091 0.09090909)
## 28) smoothness_mean< -2.337942 10 0 B (1.00000000 0.00000000) *
## 29) smoothness_mean>=-2.337942 1 0 M (0.00000000 1.00000000) *
## 15) smoothness_worst>=-1.618721 158 35 M (0.22151899 0.77848101)
## 30) smoothness_mean< -2.41714 37 17 M (0.45945946 0.54054054)
## 60) smoothness_mean>=-2.453967 16 0 B (1.00000000 0.00000000) *
## 61) smoothness_mean< -2.453967 21 1 M (0.04761905 0.95238095)
## 122) smoothness_mean< -2.521804 1 0 B (1.00000000 0.00000000) *
## 123) smoothness_mean>=-2.521804 20 0 M (0.00000000 1.00000000) *
## 31) smoothness_mean>=-2.41714 121 18 M (0.14876033 0.85123967)
## 62) texture_mean>=3.36829 5 1 B (0.80000000 0.20000000)
## 124) texture_mean< 3.407548 4 0 B (1.00000000 0.00000000) *
## 125) texture_mean>=3.407548 1 0 M (0.00000000 1.00000000) *
## 63) texture_mean< 3.36829 116 14 M (0.12068966 0.87931034)
## 126) smoothness_mean>=-2.301586 29 11 M (0.37931034 0.62068966) *
## 127) smoothness_mean< -2.301586 87 3 M (0.03448276 0.96551724) *
##
## $trees[[29]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 421 B (0.53837719 0.46162281)
## 2) symmetry_worst< -1.815238 382 135 B (0.64659686 0.35340314)
## 4) texture_worst< 4.897936 285 79 B (0.72280702 0.27719298)
## 8) texture_worst>=4.68481 73 6 B (0.91780822 0.08219178)
## 16) compactness_se< -2.72933 69 2 B (0.97101449 0.02898551)
## 32) texture_mean>=2.958239 59 0 B (1.00000000 0.00000000) *
## 33) texture_mean< 2.958239 10 2 B (0.80000000 0.20000000)
## 66) texture_mean< 2.946804 8 0 B (1.00000000 0.00000000) *
## 67) texture_mean>=2.946804 2 0 M (0.00000000 1.00000000) *
## 17) compactness_se>=-2.72933 4 0 M (0.00000000 1.00000000) *
## 9) texture_worst< 4.68481 212 73 B (0.65566038 0.34433962)
## 18) compactness_se>=-3.483667 68 9 B (0.86764706 0.13235294)
## 36) texture_mean< 3.064089 63 4 B (0.93650794 0.06349206)
## 72) smoothness_worst< -1.482504 49 0 B (1.00000000 0.00000000) *
## 73) smoothness_worst>=-1.482504 14 4 B (0.71428571 0.28571429) *
## 37) texture_mean>=3.064089 5 0 M (0.00000000 1.00000000) *
## 19) compactness_se< -3.483667 144 64 B (0.55555556 0.44444444)
## 38) texture_worst< 4.612323 117 42 B (0.64102564 0.35897436)
## 76) smoothness_worst< -1.427424 108 33 B (0.69444444 0.30555556) *
## 77) smoothness_worst>=-1.427424 9 0 M (0.00000000 1.00000000) *
## 39) texture_worst>=4.612323 27 5 M (0.18518519 0.81481481)
## 78) smoothness_worst>=-1.452987 5 0 B (1.00000000 0.00000000) *
## 79) smoothness_worst< -1.452987 22 0 M (0.00000000 1.00000000) *
## 5) texture_worst>=4.897936 97 41 M (0.42268041 0.57731959)
## 10) texture_worst>=5.011215 66 26 B (0.60606061 0.39393939)
## 20) compactness_se< -3.413706 54 14 B (0.74074074 0.25925926)
## 40) smoothness_mean< -2.397526 35 3 B (0.91428571 0.08571429)
## 80) texture_mean< 3.431166 33 1 B (0.96969697 0.03030303) *
## 81) texture_mean>=3.431166 2 0 M (0.00000000 1.00000000) *
## 41) smoothness_mean>=-2.397526 19 8 M (0.42105263 0.57894737)
## 82) symmetry_worst< -2.133872 8 0 B (1.00000000 0.00000000) *
## 83) symmetry_worst>=-2.133872 11 0 M (0.00000000 1.00000000) *
## 21) compactness_se>=-3.413706 12 0 M (0.00000000 1.00000000) *
## 11) texture_worst< 5.011215 31 1 M (0.03225806 0.96774194)
## 22) compactness_se< -4.706178 1 0 B (1.00000000 0.00000000) *
## 23) compactness_se>=-4.706178 30 0 M (0.00000000 1.00000000) *
## 3) symmetry_worst>=-1.815238 530 244 M (0.46037736 0.53962264)
## 6) smoothness_worst>=-1.537035 367 170 B (0.53678474 0.46321526)
## 12) compactness_se< -3.955455 133 42 B (0.68421053 0.31578947)
## 24) texture_mean< 2.956197 68 6 B (0.91176471 0.08823529)
## 48) texture_worst< 4.514456 45 0 B (1.00000000 0.00000000) *
## 49) texture_worst>=4.514456 23 6 B (0.73913043 0.26086957)
## 98) texture_worst>=4.527768 18 1 B (0.94444444 0.05555556) *
## 99) texture_worst< 4.527768 5 0 M (0.00000000 1.00000000) *
## 25) texture_mean>=2.956197 65 29 M (0.44615385 0.55384615)
## 50) texture_mean>=2.982883 51 22 B (0.56862745 0.43137255)
## 100) texture_mean< 3.082128 30 6 B (0.80000000 0.20000000) *
## 101) texture_mean>=3.082128 21 5 M (0.23809524 0.76190476) *
## 51) texture_mean< 2.982883 14 0 M (0.00000000 1.00000000) *
## 13) compactness_se>=-3.955455 234 106 M (0.45299145 0.54700855)
## 26) compactness_se>=-3.92342 212 106 B (0.50000000 0.50000000)
## 52) smoothness_mean< -2.36186 47 10 B (0.78723404 0.21276596)
## 104) smoothness_mean>=-2.453967 41 5 B (0.87804878 0.12195122) *
## 105) smoothness_mean< -2.453967 6 1 M (0.16666667 0.83333333) *
## 53) smoothness_mean>=-2.36186 165 69 M (0.41818182 0.58181818)
## 106) texture_mean< 2.927988 76 28 B (0.63157895 0.36842105) *
## 107) texture_mean>=2.927988 89 21 M (0.23595506 0.76404494) *
## 27) compactness_se< -3.92342 22 0 M (0.00000000 1.00000000) *
## 7) smoothness_worst< -1.537035 163 47 M (0.28834356 0.71165644)
## 14) smoothness_mean< -2.528181 8 0 B (1.00000000 0.00000000) *
## 15) smoothness_mean>=-2.528181 155 39 M (0.25161290 0.74838710)
## 30) symmetry_worst>=-1.750623 89 32 M (0.35955056 0.64044944)
## 60) symmetry_worst< -1.549426 43 16 B (0.62790698 0.37209302)
## 120) compactness_se>=-4.283814 29 5 B (0.82758621 0.17241379) *
## 121) compactness_se< -4.283814 14 3 M (0.21428571 0.78571429) *
## 61) symmetry_worst>=-1.549426 46 5 M (0.10869565 0.89130435)
## 122) texture_worst< 4.496329 2 0 B (1.00000000 0.00000000) *
## 123) texture_worst>=4.496329 44 3 M (0.06818182 0.93181818) *
## 31) symmetry_worst< -1.750623 66 7 M (0.10606061 0.89393939)
## 62) smoothness_mean>=-2.406089 17 7 M (0.41176471 0.58823529)
## 124) smoothness_mean< -2.38134 7 0 B (1.00000000 0.00000000) *
## 125) smoothness_mean>=-2.38134 10 0 M (0.00000000 1.00000000) *
## 63) smoothness_mean< -2.406089 49 0 M (0.00000000 1.00000000) *
##
## $trees[[30]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 452 M (0.49561404 0.50438596)
## 2) compactness_se< -4.706178 32 3 B (0.90625000 0.09375000)
## 4) symmetry_worst< -1.170399 29 0 B (1.00000000 0.00000000) *
## 5) symmetry_worst>=-1.170399 3 0 M (0.00000000 1.00000000) *
## 3) compactness_se>=-4.706178 880 423 M (0.48068182 0.51931818)
## 6) texture_worst< 4.168738 85 26 B (0.69411765 0.30588235)
## 12) symmetry_worst< -1.086115 75 16 B (0.78666667 0.21333333)
## 24) smoothness_worst>=-1.59351 64 9 B (0.85937500 0.14062500)
## 48) texture_mean>=2.515298 60 6 B (0.90000000 0.10000000)
## 96) texture_mean< 2.805441 56 4 B (0.92857143 0.07142857) *
## 97) texture_mean>=2.805441 4 2 B (0.50000000 0.50000000) *
## 49) texture_mean< 2.515298 4 1 M (0.25000000 0.75000000)
## 98) texture_mean< 2.434062 1 0 B (1.00000000 0.00000000) *
## 99) texture_mean>=2.434062 3 0 M (0.00000000 1.00000000) *
## 25) smoothness_worst< -1.59351 11 4 M (0.36363636 0.63636364)
## 50) smoothness_worst< -1.607498 4 0 B (1.00000000 0.00000000) *
## 51) smoothness_worst>=-1.607498 7 0 M (0.00000000 1.00000000) *
## 13) symmetry_worst>=-1.086115 10 0 M (0.00000000 1.00000000) *
## 7) texture_worst>=4.168738 795 364 M (0.45786164 0.54213836)
## 14) texture_mean>=2.761589 743 358 M (0.48183042 0.51816958)
## 28) compactness_se>=-4.676462 714 355 M (0.49719888 0.50280112)
## 56) symmetry_worst< -1.366937 661 317 B (0.52042360 0.47957640)
## 112) texture_mean< 2.824054 19 0 B (1.00000000 0.00000000) *
## 113) texture_mean>=2.824054 642 317 B (0.50623053 0.49376947) *
## 57) symmetry_worst>=-1.366937 53 11 M (0.20754717 0.79245283)
## 114) smoothness_worst< -1.49848 16 6 B (0.62500000 0.37500000) *
## 115) smoothness_worst>=-1.49848 37 1 M (0.02702703 0.97297297) *
## 29) compactness_se< -4.676462 29 3 M (0.10344828 0.89655172)
## 58) smoothness_mean>=-2.443464 3 0 B (1.00000000 0.00000000) *
## 59) smoothness_mean< -2.443464 26 0 M (0.00000000 1.00000000) *
## 15) texture_mean< 2.761589 52 6 M (0.11538462 0.88461538)
## 30) compactness_se< -3.892047 6 0 B (1.00000000 0.00000000) *
## 31) compactness_se>=-3.892047 46 0 M (0.00000000 1.00000000) *
##
## $trees[[31]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 399 M (0.43750000 0.56250000)
## 2) smoothness_mean< -2.506908 50 11 B (0.78000000 0.22000000)
## 4) texture_mean< 2.960617 18 0 B (1.00000000 0.00000000) *
## 5) texture_mean>=2.960617 32 11 B (0.65625000 0.34375000)
## 10) texture_mean>=2.986158 21 3 B (0.85714286 0.14285714)
## 20) smoothness_worst>=-1.714091 17 0 B (1.00000000 0.00000000) *
## 21) smoothness_worst< -1.714091 4 1 M (0.25000000 0.75000000)
## 42) texture_mean>=3.103494 1 0 B (1.00000000 0.00000000) *
## 43) texture_mean< 3.103494 3 0 M (0.00000000 1.00000000) *
## 11) texture_mean< 2.986158 11 3 M (0.27272727 0.72727273)
## 22) smoothness_mean< -2.546123 3 0 B (1.00000000 0.00000000) *
## 23) smoothness_mean>=-2.546123 8 0 M (0.00000000 1.00000000) *
## 3) smoothness_mean>=-2.506908 862 360 M (0.41763341 0.58236659)
## 6) texture_worst>=4.626933 360 178 B (0.50555556 0.49444444)
## 12) smoothness_worst>=-1.400053 52 8 B (0.84615385 0.15384615)
## 24) texture_mean< 3.222134 48 4 B (0.91666667 0.08333333)
## 48) symmetry_worst< -1.405153 45 1 B (0.97777778 0.02222222)
## 96) compactness_se< -2.783552 44 0 B (1.00000000 0.00000000) *
## 97) compactness_se>=-2.783552 1 0 M (0.00000000 1.00000000) *
## 49) symmetry_worst>=-1.405153 3 0 M (0.00000000 1.00000000) *
## 25) texture_mean>=3.222134 4 0 M (0.00000000 1.00000000) *
## 13) smoothness_worst< -1.400053 308 138 M (0.44805195 0.55194805)
## 26) compactness_se< -3.379083 251 123 B (0.50996016 0.49003984)
## 52) compactness_se>=-3.502612 47 7 B (0.85106383 0.14893617)
## 104) smoothness_mean>=-2.355655 37 0 B (1.00000000 0.00000000) *
## 105) smoothness_mean< -2.355655 10 3 M (0.30000000 0.70000000) *
## 53) compactness_se< -3.502612 204 88 M (0.43137255 0.56862745)
## 106) symmetry_worst< -2.207988 19 0 B (1.00000000 0.00000000) *
## 107) symmetry_worst>=-2.207988 185 69 M (0.37297297 0.62702703) *
## 27) compactness_se>=-3.379083 57 10 M (0.17543860 0.82456140)
## 54) smoothness_worst< -1.647098 3 0 B (1.00000000 0.00000000) *
## 55) smoothness_worst>=-1.647098 54 7 M (0.12962963 0.87037037)
## 110) texture_mean< 3.038537 13 5 M (0.38461538 0.61538462) *
## 111) texture_mean>=3.038537 41 2 M (0.04878049 0.95121951) *
## 7) texture_worst< 4.626933 502 178 M (0.35458167 0.64541833)
## 14) texture_mean< 2.708379 28 5 B (0.82142857 0.17857143)
## 28) symmetry_worst< -1.577652 19 0 B (1.00000000 0.00000000) *
## 29) symmetry_worst>=-1.577652 9 4 M (0.44444444 0.55555556)
## 58) texture_mean>=2.553793 5 1 B (0.80000000 0.20000000)
## 116) compactness_se< -3.3026 4 0 B (1.00000000 0.00000000) *
## 117) compactness_se>=-3.3026 1 0 M (0.00000000 1.00000000) *
## 59) texture_mean< 2.553793 4 0 M (0.00000000 1.00000000) *
## 15) texture_mean>=2.708379 474 155 M (0.32700422 0.67299578)
## 30) compactness_se< -3.673868 260 112 M (0.43076923 0.56923077)
## 60) smoothness_mean>=-2.233531 36 5 B (0.86111111 0.13888889)
## 120) texture_worst< 4.489843 25 0 B (1.00000000 0.00000000) *
## 121) texture_worst>=4.489843 11 5 B (0.54545455 0.45454545) *
## 61) smoothness_mean< -2.233531 224 81 M (0.36160714 0.63839286)
## 122) texture_mean< 2.756519 12 0 B (1.00000000 0.00000000) *
## 123) texture_mean>=2.756519 212 69 M (0.32547170 0.67452830) *
## 31) compactness_se>=-3.673868 214 43 M (0.20093458 0.79906542)
## 62) compactness_se>=-2.716917 6 0 B (1.00000000 0.00000000) *
## 63) compactness_se< -2.716917 208 37 M (0.17788462 0.82211538)
## 126) symmetry_worst< -1.816281 71 22 M (0.30985915 0.69014085) *
## 127) symmetry_worst>=-1.816281 137 15 M (0.10948905 0.89051095) *
##
## $trees[[32]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 428 B (0.53070175 0.46929825)
## 2) texture_mean>=3.192731 99 27 B (0.72727273 0.27272727)
## 4) texture_worst>=4.881566 92 20 B (0.78260870 0.21739130)
## 8) symmetry_worst< -1.41032 88 16 B (0.81818182 0.18181818)
## 16) texture_worst< 5.194184 67 8 B (0.88059701 0.11940299)
## 32) symmetry_worst>=-2.242858 65 6 B (0.90769231 0.09230769)
## 64) smoothness_worst< -1.441559 49 2 B (0.95918367 0.04081633) *
## 65) smoothness_worst>=-1.441559 16 4 B (0.75000000 0.25000000) *
## 33) symmetry_worst< -2.242858 2 0 M (0.00000000 1.00000000) *
## 17) texture_worst>=5.194184 21 8 B (0.61904762 0.38095238)
## 34) texture_mean>=3.332536 16 3 B (0.81250000 0.18750000)
## 68) symmetry_worst< -1.612163 14 1 B (0.92857143 0.07142857) *
## 69) symmetry_worst>=-1.612163 2 0 M (0.00000000 1.00000000) *
## 35) texture_mean< 3.332536 5 0 M (0.00000000 1.00000000) *
## 9) symmetry_worst>=-1.41032 4 0 M (0.00000000 1.00000000) *
## 5) texture_worst< 4.881566 7 0 M (0.00000000 1.00000000) *
## 3) texture_mean< 3.192731 813 401 B (0.50676507 0.49323493)
## 6) symmetry_worst< -1.863339 253 98 B (0.61264822 0.38735178)
## 12) texture_worst< 4.895326 231 77 B (0.66666667 0.33333333)
## 24) texture_mean>=2.775685 187 49 B (0.73796791 0.26203209)
## 48) texture_mean< 2.976803 87 9 B (0.89655172 0.10344828)
## 96) symmetry_worst>=-1.990832 52 0 B (1.00000000 0.00000000) *
## 97) symmetry_worst< -1.990832 35 9 B (0.74285714 0.25714286) *
## 49) texture_mean>=2.976803 100 40 B (0.60000000 0.40000000)
## 98) texture_worst>=4.530419 78 20 B (0.74358974 0.25641026) *
## 99) texture_worst< 4.530419 22 2 M (0.09090909 0.90909091) *
## 25) texture_mean< 2.775685 44 16 M (0.36363636 0.63636364)
## 50) texture_mean< 2.758426 15 0 B (1.00000000 0.00000000) *
## 51) texture_mean>=2.758426 29 1 M (0.03448276 0.96551724)
## 102) smoothness_mean< -2.479158 1 0 B (1.00000000 0.00000000) *
## 103) smoothness_mean>=-2.479158 28 0 M (0.00000000 1.00000000) *
## 13) texture_worst>=4.895326 22 1 M (0.04545455 0.95454545)
## 26) texture_worst>=5.15394 1 0 B (1.00000000 0.00000000) *
## 27) texture_worst< 5.15394 21 0 M (0.00000000 1.00000000) *
## 7) symmetry_worst>=-1.863339 560 257 M (0.45892857 0.54107143)
## 14) symmetry_worst>=-1.749963 396 190 B (0.52020202 0.47979798)
## 28) smoothness_worst< -1.388752 371 167 B (0.54986523 0.45013477)
## 56) smoothness_worst>=-1.439294 72 13 B (0.81944444 0.18055556)
## 112) symmetry_worst< -1.36527 67 8 B (0.88059701 0.11940299) *
## 113) symmetry_worst>=-1.36527 5 0 M (0.00000000 1.00000000) *
## 57) smoothness_worst< -1.439294 299 145 M (0.48494983 0.51505017)
## 114) smoothness_worst< -1.451541 269 125 B (0.53531599 0.46468401) *
## 115) smoothness_worst>=-1.451541 30 1 M (0.03333333 0.96666667) *
## 29) smoothness_worst>=-1.388752 25 2 M (0.08000000 0.92000000)
## 58) texture_mean< 2.692775 2 0 B (1.00000000 0.00000000) *
## 59) texture_mean>=2.692775 23 0 M (0.00000000 1.00000000) *
## 15) symmetry_worst< -1.749963 164 51 M (0.31097561 0.68902439)
## 30) symmetry_worst< -1.786753 98 48 M (0.48979592 0.51020408)
## 60) symmetry_worst>=-1.85615 82 34 B (0.58536585 0.41463415)
## 120) smoothness_mean< -2.327576 41 6 B (0.85365854 0.14634146) *
## 121) smoothness_mean>=-2.327576 41 13 M (0.31707317 0.68292683) *
## 61) symmetry_worst< -1.85615 16 0 M (0.00000000 1.00000000) *
## 31) symmetry_worst>=-1.786753 66 3 M (0.04545455 0.95454545)
## 62) smoothness_worst>=-1.385102 1 0 B (1.00000000 0.00000000) *
## 63) smoothness_worst< -1.385102 65 2 M (0.03076923 0.96923077)
## 126) texture_worst< 4.422428 10 2 M (0.20000000 0.80000000) *
## 127) texture_worst>=4.422428 55 0 M (0.00000000 1.00000000) *
##
## $trees[[33]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 401 M (0.43969298 0.56030702)
## 2) smoothness_worst>=-1.401479 74 24 B (0.67567568 0.32432432)
## 4) symmetry_worst< -1.607739 46 6 B (0.86956522 0.13043478)
## 8) compactness_se< -3.001392 42 2 B (0.95238095 0.04761905)
## 16) texture_worst< 5.106195 40 0 B (1.00000000 0.00000000) *
## 17) texture_worst>=5.106195 2 0 M (0.00000000 1.00000000) *
## 9) compactness_se>=-3.001392 4 0 M (0.00000000 1.00000000) *
## 5) symmetry_worst>=-1.607739 28 10 M (0.35714286 0.64285714)
## 10) smoothness_mean< -2.219224 10 1 B (0.90000000 0.10000000)
## 20) texture_mean>=2.926371 9 0 B (1.00000000 0.00000000) *
## 21) texture_mean< 2.926371 1 0 M (0.00000000 1.00000000) *
## 11) smoothness_mean>=-2.219224 18 1 M (0.05555556 0.94444444)
## 22) texture_mean< 2.688296 1 0 B (1.00000000 0.00000000) *
## 23) texture_mean>=2.688296 17 0 M (0.00000000 1.00000000) *
## 3) smoothness_worst< -1.401479 838 351 M (0.41885442 0.58114558)
## 6) symmetry_worst< -1.713275 434 211 M (0.48617512 0.51382488)
## 12) smoothness_mean>=-2.28279 86 25 B (0.70930233 0.29069767)
## 24) symmetry_worst>=-1.994978 63 10 B (0.84126984 0.15873016)
## 48) smoothness_worst>=-1.524868 61 8 B (0.86885246 0.13114754)
## 96) smoothness_worst< -1.433156 56 5 B (0.91071429 0.08928571) *
## 97) smoothness_worst>=-1.433156 5 2 M (0.40000000 0.60000000) *
## 49) smoothness_worst< -1.524868 2 0 M (0.00000000 1.00000000) *
## 25) symmetry_worst< -1.994978 23 8 M (0.34782609 0.65217391)
## 50) texture_mean< 3.018626 11 3 B (0.72727273 0.27272727)
## 100) smoothness_worst>=-1.56036 8 0 B (1.00000000 0.00000000) *
## 101) smoothness_worst< -1.56036 3 0 M (0.00000000 1.00000000) *
## 51) texture_mean>=3.018626 12 0 M (0.00000000 1.00000000) *
## 13) smoothness_mean< -2.28279 348 150 M (0.43103448 0.56896552)
## 26) compactness_se< -4.705732 14 0 B (1.00000000 0.00000000) *
## 27) compactness_se>=-4.705732 334 136 M (0.40718563 0.59281437)
## 54) compactness_se>=-4.493635 286 130 M (0.45454545 0.54545455)
## 108) symmetry_worst< -2.202388 24 3 B (0.87500000 0.12500000) *
## 109) symmetry_worst>=-2.202388 262 109 M (0.41603053 0.58396947) *
## 55) compactness_se< -4.493635 48 6 M (0.12500000 0.87500000)
## 110) texture_mean< 2.846651 3 0 B (1.00000000 0.00000000) *
## 111) texture_mean>=2.846651 45 3 M (0.06666667 0.93333333) *
## 7) symmetry_worst>=-1.713275 404 140 M (0.34653465 0.65346535)
## 14) texture_mean>=3.21466 36 8 B (0.77777778 0.22222222)
## 28) texture_mean< 3.260913 28 0 B (1.00000000 0.00000000) *
## 29) texture_mean>=3.260913 8 0 M (0.00000000 1.00000000) *
## 15) texture_mean< 3.21466 368 112 M (0.30434783 0.69565217)
## 30) texture_worst< 4.818867 311 110 M (0.35369775 0.64630225)
## 60) texture_mean>=3.110176 17 1 B (0.94117647 0.05882353)
## 120) texture_mean< 3.137421 16 0 B (1.00000000 0.00000000) *
## 121) texture_mean>=3.137421 1 0 M (0.00000000 1.00000000) *
## 61) texture_mean< 3.110176 294 94 M (0.31972789 0.68027211)
## 122) compactness_se< -3.821965 123 60 M (0.48780488 0.51219512) *
## 123) compactness_se>=-3.821965 171 34 M (0.19883041 0.80116959) *
## 31) texture_worst>=4.818867 57 2 M (0.03508772 0.96491228)
## 62) smoothness_mean< -2.54971 1 0 B (1.00000000 0.00000000) *
## 63) smoothness_mean>=-2.54971 56 1 M (0.01785714 0.98214286)
## 126) symmetry_worst< -1.664056 8 1 M (0.12500000 0.87500000) *
## 127) symmetry_worst>=-1.664056 48 0 M (0.00000000 1.00000000) *
##
## $trees[[34]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 430 B (0.52850877 0.47149123)
## 2) smoothness_mean>=-2.350891 461 191 B (0.58568330 0.41431670)
## 4) smoothness_mean< -2.332581 44 2 B (0.95454545 0.04545455)
## 8) smoothness_worst< -1.435092 43 1 B (0.97674419 0.02325581)
## 16) symmetry_worst>=-2.189951 42 0 B (1.00000000 0.00000000) *
## 17) symmetry_worst< -2.189951 1 0 M (0.00000000 1.00000000) *
## 9) smoothness_worst>=-1.435092 1 0 M (0.00000000 1.00000000) *
## 5) smoothness_mean>=-2.332581 417 189 B (0.54676259 0.45323741)
## 10) smoothness_worst>=-1.562856 392 165 B (0.57908163 0.42091837)
## 20) symmetry_worst< -1.529476 294 101 B (0.65646259 0.34353741)
## 40) texture_worst< 5.073596 284 91 B (0.67957746 0.32042254)
## 80) smoothness_mean>=-2.328057 275 82 B (0.70181818 0.29818182) *
## 81) smoothness_mean< -2.328057 9 0 M (0.00000000 1.00000000) *
## 41) texture_worst>=5.073596 10 0 M (0.00000000 1.00000000) *
## 21) symmetry_worst>=-1.529476 98 34 M (0.34693878 0.65306122)
## 42) texture_mean< 2.77286 24 8 B (0.66666667 0.33333333)
## 84) smoothness_mean< -2.081877 16 1 B (0.93750000 0.06250000) *
## 85) smoothness_mean>=-2.081877 8 1 M (0.12500000 0.87500000) *
## 43) texture_mean>=2.77286 74 18 M (0.24324324 0.75675676)
## 86) symmetry_worst>=-1.120651 11 2 B (0.81818182 0.18181818) *
## 87) symmetry_worst< -1.120651 63 9 M (0.14285714 0.85714286) *
## 11) smoothness_worst< -1.562856 25 1 M (0.04000000 0.96000000)
## 22) smoothness_mean>=-2.277089 6 1 M (0.16666667 0.83333333)
## 44) texture_mean< 2.898795 1 0 B (1.00000000 0.00000000) *
## 45) texture_mean>=2.898795 5 0 M (0.00000000 1.00000000) *
## 23) smoothness_mean< -2.277089 19 0 M (0.00000000 1.00000000) *
## 3) smoothness_mean< -2.350891 451 212 M (0.47006652 0.52993348)
## 6) smoothness_mean< -2.360532 411 200 B (0.51338200 0.48661800)
## 12) texture_mean< 2.868712 72 16 B (0.77777778 0.22222222)
## 24) smoothness_worst>=-1.602623 62 9 B (0.85483871 0.14516129)
## 48) smoothness_worst< -1.452493 58 6 B (0.89655172 0.10344828)
## 96) compactness_se>=-4.133653 32 0 B (1.00000000 0.00000000) *
## 97) compactness_se< -4.133653 26 6 B (0.76923077 0.23076923) *
## 49) smoothness_worst>=-1.452493 4 1 M (0.25000000 0.75000000)
## 98) texture_mean< 2.764784 1 0 B (1.00000000 0.00000000) *
## 99) texture_mean>=2.764784 3 0 M (0.00000000 1.00000000) *
## 25) smoothness_worst< -1.602623 10 3 M (0.30000000 0.70000000)
## 50) compactness_se< -3.815858 3 0 B (1.00000000 0.00000000) *
## 51) compactness_se>=-3.815858 7 0 M (0.00000000 1.00000000) *
## 13) texture_mean>=2.868712 339 155 M (0.45722714 0.54277286)
## 26) texture_worst>=5.329405 16 0 B (1.00000000 0.00000000) *
## 27) texture_worst< 5.329405 323 139 M (0.43034056 0.56965944)
## 54) smoothness_worst>=-1.427204 15 0 B (1.00000000 0.00000000) *
## 55) smoothness_worst< -1.427204 308 124 M (0.40259740 0.59740260)
## 110) smoothness_worst< -1.584388 105 41 B (0.60952381 0.39047619) *
## 111) smoothness_worst>=-1.584388 203 60 M (0.29556650 0.70443350) *
## 7) smoothness_mean>=-2.360532 40 1 M (0.02500000 0.97500000)
## 14) texture_mean< 2.698419 1 0 B (1.00000000 0.00000000) *
## 15) texture_mean>=2.698419 39 0 M (0.00000000 1.00000000) *
##
## $trees[[35]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 450 M (0.49342105 0.50657895)
## 2) compactness_se< -3.66733 515 220 B (0.57281553 0.42718447)
## 4) texture_mean< 2.956197 264 85 B (0.67803030 0.32196970)
## 8) texture_mean>=2.922355 66 7 B (0.89393939 0.10606061)
## 16) compactness_se< -3.747374 63 4 B (0.93650794 0.06349206)
## 32) texture_worst< 4.705422 61 2 B (0.96721311 0.03278689)
## 64) smoothness_worst< -1.473478 59 0 B (1.00000000 0.00000000) *
## 65) smoothness_worst>=-1.473478 2 0 M (0.00000000 1.00000000) *
## 33) texture_worst>=4.705422 2 0 M (0.00000000 1.00000000) *
## 17) compactness_se>=-3.747374 3 0 M (0.00000000 1.00000000) *
## 9) texture_mean< 2.922355 198 78 B (0.60606061 0.39393939)
## 18) smoothness_mean>=-2.28772 46 4 B (0.91304348 0.08695652)
## 36) texture_worst< 4.740635 43 1 B (0.97674419 0.02325581)
## 72) smoothness_mean< -2.11834 41 0 B (1.00000000 0.00000000) *
## 73) smoothness_mean>=-2.11834 2 1 B (0.50000000 0.50000000) *
## 37) texture_worst>=4.740635 3 0 M (0.00000000 1.00000000) *
## 19) smoothness_mean< -2.28772 152 74 B (0.51315789 0.48684211)
## 38) texture_mean< 2.756519 24 0 B (1.00000000 0.00000000) *
## 39) texture_mean>=2.756519 128 54 M (0.42187500 0.57812500)
## 78) smoothness_worst< -1.501474 89 38 B (0.57303371 0.42696629) *
## 79) smoothness_worst>=-1.501474 39 3 M (0.07692308 0.92307692) *
## 5) texture_mean>=2.956197 251 116 M (0.46215139 0.53784861)
## 10) compactness_se>=-3.869459 43 9 B (0.79069767 0.20930233)
## 20) smoothness_worst< -1.417195 40 6 B (0.85000000 0.15000000)
## 40) compactness_se< -3.721197 27 1 B (0.96296296 0.03703704)
## 80) symmetry_worst< -1.482402 26 0 B (1.00000000 0.00000000) *
## 81) symmetry_worst>=-1.482402 1 0 M (0.00000000 1.00000000) *
## 41) compactness_se>=-3.721197 13 5 B (0.61538462 0.38461538)
## 82) texture_mean< 3.083513 8 0 B (1.00000000 0.00000000) *
## 83) texture_mean>=3.083513 5 0 M (0.00000000 1.00000000) *
## 21) smoothness_worst>=-1.417195 3 0 M (0.00000000 1.00000000) *
## 11) compactness_se< -3.869459 208 82 M (0.39423077 0.60576923)
## 22) texture_worst>=4.749969 130 64 B (0.50769231 0.49230769)
## 44) texture_worst< 4.822896 23 0 B (1.00000000 0.00000000) *
## 45) texture_worst>=4.822896 107 43 M (0.40186916 0.59813084)
## 90) texture_mean>=3.210432 49 17 B (0.65306122 0.34693878) *
## 91) texture_mean< 3.210432 58 11 M (0.18965517 0.81034483) *
## 23) texture_worst< 4.749969 78 16 M (0.20512821 0.79487179)
## 46) symmetry_worst< -1.953246 22 11 B (0.50000000 0.50000000)
## 92) texture_mean< 3.183122 16 5 B (0.68750000 0.31250000) *
## 93) texture_mean>=3.183122 6 0 M (0.00000000 1.00000000) *
## 47) symmetry_worst>=-1.953246 56 5 M (0.08928571 0.91071429)
## 94) symmetry_worst>=-1.490299 11 4 M (0.36363636 0.63636364) *
## 95) symmetry_worst< -1.490299 45 1 M (0.02222222 0.97777778) *
## 3) compactness_se>=-3.66733 397 155 M (0.39042821 0.60957179)
## 6) symmetry_worst< -1.816281 139 62 B (0.55395683 0.44604317)
## 12) smoothness_mean< -2.377849 74 19 B (0.74324324 0.25675676)
## 24) smoothness_mean>=-2.438756 39 0 B (1.00000000 0.00000000) *
## 25) smoothness_mean< -2.438756 35 16 M (0.45714286 0.54285714)
## 50) symmetry_worst>=-2.044741 14 2 B (0.85714286 0.14285714)
## 100) smoothness_worst< -1.493511 12 0 B (1.00000000 0.00000000) *
## 101) smoothness_worst>=-1.493511 2 0 M (0.00000000 1.00000000) *
## 51) symmetry_worst< -2.044741 21 4 M (0.19047619 0.80952381)
## 102) compactness_se< -3.514597 2 0 B (1.00000000 0.00000000) *
## 103) compactness_se>=-3.514597 19 2 M (0.10526316 0.89473684) *
## 13) smoothness_mean>=-2.377849 65 22 M (0.33846154 0.66153846)
## 26) texture_worst< 4.248666 12 2 B (0.83333333 0.16666667)
## 52) texture_mean< 2.874386 10 0 B (1.00000000 0.00000000) *
## 53) texture_mean>=2.874386 2 0 M (0.00000000 1.00000000) *
## 27) texture_worst>=4.248666 53 12 M (0.22641509 0.77358491)
## 54) texture_mean>=3.284902 3 0 B (1.00000000 0.00000000) *
## 55) texture_mean< 3.284902 50 9 M (0.18000000 0.82000000)
## 110) smoothness_worst>=-1.474648 5 1 B (0.80000000 0.20000000) *
## 111) smoothness_worst< -1.474648 45 5 M (0.11111111 0.88888889) *
## 7) symmetry_worst>=-1.816281 258 78 M (0.30232558 0.69767442)
## 14) compactness_se>=-3.494301 191 78 M (0.40837696 0.59162304)
## 28) compactness_se< -3.484318 15 0 B (1.00000000 0.00000000) *
## 29) compactness_se>=-3.484318 176 63 M (0.35795455 0.64204545)
## 58) compactness_se>=-2.759266 15 1 B (0.93333333 0.06666667)
## 116) smoothness_mean< -2.093543 14 0 B (1.00000000 0.00000000) *
## 117) smoothness_mean>=-2.093543 1 0 M (0.00000000 1.00000000) *
## 59) compactness_se< -2.759266 161 49 M (0.30434783 0.69565217)
## 118) smoothness_mean>=-2.106736 27 9 B (0.66666667 0.33333333) *
## 119) smoothness_mean< -2.106736 134 31 M (0.23134328 0.76865672) *
## 15) compactness_se< -3.494301 67 0 M (0.00000000 1.00000000) *
##
## $trees[[36]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 443 B (0.51425439 0.48574561)
## 2) smoothness_mean< -2.21595 809 372 B (0.54017305 0.45982695)
## 4) smoothness_mean>=-2.231196 40 2 B (0.95000000 0.05000000)
## 8) texture_mean< 3.093624 38 0 B (1.00000000 0.00000000) *
## 9) texture_mean>=3.093624 2 0 M (0.00000000 1.00000000) *
## 5) smoothness_mean< -2.231196 769 370 B (0.51885566 0.48114434)
## 10) smoothness_mean< -2.367284 399 168 B (0.57894737 0.42105263)
## 20) compactness_se>=-3.230243 50 6 B (0.88000000 0.12000000)
## 40) smoothness_worst>=-1.720903 47 3 B (0.93617021 0.06382979)
## 80) texture_mean< 3.297828 46 2 B (0.95652174 0.04347826) *
## 81) texture_mean>=3.297828 1 0 M (0.00000000 1.00000000) *
## 41) smoothness_worst< -1.720903 3 0 M (0.00000000 1.00000000) *
## 21) compactness_se< -3.230243 349 162 B (0.53581662 0.46418338)
## 42) symmetry_worst< -1.815934 159 52 B (0.67295597 0.32704403)
## 84) texture_mean< 2.963209 52 7 B (0.86538462 0.13461538) *
## 85) texture_mean>=2.963209 107 45 B (0.57943925 0.42056075) *
## 43) symmetry_worst>=-1.815934 190 80 M (0.42105263 0.57894737)
## 86) texture_worst>=4.901515 29 6 B (0.79310345 0.20689655) *
## 87) texture_worst< 4.901515 161 57 M (0.35403727 0.64596273) *
## 11) smoothness_mean>=-2.367284 370 168 M (0.45405405 0.54594595)
## 22) compactness_se< -3.991189 119 44 B (0.63025210 0.36974790)
## 44) compactness_se>=-4.353745 82 20 B (0.75609756 0.24390244)
## 88) symmetry_worst< -1.614622 59 3 B (0.94915254 0.05084746) *
## 89) symmetry_worst>=-1.614622 23 6 M (0.26086957 0.73913043) *
## 45) compactness_se< -4.353745 37 13 M (0.35135135 0.64864865)
## 90) symmetry_worst>=-1.506254 10 0 B (1.00000000 0.00000000) *
## 91) symmetry_worst< -1.506254 27 3 M (0.11111111 0.88888889) *
## 23) compactness_se>=-3.991189 251 93 M (0.37051793 0.62948207)
## 46) symmetry_worst>=-1.770907 151 75 B (0.50331126 0.49668874)
## 92) smoothness_mean< -2.296604 92 29 B (0.68478261 0.31521739) *
## 93) smoothness_mean>=-2.296604 59 13 M (0.22033898 0.77966102) *
## 47) symmetry_worst< -1.770907 100 17 M (0.17000000 0.83000000)
## 94) symmetry_worst< -1.894024 51 16 M (0.31372549 0.68627451) *
## 95) symmetry_worst>=-1.894024 49 1 M (0.02040816 0.97959184) *
## 3) smoothness_mean>=-2.21595 103 32 M (0.31067961 0.68932039)
## 6) symmetry_worst< -1.653707 49 22 B (0.55102041 0.44897959)
## 12) smoothness_worst>=-1.427418 23 1 B (0.95652174 0.04347826)
## 24) texture_worst< 5.106195 22 0 B (1.00000000 0.00000000) *
## 25) texture_worst>=5.106195 1 0 M (0.00000000 1.00000000) *
## 13) smoothness_worst< -1.427418 26 5 M (0.19230769 0.80769231)
## 26) texture_worst< 4.490422 7 2 B (0.71428571 0.28571429)
## 52) smoothness_worst>=-1.56036 5 0 B (1.00000000 0.00000000) *
## 53) smoothness_worst< -1.56036 2 0 M (0.00000000 1.00000000) *
## 27) texture_worst>=4.490422 19 0 M (0.00000000 1.00000000) *
## 7) symmetry_worst>=-1.653707 54 5 M (0.09259259 0.90740741)
## 14) smoothness_worst>=-1.333822 6 2 B (0.66666667 0.33333333)
## 28) texture_mean< 2.838682 4 0 B (1.00000000 0.00000000) *
## 29) texture_mean>=2.838682 2 0 M (0.00000000 1.00000000) *
## 15) smoothness_worst< -1.333822 48 1 M (0.02083333 0.97916667)
## 30) compactness_se< -4.19021 1 0 B (1.00000000 0.00000000) *
## 31) compactness_se>=-4.19021 47 0 M (0.00000000 1.00000000) *
##
## $trees[[37]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 449 B (0.50767544 0.49232456)
## 2) compactness_se< -4.219581 216 71 B (0.67129630 0.32870370)
## 4) symmetry_worst>=-1.508268 61 5 B (0.91803279 0.08196721)
## 8) symmetry_worst< -1.312214 57 1 B (0.98245614 0.01754386)
## 16) texture_worst< 5.204837 56 0 B (1.00000000 0.00000000) *
## 17) texture_worst>=5.204837 1 0 M (0.00000000 1.00000000) *
## 9) symmetry_worst>=-1.312214 4 0 M (0.00000000 1.00000000) *
## 5) symmetry_worst< -1.508268 155 66 B (0.57419355 0.42580645)
## 10) symmetry_worst< -1.533879 133 47 B (0.64661654 0.35338346)
## 20) smoothness_worst>=-1.596418 98 24 B (0.75510204 0.24489796)
## 40) texture_worst< 5.05366 82 15 B (0.81707317 0.18292683)
## 80) smoothness_worst< -1.555675 26 0 B (1.00000000 0.00000000) *
## 81) smoothness_worst>=-1.555675 56 15 B (0.73214286 0.26785714) *
## 41) texture_worst>=5.05366 16 7 M (0.43750000 0.56250000)
## 82) texture_mean>=3.289936 7 0 B (1.00000000 0.00000000) *
## 83) texture_mean< 3.289936 9 0 M (0.00000000 1.00000000) *
## 21) smoothness_worst< -1.596418 35 12 M (0.34285714 0.65714286)
## 42) compactness_se< -4.711555 8 0 B (1.00000000 0.00000000) *
## 43) compactness_se>=-4.711555 27 4 M (0.14814815 0.85185185)
## 86) texture_mean>=3.23119 3 0 B (1.00000000 0.00000000) *
## 87) texture_mean< 3.23119 24 1 M (0.04166667 0.95833333) *
## 11) symmetry_worst>=-1.533879 22 3 M (0.13636364 0.86363636)
## 22) texture_mean< 2.906784 3 0 B (1.00000000 0.00000000) *
## 23) texture_mean>=2.906784 19 0 M (0.00000000 1.00000000) *
## 3) compactness_se>=-4.219581 696 318 M (0.45689655 0.54310345)
## 6) smoothness_mean< -2.478548 37 7 B (0.81081081 0.18918919)
## 12) symmetry_worst>=-1.750953 20 0 B (1.00000000 0.00000000) *
## 13) symmetry_worst< -1.750953 17 7 B (0.58823529 0.41176471)
## 26) smoothness_mean< -2.504718 10 0 B (1.00000000 0.00000000) *
## 27) smoothness_mean>=-2.504718 7 0 M (0.00000000 1.00000000) *
## 7) smoothness_mean>=-2.478548 659 288 M (0.43702580 0.56297420)
## 14) smoothness_worst>=-1.565486 552 265 M (0.48007246 0.51992754)
## 28) smoothness_worst< -1.560016 17 0 B (1.00000000 0.00000000) *
## 29) smoothness_worst>=-1.560016 535 248 M (0.46355140 0.53644860)
## 58) compactness_se>=-4.113227 497 243 M (0.48893360 0.51106640)
## 116) compactness_se< -4.024085 60 12 B (0.80000000 0.20000000) *
## 117) compactness_se>=-4.024085 437 195 M (0.44622426 0.55377574) *
## 59) compactness_se< -4.113227 38 5 M (0.13157895 0.86842105)
## 118) symmetry_worst< -1.75757 8 3 B (0.62500000 0.37500000) *
## 119) symmetry_worst>=-1.75757 30 0 M (0.00000000 1.00000000) *
## 15) smoothness_worst< -1.565486 107 23 M (0.21495327 0.78504673)
## 30) symmetry_worst< -2.25148 7 1 B (0.85714286 0.14285714)
## 60) smoothness_mean< -2.242079 6 0 B (1.00000000 0.00000000) *
## 61) smoothness_mean>=-2.242079 1 0 M (0.00000000 1.00000000) *
## 31) symmetry_worst>=-2.25148 100 17 M (0.17000000 0.83000000)
## 62) smoothness_worst< -1.618016 7 2 B (0.71428571 0.28571429)
## 124) texture_mean< 3.160844 5 0 B (1.00000000 0.00000000) *
## 125) texture_mean>=3.160844 2 0 M (0.00000000 1.00000000) *
## 63) smoothness_worst>=-1.618016 93 12 M (0.12903226 0.87096774)
## 126) texture_worst< 4.585652 36 10 M (0.27777778 0.72222222) *
## 127) texture_worst>=4.585652 57 2 M (0.03508772 0.96491228) *
##
## $trees[[38]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 418 B (0.54166667 0.45833333)
## 2) smoothness_worst>=-1.4768 272 90 B (0.66911765 0.33088235)
## 4) smoothness_mean< -2.079457 255 74 B (0.70980392 0.29019608)
## 8) texture_mean< 3.081214 211 50 B (0.76303318 0.23696682)
## 16) smoothness_worst< -1.473476 48 0 B (1.00000000 0.00000000) *
## 17) smoothness_worst>=-1.473476 163 50 B (0.69325153 0.30674847)
## 34) smoothness_worst>=-1.441166 83 13 B (0.84337349 0.15662651)
## 68) symmetry_worst< -1.36527 80 10 B (0.87500000 0.12500000) *
## 69) symmetry_worst>=-1.36527 3 0 M (0.00000000 1.00000000) *
## 35) smoothness_worst< -1.441166 80 37 B (0.53750000 0.46250000)
## 70) compactness_se< -3.652905 52 16 B (0.69230769 0.30769231) *
## 71) compactness_se>=-3.652905 28 7 M (0.25000000 0.75000000) *
## 9) texture_mean>=3.081214 44 20 M (0.45454545 0.54545455)
## 18) compactness_se>=-3.540614 23 5 B (0.78260870 0.21739130)
## 36) texture_mean>=3.099415 19 1 B (0.94736842 0.05263158)
## 72) texture_mean< 3.256167 18 0 B (1.00000000 0.00000000) *
## 73) texture_mean>=3.256167 1 0 M (0.00000000 1.00000000) *
## 37) texture_mean< 3.099415 4 0 M (0.00000000 1.00000000) *
## 19) compactness_se< -3.540614 21 2 M (0.09523810 0.90476190)
## 38) compactness_se< -4.494315 2 0 B (1.00000000 0.00000000) *
## 39) compactness_se>=-4.494315 19 0 M (0.00000000 1.00000000) *
## 5) smoothness_mean>=-2.079457 17 1 M (0.05882353 0.94117647)
## 10) smoothness_mean>=-1.879984 1 0 B (1.00000000 0.00000000) *
## 11) smoothness_mean< -1.879984 16 0 M (0.00000000 1.00000000) *
## 3) smoothness_worst< -1.4768 640 312 M (0.48750000 0.51250000)
## 6) smoothness_worst< -1.482699 603 294 B (0.51243781 0.48756219)
## 12) texture_mean< 2.874407 110 34 B (0.69090909 0.30909091)
## 24) symmetry_worst>=-1.74309 49 4 B (0.91836735 0.08163265)
## 48) symmetry_worst< -1.129539 45 0 B (1.00000000 0.00000000) *
## 49) symmetry_worst>=-1.129539 4 0 M (0.00000000 1.00000000) *
## 25) symmetry_worst< -1.74309 61 30 B (0.50819672 0.49180328)
## 50) texture_worst>=4.320273 17 2 B (0.88235294 0.11764706)
## 100) compactness_se>=-4.394685 14 0 B (1.00000000 0.00000000) *
## 101) compactness_se< -4.394685 3 1 M (0.33333333 0.66666667) *
## 51) texture_worst< 4.320273 44 16 M (0.36363636 0.63636364)
## 102) compactness_se< -4.18512 8 0 B (1.00000000 0.00000000) *
## 103) compactness_se>=-4.18512 36 8 M (0.22222222 0.77777778) *
## 13) texture_mean>=2.874407 493 233 M (0.47261663 0.52738337)
## 26) compactness_se>=-4.514873 442 219 B (0.50452489 0.49547511)
## 52) texture_mean>=2.882272 427 204 B (0.52224824 0.47775176)
## 104) smoothness_mean< -2.424301 140 42 B (0.70000000 0.30000000) *
## 105) smoothness_mean>=-2.424301 287 125 M (0.43554007 0.56445993) *
## 53) texture_mean< 2.882272 15 0 M (0.00000000 1.00000000) *
## 27) compactness_se< -4.514873 51 10 M (0.19607843 0.80392157)
## 54) texture_worst>=4.62656 29 10 M (0.34482759 0.65517241)
## 108) texture_mean< 2.957227 8 0 B (1.00000000 0.00000000) *
## 109) texture_mean>=2.957227 21 2 M (0.09523810 0.90476190) *
## 55) texture_worst< 4.62656 22 0 M (0.00000000 1.00000000) *
## 7) smoothness_worst>=-1.482699 37 3 M (0.08108108 0.91891892)
## 14) texture_mean< 2.755881 2 0 B (1.00000000 0.00000000) *
## 15) texture_mean>=2.755881 35 1 M (0.02857143 0.97142857)
## 30) compactness_se< -3.967101 7 1 M (0.14285714 0.85714286)
## 60) smoothness_mean>=-2.254596 1 0 B (1.00000000 0.00000000) *
## 61) smoothness_mean< -2.254596 6 0 M (0.00000000 1.00000000) *
## 31) compactness_se>=-3.967101 28 0 M (0.00000000 1.00000000) *
##
## $trees[[39]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 375 B (0.58881579 0.41118421)
## 2) symmetry_worst>=-1.490299 141 27 B (0.80851064 0.19148936)
## 4) symmetry_worst< -1.051331 135 21 B (0.84444444 0.15555556)
## 8) smoothness_mean< -2.334751 75 3 B (0.96000000 0.04000000)
## 16) texture_mean< 3.116237 67 0 B (1.00000000 0.00000000) *
## 17) texture_mean>=3.116237 8 3 B (0.62500000 0.37500000)
## 34) smoothness_mean>=-2.350921 5 0 B (1.00000000 0.00000000) *
## 35) smoothness_mean< -2.350921 3 0 M (0.00000000 1.00000000) *
## 9) smoothness_mean>=-2.334751 60 18 B (0.70000000 0.30000000)
## 18) texture_mean< 2.777879 27 0 B (1.00000000 0.00000000) *
## 19) texture_mean>=2.777879 33 15 M (0.45454545 0.54545455)
## 38) symmetry_worst>=-1.124686 10 0 B (1.00000000 0.00000000) *
## 39) symmetry_worst< -1.124686 23 5 M (0.21739130 0.78260870)
## 78) compactness_se< -4.171724 5 0 B (1.00000000 0.00000000) *
## 79) compactness_se>=-4.171724 18 0 M (0.00000000 1.00000000) *
## 5) symmetry_worst>=-1.051331 6 0 M (0.00000000 1.00000000) *
## 3) symmetry_worst< -1.490299 771 348 B (0.54863813 0.45136187)
## 6) texture_worst< 4.389172 182 55 B (0.69780220 0.30219780)
## 12) texture_mean>=2.857325 67 6 B (0.91044776 0.08955224)
## 24) smoothness_mean>=-2.515683 62 2 B (0.96774194 0.03225806)
## 48) smoothness_mean< -2.178638 61 1 B (0.98360656 0.01639344)
## 96) smoothness_mean>=-2.27605 51 0 B (1.00000000 0.00000000) *
## 97) smoothness_mean< -2.27605 10 1 B (0.90000000 0.10000000) *
## 49) smoothness_mean>=-2.178638 1 0 M (0.00000000 1.00000000) *
## 25) smoothness_mean< -2.515683 5 1 M (0.20000000 0.80000000)
## 50) texture_mean>=2.986158 1 0 B (1.00000000 0.00000000) *
## 51) texture_mean< 2.986158 4 0 M (0.00000000 1.00000000) *
## 13) texture_mean< 2.857325 115 49 B (0.57391304 0.42608696)
## 26) texture_mean< 2.824054 94 30 B (0.68085106 0.31914894)
## 52) texture_mean>=2.771335 23 0 B (1.00000000 0.00000000) *
## 53) texture_mean< 2.771335 71 30 B (0.57746479 0.42253521)
## 106) texture_mean< 2.758426 57 17 B (0.70175439 0.29824561) *
## 107) texture_mean>=2.758426 14 1 M (0.07142857 0.92857143) *
## 27) texture_mean>=2.824054 21 2 M (0.09523810 0.90476190)
## 54) smoothness_mean< -2.415139 2 0 B (1.00000000 0.00000000) *
## 55) smoothness_mean>=-2.415139 19 0 M (0.00000000 1.00000000) *
## 7) texture_worst>=4.389172 589 293 B (0.50254669 0.49745331)
## 14) texture_mean< 2.876103 37 5 B (0.86486486 0.13513514)
## 28) symmetry_worst< -1.596157 32 0 B (1.00000000 0.00000000) *
## 29) symmetry_worst>=-1.596157 5 0 M (0.00000000 1.00000000) *
## 15) texture_mean>=2.876103 552 264 M (0.47826087 0.52173913)
## 30) texture_worst>=4.406766 526 261 M (0.49619772 0.50380228)
## 60) texture_mean< 2.892591 12 0 B (1.00000000 0.00000000) *
## 61) texture_mean>=2.892591 514 249 M (0.48443580 0.51556420)
## 122) symmetry_worst< -1.893233 179 74 B (0.58659218 0.41340782) *
## 123) symmetry_worst>=-1.893233 335 144 M (0.42985075 0.57014925) *
## 31) texture_worst< 4.406766 26 3 M (0.11538462 0.88461538)
## 62) texture_mean>=2.918641 3 0 B (1.00000000 0.00000000) *
## 63) texture_mean< 2.918641 23 0 M (0.00000000 1.00000000) *
##
## $trees[[40]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 379 B (0.58442982 0.41557018)
## 2) smoothness_mean>=-2.441446 728 266 B (0.63461538 0.36538462)
## 4) smoothness_mean< -2.425205 60 6 B (0.90000000 0.10000000)
## 8) symmetry_worst< -1.496954 52 0 B (1.00000000 0.00000000) *
## 9) symmetry_worst>=-1.496954 8 2 M (0.25000000 0.75000000)
## 18) texture_mean< 2.97943 2 0 B (1.00000000 0.00000000) *
## 19) texture_mean>=2.97943 6 0 M (0.00000000 1.00000000) *
## 5) smoothness_mean>=-2.425205 668 260 B (0.61077844 0.38922156)
## 10) texture_mean< 3.054236 489 166 B (0.66053170 0.33946830)
## 20) texture_mean>=2.760642 418 123 B (0.70574163 0.29425837)
## 40) smoothness_worst>=-1.593678 404 111 B (0.72524752 0.27475248)
## 80) smoothness_worst< -1.551993 50 2 B (0.96000000 0.04000000) *
## 81) smoothness_worst>=-1.551993 354 109 B (0.69209040 0.30790960) *
## 41) smoothness_worst< -1.593678 14 2 M (0.14285714 0.85714286)
## 82) texture_mean< 2.889781 2 0 B (1.00000000 0.00000000) *
## 83) texture_mean>=2.889781 12 0 M (0.00000000 1.00000000) *
## 21) texture_mean< 2.760642 71 28 M (0.39436620 0.60563380)
## 42) texture_worst< 4.16384 35 11 B (0.68571429 0.31428571)
## 84) texture_mean>=2.715678 16 0 B (1.00000000 0.00000000) *
## 85) texture_mean< 2.715678 19 8 M (0.42105263 0.57894737) *
## 43) texture_worst>=4.16384 36 4 M (0.11111111 0.88888889)
## 86) compactness_se< -3.892047 3 0 B (1.00000000 0.00000000) *
## 87) compactness_se>=-3.892047 33 1 M (0.03030303 0.96969697) *
## 11) texture_mean>=3.054236 179 85 M (0.47486034 0.52513966)
## 22) smoothness_mean>=-2.383798 138 58 B (0.57971014 0.42028986)
## 44) compactness_se< -3.304429 126 46 B (0.63492063 0.36507937)
## 88) compactness_se>=-3.902076 89 20 B (0.77528090 0.22471910) *
## 89) compactness_se< -3.902076 37 11 M (0.29729730 0.70270270) *
## 45) compactness_se>=-3.304429 12 0 M (0.00000000 1.00000000) *
## 23) smoothness_mean< -2.383798 41 5 M (0.12195122 0.87804878)
## 46) symmetry_worst< -2.145206 3 0 B (1.00000000 0.00000000) *
## 47) symmetry_worst>=-2.145206 38 2 M (0.05263158 0.94736842)
## 94) smoothness_worst< -1.602866 2 0 B (1.00000000 0.00000000) *
## 95) smoothness_worst>=-1.602866 36 0 M (0.00000000 1.00000000) *
## 3) smoothness_mean< -2.441446 184 71 M (0.38586957 0.61413043)
## 6) compactness_se< -4.356557 63 25 B (0.60317460 0.39682540)
## 12) symmetry_worst< -1.574457 50 12 B (0.76000000 0.24000000)
## 24) smoothness_mean< -2.44767 46 8 B (0.82608696 0.17391304)
## 48) smoothness_mean>=-2.496965 26 0 B (1.00000000 0.00000000) *
## 49) smoothness_mean< -2.496965 20 8 B (0.60000000 0.40000000)
## 98) smoothness_mean< -2.507092 16 4 B (0.75000000 0.25000000) *
## 99) smoothness_mean>=-2.507092 4 0 M (0.00000000 1.00000000) *
## 25) smoothness_mean>=-2.44767 4 0 M (0.00000000 1.00000000) *
## 13) symmetry_worst>=-1.574457 13 0 M (0.00000000 1.00000000) *
## 7) compactness_se>=-4.356557 121 33 M (0.27272727 0.72727273)
## 14) texture_mean< 2.754836 8 0 B (1.00000000 0.00000000) *
## 15) texture_mean>=2.754836 113 25 M (0.22123894 0.77876106)
## 30) compactness_se>=-2.839112 7 0 B (1.00000000 0.00000000) *
## 31) compactness_se< -2.839112 106 18 M (0.16981132 0.83018868)
## 62) texture_worst>=5.316369 3 0 B (1.00000000 0.00000000) *
## 63) texture_worst< 5.316369 103 15 M (0.14563107 0.85436893)
## 126) smoothness_mean>=-2.467755 33 10 M (0.30303030 0.69696970) *
## 127) smoothness_mean< -2.467755 70 5 M (0.07142857 0.92857143) *
##
## $trees[[41]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 452 B (0.50438596 0.49561404)
## 2) symmetry_worst>=-1.930267 653 296 B (0.54670750 0.45329250)
## 4) smoothness_mean< -2.079457 631 277 B (0.56101426 0.43898574)
## 8) smoothness_mean>=-2.235394 126 35 B (0.72222222 0.27777778)
## 16) smoothness_mean< -2.222851 43 2 B (0.95348837 0.04651163)
## 32) compactness_se< -2.985939 41 0 B (1.00000000 0.00000000) *
## 33) compactness_se>=-2.985939 2 0 M (0.00000000 1.00000000) *
## 17) smoothness_mean>=-2.222851 83 33 B (0.60240964 0.39759036)
## 34) symmetry_worst< -1.765932 29 3 B (0.89655172 0.10344828)
## 68) texture_mean< 3.232324 27 1 B (0.96296296 0.03703704) *
## 69) texture_mean>=3.232324 2 0 M (0.00000000 1.00000000) *
## 35) symmetry_worst>=-1.765932 54 24 M (0.44444444 0.55555556)
## 70) smoothness_mean>=-2.092733 9 0 B (1.00000000 0.00000000) *
## 71) smoothness_mean< -2.092733 45 15 M (0.33333333 0.66666667) *
## 9) smoothness_mean< -2.235394 505 242 B (0.52079208 0.47920792)
## 18) smoothness_mean< -2.36186 287 110 B (0.61672474 0.38327526)
## 36) texture_mean< 2.956199 143 38 B (0.73426573 0.26573427)
## 72) symmetry_worst>=-1.748649 78 7 B (0.91025641 0.08974359) *
## 73) symmetry_worst< -1.748649 65 31 B (0.52307692 0.47692308) *
## 37) texture_mean>=2.956199 144 72 B (0.50000000 0.50000000)
## 74) texture_worst>=4.781424 82 24 B (0.70731707 0.29268293) *
## 75) texture_worst< 4.781424 62 14 M (0.22580645 0.77419355) *
## 19) smoothness_mean>=-2.36186 218 86 M (0.39449541 0.60550459)
## 38) symmetry_worst>=-1.128751 15 1 B (0.93333333 0.06666667)
## 76) smoothness_mean>=-2.321477 14 0 B (1.00000000 0.00000000) *
## 77) smoothness_mean< -2.321477 1 0 M (0.00000000 1.00000000) *
## 39) symmetry_worst< -1.128751 203 72 M (0.35467980 0.64532020)
## 78) compactness_se< -4.025757 49 18 B (0.63265306 0.36734694) *
## 79) compactness_se>=-4.025757 154 41 M (0.26623377 0.73376623) *
## 5) smoothness_mean>=-2.079457 22 3 M (0.13636364 0.86363636)
## 10) smoothness_mean>=-2.000349 5 2 B (0.60000000 0.40000000)
## 20) texture_mean< 2.688296 3 0 B (1.00000000 0.00000000) *
## 21) texture_mean>=2.688296 2 0 M (0.00000000 1.00000000) *
## 11) smoothness_mean< -2.000349 17 0 M (0.00000000 1.00000000) *
## 3) symmetry_worst< -1.930267 259 103 M (0.39768340 0.60231660)
## 6) texture_worst< 4.605004 85 31 B (0.63529412 0.36470588)
## 12) compactness_se>=-4.49319 71 19 B (0.73239437 0.26760563)
## 24) symmetry_worst< -2.048468 38 2 B (0.94736842 0.05263158)
## 48) smoothness_worst>=-1.720903 36 0 B (1.00000000 0.00000000) *
## 49) smoothness_worst< -1.720903 2 0 M (0.00000000 1.00000000) *
## 25) symmetry_worst>=-2.048468 33 16 M (0.48484848 0.51515152)
## 50) compactness_se< -3.88564 10 0 B (1.00000000 0.00000000) *
## 51) compactness_se>=-3.88564 23 6 M (0.26086957 0.73913043)
## 102) texture_mean< 2.753964 2 0 B (1.00000000 0.00000000) *
## 103) texture_mean>=2.753964 21 4 M (0.19047619 0.80952381) *
## 13) compactness_se< -4.49319 14 2 M (0.14285714 0.85714286)
## 26) compactness_se< -4.635639 2 0 B (1.00000000 0.00000000) *
## 27) compactness_se>=-4.635639 12 0 M (0.00000000 1.00000000) *
## 7) texture_worst>=4.605004 174 49 M (0.28160920 0.71839080)
## 14) smoothness_mean< -2.486268 11 0 B (1.00000000 0.00000000) *
## 15) smoothness_mean>=-2.486268 163 38 M (0.23312883 0.76687117)
## 30) smoothness_worst< -1.576561 22 9 B (0.59090909 0.40909091)
## 60) texture_mean< 3.256972 10 0 B (1.00000000 0.00000000) *
## 61) texture_mean>=3.256972 12 3 M (0.25000000 0.75000000)
## 122) texture_mean>=3.383004 3 0 B (1.00000000 0.00000000) *
## 123) texture_mean< 3.383004 9 0 M (0.00000000 1.00000000) *
## 31) smoothness_worst>=-1.576561 141 25 M (0.17730496 0.82269504)
## 62) symmetry_worst< -2.233349 10 3 B (0.70000000 0.30000000)
## 124) smoothness_mean< -2.337981 6 0 B (1.00000000 0.00000000) *
## 125) smoothness_mean>=-2.337981 4 1 M (0.25000000 0.75000000) *
## 63) symmetry_worst>=-2.233349 131 18 M (0.13740458 0.86259542)
## 126) smoothness_mean>=-2.352488 31 13 M (0.41935484 0.58064516) *
## 127) smoothness_mean< -2.352488 100 5 M (0.05000000 0.95000000) *
##
## $trees[[42]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 438 B (0.51973684 0.48026316)
## 2) smoothness_mean>=-2.441446 720 315 B (0.56250000 0.43750000)
## 4) smoothness_worst< -1.472307 493 183 B (0.62880325 0.37119675)
## 8) smoothness_worst>=-1.476409 51 1 B (0.98039216 0.01960784)
## 16) texture_worst< 4.844547 50 0 B (1.00000000 0.00000000) *
## 17) texture_worst>=4.844547 1 0 M (0.00000000 1.00000000) *
## 9) smoothness_worst< -1.476409 442 182 B (0.58823529 0.41176471)
## 18) smoothness_worst< -1.482107 410 157 B (0.61707317 0.38292683)
## 36) symmetry_worst>=-1.595052 106 21 B (0.80188679 0.19811321)
## 72) smoothness_worst>=-1.607486 95 12 B (0.87368421 0.12631579) *
## 73) smoothness_worst< -1.607486 11 2 M (0.18181818 0.81818182) *
## 37) symmetry_worst< -1.595052 304 136 B (0.55263158 0.44736842)
## 74) compactness_se>=-4.100467 214 79 B (0.63084112 0.36915888) *
## 75) compactness_se< -4.100467 90 33 M (0.36666667 0.63333333) *
## 19) smoothness_worst>=-1.482107 32 7 M (0.21875000 0.78125000)
## 38) texture_mean< 2.755881 4 0 B (1.00000000 0.00000000) *
## 39) texture_mean>=2.755881 28 3 M (0.10714286 0.89285714)
## 78) smoothness_mean>=-2.253991 7 3 M (0.42857143 0.57142857) *
## 79) smoothness_mean< -2.253991 21 0 M (0.00000000 1.00000000) *
## 5) smoothness_worst>=-1.472307 227 95 M (0.41850220 0.58149780)
## 10) compactness_se< -4.040144 66 23 B (0.65151515 0.34848485)
## 20) texture_worst>=4.752437 48 9 B (0.81250000 0.18750000)
## 40) compactness_se>=-4.348305 30 0 B (1.00000000 0.00000000) *
## 41) compactness_se< -4.348305 18 9 B (0.50000000 0.50000000)
## 82) smoothness_mean< -2.333927 9 0 B (1.00000000 0.00000000) *
## 83) smoothness_mean>=-2.333927 9 0 M (0.00000000 1.00000000) *
## 21) texture_worst< 4.752437 18 4 M (0.22222222 0.77777778)
## 42) texture_mean< 2.934384 4 0 B (1.00000000 0.00000000) *
## 43) texture_mean>=2.934384 14 0 M (0.00000000 1.00000000) *
## 11) compactness_se>=-4.040144 161 52 M (0.32298137 0.67701863)
## 22) compactness_se>=-3.938669 124 52 M (0.41935484 0.58064516)
## 44) smoothness_worst>=-1.434633 62 26 B (0.58064516 0.41935484)
## 88) compactness_se< -3.311998 42 12 B (0.71428571 0.28571429) *
## 89) compactness_se>=-3.311998 20 6 M (0.30000000 0.70000000) *
## 45) smoothness_worst< -1.434633 62 16 M (0.25806452 0.74193548)
## 90) symmetry_worst< -2.030418 7 0 B (1.00000000 0.00000000) *
## 91) symmetry_worst>=-2.030418 55 9 M (0.16363636 0.83636364) *
## 23) compactness_se< -3.938669 37 0 M (0.00000000 1.00000000) *
## 3) smoothness_mean< -2.441446 192 69 M (0.35937500 0.64062500)
## 6) texture_mean< 2.869285 17 3 B (0.82352941 0.17647059)
## 12) smoothness_mean< -2.448147 14 0 B (1.00000000 0.00000000) *
## 13) smoothness_mean>=-2.448147 3 0 M (0.00000000 1.00000000) *
## 7) texture_mean>=2.869285 175 55 M (0.31428571 0.68571429)
## 14) compactness_se>=-2.839112 7 0 B (1.00000000 0.00000000) *
## 15) compactness_se< -2.839112 168 48 M (0.28571429 0.71428571)
## 30) texture_mean>=2.881435 141 48 M (0.34042553 0.65957447)
## 60) texture_mean< 2.921008 13 0 B (1.00000000 0.00000000) *
## 61) texture_mean>=2.921008 128 35 M (0.27343750 0.72656250)
## 122) symmetry_worst< -1.54778 102 35 M (0.34313725 0.65686275) *
## 123) symmetry_worst>=-1.54778 26 0 M (0.00000000 1.00000000) *
## 31) texture_mean< 2.881435 27 0 M (0.00000000 1.00000000) *
##
## $trees[[43]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 436 B (0.52192982 0.47807018)
## 2) texture_mean< 2.876103 198 63 B (0.68181818 0.31818182)
## 4) texture_mean>=2.850128 50 3 B (0.94000000 0.06000000)
## 8) smoothness_mean< -2.220156 48 1 B (0.97916667 0.02083333)
## 16) smoothness_worst< -1.438497 47 0 B (1.00000000 0.00000000) *
## 17) smoothness_worst>=-1.438497 1 0 M (0.00000000 1.00000000) *
## 9) smoothness_mean>=-2.220156 2 0 M (0.00000000 1.00000000) *
## 5) texture_mean< 2.850128 148 60 B (0.59459459 0.40540541)
## 10) texture_worst< 4.260219 78 19 B (0.75641026 0.24358974)
## 20) symmetry_worst< -1.429489 69 13 B (0.81159420 0.18840580)
## 40) smoothness_worst>=-1.54469 50 5 B (0.90000000 0.10000000)
## 80) texture_worst< 4.176708 39 1 B (0.97435897 0.02564103) *
## 81) texture_worst>=4.176708 11 4 B (0.63636364 0.36363636) *
## 41) smoothness_worst< -1.54469 19 8 B (0.57894737 0.42105263)
## 82) compactness_se< -3.541332 10 0 B (1.00000000 0.00000000) *
## 83) compactness_se>=-3.541332 9 1 M (0.11111111 0.88888889) *
## 21) symmetry_worst>=-1.429489 9 3 M (0.33333333 0.66666667)
## 42) texture_worst>=4.136225 3 0 B (1.00000000 0.00000000) *
## 43) texture_worst< 4.136225 6 0 M (0.00000000 1.00000000) *
## 11) texture_worst>=4.260219 70 29 M (0.41428571 0.58571429)
## 22) texture_worst>=4.338767 42 14 B (0.66666667 0.33333333)
## 44) texture_worst< 4.517878 25 1 B (0.96000000 0.04000000)
## 88) smoothness_mean< -2.137307 24 0 B (1.00000000 0.00000000) *
## 89) smoothness_mean>=-2.137307 1 0 M (0.00000000 1.00000000) *
## 45) texture_worst>=4.517878 17 4 M (0.23529412 0.76470588)
## 90) smoothness_mean< -2.3918 3 0 B (1.00000000 0.00000000) *
## 91) smoothness_mean>=-2.3918 14 1 M (0.07142857 0.92857143) *
## 23) texture_worst< 4.338767 28 1 M (0.03571429 0.96428571)
## 46) smoothness_mean< -2.417779 1 0 B (1.00000000 0.00000000) *
## 47) smoothness_mean>=-2.417779 27 0 M (0.00000000 1.00000000) *
## 3) texture_mean>=2.876103 714 341 M (0.47759104 0.52240896)
## 6) symmetry_worst< -1.317839 685 337 M (0.49197080 0.50802920)
## 12) smoothness_worst>=-1.614253 623 302 B (0.51524880 0.48475120)
## 24) compactness_se< -3.816486 309 124 B (0.59870550 0.40129450)
## 48) smoothness_worst< -1.559798 93 18 B (0.80645161 0.19354839)
## 96) texture_worst< 5.083395 82 9 B (0.89024390 0.10975610) *
## 97) texture_worst>=5.083395 11 2 M (0.18181818 0.81818182) *
## 49) smoothness_worst>=-1.559798 216 106 B (0.50925926 0.49074074)
## 98) smoothness_worst>=-1.546619 168 67 B (0.60119048 0.39880952) *
## 99) smoothness_worst< -1.546619 48 9 M (0.18750000 0.81250000) *
## 25) compactness_se>=-3.816486 314 136 M (0.43312102 0.56687898)
## 50) texture_mean< 2.925574 26 1 B (0.96153846 0.03846154)
## 100) texture_worst>=4.12485 25 0 B (1.00000000 0.00000000) *
## 101) texture_worst< 4.12485 1 0 M (0.00000000 1.00000000) *
## 51) texture_mean>=2.925574 288 111 M (0.38541667 0.61458333)
## 102) texture_mean>=2.936509 256 111 M (0.43359375 0.56640625) *
## 103) texture_mean< 2.936509 32 0 M (0.00000000 1.00000000) *
## 13) smoothness_worst< -1.614253 62 16 M (0.25806452 0.74193548)
## 26) texture_mean>=3.089887 8 1 B (0.87500000 0.12500000)
## 52) compactness_se>=-4.480894 7 0 B (1.00000000 0.00000000) *
## 53) compactness_se< -4.480894 1 0 M (0.00000000 1.00000000) *
## 27) texture_mean< 3.089887 54 9 M (0.16666667 0.83333333)
## 54) compactness_se>=-2.870592 3 0 B (1.00000000 0.00000000) *
## 55) compactness_se< -2.870592 51 6 M (0.11764706 0.88235294)
## 110) texture_mean< 2.935975 1 0 B (1.00000000 0.00000000) *
## 111) texture_mean>=2.935975 50 5 M (0.10000000 0.90000000) *
## 7) symmetry_worst>=-1.317839 29 4 M (0.13793103 0.86206897)
## 14) texture_mean>=3.099059 6 2 B (0.66666667 0.33333333)
## 28) texture_mean< 3.163269 4 0 B (1.00000000 0.00000000) *
## 29) texture_mean>=3.163269 2 0 M (0.00000000 1.00000000) *
## 15) texture_mean< 3.099059 23 0 M (0.00000000 1.00000000) *
##
## $trees[[44]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 432 B (0.52631579 0.47368421)
## 2) compactness_se>=-3.494301 297 98 B (0.67003367 0.32996633)
## 4) texture_mean< 3.059388 228 50 B (0.78070175 0.21929825)
## 8) smoothness_mean< -2.066369 222 44 B (0.80180180 0.19819820)
## 16) smoothness_mean< -2.395314 44 1 B (0.97727273 0.02272727)
## 32) compactness_se>=-3.483667 43 0 B (1.00000000 0.00000000) *
## 33) compactness_se< -3.483667 1 0 M (0.00000000 1.00000000) *
## 17) smoothness_mean>=-2.395314 178 43 B (0.75842697 0.24157303)
## 34) smoothness_mean>=-2.369527 172 37 B (0.78488372 0.21511628)
## 68) smoothness_worst>=-1.477195 80 8 B (0.90000000 0.10000000) *
## 69) smoothness_worst< -1.477195 92 29 B (0.68478261 0.31521739) *
## 35) smoothness_mean< -2.369527 6 0 M (0.00000000 1.00000000) *
## 9) smoothness_mean>=-2.066369 6 0 M (0.00000000 1.00000000) *
## 5) texture_mean>=3.059388 69 21 M (0.30434783 0.69565217)
## 10) smoothness_worst>=-1.513087 32 12 B (0.62500000 0.37500000)
## 20) texture_worst>=4.744846 27 7 B (0.74074074 0.25925926)
## 40) texture_mean< 3.220473 24 4 B (0.83333333 0.16666667)
## 80) texture_worst< 5.016194 21 1 B (0.95238095 0.04761905) *
## 81) texture_worst>=5.016194 3 0 M (0.00000000 1.00000000) *
## 41) texture_mean>=3.220473 3 0 M (0.00000000 1.00000000) *
## 21) texture_worst< 4.744846 5 0 M (0.00000000 1.00000000) *
## 11) smoothness_worst< -1.513087 37 1 M (0.02702703 0.97297297)
## 22) smoothness_mean< -2.638103 1 0 B (1.00000000 0.00000000) *
## 23) smoothness_mean>=-2.638103 36 0 M (0.00000000 1.00000000) *
## 3) compactness_se< -3.494301 615 281 M (0.45691057 0.54308943)
## 6) smoothness_worst< -1.520292 340 155 B (0.54411765 0.45588235)
## 12) texture_worst>=4.465917 249 92 B (0.63052209 0.36947791)
## 24) smoothness_worst>=-1.570555 165 43 B (0.73939394 0.26060606)
## 48) compactness_se< -3.512408 157 35 B (0.77707006 0.22292994)
## 96) texture_worst< 4.949112 119 17 B (0.85714286 0.14285714) *
## 97) texture_worst>=4.949112 38 18 B (0.52631579 0.47368421) *
## 49) compactness_se>=-3.512408 8 0 M (0.00000000 1.00000000) *
## 25) smoothness_worst< -1.570555 84 35 M (0.41666667 0.58333333)
## 50) smoothness_worst< -1.584838 59 24 B (0.59322034 0.40677966)
## 100) smoothness_worst>=-1.607486 22 3 B (0.86363636 0.13636364) *
## 101) smoothness_worst< -1.607486 37 16 M (0.43243243 0.56756757) *
## 51) smoothness_worst>=-1.584838 25 0 M (0.00000000 1.00000000) *
## 13) texture_worst< 4.465917 91 28 M (0.30769231 0.69230769)
## 26) symmetry_worst< -1.948608 23 6 B (0.73913043 0.26086957)
## 52) symmetry_worst>=-2.391709 17 0 B (1.00000000 0.00000000) *
## 53) symmetry_worst< -2.391709 6 0 M (0.00000000 1.00000000) *
## 27) symmetry_worst>=-1.948608 68 11 M (0.16176471 0.83823529)
## 54) symmetry_worst>=-1.603839 5 0 B (1.00000000 0.00000000) *
## 55) symmetry_worst< -1.603839 63 6 M (0.09523810 0.90476190)
## 110) smoothness_worst< -1.556329 13 6 M (0.46153846 0.53846154) *
## 111) smoothness_worst>=-1.556329 50 0 M (0.00000000 1.00000000) *
## 7) smoothness_worst>=-1.520292 275 96 M (0.34909091 0.65090909)
## 14) texture_worst< 4.469369 51 15 B (0.70588235 0.29411765)
## 28) smoothness_worst< -1.451541 29 3 B (0.89655172 0.10344828)
## 56) symmetry_worst>=-1.886625 23 0 B (1.00000000 0.00000000) *
## 57) symmetry_worst< -1.886625 6 3 B (0.50000000 0.50000000)
## 114) texture_mean>=2.796001 3 0 B (1.00000000 0.00000000) *
## 115) texture_mean< 2.796001 3 0 M (0.00000000 1.00000000) *
## 29) smoothness_worst>=-1.451541 22 10 M (0.45454545 0.54545455)
## 58) smoothness_worst>=-1.434633 13 4 B (0.69230769 0.30769231)
## 116) texture_worst< 4.30106 9 0 B (1.00000000 0.00000000) *
## 117) texture_worst>=4.30106 4 0 M (0.00000000 1.00000000) *
## 59) smoothness_worst< -1.434633 9 1 M (0.11111111 0.88888889)
## 118) compactness_se< -4.188107 1 0 B (1.00000000 0.00000000) *
## 119) compactness_se>=-4.188107 8 0 M (0.00000000 1.00000000) *
## 15) texture_worst>=4.469369 224 60 M (0.26785714 0.73214286)
## 30) texture_mean< 2.88392 26 8 B (0.69230769 0.30769231)
## 60) texture_mean>=2.849548 17 0 B (1.00000000 0.00000000) *
## 61) texture_mean< 2.849548 9 1 M (0.11111111 0.88888889)
## 122) texture_mean< 2.79419 1 0 B (1.00000000 0.00000000) *
## 123) texture_mean>=2.79419 8 0 M (0.00000000 1.00000000) *
## 31) texture_mean>=2.88392 198 42 M (0.21212121 0.78787879)
## 62) texture_mean>=3.309778 17 8 B (0.52941176 0.47058824)
## 124) texture_mean< 3.407548 12 3 B (0.75000000 0.25000000) *
## 125) texture_mean>=3.407548 5 0 M (0.00000000 1.00000000) *
## 63) texture_mean< 3.309778 181 33 M (0.18232044 0.81767956)
## 126) smoothness_mean>=-2.311929 78 25 M (0.32051282 0.67948718) *
## 127) smoothness_mean< -2.311929 103 8 M (0.07766990 0.92233010) *
##
## $trees[[45]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 446 M (0.48903509 0.51096491)
## 2) smoothness_worst< -1.558926 239 80 B (0.66527197 0.33472803)
## 4) smoothness_worst>=-1.565486 37 0 B (1.00000000 0.00000000) *
## 5) smoothness_worst< -1.565486 202 80 B (0.60396040 0.39603960)
## 10) smoothness_worst< -1.584838 149 44 B (0.70469799 0.29530201)
## 20) texture_mean< 3.204554 133 30 B (0.77443609 0.22556391)
## 40) compactness_se< -2.951614 121 21 B (0.82644628 0.17355372)
## 80) symmetry_worst>=-2.391709 116 16 B (0.86206897 0.13793103) *
## 81) symmetry_worst< -2.391709 5 0 M (0.00000000 1.00000000) *
## 41) compactness_se>=-2.951614 12 3 M (0.25000000 0.75000000)
## 82) texture_mean< 3.045208 3 0 B (1.00000000 0.00000000) *
## 83) texture_mean>=3.045208 9 0 M (0.00000000 1.00000000) *
## 21) texture_mean>=3.204554 16 2 M (0.12500000 0.87500000)
## 42) smoothness_mean< -2.520061 2 0 B (1.00000000 0.00000000) *
## 43) smoothness_mean>=-2.520061 14 0 M (0.00000000 1.00000000) *
## 11) smoothness_worst>=-1.584838 53 17 M (0.32075472 0.67924528)
## 22) smoothness_mean< -2.444611 10 1 B (0.90000000 0.10000000)
## 44) texture_mean< 3.074043 9 0 B (1.00000000 0.00000000) *
## 45) texture_mean>=3.074043 1 0 M (0.00000000 1.00000000) *
## 23) smoothness_mean>=-2.444611 43 8 M (0.18604651 0.81395349)
## 46) compactness_se< -4.144493 5 0 B (1.00000000 0.00000000) *
## 47) compactness_se>=-4.144493 38 3 M (0.07894737 0.92105263)
## 94) texture_mean< 2.869214 6 3 B (0.50000000 0.50000000) *
## 95) texture_mean>=2.869214 32 0 M (0.00000000 1.00000000) *
## 3) smoothness_worst>=-1.558926 673 287 M (0.42644874 0.57355126)
## 6) smoothness_worst>=-1.551128 616 280 M (0.45454545 0.54545455)
## 12) smoothness_mean< -2.416986 80 26 B (0.67500000 0.32500000)
## 24) texture_worst< 4.998431 67 13 B (0.80597015 0.19402985)
## 48) symmetry_worst>=-1.995409 57 4 B (0.92982456 0.07017544)
## 96) symmetry_worst< -1.429489 53 0 B (1.00000000 0.00000000) *
## 97) symmetry_worst>=-1.429489 4 0 M (0.00000000 1.00000000) *
## 49) symmetry_worst< -1.995409 10 1 M (0.10000000 0.90000000)
## 98) texture_mean< 2.768852 1 0 B (1.00000000 0.00000000) *
## 99) texture_mean>=2.768852 9 0 M (0.00000000 1.00000000) *
## 25) texture_worst>=4.998431 13 0 M (0.00000000 1.00000000) *
## 13) smoothness_mean>=-2.416986 536 226 M (0.42164179 0.57835821)
## 26) smoothness_mean>=-2.326878 361 180 B (0.50138504 0.49861496)
## 52) symmetry_worst< -1.640882 188 68 B (0.63829787 0.36170213)
## 104) smoothness_mean< -2.304488 43 2 B (0.95348837 0.04651163) *
## 105) smoothness_mean>=-2.304488 145 66 B (0.54482759 0.45517241) *
## 53) symmetry_worst>=-1.640882 173 61 M (0.35260116 0.64739884)
## 106) smoothness_mean< -2.322588 11 0 B (1.00000000 0.00000000) *
## 107) smoothness_mean>=-2.322588 162 50 M (0.30864198 0.69135802) *
## 27) smoothness_mean< -2.326878 175 45 M (0.25714286 0.74285714)
## 54) symmetry_worst>=-1.50256 27 6 B (0.77777778 0.22222222)
## 108) symmetry_worst< -1.291518 21 1 B (0.95238095 0.04761905) *
## 109) symmetry_worst>=-1.291518 6 1 M (0.16666667 0.83333333) *
## 55) symmetry_worst< -1.50256 148 24 M (0.16216216 0.83783784)
## 110) texture_worst< 4.242051 3 0 B (1.00000000 0.00000000) *
## 111) texture_worst>=4.242051 145 21 M (0.14482759 0.85517241) *
## 7) smoothness_worst< -1.551128 57 7 M (0.12280702 0.87719298)
## 14) texture_mean>=3.344965 4 0 B (1.00000000 0.00000000) *
## 15) texture_mean< 3.344965 53 3 M (0.05660377 0.94339623)
## 30) texture_mean< 2.850634 1 0 B (1.00000000 0.00000000) *
## 31) texture_mean>=2.850634 52 2 M (0.03846154 0.96153846)
## 62) texture_mean< 2.919658 9 2 M (0.22222222 0.77777778)
## 124) texture_mean>=2.897371 2 0 B (1.00000000 0.00000000) *
## 125) texture_mean< 2.897371 7 0 M (0.00000000 1.00000000) *
## 63) texture_mean>=2.919658 43 0 M (0.00000000 1.00000000) *
##
## $trees[[46]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 448 B (0.50877193 0.49122807)
## 2) texture_worst< 4.905415 705 310 B (0.56028369 0.43971631)
## 4) smoothness_worst< -1.557621 154 41 B (0.73376623 0.26623377)
## 8) symmetry_worst< -1.838755 69 7 B (0.89855072 0.10144928)
## 16) smoothness_mean>=-2.603563 63 4 B (0.93650794 0.06349206)
## 32) texture_worst>=3.965685 58 2 B (0.96551724 0.03448276)
## 64) smoothness_worst< -1.575665 42 0 B (1.00000000 0.00000000) *
## 65) smoothness_worst>=-1.575665 16 2 B (0.87500000 0.12500000) *
## 33) texture_worst< 3.965685 5 2 B (0.60000000 0.40000000)
## 66) texture_mean< 2.754513 3 0 B (1.00000000 0.00000000) *
## 67) texture_mean>=2.754513 2 0 M (0.00000000 1.00000000) *
## 17) smoothness_mean< -2.603563 6 3 B (0.50000000 0.50000000)
## 34) texture_mean< 2.993132 3 0 B (1.00000000 0.00000000) *
## 35) texture_mean>=2.993132 3 0 M (0.00000000 1.00000000) *
## 9) symmetry_worst>=-1.838755 85 34 B (0.60000000 0.40000000)
## 18) compactness_se>=-3.586422 35 5 B (0.85714286 0.14285714)
## 36) symmetry_worst< -1.528105 33 3 B (0.90909091 0.09090909)
## 72) compactness_se< -3.430879 25 0 B (1.00000000 0.00000000) *
## 73) compactness_se>=-3.430879 8 3 B (0.62500000 0.37500000) *
## 37) symmetry_worst>=-1.528105 2 0 M (0.00000000 1.00000000) *
## 19) compactness_se< -3.586422 50 21 M (0.42000000 0.58000000)
## 38) smoothness_worst>=-1.567699 16 0 B (1.00000000 0.00000000) *
## 39) smoothness_worst< -1.567699 34 5 M (0.14705882 0.85294118)
## 78) texture_worst>=4.658002 4 1 B (0.75000000 0.25000000) *
## 79) texture_worst< 4.658002 30 2 M (0.06666667 0.93333333) *
## 5) smoothness_worst>=-1.557621 551 269 B (0.51179673 0.48820327)
## 10) smoothness_worst>=-1.537035 460 194 B (0.57826087 0.42173913)
## 20) smoothness_worst< -1.501069 157 41 B (0.73885350 0.26114650)
## 40) texture_mean< 3.065024 122 20 B (0.83606557 0.16393443)
## 80) texture_mean>=2.893423 92 8 B (0.91304348 0.08695652) *
## 81) texture_mean< 2.893423 30 12 B (0.60000000 0.40000000) *
## 41) texture_mean>=3.065024 35 14 M (0.40000000 0.60000000)
## 82) texture_worst>=4.864642 10 0 B (1.00000000 0.00000000) *
## 83) texture_worst< 4.864642 25 4 M (0.16000000 0.84000000) *
## 21) smoothness_worst>=-1.501069 303 150 M (0.49504950 0.50495050)
## 42) smoothness_worst>=-1.477976 202 71 B (0.64851485 0.35148515)
## 84) smoothness_worst< -1.472307 40 1 B (0.97500000 0.02500000) *
## 85) smoothness_worst>=-1.472307 162 70 B (0.56790123 0.43209877) *
## 43) smoothness_worst< -1.477976 101 19 M (0.18811881 0.81188119)
## 86) smoothness_mean>=-2.231196 10 3 B (0.70000000 0.30000000) *
## 87) smoothness_mean< -2.231196 91 12 M (0.13186813 0.86813187) *
## 11) smoothness_worst< -1.537035 91 16 M (0.17582418 0.82417582)
## 22) texture_mean< 2.919658 37 14 M (0.37837838 0.62162162)
## 44) texture_worst>=4.403188 14 0 B (1.00000000 0.00000000) *
## 45) texture_worst< 4.403188 23 0 M (0.00000000 1.00000000) *
## 23) texture_mean>=2.919658 54 2 M (0.03703704 0.96296296)
## 46) smoothness_mean< -2.457256 11 2 M (0.18181818 0.81818182)
## 92) texture_mean< 3.006366 2 0 B (1.00000000 0.00000000) *
## 93) texture_mean>=3.006366 9 0 M (0.00000000 1.00000000) *
## 47) smoothness_mean>=-2.457256 43 0 M (0.00000000 1.00000000) *
## 3) texture_worst>=4.905415 207 69 M (0.33333333 0.66666667)
## 6) compactness_se>=-3.781676 80 39 B (0.51250000 0.48750000)
## 12) smoothness_mean< -2.3667 45 12 B (0.73333333 0.26666667)
## 24) symmetry_worst< -1.541072 39 6 B (0.84615385 0.15384615)
## 48) smoothness_mean>=-2.473552 32 1 B (0.96875000 0.03125000)
## 96) smoothness_worst>=-1.580728 28 0 B (1.00000000 0.00000000) *
## 97) smoothness_worst< -1.580728 4 1 B (0.75000000 0.25000000) *
## 49) smoothness_mean< -2.473552 7 2 M (0.28571429 0.71428571)
## 98) smoothness_mean< -2.491711 2 0 B (1.00000000 0.00000000) *
## 99) smoothness_mean>=-2.491711 5 0 M (0.00000000 1.00000000) *
## 25) symmetry_worst>=-1.541072 6 0 M (0.00000000 1.00000000) *
## 13) smoothness_mean>=-2.3667 35 8 M (0.22857143 0.77142857)
## 26) smoothness_worst>=-1.362317 5 1 B (0.80000000 0.20000000)
## 52) texture_mean>=3.181902 4 0 B (1.00000000 0.00000000) *
## 53) texture_mean< 3.181902 1 0 M (0.00000000 1.00000000) *
## 27) smoothness_worst< -1.362317 30 4 M (0.13333333 0.86666667)
## 54) symmetry_worst< -2.116233 7 3 B (0.57142857 0.42857143)
## 108) texture_mean>=3.253685 4 0 B (1.00000000 0.00000000) *
## 109) texture_mean< 3.253685 3 0 M (0.00000000 1.00000000) *
## 55) symmetry_worst>=-2.116233 23 0 M (0.00000000 1.00000000) *
## 7) compactness_se< -3.781676 127 28 M (0.22047244 0.77952756)
## 14) compactness_se< -4.030876 73 26 M (0.35616438 0.64383562)
## 28) smoothness_worst>=-1.452317 13 0 B (1.00000000 0.00000000) *
## 29) smoothness_worst< -1.452317 60 13 M (0.21666667 0.78333333)
## 58) smoothness_worst< -1.624406 4 0 B (1.00000000 0.00000000) *
## 59) smoothness_worst>=-1.624406 56 9 M (0.16071429 0.83928571)
## 118) texture_mean< 2.915217 2 0 B (1.00000000 0.00000000) *
## 119) texture_mean>=2.915217 54 7 M (0.12962963 0.87037037) *
## 15) compactness_se>=-4.030876 54 2 M (0.03703704 0.96296296)
## 30) texture_mean>=3.348904 2 0 B (1.00000000 0.00000000) *
## 31) texture_mean< 3.348904 52 0 M (0.00000000 1.00000000) *
##
## $trees[[47]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 455 B (0.50109649 0.49890351)
## 2) smoothness_mean< -2.424301 214 76 B (0.64485981 0.35514019)
## 4) symmetry_worst< -1.541072 166 44 B (0.73493976 0.26506024)
## 8) smoothness_mean>=-2.439727 35 0 B (1.00000000 0.00000000) *
## 9) smoothness_mean< -2.439727 131 44 B (0.66412214 0.33587786)
## 18) smoothness_mean< -2.444322 117 30 B (0.74358974 0.25641026)
## 36) compactness_se>=-4.296297 63 9 B (0.85714286 0.14285714)
## 72) symmetry_worst>=-2.081488 55 5 B (0.90909091 0.09090909) *
## 73) symmetry_worst< -2.081488 8 4 B (0.50000000 0.50000000) *
## 37) compactness_se< -4.296297 54 21 B (0.61111111 0.38888889)
## 74) compactness_se< -4.510773 29 5 B (0.82758621 0.17241379) *
## 75) compactness_se>=-4.510773 25 9 M (0.36000000 0.64000000) *
## 19) smoothness_mean>=-2.444322 14 0 M (0.00000000 1.00000000) *
## 5) symmetry_worst>=-1.541072 48 16 M (0.33333333 0.66666667)
## 10) texture_mean< 2.973222 22 7 B (0.68181818 0.31818182)
## 20) smoothness_worst< -1.556321 15 0 B (1.00000000 0.00000000) *
## 21) smoothness_worst>=-1.556321 7 0 M (0.00000000 1.00000000) *
## 11) texture_mean>=2.973222 26 1 M (0.03846154 0.96153846)
## 22) smoothness_mean< -2.540124 1 0 B (1.00000000 0.00000000) *
## 23) smoothness_mean>=-2.540124 25 0 M (0.00000000 1.00000000) *
## 3) smoothness_mean>=-2.424301 698 319 M (0.45702006 0.54297994)
## 6) compactness_se< -4.219581 71 21 B (0.70422535 0.29577465)
## 12) texture_mean< 2.99373 37 3 B (0.91891892 0.08108108)
## 24) compactness_se>=-4.469757 29 0 B (1.00000000 0.00000000) *
## 25) compactness_se< -4.469757 8 3 B (0.62500000 0.37500000)
## 50) texture_mean< 2.835488 3 0 B (1.00000000 0.00000000) *
## 51) texture_mean>=2.835488 5 2 M (0.40000000 0.60000000)
## 102) smoothness_mean>=-2.277891 2 0 B (1.00000000 0.00000000) *
## 103) smoothness_mean< -2.277891 3 0 M (0.00000000 1.00000000) *
## 13) texture_mean>=2.99373 34 16 M (0.47058824 0.52941176)
## 26) texture_mean>=3.227241 11 0 B (1.00000000 0.00000000) *
## 27) texture_mean< 3.227241 23 5 M (0.21739130 0.78260870)
## 54) smoothness_mean>=-2.30036 3 0 B (1.00000000 0.00000000) *
## 55) smoothness_mean< -2.30036 20 2 M (0.10000000 0.90000000)
## 110) compactness_se>=-4.332241 4 2 B (0.50000000 0.50000000) *
## 111) compactness_se< -4.332241 16 0 M (0.00000000 1.00000000) *
## 7) compactness_se>=-4.219581 627 269 M (0.42902711 0.57097289)
## 14) compactness_se>=-4.09685 556 256 M (0.46043165 0.53956835)
## 28) compactness_se< -4.025757 35 3 B (0.91428571 0.08571429)
## 56) texture_worst< 5.105262 32 0 B (1.00000000 0.00000000) *
## 57) texture_worst>=5.105262 3 0 M (0.00000000 1.00000000) *
## 29) compactness_se>=-4.025757 521 224 M (0.42994242 0.57005758)
## 58) smoothness_worst< -1.451541 383 183 M (0.47780679 0.52219321)
## 116) symmetry_worst>=-1.609029 107 31 B (0.71028037 0.28971963) *
## 117) symmetry_worst< -1.609029 276 107 M (0.38768116 0.61231884) *
## 59) smoothness_worst>=-1.451541 138 41 M (0.29710145 0.70289855)
## 118) compactness_se>=-3.68868 74 36 B (0.51351351 0.48648649) *
## 119) compactness_se< -3.68868 64 3 M (0.04687500 0.95312500) *
## 15) compactness_se< -4.09685 71 13 M (0.18309859 0.81690141)
## 30) texture_mean< 2.936778 24 11 B (0.54166667 0.45833333)
## 60) symmetry_worst< -1.589658 17 4 B (0.76470588 0.23529412)
## 120) smoothness_mean>=-2.390594 13 0 B (1.00000000 0.00000000) *
## 121) smoothness_mean< -2.390594 4 0 M (0.00000000 1.00000000) *
## 61) symmetry_worst>=-1.589658 7 0 M (0.00000000 1.00000000) *
## 31) texture_mean>=2.936778 47 0 M (0.00000000 1.00000000) *
##
## $trees[[48]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 449 M (0.49232456 0.50767544)
## 2) smoothness_mean< -2.21595 785 368 B (0.53121019 0.46878981)
## 4) symmetry_worst< -1.886611 259 88 B (0.66023166 0.33976834)
## 8) symmetry_worst>=-1.926862 47 1 B (0.97872340 0.02127660)
## 16) texture_mean< 3.241447 46 0 B (1.00000000 0.00000000) *
## 17) texture_mean>=3.241447 1 0 M (0.00000000 1.00000000) *
## 9) symmetry_worst< -1.926862 212 87 B (0.58962264 0.41037736)
## 18) symmetry_worst< -2.052205 106 28 B (0.73584906 0.26415094)
## 36) texture_worst>=5.15236 20 0 B (1.00000000 0.00000000) *
## 37) texture_worst< 5.15236 86 28 B (0.67441860 0.32558140)
## 74) texture_mean< 3.14457 73 18 B (0.75342466 0.24657534) *
## 75) texture_mean>=3.14457 13 3 M (0.23076923 0.76923077) *
## 19) symmetry_worst>=-2.052205 106 47 M (0.44339623 0.55660377)
## 38) smoothness_worst>=-1.479155 13 0 B (1.00000000 0.00000000) *
## 39) smoothness_worst< -1.479155 93 34 M (0.36559140 0.63440860)
## 78) smoothness_mean< -2.408892 49 18 B (0.63265306 0.36734694) *
## 79) smoothness_mean>=-2.408892 44 3 M (0.06818182 0.93181818) *
## 5) symmetry_worst>=-1.886611 526 246 M (0.46768061 0.53231939)
## 10) smoothness_mean>=-2.235862 31 2 B (0.93548387 0.06451613)
## 20) compactness_se< -3.143422 29 0 B (1.00000000 0.00000000) *
## 21) compactness_se>=-3.143422 2 0 M (0.00000000 1.00000000) *
## 11) smoothness_mean< -2.235862 495 217 M (0.43838384 0.56161616)
## 22) smoothness_mean< -2.333148 280 129 B (0.53928571 0.46071429)
## 44) compactness_se>=-4.663537 253 106 B (0.58102767 0.41897233)
## 88) smoothness_mean>=-2.354616 22 1 B (0.95454545 0.04545455) *
## 89) smoothness_mean< -2.354616 231 105 B (0.54545455 0.45454545) *
## 45) compactness_se< -4.663537 27 4 M (0.14814815 0.85185185)
## 90) smoothness_mean>=-2.441817 3 0 B (1.00000000 0.00000000) *
## 91) smoothness_mean< -2.441817 24 1 M (0.04166667 0.95833333) *
## 23) smoothness_mean>=-2.333148 215 66 M (0.30697674 0.69302326)
## 46) symmetry_worst>=-1.769229 144 64 M (0.44444444 0.55555556)
## 92) symmetry_worst< -1.606092 78 28 B (0.64102564 0.35897436) *
## 93) symmetry_worst>=-1.606092 66 14 M (0.21212121 0.78787879) *
## 47) symmetry_worst< -1.769229 71 2 M (0.02816901 0.97183099)
## 94) symmetry_worst< -1.845834 1 0 B (1.00000000 0.00000000) *
## 95) symmetry_worst>=-1.845834 70 1 M (0.01428571 0.98571429) *
## 3) smoothness_mean>=-2.21595 127 32 M (0.25196850 0.74803150)
## 6) texture_worst< 3.952268 20 8 B (0.60000000 0.40000000)
## 12) texture_mean>=2.515298 12 0 B (1.00000000 0.00000000) *
## 13) texture_mean< 2.515298 8 0 M (0.00000000 1.00000000) *
## 7) texture_worst>=3.952268 107 20 M (0.18691589 0.81308411)
## 14) texture_mean>=2.999709 45 17 M (0.37777778 0.62222222)
## 28) smoothness_mean>=-2.093138 9 1 B (0.88888889 0.11111111)
## 56) smoothness_mean< -2.073133 8 0 B (1.00000000 0.00000000) *
## 57) smoothness_mean>=-2.073133 1 0 M (0.00000000 1.00000000) *
## 29) smoothness_mean< -2.093138 36 9 M (0.25000000 0.75000000)
## 58) compactness_se< -4.054302 6 0 B (1.00000000 0.00000000) *
## 59) compactness_se>=-4.054302 30 3 M (0.10000000 0.90000000)
## 118) texture_mean< 3.017902 3 0 B (1.00000000 0.00000000) *
## 119) texture_mean>=3.017902 27 0 M (0.00000000 1.00000000) *
## 15) texture_mean< 2.999709 62 3 M (0.04838710 0.95161290)
## 30) texture_mean< 2.825779 10 3 M (0.30000000 0.70000000)
## 60) smoothness_mean< -2.143945 3 0 B (1.00000000 0.00000000) *
## 61) smoothness_mean>=-2.143945 7 0 M (0.00000000 1.00000000) *
## 31) texture_mean>=2.825779 52 0 M (0.00000000 1.00000000) *
##
## $trees[[49]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 396 M (0.43421053 0.56578947)
## 2) compactness_se< -4.705732 22 1 B (0.95454545 0.04545455)
## 4) symmetry_worst< -1.170399 21 0 B (1.00000000 0.00000000) *
## 5) symmetry_worst>=-1.170399 1 0 M (0.00000000 1.00000000) *
## 3) compactness_se>=-4.705732 890 375 M (0.42134831 0.57865169)
## 6) texture_worst< 4.12321 41 11 B (0.73170732 0.26829268)
## 12) texture_mean>=2.714689 25 2 B (0.92000000 0.08000000)
## 24) smoothness_worst>=-1.590596 21 0 B (1.00000000 0.00000000) *
## 25) smoothness_worst< -1.590596 4 2 B (0.50000000 0.50000000)
## 50) smoothness_mean< -2.465605 2 0 B (1.00000000 0.00000000) *
## 51) smoothness_mean>=-2.465605 2 0 M (0.00000000 1.00000000) *
## 13) texture_mean< 2.714689 16 7 M (0.43750000 0.56250000)
## 26) texture_worst< 3.781157 5 0 B (1.00000000 0.00000000) *
## 27) texture_worst>=3.781157 11 2 M (0.18181818 0.81818182)
## 54) compactness_se< -3.869334 2 0 B (1.00000000 0.00000000) *
## 55) compactness_se>=-3.869334 9 0 M (0.00000000 1.00000000) *
## 7) texture_worst>=4.12321 849 345 M (0.40636042 0.59363958)
## 14) smoothness_mean< -2.21595 748 323 M (0.43181818 0.56818182)
## 28) smoothness_mean>=-2.235394 37 5 B (0.86486486 0.13513514)
## 56) texture_mean< 3.035465 32 0 B (1.00000000 0.00000000) *
## 57) texture_mean>=3.035465 5 0 M (0.00000000 1.00000000) *
## 29) smoothness_mean< -2.235394 711 291 M (0.40928270 0.59071730)
## 58) symmetry_worst>=-1.128751 16 1 B (0.93750000 0.06250000)
## 116) compactness_se>=-3.52227 15 0 B (1.00000000 0.00000000) *
## 117) compactness_se< -3.52227 1 0 M (0.00000000 1.00000000) *
## 59) symmetry_worst< -1.128751 695 276 M (0.39712230 0.60287770)
## 118) smoothness_mean< -2.242902 667 276 M (0.41379310 0.58620690) *
## 119) smoothness_mean>=-2.242902 28 0 M (0.00000000 1.00000000) *
## 15) smoothness_mean>=-2.21595 101 22 M (0.21782178 0.78217822)
## 30) texture_mean>=3.033028 28 13 B (0.53571429 0.46428571)
## 60) smoothness_mean>=-2.093138 9 0 B (1.00000000 0.00000000) *
## 61) smoothness_mean< -2.093138 19 6 M (0.31578947 0.68421053)
## 122) compactness_se< -4.054302 6 0 B (1.00000000 0.00000000) *
## 123) compactness_se>=-4.054302 13 0 M (0.00000000 1.00000000) *
## 31) texture_mean< 3.033028 73 7 M (0.09589041 0.90410959)
## 62) symmetry_worst< -1.816375 10 5 B (0.50000000 0.50000000)
## 124) texture_mean< 3.018626 5 0 B (1.00000000 0.00000000) *
## 125) texture_mean>=3.018626 5 0 M (0.00000000 1.00000000) *
## 63) symmetry_worst>=-1.816375 63 2 M (0.03174603 0.96825397)
## 126) texture_mean< 2.657764 1 0 B (1.00000000 0.00000000) *
## 127) texture_mean>=2.657764 62 1 M (0.01612903 0.98387097) *
##
## $trees[[50]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 436 B (0.52192982 0.47807018)
## 2) smoothness_worst< -1.473476 668 269 B (0.59730539 0.40269461)
## 4) smoothness_worst>=-1.4768 51 0 B (1.00000000 0.00000000) *
## 5) smoothness_worst< -1.4768 617 269 B (0.56401945 0.43598055)
## 10) smoothness_worst< -1.48191 589 245 B (0.58404075 0.41595925)
## 20) compactness_se< -3.021724 542 212 B (0.60885609 0.39114391)
## 40) smoothness_mean>=-2.301086 93 17 B (0.81720430 0.18279570)
## 80) smoothness_worst>=-1.533868 79 7 B (0.91139241 0.08860759) *
## 81) smoothness_worst< -1.533868 14 4 M (0.28571429 0.71428571) *
## 41) smoothness_mean< -2.301086 449 195 B (0.56570156 0.43429844)
## 82) smoothness_mean< -2.303285 434 180 B (0.58525346 0.41474654) *
## 83) smoothness_mean>=-2.303285 15 0 M (0.00000000 1.00000000) *
## 21) compactness_se>=-3.021724 47 14 M (0.29787234 0.70212766)
## 42) texture_mean< 3.038537 23 9 B (0.60869565 0.39130435)
## 84) smoothness_mean< -2.291354 14 0 B (1.00000000 0.00000000) *
## 85) smoothness_mean>=-2.291354 9 0 M (0.00000000 1.00000000) *
## 43) texture_mean>=3.038537 24 0 M (0.00000000 1.00000000) *
## 11) smoothness_worst>=-1.48191 28 4 M (0.14285714 0.85714286)
## 22) texture_mean< 2.755881 3 0 B (1.00000000 0.00000000) *
## 23) texture_mean>=2.755881 25 1 M (0.04000000 0.96000000)
## 46) compactness_se< -3.967101 3 1 M (0.33333333 0.66666667)
## 92) texture_mean>=2.844596 1 0 B (1.00000000 0.00000000) *
## 93) texture_mean< 2.844596 2 0 M (0.00000000 1.00000000) *
## 47) compactness_se>=-3.967101 22 0 M (0.00000000 1.00000000) *
## 3) smoothness_worst>=-1.473476 244 77 M (0.31557377 0.68442623)
## 6) texture_worst>=4.76475 67 31 B (0.53731343 0.46268657)
## 12) symmetry_worst>=-1.537481 30 5 B (0.83333333 0.16666667)
## 24) symmetry_worst< -1.362966 27 2 B (0.92592593 0.07407407)
## 48) smoothness_mean< -2.252478 26 1 B (0.96153846 0.03846154)
## 96) smoothness_worst< -1.426496 19 0 B (1.00000000 0.00000000) *
## 97) smoothness_worst>=-1.426496 7 1 B (0.85714286 0.14285714) *
## 49) smoothness_mean>=-2.252478 1 0 M (0.00000000 1.00000000) *
## 25) symmetry_worst>=-1.362966 3 0 M (0.00000000 1.00000000) *
## 13) symmetry_worst< -1.537481 37 11 M (0.29729730 0.70270270)
## 26) compactness_se>=-3.490357 9 3 B (0.66666667 0.33333333)
## 52) symmetry_worst< -1.763957 6 0 B (1.00000000 0.00000000) *
## 53) symmetry_worst>=-1.763957 3 0 M (0.00000000 1.00000000) *
## 27) compactness_se< -3.490357 28 5 M (0.17857143 0.82142857)
## 54) compactness_se< -4.054302 11 5 M (0.45454545 0.54545455)
## 108) symmetry_worst< -1.650994 5 0 B (1.00000000 0.00000000) *
## 109) symmetry_worst>=-1.650994 6 0 M (0.00000000 1.00000000) *
## 55) compactness_se>=-4.054302 17 0 M (0.00000000 1.00000000) *
## 7) texture_worst< 4.76475 177 41 M (0.23163842 0.76836158)
## 14) symmetry_worst< -1.895488 12 0 B (1.00000000 0.00000000) *
## 15) symmetry_worst>=-1.895488 165 29 M (0.17575758 0.82424242)
## 30) texture_worst< 4.398698 62 25 M (0.40322581 0.59677419)
## 60) smoothness_mean>=-2.326212 43 19 B (0.55813953 0.44186047)
## 120) compactness_se< -3.786997 13 0 B (1.00000000 0.00000000) *
## 121) compactness_se>=-3.786997 30 11 M (0.36666667 0.63333333) *
## 61) smoothness_mean< -2.326212 19 1 M (0.05263158 0.94736842)
## 122) texture_mean< 2.561441 1 0 B (1.00000000 0.00000000) *
## 123) texture_mean>=2.561441 18 0 M (0.00000000 1.00000000) *
## 31) texture_worst>=4.398698 103 4 M (0.03883495 0.96116505)
## 62) compactness_se< -4.224437 11 3 M (0.27272727 0.72727273)
## 124) texture_mean< 2.962963 3 0 B (1.00000000 0.00000000) *
## 125) texture_mean>=2.962963 8 0 M (0.00000000 1.00000000) *
## 63) compactness_se>=-4.224437 92 1 M (0.01086957 0.98913043)
## 126) smoothness_mean>=-2.090314 4 1 M (0.25000000 0.75000000) *
## 127) smoothness_mean< -2.090314 88 0 M (0.00000000 1.00000000) *
##
## $trees[[51]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 408 M (0.44736842 0.55263158)
## 2) smoothness_mean>=-2.328057 396 176 B (0.55555556 0.44444444)
## 4) smoothness_mean< -2.305941 65 10 B (0.84615385 0.15384615)
## 8) compactness_se< -3.202039 61 6 B (0.90163934 0.09836066)
## 16) symmetry_worst< -1.40737 59 4 B (0.93220339 0.06779661)
## 32) texture_mean< 3.321235 58 3 B (0.94827586 0.05172414)
## 64) compactness_se>=-4.276389 51 1 B (0.98039216 0.01960784) *
## 65) compactness_se< -4.276389 7 2 B (0.71428571 0.28571429) *
## 33) texture_mean>=3.321235 1 0 M (0.00000000 1.00000000) *
## 17) symmetry_worst>=-1.40737 2 0 M (0.00000000 1.00000000) *
## 9) compactness_se>=-3.202039 4 0 M (0.00000000 1.00000000) *
## 5) smoothness_mean>=-2.305941 331 165 M (0.49848943 0.50151057)
## 10) symmetry_worst>=-1.930267 267 116 B (0.56554307 0.43445693)
## 20) symmetry_worst< -1.825003 41 3 B (0.92682927 0.07317073)
## 40) texture_worst< 4.927821 38 0 B (1.00000000 0.00000000) *
## 41) texture_worst>=4.927821 3 0 M (0.00000000 1.00000000) *
## 21) symmetry_worst>=-1.825003 226 113 B (0.50000000 0.50000000)
## 42) texture_worst< 4.400395 86 22 B (0.74418605 0.25581395)
## 84) symmetry_worst< -1.532237 44 2 B (0.95454545 0.04545455) *
## 85) symmetry_worst>=-1.532237 42 20 B (0.52380952 0.47619048) *
## 43) texture_worst>=4.400395 140 49 M (0.35000000 0.65000000)
## 86) texture_worst>=4.769176 44 11 B (0.75000000 0.25000000) *
## 87) texture_worst< 4.769176 96 16 M (0.16666667 0.83333333) *
## 11) symmetry_worst< -1.930267 64 14 M (0.21875000 0.78125000)
## 22) symmetry_worst< -2.188127 25 11 M (0.44000000 0.56000000)
## 44) symmetry_worst>=-2.270701 11 0 B (1.00000000 0.00000000) *
## 45) symmetry_worst< -2.270701 14 0 M (0.00000000 1.00000000) *
## 23) symmetry_worst>=-2.188127 39 3 M (0.07692308 0.92307692)
## 46) compactness_se< -4.001166 1 0 B (1.00000000 0.00000000) *
## 47) compactness_se>=-4.001166 38 2 M (0.05263158 0.94736842)
## 94) compactness_se>=-3.425209 4 2 B (0.50000000 0.50000000) *
## 95) compactness_se< -3.425209 34 0 M (0.00000000 1.00000000) *
## 3) smoothness_mean< -2.328057 516 188 M (0.36434109 0.63565891)
## 6) texture_mean< 2.74084 13 0 B (1.00000000 0.00000000) *
## 7) texture_mean>=2.74084 503 175 M (0.34791252 0.65208748)
## 14) smoothness_mean< -2.564314 14 2 B (0.85714286 0.14285714)
## 28) compactness_se< -3.021883 12 0 B (1.00000000 0.00000000) *
## 29) compactness_se>=-3.021883 2 0 M (0.00000000 1.00000000) *
## 15) smoothness_mean>=-2.564314 489 163 M (0.33333333 0.66666667)
## 30) symmetry_worst< -2.25148 6 0 B (1.00000000 0.00000000) *
## 31) symmetry_worst>=-2.25148 483 157 M (0.32505176 0.67494824)
## 62) smoothness_mean< -2.367284 359 132 M (0.36768802 0.63231198)
## 124) smoothness_worst>=-1.512058 73 29 B (0.60273973 0.39726027) *
## 125) smoothness_worst< -1.512058 286 88 M (0.30769231 0.69230769) *
## 63) smoothness_mean>=-2.367284 124 25 M (0.20161290 0.79838710)
## 126) smoothness_worst< -1.544057 11 3 B (0.72727273 0.27272727) *
## 127) smoothness_worst>=-1.544057 113 17 M (0.15044248 0.84955752) *
##
## $trees[[52]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 451 B (0.50548246 0.49451754)
## 2) symmetry_worst< -1.816281 352 139 B (0.60511364 0.39488636)
## 4) texture_mean< 2.963209 114 26 B (0.77192982 0.22807018)
## 8) smoothness_worst>=-1.595961 90 10 B (0.88888889 0.11111111)
## 16) texture_worst< 4.734027 87 8 B (0.90804598 0.09195402)
## 32) smoothness_worst< -1.441362 86 7 B (0.91860465 0.08139535)
## 64) smoothness_mean< -2.30802 61 1 B (0.98360656 0.01639344) *
## 65) smoothness_mean>=-2.30802 25 6 B (0.76000000 0.24000000) *
## 33) smoothness_worst>=-1.441362 1 0 M (0.00000000 1.00000000) *
## 17) texture_worst>=4.734027 3 1 M (0.33333333 0.66666667)
## 34) texture_mean< 2.946804 1 0 B (1.00000000 0.00000000) *
## 35) texture_mean>=2.946804 2 0 M (0.00000000 1.00000000) *
## 9) smoothness_worst< -1.595961 24 8 M (0.33333333 0.66666667)
## 18) smoothness_worst< -1.609607 8 0 B (1.00000000 0.00000000) *
## 19) smoothness_worst>=-1.609607 16 0 M (0.00000000 1.00000000) *
## 5) texture_mean>=2.963209 238 113 B (0.52521008 0.47478992)
## 10) smoothness_worst< -1.52112 165 61 B (0.63030303 0.36969697)
## 20) texture_mean>=3.087399 81 14 B (0.82716049 0.17283951)
## 40) compactness_se< -3.400535 72 9 B (0.87500000 0.12500000)
## 80) texture_worst< 5.309872 56 3 B (0.94642857 0.05357143) *
## 81) texture_worst>=5.309872 16 6 B (0.62500000 0.37500000) *
## 41) compactness_se>=-3.400535 9 4 M (0.44444444 0.55555556)
## 82) texture_mean< 3.136493 4 0 B (1.00000000 0.00000000) *
## 83) texture_mean>=3.136493 5 0 M (0.00000000 1.00000000) *
## 21) texture_mean< 3.087399 84 37 M (0.44047619 0.55952381)
## 42) compactness_se>=-3.439035 23 3 B (0.86956522 0.13043478)
## 84) texture_mean< 3.076827 20 0 B (1.00000000 0.00000000) *
## 85) texture_mean>=3.076827 3 0 M (0.00000000 1.00000000) *
## 43) compactness_se< -3.439035 61 17 M (0.27868852 0.72131148)
## 86) symmetry_worst< -1.969194 20 7 B (0.65000000 0.35000000) *
## 87) symmetry_worst>=-1.969194 41 4 M (0.09756098 0.90243902) *
## 11) smoothness_worst>=-1.52112 73 21 M (0.28767123 0.71232877)
## 22) texture_mean< 2.975782 8 0 B (1.00000000 0.00000000) *
## 23) texture_mean>=2.975782 65 13 M (0.20000000 0.80000000)
## 46) texture_worst< 4.469369 4 0 B (1.00000000 0.00000000) *
## 47) texture_worst>=4.469369 61 9 M (0.14754098 0.85245902)
## 94) symmetry_worst< -2.188127 7 3 B (0.57142857 0.42857143) *
## 95) symmetry_worst>=-2.188127 54 5 M (0.09259259 0.90740741) *
## 3) symmetry_worst>=-1.816281 560 248 M (0.44285714 0.55714286)
## 6) texture_worst< 4.514456 211 90 B (0.57345972 0.42654028)
## 12) smoothness_mean< -2.173316 173 56 B (0.67630058 0.32369942)
## 24) texture_worst>=4.463188 49 0 B (1.00000000 0.00000000) *
## 25) texture_worst< 4.463188 124 56 B (0.54838710 0.45161290)
## 50) smoothness_mean>=-2.353616 70 15 B (0.78571429 0.21428571)
## 100) smoothness_worst< -1.430373 66 11 B (0.83333333 0.16666667) *
## 101) smoothness_worst>=-1.430373 4 0 M (0.00000000 1.00000000) *
## 51) smoothness_mean< -2.353616 54 13 M (0.24074074 0.75925926)
## 102) compactness_se< -4.559289 4 0 B (1.00000000 0.00000000) *
## 103) compactness_se>=-4.559289 50 9 M (0.18000000 0.82000000) *
## 13) smoothness_mean>=-2.173316 38 4 M (0.10526316 0.89473684)
## 26) smoothness_mean>=-1.889548 3 0 B (1.00000000 0.00000000) *
## 27) smoothness_mean< -1.889548 35 1 M (0.02857143 0.97142857)
## 54) smoothness_mean>=-2.000349 6 1 M (0.16666667 0.83333333)
## 108) texture_mean< 2.688296 1 0 B (1.00000000 0.00000000) *
## 109) texture_mean>=2.688296 5 0 M (0.00000000 1.00000000) *
## 55) smoothness_mean< -2.000349 29 0 M (0.00000000 1.00000000) *
## 7) texture_worst>=4.514456 349 127 M (0.36389685 0.63610315)
## 14) symmetry_worst>=-1.749963 285 121 M (0.42456140 0.57543860)
## 28) symmetry_worst< -1.716176 37 7 B (0.81081081 0.18918919)
## 56) texture_mean< 3.407548 32 2 B (0.93750000 0.06250000)
## 112) compactness_se>=-4.528789 29 0 B (1.00000000 0.00000000) *
## 113) compactness_se< -4.528789 3 1 M (0.33333333 0.66666667) *
## 57) texture_mean>=3.407548 5 0 M (0.00000000 1.00000000) *
## 29) symmetry_worst>=-1.716176 248 91 M (0.36693548 0.63306452)
## 58) compactness_se< -3.447524 191 82 M (0.42931937 0.57068063)
## 116) compactness_se>=-3.494961 28 4 B (0.85714286 0.14285714) *
## 117) compactness_se< -3.494961 163 58 M (0.35582822 0.64417178) *
## 59) compactness_se>=-3.447524 57 9 M (0.15789474 0.84210526)
## 118) compactness_se>=-3.18382 19 9 M (0.47368421 0.52631579) *
## 119) compactness_se< -3.18382 38 0 M (0.00000000 1.00000000) *
## 15) symmetry_worst< -1.749963 64 6 M (0.09375000 0.90625000)
## 30) texture_worst>=4.897895 6 1 B (0.83333333 0.16666667)
## 60) compactness_se>=-4.246101 5 0 B (1.00000000 0.00000000) *
## 61) compactness_se< -4.246101 1 0 M (0.00000000 1.00000000) *
## 31) texture_worst< 4.897895 58 1 M (0.01724138 0.98275862)
## 62) texture_mean< 2.875669 1 0 B (1.00000000 0.00000000) *
## 63) texture_mean>=2.875669 57 0 M (0.00000000 1.00000000) *
##
## $trees[[53]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 409 B (0.55153509 0.44846491)
## 2) compactness_se< -3.672219 541 208 B (0.61552680 0.38447320)
## 4) compactness_se>=-3.761452 65 4 B (0.93846154 0.06153846)
## 8) smoothness_worst>=-1.534142 58 0 B (1.00000000 0.00000000) *
## 9) smoothness_worst< -1.534142 7 3 M (0.42857143 0.57142857)
## 18) smoothness_mean< -2.477152 3 0 B (1.00000000 0.00000000) *
## 19) smoothness_mean>=-2.477152 4 0 M (0.00000000 1.00000000) *
## 5) compactness_se< -3.761452 476 204 B (0.57142857 0.42857143)
## 10) texture_mean< 2.976294 246 77 B (0.68699187 0.31300813)
## 20) symmetry_worst>=-1.749307 135 21 B (0.84444444 0.15555556)
## 40) smoothness_worst< -1.451541 112 6 B (0.94642857 0.05357143)
## 80) texture_worst< 4.707428 107 3 B (0.97196262 0.02803738) *
## 81) texture_worst>=4.707428 5 2 M (0.40000000 0.60000000) *
## 41) smoothness_worst>=-1.451541 23 8 M (0.34782609 0.65217391)
## 82) compactness_se< -4.023814 7 0 B (1.00000000 0.00000000) *
## 83) compactness_se>=-4.023814 16 1 M (0.06250000 0.93750000) *
## 21) symmetry_worst< -1.749307 111 55 M (0.49549550 0.50450450)
## 42) symmetry_worst< -1.787433 93 39 B (0.58064516 0.41935484)
## 84) texture_worst>=4.4131 56 13 B (0.76785714 0.23214286) *
## 85) texture_worst< 4.4131 37 11 M (0.29729730 0.70270270) *
## 43) symmetry_worst>=-1.787433 18 1 M (0.05555556 0.94444444)
## 86) texture_mean< 2.788049 1 0 B (1.00000000 0.00000000) *
## 87) texture_mean>=2.788049 17 0 M (0.00000000 1.00000000) *
## 11) texture_mean>=2.976294 230 103 M (0.44782609 0.55217391)
## 22) smoothness_mean>=-2.295291 48 13 B (0.72916667 0.27083333)
## 44) compactness_se< -4.030876 28 0 B (1.00000000 0.00000000) *
## 45) compactness_se>=-4.030876 20 7 M (0.35000000 0.65000000)
## 90) smoothness_worst< -1.475287 7 0 B (1.00000000 0.00000000) *
## 91) smoothness_worst>=-1.475287 13 0 M (0.00000000 1.00000000) *
## 23) smoothness_mean< -2.295291 182 68 M (0.37362637 0.62637363)
## 46) compactness_se< -4.557422 22 4 B (0.81818182 0.18181818)
## 92) texture_mean>=3.186756 12 0 B (1.00000000 0.00000000) *
## 93) texture_mean< 3.186756 10 4 B (0.60000000 0.40000000) *
## 47) compactness_se>=-4.557422 160 50 M (0.31250000 0.68750000)
## 94) symmetry_worst< -1.670808 104 50 M (0.48076923 0.51923077) *
## 95) symmetry_worst>=-1.670808 56 0 M (0.00000000 1.00000000) *
## 3) compactness_se>=-3.672219 371 170 M (0.45822102 0.54177898)
## 6) symmetry_worst< -1.816281 135 54 B (0.60000000 0.40000000)
## 12) symmetry_worst>=-1.982941 64 13 B (0.79687500 0.20312500)
## 24) texture_mean< 3.058819 56 6 B (0.89285714 0.10714286)
## 48) compactness_se>=-3.596781 51 1 B (0.98039216 0.01960784)
## 96) texture_worst< 4.806084 50 0 B (1.00000000 0.00000000) *
## 97) texture_worst>=4.806084 1 0 M (0.00000000 1.00000000) *
## 49) compactness_se< -3.596781 5 0 M (0.00000000 1.00000000) *
## 25) texture_mean>=3.058819 8 1 M (0.12500000 0.87500000)
## 50) smoothness_mean< -2.497464 1 0 B (1.00000000 0.00000000) *
## 51) smoothness_mean>=-2.497464 7 0 M (0.00000000 1.00000000) *
## 13) symmetry_worst< -1.982941 71 30 M (0.42253521 0.57746479)
## 26) texture_mean>=3.268457 14 0 B (1.00000000 0.00000000) *
## 27) texture_mean< 3.268457 57 16 M (0.28070175 0.71929825)
## 54) compactness_se>=-3.41277 22 9 B (0.59090909 0.40909091)
## 108) texture_mean< 3.076827 10 0 B (1.00000000 0.00000000) *
## 109) texture_mean>=3.076827 12 3 M (0.25000000 0.75000000) *
## 55) compactness_se< -3.41277 35 3 M (0.08571429 0.91428571)
## 110) texture_mean>=3.07129 4 1 B (0.75000000 0.25000000) *
## 111) texture_mean< 3.07129 31 0 M (0.00000000 1.00000000) *
## 7) symmetry_worst>=-1.816281 236 89 M (0.37711864 0.62288136)
## 14) compactness_se>=-3.494301 175 82 M (0.46857143 0.53142857)
## 28) symmetry_worst< -1.001713 164 82 B (0.50000000 0.50000000)
## 56) texture_mean< 2.927442 68 23 B (0.66176471 0.33823529)
## 112) symmetry_worst< -1.343592 50 9 B (0.82000000 0.18000000) *
## 113) symmetry_worst>=-1.343592 18 4 M (0.22222222 0.77777778) *
## 57) texture_mean>=2.927442 96 37 M (0.38541667 0.61458333)
## 114) symmetry_worst>=-1.694861 61 27 B (0.55737705 0.44262295) *
## 115) symmetry_worst< -1.694861 35 3 M (0.08571429 0.91428571) *
## 29) symmetry_worst>=-1.001713 11 0 M (0.00000000 1.00000000) *
## 15) compactness_se< -3.494301 61 7 M (0.11475410 0.88524590)
## 30) texture_mean>=3.094444 7 1 B (0.85714286 0.14285714)
## 60) texture_mean< 3.383373 6 0 B (1.00000000 0.00000000) *
## 61) texture_mean>=3.383373 1 0 M (0.00000000 1.00000000) *
## 31) texture_mean< 3.094444 54 1 M (0.01851852 0.98148148)
## 62) symmetry_worst>=-1.47212 6 1 M (0.16666667 0.83333333)
## 124) texture_mean< 2.800842 1 0 B (1.00000000 0.00000000) *
## 125) texture_mean>=2.800842 5 0 M (0.00000000 1.00000000) *
## 63) symmetry_worst< -1.47212 48 0 M (0.00000000 1.00000000) *
##
## $trees[[54]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 452 M (0.49561404 0.50438596)
## 2) compactness_se< -3.648711 530 221 B (0.58301887 0.41698113)
## 4) compactness_se>=-4.676462 467 176 B (0.62312634 0.37687366)
## 8) texture_mean< 2.960364 172 46 B (0.73255814 0.26744186)
## 16) symmetry_worst>=-1.749307 99 10 B (0.89898990 0.10101010)
## 32) texture_worst< 4.692158 95 6 B (0.93684211 0.06315789)
## 64) texture_mean>=2.518783 93 4 B (0.95698925 0.04301075) *
## 65) texture_mean< 2.518783 2 0 M (0.00000000 1.00000000) *
## 33) texture_worst>=4.692158 4 0 M (0.00000000 1.00000000) *
## 17) symmetry_worst< -1.749307 73 36 B (0.50684932 0.49315068)
## 34) symmetry_worst< -1.786753 54 19 B (0.64814815 0.35185185)
## 68) texture_worst< 4.608983 46 12 B (0.73913043 0.26086957) *
## 69) texture_worst>=4.608983 8 1 M (0.12500000 0.87500000) *
## 35) symmetry_worst>=-1.786753 19 2 M (0.10526316 0.89473684)
## 70) texture_mean>=2.946722 2 0 B (1.00000000 0.00000000) *
## 71) texture_mean< 2.946722 17 0 M (0.00000000 1.00000000) *
## 9) texture_mean>=2.960364 295 130 B (0.55932203 0.44067797)
## 18) symmetry_worst< -2.052205 49 9 B (0.81632653 0.18367347)
## 36) smoothness_worst< -1.514694 43 3 B (0.93023256 0.06976744)
## 72) smoothness_worst>=-1.668282 40 0 B (1.00000000 0.00000000) *
## 73) smoothness_worst< -1.668282 3 0 M (0.00000000 1.00000000) *
## 37) smoothness_worst>=-1.514694 6 0 M (0.00000000 1.00000000) *
## 19) symmetry_worst>=-2.052205 246 121 B (0.50813008 0.49186992)
## 38) compactness_se>=-3.781676 47 10 B (0.78723404 0.21276596)
## 76) texture_worst>=4.59283 41 4 B (0.90243902 0.09756098) *
## 77) texture_worst< 4.59283 6 0 M (0.00000000 1.00000000) *
## 39) compactness_se< -3.781676 199 88 M (0.44221106 0.55778894)
## 78) texture_worst< 5.194184 165 80 B (0.51515152 0.48484848) *
## 79) texture_worst>=5.194184 34 3 M (0.08823529 0.91176471) *
## 5) compactness_se< -4.676462 63 18 M (0.28571429 0.71428571)
## 10) compactness_se< -4.779408 10 0 B (1.00000000 0.00000000) *
## 11) compactness_se>=-4.779408 53 8 M (0.15094340 0.84905660)
## 22) smoothness_mean>=-2.441817 7 0 B (1.00000000 0.00000000) *
## 23) smoothness_mean< -2.441817 46 1 M (0.02173913 0.97826087)
## 46) texture_worst< 4.52395 1 0 B (1.00000000 0.00000000) *
## 47) texture_worst>=4.52395 45 0 M (0.00000000 1.00000000) *
## 3) compactness_se>=-3.648711 382 143 M (0.37434555 0.62565445)
## 6) compactness_se>=-2.809774 27 5 B (0.81481481 0.18518519)
## 12) texture_mean< 3.003947 20 0 B (1.00000000 0.00000000) *
## 13) texture_mean>=3.003947 7 2 M (0.28571429 0.71428571)
## 26) smoothness_mean< -2.364398 2 0 B (1.00000000 0.00000000) *
## 27) smoothness_mean>=-2.364398 5 0 M (0.00000000 1.00000000) *
## 7) compactness_se< -2.809774 355 121 M (0.34084507 0.65915493)
## 14) smoothness_mean< -2.503795 15 3 B (0.80000000 0.20000000)
## 28) compactness_se< -2.979429 12 0 B (1.00000000 0.00000000) *
## 29) compactness_se>=-2.979429 3 0 M (0.00000000 1.00000000) *
## 15) smoothness_mean>=-2.503795 340 109 M (0.32058824 0.67941176)
## 30) compactness_se< -2.910852 316 109 M (0.34493671 0.65506329)
## 60) compactness_se>=-2.924003 9 0 B (1.00000000 0.00000000) *
## 61) compactness_se< -2.924003 307 100 M (0.32573290 0.67426710)
## 122) smoothness_worst< -1.482896 167 72 M (0.43113772 0.56886228) *
## 123) smoothness_worst>=-1.482896 140 28 M (0.20000000 0.80000000) *
## 31) compactness_se>=-2.910852 24 0 M (0.00000000 1.00000000) *
##
## $trees[[55]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 433 B (0.52521930 0.47478070)
## 2) symmetry_worst< -1.424186 819 366 B (0.55311355 0.44688645)
## 4) texture_worst< 4.260219 66 10 B (0.84848485 0.15151515)
## 8) smoothness_mean< -2.318766 26 0 B (1.00000000 0.00000000) *
## 9) smoothness_mean>=-2.318766 40 10 B (0.75000000 0.25000000)
## 18) smoothness_mean>=-2.280931 27 2 B (0.92592593 0.07407407)
## 36) texture_mean< 2.878198 26 1 B (0.96153846 0.03846154)
## 72) compactness_se>=-3.757389 23 0 B (1.00000000 0.00000000) *
## 73) compactness_se< -3.757389 3 1 B (0.66666667 0.33333333) *
## 37) texture_mean>=2.878198 1 0 M (0.00000000 1.00000000) *
## 19) smoothness_mean< -2.280931 13 5 M (0.38461538 0.61538462)
## 38) texture_worst< 4.056844 5 0 B (1.00000000 0.00000000) *
## 39) texture_worst>=4.056844 8 0 M (0.00000000 1.00000000) *
## 5) texture_worst>=4.260219 753 356 B (0.52722444 0.47277556)
## 10) symmetry_worst>=-1.471051 30 2 B (0.93333333 0.06666667)
## 20) smoothness_mean< -2.29943 28 0 B (1.00000000 0.00000000) *
## 21) smoothness_mean>=-2.29943 2 0 M (0.00000000 1.00000000) *
## 11) symmetry_worst< -1.471051 723 354 B (0.51037344 0.48962656)
## 22) smoothness_mean< -2.392182 304 115 B (0.62171053 0.37828947)
## 44) symmetry_worst>=-1.860457 201 60 B (0.70149254 0.29850746)
## 88) symmetry_worst< -1.541072 169 38 B (0.77514793 0.22485207) *
## 89) symmetry_worst>=-1.541072 32 10 M (0.31250000 0.68750000) *
## 45) symmetry_worst< -1.860457 103 48 M (0.46601942 0.53398058)
## 90) symmetry_worst< -2.01934 40 10 B (0.75000000 0.25000000) *
## 91) symmetry_worst>=-2.01934 63 18 M (0.28571429 0.71428571) *
## 23) smoothness_mean>=-2.392182 419 180 M (0.42959427 0.57040573)
## 46) smoothness_mean>=-2.38347 375 180 M (0.48000000 0.52000000)
## 92) compactness_se< -3.445472 255 111 B (0.56470588 0.43529412) *
## 93) compactness_se>=-3.445472 120 36 M (0.30000000 0.70000000) *
## 47) smoothness_mean< -2.38347 44 0 M (0.00000000 1.00000000) *
## 3) symmetry_worst>=-1.424186 93 26 M (0.27956989 0.72043011)
## 6) smoothness_worst< -1.49848 23 8 B (0.65217391 0.34782609)
## 12) smoothness_worst>=-1.553939 13 0 B (1.00000000 0.00000000) *
## 13) smoothness_worst< -1.553939 10 2 M (0.20000000 0.80000000)
## 26) texture_mean< 2.973222 2 0 B (1.00000000 0.00000000) *
## 27) texture_mean>=2.973222 8 0 M (0.00000000 1.00000000) *
## 7) smoothness_worst>=-1.49848 70 11 M (0.15714286 0.84285714)
## 14) compactness_se>=-2.638423 6 0 B (1.00000000 0.00000000) *
## 15) compactness_se< -2.638423 64 5 M (0.07812500 0.92187500)
## 30) texture_mean< 2.77286 10 5 B (0.50000000 0.50000000)
## 60) symmetry_worst< -1.232339 5 0 B (1.00000000 0.00000000) *
## 61) symmetry_worst>=-1.232339 5 0 M (0.00000000 1.00000000) *
## 31) texture_mean>=2.77286 54 0 M (0.00000000 1.00000000) *
##
## $trees[[56]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 452 B (0.50438596 0.49561404)
## 2) smoothness_mean< -2.408446 299 107 B (0.64214047 0.35785953)
## 4) compactness_se>=-4.658767 260 78 B (0.70000000 0.30000000)
## 8) texture_worst>=4.498003 208 47 B (0.77403846 0.22596154)
## 16) texture_mean< 3.388429 199 38 B (0.80904523 0.19095477)
## 32) symmetry_worst>=-2.218277 190 30 B (0.84210526 0.15789474)
## 64) symmetry_worst< -1.496954 184 25 B (0.86413043 0.13586957) *
## 65) symmetry_worst>=-1.496954 6 1 M (0.16666667 0.83333333) *
## 33) symmetry_worst< -2.218277 9 1 M (0.11111111 0.88888889)
## 66) smoothness_mean< -2.57545 1 0 B (1.00000000 0.00000000) *
## 67) smoothness_mean>=-2.57545 8 0 M (0.00000000 1.00000000) *
## 17) texture_mean>=3.388429 9 0 M (0.00000000 1.00000000) *
## 9) texture_worst< 4.498003 52 21 M (0.40384615 0.59615385)
## 18) compactness_se>=-3.483667 11 1 B (0.90909091 0.09090909)
## 36) texture_mean< 3.038737 10 0 B (1.00000000 0.00000000) *
## 37) texture_mean>=3.038737 1 0 M (0.00000000 1.00000000) *
## 19) compactness_se< -3.483667 41 11 M (0.26829268 0.73170732)
## 38) smoothness_mean< -2.469112 10 4 B (0.60000000 0.40000000)
## 76) texture_mean< 2.94329 6 0 B (1.00000000 0.00000000) *
## 77) texture_mean>=2.94329 4 0 M (0.00000000 1.00000000) *
## 39) smoothness_mean>=-2.469112 31 5 M (0.16129032 0.83870968)
## 78) texture_worst>=4.418221 8 3 B (0.62500000 0.37500000) *
## 79) texture_worst< 4.418221 23 0 M (0.00000000 1.00000000) *
## 5) compactness_se< -4.658767 39 10 M (0.25641026 0.74358974)
## 10) compactness_se< -4.706178 9 2 B (0.77777778 0.22222222)
## 20) symmetry_worst< -1.284644 7 0 B (1.00000000 0.00000000) *
## 21) symmetry_worst>=-1.284644 2 0 M (0.00000000 1.00000000) *
## 11) compactness_se>=-4.706178 30 3 M (0.10000000 0.90000000)
## 22) smoothness_mean>=-2.441817 3 0 B (1.00000000 0.00000000) *
## 23) smoothness_mean< -2.441817 27 0 M (0.00000000 1.00000000) *
## 3) smoothness_mean>=-2.408446 613 268 M (0.43719413 0.56280587)
## 6) texture_mean< 3.007414 361 175 B (0.51523546 0.48476454)
## 12) texture_mean>=3.003452 27 2 B (0.92592593 0.07407407)
## 24) smoothness_mean< -2.060282 25 0 B (1.00000000 0.00000000) *
## 25) smoothness_mean>=-2.060282 2 0 M (0.00000000 1.00000000) *
## 13) texture_mean< 3.003452 334 161 M (0.48203593 0.51796407)
## 26) compactness_se>=-2.834229 16 0 B (1.00000000 0.00000000) *
## 27) compactness_se< -2.834229 318 145 M (0.45597484 0.54402516)
## 54) symmetry_worst>=-2.400126 302 145 M (0.48013245 0.51986755)
## 108) smoothness_worst< -1.549191 39 8 B (0.79487179 0.20512821) *
## 109) smoothness_worst>=-1.549191 263 114 M (0.43346008 0.56653992) *
## 55) symmetry_worst< -2.400126 16 0 M (0.00000000 1.00000000) *
## 7) texture_mean>=3.007414 252 82 M (0.32539683 0.67460317)
## 14) symmetry_worst< -2.207988 37 9 B (0.75675676 0.24324324)
## 28) smoothness_mean< -2.282028 30 2 B (0.93333333 0.06666667)
## 56) compactness_se>=-4.450281 28 0 B (1.00000000 0.00000000) *
## 57) compactness_se< -4.450281 2 0 M (0.00000000 1.00000000) *
## 29) smoothness_mean>=-2.282028 7 0 M (0.00000000 1.00000000) *
## 15) symmetry_worst>=-2.207988 215 54 M (0.25116279 0.74883721)
## 30) smoothness_mean>=-2.383798 164 54 M (0.32926829 0.67073171)
## 60) symmetry_worst>=-1.477364 25 7 B (0.72000000 0.28000000)
## 120) symmetry_worst< -1.41845 14 0 B (1.00000000 0.00000000) *
## 121) symmetry_worst>=-1.41845 11 4 M (0.36363636 0.63636364) *
## 61) symmetry_worst< -1.477364 139 36 M (0.25899281 0.74100719)
## 122) texture_worst< 4.682677 36 16 B (0.55555556 0.44444444) *
## 123) texture_worst>=4.682677 103 16 M (0.15533981 0.84466019) *
## 31) smoothness_mean< -2.383798 51 0 M (0.00000000 1.00000000) *
##
## $trees[[57]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 443 B (0.51425439 0.48574561)
## 2) texture_mean< 2.960364 375 133 B (0.64533333 0.35466667)
## 4) compactness_se>=-3.344528 86 9 B (0.89534884 0.10465116)
## 8) compactness_se< -3.086764 55 0 B (1.00000000 0.00000000) *
## 9) compactness_se>=-3.086764 31 9 B (0.70967742 0.29032258)
## 18) texture_mean>=2.850705 16 0 B (1.00000000 0.00000000) *
## 19) texture_mean< 2.850705 15 6 M (0.40000000 0.60000000)
## 38) symmetry_worst< -1.62579 6 0 B (1.00000000 0.00000000) *
## 39) symmetry_worst>=-1.62579 9 0 M (0.00000000 1.00000000) *
## 5) compactness_se< -3.344528 289 124 B (0.57093426 0.42906574)
## 10) compactness_se< -3.668499 188 62 B (0.67021277 0.32978723)
## 20) symmetry_worst>=-1.749307 92 14 B (0.84782609 0.15217391)
## 40) symmetry_worst< -1.533879 43 0 B (1.00000000 0.00000000) *
## 41) symmetry_worst>=-1.533879 49 14 B (0.71428571 0.28571429)
## 82) texture_worst< 4.61159 41 7 B (0.82926829 0.17073171) *
## 83) texture_worst>=4.61159 8 1 M (0.12500000 0.87500000) *
## 21) symmetry_worst< -1.749307 96 48 B (0.50000000 0.50000000)
## 42) symmetry_worst< -1.814891 56 17 B (0.69642857 0.30357143)
## 84) smoothness_mean< -2.311491 38 7 B (0.81578947 0.18421053) *
## 85) smoothness_mean>=-2.311491 18 8 M (0.44444444 0.55555556) *
## 43) symmetry_worst>=-1.814891 40 9 M (0.22500000 0.77500000)
## 86) compactness_se>=-3.93685 8 1 B (0.87500000 0.12500000) *
## 87) compactness_se< -3.93685 32 2 M (0.06250000 0.93750000) *
## 11) compactness_se>=-3.668499 101 39 M (0.38613861 0.61386139)
## 22) symmetry_worst< -1.834844 26 2 B (0.92307692 0.07692308)
## 44) symmetry_worst>=-1.982941 19 0 B (1.00000000 0.00000000) *
## 45) symmetry_worst< -1.982941 7 2 B (0.71428571 0.28571429)
## 90) texture_mean>=2.866189 5 0 B (1.00000000 0.00000000) *
## 91) texture_mean< 2.866189 2 0 M (0.00000000 1.00000000) *
## 23) symmetry_worst>=-1.834844 75 15 M (0.20000000 0.80000000)
## 46) texture_mean>=2.945474 8 0 B (1.00000000 0.00000000) *
## 47) texture_mean< 2.945474 67 7 M (0.10447761 0.89552239)
## 94) compactness_se>=-3.394391 12 5 M (0.41666667 0.58333333) *
## 95) compactness_se< -3.394391 55 2 M (0.03636364 0.96363636) *
## 3) texture_mean>=2.960364 537 227 M (0.42271881 0.57728119)
## 6) smoothness_mean< -2.425205 162 66 B (0.59259259 0.40740741)
## 12) symmetry_worst< -1.541072 143 48 B (0.66433566 0.33566434)
## 24) symmetry_worst>=-2.218277 131 37 B (0.71755725 0.28244275)
## 48) compactness_se>=-4.496793 98 17 B (0.82653061 0.17346939)
## 96) smoothness_worst< -1.522574 66 4 B (0.93939394 0.06060606) *
## 97) smoothness_worst>=-1.522574 32 13 B (0.59375000 0.40625000) *
## 49) compactness_se< -4.496793 33 13 M (0.39393939 0.60606061)
## 98) compactness_se< -4.568856 19 6 B (0.68421053 0.31578947) *
## 99) compactness_se>=-4.568856 14 0 M (0.00000000 1.00000000) *
## 25) symmetry_worst< -2.218277 12 1 M (0.08333333 0.91666667)
## 50) smoothness_mean< -2.57545 1 0 B (1.00000000 0.00000000) *
## 51) smoothness_mean>=-2.57545 11 0 M (0.00000000 1.00000000) *
## 13) symmetry_worst>=-1.541072 19 1 M (0.05263158 0.94736842)
## 26) smoothness_mean< -2.540124 1 0 B (1.00000000 0.00000000) *
## 27) smoothness_mean>=-2.540124 18 0 M (0.00000000 1.00000000) *
## 7) smoothness_mean>=-2.425205 375 131 M (0.34933333 0.65066667)
## 14) smoothness_mean>=-2.383798 294 120 M (0.40816327 0.59183673)
## 28) smoothness_worst< -1.500061 118 50 B (0.57627119 0.42372881)
## 56) compactness_se< -3.723892 39 5 B (0.87179487 0.12820513)
## 112) texture_mean< 3.216271 32 0 B (1.00000000 0.00000000) *
## 113) texture_mean>=3.216271 7 2 M (0.28571429 0.71428571) *
## 57) compactness_se>=-3.723892 79 34 M (0.43037975 0.56962025)
## 114) symmetry_worst>=-1.407053 14 0 B (1.00000000 0.00000000) *
## 115) symmetry_worst< -1.407053 65 20 M (0.30769231 0.69230769) *
## 29) smoothness_worst>=-1.500061 176 52 M (0.29545455 0.70454545)
## 58) smoothness_worst>=-1.447374 90 44 M (0.48888889 0.51111111)
## 116) texture_worst>=4.599485 68 24 B (0.64705882 0.35294118) *
## 117) texture_worst< 4.599485 22 0 M (0.00000000 1.00000000) *
## 59) smoothness_worst< -1.447374 86 8 M (0.09302326 0.90697674)
## 118) smoothness_mean< -2.36516 3 1 B (0.66666667 0.33333333) *
## 119) smoothness_mean>=-2.36516 83 6 M (0.07228916 0.92771084) *
## 15) smoothness_mean< -2.383798 81 11 M (0.13580247 0.86419753)
## 30) symmetry_worst< -1.966444 15 7 M (0.46666667 0.53333333)
## 60) texture_mean< 3.127107 7 0 B (1.00000000 0.00000000) *
## 61) texture_mean>=3.127107 8 0 M (0.00000000 1.00000000) *
## 31) symmetry_worst>=-1.966444 66 4 M (0.06060606 0.93939394)
## 62) texture_mean< 2.974761 8 4 B (0.50000000 0.50000000)
## 124) texture_mean>=2.966802 4 0 B (1.00000000 0.00000000) *
## 125) texture_mean< 2.966802 4 0 M (0.00000000 1.00000000) *
## 63) texture_mean>=2.974761 58 0 M (0.00000000 1.00000000) *
##
## $trees[[58]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 433 M (0.47478070 0.52521930)
## 2) texture_mean< 2.960364 368 141 B (0.61684783 0.38315217)
## 4) smoothness_worst< -1.451541 309 104 B (0.66343042 0.33656958)
## 8) texture_worst< 4.517889 178 43 B (0.75842697 0.24157303)
## 16) symmetry_worst< -1.001713 174 39 B (0.77586207 0.22413793)
## 32) compactness_se>=-3.343833 47 3 B (0.93617021 0.06382979)
## 64) smoothness_worst< -1.464806 42 0 B (1.00000000 0.00000000) *
## 65) smoothness_worst>=-1.464806 5 2 M (0.40000000 0.60000000) *
## 33) compactness_se< -3.343833 127 36 B (0.71653543 0.28346457)
## 66) texture_worst< 4.261496 59 8 B (0.86440678 0.13559322) *
## 67) texture_worst>=4.261496 68 28 B (0.58823529 0.41176471) *
## 17) symmetry_worst>=-1.001713 4 0 M (0.00000000 1.00000000) *
## 9) texture_worst>=4.517889 131 61 B (0.53435115 0.46564885)
## 18) texture_mean>=2.851854 115 45 B (0.60869565 0.39130435)
## 36) texture_worst>=4.545516 101 32 B (0.68316832 0.31683168)
## 72) smoothness_worst>=-1.548117 49 5 B (0.89795918 0.10204082) *
## 73) smoothness_worst< -1.548117 52 25 M (0.48076923 0.51923077) *
## 37) texture_worst< 4.545516 14 1 M (0.07142857 0.92857143)
## 74) texture_mean< 2.890089 1 0 B (1.00000000 0.00000000) *
## 75) texture_mean>=2.890089 13 0 M (0.00000000 1.00000000) *
## 19) texture_mean< 2.851854 16 0 M (0.00000000 1.00000000) *
## 5) smoothness_worst>=-1.451541 59 22 M (0.37288136 0.62711864)
## 10) texture_mean< 2.780541 20 5 B (0.75000000 0.25000000)
## 20) smoothness_worst>=-1.434076 15 0 B (1.00000000 0.00000000) *
## 21) smoothness_worst< -1.434076 5 0 M (0.00000000 1.00000000) *
## 11) texture_mean>=2.780541 39 7 M (0.17948718 0.82051282)
## 22) texture_mean>=2.879842 15 7 M (0.46666667 0.53333333)
## 44) texture_mean< 2.933323 7 0 B (1.00000000 0.00000000) *
## 45) texture_mean>=2.933323 8 0 M (0.00000000 1.00000000) *
## 23) texture_mean< 2.879842 24 0 M (0.00000000 1.00000000) *
## 3) texture_mean>=2.960364 544 206 M (0.37867647 0.62132353)
## 6) texture_mean>=3.336125 28 4 B (0.85714286 0.14285714)
## 12) compactness_se< -3.643388 25 1 B (0.96000000 0.04000000)
## 24) smoothness_mean< -2.346938 24 0 B (1.00000000 0.00000000) *
## 25) smoothness_mean>=-2.346938 1 0 M (0.00000000 1.00000000) *
## 13) compactness_se>=-3.643388 3 0 M (0.00000000 1.00000000) *
## 7) texture_mean< 3.336125 516 182 M (0.35271318 0.64728682)
## 14) texture_worst< 4.357182 11 0 B (1.00000000 0.00000000) *
## 15) texture_worst>=4.357182 505 171 M (0.33861386 0.66138614)
## 30) texture_worst< 5.073596 447 166 M (0.37136465 0.62863535)
## 60) texture_worst>=4.982438 42 11 B (0.73809524 0.26190476)
## 120) symmetry_worst< -1.541072 36 5 B (0.86111111 0.13888889) *
## 121) symmetry_worst>=-1.541072 6 0 M (0.00000000 1.00000000) *
## 61) texture_worst< 4.982438 405 135 M (0.33333333 0.66666667)
## 122) texture_worst< 4.911888 320 129 M (0.40312500 0.59687500) *
## 123) texture_worst>=4.911888 85 6 M (0.07058824 0.92941176) *
## 31) texture_worst>=5.073596 58 5 M (0.08620690 0.91379310)
## 62) smoothness_worst< -1.609702 2 0 B (1.00000000 0.00000000) *
## 63) smoothness_worst>=-1.609702 56 3 M (0.05357143 0.94642857)
## 126) symmetry_worst< -2.299309 1 0 B (1.00000000 0.00000000) *
## 127) symmetry_worst>=-2.299309 55 2 M (0.03636364 0.96363636) *
##
## $trees[[59]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 416 B (0.54385965 0.45614035)
## 2) texture_worst< 4.820212 652 257 B (0.60582822 0.39417178)
## 4) texture_worst>=4.751723 68 2 B (0.97058824 0.02941176)
## 8) symmetry_worst< -0.9904278 66 0 B (1.00000000 0.00000000) *
## 9) symmetry_worst>=-0.9904278 2 0 M (0.00000000 1.00000000) *
## 5) texture_worst< 4.751723 584 255 B (0.56335616 0.43664384)
## 10) compactness_se>=-3.426516 167 48 B (0.71257485 0.28742515)
## 20) smoothness_mean< -2.155475 150 34 B (0.77333333 0.22666667)
## 40) compactness_se< -3.02233 110 10 B (0.90909091 0.09090909)
## 80) texture_worst< 4.68552 106 6 B (0.94339623 0.05660377) *
## 81) texture_worst>=4.68552 4 0 M (0.00000000 1.00000000) *
## 41) compactness_se>=-3.02233 40 16 M (0.40000000 0.60000000)
## 82) compactness_se>=-2.86687 19 4 B (0.78947368 0.21052632) *
## 83) compactness_se< -2.86687 21 1 M (0.04761905 0.95238095) *
## 21) smoothness_mean>=-2.155475 17 3 M (0.17647059 0.82352941)
## 42) texture_mean< 2.664661 3 0 B (1.00000000 0.00000000) *
## 43) texture_mean>=2.664661 14 0 M (0.00000000 1.00000000) *
## 11) compactness_se< -3.426516 417 207 B (0.50359712 0.49640288)
## 22) symmetry_worst>=-1.52618 66 14 B (0.78787879 0.21212121)
## 44) texture_mean< 2.998678 59 7 B (0.88135593 0.11864407)
## 88) compactness_se< -3.964431 42 0 B (1.00000000 0.00000000) *
## 89) compactness_se>=-3.964431 17 7 B (0.58823529 0.41176471) *
## 45) texture_mean>=2.998678 7 0 M (0.00000000 1.00000000) *
## 23) symmetry_worst< -1.52618 351 158 M (0.45014245 0.54985755)
## 46) smoothness_mean< -2.525043 14 0 B (1.00000000 0.00000000) *
## 47) smoothness_mean>=-2.525043 337 144 M (0.42729970 0.57270030)
## 94) texture_worst< 4.254671 37 9 B (0.75675676 0.24324324) *
## 95) texture_worst>=4.254671 300 116 M (0.38666667 0.61333333) *
## 3) texture_worst>=4.820212 260 101 M (0.38846154 0.61153846)
## 6) symmetry_worst< -2.207988 26 4 B (0.84615385 0.15384615)
## 12) compactness_se< -3.413706 22 0 B (1.00000000 0.00000000) *
## 13) compactness_se>=-3.413706 4 0 M (0.00000000 1.00000000) *
## 7) symmetry_worst>=-2.207988 234 79 M (0.33760684 0.66239316)
## 14) symmetry_worst>=-1.857231 144 63 M (0.43750000 0.56250000)
## 28) symmetry_worst< -1.661584 72 28 B (0.61111111 0.38888889)
## 56) texture_worst< 5.052263 40 6 B (0.85000000 0.15000000)
## 112) texture_worst>=4.940521 29 0 B (1.00000000 0.00000000) *
## 113) texture_worst< 4.940521 11 5 M (0.45454545 0.54545455) *
## 57) texture_worst>=5.052263 32 10 M (0.31250000 0.68750000)
## 114) smoothness_mean< -2.544154 5 0 B (1.00000000 0.00000000) *
## 115) smoothness_mean>=-2.544154 27 5 M (0.18518519 0.81481481) *
## 29) symmetry_worst>=-1.661584 72 19 M (0.26388889 0.73611111)
## 58) compactness_se< -4.539406 7 0 B (1.00000000 0.00000000) *
## 59) compactness_se>=-4.539406 65 12 M (0.18461538 0.81538462)
## 118) compactness_se>=-3.132386 13 4 B (0.69230769 0.30769231) *
## 119) compactness_se< -3.132386 52 3 M (0.05769231 0.94230769) *
## 15) symmetry_worst< -1.857231 90 16 M (0.17777778 0.82222222)
## 30) texture_worst< 4.907333 7 0 B (1.00000000 0.00000000) *
## 31) texture_worst>=4.907333 83 9 M (0.10843373 0.89156627)
## 62) texture_mean>=3.287978 9 1 B (0.88888889 0.11111111)
## 124) smoothness_mean< -2.380359 8 0 B (1.00000000 0.00000000) *
## 125) smoothness_mean>=-2.380359 1 0 M (0.00000000 1.00000000) *
## 63) texture_mean< 3.287978 74 1 M (0.01351351 0.98648649)
## 126) compactness_se< -4.706178 1 0 B (1.00000000 0.00000000) *
## 127) compactness_se>=-4.706178 73 0 M (0.00000000 1.00000000) *
##
## $trees[[60]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 406 B (0.55482456 0.44517544)
## 2) texture_worst< 4.858219 714 281 B (0.60644258 0.39355742)
## 4) symmetry_worst< -1.828847 263 67 B (0.74524715 0.25475285)
## 8) symmetry_worst>=-2.400126 243 52 B (0.78600823 0.21399177)
## 16) compactness_se>=-4.098964 195 32 B (0.83589744 0.16410256)
## 32) texture_worst>=4.530419 124 8 B (0.93548387 0.06451613)
## 64) symmetry_worst>=-2.221546 119 4 B (0.96638655 0.03361345) *
## 65) symmetry_worst< -2.221546 5 1 M (0.20000000 0.80000000) *
## 33) texture_worst< 4.530419 71 24 B (0.66197183 0.33802817)
## 66) smoothness_worst< -1.482701 35 4 B (0.88571429 0.11428571) *
## 67) smoothness_worst>=-1.482701 36 16 M (0.44444444 0.55555556) *
## 17) compactness_se< -4.098964 48 20 B (0.58333333 0.41666667)
## 34) smoothness_mean< -2.426727 20 0 B (1.00000000 0.00000000) *
## 35) smoothness_mean>=-2.426727 28 8 M (0.28571429 0.71428571)
## 70) smoothness_mean>=-2.355463 8 0 B (1.00000000 0.00000000) *
## 71) smoothness_mean< -2.355463 20 0 M (0.00000000 1.00000000) *
## 9) symmetry_worst< -2.400126 20 5 M (0.25000000 0.75000000)
## 18) texture_mean< 2.855865 4 0 B (1.00000000 0.00000000) *
## 19) texture_mean>=2.855865 16 1 M (0.06250000 0.93750000)
## 38) texture_worst>=4.573991 1 0 B (1.00000000 0.00000000) *
## 39) texture_worst< 4.573991 15 0 M (0.00000000 1.00000000) *
## 5) symmetry_worst>=-1.828847 451 214 B (0.52549889 0.47450111)
## 10) smoothness_worst>=-1.623935 431 194 B (0.54988399 0.45011601)
## 20) texture_worst>=4.287261 344 136 B (0.60465116 0.39534884)
## 40) smoothness_mean< -2.216408 310 108 B (0.65161290 0.34838710)
## 80) texture_mean< 2.876103 49 0 B (1.00000000 0.00000000) *
## 81) texture_mean>=2.876103 261 108 B (0.58620690 0.41379310) *
## 41) smoothness_mean>=-2.216408 34 6 M (0.17647059 0.82352941)
## 82) texture_mean>=3.039982 7 2 B (0.71428571 0.28571429) *
## 83) texture_mean< 3.039982 27 1 M (0.03703704 0.96296296) *
## 21) texture_worst< 4.287261 87 29 M (0.33333333 0.66666667)
## 42) smoothness_mean< -2.437614 8 0 B (1.00000000 0.00000000) *
## 43) smoothness_mean>=-2.437614 79 21 M (0.26582278 0.73417722)
## 86) smoothness_worst>=-1.35291 5 0 B (1.00000000 0.00000000) *
## 87) smoothness_worst< -1.35291 74 16 M (0.21621622 0.78378378) *
## 11) smoothness_worst< -1.623935 20 0 M (0.00000000 1.00000000) *
## 3) texture_worst>=4.858219 198 73 M (0.36868687 0.63131313)
## 6) symmetry_worst< -2.063958 27 3 B (0.88888889 0.11111111)
## 12) texture_mean>=3.038878 25 1 B (0.96000000 0.04000000)
## 24) compactness_se< -3.400535 24 0 B (1.00000000 0.00000000) *
## 25) compactness_se>=-3.400535 1 0 M (0.00000000 1.00000000) *
## 13) texture_mean< 3.038878 2 0 M (0.00000000 1.00000000) *
## 7) symmetry_worst>=-2.063958 171 49 M (0.28654971 0.71345029)
## 14) smoothness_worst< -1.623453 14 2 B (0.85714286 0.14285714)
## 28) smoothness_mean< -2.396135 12 0 B (1.00000000 0.00000000) *
## 29) smoothness_mean>=-2.396135 2 0 M (0.00000000 1.00000000) *
## 15) smoothness_worst>=-1.623453 157 37 M (0.23566879 0.76433121)
## 30) compactness_se>=-3.902076 75 30 M (0.40000000 0.60000000)
## 60) texture_worst< 5.032208 38 13 B (0.65789474 0.34210526)
## 120) symmetry_worst< -1.541072 29 4 B (0.86206897 0.13793103) *
## 121) symmetry_worst>=-1.541072 9 0 M (0.00000000 1.00000000) *
## 61) texture_worst>=5.032208 37 5 M (0.13513514 0.86486486)
## 122) texture_worst>=5.329405 13 5 M (0.38461538 0.61538462) *
## 123) texture_worst< 5.329405 24 0 M (0.00000000 1.00000000) *
## 31) compactness_se< -3.902076 82 7 M (0.08536585 0.91463415)
## 62) compactness_se< -4.899363 2 0 B (1.00000000 0.00000000) *
## 63) compactness_se>=-4.899363 80 5 M (0.06250000 0.93750000)
## 126) texture_mean< 2.915217 1 0 B (1.00000000 0.00000000) *
## 127) texture_mean>=2.915217 79 4 M (0.05063291 0.94936709) *
##
## $trees[[61]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 450 M (0.49342105 0.50657895)
## 2) symmetry_worst< -1.424186 827 396 B (0.52116082 0.47883918)
## 4) compactness_se< -4.705732 17 0 B (1.00000000 0.00000000) *
## 5) compactness_se>=-4.705732 810 396 B (0.51111111 0.48888889)
## 10) compactness_se>=-4.663537 792 378 B (0.52272727 0.47727273)
## 20) symmetry_worst>=-1.440359 20 1 B (0.95000000 0.05000000)
## 40) smoothness_mean< -2.257921 19 0 B (1.00000000 0.00000000) *
## 41) smoothness_mean>=-2.257921 1 0 M (0.00000000 1.00000000) *
## 21) symmetry_worst< -1.440359 772 377 B (0.51165803 0.48834197)
## 42) symmetry_worst< -1.828847 319 126 B (0.60501567 0.39498433)
## 84) texture_worst< 4.897936 246 78 B (0.68292683 0.31707317) *
## 85) texture_worst>=4.897936 73 25 M (0.34246575 0.65753425) *
## 43) symmetry_worst>=-1.828847 453 202 M (0.44591611 0.55408389)
## 86) symmetry_worst>=-1.749635 322 155 B (0.51863354 0.48136646) *
## 87) symmetry_worst< -1.749635 131 35 M (0.26717557 0.73282443) *
## 11) compactness_se< -4.663537 18 0 M (0.00000000 1.00000000) *
## 3) symmetry_worst>=-1.424186 85 19 M (0.22352941 0.77647059)
## 6) texture_mean< 2.77286 7 1 B (0.85714286 0.14285714)
## 12) compactness_se< -3.1317 6 0 B (1.00000000 0.00000000) *
## 13) compactness_se>=-3.1317 1 0 M (0.00000000 1.00000000) *
## 7) texture_mean>=2.77286 78 13 M (0.16666667 0.83333333)
## 14) smoothness_worst< -1.49848 19 9 B (0.52631579 0.47368421)
## 28) smoothness_mean>=-2.45794 12 2 B (0.83333333 0.16666667)
## 56) texture_worst< 4.857215 10 0 B (1.00000000 0.00000000) *
## 57) texture_worst>=4.857215 2 0 M (0.00000000 1.00000000) *
## 29) smoothness_mean< -2.45794 7 0 M (0.00000000 1.00000000) *
## 15) smoothness_worst>=-1.49848 59 3 M (0.05084746 0.94915254)
## 30) compactness_se>=-2.567912 3 0 B (1.00000000 0.00000000) *
## 31) compactness_se< -2.567912 56 0 M (0.00000000 1.00000000) *
##
## $trees[[62]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 429 M (0.47039474 0.52960526)
## 2) symmetry_worst< -1.816281 361 155 B (0.57063712 0.42936288)
## 4) compactness_se>=-4.44774 311 119 B (0.61736334 0.38263666)
## 8) compactness_se< -4.244326 32 0 B (1.00000000 0.00000000) *
## 9) compactness_se>=-4.244326 279 119 B (0.57347670 0.42652330)
## 18) texture_mean< 3.044457 165 49 B (0.70303030 0.29696970)
## 36) texture_worst>=4.530419 95 13 B (0.86315789 0.13684211)
## 72) texture_worst< 4.669081 69 0 B (1.00000000 0.00000000) *
## 73) texture_worst>=4.669081 26 13 B (0.50000000 0.50000000) *
## 37) texture_worst< 4.530419 70 34 M (0.48571429 0.51428571)
## 74) smoothness_worst>=-1.477976 27 4 B (0.85185185 0.14814815) *
## 75) smoothness_worst< -1.477976 43 11 M (0.25581395 0.74418605) *
## 19) texture_mean>=3.044457 114 44 M (0.38596491 0.61403509)
## 38) symmetry_worst< -2.188127 41 15 B (0.63414634 0.36585366)
## 76) texture_worst>=4.762323 26 3 B (0.88461538 0.11538462) *
## 77) texture_worst< 4.762323 15 3 M (0.20000000 0.80000000) *
## 39) symmetry_worst>=-2.188127 73 18 M (0.24657534 0.75342466)
## 78) smoothness_worst< -1.589834 6 1 B (0.83333333 0.16666667) *
## 79) smoothness_worst>=-1.589834 67 13 M (0.19402985 0.80597015) *
## 5) compactness_se< -4.44774 50 14 M (0.28000000 0.72000000)
## 10) smoothness_worst< -1.626559 9 1 B (0.88888889 0.11111111)
## 20) texture_mean< 3.146983 8 0 B (1.00000000 0.00000000) *
## 21) texture_mean>=3.146983 1 0 M (0.00000000 1.00000000) *
## 11) smoothness_worst>=-1.626559 41 6 M (0.14634146 0.85365854)
## 22) compactness_se< -4.737326 2 0 B (1.00000000 0.00000000) *
## 23) compactness_se>=-4.737326 39 4 M (0.10256410 0.89743590)
## 46) texture_mean< 2.846651 1 0 B (1.00000000 0.00000000) *
## 47) texture_mean>=2.846651 38 3 M (0.07894737 0.92105263)
## 94) smoothness_mean>=-2.271294 1 0 B (1.00000000 0.00000000) *
## 95) smoothness_mean< -2.271294 37 2 M (0.05405405 0.94594595) *
## 3) symmetry_worst>=-1.816281 551 223 M (0.40471869 0.59528131)
## 6) texture_mean< 2.960364 248 118 B (0.52419355 0.47580645)
## 12) symmetry_worst>=-1.769229 196 75 B (0.61734694 0.38265306)
## 24) smoothness_worst< -1.492248 83 12 B (0.85542169 0.14457831)
## 48) compactness_se>=-4.681232 74 6 B (0.91891892 0.08108108)
## 96) symmetry_worst>=-1.748321 67 1 B (0.98507463 0.01492537) *
## 97) symmetry_worst< -1.748321 7 2 M (0.28571429 0.71428571) *
## 49) compactness_se< -4.681232 9 3 M (0.33333333 0.66666667)
## 98) texture_mean>=2.936149 3 0 B (1.00000000 0.00000000) *
## 99) texture_mean< 2.936149 6 0 M (0.00000000 1.00000000) *
## 25) smoothness_worst>=-1.492248 113 50 M (0.44247788 0.55752212)
## 50) symmetry_worst< -1.641484 30 1 B (0.96666667 0.03333333)
## 100) texture_mean< 2.933308 29 0 B (1.00000000 0.00000000) *
## 101) texture_mean>=2.933308 1 0 M (0.00000000 1.00000000) *
## 51) symmetry_worst>=-1.641484 83 21 M (0.25301205 0.74698795)
## 102) compactness_se< -4.218076 3 0 B (1.00000000 0.00000000) *
## 103) compactness_se>=-4.218076 80 18 M (0.22500000 0.77500000) *
## 13) symmetry_worst< -1.769229 52 9 M (0.17307692 0.82692308)
## 26) texture_mean< 2.862952 11 5 B (0.54545455 0.45454545)
## 52) texture_mean>=2.739163 6 0 B (1.00000000 0.00000000) *
## 53) texture_mean< 2.739163 5 0 M (0.00000000 1.00000000) *
## 27) texture_mean>=2.862952 41 3 M (0.07317073 0.92682927)
## 54) smoothness_mean< -2.480896 1 0 B (1.00000000 0.00000000) *
## 55) smoothness_mean>=-2.480896 40 2 M (0.05000000 0.95000000)
## 110) smoothness_mean>=-2.31876 9 2 M (0.22222222 0.77777778) *
## 111) smoothness_mean< -2.31876 31 0 M (0.00000000 1.00000000) *
## 7) texture_mean>=2.960364 303 93 M (0.30693069 0.69306931)
## 14) texture_worst>=4.900538 68 25 B (0.63235294 0.36764706)
## 28) smoothness_mean< -2.333105 45 8 B (0.82222222 0.17777778)
## 56) compactness_se< -3.435038 39 4 B (0.89743590 0.10256410)
## 112) texture_mean>=3.088538 32 1 B (0.96875000 0.03125000) *
## 113) texture_mean< 3.088538 7 3 B (0.57142857 0.42857143) *
## 57) compactness_se>=-3.435038 6 2 M (0.33333333 0.66666667)
## 114) texture_mean>=3.165773 2 0 B (1.00000000 0.00000000) *
## 115) texture_mean< 3.165773 4 0 M (0.00000000 1.00000000) *
## 29) smoothness_mean>=-2.333105 23 6 M (0.26086957 0.73913043)
## 58) symmetry_worst< -1.700833 6 0 B (1.00000000 0.00000000) *
## 59) symmetry_worst>=-1.700833 17 0 M (0.00000000 1.00000000) *
## 15) texture_worst< 4.900538 235 50 M (0.21276596 0.78723404)
## 30) symmetry_worst>=-1.606972 109 40 M (0.36697248 0.63302752)
## 60) compactness_se< -3.483184 67 31 B (0.53731343 0.46268657)
## 120) compactness_se>=-3.494961 17 0 B (1.00000000 0.00000000) *
## 121) compactness_se< -3.494961 50 19 M (0.38000000 0.62000000) *
## 61) compactness_se>=-3.483184 42 4 M (0.09523810 0.90476190)
## 122) symmetry_worst>=-1.471051 4 0 B (1.00000000 0.00000000) *
## 123) symmetry_worst< -1.471051 38 0 M (0.00000000 1.00000000) *
## 31) symmetry_worst< -1.606972 126 10 M (0.07936508 0.92063492)
## 62) smoothness_mean>=-2.137157 4 0 B (1.00000000 0.00000000) *
## 63) smoothness_mean< -2.137157 122 6 M (0.04918033 0.95081967)
## 126) compactness_se>=-2.744014 10 4 M (0.40000000 0.60000000) *
## 127) compactness_se< -2.744014 112 2 M (0.01785714 0.98214286) *
##
## $trees[[63]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 412 B (0.54824561 0.45175439)
## 2) symmetry_worst< -1.758563 452 166 B (0.63274336 0.36725664)
## 4) texture_worst< 4.605004 253 69 B (0.72727273 0.27272727)
## 8) smoothness_mean>=-2.603563 246 62 B (0.74796748 0.25203252)
## 16) symmetry_worst< -1.948993 96 10 B (0.89583333 0.10416667)
## 32) symmetry_worst>=-2.49184 92 6 B (0.93478261 0.06521739)
## 64) texture_worst>=3.98381 89 4 B (0.95505618 0.04494382) *
## 65) texture_worst< 3.98381 3 1 M (0.33333333 0.66666667) *
## 33) symmetry_worst< -2.49184 4 0 M (0.00000000 1.00000000) *
## 17) symmetry_worst>=-1.948993 150 52 B (0.65333333 0.34666667)
## 34) smoothness_worst< -1.455007 127 34 B (0.73228346 0.26771654)
## 68) texture_mean>=2.766607 96 17 B (0.82291667 0.17708333) *
## 69) texture_mean< 2.766607 31 14 M (0.45161290 0.54838710) *
## 35) smoothness_worst>=-1.455007 23 5 M (0.21739130 0.78260870)
## 70) smoothness_mean>=-2.212285 4 0 B (1.00000000 0.00000000) *
## 71) smoothness_mean< -2.212285 19 1 M (0.05263158 0.94736842) *
## 9) smoothness_mean< -2.603563 7 0 M (0.00000000 1.00000000) *
## 5) texture_worst>=4.605004 199 97 B (0.51256281 0.48743719)
## 10) texture_mean>=3.021644 135 47 B (0.65185185 0.34814815)
## 20) texture_worst>=4.651138 125 37 B (0.70400000 0.29600000)
## 40) smoothness_worst< -1.52112 81 13 B (0.83950617 0.16049383)
## 80) compactness_se< -2.810352 79 11 B (0.86075949 0.13924051) *
## 81) compactness_se>=-2.810352 2 0 M (0.00000000 1.00000000) *
## 41) smoothness_worst>=-1.52112 44 20 M (0.45454545 0.54545455)
## 82) smoothness_worst>=-1.49243 30 10 B (0.66666667 0.33333333) *
## 83) smoothness_worst< -1.49243 14 0 M (0.00000000 1.00000000) *
## 21) texture_worst< 4.651138 10 0 M (0.00000000 1.00000000) *
## 11) texture_mean< 3.021644 64 14 M (0.21875000 0.78125000)
## 22) smoothness_mean>=-2.33454 8 1 B (0.87500000 0.12500000)
## 44) texture_worst< 4.831396 7 0 B (1.00000000 0.00000000) *
## 45) texture_worst>=4.831396 1 0 M (0.00000000 1.00000000) *
## 23) smoothness_mean< -2.33454 56 7 M (0.12500000 0.87500000)
## 46) compactness_se< -4.737326 2 0 B (1.00000000 0.00000000) *
## 47) compactness_se>=-4.737326 54 5 M (0.09259259 0.90740741)
## 94) smoothness_mean< -2.58747 1 0 B (1.00000000 0.00000000) *
## 95) smoothness_mean>=-2.58747 53 4 M (0.07547170 0.92452830) *
## 3) symmetry_worst>=-1.758563 460 214 M (0.46521739 0.53478261)
## 6) texture_mean< 2.955938 175 67 B (0.61714286 0.38285714)
## 12) smoothness_mean< -2.22055 131 35 B (0.73282443 0.26717557)
## 24) symmetry_worst>=-1.749307 123 27 B (0.78048780 0.21951220)
## 48) symmetry_worst< -1.426958 76 8 B (0.89473684 0.10526316)
## 96) compactness_se>=-4.650552 70 4 B (0.94285714 0.05714286) *
## 97) compactness_se< -4.650552 6 2 M (0.33333333 0.66666667) *
## 49) symmetry_worst>=-1.426958 47 19 B (0.59574468 0.40425532)
## 98) texture_worst>=4.136225 35 7 B (0.80000000 0.20000000) *
## 99) texture_worst< 4.136225 12 0 M (0.00000000 1.00000000) *
## 25) symmetry_worst< -1.749307 8 0 M (0.00000000 1.00000000) *
## 13) smoothness_mean>=-2.22055 44 12 M (0.27272727 0.72727273)
## 26) smoothness_worst>=-1.333822 6 0 B (1.00000000 0.00000000) *
## 27) smoothness_worst< -1.333822 38 6 M (0.15789474 0.84210526)
## 54) smoothness_worst< -1.534923 4 0 B (1.00000000 0.00000000) *
## 55) smoothness_worst>=-1.534923 34 2 M (0.05882353 0.94117647)
## 110) compactness_se< -3.796566 3 1 B (0.66666667 0.33333333) *
## 111) compactness_se>=-3.796566 31 0 M (0.00000000 1.00000000) *
## 7) texture_mean>=2.955938 285 106 M (0.37192982 0.62807018)
## 14) compactness_se< -3.446121 218 102 M (0.46788991 0.53211009)
## 28) texture_worst>=4.608306 153 64 B (0.58169935 0.41830065)
## 56) smoothness_mean>=-2.306712 59 9 B (0.84745763 0.15254237)
## 112) smoothness_worst< -1.394193 53 3 B (0.94339623 0.05660377) *
## 113) smoothness_worst>=-1.394193 6 0 M (0.00000000 1.00000000) *
## 57) smoothness_mean< -2.306712 94 39 M (0.41489362 0.58510638)
## 114) compactness_se>=-3.765171 40 13 B (0.67500000 0.32500000) *
## 115) compactness_se< -3.765171 54 12 M (0.22222222 0.77777778) *
## 29) texture_worst< 4.608306 65 13 M (0.20000000 0.80000000)
## 58) compactness_se< -4.291103 13 0 B (1.00000000 0.00000000) *
## 59) compactness_se>=-4.291103 52 0 M (0.00000000 1.00000000) *
## 15) compactness_se>=-3.446121 67 4 M (0.05970149 0.94029851)
## 30) smoothness_mean< -2.476623 2 0 B (1.00000000 0.00000000) *
## 31) smoothness_mean>=-2.476623 65 2 M (0.03076923 0.96923077)
## 62) symmetry_worst>=-1.471051 10 2 M (0.20000000 0.80000000)
## 124) symmetry_worst< -1.448226 2 0 B (1.00000000 0.00000000) *
## 125) symmetry_worst>=-1.448226 8 0 M (0.00000000 1.00000000) *
## 63) symmetry_worst< -1.471051 55 0 M (0.00000000 1.00000000) *
##
## $trees[[64]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 452 B (0.50438596 0.49561404)
## 2) texture_mean< 3.007414 529 228 B (0.56899811 0.43100189)
## 4) texture_mean>=2.996482 45 3 B (0.93333333 0.06666667)
## 8) compactness_se< -3.239931 41 0 B (1.00000000 0.00000000) *
## 9) compactness_se>=-3.239931 4 1 M (0.25000000 0.75000000)
## 18) texture_mean>=3.003945 1 0 B (1.00000000 0.00000000) *
## 19) texture_mean< 3.003945 3 0 M (0.00000000 1.00000000) *
## 5) texture_mean< 2.996482 484 225 B (0.53512397 0.46487603)
## 10) texture_mean< 2.737601 67 16 B (0.76119403 0.23880597)
## 20) compactness_se< -3.768369 33 0 B (1.00000000 0.00000000) *
## 21) compactness_se>=-3.768369 34 16 B (0.52941176 0.47058824)
## 42) texture_worst< 4.057309 24 6 B (0.75000000 0.25000000)
## 84) compactness_se>=-3.734904 18 0 B (1.00000000 0.00000000) *
## 85) compactness_se< -3.734904 6 0 M (0.00000000 1.00000000) *
## 43) texture_worst>=4.057309 10 0 M (0.00000000 1.00000000) *
## 11) texture_mean>=2.737601 417 208 M (0.49880096 0.50119904)
## 22) texture_worst>=4.543638 176 65 B (0.63068182 0.36931818)
## 44) symmetry_worst< -1.362443 159 48 B (0.69811321 0.30188679)
## 88) symmetry_worst>=-2.1835 146 37 B (0.74657534 0.25342466) *
## 89) symmetry_worst< -2.1835 13 2 M (0.15384615 0.84615385) *
## 45) symmetry_worst>=-1.362443 17 0 M (0.00000000 1.00000000) *
## 23) texture_worst< 4.543638 241 97 M (0.40248963 0.59751037)
## 46) texture_worst< 4.389172 125 59 B (0.52800000 0.47200000)
## 92) texture_worst>=4.365735 23 3 B (0.86956522 0.13043478) *
## 93) texture_worst< 4.365735 102 46 M (0.45098039 0.54901961) *
## 47) texture_worst>=4.389172 116 31 M (0.26724138 0.73275862)
## 94) texture_worst>=4.465917 71 30 M (0.42253521 0.57746479) *
## 95) texture_worst< 4.465917 45 1 M (0.02222222 0.97777778) *
## 3) texture_mean>=3.007414 383 159 M (0.41514360 0.58485640)
## 6) texture_mean>=3.029409 327 151 M (0.46177370 0.53822630)
## 12) compactness_se< -3.05924 303 149 M (0.49174917 0.50825083)
## 24) smoothness_worst< -1.618721 21 0 B (1.00000000 0.00000000) *
## 25) smoothness_worst>=-1.618721 282 128 M (0.45390071 0.54609929)
## 50) compactness_se< -4.539406 21 3 B (0.85714286 0.14285714)
## 100) symmetry_worst>=-1.674863 18 0 B (1.00000000 0.00000000) *
## 101) symmetry_worst< -1.674863 3 0 M (0.00000000 1.00000000) *
## 51) compactness_se>=-4.539406 261 110 M (0.42145594 0.57854406)
## 102) symmetry_worst< -1.661892 185 91 B (0.50810811 0.49189189) *
## 103) symmetry_worst>=-1.661892 76 16 M (0.21052632 0.78947368) *
## 13) compactness_se>=-3.05924 24 2 M (0.08333333 0.91666667)
## 26) texture_mean< 3.031099 2 0 B (1.00000000 0.00000000) *
## 27) texture_mean>=3.031099 22 0 M (0.00000000 1.00000000) *
## 7) texture_mean< 3.029409 56 8 M (0.14285714 0.85714286)
## 14) compactness_se>=-3.446107 8 0 B (1.00000000 0.00000000) *
## 15) compactness_se< -3.446107 48 0 M (0.00000000 1.00000000) *
##
## $trees[[65]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 433 M (0.47478070 0.52521930)
## 2) texture_worst< 4.820212 669 300 B (0.55156951 0.44843049)
## 4) symmetry_worst< -1.828847 249 77 B (0.69076305 0.30923695)
## 8) smoothness_mean>=-2.358733 115 16 B (0.86086957 0.13913043)
## 16) symmetry_worst>=-2.354921 104 9 B (0.91346154 0.08653846)
## 32) compactness_se< -3.02233 97 5 B (0.94845361 0.05154639)
## 64) compactness_se< -3.4389 75 1 B (0.98666667 0.01333333) *
## 65) compactness_se>=-3.4389 22 4 B (0.81818182 0.18181818) *
## 33) compactness_se>=-3.02233 7 3 M (0.42857143 0.57142857)
## 66) texture_mean< 2.962882 3 0 B (1.00000000 0.00000000) *
## 67) texture_mean>=2.962882 4 0 M (0.00000000 1.00000000) *
## 17) symmetry_worst< -2.354921 11 4 M (0.36363636 0.63636364)
## 34) texture_mean< 2.855865 3 0 B (1.00000000 0.00000000) *
## 35) texture_mean>=2.855865 8 1 M (0.12500000 0.87500000)
## 70) compactness_se< -3.905141 3 1 M (0.33333333 0.66666667) *
## 71) compactness_se>=-3.905141 5 0 M (0.00000000 1.00000000) *
## 9) smoothness_mean< -2.358733 134 61 B (0.54477612 0.45522388)
## 18) smoothness_worst< -1.559148 68 10 B (0.85294118 0.14705882)
## 36) texture_worst>=4.018768 62 5 B (0.91935484 0.08064516)
## 72) smoothness_mean< -2.378659 60 3 B (0.95000000 0.05000000) *
## 73) smoothness_mean>=-2.378659 2 0 M (0.00000000 1.00000000) *
## 37) texture_worst< 4.018768 6 1 M (0.16666667 0.83333333)
## 74) texture_mean< 2.764104 1 0 B (1.00000000 0.00000000) *
## 75) texture_mean>=2.764104 5 0 M (0.00000000 1.00000000) *
## 19) smoothness_worst>=-1.559148 66 15 M (0.22727273 0.77272727)
## 38) smoothness_mean< -2.419122 17 2 B (0.88235294 0.11764706)
## 76) texture_mean< 3.049127 15 0 B (1.00000000 0.00000000) *
## 77) texture_mean>=3.049127 2 0 M (0.00000000 1.00000000) *
## 39) smoothness_mean>=-2.419122 49 0 M (0.00000000 1.00000000) *
## 5) symmetry_worst>=-1.828847 420 197 M (0.46904762 0.53095238)
## 10) texture_worst< 4.50835 171 65 B (0.61988304 0.38011696)
## 20) symmetry_worst>=-1.809351 145 39 B (0.73103448 0.26896552)
## 40) texture_mean< 2.96681 123 25 B (0.79674797 0.20325203)
## 80) smoothness_worst< -1.451541 91 8 B (0.91208791 0.08791209) *
## 81) smoothness_worst>=-1.451541 32 15 M (0.46875000 0.53125000) *
## 41) texture_mean>=2.96681 22 8 M (0.36363636 0.63636364)
## 82) smoothness_mean< -2.381799 10 2 B (0.80000000 0.20000000) *
## 83) smoothness_mean>=-2.381799 12 0 M (0.00000000 1.00000000) *
## 21) symmetry_worst< -1.809351 26 0 M (0.00000000 1.00000000) *
## 11) texture_worst>=4.50835 249 91 M (0.36546185 0.63453815)
## 22) symmetry_worst>=-1.685469 156 76 M (0.48717949 0.51282051)
## 44) symmetry_worst< -1.620541 21 2 B (0.90476190 0.09523810)
## 88) compactness_se< -3.350492 19 0 B (1.00000000 0.00000000) *
## 89) compactness_se>=-3.350492 2 0 M (0.00000000 1.00000000) *
## 45) symmetry_worst>=-1.620541 135 57 M (0.42222222 0.57777778)
## 90) symmetry_worst>=-1.606972 116 57 M (0.49137931 0.50862069) *
## 91) symmetry_worst< -1.606972 19 0 M (0.00000000 1.00000000) *
## 23) symmetry_worst< -1.685469 93 15 M (0.16129032 0.83870968)
## 46) symmetry_worst< -1.801087 21 10 B (0.52380952 0.47619048)
## 92) texture_mean< 3.018415 11 0 B (1.00000000 0.00000000) *
## 93) texture_mean>=3.018415 10 0 M (0.00000000 1.00000000) *
## 47) symmetry_worst>=-1.801087 72 4 M (0.05555556 0.94444444)
## 94) compactness_se< -4.222363 2 0 B (1.00000000 0.00000000) *
## 95) compactness_se>=-4.222363 70 2 M (0.02857143 0.97142857) *
## 3) texture_worst>=4.820212 243 64 M (0.26337449 0.73662551)
## 6) smoothness_worst< -1.623453 10 2 B (0.80000000 0.20000000)
## 12) smoothness_mean< -2.382409 8 0 B (1.00000000 0.00000000) *
## 13) smoothness_mean>=-2.382409 2 0 M (0.00000000 1.00000000) *
## 7) smoothness_worst>=-1.623453 233 56 M (0.24034335 0.75965665)
## 14) texture_worst>=4.982438 137 46 M (0.33576642 0.66423358)
## 28) texture_worst< 4.998743 20 1 B (0.95000000 0.05000000)
## 56) texture_mean>=3.086552 19 0 B (1.00000000 0.00000000) *
## 57) texture_mean< 3.086552 1 0 M (0.00000000 1.00000000) *
## 29) texture_worst>=4.998743 117 27 M (0.23076923 0.76923077)
## 58) symmetry_worst< -2.257286 7 1 B (0.85714286 0.14285714)
## 116) smoothness_mean< -2.317053 6 0 B (1.00000000 0.00000000) *
## 117) smoothness_mean>=-2.317053 1 0 M (0.00000000 1.00000000) *
## 59) symmetry_worst>=-2.257286 110 21 M (0.19090909 0.80909091)
## 118) smoothness_mean< -2.505388 4 0 B (1.00000000 0.00000000) *
## 119) smoothness_mean>=-2.505388 106 17 M (0.16037736 0.83962264) *
## 15) texture_worst< 4.982438 96 10 M (0.10416667 0.89583333)
## 30) compactness_se>=-2.919705 2 0 B (1.00000000 0.00000000) *
## 31) compactness_se< -2.919705 94 8 M (0.08510638 0.91489362)
## 62) texture_mean>=3.208403 15 6 M (0.40000000 0.60000000)
## 124) smoothness_worst< -1.511906 6 0 B (1.00000000 0.00000000) *
## 125) smoothness_worst>=-1.511906 9 0 M (0.00000000 1.00000000) *
## 63) texture_mean< 3.208403 79 2 M (0.02531646 0.97468354)
## 126) symmetry_worst< -2.139 1 0 B (1.00000000 0.00000000) *
## 127) symmetry_worst>=-2.139 78 1 M (0.01282051 0.98717949) *
##
## $trees[[66]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 432 B (0.52631579 0.47368421)
## 2) texture_mean< 3.007414 527 207 B (0.60721063 0.39278937)
## 4) texture_mean>=2.995732 62 6 B (0.90322581 0.09677419)
## 8) smoothness_mean>=-2.301343 48 0 B (1.00000000 0.00000000) *
## 9) smoothness_mean< -2.301343 14 6 B (0.57142857 0.42857143)
## 18) smoothness_mean< -2.332196 8 0 B (1.00000000 0.00000000) *
## 19) smoothness_mean>=-2.332196 6 0 M (0.00000000 1.00000000) *
## 5) texture_mean< 2.995732 465 201 B (0.56774194 0.43225806)
## 10) texture_mean< 2.993479 453 189 B (0.58278146 0.41721854)
## 20) compactness_se< -3.726279 273 94 B (0.65567766 0.34432234)
## 40) texture_mean< 2.813911 68 10 B (0.85294118 0.14705882)
## 80) compactness_se< -3.88564 51 0 B (1.00000000 0.00000000) *
## 81) compactness_se>=-3.88564 17 7 M (0.41176471 0.58823529) *
## 41) texture_mean>=2.813911 205 84 B (0.59024390 0.40975610)
## 82) smoothness_mean>=-2.31481 82 16 B (0.80487805 0.19512195) *
## 83) smoothness_mean< -2.31481 123 55 M (0.44715447 0.55284553) *
## 21) compactness_se>=-3.726279 180 85 M (0.47222222 0.52777778)
## 42) compactness_se>=-3.344528 59 15 B (0.74576271 0.25423729)
## 84) symmetry_worst< -1.316602 46 6 B (0.86956522 0.13043478) *
## 85) symmetry_worst>=-1.316602 13 4 M (0.30769231 0.69230769) *
## 43) compactness_se< -3.344528 121 41 M (0.33884298 0.66115702)
## 86) symmetry_worst< -1.813857 45 18 B (0.60000000 0.40000000) *
## 87) symmetry_worst>=-1.813857 76 14 M (0.18421053 0.81578947) *
## 11) texture_mean>=2.993479 12 0 M (0.00000000 1.00000000) *
## 3) texture_mean>=3.007414 385 160 M (0.41558442 0.58441558)
## 6) compactness_se< -4.26701 54 12 B (0.77777778 0.22222222)
## 12) texture_mean>=3.227241 23 0 B (1.00000000 0.00000000) *
## 13) texture_mean< 3.227241 31 12 B (0.61290323 0.38709677)
## 26) texture_mean< 3.108384 23 4 B (0.82608696 0.17391304)
## 52) smoothness_worst< -1.508396 21 2 B (0.90476190 0.09523810)
## 104) smoothness_mean>=-2.477398 15 0 B (1.00000000 0.00000000) *
## 105) smoothness_mean< -2.477398 6 2 B (0.66666667 0.33333333) *
## 53) smoothness_worst>=-1.508396 2 0 M (0.00000000 1.00000000) *
## 27) texture_mean>=3.108384 8 0 M (0.00000000 1.00000000) *
## 7) compactness_se>=-4.26701 331 118 M (0.35649547 0.64350453)
## 14) texture_worst>=4.678763 224 97 M (0.43303571 0.56696429)
## 28) texture_worst< 4.681966 18 0 B (1.00000000 0.00000000) *
## 29) texture_worst>=4.681966 206 79 M (0.38349515 0.61650485)
## 58) smoothness_worst< -1.610115 11 0 B (1.00000000 0.00000000) *
## 59) smoothness_worst>=-1.610115 195 68 M (0.34871795 0.65128205)
## 118) texture_worst>=4.80876 163 65 M (0.39877301 0.60122699) *
## 119) texture_worst< 4.80876 32 3 M (0.09375000 0.90625000) *
## 15) texture_worst< 4.678763 107 21 M (0.19626168 0.80373832)
## 30) symmetry_worst< -2.232873 13 2 B (0.84615385 0.15384615)
## 60) smoothness_mean< -2.242961 11 0 B (1.00000000 0.00000000) *
## 61) smoothness_mean>=-2.242961 2 0 M (0.00000000 1.00000000) *
## 31) symmetry_worst>=-2.232873 94 10 M (0.10638298 0.89361702)
## 62) smoothness_worst>=-1.51308 33 10 M (0.30303030 0.69696970)
## 124) symmetry_worst< -1.871076 12 2 B (0.83333333 0.16666667) *
## 125) symmetry_worst>=-1.871076 21 0 M (0.00000000 1.00000000) *
## 63) smoothness_worst< -1.51308 61 0 M (0.00000000 1.00000000) *
##
## $trees[[67]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 387 B (0.57565789 0.42434211)
## 2) compactness_se< -3.721197 515 170 B (0.66990291 0.33009709)
## 4) compactness_se>=-3.867535 106 12 B (0.88679245 0.11320755)
## 8) smoothness_worst< -1.48132 85 0 B (1.00000000 0.00000000) *
## 9) smoothness_worst>=-1.48132 21 9 M (0.42857143 0.57142857)
## 18) texture_mean< 2.971675 13 4 B (0.69230769 0.30769231)
## 36) symmetry_worst< -1.612049 9 0 B (1.00000000 0.00000000) *
## 37) symmetry_worst>=-1.612049 4 0 M (0.00000000 1.00000000) *
## 19) texture_mean>=2.971675 8 0 M (0.00000000 1.00000000) *
## 5) compactness_se< -3.867535 409 158 B (0.61369193 0.38630807)
## 10) texture_worst< 4.822896 292 90 B (0.69178082 0.30821918)
## 20) compactness_se< -4.356557 84 9 B (0.89285714 0.10714286)
## 40) symmetry_worst>=-2.374205 79 6 B (0.92405063 0.07594937)
## 80) compactness_se< -4.50262 66 3 B (0.95454545 0.04545455) *
## 81) compactness_se>=-4.50262 13 3 B (0.76923077 0.23076923) *
## 41) symmetry_worst< -2.374205 5 2 M (0.40000000 0.60000000)
## 82) texture_mean< 2.828748 2 0 B (1.00000000 0.00000000) *
## 83) texture_mean>=2.828748 3 0 M (0.00000000 1.00000000) *
## 21) compactness_se>=-4.356557 208 81 B (0.61057692 0.38942308)
## 42) compactness_se>=-4.316443 195 68 B (0.65128205 0.34871795)
## 84) texture_worst>=4.751011 26 0 B (1.00000000 0.00000000) *
## 85) texture_worst< 4.751011 169 68 B (0.59763314 0.40236686) *
## 43) compactness_se< -4.316443 13 0 M (0.00000000 1.00000000) *
## 11) texture_worst>=4.822896 117 49 M (0.41880342 0.58119658)
## 22) compactness_se< -4.060578 82 36 B (0.56097561 0.43902439)
## 44) smoothness_worst>=-1.453658 26 2 B (0.92307692 0.07692308)
## 88) texture_worst>=4.831607 24 0 B (1.00000000 0.00000000) *
## 89) texture_worst< 4.831607 2 0 M (0.00000000 1.00000000) *
## 45) smoothness_worst< -1.453658 56 22 M (0.39285714 0.60714286)
## 90) texture_worst>=4.984637 30 12 B (0.60000000 0.40000000) *
## 91) texture_worst< 4.984637 26 4 M (0.15384615 0.84615385) *
## 23) compactness_se>=-4.060578 35 3 M (0.08571429 0.91428571)
## 46) texture_worst< 4.895983 2 0 B (1.00000000 0.00000000) *
## 47) texture_worst>=4.895983 33 1 M (0.03030303 0.96969697)
## 94) texture_mean< 3.053025 7 1 M (0.14285714 0.85714286) *
## 95) texture_mean>=3.053025 26 0 M (0.00000000 1.00000000) *
## 3) compactness_se>=-3.721197 397 180 M (0.45340050 0.54659950)
## 6) smoothness_mean< -2.298595 248 115 B (0.53629032 0.46370968)
## 12) symmetry_worst>=-1.609029 74 17 B (0.77027027 0.22972973)
## 24) compactness_se>=-3.592308 68 11 B (0.83823529 0.16176471)
## 48) texture_worst< 4.993407 65 8 B (0.87692308 0.12307692)
## 96) texture_mean>=2.896936 61 5 B (0.91803279 0.08196721) *
## 97) texture_mean< 2.896936 4 1 M (0.25000000 0.75000000) *
## 49) texture_worst>=4.993407 3 0 M (0.00000000 1.00000000) *
## 25) compactness_se< -3.592308 6 0 M (0.00000000 1.00000000) *
## 13) symmetry_worst< -1.609029 174 76 M (0.43678161 0.56321839)
## 26) texture_worst>=4.863167 43 13 B (0.69767442 0.30232558)
## 52) smoothness_mean>=-2.457972 29 1 B (0.96551724 0.03448276)
## 104) texture_mean< 3.329636 28 0 B (1.00000000 0.00000000) *
## 105) texture_mean>=3.329636 1 0 M (0.00000000 1.00000000) *
## 53) smoothness_mean< -2.457972 14 2 M (0.14285714 0.85714286)
## 106) compactness_se< -3.643388 2 0 B (1.00000000 0.00000000) *
## 107) compactness_se>=-3.643388 12 0 M (0.00000000 1.00000000) *
## 27) texture_worst< 4.863167 131 46 M (0.35114504 0.64885496)
## 54) smoothness_mean< -2.443631 42 19 B (0.54761905 0.45238095)
## 108) texture_mean< 3.064089 23 3 B (0.86956522 0.13043478) *
## 109) texture_mean>=3.064089 19 3 M (0.15789474 0.84210526) *
## 55) smoothness_mean>=-2.443631 89 23 M (0.25842697 0.74157303)
## 110) symmetry_worst< -1.813857 41 20 M (0.48780488 0.51219512) *
## 111) symmetry_worst>=-1.813857 48 3 M (0.06250000 0.93750000) *
## 7) smoothness_mean>=-2.298595 149 47 M (0.31543624 0.68456376)
## 14) texture_mean< 2.8622 37 9 B (0.75675676 0.24324324)
## 28) symmetry_worst< -1.53342 17 0 B (1.00000000 0.00000000) *
## 29) symmetry_worst>=-1.53342 20 9 B (0.55000000 0.45000000)
## 58) smoothness_worst>=-1.454202 14 3 B (0.78571429 0.21428571)
## 116) symmetry_worst>=-1.435787 11 0 B (1.00000000 0.00000000) *
## 117) symmetry_worst< -1.435787 3 0 M (0.00000000 1.00000000) *
## 59) smoothness_worst< -1.454202 6 0 M (0.00000000 1.00000000) *
## 15) texture_mean>=2.8622 112 19 M (0.16964286 0.83035714)
## 30) smoothness_mean>=-2.093138 9 1 B (0.88888889 0.11111111)
## 60) smoothness_mean< -2.073133 8 0 B (1.00000000 0.00000000) *
## 61) smoothness_mean>=-2.073133 1 0 M (0.00000000 1.00000000) *
## 31) smoothness_mean< -2.093138 103 11 M (0.10679612 0.89320388)
## 62) texture_worst< 4.345743 8 4 B (0.50000000 0.50000000)
## 124) smoothness_mean< -2.178638 4 0 B (1.00000000 0.00000000) *
## 125) smoothness_mean>=-2.178638 4 0 M (0.00000000 1.00000000) *
## 63) texture_worst>=4.345743 95 7 M (0.07368421 0.92631579)
## 126) texture_mean< 2.925574 11 3 M (0.27272727 0.72727273) *
## 127) texture_mean>=2.925574 84 4 M (0.04761905 0.95238095) *
##
## $trees[[68]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 431 B (0.52741228 0.47258772)
## 2) compactness_se< -4.505325 96 24 B (0.75000000 0.25000000)
## 4) smoothness_worst>=-1.576547 61 3 B (0.95081967 0.04918033)
## 8) smoothness_worst>=-1.550704 49 0 B (1.00000000 0.00000000) *
## 9) smoothness_worst< -1.550704 12 3 B (0.75000000 0.25000000)
## 18) texture_worst>=4.62656 9 0 B (1.00000000 0.00000000) *
## 19) texture_worst< 4.62656 3 0 M (0.00000000 1.00000000) *
## 5) smoothness_worst< -1.576547 35 14 M (0.40000000 0.60000000)
## 10) smoothness_worst< -1.629412 9 0 B (1.00000000 0.00000000) *
## 11) smoothness_worst>=-1.629412 26 5 M (0.19230769 0.80769231)
## 22) compactness_se< -4.860597 2 0 B (1.00000000 0.00000000) *
## 23) compactness_se>=-4.860597 24 3 M (0.12500000 0.87500000)
## 46) compactness_se>=-4.557299 8 3 M (0.37500000 0.62500000)
## 92) texture_mean< 3.023648 3 0 B (1.00000000 0.00000000) *
## 93) texture_mean>=3.023648 5 0 M (0.00000000 1.00000000) *
## 47) compactness_se< -4.557299 16 0 M (0.00000000 1.00000000) *
## 3) compactness_se>=-4.505325 816 407 B (0.50122549 0.49877451)
## 6) compactness_se>=-3.933251 529 219 B (0.58601134 0.41398866)
## 12) smoothness_worst< -1.499656 295 98 B (0.66779661 0.33220339)
## 24) compactness_se< -3.723892 80 4 B (0.95000000 0.05000000)
## 48) texture_worst< 5.269605 76 0 B (1.00000000 0.00000000) *
## 49) texture_worst>=5.269605 4 0 M (0.00000000 1.00000000) *
## 25) compactness_se>=-3.723892 215 94 B (0.56279070 0.43720930)
## 50) smoothness_worst>=-1.555451 98 27 B (0.72448980 0.27551020)
## 100) compactness_se>=-3.494301 68 8 B (0.88235294 0.11764706) *
## 101) compactness_se< -3.494301 30 11 M (0.36666667 0.63333333) *
## 51) smoothness_worst< -1.555451 117 50 M (0.42735043 0.57264957)
## 102) smoothness_mean< -2.400474 85 37 B (0.56470588 0.43529412) *
## 103) smoothness_mean>=-2.400474 32 2 M (0.06250000 0.93750000) *
## 13) smoothness_worst>=-1.499656 234 113 M (0.48290598 0.51709402)
## 26) smoothness_worst>=-1.49223 215 102 B (0.52558140 0.47441860)
## 52) symmetry_worst< -2.037336 22 1 B (0.95454545 0.04545455)
## 104) smoothness_mean< -2.231276 21 0 B (1.00000000 0.00000000) *
## 105) smoothness_mean>=-2.231276 1 0 M (0.00000000 1.00000000) *
## 53) symmetry_worst>=-2.037336 193 92 M (0.47668394 0.52331606)
## 106) texture_worst< 4.683387 148 64 B (0.56756757 0.43243243) *
## 107) texture_worst>=4.683387 45 8 M (0.17777778 0.82222222) *
## 27) smoothness_worst< -1.49223 19 0 M (0.00000000 1.00000000) *
## 7) compactness_se< -3.933251 287 99 M (0.34494774 0.65505226)
## 14) compactness_se< -3.996495 224 95 M (0.42410714 0.57589286)
## 28) compactness_se>=-4.100467 52 16 B (0.69230769 0.30769231)
## 56) texture_worst< 4.977078 42 6 B (0.85714286 0.14285714)
## 112) symmetry_worst< -1.485318 35 0 B (1.00000000 0.00000000) *
## 113) symmetry_worst>=-1.485318 7 1 M (0.14285714 0.85714286) *
## 57) texture_worst>=4.977078 10 0 M (0.00000000 1.00000000) *
## 29) compactness_se< -4.100467 172 59 M (0.34302326 0.65697674)
## 58) compactness_se< -4.219581 106 50 M (0.47169811 0.52830189)
## 116) symmetry_worst>=-1.508268 19 0 B (1.00000000 0.00000000) *
## 117) symmetry_worst< -1.508268 87 31 M (0.35632184 0.64367816) *
## 59) compactness_se>=-4.219581 66 9 M (0.13636364 0.86363636)
## 118) smoothness_mean< -2.434347 7 0 B (1.00000000 0.00000000) *
## 119) smoothness_mean>=-2.434347 59 2 M (0.03389831 0.96610169) *
## 15) compactness_se>=-3.996495 63 4 M (0.06349206 0.93650794)
## 30) texture_worst< 4.514719 20 4 M (0.20000000 0.80000000)
## 60) texture_mean>=2.888377 4 0 B (1.00000000 0.00000000) *
## 61) texture_mean< 2.888377 16 0 M (0.00000000 1.00000000) *
## 31) texture_worst>=4.514719 43 0 M (0.00000000 1.00000000) *
##
## $trees[[69]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 453 B (0.50328947 0.49671053)
## 2) texture_worst>=4.753106 275 90 B (0.67272727 0.32727273)
## 4) compactness_se< -3.321165 247 65 B (0.73684211 0.26315789)
## 8) texture_worst< 4.820212 75 3 B (0.96000000 0.04000000)
## 16) symmetry_worst< -0.9904278 72 0 B (1.00000000 0.00000000) *
## 17) symmetry_worst>=-0.9904278 3 0 M (0.00000000 1.00000000) *
## 9) texture_worst>=4.820212 172 62 B (0.63953488 0.36046512)
## 18) texture_mean>=3.166067 109 26 B (0.76146789 0.23853211)
## 36) texture_worst< 5.194184 64 5 B (0.92187500 0.07812500)
## 72) symmetry_worst< -1.302628 62 3 B (0.95161290 0.04838710) *
## 73) symmetry_worst>=-1.302628 2 0 M (0.00000000 1.00000000) *
## 37) texture_worst>=5.194184 45 21 B (0.53333333 0.46666667)
## 74) smoothness_mean< -2.363096 30 6 B (0.80000000 0.20000000) *
## 75) smoothness_mean>=-2.363096 15 0 M (0.00000000 1.00000000) *
## 19) texture_mean< 3.166067 63 27 M (0.42857143 0.57142857)
## 38) smoothness_worst>=-1.441541 14 2 B (0.85714286 0.14285714)
## 76) compactness_se< -4.032549 12 0 B (1.00000000 0.00000000) *
## 77) compactness_se>=-4.032549 2 0 M (0.00000000 1.00000000) *
## 39) smoothness_worst< -1.441541 49 15 M (0.30612245 0.69387755)
## 78) smoothness_worst< -1.52382 24 10 B (0.58333333 0.41666667) *
## 79) smoothness_worst>=-1.52382 25 1 M (0.04000000 0.96000000) *
## 5) compactness_se>=-3.321165 28 3 M (0.10714286 0.89285714)
## 10) smoothness_mean< -2.576888 1 0 B (1.00000000 0.00000000) *
## 11) smoothness_mean>=-2.576888 27 2 M (0.07407407 0.92592593)
## 22) smoothness_worst>=-1.415354 2 1 B (0.50000000 0.50000000)
## 44) texture_mean>=3.085887 1 0 B (1.00000000 0.00000000) *
## 45) texture_mean< 3.085887 1 0 M (0.00000000 1.00000000) *
## 23) smoothness_worst< -1.415354 25 1 M (0.04000000 0.96000000)
## 46) symmetry_worst>=-1.545802 2 1 B (0.50000000 0.50000000)
## 92) texture_mean>=3.19534 1 0 B (1.00000000 0.00000000) *
## 93) texture_mean< 3.19534 1 0 M (0.00000000 1.00000000) *
## 47) symmetry_worst< -1.545802 23 0 M (0.00000000 1.00000000) *
## 3) texture_worst< 4.753106 637 274 M (0.43014129 0.56985871)
## 6) texture_worst< 4.703562 586 271 M (0.46245734 0.53754266)
## 12) symmetry_worst>=-1.56405 153 54 B (0.64705882 0.35294118)
## 24) smoothness_worst>=-1.607486 138 39 B (0.71739130 0.28260870)
## 48) smoothness_worst< -1.451541 110 23 B (0.79090909 0.20909091)
## 96) compactness_se< -3.026518 92 13 B (0.85869565 0.14130435) *
## 97) compactness_se>=-3.026518 18 8 M (0.44444444 0.55555556) *
## 49) smoothness_worst>=-1.451541 28 12 M (0.42857143 0.57142857)
## 98) smoothness_worst>=-1.434633 17 5 B (0.70588235 0.29411765) *
## 99) smoothness_worst< -1.434633 11 0 M (0.00000000 1.00000000) *
## 25) smoothness_worst< -1.607486 15 0 M (0.00000000 1.00000000) *
## 13) symmetry_worst< -1.56405 433 172 M (0.39722864 0.60277136)
## 26) texture_mean< 2.70704 19 0 B (1.00000000 0.00000000) *
## 27) texture_mean>=2.70704 414 153 M (0.36956522 0.63043478)
## 54) symmetry_worst< -2.048468 57 19 B (0.66666667 0.33333333)
## 108) symmetry_worst>=-2.103063 14 0 B (1.00000000 0.00000000) *
## 109) symmetry_worst< -2.103063 43 19 B (0.55813953 0.44186047) *
## 55) symmetry_worst>=-2.048468 357 115 M (0.32212885 0.67787115)
## 110) texture_worst>=4.644679 34 8 B (0.76470588 0.23529412) *
## 111) texture_worst< 4.644679 323 89 M (0.27554180 0.72445820) *
## 7) texture_worst>=4.703562 51 3 M (0.05882353 0.94117647)
## 14) compactness_se< -4.398027 4 1 B (0.75000000 0.25000000)
## 28) texture_mean< 2.985881 3 0 B (1.00000000 0.00000000) *
## 29) texture_mean>=2.985881 1 0 M (0.00000000 1.00000000) *
## 15) compactness_se>=-4.398027 47 0 M (0.00000000 1.00000000) *
##
## $trees[[70]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 434 B (0.52412281 0.47587719)
## 2) smoothness_worst< -1.482502 585 240 B (0.58974359 0.41025641)
## 4) smoothness_worst>=-1.598251 463 164 B (0.64578834 0.35421166)
## 8) compactness_se>=-4.49319 399 122 B (0.69423559 0.30576441)
## 16) texture_worst< 4.595069 205 45 B (0.78048780 0.21951220)
## 32) symmetry_worst< -1.815934 92 5 B (0.94565217 0.05434783)
## 64) texture_mean< 3.059757 91 4 B (0.95604396 0.04395604) *
## 65) texture_mean>=3.059757 1 0 M (0.00000000 1.00000000) *
## 33) symmetry_worst>=-1.815934 113 40 B (0.64601770 0.35398230)
## 66) symmetry_worst>=-1.797319 100 27 B (0.73000000 0.27000000) *
## 67) symmetry_worst< -1.797319 13 0 M (0.00000000 1.00000000) *
## 17) texture_worst>=4.595069 194 77 B (0.60309278 0.39690722)
## 34) compactness_se>=-3.902076 135 35 B (0.74074074 0.25925926)
## 68) texture_worst>=4.606472 126 26 B (0.79365079 0.20634921) *
## 69) texture_worst< 4.606472 9 0 M (0.00000000 1.00000000) *
## 35) compactness_se< -3.902076 59 17 M (0.28813559 0.71186441)
## 70) compactness_se< -4.276595 17 1 B (0.94117647 0.05882353) *
## 71) compactness_se>=-4.276595 42 1 M (0.02380952 0.97619048) *
## 9) compactness_se< -4.49319 64 22 M (0.34375000 0.65625000)
## 18) compactness_se< -4.694501 20 0 B (1.00000000 0.00000000) *
## 19) compactness_se>=-4.694501 44 2 M (0.04545455 0.95454545)
## 38) texture_mean>=3.192081 2 0 B (1.00000000 0.00000000) *
## 39) texture_mean< 3.192081 42 0 M (0.00000000 1.00000000) *
## 5) smoothness_worst< -1.598251 122 46 M (0.37704918 0.62295082)
## 10) symmetry_worst< -1.868413 42 15 B (0.64285714 0.35714286)
## 20) texture_worst>=4.498003 22 0 B (1.00000000 0.00000000) *
## 21) texture_worst< 4.498003 20 5 M (0.25000000 0.75000000)
## 42) symmetry_worst< -2.343297 5 0 B (1.00000000 0.00000000) *
## 43) symmetry_worst>=-2.343297 15 0 M (0.00000000 1.00000000) *
## 11) symmetry_worst>=-1.868413 80 19 M (0.23750000 0.76250000)
## 22) texture_mean< 2.939162 11 0 B (1.00000000 0.00000000) *
## 23) texture_mean>=2.939162 69 8 M (0.11594203 0.88405797)
## 46) texture_mean>=3.083898 18 7 M (0.38888889 0.61111111)
## 92) compactness_se< -3.477558 7 0 B (1.00000000 0.00000000) *
## 93) compactness_se>=-3.477558 11 0 M (0.00000000 1.00000000) *
## 47) texture_mean< 3.083898 51 1 M (0.01960784 0.98039216)
## 94) compactness_se>=-3.433938 1 0 B (1.00000000 0.00000000) *
## 95) compactness_se< -3.433938 50 0 M (0.00000000 1.00000000) *
## 3) smoothness_worst>=-1.482502 327 133 M (0.40672783 0.59327217)
## 6) smoothness_worst>=-1.477976 277 130 M (0.46931408 0.53068592)
## 12) smoothness_worst< -1.472307 40 4 B (0.90000000 0.10000000)
## 24) texture_mean< 3.069079 37 1 B (0.97297297 0.02702703)
## 48) texture_worst< 4.844547 36 0 B (1.00000000 0.00000000) *
## 49) texture_worst>=4.844547 1 0 M (0.00000000 1.00000000) *
## 25) texture_mean>=3.069079 3 0 M (0.00000000 1.00000000) *
## 13) smoothness_worst>=-1.472307 237 94 M (0.39662447 0.60337553)
## 26) smoothness_worst>=-1.465904 204 94 M (0.46078431 0.53921569)
## 52) compactness_se< -4.040144 49 13 B (0.73469388 0.26530612)
## 104) texture_mean< 3.082128 36 5 B (0.86111111 0.13888889) *
## 105) texture_mean>=3.082128 13 5 M (0.38461538 0.61538462) *
## 53) compactness_se>=-4.040144 155 58 M (0.37419355 0.62580645)
## 106) compactness_se>=-3.813086 97 42 B (0.56701031 0.43298969) *
## 107) compactness_se< -3.813086 58 3 M (0.05172414 0.94827586) *
## 27) smoothness_worst< -1.465904 33 0 M (0.00000000 1.00000000) *
## 7) smoothness_worst< -1.477976 50 3 M (0.06000000 0.94000000)
## 14) texture_worst< 4.126187 3 0 B (1.00000000 0.00000000) *
## 15) texture_worst>=4.126187 47 0 M (0.00000000 1.00000000) *
##
## $trees[[71]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 430 M (0.47149123 0.52850877)
## 2) smoothness_mean< -2.201842 830 414 M (0.49879518 0.50120482)
## 4) compactness_se< -3.439055 636 292 B (0.54088050 0.45911950)
## 8) texture_mean< 2.756519 34 6 B (0.82352941 0.17647059)
## 16) smoothness_worst>=-1.569234 26 0 B (1.00000000 0.00000000) *
## 17) smoothness_worst< -1.569234 8 2 M (0.25000000 0.75000000)
## 34) texture_mean>=2.724206 2 0 B (1.00000000 0.00000000) *
## 35) texture_mean< 2.724206 6 0 M (0.00000000 1.00000000) *
## 9) texture_mean>=2.756519 602 286 B (0.52491694 0.47508306)
## 18) texture_mean>=2.770085 579 264 B (0.54404145 0.45595855)
## 36) texture_mean< 2.813911 19 0 B (1.00000000 0.00000000) *
## 37) texture_mean>=2.813911 560 264 B (0.52857143 0.47142857)
## 74) compactness_se< -4.505325 83 25 B (0.69879518 0.30120482) *
## 75) compactness_se>=-4.505325 477 238 M (0.49895178 0.50104822) *
## 19) texture_mean< 2.770085 23 1 M (0.04347826 0.95652174)
## 38) smoothness_mean< -2.443516 1 0 B (1.00000000 0.00000000) *
## 39) smoothness_mean>=-2.443516 22 0 M (0.00000000 1.00000000) *
## 5) compactness_se>=-3.439055 194 70 M (0.36082474 0.63917526)
## 10) texture_mean< 3.071302 138 66 M (0.47826087 0.52173913)
## 20) smoothness_worst< -1.502897 41 4 B (0.90243902 0.09756098)
## 40) compactness_se>=-3.392487 28 0 B (1.00000000 0.00000000) *
## 41) compactness_se< -3.392487 13 4 B (0.69230769 0.30769231)
## 82) compactness_se< -3.404656 9 0 B (1.00000000 0.00000000) *
## 83) compactness_se>=-3.404656 4 0 M (0.00000000 1.00000000) *
## 21) smoothness_worst>=-1.502897 97 29 M (0.29896907 0.70103093)
## 42) symmetry_worst>=-1.471051 38 14 B (0.63157895 0.36842105)
## 84) texture_mean>=2.840763 27 4 B (0.85185185 0.14814815) *
## 85) texture_mean< 2.840763 11 1 M (0.09090909 0.90909091) *
## 43) symmetry_worst< -1.471051 59 5 M (0.08474576 0.91525424)
## 86) texture_worst< 4.217 2 0 B (1.00000000 0.00000000) *
## 87) texture_worst>=4.217 57 3 M (0.05263158 0.94736842) *
## 11) texture_mean>=3.071302 56 4 M (0.07142857 0.92857143)
## 22) smoothness_worst>=-1.427204 5 2 M (0.40000000 0.60000000)
## 44) texture_mean>=3.16708 2 0 B (1.00000000 0.00000000) *
## 45) texture_mean< 3.16708 3 0 M (0.00000000 1.00000000) *
## 23) smoothness_worst< -1.427204 51 2 M (0.03921569 0.96078431)
## 46) compactness_se>=-3.107684 22 2 M (0.09090909 0.90909091)
## 92) compactness_se< -3.05924 2 0 B (1.00000000 0.00000000) *
## 93) compactness_se>=-3.05924 20 0 M (0.00000000 1.00000000) *
## 47) compactness_se< -3.107684 29 0 M (0.00000000 1.00000000) *
## 3) smoothness_mean>=-2.201842 82 16 M (0.19512195 0.80487805)
## 6) symmetry_worst< -1.685481 18 8 B (0.55555556 0.44444444)
## 12) smoothness_mean>=-2.176486 10 0 B (1.00000000 0.00000000) *
## 13) smoothness_mean< -2.176486 8 0 M (0.00000000 1.00000000) *
## 7) symmetry_worst>=-1.685481 64 6 M (0.09375000 0.90625000)
## 14) compactness_se< -4.098715 3 0 B (1.00000000 0.00000000) *
## 15) compactness_se>=-4.098715 61 3 M (0.04918033 0.95081967)
## 30) texture_mean>=3.037093 7 2 M (0.28571429 0.71428571)
## 60) texture_mean< 3.044522 2 0 B (1.00000000 0.00000000) *
## 61) texture_mean>=3.044522 5 0 M (0.00000000 1.00000000) *
## 31) texture_mean< 3.037093 54 1 M (0.01851852 0.98148148)
## 62) smoothness_mean>=-2.000349 3 1 M (0.33333333 0.66666667)
## 124) texture_mean< 2.688296 1 0 B (1.00000000 0.00000000) *
## 125) texture_mean>=2.688296 2 0 M (0.00000000 1.00000000) *
## 63) smoothness_mean< -2.000349 51 0 M (0.00000000 1.00000000) *
##
## $trees[[72]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 432 M (0.47368421 0.52631579)
## 2) compactness_se< -3.488718 643 306 B (0.52410575 0.47589425)
## 4) smoothness_worst< -1.425207 592 265 B (0.55236486 0.44763514)
## 8) compactness_se>=-3.512073 25 0 B (1.00000000 0.00000000) *
## 9) compactness_se< -3.512073 567 265 B (0.53262787 0.46737213)
## 18) symmetry_worst>=-1.559535 129 42 B (0.67441860 0.32558140)
## 36) smoothness_mean>=-2.454281 109 27 B (0.75229358 0.24770642)
## 72) symmetry_worst< -1.423936 84 14 B (0.83333333 0.16666667) *
## 73) symmetry_worst>=-1.423936 25 12 M (0.48000000 0.52000000) *
## 37) smoothness_mean< -2.454281 20 5 M (0.25000000 0.75000000)
## 74) compactness_se>=-4.161775 6 1 B (0.83333333 0.16666667) *
## 75) compactness_se< -4.161775 14 0 M (0.00000000 1.00000000) *
## 19) symmetry_worst< -1.559535 438 215 M (0.49086758 0.50913242)
## 38) texture_mean>=3.33381 31 6 B (0.80645161 0.19354839)
## 76) texture_mean< 3.431382 26 1 B (0.96153846 0.03846154) *
## 77) texture_mean>=3.431382 5 0 M (0.00000000 1.00000000) *
## 39) texture_mean< 3.33381 407 190 M (0.46683047 0.53316953)
## 78) symmetry_worst< -1.787433 255 120 B (0.52941176 0.47058824) *
## 79) symmetry_worst>=-1.787433 152 55 M (0.36184211 0.63815789) *
## 5) smoothness_worst>=-1.425207 51 10 M (0.19607843 0.80392157)
## 10) texture_worst< 4.269167 6 0 B (1.00000000 0.00000000) *
## 11) texture_worst>=4.269167 45 4 M (0.08888889 0.91111111)
## 22) compactness_se< -4.099998 6 2 B (0.66666667 0.33333333)
## 44) texture_mean< 3.075523 4 0 B (1.00000000 0.00000000) *
## 45) texture_mean>=3.075523 2 0 M (0.00000000 1.00000000) *
## 23) compactness_se>=-4.099998 39 0 M (0.00000000 1.00000000) *
## 3) compactness_se>=-3.488718 269 95 M (0.35315985 0.64684015)
## 6) symmetry_worst< -1.656074 146 72 M (0.49315068 0.50684932)
## 12) symmetry_worst>=-1.982549 90 30 B (0.66666667 0.33333333)
## 24) symmetry_worst< -1.839514 39 1 B (0.97435897 0.02564103)
## 48) texture_mean< 3.088324 38 0 B (1.00000000 0.00000000) *
## 49) texture_mean>=3.088324 1 0 M (0.00000000 1.00000000) *
## 25) symmetry_worst>=-1.839514 51 22 M (0.43137255 0.56862745)
## 50) texture_worst< 4.40102 12 0 B (1.00000000 0.00000000) *
## 51) texture_worst>=4.40102 39 10 M (0.25641026 0.74358974)
## 102) smoothness_mean>=-2.120284 6 0 B (1.00000000 0.00000000) *
## 103) smoothness_mean< -2.120284 33 4 M (0.12121212 0.87878788) *
## 13) symmetry_worst< -1.982549 56 12 M (0.21428571 0.78571429)
## 26) smoothness_worst>=-1.477394 7 2 B (0.71428571 0.28571429)
## 52) texture_mean< 3.158131 5 0 B (1.00000000 0.00000000) *
## 53) texture_mean>=3.158131 2 0 M (0.00000000 1.00000000) *
## 27) smoothness_worst< -1.477394 49 7 M (0.14285714 0.85714286)
## 54) compactness_se>=-3.248462 16 7 M (0.43750000 0.56250000)
## 108) texture_mean< 3.076827 7 0 B (1.00000000 0.00000000) *
## 109) texture_mean>=3.076827 9 0 M (0.00000000 1.00000000) *
## 55) compactness_se< -3.248462 33 0 M (0.00000000 1.00000000) *
## 7) symmetry_worst>=-1.656074 123 23 M (0.18699187 0.81300813)
## 14) compactness_se>=-2.470993 5 0 B (1.00000000 0.00000000) *
## 15) compactness_se< -2.470993 118 18 M (0.15254237 0.84745763)
## 30) texture_worst< 3.969009 5 1 B (0.80000000 0.20000000)
## 60) texture_mean>=2.44739 4 0 B (1.00000000 0.00000000) *
## 61) texture_mean< 2.44739 1 0 M (0.00000000 1.00000000) *
## 31) texture_worst>=3.969009 113 14 M (0.12389381 0.87610619)
## 62) smoothness_mean< -2.338805 36 11 M (0.30555556 0.69444444)
## 124) symmetry_worst>=-1.497271 9 0 B (1.00000000 0.00000000) *
## 125) symmetry_worst< -1.497271 27 2 M (0.07407407 0.92592593) *
## 63) smoothness_mean>=-2.338805 77 3 M (0.03896104 0.96103896)
## 126) symmetry_worst< -1.580867 7 2 M (0.28571429 0.71428571) *
## 127) symmetry_worst>=-1.580867 70 1 M (0.01428571 0.98571429) *
##
## $trees[[73]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 452 M (0.49561404 0.50438596)
## 2) compactness_se>=-4.49319 807 383 B (0.52540273 0.47459727)
## 4) smoothness_mean< -2.2971 525 213 B (0.59428571 0.40571429)
## 8) smoothness_mean>=-2.326878 95 15 B (0.84210526 0.15789474)
## 16) smoothness_worst>=-1.560669 84 4 B (0.95238095 0.04761905)
## 32) texture_worst< 5.091364 82 2 B (0.97560976 0.02439024)
## 64) compactness_se>=-4.101376 81 1 B (0.98765432 0.01234568) *
## 65) compactness_se< -4.101376 1 0 M (0.00000000 1.00000000) *
## 33) texture_worst>=5.091364 2 0 M (0.00000000 1.00000000) *
## 17) smoothness_worst< -1.560669 11 0 M (0.00000000 1.00000000) *
## 9) smoothness_mean< -2.326878 430 198 B (0.53953488 0.46046512)
## 18) symmetry_worst< -1.39888 409 178 B (0.56479218 0.43520782)
## 36) symmetry_worst>=-1.750623 162 46 B (0.71604938 0.28395062)
## 72) smoothness_worst>=-1.568787 119 18 B (0.84873950 0.15126050) *
## 73) smoothness_worst< -1.568787 43 15 M (0.34883721 0.65116279) *
## 37) symmetry_worst< -1.750623 247 115 M (0.46558704 0.53441296)
## 74) symmetry_worst< -1.789477 202 92 B (0.54455446 0.45544554) *
## 75) symmetry_worst>=-1.789477 45 5 M (0.11111111 0.88888889) *
## 19) symmetry_worst>=-1.39888 21 1 M (0.04761905 0.95238095)
## 38) texture_worst< 4.479829 1 0 B (1.00000000 0.00000000) *
## 39) texture_worst>=4.479829 20 0 M (0.00000000 1.00000000) *
## 5) smoothness_mean>=-2.2971 282 112 M (0.39716312 0.60283688)
## 10) smoothness_mean>=-2.292637 245 110 M (0.44897959 0.55102041)
## 20) compactness_se< -4.023814 46 11 B (0.76086957 0.23913043)
## 40) smoothness_mean< -2.222419 27 0 B (1.00000000 0.00000000) *
## 41) smoothness_mean>=-2.222419 19 8 M (0.42105263 0.57894737)
## 82) texture_mean< 2.910336 7 0 B (1.00000000 0.00000000) *
## 83) texture_mean>=2.910336 12 1 M (0.08333333 0.91666667) *
## 21) compactness_se>=-4.023814 199 75 M (0.37688442 0.62311558)
## 42) smoothness_worst< -1.482898 51 17 B (0.66666667 0.33333333)
## 84) compactness_se< -3.656611 25 0 B (1.00000000 0.00000000) *
## 85) compactness_se>=-3.656611 26 9 M (0.34615385 0.65384615) *
## 43) smoothness_worst>=-1.482898 148 41 M (0.27702703 0.72297297)
## 86) texture_worst< 4.398698 57 28 B (0.50877193 0.49122807) *
## 87) texture_worst>=4.398698 91 12 M (0.13186813 0.86813187) *
## 11) smoothness_mean< -2.292637 37 2 M (0.05405405 0.94594595)
## 22) texture_mean< 2.753964 1 0 B (1.00000000 0.00000000) *
## 23) texture_mean>=2.753964 36 1 M (0.02777778 0.97222222)
## 46) compactness_se< -4.216002 1 0 B (1.00000000 0.00000000) *
## 47) compactness_se>=-4.216002 35 0 M (0.00000000 1.00000000) *
## 3) compactness_se< -4.49319 105 28 M (0.26666667 0.73333333)
## 6) smoothness_worst>=-1.547264 30 10 B (0.66666667 0.33333333)
## 12) symmetry_worst>=-1.696111 18 0 B (1.00000000 0.00000000) *
## 13) symmetry_worst< -1.696111 12 2 M (0.16666667 0.83333333)
## 26) compactness_se< -4.631213 2 0 B (1.00000000 0.00000000) *
## 27) compactness_se>=-4.631213 10 0 M (0.00000000 1.00000000) *
## 7) smoothness_worst< -1.547264 75 8 M (0.10666667 0.89333333)
## 14) compactness_se< -4.727869 4 1 B (0.75000000 0.25000000)
## 28) smoothness_worst< -1.634 3 0 B (1.00000000 0.00000000) *
## 29) smoothness_worst>=-1.634 1 0 M (0.00000000 1.00000000) *
## 15) compactness_se>=-4.727869 71 5 M (0.07042254 0.92957746)
## 30) symmetry_worst>=-1.459568 1 0 B (1.00000000 0.00000000) *
## 31) symmetry_worst< -1.459568 70 4 M (0.05714286 0.94285714)
## 62) texture_worst>=4.660194 31 4 M (0.12903226 0.87096774)
## 124) texture_worst< 4.812659 4 0 B (1.00000000 0.00000000) *
## 125) texture_worst>=4.812659 27 0 M (0.00000000 1.00000000) *
## 63) texture_worst< 4.660194 39 0 M (0.00000000 1.00000000) *
##
## $trees[[74]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 425 M (0.46600877 0.53399123)
## 2) symmetry_worst>=-1.9261 650 314 B (0.51692308 0.48307692)
## 4) compactness_se< -3.690481 374 146 B (0.60962567 0.39037433)
## 8) smoothness_worst< -1.451541 283 92 B (0.67491166 0.32508834)
## 16) compactness_se>=-4.160164 144 18 B (0.87500000 0.12500000)
## 32) symmetry_worst>=-1.767566 92 3 B (0.96739130 0.03260870)
## 64) symmetry_worst< -1.201763 91 2 B (0.97802198 0.02197802) *
## 65) symmetry_worst>=-1.201763 1 0 M (0.00000000 1.00000000) *
## 33) symmetry_worst< -1.767566 52 15 B (0.71153846 0.28846154)
## 66) symmetry_worst< -1.786753 39 2 B (0.94871795 0.05128205) *
## 67) symmetry_worst>=-1.786753 13 0 M (0.00000000 1.00000000) *
## 17) compactness_se< -4.160164 139 65 M (0.46762590 0.53237410)
## 34) compactness_se< -4.260936 110 48 B (0.56363636 0.43636364)
## 68) smoothness_mean>=-2.454281 54 10 B (0.81481481 0.18518519) *
## 69) smoothness_mean< -2.454281 56 18 M (0.32142857 0.67857143) *
## 35) compactness_se>=-4.260936 29 3 M (0.10344828 0.89655172)
## 70) symmetry_worst< -1.580909 11 3 M (0.27272727 0.72727273) *
## 71) symmetry_worst>=-1.580909 18 0 M (0.00000000 1.00000000) *
## 9) smoothness_worst>=-1.451541 91 37 M (0.40659341 0.59340659)
## 18) compactness_se< -4.040144 41 9 B (0.78048780 0.21951220)
## 36) smoothness_mean< -2.222419 34 3 B (0.91176471 0.08823529)
## 72) smoothness_worst< -1.426496 29 0 B (1.00000000 0.00000000) *
## 73) smoothness_worst>=-1.426496 5 2 M (0.40000000 0.60000000) *
## 37) smoothness_mean>=-2.222419 7 1 M (0.14285714 0.85714286)
## 74) texture_mean< 2.88089 1 0 B (1.00000000 0.00000000) *
## 75) texture_mean>=2.88089 6 0 M (0.00000000 1.00000000) *
## 19) compactness_se>=-4.040144 50 5 M (0.10000000 0.90000000)
## 38) smoothness_mean>=-2.166752 12 4 M (0.33333333 0.66666667)
## 76) smoothness_mean< -2.111645 3 0 B (1.00000000 0.00000000) *
## 77) smoothness_mean>=-2.111645 9 1 M (0.11111111 0.88888889) *
## 39) smoothness_mean< -2.166752 38 1 M (0.02631579 0.97368421)
## 78) symmetry_worst< -1.895488 1 0 B (1.00000000 0.00000000) *
## 79) symmetry_worst>=-1.895488 37 0 M (0.00000000 1.00000000) *
## 5) compactness_se>=-3.690481 276 108 M (0.39130435 0.60869565)
## 10) compactness_se>=-3.586422 228 106 M (0.46491228 0.53508772)
## 20) symmetry_worst< -1.656986 74 23 B (0.68918919 0.31081081)
## 40) smoothness_mean< -2.389667 24 1 B (0.95833333 0.04166667)
## 80) texture_mean< 3.09132 23 0 B (1.00000000 0.00000000) *
## 81) texture_mean>=3.09132 1 0 M (0.00000000 1.00000000) *
## 41) smoothness_mean>=-2.389667 50 22 B (0.56000000 0.44000000)
## 82) compactness_se>=-3.355844 33 8 B (0.75757576 0.24242424) *
## 83) compactness_se< -3.355844 17 3 M (0.17647059 0.82352941) *
## 21) symmetry_worst>=-1.656986 154 55 M (0.35714286 0.64285714)
## 42) smoothness_mean< -2.219632 121 54 M (0.44628099 0.55371901)
## 84) symmetry_worst>=-1.629511 99 45 B (0.54545455 0.45454545) *
## 85) symmetry_worst< -1.629511 22 0 M (0.00000000 1.00000000) *
## 43) smoothness_mean>=-2.219632 33 1 M (0.03030303 0.96969697)
## 86) smoothness_mean>=-1.889548 1 0 B (1.00000000 0.00000000) *
## 87) smoothness_mean< -1.889548 32 0 M (0.00000000 1.00000000) *
## 11) compactness_se< -3.586422 48 2 M (0.04166667 0.95833333)
## 22) smoothness_mean< -2.509667 1 0 B (1.00000000 0.00000000) *
## 23) smoothness_mean>=-2.509667 47 1 M (0.02127660 0.97872340)
## 46) compactness_se< -3.681134 3 1 M (0.33333333 0.66666667)
## 92) texture_mean< 2.986001 1 0 B (1.00000000 0.00000000) *
## 93) texture_mean>=2.986001 2 0 M (0.00000000 1.00000000) *
## 47) compactness_se>=-3.681134 44 0 M (0.00000000 1.00000000) *
## 3) symmetry_worst< -1.9261 262 89 M (0.33969466 0.66030534)
## 6) smoothness_worst>=-1.477976 30 8 B (0.73333333 0.26666667)
## 12) texture_worst< 4.851322 22 0 B (1.00000000 0.00000000) *
## 13) texture_worst>=4.851322 8 0 M (0.00000000 1.00000000) *
## 7) smoothness_worst< -1.477976 232 67 M (0.28879310 0.71120690)
## 14) texture_mean>=3.336125 13 2 B (0.84615385 0.15384615)
## 28) smoothness_mean< -2.380359 11 0 B (1.00000000 0.00000000) *
## 29) smoothness_mean>=-2.380359 2 0 M (0.00000000 1.00000000) *
## 15) texture_mean< 3.336125 219 56 M (0.25570776 0.74429224)
## 30) compactness_se< -4.614925 6 0 B (1.00000000 0.00000000) *
## 31) compactness_se>=-4.614925 213 50 M (0.23474178 0.76525822)
## 62) symmetry_worst< -2.202388 63 27 M (0.42857143 0.57142857)
## 124) smoothness_worst>=-1.553723 26 4 B (0.84615385 0.15384615) *
## 125) smoothness_worst< -1.553723 37 5 M (0.13513514 0.86486486) *
## 63) symmetry_worst>=-2.202388 150 23 M (0.15333333 0.84666667)
## 126) texture_mean< 2.986405 61 20 M (0.32786885 0.67213115) *
## 127) texture_mean>=2.986405 89 3 M (0.03370787 0.96629213) *
##
## $trees[[75]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 434 M (0.47587719 0.52412281)
## 2) compactness_se< -3.987083 335 134 B (0.60000000 0.40000000)
## 4) smoothness_mean>=-2.291157 69 8 B (0.88405797 0.11594203)
## 8) symmetry_worst< -1.449852 65 4 B (0.93846154 0.06153846)
## 16) texture_worst< 5.040422 63 2 B (0.96825397 0.03174603)
## 32) symmetry_worst>=-1.743442 50 0 B (1.00000000 0.00000000) *
## 33) symmetry_worst< -1.743442 13 2 B (0.84615385 0.15384615)
## 66) symmetry_worst< -1.765932 11 0 B (1.00000000 0.00000000) *
## 67) symmetry_worst>=-1.765932 2 0 M (0.00000000 1.00000000) *
## 17) texture_worst>=5.040422 2 0 M (0.00000000 1.00000000) *
## 9) symmetry_worst>=-1.449852 4 0 M (0.00000000 1.00000000) *
## 5) smoothness_mean< -2.291157 266 126 B (0.52631579 0.47368421)
## 10) texture_mean< 2.947329 109 31 B (0.71559633 0.28440367)
## 20) smoothness_mean< -2.329495 77 14 B (0.81818182 0.18181818)
## 40) smoothness_worst< -1.555669 29 0 B (1.00000000 0.00000000) *
## 41) smoothness_worst>=-1.555669 48 14 B (0.70833333 0.29166667)
## 82) smoothness_worst>=-1.541278 35 1 B (0.97142857 0.02857143) *
## 83) smoothness_worst< -1.541278 13 0 M (0.00000000 1.00000000) *
## 21) smoothness_mean>=-2.329495 32 15 M (0.46875000 0.53125000)
## 42) texture_mean< 2.894246 19 4 B (0.78947368 0.21052632)
## 84) smoothness_mean< -2.295113 15 0 B (1.00000000 0.00000000) *
## 85) smoothness_mean>=-2.295113 4 0 M (0.00000000 1.00000000) *
## 43) texture_mean>=2.894246 13 0 M (0.00000000 1.00000000) *
## 11) texture_mean>=2.947329 157 62 M (0.39490446 0.60509554)
## 22) texture_mean>=3.227241 20 2 B (0.90000000 0.10000000)
## 44) texture_mean< 3.409933 18 0 B (1.00000000 0.00000000) *
## 45) texture_mean>=3.409933 2 0 M (0.00000000 1.00000000) *
## 23) texture_mean< 3.227241 137 44 M (0.32116788 0.67883212)
## 46) texture_mean< 3.107047 100 44 M (0.44000000 0.56000000)
## 92) smoothness_mean< -2.422683 45 15 B (0.66666667 0.33333333) *
## 93) smoothness_mean>=-2.422683 55 14 M (0.25454545 0.74545455) *
## 47) texture_mean>=3.107047 37 0 M (0.00000000 1.00000000) *
## 3) compactness_se>=-3.987083 577 233 M (0.40381282 0.59618718)
## 6) smoothness_worst< -1.499656 283 136 B (0.51943463 0.48056537)
## 12) texture_mean< 3.006671 103 25 B (0.75728155 0.24271845)
## 24) compactness_se>=-3.93685 98 20 B (0.79591837 0.20408163)
## 48) smoothness_worst>=-1.568787 59 2 B (0.96610169 0.03389831)
## 96) smoothness_mean< -2.176018 57 0 B (1.00000000 0.00000000) *
## 97) smoothness_mean>=-2.176018 2 0 M (0.00000000 1.00000000) *
## 49) smoothness_worst< -1.568787 39 18 B (0.53846154 0.46153846)
## 98) texture_worst< 4.56463 30 9 B (0.70000000 0.30000000) *
## 99) texture_worst>=4.56463 9 0 M (0.00000000 1.00000000) *
## 25) compactness_se< -3.93685 5 0 M (0.00000000 1.00000000) *
## 13) texture_mean>=3.006671 180 69 M (0.38333333 0.61666667)
## 26) smoothness_worst>=-1.553723 82 37 B (0.54878049 0.45121951)
## 52) compactness_se< -3.616009 27 3 B (0.88888889 0.11111111)
## 104) texture_mean>=3.059872 24 0 B (1.00000000 0.00000000) *
## 105) texture_mean< 3.059872 3 0 M (0.00000000 1.00000000) *
## 53) compactness_se>=-3.616009 55 21 M (0.38181818 0.61818182)
## 106) texture_worst< 4.566107 11 0 B (1.00000000 0.00000000) *
## 107) texture_worst>=4.566107 44 10 M (0.22727273 0.77272727) *
## 27) smoothness_worst< -1.553723 98 24 M (0.24489796 0.75510204)
## 54) symmetry_worst>=-2.04723 45 19 M (0.42222222 0.57777778)
## 108) symmetry_worst< -1.550826 27 8 B (0.70370370 0.29629630) *
## 109) symmetry_worst>=-1.550826 18 0 M (0.00000000 1.00000000) *
## 55) symmetry_worst< -2.04723 53 5 M (0.09433962 0.90566038)
## 110) compactness_se>=-3.179583 19 5 M (0.26315789 0.73684211) *
## 111) compactness_se< -3.179583 34 0 M (0.00000000 1.00000000) *
## 7) smoothness_worst>=-1.499656 294 86 M (0.29251701 0.70748299)
## 14) symmetry_worst< -1.352813 244 82 M (0.33606557 0.66393443)
## 28) symmetry_worst>=-1.471051 17 4 B (0.76470588 0.23529412)
## 56) texture_mean< 2.794024 10 0 B (1.00000000 0.00000000) *
## 57) texture_mean>=2.794024 7 3 M (0.42857143 0.57142857)
## 114) texture_mean>=2.887911 3 0 B (1.00000000 0.00000000) *
## 115) texture_mean< 2.887911 4 0 M (0.00000000 1.00000000) *
## 29) symmetry_worst< -1.471051 227 69 M (0.30396476 0.69603524)
## 58) symmetry_worst< -1.641484 175 64 M (0.36571429 0.63428571)
## 116) smoothness_mean>=-2.14559 15 0 B (1.00000000 0.00000000) *
## 117) smoothness_mean< -2.14559 160 49 M (0.30625000 0.69375000) *
## 59) symmetry_worst>=-1.641484 52 5 M (0.09615385 0.90384615)
## 118) texture_mean>=3.04903 1 0 B (1.00000000 0.00000000) *
## 119) texture_mean< 3.04903 51 4 M (0.07843137 0.92156863) *
## 15) symmetry_worst>=-1.352813 50 4 M (0.08000000 0.92000000)
## 30) smoothness_mean< -2.365259 2 0 B (1.00000000 0.00000000) *
## 31) smoothness_mean>=-2.365259 48 2 M (0.04166667 0.95833333)
## 62) compactness_se>=-2.588521 2 0 B (1.00000000 0.00000000) *
## 63) compactness_se< -2.588521 46 0 M (0.00000000 1.00000000) *
##
## $trees[[76]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 455 M (0.49890351 0.50109649)
## 2) compactness_se< -3.643388 525 222 B (0.57714286 0.42285714)
## 4) compactness_se>=-3.867535 115 25 B (0.78260870 0.21739130)
## 8) smoothness_worst>=-1.574151 105 16 B (0.84761905 0.15238095)
## 16) symmetry_worst< -1.3705 98 10 B (0.89795918 0.10204082)
## 32) smoothness_worst< -1.417195 91 5 B (0.94505495 0.05494505)
## 64) texture_mean>=2.680808 90 4 B (0.95555556 0.04444444) *
## 65) texture_mean< 2.680808 1 0 M (0.00000000 1.00000000) *
## 33) smoothness_worst>=-1.417195 7 2 M (0.28571429 0.71428571)
## 66) texture_mean< 2.89891 2 0 B (1.00000000 0.00000000) *
## 67) texture_mean>=2.89891 5 0 M (0.00000000 1.00000000) *
## 17) symmetry_worst>=-1.3705 7 1 M (0.14285714 0.85714286)
## 34) texture_mean< 2.76528 1 0 B (1.00000000 0.00000000) *
## 35) texture_mean>=2.76528 6 0 M (0.00000000 1.00000000) *
## 9) smoothness_worst< -1.574151 10 1 M (0.10000000 0.90000000)
## 18) texture_mean< 2.850088 1 0 B (1.00000000 0.00000000) *
## 19) texture_mean>=2.850088 9 0 M (0.00000000 1.00000000) *
## 5) compactness_se< -3.867535 410 197 B (0.51951220 0.48048780)
## 10) compactness_se< -3.885633 387 174 B (0.55038760 0.44961240)
## 20) texture_mean< 2.947329 159 49 B (0.69182390 0.30817610)
## 40) compactness_se>=-4.334002 105 20 B (0.80952381 0.19047619)
## 80) smoothness_worst< -1.451541 85 9 B (0.89411765 0.10588235) *
## 81) smoothness_worst>=-1.451541 20 9 M (0.45000000 0.55000000) *
## 41) compactness_se< -4.334002 54 25 M (0.46296296 0.53703704)
## 82) texture_worst< 4.35485 15 0 B (1.00000000 0.00000000) *
## 83) texture_worst>=4.35485 39 10 M (0.25641026 0.74358974) *
## 21) texture_mean>=2.947329 228 103 M (0.45175439 0.54824561)
## 42) symmetry_worst< -1.724518 152 71 B (0.53289474 0.46710526)
## 84) smoothness_worst< -1.576561 58 11 B (0.81034483 0.18965517) *
## 85) smoothness_worst>=-1.576561 94 34 M (0.36170213 0.63829787) *
## 43) symmetry_worst>=-1.724518 76 22 M (0.28947368 0.71052632)
## 86) symmetry_worst>=-1.490299 17 4 B (0.76470588 0.23529412) *
## 87) symmetry_worst< -1.490299 59 9 M (0.15254237 0.84745763) *
## 11) compactness_se>=-3.885633 23 0 M (0.00000000 1.00000000) *
## 3) compactness_se>=-3.643388 387 152 M (0.39276486 0.60723514)
## 6) symmetry_worst< -1.65431 225 112 B (0.50222222 0.49777778)
## 12) compactness_se>=-3.483667 135 48 B (0.64444444 0.35555556)
## 24) texture_mean< 3.135612 111 30 B (0.72972973 0.27027027)
## 48) smoothness_mean>=-2.563309 104 23 B (0.77884615 0.22115385)
## 96) smoothness_mean< -2.385259 37 0 B (1.00000000 0.00000000) *
## 97) smoothness_mean>=-2.385259 67 23 B (0.65671642 0.34328358) *
## 49) smoothness_mean< -2.563309 7 0 M (0.00000000 1.00000000) *
## 25) texture_mean>=3.135612 24 6 M (0.25000000 0.75000000)
## 50) smoothness_worst>=-1.43594 6 0 B (1.00000000 0.00000000) *
## 51) smoothness_worst< -1.43594 18 0 M (0.00000000 1.00000000) *
## 13) compactness_se< -3.483667 90 26 M (0.28888889 0.71111111)
## 26) texture_mean>=3.138519 18 5 B (0.72222222 0.27777778)
## 52) texture_mean< 3.399247 13 0 B (1.00000000 0.00000000) *
## 53) texture_mean>=3.399247 5 0 M (0.00000000 1.00000000) *
## 27) texture_mean< 3.138519 72 13 M (0.18055556 0.81944444)
## 54) smoothness_mean< -2.380923 23 10 M (0.43478261 0.56521739)
## 108) smoothness_mean>=-2.436819 10 0 B (1.00000000 0.00000000) *
## 109) smoothness_mean< -2.436819 13 0 M (0.00000000 1.00000000) *
## 55) smoothness_mean>=-2.380923 49 3 M (0.06122449 0.93877551)
## 110) smoothness_worst>=-1.459744 2 0 B (1.00000000 0.00000000) *
## 111) smoothness_worst< -1.459744 47 1 M (0.02127660 0.97872340) *
## 7) symmetry_worst>=-1.65431 162 39 M (0.24074074 0.75925926)
## 14) compactness_se< -3.484318 29 11 B (0.62068966 0.37931034)
## 28) compactness_se>=-3.502612 17 0 B (1.00000000 0.00000000) *
## 29) compactness_se< -3.502612 12 1 M (0.08333333 0.91666667)
## 58) texture_mean>=3.08111 1 0 B (1.00000000 0.00000000) *
## 59) texture_mean< 3.08111 11 0 M (0.00000000 1.00000000) *
## 15) compactness_se>=-3.484318 133 21 M (0.15789474 0.84210526)
## 30) compactness_se>=-2.659237 7 2 B (0.71428571 0.28571429)
## 60) texture_mean< 3.031641 5 0 B (1.00000000 0.00000000) *
## 61) texture_mean>=3.031641 2 0 M (0.00000000 1.00000000) *
## 31) compactness_se< -2.659237 126 16 M (0.12698413 0.87301587)
## 62) smoothness_worst>=-1.533662 90 16 M (0.17777778 0.82222222)
## 124) smoothness_worst< -1.482108 39 14 M (0.35897436 0.64102564) *
## 125) smoothness_worst>=-1.482108 51 2 M (0.03921569 0.96078431) *
## 63) smoothness_worst< -1.533662 36 0 M (0.00000000 1.00000000) *
##
## $trees[[77]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 450 B (0.50657895 0.49342105)
## 2) compactness_se< -3.672219 561 230 B (0.59001783 0.40998217)
## 4) smoothness_worst>=-1.6166 507 187 B (0.63116371 0.36883629)
## 8) smoothness_mean< -2.468758 46 2 B (0.95652174 0.04347826)
## 16) texture_mean< 3.388429 44 0 B (1.00000000 0.00000000) *
## 17) texture_mean>=3.388429 2 0 M (0.00000000 1.00000000) *
## 9) smoothness_mean>=-2.468758 461 185 B (0.59869848 0.40130152)
## 18) smoothness_mean>=-2.382983 284 90 B (0.68309859 0.31690141)
## 36) symmetry_worst< -1.293329 270 77 B (0.71481481 0.28518519)
## 72) symmetry_worst< -1.786753 114 20 B (0.82456140 0.17543860) *
## 73) symmetry_worst>=-1.786753 156 57 B (0.63461538 0.36538462) *
## 37) symmetry_worst>=-1.293329 14 1 M (0.07142857 0.92857143)
## 74) texture_mean< 2.824658 1 0 B (1.00000000 0.00000000) *
## 75) texture_mean>=2.824658 13 0 M (0.00000000 1.00000000) *
## 19) smoothness_mean< -2.382983 177 82 M (0.46327684 0.53672316)
## 38) smoothness_mean< -2.394871 137 57 B (0.58394161 0.41605839)
## 76) texture_worst>=4.465917 111 35 B (0.68468468 0.31531532) *
## 77) texture_worst< 4.465917 26 4 M (0.15384615 0.84615385) *
## 39) smoothness_mean>=-2.394871 40 2 M (0.05000000 0.95000000)
## 78) texture_mean< 2.909709 2 0 B (1.00000000 0.00000000) *
## 79) texture_mean>=2.909709 38 0 M (0.00000000 1.00000000) *
## 5) smoothness_worst< -1.6166 54 11 M (0.20370370 0.79629630)
## 10) smoothness_mean< -2.572721 6 0 B (1.00000000 0.00000000) *
## 11) smoothness_mean>=-2.572721 48 5 M (0.10416667 0.89583333)
## 22) texture_mean< 2.936117 2 0 B (1.00000000 0.00000000) *
## 23) texture_mean>=2.936117 46 3 M (0.06521739 0.93478261)
## 46) texture_mean>=3.23119 2 0 B (1.00000000 0.00000000) *
## 47) texture_mean< 3.23119 44 1 M (0.02272727 0.97727273)
## 94) compactness_se< -4.803674 1 0 B (1.00000000 0.00000000) *
## 95) compactness_se>=-4.803674 43 0 M (0.00000000 1.00000000) *
## 3) compactness_se>=-3.672219 351 131 M (0.37321937 0.62678063)
## 6) smoothness_mean< -2.413575 75 27 B (0.64000000 0.36000000)
## 12) compactness_se>=-3.125122 40 6 B (0.85000000 0.15000000)
## 24) smoothness_worst>=-1.708845 33 0 B (1.00000000 0.00000000) *
## 25) smoothness_worst< -1.708845 7 1 M (0.14285714 0.85714286)
## 50) texture_mean>=3.103494 1 0 B (1.00000000 0.00000000) *
## 51) texture_mean< 3.103494 6 0 M (0.00000000 1.00000000) *
## 13) compactness_se< -3.125122 35 14 M (0.40000000 0.60000000)
## 26) texture_mean< 3.049127 20 7 B (0.65000000 0.35000000)
## 52) texture_worst>=3.981964 14 2 B (0.85714286 0.14285714)
## 104) symmetry_worst< -1.851403 11 0 B (1.00000000 0.00000000) *
## 105) symmetry_worst>=-1.851403 3 1 M (0.33333333 0.66666667) *
## 53) texture_worst< 3.981964 6 1 M (0.16666667 0.83333333)
## 106) texture_mean< 2.754513 1 0 B (1.00000000 0.00000000) *
## 107) texture_mean>=2.754513 5 0 M (0.00000000 1.00000000) *
## 27) texture_mean>=3.049127 15 1 M (0.06666667 0.93333333)
## 54) texture_mean>=3.337721 2 1 B (0.50000000 0.50000000)
## 108) texture_mean< 3.410351 1 0 B (1.00000000 0.00000000) *
## 109) texture_mean>=3.410351 1 0 M (0.00000000 1.00000000) *
## 55) texture_mean< 3.337721 13 0 M (0.00000000 1.00000000) *
## 7) smoothness_mean>=-2.413575 276 83 M (0.30072464 0.69927536)
## 14) texture_worst< 3.832298 7 0 B (1.00000000 0.00000000) *
## 15) texture_worst>=3.832298 269 76 M (0.28252788 0.71747212)
## 30) symmetry_worst< -2.156952 15 5 B (0.66666667 0.33333333)
## 60) smoothness_mean< -2.244441 10 0 B (1.00000000 0.00000000) *
## 61) smoothness_mean>=-2.244441 5 0 M (0.00000000 1.00000000) *
## 31) symmetry_worst>=-2.156952 254 66 M (0.25984252 0.74015748)
## 62) texture_worst< 4.55941 139 49 M (0.35251799 0.64748201)
## 124) smoothness_worst< -1.503711 53 20 B (0.62264151 0.37735849) *
## 125) smoothness_worst>=-1.503711 86 16 M (0.18604651 0.81395349) *
## 63) texture_worst>=4.55941 115 17 M (0.14782609 0.85217391)
## 126) smoothness_mean>=-2.093138 4 0 B (1.00000000 0.00000000) *
## 127) smoothness_mean< -2.093138 111 13 M (0.11711712 0.88288288) *
##
## $trees[[78]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 446 M (0.48903509 0.51096491)
## 2) smoothness_mean< -2.413908 240 93 B (0.61250000 0.38750000)
## 4) smoothness_worst>=-1.548762 84 11 B (0.86904762 0.13095238)
## 8) symmetry_worst>=-1.996006 77 5 B (0.93506494 0.06493506)
## 16) texture_mean< 3.359301 75 3 B (0.96000000 0.04000000)
## 32) symmetry_worst< -1.429489 73 1 B (0.98630137 0.01369863)
## 64) texture_worst< 5.003123 70 0 B (1.00000000 0.00000000) *
## 65) texture_worst>=5.003123 3 1 B (0.66666667 0.33333333) *
## 33) symmetry_worst>=-1.429489 2 0 M (0.00000000 1.00000000) *
## 17) texture_mean>=3.359301 2 0 M (0.00000000 1.00000000) *
## 9) symmetry_worst< -1.996006 7 1 M (0.14285714 0.85714286)
## 18) texture_mean< 3.047283 1 0 B (1.00000000 0.00000000) *
## 19) texture_mean>=3.047283 6 0 M (0.00000000 1.00000000) *
## 5) smoothness_worst< -1.548762 156 74 M (0.47435897 0.52564103)
## 10) texture_mean< 2.921008 43 13 B (0.69767442 0.30232558)
## 20) texture_mean>=2.881715 20 0 B (1.00000000 0.00000000) *
## 21) texture_mean< 2.881715 23 10 M (0.43478261 0.56521739)
## 42) smoothness_mean< -2.469112 7 0 B (1.00000000 0.00000000) *
## 43) smoothness_mean>=-2.469112 16 3 M (0.18750000 0.81250000)
## 86) compactness_se< -4.514299 3 0 B (1.00000000 0.00000000) *
## 87) compactness_se>=-4.514299 13 0 M (0.00000000 1.00000000) *
## 11) texture_mean>=2.921008 113 44 M (0.38938053 0.61061947)
## 22) smoothness_worst< -1.623453 28 9 B (0.67857143 0.32142857)
## 44) texture_worst>=4.576562 17 1 B (0.94117647 0.05882353)
## 88) symmetry_worst< -1.18694 16 0 B (1.00000000 0.00000000) *
## 89) symmetry_worst>=-1.18694 1 0 M (0.00000000 1.00000000) *
## 45) texture_worst< 4.576562 11 3 M (0.27272727 0.72727273)
## 90) texture_worst< 4.457426 4 1 B (0.75000000 0.25000000) *
## 91) texture_worst>=4.457426 7 0 M (0.00000000 1.00000000) *
## 23) smoothness_worst>=-1.623453 85 25 M (0.29411765 0.70588235)
## 46) texture_mean>=3.337367 7 1 B (0.85714286 0.14285714)
## 92) texture_mean< 3.388429 6 0 B (1.00000000 0.00000000) *
## 93) texture_mean>=3.388429 1 0 M (0.00000000 1.00000000) *
## 47) texture_mean< 3.337367 78 19 M (0.24358974 0.75641026)
## 94) texture_worst< 4.828203 40 17 M (0.42500000 0.57500000) *
## 95) texture_worst>=4.828203 38 2 M (0.05263158 0.94736842) *
## 3) smoothness_mean>=-2.413908 672 299 M (0.44494048 0.55505952)
## 6) smoothness_mean>=-2.354774 460 222 B (0.51739130 0.48260870)
## 12) compactness_se< -3.952856 130 34 B (0.73846154 0.26153846)
## 24) symmetry_worst< -1.33108 124 28 B (0.77419355 0.22580645)
## 48) smoothness_mean< -2.329495 28 0 B (1.00000000 0.00000000) *
## 49) smoothness_mean>=-2.329495 96 28 B (0.70833333 0.29166667)
## 98) smoothness_mean>=-2.294121 70 10 B (0.85714286 0.14285714) *
## 99) smoothness_mean< -2.294121 26 8 M (0.30769231 0.69230769) *
## 25) symmetry_worst>=-1.33108 6 0 M (0.00000000 1.00000000) *
## 13) compactness_se>=-3.952856 330 142 M (0.43030303 0.56969697)
## 26) compactness_se>=-3.904303 291 141 M (0.48453608 0.51546392)
## 52) texture_mean>=3.039742 91 27 B (0.70329670 0.29670330)
## 104) compactness_se< -3.352836 78 14 B (0.82051282 0.17948718) *
## 105) compactness_se>=-3.352836 13 0 M (0.00000000 1.00000000) *
## 53) texture_mean< 3.039742 200 77 M (0.38500000 0.61500000)
## 106) smoothness_mean>=-2.262404 107 48 B (0.55140187 0.44859813) *
## 107) smoothness_mean< -2.262404 93 18 M (0.19354839 0.80645161) *
## 27) compactness_se< -3.904303 39 1 M (0.02564103 0.97435897)
## 54) texture_mean< 2.858451 1 0 B (1.00000000 0.00000000) *
## 55) texture_mean>=2.858451 38 0 M (0.00000000 1.00000000) *
## 7) smoothness_mean< -2.354774 212 61 M (0.28773585 0.71226415)
## 14) texture_mean< 2.974761 70 28 B (0.60000000 0.40000000)
## 28) smoothness_worst< -1.549191 22 0 B (1.00000000 0.00000000) *
## 29) smoothness_worst>=-1.549191 48 20 M (0.41666667 0.58333333)
## 58) symmetry_worst>=-1.582804 11 0 B (1.00000000 0.00000000) *
## 59) symmetry_worst< -1.582804 37 9 M (0.24324324 0.75675676)
## 118) texture_worst< 4.228934 4 0 B (1.00000000 0.00000000) *
## 119) texture_worst>=4.228934 33 5 M (0.15151515 0.84848485) *
## 15) texture_mean>=2.974761 142 19 M (0.13380282 0.86619718)
## 30) texture_mean>=3.351321 13 2 B (0.84615385 0.15384615)
## 60) texture_mean< 3.431382 11 0 B (1.00000000 0.00000000) *
## 61) texture_mean>=3.431382 2 0 M (0.00000000 1.00000000) *
## 31) texture_mean< 3.351321 129 8 M (0.06201550 0.93798450)
## 62) symmetry_worst< -2.204211 7 3 B (0.57142857 0.42857143)
## 124) smoothness_mean< -2.372601 4 0 B (1.00000000 0.00000000) *
## 125) smoothness_mean>=-2.372601 3 0 M (0.00000000 1.00000000) *
## 63) symmetry_worst>=-2.204211 122 4 M (0.03278689 0.96721311)
## 126) smoothness_worst>=-1.421107 2 0 B (1.00000000 0.00000000) *
## 127) smoothness_worst< -1.421107 120 2 M (0.01666667 0.98333333) *
##
## $trees[[79]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 423 M (0.46381579 0.53618421)
## 2) smoothness_mean< -2.408446 266 89 B (0.66541353 0.33458647)
## 4) texture_mean< 2.959066 95 17 B (0.82105263 0.17894737)
## 8) compactness_se< -3.734437 72 6 B (0.91666667 0.08333333)
## 16) smoothness_worst< -1.555886 32 0 B (1.00000000 0.00000000) *
## 17) smoothness_worst>=-1.555886 40 6 B (0.85000000 0.15000000)
## 34) smoothness_worst>=-1.551775 35 1 B (0.97142857 0.02857143)
## 68) texture_mean>=2.837818 32 0 B (1.00000000 0.00000000) *
## 69) texture_mean< 2.837818 3 1 B (0.66666667 0.33333333) *
## 35) smoothness_worst< -1.551775 5 0 M (0.00000000 1.00000000) *
## 9) compactness_se>=-3.734437 23 11 B (0.52173913 0.47826087)
## 18) compactness_se>=-3.483667 11 0 B (1.00000000 0.00000000) *
## 19) compactness_se< -3.483667 12 1 M (0.08333333 0.91666667)
## 38) texture_mean< 2.680923 1 0 B (1.00000000 0.00000000) *
## 39) texture_mean>=2.680923 11 0 M (0.00000000 1.00000000) *
## 5) texture_mean>=2.959066 171 72 B (0.57894737 0.42105263)
## 10) symmetry_worst< -1.541072 158 59 B (0.62658228 0.37341772)
## 20) texture_worst>=4.498003 143 45 B (0.68531469 0.31468531)
## 40) symmetry_worst>=-2.218277 132 36 B (0.72727273 0.27272727)
## 80) compactness_se>=-4.658767 115 24 B (0.79130435 0.20869565) *
## 81) compactness_se< -4.658767 17 5 M (0.29411765 0.70588235) *
## 41) symmetry_worst< -2.218277 11 2 M (0.18181818 0.81818182)
## 82) smoothness_mean< -2.490273 2 0 B (1.00000000 0.00000000) *
## 83) smoothness_mean>=-2.490273 9 0 M (0.00000000 1.00000000) *
## 21) texture_worst< 4.498003 15 1 M (0.06666667 0.93333333)
## 42) compactness_se>=-2.715861 1 0 B (1.00000000 0.00000000) *
## 43) compactness_se< -2.715861 14 0 M (0.00000000 1.00000000) *
## 11) symmetry_worst>=-1.541072 13 0 M (0.00000000 1.00000000) *
## 3) smoothness_mean>=-2.408446 646 246 M (0.38080495 0.61919505)
## 6) compactness_se< -3.063476 566 237 M (0.41872792 0.58127208)
## 12) smoothness_mean>=-2.354774 419 201 M (0.47971360 0.52028640)
## 24) smoothness_mean< -2.344241 30 0 B (1.00000000 0.00000000) *
## 25) smoothness_mean>=-2.344241 389 171 M (0.43958869 0.56041131)
## 50) texture_worst< 4.895983 318 157 M (0.49371069 0.50628931)
## 100) texture_worst>=4.775439 37 2 B (0.94594595 0.05405405) *
## 101) texture_worst< 4.775439 281 122 M (0.43416370 0.56583630) *
## 51) texture_worst>=4.895983 71 14 M (0.19718310 0.80281690)
## 102) compactness_se< -4.040144 9 1 B (0.88888889 0.11111111) *
## 103) compactness_se>=-4.040144 62 6 M (0.09677419 0.90322581) *
## 13) smoothness_mean< -2.354774 147 36 M (0.24489796 0.75510204)
## 26) texture_mean>=3.351321 15 3 B (0.80000000 0.20000000)
## 52) texture_mean< 3.407548 12 0 B (1.00000000 0.00000000) *
## 53) texture_mean>=3.407548 3 0 M (0.00000000 1.00000000) *
## 27) texture_mean< 3.351321 132 24 M (0.18181818 0.81818182)
## 54) texture_worst< 4.212101 4 0 B (1.00000000 0.00000000) *
## 55) texture_worst>=4.212101 128 20 M (0.15625000 0.84375000)
## 110) smoothness_mean< -2.367284 82 20 M (0.24390244 0.75609756) *
## 111) smoothness_mean>=-2.367284 46 0 M (0.00000000 1.00000000) *
## 7) compactness_se>=-3.063476 80 9 M (0.11250000 0.88750000)
## 14) compactness_se>=-2.721974 12 6 B (0.50000000 0.50000000)
## 28) texture_worst< 4.41664 6 0 B (1.00000000 0.00000000) *
## 29) texture_worst>=4.41664 6 0 M (0.00000000 1.00000000) *
## 15) compactness_se< -2.721974 68 3 M (0.04411765 0.95588235)
## 30) smoothness_mean>=-2.080253 1 0 B (1.00000000 0.00000000) *
## 31) smoothness_mean< -2.080253 67 2 M (0.02985075 0.97014925)
## 62) texture_worst>=4.801107 5 2 M (0.40000000 0.60000000)
## 124) texture_mean< 3.159827 2 0 B (1.00000000 0.00000000) *
## 125) texture_mean>=3.159827 3 0 M (0.00000000 1.00000000) *
## 63) texture_worst< 4.801107 62 0 M (0.00000000 1.00000000) *
##
## $trees[[80]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 434 B (0.52412281 0.47587719)
## 2) texture_worst< 4.818867 696 288 B (0.58620690 0.41379310)
## 4) texture_worst>=4.753106 59 4 B (0.93220339 0.06779661)
## 8) compactness_se< -3.322755 57 2 B (0.96491228 0.03508772)
## 16) symmetry_worst< -0.9904278 55 0 B (1.00000000 0.00000000) *
## 17) symmetry_worst>=-0.9904278 2 0 M (0.00000000 1.00000000) *
## 9) compactness_se>=-3.322755 2 0 M (0.00000000 1.00000000) *
## 5) texture_worst< 4.753106 637 284 B (0.55416013 0.44583987)
## 10) texture_worst< 4.681966 554 224 B (0.59566787 0.40433213)
## 20) texture_worst>=4.642157 47 3 B (0.93617021 0.06382979)
## 40) texture_mean< 3.062639 45 1 B (0.97777778 0.02222222)
## 80) texture_mean>=2.836998 44 0 B (1.00000000 0.00000000) *
## 81) texture_mean< 2.836998 1 0 M (0.00000000 1.00000000) *
## 41) texture_mean>=3.062639 2 0 M (0.00000000 1.00000000) *
## 21) texture_worst< 4.642157 507 221 B (0.56410256 0.43589744)
## 42) texture_worst< 4.580648 427 164 B (0.61592506 0.38407494)
## 84) smoothness_worst< -1.384694 404 144 B (0.64356436 0.35643564) *
## 85) smoothness_worst>=-1.384694 23 3 M (0.13043478 0.86956522) *
## 43) texture_worst>=4.580648 80 23 M (0.28750000 0.71250000)
## 86) symmetry_worst>=-1.580305 17 6 B (0.64705882 0.35294118) *
## 87) symmetry_worst< -1.580305 63 12 M (0.19047619 0.80952381) *
## 11) texture_worst>=4.681966 83 23 M (0.27710843 0.72289157)
## 22) symmetry_worst< -1.87333 36 13 B (0.63888889 0.36111111)
## 44) smoothness_mean< -2.391331 23 0 B (1.00000000 0.00000000) *
## 45) smoothness_mean>=-2.391331 13 0 M (0.00000000 1.00000000) *
## 23) symmetry_worst>=-1.87333 47 0 M (0.00000000 1.00000000) *
## 3) texture_worst>=4.818867 216 70 M (0.32407407 0.67592593)
## 6) smoothness_worst< -1.52112 105 50 M (0.47619048 0.52380952)
## 12) smoothness_worst>=-1.588911 67 23 B (0.65671642 0.34328358)
## 24) texture_worst< 5.084467 46 8 B (0.82608696 0.17391304)
## 48) compactness_se< -3.56617 44 6 B (0.86363636 0.13636364)
## 96) compactness_se>=-4.393029 30 0 B (1.00000000 0.00000000) *
## 97) compactness_se< -4.393029 14 6 B (0.57142857 0.42857143) *
## 49) compactness_se>=-3.56617 2 0 M (0.00000000 1.00000000) *
## 25) texture_worst>=5.084467 21 6 M (0.28571429 0.71428571)
## 50) symmetry_worst< -2.159635 3 0 B (1.00000000 0.00000000) *
## 51) symmetry_worst>=-2.159635 18 3 M (0.16666667 0.83333333)
## 102) smoothness_mean< -2.431488 4 1 B (0.75000000 0.25000000) *
## 103) smoothness_mean>=-2.431488 14 0 M (0.00000000 1.00000000) *
## 13) smoothness_worst< -1.588911 38 6 M (0.15789474 0.84210526)
## 26) compactness_se< -4.899363 3 0 B (1.00000000 0.00000000) *
## 27) compactness_se>=-4.899363 35 3 M (0.08571429 0.91428571)
## 54) smoothness_worst< -1.62752 6 3 B (0.50000000 0.50000000)
## 108) texture_mean>=3.22319 3 0 B (1.00000000 0.00000000) *
## 109) texture_mean< 3.22319 3 0 M (0.00000000 1.00000000) *
## 55) smoothness_worst>=-1.62752 29 0 M (0.00000000 1.00000000) *
## 7) smoothness_worst>=-1.52112 111 20 M (0.18018018 0.81981982)
## 14) smoothness_mean>=-2.209101 8 1 B (0.87500000 0.12500000)
## 28) smoothness_mean< -2.075957 7 0 B (1.00000000 0.00000000) *
## 29) smoothness_mean>=-2.075957 1 0 M (0.00000000 1.00000000) *
## 15) smoothness_mean< -2.209101 103 13 M (0.12621359 0.87378641)
## 30) compactness_se>=-3.106177 7 2 B (0.71428571 0.28571429)
## 60) smoothness_mean< -2.309338 5 0 B (1.00000000 0.00000000) *
## 61) smoothness_mean>=-2.309338 2 0 M (0.00000000 1.00000000) *
## 31) compactness_se< -3.106177 96 8 M (0.08333333 0.91666667)
## 62) compactness_se< -4.557422 5 1 B (0.80000000 0.20000000)
## 124) texture_worst>=5.08621 4 0 B (1.00000000 0.00000000) *
## 125) texture_worst< 5.08621 1 0 M (0.00000000 1.00000000) *
## 63) compactness_se>=-4.557422 91 4 M (0.04395604 0.95604396)
## 126) texture_mean>=3.309778 12 2 M (0.16666667 0.83333333) *
## 127) texture_mean< 3.309778 79 2 M (0.02531646 0.97468354) *
##
## $trees[[81]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 445 M (0.48793860 0.51206140)
## 2) compactness_se< -4.219581 196 66 B (0.66326531 0.33673469)
## 4) smoothness_mean>=-2.3007 41 0 B (1.00000000 0.00000000) *
## 5) smoothness_mean< -2.3007 155 66 B (0.57419355 0.42580645)
## 10) symmetry_worst>=-1.52618 27 1 B (0.96296296 0.03703704)
## 20) smoothness_mean>=-2.466044 26 0 B (1.00000000 0.00000000) *
## 21) smoothness_mean< -2.466044 1 0 M (0.00000000 1.00000000) *
## 11) symmetry_worst< -1.52618 128 63 M (0.49218750 0.50781250)
## 22) texture_mean< 2.841101 17 0 B (1.00000000 0.00000000) *
## 23) texture_mean>=2.841101 111 46 M (0.41441441 0.58558559)
## 46) texture_mean>=3.232565 12 0 B (1.00000000 0.00000000) *
## 47) texture_mean< 3.232565 99 34 M (0.34343434 0.65656566)
## 94) smoothness_worst< -1.557411 57 27 B (0.52631579 0.47368421) *
## 95) smoothness_worst>=-1.557411 42 4 M (0.09523810 0.90476190) *
## 3) compactness_se>=-4.219581 716 315 M (0.43994413 0.56005587)
## 6) compactness_se>=-4.180058 689 314 M (0.45573295 0.54426705)
## 12) compactness_se< -3.027402 618 298 M (0.48220065 0.51779935)
## 24) compactness_se>=-3.239083 43 6 B (0.86046512 0.13953488)
## 48) smoothness_worst< -1.477215 38 2 B (0.94736842 0.05263158)
## 96) symmetry_worst< -1.339667 36 0 B (1.00000000 0.00000000) *
## 97) symmetry_worst>=-1.339667 2 0 M (0.00000000 1.00000000) *
## 49) smoothness_worst>=-1.477215 5 1 M (0.20000000 0.80000000)
## 98) texture_mean< 2.701935 1 0 B (1.00000000 0.00000000) *
## 99) texture_mean>=2.701935 4 0 M (0.00000000 1.00000000) *
## 25) compactness_se< -3.239083 575 261 M (0.45391304 0.54608696)
## 50) smoothness_worst>=-1.434633 101 36 B (0.64356436 0.35643564)
## 100) texture_mean< 3.05894 81 19 B (0.76543210 0.23456790) *
## 101) texture_mean>=3.05894 20 3 M (0.15000000 0.85000000) *
## 51) smoothness_worst< -1.434633 474 196 M (0.41350211 0.58649789)
## 102) smoothness_worst< -1.452493 400 187 M (0.46750000 0.53250000) *
## 103) smoothness_worst>=-1.452493 74 9 M (0.12162162 0.87837838) *
## 13) compactness_se>=-3.027402 71 16 M (0.22535211 0.77464789)
## 26) smoothness_mean< -2.336585 23 9 B (0.60869565 0.39130435)
## 52) texture_mean< 3.076827 14 1 B (0.92857143 0.07142857)
## 104) compactness_se>=-2.984387 13 0 B (1.00000000 0.00000000) *
## 105) compactness_se< -2.984387 1 0 M (0.00000000 1.00000000) *
## 53) texture_mean>=3.076827 9 1 M (0.11111111 0.88888889)
## 106) texture_mean>=3.166628 1 0 B (1.00000000 0.00000000) *
## 107) texture_mean< 3.166628 8 0 M (0.00000000 1.00000000) *
## 27) smoothness_mean>=-2.336585 48 2 M (0.04166667 0.95833333)
## 54) compactness_se>=-2.470993 3 1 B (0.66666667 0.33333333)
## 108) texture_mean< 2.929061 2 0 B (1.00000000 0.00000000) *
## 109) texture_mean>=2.929061 1 0 M (0.00000000 1.00000000) *
## 55) compactness_se< -2.470993 45 0 M (0.00000000 1.00000000) *
## 7) compactness_se< -4.180058 27 1 M (0.03703704 0.96296296)
## 14) smoothness_mean< -2.456941 1 0 B (1.00000000 0.00000000) *
## 15) smoothness_mean>=-2.456941 26 0 M (0.00000000 1.00000000) *
##
## $trees[[82]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 451 B (0.50548246 0.49451754)
## 2) smoothness_worst< -1.458403 707 311 B (0.56011315 0.43988685)
## 4) smoothness_worst>=-1.477976 115 22 B (0.80869565 0.19130435)
## 8) texture_worst< 4.682677 84 6 B (0.92857143 0.07142857)
## 16) symmetry_worst< -1.35761 82 4 B (0.95121951 0.04878049)
## 32) smoothness_mean>=-2.354774 77 1 B (0.98701299 0.01298701)
## 64) texture_mean< 3.069079 76 0 B (1.00000000 0.00000000) *
## 65) texture_mean>=3.069079 1 0 M (0.00000000 1.00000000) *
## 33) smoothness_mean< -2.354774 5 2 M (0.40000000 0.60000000)
## 66) texture_mean>=2.774841 2 0 B (1.00000000 0.00000000) *
## 67) texture_mean< 2.774841 3 0 M (0.00000000 1.00000000) *
## 17) symmetry_worst>=-1.35761 2 0 M (0.00000000 1.00000000) *
## 9) texture_worst>=4.682677 31 15 M (0.48387097 0.51612903)
## 18) texture_mean< 2.978826 11 1 B (0.90909091 0.09090909)
## 36) smoothness_mean< -2.304115 10 0 B (1.00000000 0.00000000) *
## 37) smoothness_mean>=-2.304115 1 0 M (0.00000000 1.00000000) *
## 19) texture_mean>=2.978826 20 5 M (0.25000000 0.75000000)
## 38) symmetry_worst< -2.052347 5 0 B (1.00000000 0.00000000) *
## 39) symmetry_worst>=-2.052347 15 0 M (0.00000000 1.00000000) *
## 5) smoothness_worst< -1.477976 592 289 B (0.51182432 0.48817568)
## 10) smoothness_worst< -1.482107 549 248 B (0.54826958 0.45173042)
## 20) smoothness_worst>=-1.484675 37 0 B (1.00000000 0.00000000) *
## 21) smoothness_worst< -1.484675 512 248 B (0.51562500 0.48437500)
## 42) texture_worst< 4.961576 417 184 B (0.55875300 0.44124700)
## 84) symmetry_worst>=-2.391709 388 160 B (0.58762887 0.41237113) *
## 85) symmetry_worst< -2.391709 29 5 M (0.17241379 0.82758621) *
## 43) texture_worst>=4.961576 95 31 M (0.32631579 0.67368421)
## 86) symmetry_worst< -2.057752 23 7 B (0.69565217 0.30434783) *
## 87) symmetry_worst>=-2.057752 72 15 M (0.20833333 0.79166667) *
## 11) smoothness_worst>=-1.482107 43 2 M (0.04651163 0.95348837)
## 22) texture_worst< 4.126187 1 0 B (1.00000000 0.00000000) *
## 23) texture_worst>=4.126187 42 1 M (0.02380952 0.97619048)
## 46) texture_worst>=4.635614 2 1 B (0.50000000 0.50000000)
## 92) texture_mean< 2.931199 1 0 B (1.00000000 0.00000000) *
## 93) texture_mean>=2.931199 1 0 M (0.00000000 1.00000000) *
## 47) texture_worst< 4.635614 40 0 M (0.00000000 1.00000000) *
## 3) smoothness_worst>=-1.458403 205 65 M (0.31707317 0.68292683)
## 6) symmetry_worst< -1.895488 19 2 B (0.89473684 0.10526316)
## 12) texture_mean< 3.129344 17 0 B (1.00000000 0.00000000) *
## 13) texture_mean>=3.129344 2 0 M (0.00000000 1.00000000) *
## 7) symmetry_worst>=-1.895488 186 48 M (0.25806452 0.74193548)
## 14) compactness_se< -4.02632 38 13 B (0.65789474 0.34210526)
## 28) texture_worst< 5.196207 30 5 B (0.83333333 0.16666667)
## 56) compactness_se>=-4.186419 22 1 B (0.95454545 0.04545455)
## 112) symmetry_worst< -1.180039 21 0 B (1.00000000 0.00000000) *
## 113) symmetry_worst>=-1.180039 1 0 M (0.00000000 1.00000000) *
## 57) compactness_se< -4.186419 8 4 B (0.50000000 0.50000000)
## 114) texture_mean< 2.950291 3 0 B (1.00000000 0.00000000) *
## 115) texture_mean>=2.950291 5 1 M (0.20000000 0.80000000) *
## 29) texture_worst>=5.196207 8 0 M (0.00000000 1.00000000) *
## 15) compactness_se>=-4.02632 148 23 M (0.15540541 0.84459459)
## 30) smoothness_worst>=-1.351748 17 8 B (0.52941176 0.47058824)
## 60) symmetry_worst< -1.596878 7 0 B (1.00000000 0.00000000) *
## 61) symmetry_worst>=-1.596878 10 2 M (0.20000000 0.80000000)
## 122) texture_mean< 2.688296 2 0 B (1.00000000 0.00000000) *
## 123) texture_mean>=2.688296 8 0 M (0.00000000 1.00000000) *
## 31) smoothness_worst< -1.351748 131 14 M (0.10687023 0.89312977)
## 62) texture_mean< 2.777879 27 9 M (0.33333333 0.66666667)
## 124) texture_mean>=2.603081 9 0 B (1.00000000 0.00000000) *
## 125) texture_mean< 2.603081 18 0 M (0.00000000 1.00000000) *
## 63) texture_mean>=2.777879 104 5 M (0.04807692 0.95192308)
## 126) compactness_se>=-3.702474 50 5 M (0.10000000 0.90000000) *
## 127) compactness_se< -3.702474 54 0 M (0.00000000 1.00000000) *
##
## $trees[[83]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 391 M (0.42872807 0.57127193)
## 2) compactness_se< -4.705732 18 1 B (0.94444444 0.05555556)
## 4) symmetry_worst< -1.19897 17 0 B (1.00000000 0.00000000) *
## 5) symmetry_worst>=-1.19897 1 0 M (0.00000000 1.00000000) *
## 3) compactness_se>=-4.705732 894 374 M (0.41834452 0.58165548)
## 6) smoothness_worst>=-1.477976 241 111 B (0.53941909 0.46058091)
## 12) smoothness_worst< -1.473476 38 2 B (0.94736842 0.05263158)
## 24) texture_mean< 3.069079 36 0 B (1.00000000 0.00000000) *
## 25) texture_mean>=3.069079 2 0 M (0.00000000 1.00000000) *
## 13) smoothness_worst>=-1.473476 203 94 M (0.46305419 0.53694581)
## 26) symmetry_worst< -1.920541 25 3 B (0.88000000 0.12000000)
## 52) texture_mean< 3.159934 22 0 B (1.00000000 0.00000000) *
## 53) texture_mean>=3.159934 3 0 M (0.00000000 1.00000000) *
## 27) symmetry_worst>=-1.920541 178 72 M (0.40449438 0.59550562)
## 54) smoothness_mean< -2.428454 10 0 B (1.00000000 0.00000000) *
## 55) smoothness_mean>=-2.428454 168 62 M (0.36904762 0.63095238)
## 110) smoothness_mean>=-2.239141 78 36 B (0.53846154 0.46153846) *
## 111) smoothness_mean< -2.239141 90 20 M (0.22222222 0.77777778) *
## 7) smoothness_worst< -1.477976 653 244 M (0.37366003 0.62633997)
## 14) compactness_se>=-4.49319 568 231 M (0.40669014 0.59330986)
## 28) symmetry_worst< -2.052205 89 35 B (0.60674157 0.39325843)
## 56) symmetry_worst>=-2.107807 24 0 B (1.00000000 0.00000000) *
## 57) symmetry_worst< -2.107807 65 30 M (0.46153846 0.53846154)
## 114) compactness_se< -4.170636 11 0 B (1.00000000 0.00000000) *
## 115) compactness_se>=-4.170636 54 19 M (0.35185185 0.64814815) *
## 29) symmetry_worst>=-2.052205 479 177 M (0.36951983 0.63048017)
## 58) symmetry_worst>=-1.98727 427 172 M (0.40281030 0.59718970)
## 116) smoothness_mean>=-2.224699 43 12 B (0.72093023 0.27906977) *
## 117) smoothness_mean< -2.224699 384 141 M (0.36718750 0.63281250) *
## 59) symmetry_worst< -1.98727 52 5 M (0.09615385 0.90384615)
## 118) smoothness_mean< -2.458231 4 0 B (1.00000000 0.00000000) *
## 119) smoothness_mean>=-2.458231 48 1 M (0.02083333 0.97916667) *
## 15) compactness_se< -4.49319 85 13 M (0.15294118 0.84705882)
## 30) smoothness_mean>=-2.295268 3 0 B (1.00000000 0.00000000) *
## 31) smoothness_mean< -2.295268 82 10 M (0.12195122 0.87804878)
## 62) texture_mean< 2.960617 33 8 M (0.24242424 0.75757576)
## 124) texture_mean>=2.930624 6 0 B (1.00000000 0.00000000) *
## 125) texture_mean< 2.930624 27 2 M (0.07407407 0.92592593) *
## 63) texture_mean>=2.960617 49 2 M (0.04081633 0.95918367)
## 126) texture_mean>=3.232565 1 0 B (1.00000000 0.00000000) *
## 127) texture_mean< 3.232565 48 1 M (0.02083333 0.97916667) *
##
## $trees[[84]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 453 B (0.50328947 0.49671053)
## 2) smoothness_mean< -2.216408 798 372 B (0.53383459 0.46616541)
## 4) smoothness_worst>=-1.484675 254 88 B (0.65354331 0.34645669)
## 8) smoothness_worst< -1.372876 242 76 B (0.68595041 0.31404959)
## 16) smoothness_worst< -1.482502 33 0 B (1.00000000 0.00000000) *
## 17) smoothness_worst>=-1.482502 209 76 B (0.63636364 0.36363636)
## 34) smoothness_worst>=-1.478565 187 54 B (0.71122995 0.28877005)
## 68) smoothness_worst< -1.472307 54 1 B (0.98148148 0.01851852) *
## 69) smoothness_worst>=-1.472307 133 53 B (0.60150376 0.39849624) *
## 35) smoothness_worst< -1.478565 22 0 M (0.00000000 1.00000000) *
## 9) smoothness_worst>=-1.372876 12 0 M (0.00000000 1.00000000) *
## 5) smoothness_worst< -1.484675 544 260 M (0.47794118 0.52205882)
## 10) compactness_se>=-4.49319 472 229 B (0.51483051 0.48516949)
## 20) compactness_se< -3.488718 333 138 B (0.58558559 0.41441441)
## 40) symmetry_worst< -2.207519 24 0 B (1.00000000 0.00000000) *
## 41) symmetry_worst>=-2.207519 309 138 B (0.55339806 0.44660194)
## 82) smoothness_worst< -1.556752 159 50 B (0.68553459 0.31446541) *
## 83) smoothness_worst>=-1.556752 150 62 M (0.41333333 0.58666667) *
## 21) compactness_se>=-3.488718 139 48 M (0.34532374 0.65467626)
## 42) smoothness_mean>=-2.224699 8 0 B (1.00000000 0.00000000) *
## 43) smoothness_mean< -2.224699 131 40 M (0.30534351 0.69465649)
## 86) texture_mean< 3.038537 77 32 M (0.41558442 0.58441558) *
## 87) texture_mean>=3.038537 54 8 M (0.14814815 0.85185185) *
## 11) compactness_se< -4.49319 72 17 M (0.23611111 0.76388889)
## 22) compactness_se< -4.706178 13 2 B (0.84615385 0.15384615)
## 44) symmetry_worst< -1.19897 11 0 B (1.00000000 0.00000000) *
## 45) symmetry_worst>=-1.19897 2 0 M (0.00000000 1.00000000) *
## 23) compactness_se>=-4.706178 59 6 M (0.10169492 0.89830508)
## 46) smoothness_mean>=-2.295268 2 0 B (1.00000000 0.00000000) *
## 47) smoothness_mean< -2.295268 57 4 M (0.07017544 0.92982456)
## 94) symmetry_worst>=-1.459568 1 0 B (1.00000000 0.00000000) *
## 95) symmetry_worst< -1.459568 56 3 M (0.05357143 0.94642857) *
## 3) smoothness_mean>=-2.216408 114 33 M (0.28947368 0.71052632)
## 6) smoothness_worst>=-1.426679 37 16 B (0.56756757 0.43243243)
## 12) symmetry_worst< -1.710625 11 0 B (1.00000000 0.00000000) *
## 13) symmetry_worst>=-1.710625 26 10 M (0.38461538 0.61538462)
## 26) texture_mean< 2.688296 11 2 B (0.81818182 0.18181818)
## 52) texture_mean>=2.450874 9 0 B (1.00000000 0.00000000) *
## 53) texture_mean< 2.450874 2 0 M (0.00000000 1.00000000) *
## 27) texture_mean>=2.688296 15 1 M (0.06666667 0.93333333)
## 54) smoothness_worst< -1.388752 3 1 M (0.33333333 0.66666667)
## 108) texture_mean>=3.037093 1 0 B (1.00000000 0.00000000) *
## 109) texture_mean< 3.037093 2 0 M (0.00000000 1.00000000) *
## 55) smoothness_worst>=-1.388752 12 0 M (0.00000000 1.00000000) *
## 7) smoothness_worst< -1.426679 77 12 M (0.15584416 0.84415584)
## 14) smoothness_worst< -1.482898 28 10 M (0.35714286 0.64285714)
## 28) smoothness_worst>=-1.530722 10 1 B (0.90000000 0.10000000)
## 56) smoothness_mean>=-2.213204 9 0 B (1.00000000 0.00000000) *
## 57) smoothness_mean< -2.213204 1 0 M (0.00000000 1.00000000) *
## 29) smoothness_worst< -1.530722 18 1 M (0.05555556 0.94444444)
## 58) texture_mean< 2.820036 1 0 B (1.00000000 0.00000000) *
## 59) texture_mean>=2.820036 17 0 M (0.00000000 1.00000000) *
## 15) smoothness_worst>=-1.482898 49 2 M (0.04081633 0.95918367)
## 30) texture_mean< 2.434062 1 0 B (1.00000000 0.00000000) *
## 31) texture_mean>=2.434062 48 1 M (0.02083333 0.97916667)
## 62) compactness_se< -4.089448 6 1 M (0.16666667 0.83333333)
## 124) texture_mean< 2.892399 1 0 B (1.00000000 0.00000000) *
## 125) texture_mean>=2.892399 5 0 M (0.00000000 1.00000000) *
## 63) compactness_se>=-4.089448 42 0 M (0.00000000 1.00000000) *
##
## $trees[[85]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 441 B (0.51644737 0.48355263)
## 2) smoothness_worst< -1.472307 699 306 B (0.56223176 0.43776824)
## 4) smoothness_worst>=-1.477976 50 3 B (0.94000000 0.06000000)
## 8) texture_mean< 3.069079 47 0 B (1.00000000 0.00000000) *
## 9) texture_mean>=3.069079 3 0 M (0.00000000 1.00000000) *
## 5) smoothness_worst< -1.477976 649 303 B (0.53312789 0.46687211)
## 10) smoothness_worst< -1.482502 621 276 B (0.55555556 0.44444444)
## 20) smoothness_worst>=-1.537883 272 90 B (0.66911765 0.33088235)
## 40) compactness_se< -2.985939 256 76 B (0.70312500 0.29687500)
## 80) texture_worst< 4.521311 63 5 B (0.92063492 0.07936508) *
## 81) texture_worst>=4.521311 193 71 B (0.63212435 0.36787565) *
## 41) compactness_se>=-2.985939 16 2 M (0.12500000 0.87500000)
## 82) smoothness_mean< -2.310579 2 0 B (1.00000000 0.00000000) *
## 83) smoothness_mean>=-2.310579 14 0 M (0.00000000 1.00000000) *
## 21) smoothness_worst< -1.537883 349 163 M (0.46704871 0.53295129)
## 42) texture_worst>=4.683744 145 50 B (0.65517241 0.34482759)
## 84) smoothness_worst< -1.647098 20 0 B (1.00000000 0.00000000) *
## 85) smoothness_worst>=-1.647098 125 50 B (0.60000000 0.40000000) *
## 43) texture_worst< 4.683744 204 68 M (0.33333333 0.66666667)
## 86) texture_mean< 2.976803 118 57 M (0.48305085 0.51694915) *
## 87) texture_mean>=2.976803 86 11 M (0.12790698 0.87209302) *
## 11) smoothness_worst>=-1.482502 28 1 M (0.03571429 0.96428571)
## 22) compactness_se< -4.290267 1 0 B (1.00000000 0.00000000) *
## 23) compactness_se>=-4.290267 27 0 M (0.00000000 1.00000000) *
## 3) smoothness_worst>=-1.472307 213 78 M (0.36619718 0.63380282)
## 6) compactness_se< -4.02632 47 16 B (0.65957447 0.34042553)
## 12) smoothness_worst>=-1.456873 34 6 B (0.82352941 0.17647059)
## 24) symmetry_worst< -1.136473 32 4 B (0.87500000 0.12500000)
## 48) compactness_se>=-4.186419 19 0 B (1.00000000 0.00000000) *
## 49) compactness_se< -4.186419 13 4 B (0.69230769 0.30769231)
## 98) compactness_se< -4.224437 11 2 B (0.81818182 0.18181818) *
## 99) compactness_se>=-4.224437 2 0 M (0.00000000 1.00000000) *
## 25) symmetry_worst>=-1.136473 2 0 M (0.00000000 1.00000000) *
## 13) smoothness_worst< -1.456873 13 3 M (0.23076923 0.76923077)
## 26) texture_mean< 2.901883 3 0 B (1.00000000 0.00000000) *
## 27) texture_mean>=2.901883 10 0 M (0.00000000 1.00000000) *
## 7) compactness_se>=-4.02632 166 47 M (0.28313253 0.71686747)
## 14) symmetry_worst< -1.905461 22 5 B (0.77272727 0.22727273)
## 28) texture_worst< 4.851322 17 0 B (1.00000000 0.00000000) *
## 29) texture_worst>=4.851322 5 0 M (0.00000000 1.00000000) *
## 15) symmetry_worst>=-1.905461 144 30 M (0.20833333 0.79166667)
## 30) compactness_se>=-3.479267 59 23 M (0.38983051 0.61016949)
## 60) smoothness_mean< -2.359377 8 0 B (1.00000000 0.00000000) *
## 61) smoothness_mean>=-2.359377 51 15 M (0.29411765 0.70588235)
## 122) smoothness_mean>=-2.239141 29 14 B (0.51724138 0.48275862) *
## 123) smoothness_mean< -2.239141 22 0 M (0.00000000 1.00000000) *
## 31) compactness_se< -3.479267 85 7 M (0.08235294 0.91764706)
## 62) texture_worst< 4.110502 18 6 M (0.33333333 0.66666667)
## 124) texture_mean>=2.618802 5 0 B (1.00000000 0.00000000) *
## 125) texture_mean< 2.618802 13 1 M (0.07692308 0.92307692) *
## 63) texture_worst>=4.110502 67 1 M (0.01492537 0.98507463)
## 126) texture_worst< 4.30106 7 1 M (0.14285714 0.85714286) *
## 127) texture_worst>=4.30106 60 0 M (0.00000000 1.00000000) *
##
## $trees[[86]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 442 M (0.48464912 0.51535088)
## 2) smoothness_mean< -2.209551 812 393 B (0.51600985 0.48399015)
## 4) smoothness_mean< -2.413908 255 99 B (0.61176471 0.38823529)
## 8) texture_mean< 2.963209 111 25 B (0.77477477 0.22522523)
## 16) compactness_se>=-4.688717 103 19 B (0.81553398 0.18446602)
## 32) symmetry_worst>=-1.748651 51 2 B (0.96078431 0.03921569)
## 64) smoothness_worst< -1.470029 49 0 B (1.00000000 0.00000000) *
## 65) smoothness_worst>=-1.470029 2 0 M (0.00000000 1.00000000) *
## 33) symmetry_worst< -1.748651 52 17 B (0.67307692 0.32692308)
## 66) symmetry_worst< -1.818723 40 5 B (0.87500000 0.12500000) *
## 67) symmetry_worst>=-1.818723 12 0 M (0.00000000 1.00000000) *
## 17) compactness_se< -4.688717 8 2 M (0.25000000 0.75000000)
## 34) compactness_se< -4.694501 2 0 B (1.00000000 0.00000000) *
## 35) compactness_se>=-4.694501 6 0 M (0.00000000 1.00000000) *
## 9) texture_mean>=2.963209 144 70 M (0.48611111 0.51388889)
## 18) texture_mean>=3.015024 107 40 B (0.62616822 0.37383178)
## 36) texture_mean< 3.071535 27 2 B (0.92592593 0.07407407)
## 72) symmetry_worst>=-2.196711 25 0 B (1.00000000 0.00000000) *
## 73) symmetry_worst< -2.196711 2 0 M (0.00000000 1.00000000) *
## 37) texture_mean>=3.071535 80 38 B (0.52500000 0.47500000)
## 74) texture_mean>=3.198061 29 5 B (0.82758621 0.17241379) *
## 75) texture_mean< 3.198061 51 18 M (0.35294118 0.64705882) *
## 19) texture_mean< 3.015024 37 3 M (0.08108108 0.91891892)
## 38) compactness_se>=-3.794131 2 0 B (1.00000000 0.00000000) *
## 39) compactness_se< -3.794131 35 1 M (0.02857143 0.97142857)
## 78) compactness_se< -4.803674 1 0 B (1.00000000 0.00000000) *
## 79) compactness_se>=-4.803674 34 0 M (0.00000000 1.00000000) *
## 5) smoothness_mean>=-2.413908 557 263 M (0.47217235 0.52782765)
## 10) smoothness_mean>=-2.404673 502 246 B (0.50996016 0.49003984)
## 20) texture_worst< 4.818867 411 184 B (0.55231144 0.44768856)
## 40) texture_worst>=4.751723 42 1 B (0.97619048 0.02380952)
## 80) texture_mean>=2.936904 41 0 B (1.00000000 0.00000000) *
## 81) texture_mean< 2.936904 1 0 M (0.00000000 1.00000000) *
## 41) texture_worst< 4.751723 369 183 B (0.50406504 0.49593496)
## 82) texture_worst< 4.682677 325 145 B (0.55384615 0.44615385) *
## 83) texture_worst>=4.682677 44 6 M (0.13636364 0.86363636) *
## 21) texture_worst>=4.818867 91 29 M (0.31868132 0.68131868)
## 42) symmetry_worst< -2.207988 10 0 B (1.00000000 0.00000000) *
## 43) symmetry_worst>=-2.207988 81 19 M (0.23456790 0.76543210)
## 86) symmetry_worst>=-1.733268 39 18 M (0.46153846 0.53846154) *
## 87) symmetry_worst< -1.733268 42 1 M (0.02380952 0.97619048) *
## 11) smoothness_mean< -2.404673 55 7 M (0.12727273 0.87272727)
## 22) smoothness_worst< -1.602866 3 0 B (1.00000000 0.00000000) *
## 23) smoothness_worst>=-1.602866 52 4 M (0.07692308 0.92307692)
## 46) symmetry_worst>=-1.685469 11 4 M (0.36363636 0.63636364)
## 92) texture_mean< 3.023311 4 0 B (1.00000000 0.00000000) *
## 93) texture_mean>=3.023311 7 0 M (0.00000000 1.00000000) *
## 47) symmetry_worst< -1.685469 41 0 M (0.00000000 1.00000000) *
## 3) smoothness_mean>=-2.209551 100 23 M (0.23000000 0.77000000)
## 6) texture_worst< 3.781157 11 0 B (1.00000000 0.00000000) *
## 7) texture_worst>=3.781157 89 12 M (0.13483146 0.86516854)
## 14) compactness_se< -4.024648 5 1 B (0.80000000 0.20000000)
## 28) texture_mean< 3.047521 4 0 B (1.00000000 0.00000000) *
## 29) texture_mean>=3.047521 1 0 M (0.00000000 1.00000000) *
## 15) compactness_se>=-4.024648 84 8 M (0.09523810 0.90476190)
## 30) symmetry_worst< -1.653707 21 6 M (0.28571429 0.71428571)
## 60) texture_mean< 2.909334 4 0 B (1.00000000 0.00000000) *
## 61) texture_mean>=2.909334 17 2 M (0.11764706 0.88235294)
## 122) smoothness_mean>=-2.120284 2 0 B (1.00000000 0.00000000) *
## 123) smoothness_mean< -2.120284 15 0 M (0.00000000 1.00000000) *
## 31) symmetry_worst>=-1.653707 63 2 M (0.03174603 0.96825397)
## 62) smoothness_worst< -1.534923 1 0 B (1.00000000 0.00000000) *
## 63) smoothness_worst>=-1.534923 62 1 M (0.01612903 0.98387097)
## 126) smoothness_worst>=-1.333822 3 1 M (0.33333333 0.66666667) *
## 127) smoothness_worst< -1.333822 59 0 M (0.00000000 1.00000000) *
##
## $trees[[87]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 449 M (0.49232456 0.50767544)
## 2) smoothness_worst< -1.647098 37 3 B (0.91891892 0.08108108)
## 4) texture_worst>=4.595702 22 0 B (1.00000000 0.00000000) *
## 5) texture_worst< 4.595702 15 3 B (0.80000000 0.20000000)
## 10) texture_worst< 4.563505 13 1 B (0.92307692 0.07692308)
## 20) texture_mean< 3.075433 12 0 B (1.00000000 0.00000000) *
## 21) texture_mean>=3.075433 1 0 M (0.00000000 1.00000000) *
## 11) texture_worst>=4.563505 2 0 M (0.00000000 1.00000000) *
## 3) smoothness_worst>=-1.647098 875 415 M (0.47428571 0.52571429)
## 6) smoothness_mean>=-2.496541 837 411 M (0.49103943 0.50896057)
## 12) smoothness_mean< -2.332581 414 178 B (0.57004831 0.42995169)
## 24) texture_mean< 2.976294 192 54 B (0.71875000 0.28125000)
## 48) smoothness_worst< -1.452493 184 46 B (0.75000000 0.25000000)
## 96) smoothness_mean>=-2.354774 35 0 B (1.00000000 0.00000000) *
## 97) smoothness_mean< -2.354774 149 46 B (0.69127517 0.30872483) *
## 49) smoothness_worst>=-1.452493 8 0 M (0.00000000 1.00000000) *
## 25) texture_mean>=2.976294 222 98 M (0.44144144 0.55855856)
## 50) texture_worst>=4.753106 129 47 B (0.63565891 0.36434109)
## 100) symmetry_worst>=-1.74642 75 10 B (0.86666667 0.13333333) *
## 101) symmetry_worst< -1.74642 54 17 M (0.31481481 0.68518519) *
## 51) texture_worst< 4.753106 93 16 M (0.17204301 0.82795699)
## 102) smoothness_worst< -1.610979 12 3 B (0.75000000 0.25000000) *
## 103) smoothness_worst>=-1.610979 81 7 M (0.08641975 0.91358025) *
## 13) smoothness_mean>=-2.332581 423 175 M (0.41371158 0.58628842)
## 26) symmetry_worst< -1.839065 105 34 B (0.67619048 0.32380952)
## 52) smoothness_worst>=-1.567424 89 21 B (0.76404494 0.23595506)
## 104) compactness_se< -3.02233 84 16 B (0.80952381 0.19047619) *
## 105) compactness_se>=-3.02233 5 0 M (0.00000000 1.00000000) *
## 53) smoothness_worst< -1.567424 16 3 M (0.18750000 0.81250000)
## 106) compactness_se< -3.863524 7 3 M (0.42857143 0.57142857) *
## 107) compactness_se>=-3.863524 9 0 M (0.00000000 1.00000000) *
## 27) symmetry_worst>=-1.839065 318 104 M (0.32704403 0.67295597)
## 54) symmetry_worst>=-1.781339 257 101 M (0.39299611 0.60700389)
## 108) compactness_se< -3.294139 198 92 M (0.46464646 0.53535354) *
## 109) compactness_se>=-3.294139 59 9 M (0.15254237 0.84745763) *
## 55) symmetry_worst< -1.781339 61 3 M (0.04918033 0.95081967)
## 110) texture_worst< 4.216838 3 0 B (1.00000000 0.00000000) *
## 111) texture_worst>=4.216838 58 0 M (0.00000000 1.00000000) *
## 7) smoothness_mean< -2.496541 38 4 M (0.10526316 0.89473684)
## 14) smoothness_worst>=-1.570144 3 0 B (1.00000000 0.00000000) *
## 15) smoothness_worst< -1.570144 35 1 M (0.02857143 0.97142857)
## 30) compactness_se< -4.899363 1 0 B (1.00000000 0.00000000) *
## 31) compactness_se>=-4.899363 34 0 M (0.00000000 1.00000000) *
##
## $trees[[88]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 436 M (0.47807018 0.52192982)
## 2) smoothness_mean< -2.413908 234 91 B (0.61111111 0.38888889)
## 4) texture_worst< 4.961576 162 50 B (0.69135802 0.30864198)
## 8) texture_worst>=4.528519 101 18 B (0.82178218 0.17821782)
## 16) compactness_se>=-4.663537 94 12 B (0.87234043 0.12765957)
## 32) texture_mean< 3.172196 85 8 B (0.90588235 0.09411765)
## 64) symmetry_worst< -1.3705 84 7 B (0.91666667 0.08333333) *
## 65) symmetry_worst>=-1.3705 1 0 M (0.00000000 1.00000000) *
## 33) texture_mean>=3.172196 9 4 B (0.55555556 0.44444444)
## 66) texture_mean>=3.176386 5 0 B (1.00000000 0.00000000) *
## 67) texture_mean< 3.176386 4 0 M (0.00000000 1.00000000) *
## 17) compactness_se< -4.663537 7 1 M (0.14285714 0.85714286)
## 34) compactness_se< -4.803674 1 0 B (1.00000000 0.00000000) *
## 35) compactness_se>=-4.803674 6 0 M (0.00000000 1.00000000) *
## 9) texture_worst< 4.528519 61 29 M (0.47540984 0.52459016)
## 18) texture_mean< 2.963351 39 11 B (0.71794872 0.28205128)
## 36) texture_worst>=3.981964 31 4 B (0.87096774 0.12903226)
## 72) texture_mean< 2.869285 20 0 B (1.00000000 0.00000000) *
## 73) texture_mean>=2.869285 11 4 B (0.63636364 0.36363636) *
## 37) texture_worst< 3.981964 8 1 M (0.12500000 0.87500000)
## 74) texture_mean< 2.764104 1 0 B (1.00000000 0.00000000) *
## 75) texture_mean>=2.764104 7 0 M (0.00000000 1.00000000) *
## 19) texture_mean>=2.963351 22 1 M (0.04545455 0.95454545)
## 38) compactness_se< -4.501722 1 0 B (1.00000000 0.00000000) *
## 39) compactness_se>=-4.501722 21 0 M (0.00000000 1.00000000) *
## 5) texture_worst>=4.961576 72 31 M (0.43055556 0.56944444)
## 10) symmetry_worst>=-1.857231 39 12 B (0.69230769 0.30769231)
## 20) symmetry_worst< -1.541072 27 1 B (0.96296296 0.03703704)
## 40) compactness_se>=-4.645782 25 0 B (1.00000000 0.00000000) *
## 41) compactness_se< -4.645782 2 1 B (0.50000000 0.50000000)
## 82) texture_mean< 3.083637 1 0 B (1.00000000 0.00000000) *
## 83) texture_mean>=3.083637 1 0 M (0.00000000 1.00000000) *
## 21) symmetry_worst>=-1.541072 12 1 M (0.08333333 0.91666667)
## 42) smoothness_mean< -2.540124 1 0 B (1.00000000 0.00000000) *
## 43) smoothness_mean>=-2.540124 11 0 M (0.00000000 1.00000000) *
## 11) symmetry_worst< -1.857231 33 4 M (0.12121212 0.87878788)
## 22) texture_worst>=5.222912 4 0 B (1.00000000 0.00000000) *
## 23) texture_worst< 5.222912 29 0 M (0.00000000 1.00000000) *
## 3) smoothness_mean>=-2.413908 678 293 M (0.43215339 0.56784661)
## 6) symmetry_worst< -2.193154 42 12 B (0.71428571 0.28571429)
## 12) smoothness_mean< -2.266808 35 6 B (0.82857143 0.17142857)
## 24) compactness_se>=-4.398122 31 2 B (0.93548387 0.06451613)
## 48) compactness_se< -2.576401 30 1 B (0.96666667 0.03333333)
## 96) symmetry_worst< -2.202388 27 0 B (1.00000000 0.00000000) *
## 97) symmetry_worst>=-2.202388 3 1 B (0.66666667 0.33333333) *
## 49) compactness_se>=-2.576401 1 0 M (0.00000000 1.00000000) *
## 25) compactness_se< -4.398122 4 0 M (0.00000000 1.00000000) *
## 13) smoothness_mean>=-2.266808 7 1 M (0.14285714 0.85714286)
## 26) texture_mean< 2.864879 1 0 B (1.00000000 0.00000000) *
## 27) texture_mean>=2.864879 6 0 M (0.00000000 1.00000000) *
## 7) symmetry_worst>=-2.193154 636 263 M (0.41352201 0.58647799)
## 14) symmetry_worst< -1.072749 618 263 M (0.42556634 0.57443366)
## 28) compactness_se>=-2.622717 10 0 B (1.00000000 0.00000000) *
## 29) compactness_se< -2.622717 608 253 M (0.41611842 0.58388158)
## 58) texture_worst< 4.168738 51 17 B (0.66666667 0.33333333)
## 116) texture_mean>=2.515298 40 7 B (0.82500000 0.17500000) *
## 117) texture_mean< 2.515298 11 1 M (0.09090909 0.90909091) *
## 59) texture_worst>=4.168738 557 219 M (0.39317774 0.60682226)
## 118) texture_mean>=2.899221 373 174 M (0.46648794 0.53351206) *
## 119) texture_mean< 2.899221 184 45 M (0.24456522 0.75543478) *
## 15) symmetry_worst>=-1.072749 18 0 M (0.00000000 1.00000000) *
##
## $trees[[89]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 399 B (0.56250000 0.43750000)
## 2) compactness_se< -3.488718 678 269 B (0.60324484 0.39675516)
## 4) compactness_se>=-3.494301 38 0 B (1.00000000 0.00000000) *
## 5) compactness_se< -3.494301 640 269 B (0.57968750 0.42031250)
## 10) smoothness_mean>=-2.311929 259 79 B (0.69498069 0.30501931)
## 20) compactness_se< -3.658265 213 44 B (0.79342723 0.20657277)
## 40) texture_worst< 5.031275 205 36 B (0.82439024 0.17560976)
## 80) symmetry_worst>=-1.926862 180 23 B (0.87222222 0.12777778) *
## 81) symmetry_worst< -1.926862 25 12 M (0.48000000 0.52000000) *
## 41) texture_worst>=5.031275 8 0 M (0.00000000 1.00000000) *
## 21) compactness_se>=-3.658265 46 11 M (0.23913043 0.76086957)
## 42) symmetry_worst< -1.834844 16 5 B (0.68750000 0.31250000)
## 84) symmetry_worst>=-2.311448 11 0 B (1.00000000 0.00000000) *
## 85) symmetry_worst< -2.311448 5 0 M (0.00000000 1.00000000) *
## 43) symmetry_worst>=-1.834844 30 0 M (0.00000000 1.00000000) *
## 11) smoothness_mean< -2.311929 381 190 B (0.50131234 0.49868766)
## 22) smoothness_mean< -2.366751 278 111 B (0.60071942 0.39928058)
## 44) compactness_se>=-4.098964 143 35 B (0.75524476 0.24475524)
## 88) smoothness_worst< -1.51761 104 14 B (0.86538462 0.13461538) *
## 89) smoothness_worst>=-1.51761 39 18 M (0.46153846 0.53846154) *
## 45) compactness_se< -4.098964 135 59 M (0.43703704 0.56296296)
## 90) smoothness_worst< -1.556321 80 35 B (0.56250000 0.43750000) *
## 91) smoothness_worst>=-1.556321 55 14 M (0.25454545 0.74545455) *
## 23) smoothness_mean>=-2.366751 103 24 M (0.23300971 0.76699029)
## 46) symmetry_worst< -1.995212 25 12 B (0.52000000 0.48000000)
## 92) symmetry_worst>=-2.121358 10 0 B (1.00000000 0.00000000) *
## 93) symmetry_worst< -2.121358 15 3 M (0.20000000 0.80000000) *
## 47) symmetry_worst>=-1.995212 78 11 M (0.14102564 0.85897436)
## 94) compactness_se< -4.534889 3 0 B (1.00000000 0.00000000) *
## 95) compactness_se>=-4.534889 75 8 M (0.10666667 0.89333333) *
## 3) compactness_se>=-3.488718 234 104 M (0.44444444 0.55555556)
## 6) symmetry_worst< -1.317839 203 101 M (0.49753695 0.50246305)
## 12) compactness_se>=-3.476676 188 87 B (0.53723404 0.46276596)
## 24) texture_worst< 5.016194 176 75 B (0.57386364 0.42613636)
## 48) smoothness_mean< -2.385259 47 9 B (0.80851064 0.19148936)
## 96) symmetry_worst< -1.636934 40 3 B (0.92500000 0.07500000) *
## 97) symmetry_worst>=-1.636934 7 1 M (0.14285714 0.85714286) *
## 49) smoothness_mean>=-2.385259 129 63 M (0.48837209 0.51162791)
## 98) symmetry_worst>=-1.471051 20 2 B (0.90000000 0.10000000) *
## 99) symmetry_worst< -1.471051 109 45 M (0.41284404 0.58715596) *
## 25) texture_worst>=5.016194 12 0 M (0.00000000 1.00000000) *
## 13) compactness_se< -3.476676 15 0 M (0.00000000 1.00000000) *
## 7) symmetry_worst>=-1.317839 31 3 M (0.09677419 0.90322581)
## 14) compactness_se>=-2.646661 6 3 B (0.50000000 0.50000000)
## 28) texture_mean< 2.915767 3 0 B (1.00000000 0.00000000) *
## 29) texture_mean>=2.915767 3 0 M (0.00000000 1.00000000) *
## 15) compactness_se< -2.646661 25 0 M (0.00000000 1.00000000) *
##
## $trees[[90]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 424 B (0.53508772 0.46491228)
## 2) symmetry_worst< -1.619058 630 261 B (0.58571429 0.41428571)
## 4) smoothness_worst>=-1.480334 155 41 B (0.73548387 0.26451613)
## 8) texture_worst< 5.032208 142 30 B (0.78873239 0.21126761)
## 16) smoothness_mean>=-2.354774 130 21 B (0.83846154 0.16153846)
## 32) compactness_se< -3.294139 119 13 B (0.89075630 0.10924370)
## 64) texture_mean< 2.932513 61 0 B (1.00000000 0.00000000) *
## 65) texture_mean>=2.932513 58 13 B (0.77586207 0.22413793) *
## 33) compactness_se>=-3.294139 11 3 M (0.27272727 0.72727273)
## 66) texture_mean< 2.781176 3 0 B (1.00000000 0.00000000) *
## 67) texture_mean>=2.781176 8 0 M (0.00000000 1.00000000) *
## 17) smoothness_mean< -2.354774 12 3 M (0.25000000 0.75000000)
## 34) smoothness_mean< -2.396647 3 0 B (1.00000000 0.00000000) *
## 35) smoothness_mean>=-2.396647 9 0 M (0.00000000 1.00000000) *
## 9) texture_worst>=5.032208 13 2 M (0.15384615 0.84615385)
## 18) texture_mean< 2.955358 2 0 B (1.00000000 0.00000000) *
## 19) texture_mean>=2.955358 11 0 M (0.00000000 1.00000000) *
## 5) smoothness_worst< -1.480334 475 220 B (0.53684211 0.46315789)
## 10) smoothness_worst< -1.482107 459 204 B (0.55555556 0.44444444)
## 20) smoothness_worst>=-1.49223 32 3 B (0.90625000 0.09375000)
## 40) compactness_se>=-4.133152 29 0 B (1.00000000 0.00000000) *
## 41) compactness_se< -4.133152 3 0 M (0.00000000 1.00000000) *
## 21) smoothness_worst< -1.49223 427 201 B (0.52927400 0.47072600)
## 42) smoothness_worst< -1.501879 404 179 B (0.55693069 0.44306931)
## 84) texture_mean>=2.717337 389 165 B (0.57583548 0.42416452) *
## 85) texture_mean< 2.717337 15 1 M (0.06666667 0.93333333) *
## 43) smoothness_worst>=-1.501879 23 1 M (0.04347826 0.95652174)
## 86) texture_mean< 2.835488 1 0 B (1.00000000 0.00000000) *
## 87) texture_mean>=2.835488 22 0 M (0.00000000 1.00000000) *
## 11) smoothness_worst>=-1.482107 16 0 M (0.00000000 1.00000000) *
## 3) symmetry_worst>=-1.619058 282 119 M (0.42198582 0.57801418)
## 6) compactness_se>=-3.759737 149 64 B (0.57046980 0.42953020)
## 12) smoothness_mean< -2.216408 121 39 B (0.67768595 0.32231405)
## 24) smoothness_mean>=-2.230731 29 0 B (1.00000000 0.00000000) *
## 25) smoothness_mean< -2.230731 92 39 B (0.57608696 0.42391304)
## 50) smoothness_mean< -2.298098 70 18 B (0.74285714 0.25714286)
## 100) smoothness_worst>=-1.513087 46 5 B (0.89130435 0.10869565) *
## 101) smoothness_worst< -1.513087 24 11 M (0.45833333 0.54166667) *
## 51) smoothness_mean>=-2.298098 22 1 M (0.04545455 0.95454545)
## 102) texture_mean< 2.622235 1 0 B (1.00000000 0.00000000) *
## 103) texture_mean>=2.622235 21 0 M (0.00000000 1.00000000) *
## 13) smoothness_mean>=-2.216408 28 3 M (0.10714286 0.89285714)
## 26) compactness_se< -3.646366 3 0 B (1.00000000 0.00000000) *
## 27) compactness_se>=-3.646366 25 0 M (0.00000000 1.00000000) *
## 7) compactness_se< -3.759737 133 34 M (0.25563910 0.74436090)
## 14) texture_mean< 2.956197 54 25 M (0.46296296 0.53703704)
## 28) smoothness_mean< -2.295113 23 6 B (0.73913043 0.26086957)
## 56) texture_mean< 2.919658 16 0 B (1.00000000 0.00000000) *
## 57) texture_mean>=2.919658 7 1 M (0.14285714 0.85714286)
## 114) texture_mean>=2.92664 1 0 B (1.00000000 0.00000000) *
## 115) texture_mean< 2.92664 6 0 M (0.00000000 1.00000000) *
## 29) smoothness_mean>=-2.295113 31 8 M (0.25806452 0.74193548)
## 58) texture_mean>=2.912011 7 2 B (0.71428571 0.28571429)
## 116) smoothness_mean< -2.200472 5 0 B (1.00000000 0.00000000) *
## 117) smoothness_mean>=-2.200472 2 0 M (0.00000000 1.00000000) *
## 59) texture_mean< 2.912011 24 3 M (0.12500000 0.87500000)
## 118) smoothness_worst>=-1.425992 2 0 B (1.00000000 0.00000000) *
## 119) smoothness_worst< -1.425992 22 1 M (0.04545455 0.95454545) *
## 15) texture_mean>=2.956197 79 9 M (0.11392405 0.88607595)
## 30) compactness_se< -4.291103 15 7 B (0.53333333 0.46666667)
## 60) symmetry_worst< -1.41032 9 1 B (0.88888889 0.11111111)
## 120) smoothness_worst< -1.43601 8 0 B (1.00000000 0.00000000) *
## 121) smoothness_worst>=-1.43601 1 0 M (0.00000000 1.00000000) *
## 61) symmetry_worst>=-1.41032 6 0 M (0.00000000 1.00000000) *
## 31) compactness_se>=-4.291103 64 1 M (0.01562500 0.98437500)
## 62) smoothness_worst>=-1.433164 7 1 M (0.14285714 0.85714286)
## 124) smoothness_mean< -2.265514 1 0 B (1.00000000 0.00000000) *
## 125) smoothness_mean>=-2.265514 6 0 M (0.00000000 1.00000000) *
## 63) smoothness_worst< -1.433164 57 0 M (0.00000000 1.00000000) *
##
## $trees[[91]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 404 M (0.44298246 0.55701754)
## 2) compactness_se>=-4.49319 817 389 M (0.47613219 0.52386781)
## 4) symmetry_worst< -1.427209 746 370 M (0.49597855 0.50402145)
## 8) symmetry_worst>=-1.749963 317 126 B (0.60252366 0.39747634)
## 16) symmetry_worst< -1.713275 57 5 B (0.91228070 0.08771930)
## 32) texture_mean< 3.407548 54 2 B (0.96296296 0.03703704)
## 64) smoothness_mean< -2.223835 51 0 B (1.00000000 0.00000000) *
## 65) smoothness_mean>=-2.223835 3 1 M (0.33333333 0.66666667) *
## 33) texture_mean>=3.407548 3 0 M (0.00000000 1.00000000) *
## 17) symmetry_worst>=-1.713275 260 121 B (0.53461538 0.46538462)
## 34) texture_mean< 2.925843 87 19 B (0.78160920 0.21839080)
## 68) smoothness_mean< -2.081877 80 12 B (0.85000000 0.15000000) *
## 69) smoothness_mean>=-2.081877 7 0 M (0.00000000 1.00000000) *
## 35) texture_mean>=2.925843 173 71 M (0.41040462 0.58959538)
## 70) compactness_se>=-3.863738 107 50 B (0.53271028 0.46728972) *
## 71) compactness_se< -3.863738 66 14 M (0.21212121 0.78787879) *
## 9) symmetry_worst< -1.749963 429 179 M (0.41724942 0.58275058)
## 18) smoothness_mean>=-2.317597 159 67 B (0.57861635 0.42138365)
## 36) symmetry_worst< -1.759228 144 52 B (0.63888889 0.36111111)
## 72) compactness_se< -3.734237 70 12 B (0.82857143 0.17142857) *
## 73) compactness_se>=-3.734237 74 34 M (0.45945946 0.54054054) *
## 37) symmetry_worst>=-1.759228 15 0 M (0.00000000 1.00000000) *
## 19) smoothness_mean< -2.317597 270 87 M (0.32222222 0.67777778)
## 38) symmetry_worst< -1.815934 181 83 M (0.45856354 0.54143646)
## 76) symmetry_worst>=-1.88003 30 2 B (0.93333333 0.06666667) *
## 77) symmetry_worst< -1.88003 151 55 M (0.36423841 0.63576159) *
## 39) symmetry_worst>=-1.815934 89 4 M (0.04494382 0.95505618)
## 78) smoothness_mean< -2.518446 1 0 B (1.00000000 0.00000000) *
## 79) smoothness_mean>=-2.518446 88 3 M (0.03409091 0.96590909) *
## 5) symmetry_worst>=-1.427209 71 19 M (0.26760563 0.73239437)
## 10) smoothness_worst< -1.497484 15 4 B (0.73333333 0.26666667)
## 20) smoothness_mean>=-2.372291 11 0 B (1.00000000 0.00000000) *
## 21) smoothness_mean< -2.372291 4 0 M (0.00000000 1.00000000) *
## 11) smoothness_worst>=-1.497484 56 8 M (0.14285714 0.85714286)
## 22) smoothness_mean>=-2.231196 19 8 M (0.42105263 0.57894737)
## 44) smoothness_mean< -2.217831 7 0 B (1.00000000 0.00000000) *
## 45) smoothness_mean>=-2.217831 12 1 M (0.08333333 0.91666667)
## 90) texture_mean< 2.745901 2 1 B (0.50000000 0.50000000) *
## 91) texture_mean>=2.745901 10 0 M (0.00000000 1.00000000) *
## 23) smoothness_mean< -2.231196 37 0 M (0.00000000 1.00000000) *
## 3) compactness_se< -4.49319 95 15 M (0.15789474 0.84210526)
## 6) compactness_se< -4.705732 7 1 B (0.85714286 0.14285714)
## 12) texture_mean< 2.952113 6 0 B (1.00000000 0.00000000) *
## 13) texture_mean>=2.952113 1 0 M (0.00000000 1.00000000) *
## 7) compactness_se>=-4.705732 88 9 M (0.10227273 0.89772727)
## 14) texture_worst>=5.185666 2 0 B (1.00000000 0.00000000) *
## 15) texture_worst< 5.185666 86 7 M (0.08139535 0.91860465)
## 30) texture_mean>=3.232565 1 0 B (1.00000000 0.00000000) *
## 31) texture_mean< 3.232565 85 6 M (0.07058824 0.92941176)
## 62) smoothness_mean< -2.572721 1 0 B (1.00000000 0.00000000) *
## 63) smoothness_mean>=-2.572721 84 5 M (0.05952381 0.94047619)
## 126) texture_worst< 4.800175 31 5 M (0.16129032 0.83870968) *
## 127) texture_worst>=4.800175 53 0 M (0.00000000 1.00000000) *
##
## $trees[[92]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 441 M (0.48355263 0.51644737)
## 2) symmetry_worst< -2.052205 118 38 B (0.67796610 0.32203390)
## 4) symmetry_worst>=-2.107807 24 0 B (1.00000000 0.00000000) *
## 5) symmetry_worst< -2.107807 94 38 B (0.59574468 0.40425532)
## 10) texture_mean< 2.883577 11 0 B (1.00000000 0.00000000) *
## 11) texture_mean>=2.883577 83 38 B (0.54216867 0.45783133)
## 22) texture_worst>=4.7448 37 10 B (0.72972973 0.27027027)
## 44) smoothness_mean< -2.280871 33 6 B (0.81818182 0.18181818)
## 88) texture_mean< 3.326618 27 2 B (0.92592593 0.07407407) *
## 89) texture_mean>=3.326618 6 2 M (0.33333333 0.66666667) *
## 45) smoothness_mean>=-2.280871 4 0 M (0.00000000 1.00000000) *
## 23) texture_worst< 4.7448 46 18 M (0.39130435 0.60869565)
## 46) smoothness_mean>=-2.30269 7 0 B (1.00000000 0.00000000) *
## 47) smoothness_mean< -2.30269 39 11 M (0.28205128 0.71794872)
## 94) smoothness_worst< -1.614216 17 7 B (0.58823529 0.41176471) *
## 95) smoothness_worst>=-1.614216 22 1 M (0.04545455 0.95454545) *
## 3) symmetry_worst>=-2.052205 794 361 M (0.45465995 0.54534005)
## 6) smoothness_worst>=-1.568787 608 299 B (0.50822368 0.49177632)
## 12) smoothness_worst< -1.559144 32 0 B (1.00000000 0.00000000) *
## 13) smoothness_worst>=-1.559144 576 277 M (0.48090278 0.51909722)
## 26) symmetry_worst>=-1.982941 515 248 B (0.51844660 0.48155340)
## 52) smoothness_mean< -2.413908 68 16 B (0.76470588 0.23529412)
## 104) texture_worst< 5.003123 56 7 B (0.87500000 0.12500000) *
## 105) texture_worst>=5.003123 12 3 M (0.25000000 0.75000000) *
## 53) smoothness_mean>=-2.413908 447 215 M (0.48098434 0.51901566)
## 106) smoothness_worst>=-1.533868 406 196 B (0.51724138 0.48275862) *
## 107) smoothness_worst< -1.533868 41 5 M (0.12195122 0.87804878) *
## 27) symmetry_worst< -1.982941 61 10 M (0.16393443 0.83606557)
## 54) smoothness_worst>=-1.453469 5 0 B (1.00000000 0.00000000) *
## 55) smoothness_worst< -1.453469 56 5 M (0.08928571 0.91071429)
## 110) texture_mean< 2.841101 3 0 B (1.00000000 0.00000000) *
## 111) texture_mean>=2.841101 53 2 M (0.03773585 0.96226415) *
## 7) smoothness_worst< -1.568787 186 52 M (0.27956989 0.72043011)
## 14) smoothness_worst< -1.658238 9 0 B (1.00000000 0.00000000) *
## 15) smoothness_worst>=-1.658238 177 43 M (0.24293785 0.75706215)
## 30) texture_worst>=4.683744 73 30 M (0.41095890 0.58904110)
## 60) smoothness_mean>=-2.472257 28 4 B (0.85714286 0.14285714)
## 120) texture_mean< 3.194865 24 0 B (1.00000000 0.00000000) *
## 121) texture_mean>=3.194865 4 0 M (0.00000000 1.00000000) *
## 61) smoothness_mean< -2.472257 45 6 M (0.13333333 0.86666667)
## 122) compactness_se< -4.938351 3 0 B (1.00000000 0.00000000) *
## 123) compactness_se>=-4.938351 42 3 M (0.07142857 0.92857143) *
## 31) texture_worst< 4.683744 104 13 M (0.12500000 0.87500000)
## 62) compactness_se< -4.387578 4 0 B (1.00000000 0.00000000) *
## 63) compactness_se>=-4.387578 100 9 M (0.09000000 0.91000000)
## 126) texture_mean< 2.67759 2 0 B (1.00000000 0.00000000) *
## 127) texture_mean>=2.67759 98 7 M (0.07142857 0.92857143) *
##
## $trees[[93]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 394 M (0.43201754 0.56798246)
## 2) symmetry_worst< -1.8637 273 117 B (0.57142857 0.42857143)
## 4) texture_worst< 4.907333 228 82 B (0.64035088 0.35964912)
## 8) smoothness_mean>=-2.357468 97 21 B (0.78350515 0.21649485)
## 16) texture_worst>=4.425254 53 1 B (0.98113208 0.01886792)
## 32) texture_mean< 3.104804 50 0 B (1.00000000 0.00000000) *
## 33) texture_mean>=3.104804 3 1 B (0.66666667 0.33333333)
## 66) texture_mean>=3.124007 2 0 B (1.00000000 0.00000000) *
## 67) texture_mean< 3.124007 1 0 M (0.00000000 1.00000000) *
## 17) texture_worst< 4.425254 44 20 B (0.54545455 0.45454545)
## 34) texture_mean< 2.755881 15 0 B (1.00000000 0.00000000) *
## 35) texture_mean>=2.755881 29 9 M (0.31034483 0.68965517)
## 70) smoothness_mean>=-2.278455 10 2 B (0.80000000 0.20000000) *
## 71) smoothness_mean< -2.278455 19 1 M (0.05263158 0.94736842) *
## 9) smoothness_mean< -2.357468 131 61 B (0.53435115 0.46564885)
## 18) smoothness_worst< -1.557839 91 28 B (0.69230769 0.30769231)
## 36) texture_mean>=2.786702 81 19 B (0.76543210 0.23456790)
## 72) smoothness_worst>=-1.694089 68 9 B (0.86764706 0.13235294) *
## 73) smoothness_worst< -1.694089 13 3 M (0.23076923 0.76923077) *
## 37) texture_mean< 2.786702 10 1 M (0.10000000 0.90000000)
## 74) texture_mean< 2.764104 1 0 B (1.00000000 0.00000000) *
## 75) texture_mean>=2.764104 9 0 M (0.00000000 1.00000000) *
## 19) smoothness_worst>=-1.557839 40 7 M (0.17500000 0.82500000)
## 38) compactness_se>=-3.455891 6 0 B (1.00000000 0.00000000) *
## 39) compactness_se< -3.455891 34 1 M (0.02941176 0.97058824)
## 78) texture_worst>=4.815376 1 0 B (1.00000000 0.00000000) *
## 79) texture_worst< 4.815376 33 0 M (0.00000000 1.00000000) *
## 5) texture_worst>=4.907333 45 10 M (0.22222222 0.77777778)
## 10) symmetry_worst< -2.207988 7 0 B (1.00000000 0.00000000) *
## 11) symmetry_worst>=-2.207988 38 3 M (0.07894737 0.92105263)
## 22) texture_mean>=3.361554 3 0 B (1.00000000 0.00000000) *
## 23) texture_mean< 3.361554 35 0 M (0.00000000 1.00000000) *
## 3) symmetry_worst>=-1.8637 639 238 M (0.37245696 0.62754304)
## 6) texture_mean>=3.176386 51 15 B (0.70588235 0.29411765)
## 12) symmetry_worst< -1.453337 46 10 B (0.78260870 0.21739130)
## 24) texture_mean< 3.386045 40 5 B (0.87500000 0.12500000)
## 48) smoothness_mean< -2.340941 33 1 B (0.96969697 0.03030303)
## 96) compactness_se< -3.055765 28 0 B (1.00000000 0.00000000) *
## 97) compactness_se>=-3.055765 5 1 B (0.80000000 0.20000000) *
## 49) smoothness_mean>=-2.340941 7 3 M (0.42857143 0.57142857)
## 98) texture_mean< 3.247139 3 0 B (1.00000000 0.00000000) *
## 99) texture_mean>=3.247139 4 0 M (0.00000000 1.00000000) *
## 25) texture_mean>=3.386045 6 1 M (0.16666667 0.83333333)
## 50) texture_mean>=3.500537 1 0 B (1.00000000 0.00000000) *
## 51) texture_mean< 3.500537 5 0 M (0.00000000 1.00000000) *
## 13) symmetry_worst>=-1.453337 5 0 M (0.00000000 1.00000000) *
## 7) texture_mean< 3.176386 588 202 M (0.34353741 0.65646259)
## 14) texture_mean< 2.960364 319 138 M (0.43260188 0.56739812)
## 28) smoothness_mean< -2.22055 272 135 B (0.50367647 0.49632353)
## 56) symmetry_worst>=-1.769229 162 49 B (0.69753086 0.30246914)
## 112) symmetry_worst< -1.36527 141 31 B (0.78014184 0.21985816) *
## 113) symmetry_worst>=-1.36527 21 3 M (0.14285714 0.85714286) *
## 57) symmetry_worst< -1.769229 110 24 M (0.21818182 0.78181818)
## 114) texture_mean>=2.93492 16 4 B (0.75000000 0.25000000) *
## 115) texture_mean< 2.93492 94 12 M (0.12765957 0.87234043) *
## 29) smoothness_mean>=-2.22055 47 1 M (0.02127660 0.97872340)
## 58) smoothness_worst< -1.534923 1 0 B (1.00000000 0.00000000) *
## 59) smoothness_worst>=-1.534923 46 0 M (0.00000000 1.00000000) *
## 15) texture_mean>=2.960364 269 64 M (0.23791822 0.76208178)
## 30) texture_mean>=2.987952 154 51 M (0.33116883 0.66883117)
## 60) texture_mean< 3.005682 20 6 B (0.70000000 0.30000000)
## 120) compactness_se>=-4.280193 14 1 B (0.92857143 0.07142857) *
## 121) compactness_se< -4.280193 6 1 M (0.16666667 0.83333333) *
## 61) texture_mean>=3.005682 134 37 M (0.27611940 0.72388060)
## 122) texture_worst>=4.667341 95 36 M (0.37894737 0.62105263) *
## 123) texture_worst< 4.667341 39 1 M (0.02564103 0.97435897) *
## 31) texture_mean< 2.987952 115 13 M (0.11304348 0.88695652)
## 62) texture_worst< 4.31854 5 0 B (1.00000000 0.00000000) *
## 63) texture_worst>=4.31854 110 8 M (0.07272727 0.92727273)
## 126) compactness_se< -4.291103 20 8 M (0.40000000 0.60000000) *
## 127) compactness_se>=-4.291103 90 0 M (0.00000000 1.00000000) *
##
## $trees[[94]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 438 M (0.48026316 0.51973684)
## 2) symmetry_worst>=-1.234283 45 10 B (0.77777778 0.22222222)
## 4) symmetry_worst< -1.069325 37 2 B (0.94594595 0.05405405)
## 8) smoothness_mean< -2.185335 35 0 B (1.00000000 0.00000000) *
## 9) smoothness_mean>=-2.185335 2 0 M (0.00000000 1.00000000) *
## 5) symmetry_worst>=-1.069325 8 0 M (0.00000000 1.00000000) *
## 3) symmetry_worst< -1.234283 867 403 M (0.46482122 0.53517878)
## 6) texture_worst< 4.517889 307 135 B (0.56026059 0.43973941)
## 12) compactness_se< -4.020169 86 20 B (0.76744186 0.23255814)
## 24) texture_worst>=4.3976 48 1 B (0.97916667 0.02083333)
## 48) compactness_se>=-4.484801 42 0 B (1.00000000 0.00000000) *
## 49) compactness_se< -4.484801 6 1 B (0.83333333 0.16666667)
## 98) compactness_se< -4.501722 5 0 B (1.00000000 0.00000000) *
## 99) compactness_se>=-4.501722 1 0 M (0.00000000 1.00000000) *
## 25) texture_worst< 4.3976 38 19 B (0.50000000 0.50000000)
## 50) texture_worst< 4.271231 17 0 B (1.00000000 0.00000000) *
## 51) texture_worst>=4.271231 21 2 M (0.09523810 0.90476190)
## 102) smoothness_worst< -1.654625 1 0 B (1.00000000 0.00000000) *
## 103) smoothness_worst>=-1.654625 20 1 M (0.05000000 0.95000000) *
## 13) compactness_se>=-4.020169 221 106 M (0.47963801 0.52036199)
## 26) smoothness_worst< -1.473088 139 58 B (0.58273381 0.41726619)
## 52) smoothness_worst>=-1.479941 19 0 B (1.00000000 0.00000000) *
## 53) smoothness_worst< -1.479941 120 58 B (0.51666667 0.48333333)
## 106) smoothness_worst< -1.482701 103 41 B (0.60194175 0.39805825) *
## 107) smoothness_worst>=-1.482701 17 0 M (0.00000000 1.00000000) *
## 27) smoothness_worst>=-1.473088 82 25 M (0.30487805 0.69512195)
## 54) smoothness_mean>=-2.267218 45 21 B (0.53333333 0.46666667)
## 108) symmetry_worst< -1.619683 18 2 B (0.88888889 0.11111111) *
## 109) symmetry_worst>=-1.619683 27 8 M (0.29629630 0.70370370) *
## 55) smoothness_mean< -2.267218 37 1 M (0.02702703 0.97297297)
## 110) smoothness_mean< -2.405579 6 1 M (0.16666667 0.83333333) *
## 111) smoothness_mean>=-2.405579 31 0 M (0.00000000 1.00000000) *
## 7) texture_worst>=4.517889 560 231 M (0.41250000 0.58750000)
## 14) texture_worst>=4.543638 476 218 M (0.45798319 0.54201681)
## 28) texture_worst< 4.577679 38 6 B (0.84210526 0.15789474)
## 56) smoothness_mean< -2.246212 34 3 B (0.91176471 0.08823529)
## 112) smoothness_mean>=-2.494905 31 1 B (0.96774194 0.03225806) *
## 113) smoothness_mean< -2.494905 3 1 M (0.33333333 0.66666667) *
## 57) smoothness_mean>=-2.246212 4 1 M (0.25000000 0.75000000)
## 114) texture_mean< 2.943507 1 0 B (1.00000000 0.00000000) *
## 115) texture_mean>=2.943507 3 0 M (0.00000000 1.00000000) *
## 29) texture_worst>=4.577679 438 186 M (0.42465753 0.57534247)
## 58) texture_worst>=4.642157 328 163 M (0.49695122 0.50304878)
## 116) symmetry_worst< -1.39888 312 149 B (0.52243590 0.47756410) *
## 117) symmetry_worst>=-1.39888 16 0 M (0.00000000 1.00000000) *
## 59) texture_worst< 4.642157 110 23 M (0.20909091 0.79090909)
## 118) symmetry_worst>=-1.685469 34 16 M (0.47058824 0.52941176) *
## 119) symmetry_worst< -1.685469 76 7 M (0.09210526 0.90789474) *
## 15) texture_worst< 4.543638 84 13 M (0.15476190 0.84523810)
## 30) symmetry_worst< -1.859307 13 3 B (0.76923077 0.23076923)
## 60) texture_mean< 3.157578 10 0 B (1.00000000 0.00000000) *
## 61) texture_mean>=3.157578 3 0 M (0.00000000 1.00000000) *
## 31) symmetry_worst>=-1.859307 71 3 M (0.04225352 0.95774648)
## 62) smoothness_mean>=-2.234468 3 0 B (1.00000000 0.00000000) *
## 63) smoothness_mean< -2.234468 68 0 M (0.00000000 1.00000000) *
##
## $trees[[95]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 436 M (0.47807018 0.52192982)
## 2) symmetry_worst>=-1.412496 61 18 B (0.70491803 0.29508197)
## 4) symmetry_worst< -1.072749 56 13 B (0.76785714 0.23214286)
## 8) symmetry_worst>=-1.126811 22 0 B (1.00000000 0.00000000) *
## 9) symmetry_worst< -1.126811 34 13 B (0.61764706 0.38235294)
## 18) symmetry_worst< -1.293329 24 4 B (0.83333333 0.16666667)
## 36) texture_worst< 4.689831 18 0 B (1.00000000 0.00000000) *
## 37) texture_worst>=4.689831 6 2 M (0.33333333 0.66666667)
## 74) texture_mean< 2.89312 2 0 B (1.00000000 0.00000000) *
## 75) texture_mean>=2.89312 4 0 M (0.00000000 1.00000000) *
## 19) symmetry_worst>=-1.293329 10 1 M (0.10000000 0.90000000)
## 38) texture_mean< 2.756192 1 0 B (1.00000000 0.00000000) *
## 39) texture_mean>=2.756192 9 0 M (0.00000000 1.00000000) *
## 5) symmetry_worst>=-1.072749 5 0 M (0.00000000 1.00000000) *
## 3) symmetry_worst< -1.412496 851 393 M (0.46180964 0.53819036)
## 6) texture_worst< 3.788077 11 0 B (1.00000000 0.00000000) *
## 7) texture_worst>=3.788077 840 382 M (0.45476190 0.54523810)
## 14) compactness_se< -3.721197 453 220 B (0.51434879 0.48565121)
## 28) compactness_se>=-3.867535 65 12 B (0.81538462 0.18461538)
## 56) smoothness_worst< -1.461024 52 0 B (1.00000000 0.00000000) *
## 57) smoothness_worst>=-1.461024 13 1 M (0.07692308 0.92307692)
## 114) compactness_se< -3.816941 1 0 B (1.00000000 0.00000000) *
## 115) compactness_se>=-3.816941 12 0 M (0.00000000 1.00000000) *
## 29) compactness_se< -3.867535 388 180 M (0.46391753 0.53608247)
## 58) compactness_se< -3.987083 315 149 B (0.52698413 0.47301587)
## 116) smoothness_mean>=-2.356093 125 42 B (0.66400000 0.33600000) *
## 117) smoothness_mean< -2.356093 190 83 M (0.43684211 0.56315789) *
## 59) compactness_se>=-3.987083 73 14 M (0.19178082 0.80821918)
## 118) smoothness_mean< -2.394871 14 5 B (0.64285714 0.35714286) *
## 119) smoothness_mean>=-2.394871 59 5 M (0.08474576 0.91525424) *
## 15) compactness_se>=-3.721197 387 149 M (0.38501292 0.61498708)
## 30) symmetry_worst< -1.849754 141 65 B (0.53900709 0.46099291)
## 60) smoothness_worst>=-1.565486 74 21 B (0.71621622 0.28378378)
## 120) texture_worst< 4.605004 43 6 B (0.86046512 0.13953488) *
## 121) texture_worst>=4.605004 31 15 B (0.51612903 0.48387097) *
## 61) smoothness_worst< -1.565486 67 23 M (0.34328358 0.65671642)
## 122) symmetry_worst>=-1.934101 9 0 B (1.00000000 0.00000000) *
## 123) symmetry_worst< -1.934101 58 14 M (0.24137931 0.75862069) *
## 31) symmetry_worst>=-1.849754 246 73 M (0.29674797 0.70325203)
## 62) symmetry_worst>=-1.608735 88 41 B (0.53409091 0.46590909)
## 124) texture_mean< 2.955045 31 1 B (0.96774194 0.03225806) *
## 125) texture_mean>=2.955045 57 17 M (0.29824561 0.70175439) *
## 63) symmetry_worst< -1.608735 158 26 M (0.16455696 0.83544304)
## 126) smoothness_mean>=-2.120284 4 0 B (1.00000000 0.00000000) *
## 127) smoothness_mean< -2.120284 154 22 M (0.14285714 0.85714286) *
##
## $trees[[96]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 424 M (0.46491228 0.53508772)
## 2) smoothness_mean< -2.392182 322 125 B (0.61180124 0.38819876)
## 4) smoothness_mean>=-2.441446 159 37 B (0.76729560 0.23270440)
## 8) symmetry_worst< -1.448573 146 27 B (0.81506849 0.18493151)
## 16) smoothness_mean< -2.424301 44 0 B (1.00000000 0.00000000) *
## 17) smoothness_mean>=-2.424301 102 27 B (0.73529412 0.26470588)
## 34) smoothness_mean>=-2.405782 50 2 B (0.96000000 0.04000000)
## 68) texture_mean< 3.082932 42 0 B (1.00000000 0.00000000) *
## 69) texture_mean>=3.082932 8 2 B (0.75000000 0.25000000) *
## 35) smoothness_mean< -2.405782 52 25 B (0.51923077 0.48076923)
## 70) smoothness_mean< -2.408446 43 16 B (0.62790698 0.37209302) *
## 71) smoothness_mean>=-2.408446 9 0 M (0.00000000 1.00000000) *
## 9) symmetry_worst>=-1.448573 13 3 M (0.23076923 0.76923077)
## 18) smoothness_mean< -2.425324 3 0 B (1.00000000 0.00000000) *
## 19) smoothness_mean>=-2.425324 10 0 M (0.00000000 1.00000000) *
## 5) smoothness_mean< -2.441446 163 75 M (0.46012270 0.53987730)
## 10) texture_mean< 2.921008 23 3 B (0.86956522 0.13043478)
## 20) smoothness_mean< -2.444843 21 1 B (0.95238095 0.04761905)
## 40) symmetry_worst>=-1.811428 17 0 B (1.00000000 0.00000000) *
## 41) symmetry_worst< -1.811428 4 1 B (0.75000000 0.25000000)
## 82) texture_mean< 2.85595 3 0 B (1.00000000 0.00000000) *
## 83) texture_mean>=2.85595 1 0 M (0.00000000 1.00000000) *
## 21) smoothness_mean>=-2.444843 2 0 M (0.00000000 1.00000000) *
## 11) texture_mean>=2.921008 140 55 M (0.39285714 0.60714286)
## 22) symmetry_worst< -1.868413 70 31 B (0.55714286 0.44285714)
## 44) compactness_se< -4.169518 24 2 B (0.91666667 0.08333333)
## 88) smoothness_mean< -2.44767 20 0 B (1.00000000 0.00000000) *
## 89) smoothness_mean>=-2.44767 4 2 B (0.50000000 0.50000000) *
## 45) compactness_se>=-4.169518 46 17 M (0.36956522 0.63043478)
## 90) smoothness_worst< -1.601489 20 6 B (0.70000000 0.30000000) *
## 91) smoothness_worst>=-1.601489 26 3 M (0.11538462 0.88461538) *
## 23) symmetry_worst>=-1.868413 70 16 M (0.22857143 0.77142857)
## 46) smoothness_worst< -1.657635 7 0 B (1.00000000 0.00000000) *
## 47) smoothness_worst>=-1.657635 63 9 M (0.14285714 0.85714286)
## 94) smoothness_worst>=-1.549205 5 1 B (0.80000000 0.20000000) *
## 95) smoothness_worst< -1.549205 58 5 M (0.08620690 0.91379310) *
## 3) smoothness_mean>=-2.392182 590 227 M (0.38474576 0.61525424)
## 6) symmetry_worst>=-1.234283 29 6 B (0.79310345 0.20689655)
## 12) smoothness_worst< -1.440335 24 1 B (0.95833333 0.04166667)
## 24) texture_mean>=2.693961 23 0 B (1.00000000 0.00000000) *
## 25) texture_mean< 2.693961 1 0 M (0.00000000 1.00000000) *
## 13) smoothness_worst>=-1.440335 5 0 M (0.00000000 1.00000000) *
## 7) symmetry_worst< -1.234283 561 204 M (0.36363636 0.63636364)
## 14) smoothness_worst< -1.472307 372 156 M (0.41935484 0.58064516)
## 28) smoothness_worst>=-1.477976 38 0 B (1.00000000 0.00000000) *
## 29) smoothness_worst< -1.477976 334 118 M (0.35329341 0.64670659)
## 58) smoothness_worst< -1.482699 286 115 M (0.40209790 0.59790210)
## 116) compactness_se< -3.965703 62 21 B (0.66129032 0.33870968) *
## 117) compactness_se>=-3.965703 224 74 M (0.33035714 0.66964286) *
## 59) smoothness_worst>=-1.482699 48 3 M (0.06250000 0.93750000)
## 118) texture_worst< 4.136746 2 0 B (1.00000000 0.00000000) *
## 119) texture_worst>=4.136746 46 1 M (0.02173913 0.97826087) *
## 15) smoothness_worst>=-1.472307 189 48 M (0.25396825 0.74603175)
## 30) symmetry_worst< -1.941776 14 5 B (0.64285714 0.35714286)
## 60) texture_worst< 4.85229 9 0 B (1.00000000 0.00000000) *
## 61) texture_worst>=4.85229 5 0 M (0.00000000 1.00000000) *
## 31) symmetry_worst>=-1.941776 175 39 M (0.22285714 0.77714286)
## 62) smoothness_mean>=-2.094359 22 10 B (0.54545455 0.45454545)
## 124) symmetry_worst< -1.596878 9 0 B (1.00000000 0.00000000) *
## 125) symmetry_worst>=-1.596878 13 3 M (0.23076923 0.76923077) *
## 63) smoothness_mean< -2.094359 153 27 M (0.17647059 0.82352941)
## 126) compactness_se< -4.040144 49 20 M (0.40816327 0.59183673) *
## 127) compactness_se>=-4.040144 104 7 M (0.06730769 0.93269231) *
##
## $trees[[97]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 447 B (0.50986842 0.49013158)
## 2) compactness_se< -4.705732 17 1 B (0.94117647 0.05882353)
## 4) symmetry_worst< -1.179946 16 0 B (1.00000000 0.00000000) *
## 5) symmetry_worst>=-1.179946 1 0 M (0.00000000 1.00000000) *
## 3) compactness_se>=-4.705732 895 446 B (0.50167598 0.49832402)
## 6) symmetry_worst>=-2.923662 883 434 B (0.50849377 0.49150623)
## 12) symmetry_worst< -2.202388 65 17 B (0.73846154 0.26153846)
## 24) smoothness_mean>=-2.469349 52 6 B (0.88461538 0.11538462)
## 48) compactness_se>=-4.492707 49 3 B (0.93877551 0.06122449)
## 96) smoothness_mean< -2.266808 45 0 B (1.00000000 0.00000000) *
## 97) smoothness_mean>=-2.266808 4 1 M (0.25000000 0.75000000) *
## 49) compactness_se< -4.492707 3 0 M (0.00000000 1.00000000) *
## 25) smoothness_mean< -2.469349 13 2 M (0.15384615 0.84615385)
## 50) smoothness_mean< -2.57545 2 0 B (1.00000000 0.00000000) *
## 51) smoothness_mean>=-2.57545 11 0 M (0.00000000 1.00000000) *
## 13) symmetry_worst>=-2.202388 818 401 M (0.49022005 0.50977995)
## 26) smoothness_mean>=-2.262885 196 77 B (0.60714286 0.39285714)
## 52) smoothness_mean< -2.21595 114 28 B (0.75438596 0.24561404)
## 104) texture_mean< 3.081899 106 20 B (0.81132075 0.18867925) *
## 105) texture_mean>=3.081899 8 0 M (0.00000000 1.00000000) *
## 53) smoothness_mean>=-2.21595 82 33 M (0.40243902 0.59756098)
## 106) smoothness_mean>=-2.188811 61 30 B (0.50819672 0.49180328) *
## 107) smoothness_mean< -2.188811 21 2 M (0.09523810 0.90476190) *
## 27) smoothness_mean< -2.262885 622 282 M (0.45337621 0.54662379)
## 54) smoothness_mean< -2.295113 557 271 M (0.48653501 0.51346499)
## 108) smoothness_mean>=-2.311929 60 13 B (0.78333333 0.21666667) *
## 109) smoothness_mean< -2.311929 497 224 M (0.45070423 0.54929577) *
## 55) smoothness_mean>=-2.295113 65 11 M (0.16923077 0.83076923)
## 110) smoothness_worst< -1.514953 8 2 B (0.75000000 0.25000000) *
## 111) smoothness_worst>=-1.514953 57 5 M (0.08771930 0.91228070) *
## 7) symmetry_worst< -2.923662 12 0 M (0.00000000 1.00000000) *
##
## $trees[[98]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 448 B (0.50877193 0.49122807)
## 2) smoothness_mean< -2.258569 749 337 B (0.55006676 0.44993324)
## 4) smoothness_mean>=-2.267218 31 1 B (0.96774194 0.03225806)
## 8) compactness_se< -3.294139 30 0 B (1.00000000 0.00000000) *
## 9) compactness_se>=-3.294139 1 0 M (0.00000000 1.00000000) *
## 5) smoothness_mean< -2.267218 718 336 B (0.53203343 0.46796657)
## 10) compactness_se>=-4.406791 616 270 B (0.56168831 0.43831169)
## 20) texture_worst>=4.726366 205 60 B (0.70731707 0.29268293)
## 40) texture_worst< 5.003123 139 23 B (0.83453237 0.16546763)
## 80) compactness_se< -2.614641 135 19 B (0.85925926 0.14074074) *
## 81) compactness_se>=-2.614641 4 0 M (0.00000000 1.00000000) *
## 41) texture_worst>=5.003123 66 29 M (0.43939394 0.56060606)
## 82) smoothness_mean< -2.363096 46 19 B (0.58695652 0.41304348) *
## 83) smoothness_mean>=-2.363096 20 2 M (0.10000000 0.90000000) *
## 21) texture_worst< 4.726366 411 201 M (0.48905109 0.51094891)
## 42) smoothness_mean< -2.296106 353 164 B (0.53541076 0.46458924)
## 84) texture_mean< 2.976548 248 94 B (0.62096774 0.37903226) *
## 85) texture_mean>=2.976548 105 35 M (0.33333333 0.66666667) *
## 43) smoothness_mean>=-2.296106 58 12 M (0.20689655 0.79310345)
## 86) symmetry_worst< -1.93369 10 2 B (0.80000000 0.20000000) *
## 87) symmetry_worst>=-1.93369 48 4 M (0.08333333 0.91666667) *
## 11) compactness_se< -4.406791 102 36 M (0.35294118 0.64705882)
## 22) compactness_se< -4.520844 55 25 B (0.54545455 0.45454545)
## 44) smoothness_mean>=-2.536393 41 12 B (0.70731707 0.29268293)
## 88) symmetry_worst>=-2.330898 33 4 B (0.87878788 0.12121212) *
## 89) symmetry_worst< -2.330898 8 0 M (0.00000000 1.00000000) *
## 45) smoothness_mean< -2.536393 14 1 M (0.07142857 0.92857143)
## 90) texture_mean< 2.933381 1 0 B (1.00000000 0.00000000) *
## 91) texture_mean>=2.933381 13 0 M (0.00000000 1.00000000) *
## 23) compactness_se>=-4.520844 47 6 M (0.12765957 0.87234043)
## 46) texture_mean< 2.841101 3 0 B (1.00000000 0.00000000) *
## 47) texture_mean>=2.841101 44 3 M (0.06818182 0.93181818)
## 94) texture_mean>=3.28326 1 0 B (1.00000000 0.00000000) *
## 95) texture_mean< 3.28326 43 2 M (0.04651163 0.95348837) *
## 3) smoothness_mean>=-2.258569 163 52 M (0.31901840 0.68098160)
## 6) symmetry_worst< -1.765932 51 25 M (0.49019608 0.50980392)
## 12) texture_mean< 2.909334 13 0 B (1.00000000 0.00000000) *
## 13) texture_mean>=2.909334 38 12 M (0.31578947 0.68421053)
## 26) smoothness_worst>=-1.433185 9 2 B (0.77777778 0.22222222)
## 52) texture_mean>=3.014892 7 0 B (1.00000000 0.00000000) *
## 53) texture_mean< 3.014892 2 0 M (0.00000000 1.00000000) *
## 27) smoothness_worst< -1.433185 29 5 M (0.17241379 0.82758621)
## 54) texture_worst< 4.489662 7 3 B (0.57142857 0.42857143)
## 108) texture_mean>=2.956939 4 0 B (1.00000000 0.00000000) *
## 109) texture_mean< 2.956939 3 0 M (0.00000000 1.00000000) *
## 55) texture_worst>=4.489662 22 1 M (0.04545455 0.95454545)
## 110) smoothness_worst< -1.506961 2 1 B (0.50000000 0.50000000) *
## 111) smoothness_worst>=-1.506961 20 0 M (0.00000000 1.00000000) *
## 7) symmetry_worst>=-1.765932 112 27 M (0.24107143 0.75892857)
## 14) texture_worst< 4.680515 91 27 M (0.29670330 0.70329670)
## 28) texture_worst>=4.623656 4 0 B (1.00000000 0.00000000) *
## 29) texture_worst< 4.623656 87 23 M (0.26436782 0.73563218)
## 58) smoothness_mean< -2.216408 31 14 M (0.45161290 0.54838710)
## 116) smoothness_mean>=-2.231196 12 0 B (1.00000000 0.00000000) *
## 117) smoothness_mean< -2.231196 19 2 M (0.10526316 0.89473684) *
## 59) smoothness_mean>=-2.216408 56 9 M (0.16071429 0.83928571)
## 118) texture_worst< 4.185244 20 8 M (0.40000000 0.60000000) *
## 119) texture_worst>=4.185244 36 1 M (0.02777778 0.97222222) *
## 15) texture_worst>=4.680515 21 0 M (0.00000000 1.00000000) *
##
## $trees[[99]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 456 B (0.50000000 0.50000000)
## 2) compactness_se>=-2.809774 31 3 B (0.90322581 0.09677419)
## 4) smoothness_mean< -2.085882 29 1 B (0.96551724 0.03448276)
## 8) symmetry_worst>=-2.167572 28 0 B (1.00000000 0.00000000) *
## 9) symmetry_worst< -2.167572 1 0 M (0.00000000 1.00000000) *
## 5) smoothness_mean>=-2.085882 2 0 M (0.00000000 1.00000000) *
## 3) compactness_se< -2.809774 881 428 M (0.48581158 0.51418842)
## 6) compactness_se< -4.274791 137 46 B (0.66423358 0.33576642)
## 12) compactness_se>=-4.49319 57 7 B (0.87719298 0.12280702)
## 24) smoothness_mean>=-2.467165 47 3 B (0.93617021 0.06382979)
## 48) texture_mean< 3.151222 46 2 B (0.95652174 0.04347826)
## 96) smoothness_worst< -1.483426 38 0 B (1.00000000 0.00000000) *
## 97) smoothness_worst>=-1.483426 8 2 B (0.75000000 0.25000000) *
## 49) texture_mean>=3.151222 1 0 M (0.00000000 1.00000000) *
## 25) smoothness_mean< -2.467165 10 4 B (0.60000000 0.40000000)
## 50) compactness_se< -4.341665 6 0 B (1.00000000 0.00000000) *
## 51) compactness_se>=-4.341665 4 0 M (0.00000000 1.00000000) *
## 13) compactness_se< -4.49319 80 39 B (0.51250000 0.48750000)
## 26) compactness_se< -4.704842 12 0 B (1.00000000 0.00000000) *
## 27) compactness_se>=-4.704842 68 29 M (0.42647059 0.57352941)
## 54) smoothness_worst>=-1.547264 23 6 B (0.73913043 0.26086957)
## 108) symmetry_worst>=-1.809609 16 1 B (0.93750000 0.06250000) *
## 109) symmetry_worst< -1.809609 7 2 M (0.28571429 0.71428571) *
## 55) smoothness_worst< -1.547264 45 12 M (0.26666667 0.73333333)
## 110) texture_worst< 4.812659 29 12 M (0.41379310 0.58620690) *
## 111) texture_worst>=4.812659 16 0 M (0.00000000 1.00000000) *
## 7) compactness_se>=-4.274791 744 337 M (0.45295699 0.54704301)
## 14) smoothness_worst>=-1.40309 48 10 B (0.79166667 0.20833333)
## 28) texture_mean< 3.05894 45 7 B (0.84444444 0.15555556)
## 56) texture_mean>=3.008413 19 0 B (1.00000000 0.00000000) *
## 57) texture_mean< 3.008413 26 7 B (0.73076923 0.26923077)
## 114) compactness_se< -3.086764 23 4 B (0.82608696 0.17391304) *
## 115) compactness_se>=-3.086764 3 0 M (0.00000000 1.00000000) *
## 29) texture_mean>=3.05894 3 0 M (0.00000000 1.00000000) *
## 15) smoothness_worst< -1.40309 696 299 M (0.42959770 0.57040230)
## 30) smoothness_worst< -1.603778 81 25 B (0.69135802 0.30864198)
## 60) texture_mean>=3.086027 40 0 B (1.00000000 0.00000000) *
## 61) texture_mean< 3.086027 41 16 M (0.39024390 0.60975610)
## 122) texture_mean< 2.939162 11 0 B (1.00000000 0.00000000) *
## 123) texture_mean>=2.939162 30 5 M (0.16666667 0.83333333) *
## 31) smoothness_worst>=-1.603778 615 243 M (0.39512195 0.60487805)
## 62) smoothness_worst>=-1.567247 530 230 M (0.43396226 0.56603774)
## 124) smoothness_worst< -1.55958 30 3 B (0.90000000 0.10000000) *
## 125) smoothness_worst>=-1.55958 500 203 M (0.40600000 0.59400000) *
## 63) smoothness_worst< -1.567247 85 13 M (0.15294118 0.84705882)
## 126) smoothness_mean>=-2.419351 42 12 M (0.28571429 0.71428571) *
## 127) smoothness_mean< -2.419351 43 1 M (0.02325581 0.97674419) *
##
## $trees[[100]]
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 427 B (0.53179825 0.46820175)
## 2) smoothness_worst< -1.603315 108 29 B (0.73148148 0.26851852)
## 4) texture_mean>=3.086027 40 2 B (0.95000000 0.05000000)
## 8) compactness_se>=-4.480894 38 0 B (1.00000000 0.00000000) *
## 9) compactness_se< -4.480894 2 0 M (0.00000000 1.00000000) *
## 5) texture_mean< 3.086027 68 27 B (0.60294118 0.39705882)
## 10) smoothness_worst>=-1.60795 17 0 B (1.00000000 0.00000000) *
## 11) smoothness_worst< -1.60795 51 24 M (0.47058824 0.52941176)
## 22) symmetry_worst< -1.667161 36 13 B (0.63888889 0.36111111)
## 44) texture_mean< 3.080067 29 6 B (0.79310345 0.20689655)
## 88) texture_worst< 4.864124 24 1 B (0.95833333 0.04166667) *
## 89) texture_worst>=4.864124 5 0 M (0.00000000 1.00000000) *
## 45) texture_mean>=3.080067 7 0 M (0.00000000 1.00000000) *
## 23) symmetry_worst>=-1.667161 15 1 M (0.06666667 0.93333333)
## 46) texture_mean< 2.885795 1 0 B (1.00000000 0.00000000) *
## 47) texture_mean>=2.885795 14 0 M (0.00000000 1.00000000) *
## 3) smoothness_worst>=-1.603315 804 398 B (0.50497512 0.49502488)
## 6) smoothness_worst>=-1.59459 778 374 B (0.51928021 0.48071979)
## 12) smoothness_worst< -1.584388 33 1 B (0.96969697 0.03030303)
## 24) texture_mean< 3.267122 32 0 B (1.00000000 0.00000000) *
## 25) texture_mean>=3.267122 1 0 M (0.00000000 1.00000000) *
## 13) smoothness_worst>=-1.584388 745 372 M (0.49932886 0.50067114)
## 26) compactness_se>=-3.494301 226 87 B (0.61504425 0.38495575)
## 52) compactness_se< -3.444843 49 3 B (0.93877551 0.06122449)
## 104) smoothness_mean>=-2.463517 46 0 B (1.00000000 0.00000000) *
## 105) smoothness_mean< -2.463517 3 0 M (0.00000000 1.00000000) *
## 53) compactness_se>=-3.444843 177 84 B (0.52542373 0.47457627)
## 106) compactness_se>=-3.426516 154 63 B (0.59090909 0.40909091) *
## 107) compactness_se< -3.426516 23 2 M (0.08695652 0.91304348) *
## 27) compactness_se< -3.494301 519 233 M (0.44894027 0.55105973)
## 54) compactness_se< -3.66733 444 219 M (0.49324324 0.50675676)
## 108) texture_mean>=3.348904 20 1 B (0.95000000 0.05000000) *
## 109) texture_mean< 3.348904 424 200 M (0.47169811 0.52830189) *
## 55) compactness_se>=-3.66733 75 14 M (0.18666667 0.81333333)
## 110) texture_mean>=3.145434 10 1 B (0.90000000 0.10000000) *
## 111) texture_mean< 3.145434 65 5 M (0.07692308 0.92307692) *
## 7) smoothness_worst< -1.59459 26 2 M (0.07692308 0.92307692)
## 14) texture_mean< 2.755158 2 0 B (1.00000000 0.00000000) *
## 15) texture_mean>=2.755158 24 0 M (0.00000000 1.00000000) *
##
##
## $weights
## [1] 1.0642386 0.9517970 0.7684041 0.5793833 0.6323994 0.7337124 0.5493768
## [8] 0.7347059 0.8042379 0.5636014 0.5367516 0.4876431 0.3206582 0.6539157
## [15] 0.5093635 0.6648857 0.5026250 0.2927844 0.5666438 0.6402904 0.6201142
## [22] 0.3569723 0.5418616 0.4304642 0.5381729 0.9289972 0.6696281 0.6369112
## [29] 0.7936017 0.2413852 0.5679277 0.5423372 0.4333344 0.4961133 0.7167576
## [36] 0.5443922 0.4028488 0.4175805 0.4115830 0.5246761 0.6065379 0.5421187
## [43] 0.4854263 0.6947258 0.6230596 0.6219552 0.5126567 0.4969624 0.3586968
## [50] 0.4692455 0.4498236 0.6898165 0.7332071 0.5226157 0.3772735 0.5096765
## [57] 0.7364397 0.4606224 0.6473455 0.6227635 0.3472248 0.7561317 0.7708424
## [64] 0.4784138 0.7103869 0.5683997 0.6992354 0.6129001 0.5718152 0.7558681
## [71] 0.4084518 0.5597012 0.5215857 0.7652235 0.8515905 0.6346368 0.5966106
## [78] 0.6937558 0.5497377 0.5101590 0.3734845 0.5873386 0.4147868 0.4774993
## [85] 0.5152268 0.4816840 0.5237209 0.3983270 0.5446059 0.5659719 0.5241156
## [92] 0.4338659 0.7521560 0.5336086 0.5130153 0.6569338 0.2870437 0.4795788
## [99] 0.4234391 0.3298559
##
## $votes
## [,1] [,2]
## [1,] 15.770631 40.942345
## [2,] 17.213111 39.499864
## [3,] 13.287915 43.425060
## [4,] 15.945589 40.767387
## [5,] 16.166797 40.546178
## [6,] 14.949651 41.763325
## [7,] 12.289696 44.423280
## [8,] 9.831883 46.881093
## [9,] 16.486741 40.226235
## [10,] 17.584448 39.128527
## [11,] 17.015885 39.697091
## [12,] 11.398470 45.314506
## [13,] 8.843713 47.869262
## [14,] 16.359905 40.353071
## [15,] 10.331381 46.381594
## [16,] 44.274543 12.438432
## [17,] 44.179938 12.533037
## [18,] 44.653472 12.059503
## [19,] 16.029224 40.683752
## [20,] 8.258340 48.454635
## [21,] 15.460261 41.252714
## [22,] 13.423686 43.289289
## [23,] 17.922414 38.790561
## [24,] 18.012245 38.700730
## [25,] 9.799023 46.913952
## [26,] 13.934914 42.778062
## [27,] 43.495625 13.217351
## [28,] 16.615958 40.097017
## [29,] 16.588248 40.124728
## [30,] 16.274650 40.438325
## [31,] 14.644228 42.068747
## [32,] 12.793774 43.919202
## [33,] 15.603049 41.109927
## [34,] 41.356636 15.356340
## [35,] 12.885000 43.827976
## [36,] 42.718814 13.994161
## [37,] 45.950137 10.762838
## [38,] 41.568239 15.144736
## [39,] 17.790713 38.922262
## [40,] 15.448864 41.264111
## [41,] 43.478189 13.234787
## [42,] 16.601468 40.111507
## [43,] 11.966156 44.746819
## [44,] 44.323688 12.389288
## [45,] 45.682097 11.030878
## [46,] 40.440032 16.272944
## [47,] 44.870215 11.842760
## [48,] 12.696284 44.016691
## [49,] 38.915553 17.797423
## [50,] 40.186094 16.526882
## [51,] 39.774979 16.937997
## [52,] 17.520022 39.192953
## [53,] 14.362500 42.350475
## [54,] 17.269488 39.443487
## [55,] 41.030742 15.682233
## [56,] 17.444534 39.268441
## [57,] 10.804987 45.907989
## [58,] 39.109539 17.603436
## [59,] 38.949864 17.763112
## [60,] 39.733432 16.979543
## [61,] 13.527358 43.185618
## [62,] 14.516071 42.196904
## [63,] 41.248066 15.464910
## [64,] 16.986440 39.726535
## [65,] 17.105598 39.607377
## [66,] 14.781496 41.931480
## [67,] 39.092940 17.620036
## [68,] 39.754907 16.958069
## [69,] 40.833049 15.879927
## [70,] 16.198770 40.514205
## [71,] 46.788657 9.924319
## [72,] 41.490335 15.222641
## [73,] 16.688301 40.024675
## [74,] 14.605614 42.107362
## [75,] 42.474587 14.238388
## [76,] 42.544582 14.168393
## [77,] 16.613916 40.099059
## [78,] 48.799685 7.913290
## [79,] 40.697132 16.015844
## [80,] 39.470015 17.242960
## [81,] 39.525214 17.187761
## [82,] 16.072117 40.640858
## [83,] 39.695935 17.017041
## [84,] 14.623544 42.089431
## [85,] 39.044648 17.668328
## [86,] 39.928659 16.784317
## [87,] 39.714371 16.998604
## [88,] 42.245444 14.467532
## [89,] 39.984787 16.728189
## [90,] 39.038393 17.674582
## [91,] 38.672522 18.040454
## [92,] 40.654793 16.058183
## [93,] 14.571006 42.141970
## [94,] 13.159873 43.553102
## [95,] 15.743529 40.969446
## [96,] 42.805768 13.907208
## [97,] 15.613982 41.098993
## [98,] 42.686093 14.026883
## [99,] 40.676317 16.036658
## [100,] 16.278184 40.434791
## [101,] 17.444143 39.268833
## [102,] 41.168457 15.544518
## [103,] 15.511556 41.201419
## [104,] 41.302831 15.410145
## [105,] 17.320774 39.392202
## [106,] 13.628987 43.083989
## [107,] 41.673650 15.039326
## [108,] 13.692129 43.020846
## [109,] 42.497151 14.215825
## [110,] 40.543216 16.169760
## [111,] 16.352812 40.360164
## [112,] 41.984485 14.728491
## [113,] 44.732094 11.980881
## [114,] 18.544930 38.168045
## [115,] 41.676366 15.036610
## [116,] 43.822652 12.890324
## [117,] 15.868165 40.844811
## [118,] 40.818102 15.894873
## [119,] 40.375090 16.337886
## [120,] 44.526715 12.186261
## [121,] 38.935307 17.777668
## [122,] 48.652987 8.059988
## [123,] 39.687404 17.025572
## [124,] 41.409094 15.303882
## [125,] 40.600126 16.112850
## [126,] 43.323440 13.389536
## [127,] 45.845442 10.867534
## [128,] 38.047626 18.665350
## [129,] 17.591765 39.121210
## [130,] 15.892801 40.820174
## [131,] 40.469760 16.243215
## [132,] 15.232856 41.480120
## [133,] 42.493406 14.219570
## [134,] 18.184074 38.528901
## [135,] 16.589639 40.123337
## [136,] 43.545566 13.167409
## [137,] 18.005034 38.707942
## [138,] 44.074254 12.638721
## [139,] 47.694887 9.018089
## [140,] 40.986591 15.726384
## [141,] 16.673528 40.039447
## [142,] 46.207421 10.505554
## [143,] 48.178617 8.534359
## [144,] 7.743186 48.969789
## [145,] 14.577974 42.135001
## [146,] 42.140904 14.572071
## [147,] 17.142220 39.570756
## [148,] 43.341310 13.371666
## [149,] 41.016437 15.696538
## [150,] 42.432610 14.280366
## [151,] 13.441184 43.271791
## [152,] 39.833034 16.879941
## [153,] 42.803156 13.909820
## [154,] 10.891920 45.821055
## [155,] 42.941229 13.771746
## [156,] 12.892587 43.820388
## [157,] 17.611585 39.101391
## [158,] 15.750297 40.962679
## [159,] 39.904668 16.808308
## [160,] 18.168566 38.544409
## [161,] 12.465104 44.247871
## [162,] 12.206802 44.506173
## [163,] 39.032284 17.680692
## [164,] 17.435224 39.277751
## [165,] 41.716372 14.996604
## [166,] 16.615957 40.097018
## [167,] 38.825920 17.887055
## [168,] 47.571713 9.141263
## [169,] 16.345455 40.367521
## [170,] 39.259423 17.453553
## [171,] 15.867135 40.845840
## [172,] 15.673557 41.039418
## [173,] 40.344147 16.368828
## [174,] 41.973868 14.739107
## [175,] 16.468344 40.244631
## [176,] 40.679791 16.033184
## [177,] 40.562979 16.149997
## [178,] 11.877042 44.835934
## [179,] 40.388609 16.324366
## [180,] 44.874083 11.838893
## [181,] 41.131513 15.581463
## [182,] 40.597623 16.115353
## [183,] 15.145747 41.567228
## [184,] 44.026069 12.686906
## [185,] 41.938170 14.774805
## [186,] 18.112535 38.600440
## [187,] 41.146846 15.566129
## [188,] 40.751791 15.961184
## [189,] 40.932340 15.780636
## [190,] 12.908527 43.804448
## [191,] 41.519504 15.193471
## [192,] 45.438347 11.274628
## [193,] 38.456309 18.256666
## [194,] 38.804761 17.908215
## [195,] 15.585494 41.127481
## [196,] 39.733489 16.979486
## [197,] 42.426717 14.286258
## [198,] 39.525587 17.187389
## [199,] 44.512441 12.200535
## [200,] 17.263380 39.449595
## [201,] 16.539118 40.173857
## [202,] 18.234634 38.478342
## [203,] 17.519738 39.193237
## [204,] 15.848679 40.864296
## [205,] 17.020160 39.692816
## [206,] 16.282094 40.430882
## [207,] 12.278382 44.434593
## [208,] 16.010525 40.702450
## [209,] 17.557445 39.155531
## [210,] 15.722596 40.990379
## [211,] 38.755595 17.957380
## [212,] 44.151227 12.561749
## [213,] 41.383506 15.329469
## [214,] 40.441111 16.271865
## [215,] 42.890948 13.822027
## [216,] 16.491647 40.221329
## [217,] 40.100003 16.612972
## [218,] 16.519004 40.193971
## [219,] 44.935116 11.777859
## [220,] 43.694015 13.018960
## [221,] 15.470179 41.242797
## [222,] 48.249954 8.463022
## [223,] 14.069065 42.643911
## [224,] 45.662435 11.050541
## [225,] 14.230133 42.482843
## [226,] 17.012664 39.700311
## [227,] 47.317972 9.395003
## [228,] 47.180112 9.532864
## [229,] 41.217196 15.495780
## [230,] 47.957359 8.755617
## [231,] 41.085638 15.627337
## [232,] 39.475226 17.237750
## [233,] 45.515224 11.197751
## [234,] 39.063678 17.649297
## [235,] 39.041382 17.671594
## [236,] 40.136644 16.576331
## [237,] 47.321821 9.391155
## [238,] 45.788023 10.924953
## [239,] 48.511766 8.201209
## [240,] 17.202376 39.510599
## [241,] 43.623547 13.089428
## [242,] 41.235680 15.477296
## [243,] 16.587225 40.125750
## [244,] 41.071650 15.641326
## [245,] 15.931765 40.781210
## [246,] 41.648704 15.064271
## [247,] 41.423976 15.289000
## [248,] 44.800090 11.912886
## [249,] 46.542954 10.170021
## [250,] 51.478066 5.234909
## [251,] 52.726681 3.986294
## [252,] 51.816131 4.896844
## [253,] 42.156969 14.556006
## [254,] 46.789302 9.923673
## [255,] 47.033954 9.679021
## [256,] 17.939534 38.773442
## [257,] 40.388572 16.324403
## [258,] 43.852414 12.860562
## [259,] 39.387312 17.325664
## [260,] 17.431963 39.281013
## [261,] 42.986149 13.726827
## [262,] 12.582705 44.130271
## [263,] 40.602830 16.110146
## [264,] 41.618293 15.094682
## [265,] 15.995097 40.717878
## [266,] 18.088077 38.624899
## [267,] 39.847651 16.865324
## [268,] 40.128674 16.584302
## [269,] 43.028728 13.684247
## [270,] 17.380567 39.332409
## [271,] 42.102490 14.610485
## [272,] 39.678887 17.034088
## [273,] 15.803214 40.909762
## [274,] 39.073850 17.639125
## [275,] 40.497825 16.215150
## [276,] 40.625114 16.087861
## [277,] 15.386031 41.326945
## [278,] 45.839495 10.873480
## [279,] 41.734782 14.978194
## [280,] 39.424901 17.288075
## [281,] 45.550802 11.162173
## [282,] 43.880849 12.832127
## [283,] 41.618059 15.094916
## [284,] 46.103154 10.609821
## [285,] 15.180581 41.532395
## [286,] 14.204322 42.508653
## [287,] 15.885888 40.827087
## [288,] 46.325575 10.387400
## [289,] 46.857229 9.855747
## [290,] 43.221246 13.491729
## [291,] 41.586639 15.126336
## [292,] 48.464399 8.248576
## [293,] 40.953283 15.759693
## [294,] 38.964295 17.748681
## [295,] 42.035816 14.677159
## [296,] 16.199566 40.513409
## [297,] 12.404138 44.308838
## [298,] 16.911264 39.801711
## [299,] 16.573321 40.139654
## [300,] 10.160189 46.552786
## [301,] 45.691797 11.021178
## [302,] 16.801802 39.911173
## [303,] 45.585617 11.127359
## [304,] 42.898341 13.814634
## [305,] 39.847554 16.865421
## [306,] 39.915071 16.797905
## [307,] 39.738535 16.974440
## [308,] 12.408280 44.304696
## [309,] 39.040940 17.672035
## [310,] 45.472008 11.240967
## [311,] 40.427360 16.285615
## [312,] 38.808080 17.904895
## [313,] 41.597075 15.115900
## [314,] 17.046261 39.666715
## [315,] 45.442685 11.270290
## [316,] 16.077227 40.635749
## [317,] 39.739712 16.973263
## [318,] 11.487710 45.225265
## [319,] 38.982141 17.730834
## [320,] 42.992476 13.720499
## [321,] 42.800140 13.912836
## [322,] 37.924094 18.788881
## [323,] 14.918980 41.793995
## [324,] 43.296495 13.416481
## [325,] 43.493131 13.219844
## [326,] 49.647363 7.065613
## [327,] 40.933615 15.779360
## [328,] 40.979971 15.733004
## [329,] 43.223226 13.489750
## [330,] 14.037143 42.675832
## [331,] 39.932592 16.780384
## [332,] 40.253440 16.459536
## [333,] 41.609674 15.103302
## [334,] 40.019841 16.693134
## [335,] 39.021293 17.691682
## [336,] 16.943069 39.769907
## [337,] 37.716459 18.996517
## [338,] 38.519345 18.193631
## [339,] 14.188733 42.524242
## [340,] 41.726306 14.986670
## [341,] 41.968979 14.743996
## [342,] 40.958076 15.754899
## [343,] 38.937311 17.775664
## [344,] 40.583640 16.129336
## [345,] 46.425265 10.287711
## [346,] 39.386938 17.326038
## [347,] 15.254605 41.458371
## [348,] 40.276430 16.436546
## [349,] 13.590664 43.122312
## [350,] 14.138133 42.574842
## [351,] 17.469015 39.243960
## [352,] 37.551109 19.161866
## [353,] 41.110565 15.602410
## [354,] 39.820370 16.892605
## [355,] 48.450657 8.262318
## [356,] 41.018459 15.694516
## [357,] 42.188299 14.524677
## [358,] 40.576433 16.136542
## [359,] 18.109353 38.603622
## [360,] 38.948052 17.764924
## [361,] 15.688920 41.024056
## [362,] 42.645322 14.067653
## [363,] 44.151560 12.561415
## [364,] 16.199682 40.513293
## [365,] 41.125843 15.587133
## [366,] 17.194501 39.518475
## [367,] 40.566917 16.146058
## [368,] 42.936683 13.776292
## [369,] 41.360363 15.352612
## [370,] 38.264808 18.448168
## [371,] 40.270093 16.442882
## [372,] 45.213239 11.499736
## [373,] 15.610357 41.102619
## [374,] 14.175765 42.537211
## [375,] 41.264621 15.448354
## [376,] 44.233821 12.479154
## [377,] 39.689827 17.023148
## [378,] 39.011788 17.701187
## [379,] 40.836809 15.876167
## [380,] 16.677779 40.035196
## [381,] 41.515173 15.197802
## [382,] 43.290172 13.422803
## [383,] 45.065436 11.647539
## [384,] 39.069503 17.643472
## [385,] 43.578182 13.134794
## [386,] 39.200069 17.512907
## [387,] 17.837099 38.875876
## [388,] 40.092031 16.620945
## [389,] 39.900827 16.812148
## [390,] 45.618491 11.094484
## [391,] 43.052873 13.660102
## [392,] 42.678594 14.034381
## [393,] 13.100300 43.612676
## [394,] 15.531682 41.181294
## [395,] 48.522581 8.190395
## [396,] 41.292767 15.420208
## [397,] 40.655632 16.057343
## [398,] 39.200737 17.512238
## [399,] 41.025748 15.687228
## [400,] 16.922252 39.790724
## [401,] 8.462537 48.250438
## [402,] 18.100017 38.612958
## [403,] 40.485095 16.227880
## [404,] 41.357128 15.355847
## [405,] 41.060965 15.652011
## [406,] 11.507211 45.205764
## [407,] 45.611610 11.101366
## [408,] 45.326365 11.386611
## [409,] 11.342092 45.370883
## [410,] 17.798164 38.914812
## [411,] 40.642342 16.070633
## [412,] 16.700959 40.012017
## [413,] 40.998006 15.714970
## [414,] 39.166810 17.546165
## [415,] 39.324534 17.388441
## [416,] 16.502283 40.210693
## [417,] 40.052772 16.660203
## [418,] 39.781287 16.931688
## [419,] 45.491418 11.221557
## [420,] 40.167569 16.545406
## [421,] 44.141764 12.571211
## [422,] 40.440480 16.272496
## [423,] 46.413372 10.299604
## [424,] 40.087520 16.625456
## [425,] 39.345624 17.367352
## [426,] 42.872903 13.840072
## [427,] 14.150743 42.562233
## [428,] 40.232852 16.480123
## [429,] 39.835103 16.877873
## [430,] 40.232987 16.479988
## [431,] 41.231814 15.481162
## [432,] 40.518414 16.194561
## [433,] 40.245791 16.467185
## [434,] 39.219541 17.493435
## [435,] 43.455687 13.257288
## [436,] 40.894248 15.818727
## [437,] 40.549726 16.163249
## [438,] 39.073660 17.639316
## [439,] 40.724560 15.988415
## [440,] 39.075012 17.637963
## [441,] 42.758726 13.954249
## [442,] 39.917893 16.795082
## [443,] 39.396710 17.316266
## [444,] 41.247325 15.465651
## [445,] 40.138234 16.574741
## [446,] 40.360527 16.352449
## [447,] 46.528963 10.184012
## [448,] 15.429840 41.283136
## [449,] 15.046920 41.666056
## [450,] 17.815924 38.897051
## [451,] 9.742806 46.970169
## [452,] 15.770631 40.942345
## [453,] 17.213111 39.499864
## [454,] 13.287915 43.425060
## [455,] 13.530525 43.182451
## [456,] 15.945589 40.767387
## [457,] 16.166797 40.546178
## [458,] 14.949651 41.763325
## [459,] 12.289696 44.423280
## [460,] 9.831883 46.881093
## [461,] 16.486741 40.226235
## [462,] 17.176570 39.536405
## [463,] 11.398470 45.314506
## [464,] 8.843713 47.869262
## [465,] 16.359905 40.353071
## [466,] 10.331381 46.381594
## [467,] 15.855439 40.857537
## [468,] 44.274543 12.438432
## [469,] 44.179938 12.533037
## [470,] 44.653472 12.059503
## [471,] 16.029224 40.683752
## [472,] 14.218191 42.494784
## [473,] 15.460261 41.252714
## [474,] 9.799023 46.913952
## [475,] 14.930952 41.782024
## [476,] 16.848789 39.864186
## [477,] 13.934914 42.778062
## [478,] 16.662135 40.050841
## [479,] 12.949429 43.763546
## [480,] 43.495625 13.217351
## [481,] 16.615958 40.097017
## [482,] 16.588248 40.124728
## [483,] 18.322208 38.390768
## [484,] 16.274650 40.438325
## [485,] 9.023127 47.689849
## [486,] 12.793774 43.919202
## [487,] 15.603049 41.109927
## [488,] 42.718814 13.994161
## [489,] 38.454828 18.258147
## [490,] 40.489526 16.223449
## [491,] 45.950137 10.762838
## [492,] 41.568239 15.144736
## [493,] 17.790713 38.922262
## [494,] 15.448864 41.264111
## [495,] 43.478189 13.234787
## [496,] 11.966156 44.746819
## [497,] 44.323688 12.389288
## [498,] 41.755059 14.957917
## [499,] 45.682097 11.030878
## [500,] 40.440032 16.272944
## [501,] 16.383585 40.329390
## [502,] 44.870215 11.842760
## [503,] 14.304082 42.408893
## [504,] 38.915553 17.797423
## [505,] 40.186094 16.526882
## [506,] 39.774979 16.937997
## [507,] 17.520022 39.192953
## [508,] 14.362500 42.350475
## [509,] 17.269488 39.443487
## [510,] 41.030742 15.682233
## [511,] 16.323325 40.389651
## [512,] 39.647228 17.065747
## [513,] 17.444534 39.268441
## [514,] 10.804987 45.907989
## [515,] 39.109539 17.603436
## [516,] 39.733432 16.979543
## [517,] 13.527358 43.185618
## [518,] 14.516071 42.196904
## [519,] 41.248066 15.464910
## [520,] 16.986440 39.726535
## [521,] 17.105598 39.607377
## [522,] 14.781496 41.931480
## [523,] 39.092940 17.620036
## [524,] 39.754907 16.958069
## [525,] 40.833049 15.879927
## [526,] 16.198770 40.514205
## [527,] 46.788657 9.924319
## [528,] 41.490335 15.222641
## [529,] 16.688301 40.024675
## [530,] 39.696382 17.016593
## [531,] 42.544582 14.168393
## [532,] 16.613916 40.099059
## [533,] 16.683302 40.029673
## [534,] 48.799685 7.913290
## [535,] 40.697132 16.015844
## [536,] 39.525214 17.187761
## [537,] 16.072117 40.640858
## [538,] 39.695935 17.017041
## [539,] 14.623544 42.089431
## [540,] 39.044648 17.668328
## [541,] 39.928659 16.784317
## [542,] 39.714371 16.998604
## [543,] 42.245444 14.467532
## [544,] 39.984787 16.728189
## [545,] 38.672522 18.040454
## [546,] 40.654793 16.058183
## [547,] 14.571006 42.141970
## [548,] 15.743529 40.969446
## [549,] 42.805768 13.907208
## [550,] 15.613982 41.098993
## [551,] 45.420123 11.292853
## [552,] 42.686093 14.026883
## [553,] 40.676317 16.036658
## [554,] 17.444143 39.268833
## [555,] 41.168457 15.544518
## [556,] 15.511556 41.201419
## [557,] 41.302831 15.410145
## [558,] 17.320774 39.392202
## [559,] 13.628987 43.083989
## [560,] 41.673650 15.039326
## [561,] 13.692129 43.020846
## [562,] 15.277871 41.435105
## [563,] 42.497151 14.215825
## [564,] 40.543216 16.169760
## [565,] 16.352812 40.360164
## [566,] 18.544930 38.168045
## [567,] 39.541857 17.171119
## [568,] 46.784456 9.928519
## [569,] 43.822652 12.890324
## [570,] 15.868165 40.844811
## [571,] 40.818102 15.894873
## [572,] 40.375090 16.337886
## [573,] 44.526715 12.186261
## [574,] 38.925216 17.787760
## [575,] 38.935307 17.777668
## [576,] 42.641153 14.071823
## [577,] 48.652987 8.059988
## [578,] 39.687404 17.025572
## [579,] 41.409094 15.303882
## [580,] 17.652693 39.060282
## [581,] 43.323440 13.389536
## [582,] 45.845442 10.867534
## [583,] 17.591765 39.121210
## [584,] 40.469760 16.243215
## [585,] 15.232856 41.480120
## [586,] 42.493406 14.219570
## [587,] 45.902626 10.810349
## [588,] 18.184074 38.528901
## [589,] 16.589639 40.123337
## [590,] 41.189926 15.523050
## [591,] 17.777558 38.935417
## [592,] 39.995146 16.717829
## [593,] 44.074254 12.638721
## [594,] 47.694887 9.018089
## [595,] 40.986591 15.726384
## [596,] 16.673528 40.039447
## [597,] 46.207421 10.505554
## [598,] 48.178617 8.534359
## [599,] 15.600277 41.112698
## [600,] 7.743186 48.969789
## [601,] 14.577974 42.135001
## [602,] 42.140904 14.572071
## [603,] 17.938006 38.774969
## [604,] 40.514984 16.197991
## [605,] 17.142220 39.570756
## [606,] 43.341310 13.371666
## [607,] 41.016437 15.696538
## [608,] 42.432610 14.280366
## [609,] 13.441184 43.271791
## [610,] 39.833034 16.879941
## [611,] 42.803156 13.909820
## [612,] 10.605669 46.107307
## [613,] 42.941229 13.771746
## [614,] 12.892587 43.820388
## [615,] 17.611585 39.101391
## [616,] 18.340842 38.372133
## [617,] 39.904668 16.808308
## [618,] 18.168566 38.544409
## [619,] 12.465104 44.247871
## [620,] 12.206802 44.506173
## [621,] 39.032284 17.680692
## [622,] 17.435224 39.277751
## [623,] 41.716372 14.996604
## [624,] 16.615957 40.097018
## [625,] 38.825920 17.887055
## [626,] 47.571713 9.141263
## [627,] 16.345455 40.367521
## [628,] 39.259423 17.453553
## [629,] 17.306208 39.406767
## [630,] 15.867135 40.845840
## [631,] 10.553852 46.159124
## [632,] 15.673557 41.039418
## [633,] 40.344147 16.368828
## [634,] 41.973868 14.739107
## [635,] 16.468344 40.244631
## [636,] 40.388609 16.324366
## [637,] 44.874083 11.838893
## [638,] 49.197464 7.515512
## [639,] 41.131513 15.581463
## [640,] 40.597623 16.115353
## [641,] 15.145747 41.567228
## [642,] 16.428731 40.284245
## [643,] 41.938170 14.774805
## [644,] 41.146846 15.566129
## [645,] 40.751791 15.961184
## [646,] 16.956111 39.756864
## [647,] 40.932340 15.780636
## [648,] 12.908527 43.804448
## [649,] 45.438347 11.274628
## [650,] 15.585494 41.127481
## [651,] 39.733489 16.979486
## [652,] 41.117037 15.595938
## [653,] 42.426717 14.286258
## [654,] 39.525587 17.187389
## [655,] 44.512441 12.200535
## [656,] 17.263380 39.449595
## [657,] 16.539118 40.173857
## [658,] 18.234634 38.478342
## [659,] 17.519738 39.193237
## [660,] 15.848679 40.864296
## [661,] 17.020160 39.692816
## [662,] 16.282094 40.430882
## [663,] 12.278382 44.434593
## [664,] 9.364056 47.348919
## [665,] 16.193422 40.519554
## [666,] 17.856578 38.856398
## [667,] 16.010525 40.702450
## [668,] 17.557445 39.155531
## [669,] 17.337711 39.375264
## [670,] 15.722596 40.990379
## [671,] 44.151227 12.561749
## [672,] 41.383506 15.329469
## [673,] 40.441111 16.271865
## [674,] 48.505728 8.207247
## [675,] 42.890948 13.822027
## [676,] 16.519004 40.193971
## [677,] 44.935116 11.777859
## [678,] 43.694015 13.018960
## [679,] 15.470179 41.242797
## [680,] 41.721868 14.991108
## [681,] 14.069065 42.643911
## [682,] 45.662435 11.050541
## [683,] 14.230133 42.482843
## [684,] 47.317972 9.395003
## [685,] 47.180112 9.532864
## [686,] 41.217196 15.495780
## [687,] 47.957359 8.755617
## [688,] 41.085638 15.627337
## [689,] 39.475226 17.237750
## [690,] 45.515224 11.197751
## [691,] 39.063678 17.649297
## [692,] 40.136644 16.576331
## [693,] 47.321821 9.391155
## [694,] 48.511766 8.201209
## [695,] 17.202376 39.510599
## [696,] 43.623547 13.089428
## [697,] 41.235680 15.477296
## [698,] 16.587225 40.125750
## [699,] 41.071650 15.641326
## [700,] 15.931765 40.781210
## [701,] 41.648704 15.064271
## [702,] 40.690834 16.022141
## [703,] 41.423976 15.289000
## [704,] 44.800090 11.912886
## [705,] 46.542954 10.170021
## [706,] 51.478066 5.234909
## [707,] 39.985224 16.727751
## [708,] 51.816131 4.896844
## [709,] 42.156969 14.556006
## [710,] 46.789302 9.923673
## [711,] 44.785147 11.927828
## [712,] 47.033954 9.679021
## [713,] 51.991081 4.721894
## [714,] 17.939534 38.773442
## [715,] 40.388572 16.324403
## [716,] 43.852414 12.860562
## [717,] 42.986149 13.726827
## [718,] 12.582705 44.130271
## [719,] 40.602830 16.110146
## [720,] 41.618293 15.094682
## [721,] 46.949851 9.763124
## [722,] 15.995097 40.717878
## [723,] 16.525116 40.187860
## [724,] 18.088077 38.624899
## [725,] 39.847651 16.865324
## [726,] 43.909733 12.803243
## [727,] 14.420848 42.292128
## [728,] 39.678887 17.034088
## [729,] 15.803214 40.909762
## [730,] 39.073850 17.639125
## [731,] 40.497825 16.215150
## [732,] 40.625114 16.087861
## [733,] 15.386031 41.326945
## [734,] 45.839495 10.873480
## [735,] 41.734782 14.978194
## [736,] 45.550802 11.162173
## [737,] 43.880849 12.832127
## [738,] 15.180581 41.532395
## [739,] 14.204322 42.508653
## [740,] 15.885888 40.827087
## [741,] 46.325575 10.387400
## [742,] 42.856689 13.856287
## [743,] 38.975272 17.737704
## [744,] 46.857229 9.855747
## [745,] 43.221246 13.491729
## [746,] 41.586639 15.126336
## [747,] 40.953283 15.759693
## [748,] 38.964295 17.748681
## [749,] 40.455755 16.257220
## [750,] 42.035816 14.677159
## [751,] 16.199566 40.513409
## [752,] 12.404138 44.308838
## [753,] 41.026138 15.686838
## [754,] 16.911264 39.801711
## [755,] 16.573321 40.139654
## [756,] 45.691797 11.021178
## [757,] 18.327690 38.385286
## [758,] 16.801802 39.911173
## [759,] 45.585617 11.127359
## [760,] 39.847554 16.865421
## [761,] 12.408280 44.304696
## [762,] 39.040940 17.672035
## [763,] 40.427360 16.285615
## [764,] 38.808080 17.904895
## [765,] 41.597075 15.115900
## [766,] 17.046261 39.666715
## [767,] 45.442685 11.270290
## [768,] 45.413882 11.299094
## [769,] 46.519439 10.193537
## [770,] 16.077227 40.635749
## [771,] 45.905112 10.807864
## [772,] 15.892853 40.820123
## [773,] 11.487710 45.225265
## [774,] 38.982141 17.730834
## [775,] 44.170004 12.542972
## [776,] 42.992476 13.720499
## [777,] 42.800140 13.912836
## [778,] 14.918980 41.793995
## [779,] 43.296495 13.416481
## [780,] 43.493131 13.219844
## [781,] 43.447472 13.265504
## [782,] 49.647363 7.065613
## [783,] 40.933615 15.779360
## [784,] 40.979971 15.733004
## [785,] 43.223226 13.489750
## [786,] 14.037143 42.675832
## [787,] 40.253440 16.459536
## [788,] 41.609674 15.103302
## [789,] 40.019841 16.693134
## [790,] 39.021293 17.691682
## [791,] 16.943069 39.769907
## [792,] 38.519345 18.193631
## [793,] 14.188733 42.524242
## [794,] 41.726306 14.986670
## [795,] 40.789262 15.923713
## [796,] 41.968979 14.743996
## [797,] 40.958076 15.754899
## [798,] 38.937311 17.775664
## [799,] 40.583640 16.129336
## [800,] 46.425265 10.287711
## [801,] 39.386938 17.326038
## [802,] 15.254605 41.458371
## [803,] 40.276430 16.436546
## [804,] 13.590664 43.122312
## [805,] 14.138133 42.574842
## [806,] 41.238877 15.474098
## [807,] 39.820370 16.892605
## [808,] 41.018459 15.694516
## [809,] 16.135632 40.577343
## [810,] 42.188299 14.524677
## [811,] 40.576433 16.136542
## [812,] 18.109353 38.603622
## [813,] 15.688920 41.024056
## [814,] 42.645322 14.067653
## [815,] 44.151560 12.561415
## [816,] 16.199682 40.513293
## [817,] 41.125843 15.587133
## [818,] 40.566917 16.146058
## [819,] 49.289136 7.423839
## [820,] 42.936683 13.776292
## [821,] 41.360363 15.352612
## [822,] 38.264808 18.448168
## [823,] 40.270093 16.442882
## [824,] 45.213239 11.499736
## [825,] 15.610357 41.102619
## [826,] 14.175765 42.537211
## [827,] 44.983785 11.729191
## [828,] 44.233821 12.479154
## [829,] 39.689827 17.023148
## [830,] 39.011788 17.701187
## [831,] 40.836809 15.876167
## [832,] 16.677779 40.035196
## [833,] 39.333316 17.379660
## [834,] 41.515173 15.197802
## [835,] 43.290172 13.422803
## [836,] 49.360465 7.352510
## [837,] 43.578182 13.134794
## [838,] 39.200069 17.512907
## [839,] 44.116239 12.596737
## [840,] 42.501050 14.211925
## [841,] 40.092031 16.620945
## [842,] 39.900827 16.812148
## [843,] 45.618491 11.094484
## [844,] 41.857680 14.855295
## [845,] 43.052873 13.660102
## [846,] 40.090241 16.622734
## [847,] 42.678594 14.034381
## [848,] 13.100300 43.612676
## [849,] 41.410645 15.302331
## [850,] 49.399813 7.313162
## [851,] 15.531682 41.181294
## [852,] 41.292767 15.420208
## [853,] 40.655632 16.057343
## [854,] 39.200737 17.512238
## [855,] 41.025748 15.687228
## [856,] 17.769347 38.943629
## [857,] 16.922252 39.790724
## [858,] 40.138020 16.574956
## [859,] 8.462537 48.250438
## [860,] 39.517066 17.195909
## [861,] 18.100017 38.612958
## [862,] 40.485095 16.227880
## [863,] 39.695010 17.017965
## [864,] 41.060965 15.652011
## [865,] 40.893101 15.819874
## [866,] 11.507211 45.205764
## [867,] 45.611610 11.101366
## [868,] 45.326365 11.386611
## [869,] 11.342092 45.370883
## [870,] 41.215841 15.497134
## [871,] 17.798164 38.914812
## [872,] 40.642342 16.070633
## [873,] 16.700959 40.012017
## [874,] 40.998006 15.714970
## [875,] 39.324534 17.388441
## [876,] 16.502283 40.210693
## [877,] 42.486641 14.226335
## [878,] 40.052772 16.660203
## [879,] 39.781287 16.931688
## [880,] 40.167569 16.545406
## [881,] 44.141764 12.571211
## [882,] 40.087520 16.625456
## [883,] 39.345624 17.367352
## [884,] 17.531309 39.181666
## [885,] 39.584891 17.128084
## [886,] 17.076299 39.636676
## [887,] 40.232852 16.480123
## [888,] 41.231814 15.481162
## [889,] 38.916149 17.796827
## [890,] 40.518414 16.194561
## [891,] 39.219541 17.493435
## [892,] 43.455687 13.257288
## [893,] 40.894248 15.818727
## [894,] 38.452624 18.260351
## [895,] 40.549726 16.163249
## [896,] 39.073660 17.639316
## [897,] 40.724560 15.988415
## [898,] 39.075012 17.637963
## [899,] 42.758726 13.954249
## [900,] 39.917893 16.795082
## [901,] 39.396710 17.316266
## [902,] 41.247325 15.465651
## [903,] 40.138234 16.574741
## [904,] 40.360527 16.352449
## [905,] 38.858543 17.854432
## [906,] 46.528963 10.184012
## [907,] 7.507534 49.205442
## [908,] 15.046920 41.666056
## [909,] 16.947548 39.765427
## [910,] 17.815924 38.897051
## [911,] 9.742806 46.970169
## [912,] 41.443213 15.269763
##
## $prob
## [,1] [,2]
## [1,] 0.2780780 0.72192200
## [2,] 0.3035127 0.69648725
## [3,] 0.2343011 0.76569885
## [4,] 0.2811630 0.71883703
## [5,] 0.2850635 0.71493654
## [6,] 0.2636019 0.73639805
## [7,] 0.2166999 0.78330010
## [8,] 0.1733621 0.82663786
## [9,] 0.2907049 0.70929508
## [10,] 0.3100604 0.68993960
## [11,] 0.3000351 0.69996487
## [12,] 0.2009852 0.79901479
## [13,] 0.1559381 0.84406191
## [14,] 0.2884685 0.71153154
## [15,] 0.1821696 0.81783038
## [16,] 0.7806775 0.21932251
## [17,] 0.7790094 0.22099065
## [18,] 0.7873590 0.21264099
## [19,] 0.2826377 0.71736233
## [20,] 0.1456164 0.85438359
## [21,] 0.2726054 0.72739464
## [22,] 0.2366951 0.76330485
## [23,] 0.3160196 0.68398036
## [24,] 0.3176036 0.68239639
## [25,] 0.1727827 0.82721726
## [26,] 0.2457094 0.75429056
## [27,] 0.7669431 0.23305691
## [28,] 0.2929834 0.70701664
## [29,] 0.2924948 0.70750524
## [30,] 0.2869652 0.71303480
## [31,] 0.2582165 0.74178346
## [32,] 0.2255881 0.77441187
## [33,] 0.2751231 0.72487692
## [34,] 0.7292270 0.27077295
## [35,] 0.2271967 0.77280332
## [36,] 0.7532459 0.24675413
## [37,] 0.8102226 0.18977735
## [38,] 0.7329582 0.26704182
## [39,] 0.3136974 0.68630259
## [40,] 0.2724044 0.72759560
## [41,] 0.7666356 0.23336435
## [42,] 0.2927279 0.70727214
## [43,] 0.2109950 0.78900497
## [44,] 0.7815440 0.21845596
## [45,] 0.8054964 0.19450361
## [46,] 0.7130649 0.28693511
## [47,] 0.7911808 0.20881924
## [48,] 0.2238691 0.77613088
## [49,] 0.6861843 0.31381571
## [50,] 0.7085873 0.29141271
## [51,] 0.7013382 0.29866175
## [52,] 0.3089244 0.69107559
## [53,] 0.2532489 0.74675108
## [54,] 0.3045068 0.69549317
## [55,] 0.7234807 0.27651932
## [56,] 0.3075934 0.69240665
## [57,] 0.1905205 0.80947946
## [58,] 0.6896048 0.31039522
## [59,] 0.6867893 0.31321072
## [60,] 0.7006057 0.29939433
## [61,] 0.2385232 0.76147685
## [62,] 0.2559568 0.74404321
## [63,] 0.7273127 0.27268733
## [64,] 0.2995159 0.70048406
## [65,] 0.3016170 0.69838299
## [66,] 0.2606369 0.73936307
## [67,] 0.6893121 0.31068790
## [68,] 0.7009843 0.29901567
## [69,] 0.7199948 0.28000518
## [70,] 0.2856272 0.71437277
## [71,] 0.8250080 0.17499203
## [72,] 0.7315845 0.26841548
## [73,] 0.2942590 0.70574105
## [74,] 0.2575357 0.74246434
## [75,] 0.7489395 0.25106051
## [76,] 0.7501737 0.24982631
## [77,] 0.2929474 0.70705265
## [78,] 0.8604677 0.13953227
## [79,] 0.7175983 0.28240175
## [80,] 0.6959609 0.30403907
## [81,] 0.6969342 0.30306576
## [82,] 0.2833940 0.71660599
## [83,] 0.6999445 0.30005551
## [84,] 0.2578518 0.74214818
## [85,] 0.6884606 0.31153943
## [86,] 0.7040480 0.29595197
## [87,] 0.7002696 0.29973043
## [88,] 0.7448991 0.25510091
## [89,] 0.7050377 0.29496229
## [90,] 0.6883503 0.31164971
## [91,] 0.6818990 0.31810099
## [92,] 0.7168517 0.28314831
## [93,] 0.2569254 0.74307457
## [94,] 0.2320434 0.76795657
## [95,] 0.2776001 0.72239987
## [96,] 0.7547791 0.24522091
## [97,] 0.2753159 0.72468413
## [98,] 0.7526689 0.24733110
## [99,] 0.7172312 0.28276877
## [100,] 0.2870275 0.71297249
## [101,] 0.3075865 0.69241355
## [102,] 0.7259090 0.27409104
## [103,] 0.2735098 0.72649017
## [104,] 0.7282783 0.27172167
## [105,] 0.3054111 0.69458887
## [106,] 0.2403151 0.75968486
## [107,] 0.7348168 0.26518316
## [108,] 0.2414285 0.75857149
## [109,] 0.7493374 0.25066265
## [110,] 0.7148843 0.28511570
## [111,] 0.2883434 0.71165661
## [112,] 0.7402977 0.25970231
## [113,] 0.7887453 0.21125468
## [114,] 0.3269963 0.67300375
## [115,] 0.7348647 0.26513526
## [116,] 0.7727094 0.22729056
## [117,] 0.2797978 0.72020222
## [118,] 0.7197313 0.28026872
## [119,] 0.7119198 0.28808020
## [120,] 0.7851239 0.21487606
## [121,] 0.6865326 0.31346738
## [122,] 0.8578811 0.14211895
## [123,] 0.6997941 0.30020594
## [124,] 0.7301520 0.26984798
## [125,] 0.7158878 0.28411223
## [126,] 0.7639070 0.23609299
## [127,] 0.8083766 0.19162341
## [128,] 0.6708804 0.32911956
## [129,] 0.3101894 0.68981058
## [130,] 0.2802322 0.71976781
## [131,] 0.7135891 0.28641092
## [132,] 0.2685956 0.73140440
## [133,] 0.7492713 0.25072868
## [134,] 0.3206334 0.67936660
## [135,] 0.2925193 0.70748072
## [136,] 0.7678237 0.23217631
## [137,] 0.3174764 0.68252356
## [138,] 0.7771459 0.22285414
## [139,] 0.8409872 0.15901279
## [140,] 0.7227022 0.27729781
## [141,] 0.2939985 0.70600153
## [142,] 0.8147592 0.18524076
## [143,] 0.8495166 0.15048335
## [144,] 0.1365329 0.86346711
## [145,] 0.2570483 0.74295169
## [146,] 0.7430558 0.25694422
## [147,] 0.3022627 0.69773725
## [148,] 0.7642221 0.23577789
## [149,] 0.7232284 0.27677155
## [150,] 0.7481993 0.25180068
## [151,] 0.2370037 0.76299631
## [152,] 0.7023619 0.29763808
## [153,] 0.7547330 0.24526697
## [154,] 0.1920534 0.80794659
## [155,] 0.7571676 0.24283237
## [156,] 0.2273305 0.77266953
## [157,] 0.3105389 0.68946111
## [158,] 0.2777195 0.72228054
## [159,] 0.7036250 0.29637499
## [160,] 0.3203600 0.67964005
## [161,] 0.2197928 0.78020719
## [162,] 0.2152383 0.78476173
## [163,] 0.6882426 0.31175743
## [164,] 0.3074292 0.69257081
## [165,] 0.7355701 0.26442986
## [166,] 0.2929833 0.70701665
## [167,] 0.6846038 0.31539617
## [168,] 0.8388153 0.16118468
## [169,] 0.2882137 0.71178633
## [170,] 0.6922476 0.30775237
## [171,] 0.2797796 0.72022037
## [172,] 0.2763663 0.72363367
## [173,] 0.7113742 0.28862580
## [174,] 0.7401105 0.25988951
## [175,] 0.2903805 0.70961946
## [176,] 0.7172925 0.28270751
## [177,] 0.7152328 0.28476723
## [178,] 0.2094237 0.79057629
## [179,] 0.7121582 0.28784183
## [180,] 0.7912490 0.20875104
## [181,] 0.7252575 0.27474247
## [182,] 0.7158436 0.28415636
## [183,] 0.2670597 0.73294035
## [184,] 0.7762962 0.22370376
## [185,] 0.7394810 0.26051896
## [186,] 0.3193720 0.68062802
## [187,] 0.7255279 0.27447210
## [188,] 0.7185620 0.28143796
## [189,] 0.7217456 0.27825441
## [190,] 0.2276115 0.77238847
## [191,] 0.7320989 0.26790115
## [192,] 0.8011984 0.19880156
## [193,] 0.6780866 0.32191339
## [194,] 0.6842307 0.31576927
## [195,] 0.2748136 0.72518644
## [196,] 0.7006067 0.29939332
## [197,] 0.7480954 0.25190458
## [198,] 0.6969408 0.30305920
## [199,] 0.7848723 0.21512775
## [200,] 0.3043991 0.69560087
## [201,] 0.2916285 0.70837153
## [202,] 0.3215249 0.67847510
## [203,] 0.3089194 0.69108061
## [204,] 0.2794542 0.72054580
## [205,] 0.3001105 0.69988949
## [206,] 0.2870964 0.71290355
## [207,] 0.2165004 0.78349959
## [208,] 0.2823080 0.71769203
## [209,] 0.3095843 0.69041574
## [210,] 0.2772310 0.72276898
## [211,] 0.6833638 0.31663618
## [212,] 0.7785031 0.22149691
## [213,] 0.7297008 0.27029915
## [214,] 0.7130839 0.28691608
## [215,] 0.7562810 0.24371895
## [216,] 0.2907914 0.70920858
## [217,] 0.7070693 0.29293071
## [218,] 0.2912738 0.70872620
## [219,] 0.7923251 0.20767486
## [220,] 0.7704412 0.22955876
## [221,] 0.2727802 0.72721976
## [222,] 0.8507745 0.14922550
## [223,] 0.2480749 0.75192512
## [224,] 0.8051497 0.19485031
## [225,] 0.2509149 0.74908506
## [226,] 0.2999783 0.70002166
## [227,] 0.8343412 0.16565880
## [228,] 0.8319104 0.16808964
## [229,] 0.7267684 0.27323165
## [230,] 0.8456153 0.15438472
## [231,] 0.7244486 0.27555136
## [232,] 0.6960528 0.30394720
## [233,] 0.8025540 0.19744602
## [234,] 0.6887961 0.31120387
## [235,] 0.6884030 0.31159701
## [236,] 0.7077154 0.29228464
## [237,] 0.8344091 0.16559093
## [238,] 0.8073641 0.19263586
## [239,] 0.8553910 0.14460905
## [240,] 0.3033235 0.69667653
## [241,] 0.7691987 0.23080129
## [242,] 0.7270943 0.27290573
## [243,] 0.2924767 0.70752327
## [244,] 0.7242020 0.27579801
## [245,] 0.2809192 0.71908078
## [246,] 0.7343770 0.26562301
## [247,] 0.7304144 0.26958557
## [248,] 0.7899443 0.21005573
## [249,] 0.8206756 0.17932442
## [250,] 0.9076947 0.09230532
## [251,] 0.9297111 0.07028892
## [252,] 0.9136557 0.08634434
## [253,] 0.7433390 0.25666095
## [254,] 0.8250193 0.17498065
## [255,] 0.8293332 0.17066679
## [256,] 0.3163215 0.68367850
## [257,] 0.7121575 0.28784248
## [258,] 0.7732342 0.22676577
## [259,] 0.6945027 0.30549735
## [260,] 0.3073717 0.69262832
## [261,] 0.7579597 0.24204032
## [262,] 0.2218664 0.77813358
## [263,] 0.7159355 0.28406455
## [264,] 0.7338408 0.26615924
## [265,] 0.2820359 0.71796406
## [266,] 0.3189407 0.68105929
## [267,] 0.7026197 0.29738035
## [268,] 0.7075748 0.29242517
## [269,] 0.7587105 0.24128953
## [270,] 0.3064654 0.69353457
## [271,] 0.7423784 0.25762156
## [272,] 0.6996439 0.30035610
## [273,] 0.2786525 0.72134748
## [274,] 0.6889755 0.31102451
## [275,] 0.7140839 0.28591606
## [276,] 0.7163284 0.28367161
## [277,] 0.2712965 0.72870351
## [278,] 0.8082717 0.19172827
## [279,] 0.7358948 0.26410523
## [280,] 0.6951654 0.30483456
## [281,] 0.8031813 0.19681869
## [282,] 0.7737356 0.22626439
## [283,] 0.7338366 0.26616336
## [284,] 0.8129207 0.18707926
## [285,] 0.2676739 0.73232615
## [286,] 0.2504598 0.74954017
## [287,] 0.2801103 0.71988970
## [288,] 0.8168426 0.18315739
## [289,] 0.8262171 0.17378293
## [290,] 0.7621051 0.23789493
## [291,] 0.7332826 0.26671738
## [292,] 0.8545558 0.14544425
## [293,] 0.7221149 0.27788513
## [294,] 0.6870437 0.31295626
## [295,] 0.7412028 0.25879720
## [296,] 0.2856413 0.71435873
## [297,] 0.2187178 0.78128219
## [298,] 0.2981904 0.70180961
## [299,] 0.2922316 0.70776844
## [300,] 0.1791511 0.82084894
## [301,] 0.8056674 0.19433257
## [302,] 0.2962603 0.70373972
## [303,] 0.8037952 0.19620482
## [304,] 0.7564114 0.24358860
## [305,] 0.7026179 0.29738206
## [306,] 0.7038084 0.29619156
## [307,] 0.7006956 0.29930435
## [308,] 0.2187908 0.78120916
## [309,] 0.6883952 0.31160479
## [310,] 0.8017920 0.19820804
## [311,] 0.7128415 0.28715854
## [312,] 0.6842893 0.31571074
## [313,] 0.7334666 0.26653336
## [314,] 0.3005707 0.69942926
## [315,] 0.8012749 0.19872507
## [316,] 0.2834841 0.71651590
## [317,] 0.7007164 0.29928359
## [318,] 0.2025588 0.79744124
## [319,] 0.6873584 0.31264158
## [320,] 0.7580712 0.24192875
## [321,] 0.7546798 0.24532015
## [322,] 0.6687022 0.33129775
## [323,] 0.2630611 0.73693886
## [324,] 0.7634319 0.23656810
## [325,] 0.7668991 0.23310088
## [326,] 0.8754145 0.12458547
## [327,] 0.7217681 0.27823193
## [328,] 0.7225855 0.27741454
## [329,] 0.7621400 0.23786002
## [330,] 0.2475120 0.75248798
## [331,] 0.7041174 0.29588262
## [332,] 0.7097748 0.29022522
## [333,] 0.7336888 0.26631122
## [334,] 0.7056558 0.29434418
## [335,] 0.6880488 0.31195122
## [336,] 0.2987512 0.70124881
## [337,] 0.6650411 0.33495891
## [338,] 0.6791981 0.32080191
## [339,] 0.2501850 0.74981504
## [340,] 0.7357453 0.26425469
## [341,] 0.7400243 0.25997572
## [342,] 0.7221994 0.27780061
## [343,] 0.6865679 0.31343206
## [344,] 0.7155971 0.28440291
## [345,] 0.8186004 0.18139959
## [346,] 0.6944961 0.30550394
## [347,] 0.2689791 0.73102091
## [348,] 0.7101802 0.28981984
## [349,] 0.2396394 0.76036059
## [350,] 0.2492927 0.75070726
## [351,] 0.3080250 0.69197499
## [352,] 0.6621255 0.33787447
## [353,] 0.7248882 0.27511183
## [354,] 0.7021386 0.29786138
## [355,] 0.8543134 0.14568656
## [356,] 0.7232641 0.27673589
## [357,] 0.7438915 0.25610853
## [358,] 0.7154700 0.28452999
## [359,] 0.3193159 0.68068413
## [360,] 0.6867573 0.31324267
## [361,] 0.2766372 0.72336278
## [362,] 0.7519500 0.24804999
## [363,] 0.7785090 0.22149103
## [364,] 0.2856433 0.71435669
## [365,] 0.7251576 0.27484245
## [366,] 0.3031846 0.69681540
## [367,] 0.7153022 0.28469778
## [368,] 0.7570875 0.24291253
## [369,] 0.7292928 0.27070722
## [370,] 0.6747099 0.32529007
## [371,] 0.7100684 0.28993157
## [372,] 0.7972292 0.20277081
## [373,] 0.2752519 0.72474806
## [374,] 0.2499563 0.75004371
## [375,] 0.7276046 0.27239541
## [376,] 0.7799595 0.22004055
## [377,] 0.6998368 0.30016320
## [378,] 0.6878812 0.31211882
## [379,] 0.7200611 0.27993888
## [380,] 0.2940734 0.70592657
## [381,] 0.7320225 0.26797752
## [382,] 0.7633204 0.23667958
## [383,] 0.7946230 0.20537698
## [384,] 0.6888988 0.31110116
## [385,] 0.7683988 0.23160121
## [386,] 0.6912011 0.30879894
## [387,] 0.3145153 0.68548468
## [388,] 0.7069287 0.29307129
## [389,] 0.7035573 0.29644272
## [390,] 0.8043749 0.19562515
## [391,] 0.7591362 0.24086379
## [392,] 0.7525367 0.24746332
## [393,] 0.2309930 0.76900701
## [394,] 0.2738647 0.72613530
## [395,] 0.8555816 0.14441835
## [396,] 0.7281009 0.27189912
## [397,] 0.7168665 0.28313350
## [398,] 0.6912128 0.30878715
## [399,] 0.7233926 0.27660738
## [400,] 0.2983841 0.70161588
## [401,] 0.1492170 0.85078305
## [402,] 0.3191513 0.68084875
## [403,] 0.7138595 0.28614052
## [404,] 0.7292357 0.27076426
## [405,] 0.7240136 0.27598642
## [406,] 0.2029026 0.79709738
## [407,] 0.8042535 0.19574648
## [408,] 0.7992239 0.20077612
## [409,] 0.1999911 0.80000887
## [410,] 0.3138288 0.68617123
## [411,] 0.7166322 0.28336784
## [412,] 0.2944821 0.70551785
## [413,] 0.7229035 0.27709655
## [414,] 0.6906146 0.30938537
## [415,] 0.6933957 0.30660428
## [416,] 0.2909790 0.70902104
## [417,] 0.7062365 0.29376352
## [418,] 0.7014495 0.29855052
## [419,] 0.8021342 0.19786579
## [420,] 0.7082607 0.29173934
## [421,] 0.7783362 0.22166375
## [422,] 0.7130728 0.28692721
## [423,] 0.8183907 0.18160930
## [424,] 0.7068492 0.29315083
## [425,] 0.6937676 0.30623242
## [426,] 0.7559629 0.24403713
## [427,] 0.2495151 0.75048492
## [428,] 0.7094118 0.29058824
## [429,] 0.7023984 0.29760160
## [430,] 0.7094141 0.29058585
## [431,] 0.7270261 0.27297390
## [432,] 0.7144470 0.28555302
## [433,] 0.7096399 0.29036009
## [434,] 0.6915444 0.30845560
## [435,] 0.7662389 0.23376111
## [436,] 0.7210739 0.27892607
## [437,] 0.7149991 0.28500091
## [438,] 0.6889721 0.31102786
## [439,] 0.7180819 0.28191811
## [440,] 0.6889960 0.31100402
## [441,] 0.7539496 0.24605038
## [442,] 0.7038582 0.29614179
## [443,] 0.6946684 0.30533164
## [444,] 0.7272996 0.27270039
## [445,] 0.7077434 0.29225660
## [446,] 0.7116630 0.28833699
## [447,] 0.8204289 0.17957111
## [448,] 0.2720689 0.72793105
## [449,] 0.2653171 0.73468294
## [450,] 0.3141419 0.68585806
## [451,] 0.1717915 0.82820851
## [452,] 0.2780780 0.72192200
## [453,] 0.3035127 0.69648725
## [454,] 0.2343011 0.76569885
## [455,] 0.2385790 0.76142100
## [456,] 0.2811630 0.71883703
## [457,] 0.2850635 0.71493654
## [458,] 0.2636019 0.73639805
## [459,] 0.2166999 0.78330010
## [460,] 0.1733621 0.82663786
## [461,] 0.2907049 0.70929508
## [462,] 0.3028684 0.69713156
## [463,] 0.2009852 0.79901479
## [464,] 0.1559381 0.84406191
## [465,] 0.2884685 0.71153154
## [466,] 0.1821696 0.81783038
## [467,] 0.2795734 0.72042661
## [468,] 0.7806775 0.21932251
## [469,] 0.7790094 0.22099065
## [470,] 0.7873590 0.21264099
## [471,] 0.2826377 0.71736233
## [472,] 0.2507044 0.74929562
## [473,] 0.2726054 0.72739464
## [474,] 0.1727827 0.82721726
## [475,] 0.2632722 0.73672776
## [476,] 0.2970888 0.70291121
## [477,] 0.2457094 0.75429056
## [478,] 0.2937976 0.70620242
## [479,] 0.2283327 0.77166725
## [480,] 0.7669431 0.23305691
## [481,] 0.2929834 0.70701664
## [482,] 0.2924948 0.70750524
## [483,] 0.3230691 0.67693094
## [484,] 0.2869652 0.71303480
## [485,] 0.1591016 0.84089837
## [486,] 0.2255881 0.77441187
## [487,] 0.2751231 0.72487692
## [488,] 0.7532459 0.24675413
## [489,] 0.6780605 0.32193950
## [490,] 0.7139376 0.28606239
## [491,] 0.8102226 0.18977735
## [492,] 0.7329582 0.26704182
## [493,] 0.3136974 0.68630259
## [494,] 0.2724044 0.72759560
## [495,] 0.7666356 0.23336435
## [496,] 0.2109950 0.78900497
## [497,] 0.7815440 0.21845596
## [498,] 0.7362523 0.26374770
## [499,] 0.8054964 0.19450361
## [500,] 0.7130649 0.28693511
## [501,] 0.2888860 0.71111399
## [502,] 0.7911808 0.20881924
## [503,] 0.2522189 0.74778113
## [504,] 0.6861843 0.31381571
## [505,] 0.7085873 0.29141271
## [506,] 0.7013382 0.29866175
## [507,] 0.3089244 0.69107559
## [508,] 0.2532489 0.74675108
## [509,] 0.3045068 0.69549317
## [510,] 0.7234807 0.27651932
## [511,] 0.2878235 0.71217654
## [512,] 0.6990857 0.30091434
## [513,] 0.3075934 0.69240665
## [514,] 0.1905205 0.80947946
## [515,] 0.6896048 0.31039522
## [516,] 0.7006057 0.29939433
## [517,] 0.2385232 0.76147685
## [518,] 0.2559568 0.74404321
## [519,] 0.7273127 0.27268733
## [520,] 0.2995159 0.70048406
## [521,] 0.3016170 0.69838299
## [522,] 0.2606369 0.73936307
## [523,] 0.6893121 0.31068790
## [524,] 0.7009843 0.29901567
## [525,] 0.7199948 0.28000518
## [526,] 0.2856272 0.71437277
## [527,] 0.8250080 0.17499203
## [528,] 0.7315845 0.26841548
## [529,] 0.2942590 0.70574105
## [530,] 0.6999524 0.30004762
## [531,] 0.7501737 0.24982631
## [532,] 0.2929474 0.70705265
## [533,] 0.2941708 0.70582918
## [534,] 0.8604677 0.13953227
## [535,] 0.7175983 0.28240175
## [536,] 0.6969342 0.30306576
## [537,] 0.2833940 0.71660599
## [538,] 0.6999445 0.30005551
## [539,] 0.2578518 0.74214818
## [540,] 0.6884606 0.31153943
## [541,] 0.7040480 0.29595197
## [542,] 0.7002696 0.29973043
## [543,] 0.7448991 0.25510091
## [544,] 0.7050377 0.29496229
## [545,] 0.6818990 0.31810099
## [546,] 0.7168517 0.28314831
## [547,] 0.2569254 0.74307457
## [548,] 0.2776001 0.72239987
## [549,] 0.7547791 0.24522091
## [550,] 0.2753159 0.72468413
## [551,] 0.8008771 0.19912291
## [552,] 0.7526689 0.24733110
## [553,] 0.7172312 0.28276877
## [554,] 0.3075865 0.69241355
## [555,] 0.7259090 0.27409104
## [556,] 0.2735098 0.72649017
## [557,] 0.7282783 0.27172167
## [558,] 0.3054111 0.69458887
## [559,] 0.2403151 0.75968486
## [560,] 0.7348168 0.26518316
## [561,] 0.2414285 0.75857149
## [562,] 0.2693893 0.73061067
## [563,] 0.7493374 0.25066265
## [564,] 0.7148843 0.28511570
## [565,] 0.2883434 0.71165661
## [566,] 0.3269963 0.67300375
## [567,] 0.6972277 0.30277231
## [568,] 0.8249339 0.17506609
## [569,] 0.7727094 0.22729056
## [570,] 0.2797978 0.72020222
## [571,] 0.7197313 0.28026872
## [572,] 0.7119198 0.28808020
## [573,] 0.7851239 0.21487606
## [574,] 0.6863547 0.31364533
## [575,] 0.6865326 0.31346738
## [576,] 0.7518765 0.24812351
## [577,] 0.8578811 0.14211895
## [578,] 0.6997941 0.30020594
## [579,] 0.7301520 0.26984798
## [580,] 0.3112637 0.68873626
## [581,] 0.7639070 0.23609299
## [582,] 0.8083766 0.19162341
## [583,] 0.3101894 0.68981058
## [584,] 0.7135891 0.28641092
## [585,] 0.2685956 0.73140440
## [586,] 0.7492713 0.25072868
## [587,] 0.8093849 0.19061510
## [588,] 0.3206334 0.67936660
## [589,] 0.2925193 0.70748072
## [590,] 0.7262875 0.27371249
## [591,] 0.3134654 0.68653455
## [592,] 0.7052204 0.29477962
## [593,] 0.7771459 0.22285414
## [594,] 0.8409872 0.15901279
## [595,] 0.7227022 0.27729781
## [596,] 0.2939985 0.70600153
## [597,] 0.8147592 0.18524076
## [598,] 0.8495166 0.15048335
## [599,] 0.2750742 0.72492578
## [600,] 0.1365329 0.86346711
## [601,] 0.2570483 0.74295169
## [602,] 0.7430558 0.25694422
## [603,] 0.3162946 0.68370542
## [604,] 0.7143865 0.28561349
## [605,] 0.3022627 0.69773725
## [606,] 0.7642221 0.23577789
## [607,] 0.7232284 0.27677155
## [608,] 0.7481993 0.25180068
## [609,] 0.2370037 0.76299631
## [610,] 0.7023619 0.29763808
## [611,] 0.7547330 0.24526697
## [612,] 0.1870060 0.81299396
## [613,] 0.7571676 0.24283237
## [614,] 0.2273305 0.77266953
## [615,] 0.3105389 0.68946111
## [616,] 0.3233976 0.67660236
## [617,] 0.7036250 0.29637499
## [618,] 0.3203600 0.67964005
## [619,] 0.2197928 0.78020719
## [620,] 0.2152383 0.78476173
## [621,] 0.6882426 0.31175743
## [622,] 0.3074292 0.69257081
## [623,] 0.7355701 0.26442986
## [624,] 0.2929833 0.70701665
## [625,] 0.6846038 0.31539617
## [626,] 0.8388153 0.16118468
## [627,] 0.2882137 0.71178633
## [628,] 0.6922476 0.30775237
## [629,] 0.3051543 0.69484570
## [630,] 0.2797796 0.72022037
## [631,] 0.1860924 0.81390764
## [632,] 0.2763663 0.72363367
## [633,] 0.7113742 0.28862580
## [634,] 0.7401105 0.25988951
## [635,] 0.2903805 0.70961946
## [636,] 0.7121582 0.28784183
## [637,] 0.7912490 0.20875104
## [638,] 0.8674816 0.13251838
## [639,] 0.7252575 0.27474247
## [640,] 0.7158436 0.28415636
## [641,] 0.2670597 0.73294035
## [642,] 0.2896820 0.71031796
## [643,] 0.7394810 0.26051896
## [644,] 0.7255279 0.27447210
## [645,] 0.7185620 0.28143796
## [646,] 0.2989812 0.70101884
## [647,] 0.7217456 0.27825441
## [648,] 0.2276115 0.77238847
## [649,] 0.8011984 0.19880156
## [650,] 0.2748136 0.72518644
## [651,] 0.7006067 0.29939332
## [652,] 0.7250023 0.27499771
## [653,] 0.7480954 0.25190458
## [654,] 0.6969408 0.30305920
## [655,] 0.7848723 0.21512775
## [656,] 0.3043991 0.69560087
## [657,] 0.2916285 0.70837153
## [658,] 0.3215249 0.67847510
## [659,] 0.3089194 0.69108061
## [660,] 0.2794542 0.72054580
## [661,] 0.3001105 0.69988949
## [662,] 0.2870964 0.71290355
## [663,] 0.2165004 0.78349959
## [664,] 0.1651131 0.83488688
## [665,] 0.2855329 0.71446707
## [666,] 0.3148588 0.68514123
## [667,] 0.2823080 0.71769203
## [668,] 0.3095843 0.69041574
## [669,] 0.3057098 0.69429022
## [670,] 0.2772310 0.72276898
## [671,] 0.7785031 0.22149691
## [672,] 0.7297008 0.27029915
## [673,] 0.7130839 0.28691608
## [674,] 0.8552845 0.14471551
## [675,] 0.7562810 0.24371895
## [676,] 0.2912738 0.70872620
## [677,] 0.7923251 0.20767486
## [678,] 0.7704412 0.22955876
## [679,] 0.2727802 0.72721976
## [680,] 0.7356671 0.26433294
## [681,] 0.2480749 0.75192512
## [682,] 0.8051497 0.19485031
## [683,] 0.2509149 0.74908506
## [684,] 0.8343412 0.16565880
## [685,] 0.8319104 0.16808964
## [686,] 0.7267684 0.27323165
## [687,] 0.8456153 0.15438472
## [688,] 0.7244486 0.27555136
## [689,] 0.6960528 0.30394720
## [690,] 0.8025540 0.19744602
## [691,] 0.6887961 0.31120387
## [692,] 0.7077154 0.29228464
## [693,] 0.8344091 0.16559093
## [694,] 0.8553910 0.14460905
## [695,] 0.3033235 0.69667653
## [696,] 0.7691987 0.23080129
## [697,] 0.7270943 0.27290573
## [698,] 0.2924767 0.70752327
## [699,] 0.7242020 0.27579801
## [700,] 0.2809192 0.71908078
## [701,] 0.7343770 0.26562301
## [702,] 0.7174872 0.28251279
## [703,] 0.7304144 0.26958557
## [704,] 0.7899443 0.21005573
## [705,] 0.8206756 0.17932442
## [706,] 0.9076947 0.09230532
## [707,] 0.7050454 0.29495457
## [708,] 0.9136557 0.08634434
## [709,] 0.7433390 0.25666095
## [710,] 0.8250193 0.17498065
## [711,] 0.7896808 0.21031921
## [712,] 0.8293332 0.17066679
## [713,] 0.9167405 0.08325950
## [714,] 0.3163215 0.68367850
## [715,] 0.7121575 0.28784248
## [716,] 0.7732342 0.22676577
## [717,] 0.7579597 0.24204032
## [718,] 0.2218664 0.77813358
## [719,] 0.7159355 0.28406455
## [720,] 0.7338408 0.26615924
## [721,] 0.8278503 0.17214975
## [722,] 0.2820359 0.71796406
## [723,] 0.2913816 0.70861843
## [724,] 0.3189407 0.68105929
## [725,] 0.7026197 0.29738035
## [726,] 0.7742449 0.22575509
## [727,] 0.2542778 0.74572225
## [728,] 0.6996439 0.30035610
## [729,] 0.2786525 0.72134748
## [730,] 0.6889755 0.31102451
## [731,] 0.7140839 0.28591606
## [732,] 0.7163284 0.28367161
## [733,] 0.2712965 0.72870351
## [734,] 0.8082717 0.19172827
## [735,] 0.7358948 0.26410523
## [736,] 0.8031813 0.19681869
## [737,] 0.7737356 0.22626439
## [738,] 0.2676739 0.73232615
## [739,] 0.2504598 0.74954017
## [740,] 0.2801103 0.71988970
## [741,] 0.8168426 0.18315739
## [742,] 0.7556770 0.24432305
## [743,] 0.6872373 0.31276271
## [744,] 0.8262171 0.17378293
## [745,] 0.7621051 0.23789493
## [746,] 0.7332826 0.26671738
## [747,] 0.7221149 0.27788513
## [748,] 0.6870437 0.31295626
## [749,] 0.7133421 0.28665786
## [750,] 0.7412028 0.25879720
## [751,] 0.2856413 0.71435873
## [752,] 0.2187178 0.78128219
## [753,] 0.7233995 0.27660051
## [754,] 0.2981904 0.70180961
## [755,] 0.2922316 0.70776844
## [756,] 0.8056674 0.19433257
## [757,] 0.3231657 0.67683428
## [758,] 0.2962603 0.70373972
## [759,] 0.8037952 0.19620482
## [760,] 0.7026179 0.29738206
## [761,] 0.2187908 0.78120916
## [762,] 0.6883952 0.31160479
## [763,] 0.7128415 0.28715854
## [764,] 0.6842893 0.31571074
## [765,] 0.7334666 0.26653336
## [766,] 0.3005707 0.69942926
## [767,] 0.8012749 0.19872507
## [768,] 0.8007670 0.19923296
## [769,] 0.8202609 0.17973906
## [770,] 0.2834841 0.71651590
## [771,] 0.8094287 0.19057127
## [772,] 0.2802331 0.71976690
## [773,] 0.2025588 0.79744124
## [774,] 0.6873584 0.31264158
## [775,] 0.7788342 0.22116582
## [776,] 0.7580712 0.24192875
## [777,] 0.7546798 0.24532015
## [778,] 0.2630611 0.73693886
## [779,] 0.7634319 0.23656810
## [780,] 0.7668991 0.23310088
## [781,] 0.7660940 0.23390597
## [782,] 0.8754145 0.12458547
## [783,] 0.7217681 0.27823193
## [784,] 0.7225855 0.27741454
## [785,] 0.7621400 0.23786002
## [786,] 0.2475120 0.75248798
## [787,] 0.7097748 0.29022522
## [788,] 0.7336888 0.26631122
## [789,] 0.7056558 0.29434418
## [790,] 0.6880488 0.31195122
## [791,] 0.2987512 0.70124881
## [792,] 0.6791981 0.32080191
## [793,] 0.2501850 0.74981504
## [794,] 0.7357453 0.26425469
## [795,] 0.7192227 0.28077725
## [796,] 0.7400243 0.25997572
## [797,] 0.7221994 0.27780061
## [798,] 0.6865679 0.31343206
## [799,] 0.7155971 0.28440291
## [800,] 0.8186004 0.18139959
## [801,] 0.6944961 0.30550394
## [802,] 0.2689791 0.73102091
## [803,] 0.7101802 0.28981984
## [804,] 0.2396394 0.76036059
## [805,] 0.2492927 0.75070726
## [806,] 0.7271507 0.27284935
## [807,] 0.7021386 0.29786138
## [808,] 0.7232641 0.27673589
## [809,] 0.2845139 0.71548606
## [810,] 0.7438915 0.25610853
## [811,] 0.7154700 0.28452999
## [812,] 0.3193159 0.68068413
## [813,] 0.2766372 0.72336278
## [814,] 0.7519500 0.24804999
## [815,] 0.7785090 0.22149103
## [816,] 0.2856433 0.71435669
## [817,] 0.7251576 0.27484245
## [818,] 0.7153022 0.28469778
## [819,] 0.8690980 0.13090196
## [820,] 0.7570875 0.24291253
## [821,] 0.7292928 0.27070722
## [822,] 0.6747099 0.32529007
## [823,] 0.7100684 0.28993157
## [824,] 0.7972292 0.20277081
## [825,] 0.2752519 0.72474806
## [826,] 0.2499563 0.75004371
## [827,] 0.7931833 0.20681671
## [828,] 0.7799595 0.22004055
## [829,] 0.6998368 0.30016320
## [830,] 0.6878812 0.31211882
## [831,] 0.7200611 0.27993888
## [832,] 0.2940734 0.70592657
## [833,] 0.6935506 0.30644944
## [834,] 0.7320225 0.26797752
## [835,] 0.7633204 0.23667958
## [836,] 0.8703558 0.12964423
## [837,] 0.7683988 0.23160121
## [838,] 0.6912011 0.30879894
## [839,] 0.7778862 0.22211384
## [840,] 0.7494061 0.25059389
## [841,] 0.7069287 0.29307129
## [842,] 0.7035573 0.29644272
## [843,] 0.8043749 0.19562515
## [844,] 0.7380618 0.26193821
## [845,] 0.7591362 0.24086379
## [846,] 0.7068972 0.29310284
## [847,] 0.7525367 0.24746332
## [848,] 0.2309930 0.76900701
## [849,] 0.7301794 0.26982063
## [850,] 0.8710496 0.12895043
## [851,] 0.2738647 0.72613530
## [852,] 0.7281009 0.27189912
## [853,] 0.7168665 0.28313350
## [854,] 0.6912128 0.30878715
## [855,] 0.7233926 0.27660738
## [856,] 0.3133207 0.68667934
## [857,] 0.2983841 0.70161588
## [858,] 0.7077396 0.29226038
## [859,] 0.1492170 0.85078305
## [860,] 0.6967906 0.30320944
## [861,] 0.3191513 0.68084875
## [862,] 0.7138595 0.28614052
## [863,] 0.6999282 0.30007181
## [864,] 0.7240136 0.27598642
## [865,] 0.7210537 0.27894629
## [866,] 0.2029026 0.79709738
## [867,] 0.8042535 0.19574648
## [868,] 0.7992239 0.20077612
## [869,] 0.1999911 0.80000887
## [870,] 0.7267445 0.27325554
## [871,] 0.3138288 0.68617123
## [872,] 0.7166322 0.28336784
## [873,] 0.2944821 0.70551785
## [874,] 0.7229035 0.27709655
## [875,] 0.6933957 0.30660428
## [876,] 0.2909790 0.70902104
## [877,] 0.7491520 0.25084797
## [878,] 0.7062365 0.29376352
## [879,] 0.7014495 0.29855052
## [880,] 0.7082607 0.29173934
## [881,] 0.7783362 0.22166375
## [882,] 0.7068492 0.29315083
## [883,] 0.6937676 0.30623242
## [884,] 0.3091234 0.69087658
## [885,] 0.6979865 0.30201350
## [886,] 0.3011004 0.69889960
## [887,] 0.7094118 0.29058824
## [888,] 0.7270261 0.27297390
## [889,] 0.6861948 0.31380520
## [890,] 0.7144470 0.28555302
## [891,] 0.6915444 0.30845560
## [892,] 0.7662389 0.23376111
## [893,] 0.7210739 0.27892607
## [894,] 0.6780216 0.32197837
## [895,] 0.7149991 0.28500091
## [896,] 0.6889721 0.31102786
## [897,] 0.7180819 0.28191811
## [898,] 0.6889960 0.31100402
## [899,] 0.7539496 0.24605038
## [900,] 0.7038582 0.29614179
## [901,] 0.6946684 0.30533164
## [902,] 0.7272996 0.27270039
## [903,] 0.7077434 0.29225660
## [904,] 0.7116630 0.28833699
## [905,] 0.6851791 0.31482094
## [906,] 0.8204289 0.17957111
## [907,] 0.1323777 0.86762229
## [908,] 0.2653171 0.73468294
## [909,] 0.2988302 0.70116983
## [910,] 0.3141419 0.68585806
## [911,] 0.1717915 0.82820851
## [912,] 0.7307536 0.26924637
##
## $class
## [1] "M" "M" "M" "M" "M" "M" "M" "M" "M" "M" "M" "M" "M" "M" "M" "B" "B" "B"
## [19] "M" "M" "M" "M" "M" "M" "M" "M" "B" "M" "M" "M" "M" "M" "M" "B" "M" "B"
## [37] "B" "B" "M" "M" "B" "M" "M" "B" "B" "B" "B" "M" "B" "B" "B" "M" "M" "M"
## [55] "B" "M" "M" "B" "B" "B" "M" "M" "B" "M" "M" "M" "B" "B" "B" "M" "B" "B"
## [73] "M" "M" "B" "B" "M" "B" "B" "B" "B" "M" "B" "M" "B" "B" "B" "B" "B" "B"
## [91] "B" "B" "M" "M" "M" "B" "M" "B" "B" "M" "M" "B" "M" "B" "M" "M" "B" "M"
## [109] "B" "B" "M" "B" "B" "M" "B" "B" "M" "B" "B" "B" "B" "B" "B" "B" "B" "B"
## [127] "B" "B" "M" "M" "B" "M" "B" "M" "M" "B" "M" "B" "B" "B" "M" "B" "B" "M"
## [145] "M" "B" "M" "B" "B" "B" "M" "B" "B" "M" "B" "M" "M" "M" "B" "M" "M" "M"
## [163] "B" "M" "B" "M" "B" "B" "M" "B" "M" "M" "B" "B" "M" "B" "B" "M" "B" "B"
## [181] "B" "B" "M" "B" "B" "M" "B" "B" "B" "M" "B" "B" "B" "B" "M" "B" "B" "B"
## [199] "B" "M" "M" "M" "M" "M" "M" "M" "M" "M" "M" "M" "B" "B" "B" "B" "B" "M"
## [217] "B" "M" "B" "B" "M" "B" "M" "B" "M" "M" "B" "B" "B" "B" "B" "B" "B" "B"
## [235] "B" "B" "B" "B" "B" "M" "B" "B" "M" "B" "M" "B" "B" "B" "B" "B" "B" "B"
## [253] "B" "B" "B" "M" "B" "B" "B" "M" "B" "M" "B" "B" "M" "M" "B" "B" "B" "M"
## [271] "B" "B" "M" "B" "B" "B" "M" "B" "B" "B" "B" "B" "B" "B" "M" "M" "M" "B"
## [289] "B" "B" "B" "B" "B" "B" "B" "M" "M" "M" "M" "M" "B" "M" "B" "B" "B" "B"
## [307] "B" "M" "B" "B" "B" "B" "B" "M" "B" "M" "B" "M" "B" "B" "B" "B" "M" "B"
## [325] "B" "B" "B" "B" "B" "M" "B" "B" "B" "B" "B" "M" "B" "B" "M" "B" "B" "B"
## [343] "B" "B" "B" "B" "M" "B" "M" "M" "M" "B" "B" "B" "B" "B" "B" "B" "M" "B"
## [361] "M" "B" "B" "M" "B" "M" "B" "B" "B" "B" "B" "B" "M" "M" "B" "B" "B" "B"
## [379] "B" "M" "B" "B" "B" "B" "B" "B" "M" "B" "B" "B" "B" "B" "M" "M" "B" "B"
## [397] "B" "B" "B" "M" "M" "M" "B" "B" "B" "M" "B" "B" "M" "M" "B" "M" "B" "B"
## [415] "B" "M" "B" "B" "B" "B" "B" "B" "B" "B" "B" "B" "M" "B" "B" "B" "B" "B"
## [433] "B" "B" "B" "B" "B" "B" "B" "B" "B" "B" "B" "B" "B" "B" "B" "M" "M" "M"
## [451] "M" "M" "M" "M" "M" "M" "M" "M" "M" "M" "M" "M" "M" "M" "M" "M" "M" "B"
## [469] "B" "B" "M" "M" "M" "M" "M" "M" "M" "M" "M" "B" "M" "M" "M" "M" "M" "M"
## [487] "M" "B" "B" "B" "B" "B" "M" "M" "B" "M" "B" "B" "B" "B" "M" "B" "M" "B"
## [505] "B" "B" "M" "M" "M" "B" "M" "B" "M" "M" "B" "B" "M" "M" "B" "M" "M" "M"
## [523] "B" "B" "B" "M" "B" "B" "M" "B" "B" "M" "M" "B" "B" "B" "M" "B" "M" "B"
## [541] "B" "B" "B" "B" "B" "B" "M" "M" "B" "M" "B" "B" "B" "M" "B" "M" "B" "M"
## [559] "M" "B" "M" "M" "B" "B" "M" "M" "B" "B" "B" "M" "B" "B" "B" "B" "B" "B"
## [577] "B" "B" "B" "M" "B" "B" "M" "B" "M" "B" "B" "M" "M" "B" "M" "B" "B" "B"
## [595] "B" "M" "B" "B" "M" "M" "M" "B" "M" "B" "M" "B" "B" "B" "M" "B" "B" "M"
## [613] "B" "M" "M" "M" "B" "M" "M" "M" "B" "M" "B" "M" "B" "B" "M" "B" "M" "M"
## [631] "M" "M" "B" "B" "M" "B" "B" "B" "B" "B" "M" "M" "B" "B" "B" "M" "B" "M"
## [649] "B" "M" "B" "B" "B" "B" "B" "M" "M" "M" "M" "M" "M" "M" "M" "M" "M" "M"
## [667] "M" "M" "M" "M" "B" "B" "B" "B" "B" "M" "B" "B" "M" "B" "M" "B" "M" "B"
## [685] "B" "B" "B" "B" "B" "B" "B" "B" "B" "B" "M" "B" "B" "M" "B" "M" "B" "B"
## [703] "B" "B" "B" "B" "B" "B" "B" "B" "B" "B" "B" "M" "B" "B" "B" "M" "B" "B"
## [721] "B" "M" "M" "M" "B" "B" "M" "B" "M" "B" "B" "B" "M" "B" "B" "B" "B" "M"
## [739] "M" "M" "B" "B" "B" "B" "B" "B" "B" "B" "B" "B" "M" "M" "B" "M" "M" "B"
## [757] "M" "M" "B" "B" "M" "B" "B" "B" "B" "M" "B" "B" "B" "M" "B" "M" "M" "B"
## [775] "B" "B" "B" "M" "B" "B" "B" "B" "B" "B" "B" "M" "B" "B" "B" "B" "M" "B"
## [793] "M" "B" "B" "B" "B" "B" "B" "B" "B" "M" "B" "M" "M" "B" "B" "B" "M" "B"
## [811] "B" "M" "M" "B" "B" "M" "B" "B" "B" "B" "B" "B" "B" "B" "M" "M" "B" "B"
## [829] "B" "B" "B" "M" "B" "B" "B" "B" "B" "B" "B" "B" "B" "B" "B" "B" "B" "B"
## [847] "B" "M" "B" "B" "M" "B" "B" "B" "B" "M" "M" "B" "M" "B" "M" "B" "B" "B"
## [865] "B" "M" "B" "B" "M" "B" "M" "B" "M" "B" "B" "M" "B" "B" "B" "B" "B" "B"
## [883] "B" "M" "B" "M" "B" "B" "B" "B" "B" "B" "B" "B" "B" "B" "B" "B" "B" "B"
## [901] "B" "B" "B" "B" "B" "B" "M" "M" "M" "M" "M" "B"
##
## $importance
## compactness_se smoothness_mean smoothness_worst symmetry_worst
## 17.98418 16.04764 16.93026 18.55361
## texture_mean texture_worst
## 16.81249 13.67182
##
## $terms
## .outcome ~ texture_mean + smoothness_mean + compactness_se +
## texture_worst + smoothness_worst + symmetry_worst
## attr(,"variables")
## list(.outcome, texture_mean, smoothness_mean, compactness_se,
## texture_worst, smoothness_worst, symmetry_worst)
## attr(,"factors")
## texture_mean smoothness_mean compactness_se texture_worst
## .outcome 0 0 0 0
## texture_mean 1 0 0 0
## smoothness_mean 0 1 0 0
## compactness_se 0 0 1 0
## texture_worst 0 0 0 1
## smoothness_worst 0 0 0 0
## symmetry_worst 0 0 0 0
## smoothness_worst symmetry_worst
## .outcome 0 0
## texture_mean 0 0
## smoothness_mean 0 0
## compactness_se 0 0
## texture_worst 0 0
## smoothness_worst 1 0
## symmetry_worst 0 1
## attr(,"term.labels")
## [1] "texture_mean" "smoothness_mean" "compactness_se" "texture_worst"
## [5] "smoothness_worst" "symmetry_worst"
## attr(,"order")
## [1] 1 1 1 1 1 1
## attr(,"intercept")
## [1] 1
## attr(,"response")
## [1] 1
## attr(,".Environment")
## <environment: 0x0000024d60f9b4c0>
## attr(,"predvars")
## list(.outcome, texture_mean, smoothness_mean, compactness_se,
## texture_worst, smoothness_worst, symmetry_worst)
## attr(,"dataClasses")
## .outcome texture_mean smoothness_mean compactness_se
## "factor" "numeric" "numeric" "numeric"
## texture_worst smoothness_worst symmetry_worst
## "numeric" "numeric" "numeric"
##
## $call
## (function (formula, data, boos = TRUE, mfinal = 100, coeflearn = "Breiman",
## control, ...)
## {
## if (!(as.character(coeflearn) %in% c("Freund", "Breiman",
## "Zhu"))) {
## stop("coeflearn must be 'Freund', 'Breiman' or 'Zhu' ")
## }
## formula <- as.formula(formula)
## vardep <- data[, as.character(formula[[2]])]
## n <- length(data[, 1])
## nclases <- nlevels(vardep)
## pesos <- rep(1/n, n)
## guardarpesos <- array(0, c(n, mfinal))
## w <- rep(1/n, n)
## data <- cbind(pesos, data)
## arboles <- list()
## pond <- rep(0, mfinal)
## pred <- data.frame(rep(0, n))
## arboles[[1]] <- rpart(formula, data = data[, -1], control = rpart.control(minsplit = 1,
## cp = -1, maxdepth = 30))
## nvar <- dim(varImp(arboles[[1]], surrogates = FALSE, competes = FALSE))[1]
## imp <- array(0, c(mfinal, nvar))
## for (m in 1:mfinal) {
## if (boos == TRUE) {
## k <- 1
## while (k == 1) {
## boostrap <- sample(1:n, replace = TRUE, prob = pesos)
## fit <- rpart(formula, data = data[boostrap, -1],
## control = control)
## k <- length(fit$frame$var)
## }
## flearn <- predict(fit, newdata = data[, -1], type = "class")
## ind <- as.numeric(vardep != flearn)
## err <- sum(pesos * ind)
## }
## if (boos == FALSE) {
## w <<- pesos
## fit <- rpart(formula = formula, data = data[, -1],
## weights = w, control = control)
## flearn <- predict(fit, data = data[, -1], type = "class")
## ind <- as.numeric(vardep != flearn)
## err <- sum(pesos * ind)
## }
## c <- log((1 - err)/err)
## if (coeflearn == "Breiman") {
## c <- (1/2) * c
## }
## if (coeflearn == "Zhu") {
## c <- c + log(nclases - 1)
## }
## guardarpesos[, m] <- pesos
## pesos <- pesos * exp(c * ind)
## pesos <- pesos/sum(pesos)
## maxerror <- 0.5
## eac <- 0.001
## if (coeflearn == "Zhu") {
## maxerror <- 1 - 1/nclases
## }
## if (err >= maxerror) {
## pesos <- rep(1/n, n)
## maxerror <- maxerror - eac
## c <- log((1 - maxerror)/maxerror)
## if (coeflearn == "Breiman") {
## c <- (1/2) * c
## }
## if (coeflearn == "Zhu") {
## c <- c + log(nclases - 1)
## }
## }
## if (err == 0) {
## pesos <- rep(1/n, n)
## c <- log((1 - eac)/eac)
## if (coeflearn == "Breiman") {
## c <- (1/2) * c
## }
## if (coeflearn == "Zhu") {
## c <- c + log(nclases - 1)
## }
## }
## arboles[[m]] <- fit
## pond[m] <- c
## if (m == 1) {
## pred <- flearn
## }
## else {
## pred <- data.frame(pred, flearn)
## }
## if (length(fit$frame$var) > 1) {
## k <- varImp(fit, surrogates = FALSE, competes = FALSE)
## imp[m, ] <- k[sort(row.names(k)), ]
## }
## else {
## imp[m, ] <- rep(0, nvar)
## }
## }
## classfinal <- array(0, c(n, nlevels(vardep)))
## for (i in 1:nlevels(vardep)) {
## classfinal[, i] <- matrix(as.numeric(pred == levels(vardep)[i]),
## nrow = n) %*% as.vector(pond)
## }
## predclass <- rep("O", n)
## predclass[] <- apply(classfinal, 1, FUN = select, vardep.summary = summary(vardep))
## imppond <- as.vector(as.vector(pond) %*% imp)
## imppond <- imppond/sum(imppond) * 100
## names(imppond) <- sort(row.names(k))
## votosporc <- classfinal/apply(classfinal, 1, sum)
## ans <- list(formula = formula, trees = arboles, weights = pond,
## votes = classfinal, prob = votosporc, class = predclass,
## importance = imppond)
## attr(ans, "vardep.summary") <- summary(vardep, maxsum = 700)
## mf <- model.frame(formula = formula, data = data[, -1])
## terms <- attr(mf, "terms")
## ans$terms <- terms
## ans$call <- match.call()
## class(ans) <- "boosting"
## ans
## })(formula = .outcome ~ ., data = list(texture_mean = c(2.33988087773774,
## 2.87751164216656, 3.05635689537043, 2.75366071235426, 2.99473177322041,
## 3.03639425527288, 3.08282698040492, 3.17971910966701, 3.14587493198371,
## 2.88424189752063, 3.21084365317094, 3.11839228628988, 3.31563949330051,
## 3.0022112396517, 3.02916704964023, 2.66444656362008, 2.75429745226753,
## 2.52091708731103, 2.65745841498615, 3.0624559055969, 2.79728133483015,
## 3.06944731137627, 3.00815479355255, 2.71137799119488, 2.92852352386054,
## 2.88368276974537, 2.91343703082716, 3.22684399451738, 3.03591406318682,
## 3.06105173967463, 3.21124679770371, 3.08236858021354, 2.86789890204411,
## 2.82375700881418, 2.9263821954192, 2.68307421503203, 2.79361608943186,
## 2.90361698464619, 2.92852352386054, 3.09195113129453, 2.93119375241642,
## 2.92154737536461, 3.07223024452672, 2.96062309644042, 2.70001802940495,
## 3.04356960296815, 2.62900699376176, 3.17136484219715, 3.04499851485691,
## 2.94654202936322, 2.85243910372751, 3.05917644611053, 3.19948911106801,
## 2.75937682826755, 2.80457176809283, 2.78192004966867, 3.17680304844629,
## 2.89037175789616, 3.04309284491383, 2.7638002162067, 3.21526932927409,
## 3.26918863874179, 2.75047091698616, 2.91885122921803, 3.06619073720255,
## 3.2023398562281, 3.08190996979504, 2.7239235502585, 3.17888681665184,
## 3.12500460925813, 2.69192081917233, 2.90690105984738, 2.9871959425317,
## 3.13679771383259, 2.88144312715186, 2.55256529826182, 2.98416563718253,
## 2.59749101053515, 3.02140002030257, 2.96527306606928, 2.95958682691764,
## 2.74470351875025, 2.91993056013771, 2.97909463240097, 3.0568273729138,
## 2.83262493568384, 3.03302805829769, 2.97807733831527, 3.00518743232475,
## 2.76190687389292, 3.06944731137627, 2.75747508442973, 2.8136106967627,
## 3.1315734964654, 2.99623214859564, 2.38139627341834, 3.00568260440716,
## 2.79667139275574, 2.84549061022345, 3.20639830335709, 2.93969088267037,
## 2.79667139275574, 3.2236643416, 2.58701187272515, 2.96938829821439,
## 3.06991167172824, 2.63404478779171, 3.08694315360738, 2.8136106967627,
## 2.73371794785079, 2.86619290219901, 2.59450815970308, 2.48240351956988,
## 2.89314568477889, 2.76757618041624, 2.68444033546308, 2.80819714970715,
## 2.93225985059842, 2.71997877196748, 2.88535921607262, 3.03013370027132,
## 2.57108434602905, 2.73046379593911, 2.88703285663065, 2.96836107675786,
## 2.54474665014402, 2.56186769092413, 3.00469201492546, 2.76883167336207,
## 2.89867056071086, 3.10099278421148, 3.09285898428471, 2.98365969231972,
## 2.9338568698359, 3.20599319903719, 2.51688969564105, 2.97705900828837,
## 2.71800053195538, 2.67069441455844, 2.89369954798884, 3.00121720378456,
## 3.10099278421148, 2.56955412384829, 3.27978275977172, 3.01111337559229,
## 2.70270259477561, 2.92208573338569, 2.84432781939476, 2.85589532836619,
## 2.76631910922619, 3.14069804380418, 3.06385810260159, 2.90251989183181,
## 3.29063819109509, 2.79300390698237, 3.10413814739778, 3.0837431508767,
## 3.00667221359233, 2.97348666460667, 2.96114082878437, 3.28353933819392,
## 3.16758253048065, 2.92316158071916, 2.8142103969306, 2.84897089215859,
## 3.00864849882054, 3.11529150861163, 2.55800220485855, 3.09738592728049,
## 2.94127608775793, 3.24102862950933, 2.82908719614504, 2.90962957450058,
## 2.86105737022739, 3.48031658611475, 2.63188884013665, 2.86391369893314,
## 3.00815479355255, 2.83438912314523, 2.60046499042227, 2.74148497718845,
## 3.17680304844629, 3.10593106585207, 3.29879544804407, 3.52075661671979,
## 3.32539566824587, 2.7669478423497, 3.05635689537043, 3.32683296637329,
## 3.67071548348627, 2.74727091425549, 2.71071331852169, 2.90087199253003,
## 3.1684242813721, 3.15700042115011, 2.98870765861703, 2.64688376586472,
## 3.22763733053677, 2.7033726115511, 3.15955035878339, 2.98669152890184,
## 2.83790818836042, 2.96165829322024, 2.83615020372953, 3.35933317756346,
## 2.84897089215859, 3.14415227867226, 3.09693415406296, 2.96424160646262,
## 3.43785069931019, 2.94180393152844, 3.0837431508767, 2.78562833574758,
## 3.01504458458636, 2.56802155649851, 3.04404613383254, 2.75174805636793,
## 3.19785645764413, 2.8541687092322, 2.65042108826557, 2.99473177322041,
## 2.88144312715186, 3.28091121578765, 2.64048488160644, 2.90032208874933,
## 2.93225985059842, 2.75366071235426, 2.91235066461494, 3.03302805829769,
## 2.57413778351594, 2.99373027088332, 2.93863268151342, 2.98214032003452,
## 2.94968833505258, 2.77383794164021, 2.85991255041146, 2.62321826558551,
## 2.58550584834412, 2.51365606307399, 2.89811944468699, 2.89977188240808,
## 3.1393996233664, 2.9391619220656, 2.99021709286588, 3.17220341666977,
## 2.92369907065416, 3.19826487096408, 2.76127496233951, 2.66722820658195,
## 2.54238908520136, 2.62756295018952, 2.75302356674494, 2.59301339111385,
## 2.37211115564266, 2.82435065679837, 2.93757335938046, 2.9391619220656,
## 2.83321334405622, 2.78377591163035, 2.97858611471902, 2.58926666511224,
## 3.06851794327964, 2.7219531062712, 2.85070650150373, 3.03061667540749,
## 2.74148497718845, 2.96269241947579, 2.98870765861703, 2.94549105711724,
## 3.04452243772342, 2.65535241210176, 2.86391369893314, 3.18924101973851,
## 2.80578168959555, 2.82375700881418, 2.70537997254633, 3.07639017657145,
## 2.73760900334375, 2.68852753461335, 2.9391619220656, 2.69056488676119,
## 2.77446196662146, 2.70537997254633, 2.83732253680635, 2.95595140354215,
## 2.85991255041146, 3.24804620216798, 2.6440448711263, 2.78562833574758,
## 2.74019465442878, 2.90799335924598, 2.89425310460414, 3.07130346040107,
## 2.93598226914822, 2.83026783382646, 3.08099211750481, 3.28952066443753,
## 2.84781214347737, 3.08648663682246, 3.14802408389625, 2.58097411853423,
## 2.85359250639287, 2.77695417974942, 2.77695417974942, 3.00667221359233,
## 3.33967652501391, 2.71800053195538, 2.93545134266906, 2.56186769092413,
## 2.7033726115511, 3.12324559385295, 2.86105737022739, 2.61885462229774,
## 3.14802408389625, 2.64546532591059, 3.14458322028635, 2.82375700881418,
## 3.10368941505908, 2.87469394517693, 2.85991255041146, 2.69665215614984,
## 2.84839168565528, 3.04547436544881, 2.38967979984498, 2.90635446240277,
## 2.7047112998367, 2.92262380173335, 2.69867303928961, 3.06198806933106,
## 3.02819946369149, 2.88591740754678, 2.86619290219901, 2.82316300820271,
## 3.07639017657145, 3.09602999486936, 3.39484390768998, 3.05258508514677,
## 3.07731226054641, 3.04832472367316, 2.49897390699944, 2.94654202936322,
## 2.77383794164021, 2.95125778345216, 2.95073490762326, 3.05776766447344,
## 3.09013294897548, 3.11484775444415, 2.8724340572095, 2.97246364661464,
## 3.08967788639652, 2.97654945413722, 2.97246364661464, 2.77133794033813,
## 2.97552956623647, 2.75110969056266, 2.84490938381941, 2.75937682826755,
## 2.90799335924598, 2.82435065679837, 3.2144661163795, 3.3332753651767,
## 2.87130219517581, 2.96217549002515, 3.02140002030257, 3.06991167172824,
## 3.2188758248682, 3.3403852422654, 2.84199817361195, 3.42491390827947,
## 3.37724616083964, 3.2240623515555, 3.33932197794407, 3.30137704637994,
## 3.26842760369745, 2.91017438519234, 2.90251989183181, 3.0022112396517,
## 3.03206420280138, 2.89591193827178, 3.14974008603334, 2.91723004539903,
## 3.33719205168624, 2.7033726115511, 2.74855214441154, 2.75556971707019,
## 3.02188723103084, 2.97092715463502, 2.89203703721523, 2.95699144523756,
## 2.64333388638252, 2.42303124606991, 2.82435065679837, 2.93492013415723,
## 3.02334744058696, 2.55178617862755, 3.02237420450041, 3.00617753141553,
## 2.89977188240808, 2.85128436918812, 3.05588619637374, 3.19826487096408,
## 2.9871959425317, 2.55489902160804, 2.57566101305646, 2.8402473707136,
## 3.17596832385692, 2.68716699018579, 2.68784749378469, 3.02140002030257,
## 2.94811641961233, 2.92369907065416, 3.0243197304059, 2.90251989183181,
## 2.81540871942271, 2.63188884013665, 3.07269331469012, 2.9274534328007,
## 2.75238601492226, 2.57261223020711, 2.93119375241642, 2.50715725872282,
## 2.57794151575519, 2.598235335095, 2.86562358820697, 2.99673177388707,
## 2.79300390698237, 3.11573506594869, 3.19622113430339, 3.23828621838802,
## 3.23632273847192, 2.67000213346468, 3.23553626576131, 3.33434507467431,
## 3.14544454678232, 2.79422789734326, 2.80819714970715, 3.06712226964066,
## 3.1108450806545, 3.38201456224538, 3.08831145484708, 3.36453339729056,
## 3.31817802594206, 2.97501923195645, 3.32790958589232, 3.12148347885955,
## 3.17513290192028, 3.37997374521053, 3.22246936037833, 3.10861443061066,
## 3.33505757915761, 3.37861088298936, 2.33988087773774, 2.87751164216656,
## 3.05635689537043, 3.01455402779458, 2.75366071235426, 2.99473177322041,
## 3.03639425527288, 3.08282698040492, 3.17971910966701, 3.14587493198371,
## 3.17596832385692, 3.11839228628988, 3.31563949330051, 3.0022112396517,
## 3.02916704964023, 3.09783749649114, 2.66444656362008, 2.75429745226753,
## 2.52091708731103, 2.65745841498615, 3.13723183582769, 2.79728133483015,
## 2.92852352386054, 3.17722014959937, 3.27601201623901, 2.88368276974537,
## 3.07223024452672, 3.07823349506573, 2.91343703082716, 3.22684399451738,
## 3.03591406318682, 3.07176695982999, 3.06105173967463, 3.00963517872298,
## 3.08236858021354, 2.86789890204411, 2.68307421503203, 3.10458667846607,
## 3.07269331469012, 2.79361608943186, 2.90361698464619, 2.92852352386054,
## 3.09195113129453, 2.93119375241642, 3.07223024452672, 2.96062309644042,
## 2.46725171454928, 2.70001802940495, 3.04356960296815, 3.09783749649114,
## 2.62900699376176, 3.17555070012983, 3.04499851485691, 2.94654202936322,
## 2.85243910372751, 3.05917644611053, 3.19948911106801, 2.75937682826755,
## 2.80457176809283, 2.97807733831527, 2.39242579699384, 2.78192004966867,
## 3.17680304844629, 2.89037175789616, 2.7638002162067, 3.21526932927409,
## 3.26918863874179, 2.75047091698616, 2.91885122921803, 3.06619073720255,
## 3.2023398562281, 3.08190996979504, 2.7239235502585, 3.17888681665184,
## 3.12500460925813, 2.69192081917233, 2.90690105984738, 2.9871959425317,
## 2.99272776453369, 2.55256529826182, 2.98416563718253, 3.21807550469743,
## 2.59749101053515, 3.02140002030257, 2.95958682691764, 2.74470351875025,
## 2.91993056013771, 2.97909463240097, 3.0568273729138, 2.83262493568384,
## 3.03302805829769, 2.97807733831527, 3.00518743232475, 3.06944731137627,
## 2.75747508442973, 2.8136106967627, 2.99623214859564, 2.38139627341834,
## 3.00568260440716, 2.38784493694487, 2.79667139275574, 2.84549061022345,
## 2.93969088267037, 2.79667139275574, 3.2236643416, 2.58701187272515,
## 2.96938829821439, 3.06991167172824, 2.63404478779171, 3.08694315360738,
## 3.11218108619724, 2.8136106967627, 2.73371794785079, 2.86619290219901,
## 2.89314568477889, 2.85128436918812, 2.70604819843154, 2.68444033546308,
## 2.80819714970715, 2.93225985059842, 2.71997877196748, 2.88535921607262,
## 3.03399098567108, 3.03013370027132, 2.73046379593911, 2.57108434602905,
## 2.73046379593911, 2.88703285663065, 3.03206420280138, 2.54474665014402,
## 2.56186769092413, 2.76883167336207, 3.10099278421148, 3.09285898428471,
## 2.98365969231972, 2.27315628230323, 2.9338568698359, 3.20599319903719,
## 2.83026783382646, 2.47569771070269, 2.68852753461335, 2.71800053195538,
## 2.67069441455844, 2.89369954798884, 3.00121720378456, 3.10099278421148,
## 2.56955412384829, 3.08511583468868, 3.27978275977172, 3.01111337559229,
## 2.70270259477561, 3.10950728781284, 2.71535677628465, 2.92208573338569,
## 2.84432781939476, 2.85589532836619, 2.76631910922619, 3.14069804380418,
## 3.06385810260159, 2.90251989183181, 3.14458322028635, 2.79300390698237,
## 3.10413814739778, 3.0837431508767, 3.11307076597122, 2.97348666460667,
## 2.96114082878437, 3.28353933819392, 3.16758253048065, 2.92316158071916,
## 2.8142103969306, 2.84897089215859, 3.00864849882054, 3.11529150861163,
## 2.55800220485855, 3.09738592728049, 2.94127608775793, 2.91614779421115,
## 3.24102862950933, 3.17010566049877, 2.82908719614504, 2.90962957450058,
## 2.86105737022739, 3.48031658611475, 2.83438912314523, 2.60046499042227,
## 2.73825604315928, 2.74148497718845, 3.17680304844629, 3.10593106585207,
## 2.94864066602014, 3.52075661671979, 2.7669478423497, 3.05635689537043,
## 3.06619073720255, 3.32683296637329, 3.67071548348627, 2.71071331852169,
## 3.15700042115011, 2.98870765861703, 2.85819285953193, 2.64688376586472,
## 3.22763733053677, 2.7033726115511, 3.15955035878339, 2.98669152890184,
## 2.83790818836042, 2.96165829322024, 2.83615020372953, 3.35933317756346,
## 2.84897089215859, 3.14415227867226, 3.51333488159901, 3.29805662274264,
## 3.13809951484091, 3.09693415406296, 2.96424160646262, 3.09421922026864,
## 3.43785069931019, 3.0837431508767, 2.78562833574758, 3.01504458458636,
## 2.8225686545448, 2.56802155649851, 3.19785645764413, 2.8541687092322,
## 2.65042108826557, 2.99473177322041, 2.71997877196748, 3.28091121578765,
## 2.64048488160644, 2.90032208874933, 2.75366071235426, 2.91235066461494,
## 3.03302805829769, 2.57413778351594, 2.99373027088332, 2.93863268151342,
## 2.98214032003452, 2.94968833505258, 2.85991255041146, 2.62321826558551,
## 2.51365606307399, 2.89811944468699, 2.89977188240808, 3.1393996233664,
## 2.9391619220656, 2.99021709286588, 3.17220341666977, 2.92369907065416,
## 2.89922137317315, 3.19826487096408, 2.76127496233951, 2.66722820658195,
## 2.54238908520136, 2.95021175825218, 2.75302356674494, 2.59301339111385,
## 2.37211115564266, 2.92316158071916, 2.82435065679837, 2.6447553507299,
## 2.93757335938046, 2.9391619220656, 2.83321334405622, 2.58926666511224,
## 3.06851794327964, 2.7219531062712, 2.85070650150373, 2.55567572067621,
## 3.03061667540749, 3.08557297755378, 2.74148497718845, 2.96269241947579,
## 2.69327491552006, 3.06479180948549, 2.86391369893314, 3.18924101973851,
## 2.80578168959555, 2.82375700881418, 2.70537997254633, 3.07639017657145,
## 2.73760900334375, 2.68852753461335, 2.69056488676119, 2.77446196662146,
## 2.95595140354215, 2.85991255041146, 3.24804620216798, 2.6440448711263,
## 2.94811641961233, 2.92262380173335, 2.78562833574758, 2.74019465442878,
## 2.90799335924598, 3.07130346040107, 2.93598226914822, 2.90635446240277,
## 2.83026783382646, 3.08099211750481, 3.28952066443753, 2.89148225218019,
## 2.84781214347737, 3.08648663682246, 2.58097411853423, 2.71469474382088,
## 2.85359250639287, 2.77695417974942, 3.00667221359233, 2.93545134266906,
## 2.56186769092413, 3.12324559385295, 2.86105737022739, 2.61885462229774,
## 3.14802408389625, 2.64546532591059, 2.78253905309295, 2.7408400239252,
## 3.14458322028635, 2.50307395374345, 2.99423114742772, 3.10368941505908,
## 2.87469394517693, 2.84374591655611, 2.85991255041146, 2.69665215614984,
## 3.04547436544881, 2.38967979984498, 2.90635446240277, 2.78315767358902,
## 2.7047112998367, 2.92262380173335, 2.69867303928961, 3.06198806933106,
## 3.02819946369149, 2.86619290219901, 2.82316300820271, 3.07639017657145,
## 3.09602999486936, 3.39484390768998, 3.07731226054641, 3.04832472367316,
## 2.49897390699944, 3.06385810260159, 2.94654202936322, 2.77383794164021,
## 2.95125778345216, 2.95073490762326, 3.05776766447344, 3.09013294897548,
## 3.11484775444415, 2.8724340572095, 2.97246364661464, 3.08967788639652,
## 2.82967768922391, 2.97552956623647, 2.84490938381941, 3.23553626576131,
## 2.75937682826755, 2.90799335924598, 2.82435065679837, 3.3332753651767,
## 2.87130219517581, 2.96217549002515, 3.02140002030257, 3.06991167172824,
## 3.3403852422654, 2.63762773680566, 2.84199817361195, 3.42491390827947,
## 3.37724616083964, 3.2240623515555, 3.33932197794407, 3.30137704637994,
## 3.26842760369745, 3.29546642702991, 2.90251989183181, 3.0022112396517,
## 3.03206420280138, 2.89591193827178, 3.14974008603334, 2.90032208874933,
## 2.91723004539903, 3.33719205168624, 3.40019688132857, 2.75556971707019,
## 3.02188723103084, 2.8106067894273, 2.68033636253469, 2.89203703721523,
## 2.95699144523756, 2.64333388638252, 2.87016905057865, 2.42303124606991,
## 2.797890905102, 2.82435065679837, 2.93492013415723, 2.78315767358902,
## 2.58248697812686, 3.02334744058696, 3.02237420450041, 3.00617753141553,
## 2.89977188240808, 2.85128436918812, 2.86334308550825, 3.05588619637374,
## 2.81780106506133, 3.19826487096408, 2.79239134953596, 2.9871959425317,
## 2.55489902160804, 2.99773027621666, 2.8402473707136, 2.75366071235426,
## 3.17596832385692, 2.68716699018579, 2.68784749378469, 3.02140002030257,
## 2.61447185414264, 2.94811641961233, 2.92369907065416, 3.0243197304059,
## 2.90251989183181, 2.63188884013665, 3.07269331469012, 2.9871959425317,
## 2.9274534328007, 2.75238601492226, 2.93119375241642, 2.50715725872282,
## 2.86562358820697, 2.99673177388707, 3.02868337369368, 2.86903462050803,
## 3.03783344957263, 3.19622113430339, 2.67000213346468, 3.21847574484686,
## 3.23553626576131, 3.14544454678232, 2.79422789734326, 2.80819714970715,
## 2.96217549002515, 3.06712226964066, 3.1108450806545, 3.38201456224538,
## 3.08831145484708, 3.36453339729056, 3.31817802594206, 2.97501923195645,
## 3.32790958589232, 3.12148347885955, 3.17513290192028, 3.30137704637994,
## 3.37997374521053, 3.42165339022954, 3.10861443061066, 3.34109345759245,
## 3.33505757915761, 3.37861088298936, 3.20030443928277), smoothness_mean = c(-2.13368655653223,
## -2.46816753378372, -2.21091790446822, -2.0572887370387, -2.357780728462,
## -2.1294724752854, -2.06120877341878, -2.13199879241851, -2.5003045919681,
## -2.33201390368486, -2.32892906833365, -2.17948289586006, -2.172434408529,
## -2.31597433011306, -2.14558134418438, -2.32493295665795, -2.23026443141442,
## -2.27886856637673, -2.23212662934548, -2.18836394890402, -2.13199879241851,
## -2.24999264287488, -2.36021420583068, -2.31800334572243, -2.19912638462582,
## -2.26336437984076, -2.40983628374102, -2.36584443263324, -2.28671174383776,
## -2.09801292726527, -2.3989858672804, -2.33160204206454, -2.20818441757256,
## -2.45340798272863, -2.15589071384324, -2.27205588795922, -2.56589980899753,
## -2.49362454040772, -2.16456379509667, -2.40174266452726, -2.35135525736347,
## -2.25094185984221, -2.17419187822565, -2.51825662946955, -2.17683388768849,
## -2.08505728046547, -2.56122629666141, -2.18747228589354, -2.2595256035336,
## -2.50850286364319, -2.23867176725039, -2.40694610831879, -2.23399230152843,
## -2.29560947925762, -2.38901482099243, -2.23961029383266, -2.05104846717862,
## -2.30920696930293, -2.20545838226332, -2.22747762050724, -2.24148999363423,
## -2.10784101620153, -2.33067597316057, -2.31526514615142, -2.35979056676483,
## -2.40472856666275, -2.43360535543245, -2.17859911321305, -2.4108386784343,
## -2.3859667019331, -2.60978983193469, -2.28082360121253, -2.26432638087696,
## -2.39931628195382, -2.25856820757727, -2.40983628374102, -2.32769779380912,
## -2.14558134418438, -2.52410496319216, -2.29759755148301, -2.30368569843808,
## -1.96754244918243, -2.46781357236182, -2.02041820123037, -2.43508844280714,
## -2.26625316374666, -2.30930763875487, -2.54631407791736, -2.18747228589354,
## -2.16282315061889, -2.32605844917969, -2.35788640877914, -2.15244243456433,
## -2.15848474902029, -2.4767004132409, -2.36733697022374, -1.9330926453447,
## -2.64296495444628, -2.43212446434903, -2.37968214337901, -2.49896500703904,
## -2.16282315061889, -2.2876964805003, -2.23867176725039, -2.21457421567133,
## -2.29461692334487, -2.35788640877914, -2.36127408934273, -2.25284300109923,
## -2.33935281718626, -2.14814873968963, -2.1507227436848, -2.38054663446376,
## -2.33088169214916, -2.44449433917674, -2.16108553072035, -2.21549038614311,
## -2.50862573640859, -2.30558960201434, -2.53275325924522, -2.3639287232351,
## -2.32749272870023, -2.36616407463692, -2.4471485441854, -2.59762751985212,
## -2.3739736890817, -2.58826916278315, -2.21732524904322, -2.44253705342149,
## -2.18925640768704, -2.29065652212877, -2.47230636781226, -2.47444159994024,
## -2.42305924646192, -2.25474776357989, -2.27496992596107, -2.40262644717427,
## -2.43132796888677, -2.39272864284701, -2.33314739857669, -2.3196295276033,
## -2.77242873503842, -2.43737441934268, -2.17068002211411, -2.3437196363526,
## -2.40141144730094, -2.45480430597156, -2.32544438714013, -2.29560947925762,
## -2.51577831345509, -2.23026443141442, -2.43623077786396, -2.66642852641139,
## -2.26915031690781, -2.53313097407502, -2.12026353620009, -2.60761680378094,
## -2.31546771882506, -2.34486648525065, -2.41150750021823, -2.17068002211411,
## -2.02268320786123, -2.30609123232333, -2.42181918091774, -2.21732524904322,
## -2.43360535543245, -2.30058709033137, -2.50323356648088, -2.3979952777987,
## -2.42238265644964, -2.2966030213165, -2.27691734624547, -2.36840443403134,
## -2.51900132355883, -2.47456035773386, -2.25284300109923, -2.2433732333622,
## -2.27789248040367, -2.47159563572833, -2.31202955182205, -2.48039683431017,
## -2.53792775176525, -2.2182439445603, -2.67611558257186, -2.55361384779779,
## -2.39043318356724, -2.46934831087215, -2.40019792186105, -2.49823507999933,
## -2.32156405959185, -2.36201667676347, -2.53502169121985, -2.34424076826288,
## -2.52036803806616, -2.27594316204762, -2.23492644452023, -2.43497428103979,
## -2.33748714500331, -2.2896688677275, -2.29560947925762, -2.2424311701743,
## -2.29461692334487, -2.26818366627671, -2.21091790446822, -2.37946613733,
## -2.01365380114183, -2.19912638462582, -2.40805672593628, -2.54593135162578,
## -2.35714688098673, -2.3342821797373, -2.53124382499637, -2.36180445265402,
## -2.22377391256976, -2.31932441699834, -2.36435411939168, -2.40384292506827,
## -2.42418791475329, -2.09964424899736, -2.36669703846129, -2.41653797307008,
## -2.53224986129852, -2.28278246569787, -2.54938117297348, -2.26625316374666,
## -2.23867176725039, -2.54874141861733, -2.4777721608874, -2.45282675632459,
## -2.66570936061269, -2.52323176410967, -2.44035402273894, -2.435316805448,
## -2.40883489283676, -2.29759755148301, -2.48027738140434, -2.33645216250562,
## -2.38618411687036, -2.46298861448707, -2.30579022394299, -2.72174352823421,
## -2.2876964805003, -2.16282315061889, -2.47088540842575, -2.22562405185792,
## -2.23679735245604, -2.5937398549248, -2.46381074149327, -2.65854600619912,
## -2.6069386997338, -2.48266915484781, -2.57465631793168, -2.43110051522947,
## -2.45375688079578, -2.46381074149327, -2.32831324156678, -2.30579022394299,
## -2.58269589994951, -2.2433732333622, -2.52323176410967, -2.17683388768849,
## -2.14558134418438, -2.44495543428577, -2.27496992596107, -2.14643641050411,
## -2.35482620483974, -2.34559698358734, -2.24999264287488, -2.48734963143914,
## -2.19014966366426, -2.35788640877914, -2.29560947925762, -2.23586146095114,
## -2.32780034213511, -2.46734181860853, -2.27011790285654, -2.32309396962559,
## -2.16282315061889, -2.31445526556453, -2.47860653723952, -2.42193185062661,
## -2.3995366190705, -2.15589071384324, -2.58216672941196, -2.08505728046547,
## -2.16369309412743, -2.27886856637673, -2.62086383942329, -2.43691680578934,
## -2.48975840051902, -2.29362535162257, -2.59883711612889, -2.45550319941876,
## -2.33552159324031, -2.53363481587935, -2.39141630670066, -2.31213050583758,
## -2.36616407463692, -2.24148999363423, -2.32872375060374, -2.53036437271278,
## -2.35957881451417, -2.48867373635724, -2.31465767422831, -2.3998672158006,
## -2.58800306379276, -2.49277754416857, -2.10701830945007, -2.08989599958369,
## -2.52735496605284, -2.6685891322213, -2.26144314966287, -2.4813529715887,
## -2.44391826911069, -2.51231895739834, -2.29263476214088, -2.26432638087696,
## -2.14814873968963, -2.27399763614213, -2.52024371407769, -2.55863930776551,
## -2.39832536527488, -2.09557092360972, -2.42226993594003, -2.61033382759614,
## -2.44391826911069, -2.29859307172451, -2.35440484172384, -2.5834901811668,
## -2.26721794915675, -2.44357278629502, -2.42384918048708, -2.22840569481979,
## -2.52961117157248, -2.46334087187344, -2.48650793115497, -2.32554670463139,
## -2.2595256035336, -2.18925640768704, -2.43212446434903, -2.45970684876626,
## -2.2182439445603, -2.40163224659528, -2.23026443141442, -2.51120955820905,
## -2.43030483459642, -2.30789918781781, -2.24999264287488, -2.17771611094818,
## -2.28474517486571, -2.24431618487007, -2.39294753307444, -2.47005744693779,
## -2.44368793397205, -2.52998770122864, -2.41743493467299, -2.4284885098832,
## -2.5081343359073, -2.41385190542263, -2.2730262907525, -2.30288513800305,
## -2.38825191975114, -2.47836807294842, -2.33469514691228, -2.71613277729557,
## -2.27108642593467, -2.4725433908046, -2.45538668325201, -2.381087321149,
## -2.36904545969432, -2.48063578293735, -2.52773064697893, -2.3128374694584,
## -2.22192718997659, -2.4641632886501, -2.5948108054957, -2.49084424245475,
## -2.44472486015517, -2.4875902474172, -2.37633928158271, -2.41396368097744,
## -2.43588794030847, -2.51355306837812, -2.29560947925762, -2.40351101158401,
## -2.41564181528634, -2.27691734624547, -2.39832536527488, -2.52685427759577,
## -2.23399230152843, -2.26048391697541, -2.44865186912883, -2.21732524904322,
## -2.30158559266096, -2.60748114618051, -2.61251277439315, -2.34476217189323,
## -2.22933463125445, -2.41597778034914, -2.22100510600162, -2.15244243456433,
## -2.37064982390818, -1.81155409655623, -2.0754495204103, -2.12527607802364,
## -2.1345315079978, -2.51342958872124, -2.46840357768899, -2.2018351898939,
## -2.3843375948663, -2.25474776357989, -2.23679735245604, -2.10537492370634,
## -2.18480205733766, -1.98704469241387, -2.2730262907525, -2.31102057181515,
## -2.35472084741457, -2.26721794915675, -2.23026443141442, -2.40761233086175,
## -2.08104282304681, -2.20727491318972, -2.23212662934548, -2.28671174383776,
## -2.37763196731696, -2.26528930825035, -2.09070473395855, -2.51355306837812,
## -2.44553210231398, -2.30418637436102, -2.49193126472496, -2.44518606159307,
## -2.38097916042206, -2.36085000112602, -2.42170652390191, -2.59950974681276,
## -2.34695504072999, -2.49181042610479, -2.3816283003345, -2.51047064191927,
## -2.4046178185592, -2.29958958401425, -2.51047064191927, -2.46828554877176,
## -2.37935815179996, -2.59709039079395, -2.20818441757256, -2.1982250776698,
## -2.47041220363755, -2.13876700776465, -2.13368655653223, -2.46816753378372,
## -2.21091790446822, -1.94841327927343, -2.0572887370387, -2.357780728462,
## -2.1294724752854, -2.06120877341878, -2.13199879241851, -2.5003045919681,
## -2.47681943960538, -2.17948289586006, -2.172434408529, -2.31597433011306,
## -2.14558134418438, -2.3196295276033, -2.32493295665795, -2.23026443141442,
## -2.27886856637673, -2.23212662934548, -2.36148620091421, -2.13199879241851,
## -2.19912638462582, -2.12276666641821, -2.36435411939168, -2.26336437984076,
## -2.34236596300589, -2.32044361129536, -2.40983628374102, -2.36584443263324,
## -2.28671174383776, -2.50568094900448, -2.09801292726527, -2.26240330336121,
## -2.33160204206454, -2.20818441757256, -2.27205588795922, -2.43588794030847,
## -2.44911488568994, -2.56589980899753, -2.49362454040772, -2.16456379509667,
## -2.40174266452726, -2.35135525736347, -2.17419187822565, -2.51825662946955,
## -2.32769779380912, -2.17683388768849, -2.08505728046547, -2.25474776357989,
## -2.56122629666141, -2.14387340183922, -2.2595256035336, -2.50850286364319,
## -2.23867176725039, -2.40694610831879, -2.23399230152843, -2.29560947925762,
## -2.38901482099243, -2.38945102601571, -2.04716798112954, -2.23961029383266,
## -2.05104846717862, -2.30920696930293, -2.22747762050724, -2.24148999363423,
## -2.10784101620153, -2.33067597316057, -2.31526514615142, -2.35979056676483,
## -2.40472856666275, -2.43360535543245, -2.17859911321305, -2.4108386784343,
## -2.3859667019331, -2.60978983193469, -2.28082360121253, -2.26432638087696,
## -2.27886856637673, -2.40983628374102, -2.32769779380912, -2.35514234373272,
## -2.14558134418438, -2.52410496319216, -2.30368569843808, -1.96754244918243,
## -2.46781357236182, -2.02041820123037, -2.43508844280714, -2.26625316374666,
## -2.30930763875487, -2.54631407791736, -2.18747228589354, -2.32605844917969,
## -2.35788640877914, -2.15244243456433, -2.4767004132409, -2.36733697022374,
## -1.9330926453447, -2.2063662352535, -2.64296495444628, -2.43212446434903,
## -2.49896500703904, -2.16282315061889, -2.2876964805003, -2.23867176725039,
## -2.21457421567133, -2.29461692334487, -2.35788640877914, -2.36127408934273,
## -2.40185309465271, -2.25284300109923, -2.33935281718626, -2.14814873968963,
## -2.33088169214916, -2.21457421567133, -2.55194429112667, -2.16108553072035,
## -2.21549038614311, -2.50862573640859, -2.30558960201434, -2.53275325924522,
## -2.17595244206068, -2.3639287232351, -2.23305903034544, -2.32749272870023,
## -2.36616407463692, -2.4471485441854, -2.19373068808196, -2.3739736890817,
## -2.58826916278315, -2.44253705342149, -2.29065652212877, -2.47230636781226,
## -2.47444159994024, -2.34403228290822, -2.42305924646192, -2.25474776357989,
## -2.31719124538341, -2.07385716338594, -2.2966030213165, -2.43132796888677,
## -2.39272864284701, -2.33314739857669, -2.3196295276033, -2.77242873503842,
## -2.43737441934268, -2.21274438899426, -2.17068002211411, -2.3437196363526,
## -2.40141144730094, -2.40163224659528, -2.37871048338354, -2.45480430597156,
## -2.32544438714013, -2.29560947925762, -2.51577831345509, -2.23026443141442,
## -2.43623077786396, -2.66642852641139, -2.2595256035336, -2.53313097407502,
## -2.12026353620009, -2.60761680378094, -2.46240179444793, -2.34486648525065,
## -2.41150750021823, -2.17068002211411, -2.02268320786123, -2.30609123232333,
## -2.42181918091774, -2.21732524904322, -2.43360535543245, -2.30058709033137,
## -2.50323356648088, -2.3979952777987, -2.42238265644964, -2.16980398176023,
## -2.2966030213165, -2.357780728462, -2.27691734624547, -2.36840443403134,
## -2.51900132355883, -2.47456035773386, -2.47159563572833, -2.31202955182205,
## -2.25094185984221, -2.48039683431017, -2.53792775176525, -2.2182439445603,
## -2.17068002211411, -2.55361384779779, -2.46934831087215, -2.40019792186105,
## -2.48231002394073, -2.49823507999933, -2.32156405959185, -2.53502169121985,
## -2.27594316204762, -2.23492644452023, -2.62900799376226, -2.43497428103979,
## -2.33748714500331, -2.2896688677275, -2.29560947925762, -2.2424311701743,
## -2.29461692334487, -2.26818366627671, -2.21091790446822, -2.37946613733,
## -2.01365380114183, -2.19912638462582, -2.24148999363423, -2.30258509299405,
## -2.44622454319566, -2.40805672593628, -2.54593135162578, -2.3303674740065,
## -2.35714688098673, -2.53124382499637, -2.36180445265402, -2.22377391256976,
## -2.74435118082854, -2.31932441699834, -2.42418791475329, -2.09964424899736,
## -2.36669703846129, -2.41653797307008, -2.35219559354738, -2.28278246569787,
## -2.54938117297348, -2.26625316374666, -2.54874141861733, -2.4777721608874,
## -2.45282675632459, -2.66570936061269, -2.52323176410967, -2.44035402273894,
## -2.435316805448, -2.40883489283676, -2.48027738140434, -2.33645216250562,
## -2.46298861448707, -2.30579022394299, -2.72174352823421, -2.2876964805003,
## -2.16282315061889, -2.47088540842575, -2.22562405185792, -2.23679735245604,
## -2.42441380135918, -2.5937398549248, -2.46381074149327, -2.65854600619912,
## -2.6069386997338, -2.4288288195683, -2.57465631793168, -2.43110051522947,
## -2.45375688079578, -2.23119509690737, -2.46381074149327, -2.55954399280299,
## -2.32831324156678, -2.30579022394299, -2.58269589994951, -2.17683388768849,
## -2.14558134418438, -2.44495543428577, -2.27496992596107, -2.37418851185374,
## -2.14643641050411, -2.14986400597638, -2.35482620483974, -2.34559698358734,
## -2.48819204077953, -2.39513907460465, -2.29560947925762, -2.23586146095114,
## -2.32780034213511, -2.46734181860853, -2.27011790285654, -2.32309396962559,
## -2.16282315061889, -2.31445526556453, -2.42193185062661, -2.3995366190705,
## -2.08505728046547, -2.16369309412743, -2.27886856637673, -2.62086383942329,
## -2.43497428103979, -2.22377391256976, -2.43691680578934, -2.48975840051902,
## -2.29362535162257, -2.45550319941876, -2.33552159324031, -2.33448864200705,
## -2.53363481587935, -2.39141630670066, -2.31213050583758, -2.38260280098008,
## -2.36616407463692, -2.24148999363423, -2.53036437271278, -2.30158559266096,
## -2.35957881451417, -2.48867373635724, -2.3998672158006, -2.10701830945007,
## -2.08989599958369, -2.6685891322213, -2.26144314966287, -2.4813529715887,
## -2.44391826911069, -2.51231895739834, -2.65555263214446, -2.4811138515493,
## -2.29263476214088, -2.30298517301539, -2.15416508787577, -2.14814873968963,
## -2.27399763614213, -2.52011940554375, -2.52024371407769, -2.55863930776551,
## -2.09557092360972, -2.42226993594003, -2.61033382759614, -2.31475889392575,
## -2.44391826911069, -2.29859307172451, -2.35440484172384, -2.5834901811668,
## -2.26721794915675, -2.42384918048708, -2.22840569481979, -2.52961117157248,
## -2.46334087187344, -2.48650793115497, -2.2595256035336, -2.18925640768704,
## -2.43212446434903, -2.28474517486571, -2.45970684876626, -2.2182439445603,
## -2.40163224659528, -2.23026443141442, -2.51120955820905, -2.43030483459642,
## -2.30789918781781, -2.24999264287488, -2.17771611094818, -2.28474517486571,
## -2.41642590941781, -2.44368793397205, -2.41743493467299, -2.48518668899532,
## -2.4284885098832, -2.5081343359073, -2.41385190542263, -2.30288513800305,
## -2.38825191975114, -2.47836807294842, -2.33469514691228, -2.71613277729557,
## -2.4725433908046, -2.20818441757256, -2.45538668325201, -2.381087321149,
## -2.36904545969432, -2.48063578293735, -2.52773064697893, -2.3128374694584,
## -2.22192718997659, -2.65997457787065, -2.5948108054957, -2.49084424245475,
## -2.44472486015517, -2.4875902474172, -2.37633928158271, -2.14131694539792,
## -2.41396368097744, -2.43588794030847, -2.56407973569218, -2.40351101158401,
## -2.41564181528634, -2.6841383810559, -2.25761172735131, -2.39832536527488,
## -2.52685427759577, -2.23399230152843, -2.30759763481759, -2.26048391697541,
## -2.352405787978, -2.44865186912883, -2.21732524904322, -2.18213893991818,
## -2.54618648621065, -2.30158559266096, -2.61251277439315, -2.34476217189323,
## -2.22933463125445, -2.41597778034914, -2.29065652212877, -2.22100510600162,
## -2.31435407659404, -2.15244243456433, -2.15589071384324, -2.37064982390818,
## -1.81155409655623, -2.21091790446822, -2.12527607802364, -2.36159227357408,
## -2.1345315079978, -2.51342958872124, -2.46840357768899, -2.2018351898939,
## -2.31952781372436, -2.3843375948663, -2.25474776357989, -2.23679735245604,
## -2.10537492370634, -1.98704469241387, -2.2730262907525, -2.46381074149327,
## -2.31102057181515, -2.35472084741457, -2.23026443141442, -2.40761233086175,
## -2.23212662934548, -2.28671174383776, -2.39076078389777, -2.33438540554384,
## -2.25761172735131, -2.09070473395855, -2.30418637436102, -2.42622273345222,
## -2.49193126472496, -2.38097916042206, -2.36085000112602, -2.42170652390191,
## -2.46616340697086, -2.59950974681276, -2.34695504072999, -2.49181042610479,
## -2.3816283003345, -2.51047064191927, -2.4046178185592, -2.29958958401425,
## -2.51047064191927, -2.46828554877176, -2.37935815179996, -2.30971041793663,
## -2.59709039079395, -2.2557015070952, -2.1982250776698, -2.32483070194137,
## -2.47041220363755, -2.13876700776465, -2.94446897961645), compactness_se = c(-3.01511898735418,
## -4.33667093295308, -3.21737694874447, -3.3977034924079, -4.28163846064261,
## -3.49693765394299, -3.35183595212443, -2.62873083189796, -4.68107978008844,
## -3.20374093728393, -2.48927618230594, -2.82413468012301, -3.16060691674423,
## -4.45502752755837, -3.68847953409261, -4.22673375026785, -3.96436948580036,
## -4.24609811744964, -2.93219425275, -3.9728351448249, -3.2704323117826,
## -3.48839059336453, -3.60380328175882, -3.49561795728165, -3.37728556161584,
## -3.55155526325072, -5.3187241763257, -4.5153294819883, -3.79914084837147,
## -4.03758622840349, -2.2966030213165, -4.28019232879261, -3.22037695099447,
## -4.10682208373321, -3.75673012102211, -4.24959584749339, -4.44220135770995,
## -4.78190736448814, -3.51964313686864, -4.57561138374655, -4.74190670956156,
## -3.76965576414122, -3.55609834288012, -4.75680736065105, -4.51076951056661,
## -3.45396544720081, -3.23449720577116, -3.63136554791333, -4.04270132907026,
## -4.6868141750286, -2.45271055157168, -4.1031835108893, -2.87955051926458,
## -3.8800399595751, -4.00688328645211, -2.84061063668669, -2.68311371581218,
## -4.09775035535693, -4.0710187369186, -3.33120500984212, -2.86593283601915,
## -2.80511191394534, -4.0107389783673, -4.10500114241063, -3.51224068045548,
## -3.99431824815498, -3.69168338144667, -3.12084208459684, -4.0074331902328,
## -3.71153414467874, -4.56787440124439, -4.20572315020549, -3.29279150781837,
## -3.35756334464209, -4.44050356443286, -4.31999124375443, -3.54218507496313,
## -4.52451228297064, -5.09979443041607, -3.81853266234081, -3.80811354203763,
## -3.53633005565365, -4.55924125409969, -2.44553210231398, -4.16240929313623,
## -3.52948540196369, -3.20843056700666, -2.59749321053757, -3.28421466617654,
## -3.81082112491918, -3.68608336681194, -2.69414729593322, -3.66399173860616,
## -3.22489389719376, -4.7769079492424, -4.18055625904117, -2.32217574275905,
## -3.42038020107893, -4.69192704991437, -4.44475346458219, -3.60050234349652,
## -3.17366348204665, -3.44860350405943, -3.7066361757032, -4.2104290412429,
## -3.93631569799719, -4.18975474702676, -4.25310585460704, -4.28308668681898,
## -4.18580217271094, -3.35813789220171, -3.35297880939168, -5.17503835038787,
## -3.94351367251952, -4.15856313454875, -3.04892210206811, -3.31511143274903,
## -3.02063990926018, -3.77356626228113, -4.1401790985659, -2.91581265037812,
## -4.71219875874661, -3.90355892701602, -4.1592031345706, -3.62646819399254,
## -4.62649597270284, -5.09390821329008, -3.72762028243037, -3.48839059336453,
## -3.78231139890649, -3.44986255364344, -3.67143254051022, -4.83044099831848,
## -3.70095203534821, -3.31483621601411, -4.43965574751052, -4.54407508662828,
## -4.58831306892167, -4.82743911989164, -2.42950978656771, -3.19564769874899,
## -6.09593656870469, -5.0570981685066, -3.04513262384912, -3.78847881933669,
## -3.28929828916475, -4.69061914782057, -4.74397258886855, -4.73573455600921,
## -3.811273102328, -1.99952191850396, -4.02295456613543, -5.00028900204211,
## -3.25839658303048, -4.14332474444382, -3.39680703336086, -2.93821836735338,
## -4.13704331675522, -4.02911877730439, -3.66165346786034, -2.97182045350768,
## -3.47119075348269, -3.95754352072996, -3.95336596896253, -4.38282695484465,
## -4.19770707521725, -3.49298377729286, -4.39328982695259, -3.32118544369277,
## -4.05878438682355, -2.45865427864393, -3.37028027902743, -3.17199223896935,
## -3.4673371841667, -3.63287726399865, -3.76619255721661, -4.26869794936688,
## -3.74059388980062, -4.50623023813319, -4.2488953220707, -3.43983426527707,
## -3.77487316728091, -3.25502127150455, -4.10621473479259, -4.98892272719061,
## -3.88149380039332, -4.56594947283481, -4.28091513330547, -3.5596071184098,
## -3.58092231260884, -4.38362791604086, -5.31241629092754, -2.82784776455781,
## -3.62421594078821, -3.42589999430253, -4.27874828522038, -2.88383318105658,
## -4.57076875927076, -4.39978335626314, -3.24214445180756, -2.98439676389025,
## -4.19770707521725, -4.01738352108597, -3.61972659542562, -3.04387264888193,
## -3.08172598672097, -2.81424439750861, -2.81658240792567, -4.69893156580257,
## -4.21448036346208, -3.3295280949718, -3.7272045684356, -3.82676316147732,
## -3.03968416108317, -4.49005737888759, -3.00376444525126, -4.54031921366848,
## -4.44902150349816, -4.20706543228622, -4.71075294856316, -4.497213044483,
## -5.24496619105104, -3.70949040801806, -4.23913914712533, -3.81716747441547,
## -3.35785057715879, -3.22867366734831, -4.82731423696922, -2.96500910091115,
## -4.30877617293429, -2.49350349701352, -4.27227577071476, -2.24054970207459,
## -3.85611537348985, -3.91052412930441, -4.19970507787993, -4.43965574751052,
## -4.80936932130136, -4.40549999085952, -4.60018264447705, -4.28526296624157,
## -4.23844590619636, -3.44108231536107, -3.39055422156368, -3.05082223987195,
## -4.67109633610426, -3.75630213043137, -4.84584145417052, -5.32199549390261,
## -5.58706667496024, -5.35785507926423, -5.11250192049983, -3.58704514823267,
## -4.31250057202527, -5.80515096904449, -4.09895517477978, -2.71961683747368,
## -4.52913549971209, -3.15472800504445, -4.28598944647694, -3.90405483577897,
## -3.71686749065411, -4.25592275787816, -4.65499088097205, -3.87136102370926,
## -3.42805483583204, -3.42497791032756, -4.50623023813319, -4.77572128718313,
## -4.01905157818667, -3.77008950953948, -4.23429652264955, -3.92662915781751,
## -3.48904547223535, -3.36072744910844, -3.97549542838372, -3.39085108960801,
## -4.512591004695, -3.06379700446812, -4.51350299746227, -4.19571305661039,
## -4.76205794152524, -3.72264308426675, -4.96313167145229, -2.72433203367649,
## -3.1598996197679, -3.71686749065411, -3.38522561215246, -4.83596795623916,
## -3.54080433604857, -4.71253270431659, -5.36168324216986, -3.88977239649333,
## -4.10682208373321, -4.38202663467388, -3.96016338075608, -3.06315494987143,
## -4.4990099901597, -3.56807875396823, -3.2398441313044, -4.20975541373343,
## -3.9659513474537, -4.14332474444382, -4.0107389783673, -2.57137998972734,
## -4.42035174899555, -3.51190550364658, -3.09026275740022, -4.03024363681558,
## -3.96173859767408, -3.0878475624618, -3.29548692724004, -3.87617335253445,
## -4.05013630755778, -3.49792857062193, -3.19491518742336, -3.92916916426312,
## -3.28983491380869, -4.11597686234921, -3.35900033296237, -4.22058828879635,
## -3.93018697605582, -3.29198429668884, -4.41952083910146, -3.29333001079315,
## -4.76874849996491, -3.84717203310853, -4.38523176562283, -3.1055471395612,
## -3.58560110466043, -3.76792266145439, -4.61048428081186, -4.67184431925608,
## -3.62159470730834, -3.57269769982453, -4.24959584749339, -3.48904547223535,
## -4.08995421392081, -3.24701813081448, -4.37168034261974, -3.88879487930894,
## -3.90952612522956, -3.57234163786598, -3.97124236508812, -5.16781555848388,
## -3.84063300934147, -2.77852631490653, -3.41276401803362, -3.60637823260219,
## -3.19711433308669, -4.02239606290952, -4.29621597826077, -4.34665949083659,
## -4.51441582271963, -4.68442986697372, -3.07088694715475, -4.46020441573791,
## -4.31923964657512, -4.16048436472665, -3.89960048542959, -3.90455099058945,
## -4.44731210137251, -3.88830647881083, -3.87520903230543, -2.8029654588985,
## -3.9481684520645, -3.65389848521869, -4.76311137405667, -4.38523176562283,
## -3.72222943379549, -4.71342377142835, -4.36615328551759, -3.80811354203763,
## -2.92359767579479, -4.07395387257437, -4.20438266743104, -2.70950125336952,
## -3.44860350405943, -4.37644225637998, -2.65569497076828, -3.91753818611626,
## -4.16369464035689, -4.03419063940235, -3.47602869228516, -4.01960821610808,
## -3.41489070879416, -2.9058915695542, -4.09474464224344, -4.5263590055638,
## -4.23222826958549, -4.00633368489939, -4.27371049119042, -3.56383396513971,
## -3.61191841297781, -4.60217467700829, -3.88635525867957, -4.28962978540791,
## -3.68807977394337, -4.28962978540791, -3.57877023143391, -3.25553980923979,
## -3.39442039738313, -3.09180274230472, -3.0183869641188, -3.75160628402224,
## -3.00780485478826, -3.24009946131983, -4.42535175941225, -3.78759542705309,
## -4.11659017116942, -4.10986437388014, -3.97549542838372, -3.75973119508589,
## -4.1598435444548, -3.67300610495765, -3.43827638990358, -3.7937956240635,
## -3.89124046624562, -4.0387206584741, -4.23844590619636, -4.69192704991437,
## -3.42743869286738, -4.46280294470117, -4.00908471827128, -4.05878438682355,
## -4.92979290625216, -3.65621867187439, -3.35383681030232, -4.63645447602786,
## -2.74000537234376, -3.19126065783523, -4.44645849483327, -4.2889006566844,
## -3.8637091451496, -4.92716768885499, -3.47894273028701, -4.50623023813319,
## -3.48970078025356, -4.39571996180588, -3.99867081215382, -3.83830796760587,
## -3.59867318622779, -3.80676249477065, -4.48827643451659, -3.07067136021848,
## -3.51257596964554, -4.72417897257029, -3.14423228187243, -3.54356772295388,
## -3.28849389174907, -2.78741813648578, -3.01511898735418, -4.33667093295308,
## -3.21737694874447, -2.59588290423146, -3.3977034924079, -4.28163846064261,
## -3.49693765394299, -3.35183595212443, -2.62873083189796, -4.68107978008844,
## -3.46541595398881, -2.82413468012301, -3.16060691674423, -4.45502752755837,
## -3.68847953409261, -3.96700731376091, -4.22673375026785, -3.96436948580036,
## -4.24609811744964, -2.93219425275, -4.37485243092588, -3.2704323117826,
## -3.37728556161584, -3.47959144909017, -3.40580799421984, -3.55155526325072,
## -3.68927953413528, -3.50822595442068, -5.3187241763257, -4.5153294819883,
## -3.79914084837147, -4.50804347525737, -4.03758622840349, -3.84109866709145,
## -4.28019232879261, -3.22037695099447, -4.24959584749339, -4.28163846064261,
## -4.62966781758849, -4.44220135770995, -4.78190736448814, -3.51964313686864,
## -4.57561138374655, -4.74190670956156, -3.55609834288012, -4.75680736065105,
## -4.55162941906006, -4.51076951056661, -3.45396544720081, -2.6512918672836,
## -3.23449720577116, -3.76792266145439, -4.04270132907026, -4.6868141750286,
## -2.45271055157168, -4.1031835108893, -2.87955051926458, -3.8800399595751,
## -4.00688328645211, -3.81535011816789, -3.56171830849726, -2.84061063668669,
## -2.68311371581218, -4.09775035535693, -3.33120500984212, -2.86593283601915,
## -2.80511191394534, -4.0107389783673, -4.10500114241063, -3.51224068045548,
## -3.99431824815498, -3.69168338144667, -3.12084208459684, -4.0074331902328,
## -3.71153414467874, -4.56787440124439, -4.20572315020549, -3.29279150781837,
## -4.2246810639501, -4.31999124375443, -3.54218507496313, -4.20773724957719,
## -4.52451228297064, -5.09979443041607, -3.80811354203763, -3.53633005565365,
## -4.55924125409969, -2.44553210231398, -4.16240929313623, -3.52948540196369,
## -3.20843056700666, -2.59749321053757, -3.28421466617654, -3.68608336681194,
## -2.69414729593322, -3.66399173860616, -4.7769079492424, -4.18055625904117,
## -2.32217574275905, -4.36144000106549, -3.42038020107893, -4.69192704991437,
## -3.60050234349652, -3.17366348204665, -3.44860350405943, -3.7066361757032,
## -4.2104290412429, -3.93631569799719, -4.18975474702676, -4.25310585460704,
## -4.42118334987508, -4.28308668681898, -4.18580217271094, -3.35813789220171,
## -3.94351367251952, -4.05416277258927, -4.02799518168057, -3.04892210206811,
## -3.31511143274903, -3.02063990926018, -3.77356626228113, -4.1401790985659,
## -4.47238907475427, -2.91581265037812, -2.34486648525065, -4.71219875874661,
## -3.90355892701602, -4.1592031345706, -3.00497485492092, -4.62649597270284,
## -5.09390821329008, -3.48839059336453, -3.44986255364344, -3.67143254051022,
## -4.83044099831848, -4.62374157157353, -3.70095203534821, -3.31483621601411,
## -4.29035944614806, -3.76317205869579, -3.85328251040555, -4.58831306892167,
## -4.82743911989164, -2.42950978656771, -3.19564769874899, -6.09593656870469,
## -5.0570981685066, -3.67418790537095, -3.04513262384912, -3.78847881933669,
## -3.28929828916475, -4.27227577071476, -4.42284862919414, -4.69061914782057,
## -4.74397258886855, -4.73573455600921, -3.811273102328, -1.99952191850396,
## -4.02295456613543, -5.00028900204211, -2.97162519821076, -4.14332474444382,
## -3.39680703336086, -2.93821836735338, -3.29737805302316, -4.02911877730439,
## -3.66165346786034, -2.97182045350768, -3.47119075348269, -3.95754352072996,
## -3.95336596896253, -4.38282695484465, -4.19770707521725, -3.49298377729286,
## -4.39328982695259, -3.32118544369277, -4.05878438682355, -3.58560110466043,
## -2.45865427864393, -3.29413830936875, -3.37028027902743, -3.17199223896935,
## -3.4673371841667, -3.63287726399865, -4.50623023813319, -4.2488953220707,
## -4.82071766068067, -3.43983426527707, -3.77487316728091, -3.25502127150455,
## -3.90803098415861, -4.98892272719061, -4.56594947283481, -4.28091513330547,
## -3.46317917401804, -3.5596071184098, -3.58092231260884, -5.31241629092754,
## -3.42589999430253, -4.27874828522038, -4.1547317122372, -2.88383318105658,
## -4.57076875927076, -4.39978335626314, -3.24214445180756, -2.98439676389025,
## -4.19770707521725, -4.01738352108597, -3.61972659542562, -3.04387264888193,
## -3.08172598672097, -2.81424439750861, -3.6667266494728, -4.14901196356441,
## -4.50442028288795, -2.81658240792567, -4.69893156580257, -4.4178610876831,
## -4.21448036346208, -3.7272045684356, -3.82676316147732, -3.03968416108317,
## -5.59672340236279, -4.49005737888759, -4.44902150349816, -4.20706543228622,
## -4.71075294856316, -4.497213044483, -4.17273862965011, -3.70949040801806,
## -4.23913914712533, -3.81716747441547, -3.22867366734831, -4.82731423696922,
## -2.96500910091115, -4.30877617293429, -2.49350349701352, -4.27227577071476,
## -2.24054970207459, -3.85611537348985, -4.19970507787993, -4.43965574751052,
## -4.40549999085952, -4.60018264447705, -4.28526296624157, -4.23844590619636,
## -3.44108231536107, -3.39055422156368, -3.05082223987195, -4.67109633610426,
## -3.62985611366448, -3.75630213043137, -4.84584145417052, -5.32199549390261,
## -5.58706667496024, -4.69838256771027, -5.11250192049983, -3.58704514823267,
## -4.31250057202527, -4.26655738486777, -5.80515096904449, -5.15560326434886,
## -4.09895517477978, -2.71961683747368, -4.52913549971209, -3.90405483577897,
## -3.71686749065411, -4.25592275787816, -4.65499088097205, -4.4811842062071,
## -3.87136102370926, -3.28181562464207, -3.42805483583204, -3.42497791032756,
## -4.94428591355472, -3.24496315963152, -4.23429652264955, -3.92662915781751,
## -3.48904547223535, -3.36072744910844, -3.97549542838372, -3.39085108960801,
## -4.512591004695, -3.06379700446812, -4.19571305661039, -4.76205794152524,
## -2.72433203367649, -3.1598996197679, -3.71686749065411, -3.38522561215246,
## -3.16178685788407, -3.23602198370317, -4.83596795623916, -3.54080433604857,
## -4.71253270431659, -3.88977239649333, -4.10682208373321, -4.01460959420325,
## -4.38202663467388, -3.96016338075608, -3.06315494987143, -4.27730632390348,
## -4.4990099901597, -3.56807875396823, -4.20975541373343, -3.62122080564012,
## -3.9659513474537, -4.14332474444382, -2.57137998972734, -3.09026275740022,
## -4.03024363681558, -3.0878475624618, -3.29548692724004, -3.87617335253445,
## -4.05013630755778, -3.49792857062193, -4.25804065489289, -2.70770026233792,
## -3.19491518742336, -4.29401575735117, -3.53085058988903, -3.28983491380869,
## -4.11597686234921, -4.36379386641282, -3.35900033296237, -4.22058828879635,
## -3.29198429668884, -4.41952083910146, -3.29333001079315, -4.35441146764091,
## -4.76874849996491, -3.84717203310853, -4.38523176562283, -3.1055471395612,
## -3.58560110466043, -4.61048428081186, -4.67184431925608, -3.62159470730834,
## -3.57269769982453, -4.24959584749339, -4.08995421392081, -3.24701813081448,
## -4.37168034261974, -4.66258745685804, -3.88879487930894, -3.90952612522956,
## -3.57234163786598, -3.97124236508812, -5.16781555848388, -3.84063300934147,
## -2.77852631490653, -3.41276401803362, -3.60637823260219, -3.19711433308669,
## -4.09534506255568, -4.51441582271963, -3.07088694715475, -3.14469642669395,
## -4.46020441573791, -4.31923964657512, -4.16048436472665, -3.90455099058945,
## -4.44731210137251, -3.88830647881083, -3.87520903230543, -2.8029654588985,
## -3.65389848521869, -4.38122695450332, -4.76311137405667, -4.38523176562283,
## -3.72222943379549, -4.71342377142835, -4.36615328551759, -3.80811354203763,
## -2.92359767579479, -4.08697180789269, -4.20438266743104, -2.70950125336952,
## -3.44860350405943, -4.37644225637998, -2.65569497076828, -3.44014613180073,
## -3.91753818611626, -4.16369464035689, -4.79839092365105, -4.01960821610808,
## -3.41489070879416, -4.33133352035836, -3.85658829854005, -4.09474464224344,
## -4.5263590055638, -4.23222826958549, -4.80532990091193, -4.00633368489939,
## -2.59414132699443, -4.27371049119042, -3.56383396513971, -4.34897878062768,
## -4.79463721094292, -3.61191841297781, -3.88635525867957, -4.28962978540791,
## -3.68807977394337, -4.28962978540791, -3.43765391867277, -3.57877023143391,
## -4.00688328645211, -3.25553980923979, -4.15600722261421, -3.39442039738313,
## -3.09180274230472, -3.81762233000666, -3.75160628402224, -4.42035174899555,
## -3.00780485478826, -3.24009946131983, -4.42535175941225, -3.78759542705309,
## -4.0107389783673, -4.11659017116942, -4.10986437388014, -3.97549542838372,
## -3.75973119508589, -3.67300610495765, -3.43827638990358, -5.13280292807046,
## -3.7937956240635, -3.89124046624562, -4.23844590619636, -4.69192704991437,
## -4.00908471827128, -4.05878438682355, -3.47732277205165, -3.38788636760248,
## -3.64352375239314, -3.35383681030232, -3.19126065783523, -3.06765801312558,
## -4.44645849483327, -3.8637091451496, -4.92716768885499, -3.47894273028701,
## -4.48916651023178, -4.50623023813319, -3.48970078025356, -4.39571996180588,
## -3.99867081215382, -3.83830796760587, -3.59867318622779, -3.80676249477065,
## -4.48827643451659, -3.07067136021848, -3.51257596964554, -3.62009993871911,
## -4.72417897257029, -3.02742936029725, -3.54356772295388, -3.7201637441537,
## -3.28849389174907, -2.78741813648578, -5.36873983084458), texture_worst = c(3.84564929607836,
## 4.39399418633571, 4.55828921539398, 4.4211241551246, 4.71271039433389,
## 4.74618886322936, 4.91933405670256, 5.49170795573549, 5.1148322519163,
## 4.68587539087759, 4.86780056544559, 5.00062492188965, 5.30184459476561,
## 4.92899890199126, 4.9672866665904, 4.03444047673929, 4.14699403807913,
## 3.66818867730445, 4.01749018754573, 4.9723474900504, 4.22683509877394,
## 5.07450645475586, 4.68445475787559, 4.05870187270786, 4.74480308344018,
## 4.68445475787559, 4.3453390314034, 4.53345042416724, 4.59470138513544,
## 5.20054352680722, 5.07207842375622, 4.86450250302232, 4.2199261916611,
## 4.27462740540287, 4.73299200833398, 4.16566697228562, 4.37627106269926,
## 4.22079093808333, 4.45108063957035, 4.98054948250216, 4.31731157691913,
## 4.74618886322936, 4.91739656048269, 4.29899485449732, 3.85786568417726,
## 4.66877250239633, 4.03162425409815, 5.09023188209693, 4.9723474900504,
## 4.42825357700912, 4.33219157542828, 4.63564962539988, 5.11124712616235,
## 4.17979251337981, 4.37788780062843, 4.00136370799379, 4.98243842645474,
## 4.50452373965997, 5.0099800597908, 4.37627106269926, 5.09925984037004,
## 5.04460038283339, 4.51064286511708, 4.71411457400721, 4.82189254351365,
## 4.89858891132473, 4.90444106677773, 3.93665456449412, 4.81247233739534,
## 4.5813897395165, 4.30733851517092, 4.58879422265363, 4.4589011555627,
## 4.97424265314624, 4.18506724920057, 3.82822637799617, 4.92771242891074,
## 4.06055740376743, 5.05195727576962, 4.65370756626545, 4.38595481282061,
## 4.31149915094838, 4.70074195576864, 4.73716716909308, 4.81516751663526,
## 4.23286346360635, 4.55379225305673, 4.41953690473458, 4.34041740713414,
## 4.06796436841826, 4.60426960897484, 3.81894651195118, 4.69225794719606,
## 4.90444106677773, 4.72462012983057, 3.70223861716733, 4.44008824220022,
## 4.34041740713414, 4.40759828979585, 5.21780342847701, 4.57321828343664,
## 3.94545627679901, 5.09685554064732, 3.89411597678992, 4.59322595812653,
## 4.97991951661426, 4.03350212602557, 4.96158092693842, 4.55454233489475,
## 4.27968997796331, 4.22942061247119, 3.6803321374997, 3.4881652680553,
## 4.53874091045711, 4.26277156096358, 3.76030887491783, 4.62110518228067,
## 4.55379225305673, 4.08912563560772, 4.31648211802796, 4.85325597503937,
## 3.73790915557037, 4.14788668902207, 4.53496319009936, 4.74133518110845,
## 3.95325104412725, 3.93273153639511, 4.60867288259549, 3.89411597678992,
## 4.62183449590202, 4.78331042054293, 4.75172411533717, 4.57990609284585,
## 4.61526269721601, 5.02054044452534, 3.66597344862832, 4.86318204216207,
## 4.02880451447434, 3.81584452507305, 4.47136044125528, 4.76756833954403,
## 4.80639733438528, 3.72176758750256, 5.0908347519036, 5.05073262206906,
## 3.88310217785028, 4.61964589127126, 4.22597261650997, 4.62838838909742,
## 4.06518959300002, 5.30461763543518, 4.40120616682715, 4.17715059261016,
## 5.42165939938186, 4.27800368129042, 5.12258263348064, 4.49531539967184,
## 4.8796372255157, 4.76138067250417, 4.5813897395165, 5.04214328851219,
## 5.55137593525295, 4.49069780205868, 4.12456366980809, 4.37869575168726,
## 4.52207395926043, 4.81584084911856, 3.69678266390047, 4.72531888820021,
## 4.51750811014081, 4.74133518110845, 4.66089306552781, 4.67305957436053,
## 4.47756557994607, 5.7250741812419, 3.82513742469316, 4.34779596076712,
## 4.89076444551659, 4.40919360925164, 3.80131055253282, 4.0391263923702,
## 4.95649816088697, 4.88160449712241, 5.10705804269966, 5.54784383979588,
## 5.31568005236977, 4.02503937366727, 4.89011128798296, 5.48447656291417,
## 5.69944420254844, 4.01465271367111, 4.1362549488282, 4.73368836607226,
## 4.61818571423201, 4.90638871527655, 4.83595544584434, 3.88310217785028,
## 5.19186958268757, 4.20865485453779, 4.66591022465882, 4.56277773074029,
## 4.52511300047923, 4.48529922188488, 4.28390036534887, 5.25367423284224,
## 4.33301546451814, 4.977398061134, 4.68303329080159, 4.97928939162397,
## 5.80649267996141, 4.35596712356414, 4.87438349362812, 4.41238100619215,
## 4.53420692714647, 3.72500546771211, 4.74895806068259, 4.18155210571024,
## 5.16274115651175, 4.00896702468464, 4.00896702468464, 4.4643601672029,
## 4.60059403590832, 5.23266824566795, 3.93861349816864, 4.78126284597665,
## 4.52511300047923, 4.07442566880621, 4.36735901153395, 4.68658539494554,
## 3.65486318971188, 4.30567217570052, 4.60353494442031, 4.28894276917322,
## 4.60720602154206, 4.09644010087559, 4.57470607871923, 3.86090927635756,
## 3.8044330617929, 3.57313465469149, 4.39238851250693, 4.53798584507042,
## 4.45812026715483, 4.61087149610063, 4.36654714126594, 4.83395137977617,
## 4.48298177738099, 4.97613637696856, 4.14341994534426, 4.10918408776423,
## 3.80547291455154, 3.85278371543535, 4.25682080339513, 3.74860430284226,
## 3.33461827035041, 4.07626837901377, 4.51826969357939, 4.39319148665841,
## 4.12185715449991, 4.15768292970039, 4.36329684217773, 4.19907427922337,
## 4.99123464718118, 4.22510981161163, 4.20081917204225, 4.8966351137691,
## 4.27631615777365, 4.27378256776766, 4.57693603923099, 4.76825489106129,
## 5.07086352975503, 3.80235188130695, 4.65442707939607, 4.91933405670256,
## 4.23630118715586, 4.36654714126594, 4.09369993366443, 5.16098279528589,
## 3.92880160458909, 4.05498624312565, 4.67020237121107, 3.90606913806289,
## 4.1736232924066, 3.88510875184718, 4.07902964158111, 4.45421195165986,
## 4.40759828979585, 5.0751130969265, 3.68582950234986, 4.56203022993144,
## 3.88310217785028, 4.51979212962875, 4.19032989622769, 4.81853233316707,
## 4.59248790411271, 4.2525608748516, 4.62037564745409, 5.11064911059594,
## 4.62547775120469, 4.5784215265119, 4.93862626226538, 3.67592419710848,
## 4.37465320891252, 4.12185715449991, 4.02315444464448, 4.34615829494115,
## 5.21722997447023, 4.06981233359486, 5.05073262206906, 4.15056255337101,
## 4.17715059261016, 4.78535628237399, 4.37141414682473, 3.84972892391568,
## 4.98180893733216, 4.03725319225232, 4.90054116752379, 4.34451948048039,
## 4.78740043468991, 4.5784215265119, 4.19732806150465, 4.13446018295962,
## 4.4791143283873, 4.72112332197546, 4.11552943063158, 4.48838562918339,
## 3.79609650233605, 4.56203022993144, 4.0642639124341, 4.66662611195927,
## 4.54928680787707, 4.7969173026623, 5.25649971528638, 4.62547775120469,
## 4.73577624450677, 4.97171544875428, 5.289607608586, 4.68089952389297,
## 4.95204221081654, 4.7120079998809, 3.80339271755236, 4.66447781347784,
## 4.07258146061223, 4.55604179233441, 4.37465320891252, 4.80098466524709,
## 5.00249875093668, 4.70638151598143, 4.35351877174842, 4.52359396475879,
## 4.93221217091729, 4.92384900352362, 4.47058365452776, 4.24230491665047,
## 4.73716716909308, 4.0391263923702, 4.6565843327947, 3.86293601702622,
## 4.38595481282061, 4.27968997796331, 4.89533172440296, 5.3789243482449,
## 4.33959612712433, 4.76344497143527, 5.00437117689173, 4.74895806068259,
## 4.93413813813035, 5.3431303609114, 4.29062114000629, 5.53924614207584,
## 5.39342611034402, 4.99248872462087, 5.29016509938456, 5.15099568604474,
## 4.96031120783185, 4.44244806790354, 4.43220506774196, 4.55604179233441,
## 4.55304193558474, 4.48915660389326, 4.79623875179676, 4.56501883193107,
## 5.0817765373419, 3.93469391170234, 4.04286814109814, 4.04286814109814,
## 4.68445475787559, 4.36492255576514, 4.72741395924518, 4.62402111104245,
## 3.9444800183149, 3.50017120752803, 4.55154059307216, 4.89728655105185,
## 4.59764951947146, 3.74433252136007, 4.76825489106129, 4.76962741533791,
## 4.29062114000629, 4.46747395638393, 4.92127004033648, 5.05807160953597,
## 4.43062526611781, 3.74646945527153, 3.91697011212309, 4.16920652420897,
## 5.25706443832225, 3.87304201366326, 3.87102449594356, 4.84927432396436,
## 4.74064100771146, 4.36329684217773, 4.60793956404291, 4.4698066128653,
## 4.25596944489609, 3.89711029325023, 4.66089306552781, 4.56576539912967,
## 4.36004202368276, 3.95713827061974, 4.53042200855463, 4.03537843777246,
## 3.63696722222788, 3.6803321374997, 4.73508048403053, 4.7921634933305,
## 4.12095426849246, 5.13801256667971, 5.01184689693292, 4.93156984974714,
## 4.99311552733089, 4.07350375212876, 5.01805965188487, 5.30406326607349,
## 4.81112363659015, 4.25852258139477, 4.28137504979732, 4.5006912691636,
## 4.75448703520705, 5.2383625553961, 4.52207395926043, 5.22353090036469,
## 5.17559876883805, 4.35106785374955, 5.13623697910506, 4.68516517856677,
## 5.30350877738352, 5.3659655010133, 4.83261442759844, 4.62256358846899,
## 5.12912215905078, 5.42589455648472, 3.84564929607836, 4.39399418633571,
## 4.55828921539398, 4.62984238922248, 4.4211241551246, 4.71271039433389,
## 4.74618886322936, 4.91933405670256, 5.49170795573549, 5.1148322519163,
## 4.71271039433389, 5.00062492188965, 5.30184459476561, 4.92899890199126,
## 4.9672866665904, 4.92899890199126, 4.03444047673929, 4.14699403807913,
## 3.66818867730445, 4.01749018754573, 5.21493487007181, 4.22683509877394,
## 4.74480308344018, 5.00561868253273, 4.93028470867799, 4.68445475787559,
## 4.80639733438528, 4.89533172440296, 4.3453390314034, 4.53345042416724,
## 4.59470138513544, 4.88815077944616, 5.20054352680722, 4.73647180615168,
## 4.86450250302232, 4.2199261916611, 4.16566697228562, 4.98872460173249,
## 4.57247403882814, 4.37627106269926, 4.22079093808333, 4.45108063957035,
## 4.98054948250216, 4.31731157691913, 4.91739656048269, 4.29899485449732,
## 3.63921234008317, 3.85786568417726, 4.66877250239633, 4.83929193082027,
## 4.03162425409815, 5.08540372897595, 4.9723474900504, 4.42825357700912,
## 4.33219157542828, 4.63564962539988, 5.11124712616235, 4.17979251337981,
## 4.37788780062843, 4.48452699202674, 3.28480883728916, 4.00136370799379,
## 4.98243842645474, 4.50452373965997, 4.37627106269926, 5.09925984037004,
## 5.04460038283339, 4.51064286511708, 4.71411457400721, 4.82189254351365,
## 4.89858891132473, 4.90444106677773, 3.93665456449412, 4.81247233739534,
## 4.5813897395165, 4.30733851517092, 4.58879422265363, 4.4589011555627,
## 4.61453138729751, 3.82822637799617, 4.92771242891074, 5.19649935898102,
## 4.06055740376743, 5.05195727576962, 4.38595481282061, 4.31149915094838,
## 4.70074195576864, 4.73716716909308, 4.81516751663526, 4.23286346360635,
## 4.55379225305673, 4.41953690473458, 4.34041740713414, 4.60426960897484,
## 3.81894651195118, 4.69225794719606, 4.72462012983057, 3.70223861716733,
## 4.44008824220022, 3.70332816754794, 4.34041740713414, 4.40759828979585,
## 4.57321828343664, 3.94545627679901, 5.09685554064732, 3.89411597678992,
## 4.59322595812653, 4.97991951661426, 4.03350212602557, 4.96158092693842,
## 5.08419524488049, 4.55454233489475, 4.27968997796331, 4.22942061247119,
## 4.53874091045711, 4.64866495826235, 4.16743743155859, 3.76030887491783,
## 4.62110518228067, 4.55379225305673, 4.08912563560772, 4.31648211802796,
## 4.44951342604587, 4.85325597503937, 4.05591572233258, 3.73790915557037,
## 4.14788668902207, 4.53496319009936, 4.52663106781278, 3.95325104412725,
## 3.93273153639511, 3.89411597678992, 4.78331042054293, 4.75172411533717,
## 4.57990609284585, 3.221496909402, 4.61526269721601, 5.02054044452534,
## 4.36085615277429, 3.81584452507305, 3.79296212815746, 4.02880451447434,
## 3.81584452507305, 4.47136044125528, 4.76756833954403, 4.80639733438528,
## 3.72176758750256, 5.05256937888436, 5.0908347519036, 5.05073262206906,
## 3.88310217785028, 4.7385572993759, 4.20778553942227, 4.61964589127126,
## 4.22597261650997, 4.62838838909742, 4.06518959300002, 5.30461763543518,
## 4.40120616682715, 4.17715059261016, 4.72112332197546, 4.27800368129042,
## 5.12258263348064, 4.49531539967184, 5.00374719065992, 4.76138067250417,
## 4.5813897395165, 5.04214328851219, 5.55137593525295, 4.49069780205868,
## 4.12456366980809, 4.37869575168726, 4.52207395926043, 4.81584084911856,
## 3.69678266390047, 4.72531888820021, 4.51750811014081, 3.95907935613536,
## 4.74133518110845, 5.1720985907114, 4.66089306552781, 4.67305957436053,
## 4.47756557994607, 5.7250741812419, 4.40919360925164, 3.80131055253282,
## 4.08454211051463, 4.0391263923702, 4.95649816088697, 4.88160449712241,
## 4.50987883531407, 5.54784383979588, 4.02503937366727, 4.89011128798296,
## 4.60573826407968, 5.48447656291417, 5.69944420254844, 4.1362549488282,
## 4.90638871527655, 4.83595544584434, 4.72392117056727, 3.88310217785028,
## 5.19186958268757, 4.20865485453779, 4.66591022465882, 4.56277773074029,
## 4.52511300047923, 4.48529922188488, 4.28390036534887, 5.25367423284224,
## 4.33301546451814, 4.977398061134, 5.91342843222292, 5.41210504547204,
## 4.96665334045321, 4.68303329080159, 4.97928939162397, 4.82725933452894,
## 5.80649267996141, 4.87438349362812, 4.41238100619215, 4.53420692714647,
## 4.16123482951012, 3.72500546771211, 5.16274115651175, 4.00896702468464,
## 4.00896702468464, 4.4643601672029, 4.25596944489609, 5.23266824566795,
## 3.93861349816864, 4.78126284597665, 4.07442566880621, 4.36735901153395,
## 4.68658539494554, 3.65486318971188, 4.30567217570052, 4.60353494442031,
## 4.28894276917322, 4.60720602154206, 4.57470607871923, 3.86090927635756,
## 3.57313465469149, 4.39238851250693, 4.53798584507042, 4.45812026715483,
## 4.61087149610063, 4.36654714126594, 4.83395137977617, 4.48298177738099,
## 4.24487317517505, 4.97613637696856, 4.14341994534426, 4.10918408776423,
## 3.80547291455154, 4.63347355254343, 4.25682080339513, 3.74860430284226,
## 3.33461827035041, 4.31482231351887, 4.07626837901377, 3.75605990535429,
## 4.51826969357939, 4.39319148665841, 4.12185715449991, 4.19907427922337,
## 4.99123464718118, 4.22510981161163, 4.20081917204225, 3.91301228983002,
## 4.8966351137691, 4.53420692714647, 4.27631615777365, 4.27378256776766,
## 4.28305889879237, 5.14392222535321, 4.65442707939607, 4.91933405670256,
## 4.23630118715586, 4.36654714126594, 4.09369993366443, 5.16098279528589,
## 3.92880160458909, 4.05498624312565, 3.90606913806289, 4.1736232924066,
## 4.45421195165986, 4.40759828979585, 5.0751130969265, 3.68582950234986,
## 4.31399196742674, 4.50682025822858, 4.56203022993144, 3.88310217785028,
## 4.51979212962875, 4.81853233316707, 4.59248790411271, 4.55229138231215,
## 4.2525608748516, 4.62037564745409, 5.11064911059594, 4.44401997544328,
## 4.62547775120469, 4.5784215265119, 3.67592419710848, 4.26446897202882,
## 4.37465320891252, 4.12185715449991, 4.34615829494115, 5.05073262206906,
## 4.15056255337101, 4.78535628237399, 4.37141414682473, 3.84972892391568,
## 4.98180893733216, 4.03725319225232, 4.10008852884187, 4.00326694607662,
## 4.90054116752379, 3.66708134606272, 4.83261442759844, 4.78740043468991,
## 4.5784215265119, 4.54401968563775, 4.19732806150465, 4.13446018295962,
## 4.72112332197546, 4.11552943063158, 4.48838562918339, 4.36248356180047,
## 3.79609650233605, 4.56203022993144, 4.0642639124341, 4.66662611195927,
## 4.54928680787707, 5.25649971528638, 4.62547775120469, 4.73577624450677,
## 4.97171544875428, 5.289607608586, 4.95204221081654, 4.7120079998809,
## 3.80339271755236, 4.79962962791266, 4.66447781347784, 4.07258146061223,
## 4.55604179233441, 4.37465320891252, 4.80098466524709, 5.00249875093668,
## 4.70638151598143, 4.35351877174842, 4.52359396475879, 4.93221217091729,
## 4.15145381301131, 4.73716716909308, 4.6565843327947, 5.20746150097278,
## 3.86293601702622, 4.38595481282061, 4.27968997796331, 5.3789243482449,
## 4.33959612712433, 4.76344497143527, 5.00437117689173, 4.74895806068259,
## 5.3431303609114, 3.8044330617929, 4.29062114000629, 5.53924614207584,
## 5.39342611034402, 4.99248872462087, 5.29016509938456, 5.15099568604474,
## 4.96031120783185, 4.99874968738276, 4.43220506774196, 4.55604179233441,
## 4.55304193558474, 4.48915660389326, 4.79623875179676, 4.54853507309582,
## 4.56501883193107, 5.0817765373419, 5.35239743682106, 4.04286814109814,
## 4.68445475787559, 4.26107290477726, 4.26955375498823, 4.72741395924518,
## 4.62402111104245, 3.9444800183149, 4.40360525003346, 3.50017120752803,
## 4.19470624578159, 4.55154059307216, 4.89728655105185, 4.24316132001179,
## 3.95422348371184, 4.59764951947146, 4.76825489106129, 4.76962741533791,
## 4.29062114000629, 4.46747395638393, 4.35106785374955, 4.92127004033648,
## 4.14163080129856, 5.05807160953597, 4.22683509877394, 4.43062526611781,
## 3.74646945527153, 4.45421195165986, 4.16920652420897, 3.88911647055105,
## 5.25706443832225, 3.87304201366326, 3.87102449594356, 4.84927432396436,
## 3.83644251728315, 4.74064100771146, 4.36329684217773, 4.60793956404291,
## 4.4698066128653, 3.89711029325023, 4.66089306552781, 4.62474954134985,
## 4.56576539912967, 4.36004202368276, 4.53042200855463, 4.03537843777246,
## 4.73508048403053, 4.7921634933305, 4.67662633771017, 4.63056906010227,
## 4.55454233489475, 5.01184689693292, 4.07350375212876, 4.98306775683589,
## 5.01805965188487, 4.81112363659015, 4.25852258139477, 4.28137504979732,
## 4.56277773074029, 4.5006912691636, 4.75448703520705, 5.2383625553961,
## 4.52207395926043, 5.22353090036469, 5.17559876883805, 4.35106785374955,
## 5.13623697910506, 4.68516517856677, 5.30350877738352, 5.07207842375622,
## 5.3659655010133, 5.59835498011832, 4.62256358846899, 5.36325756817213,
## 5.12912215905078, 5.42589455648472, 4.89598350492003), smoothness_worst = c(-1.40183650096854,
## -1.55220612193591, -1.46803241840246, -1.34354252185399, -1.46880785019902,
## -1.39048277340616, -1.3733916940331, -1.3231236019111, -1.57721525174517,
## -1.48685434191659, -1.64440052484827, -1.39154061904023, -1.38206798475272,
## -1.46031914038324, -1.34420937914042, -1.46958403520788, -1.52091312537128,
## -1.51595585913422, -1.48923877900503, -1.33888905569403, -1.42981400758559,
## -1.43724005615725, -1.51021197356396, -1.54490367636292, -1.39649510187451,
## -1.46725773804559, -1.67785425471799, -1.6941150453434, -1.40613457096515,
## -1.30508782071433, -1.54833143908053, -1.44548787503347, -1.38171920194798,
## -1.52715454306599, -1.34521064424351, -1.44888634599264, -1.61942689900374,
## -1.59390494932646, -1.53428953643525, -1.48963688528217, -1.54747305858427,
## -1.40112232198728, -1.49804385635261, -1.65226073987208, -1.53640067966101,
## -1.39578550727826, -1.67097631500598, -1.32377475234327, -1.42870591413312,
## -1.53008450627828, -1.45344000592858, -1.57188106834428, -1.41516143031734,
## -1.48092447390449, -1.57944902805708, -1.4549636269214, -1.39578550727826,
## -1.53050398030052, -1.42539062731438, -1.43314735095279, -1.4195297558875,
## -1.48844316751003, -1.49442988431315, -1.48606112392627, -1.52340366783141,
## -1.54747305858427, -1.52423562829466, -1.53555552814175, -1.60725220038208,
## -1.5440491267089, -1.65970839141552, -1.50980330884167, -1.42759932289029,
## -1.57321108034171, -1.5957317618608, -1.47308623661435, -1.47308623661435,
## -1.41552457144521, -1.60354616102531, -1.42465571958716, -1.56045122596102,
## -1.32019949774685, -1.57587816190535, -1.37408250358882, -1.37477388194629,
## -1.45916852210433, -1.53134361222597, -1.71744592147873, -1.52590223262381,
## -1.36617225167842, -1.44661911204314, -1.57810796637572, -1.31502542614399,
## -1.32247294325797, -1.62749770507757, -1.42870591413312, -1.37581201847778,
## -1.65028764768472, -1.58799851382772, -1.45763692373978, -1.65226073987208,
## -1.49083240917807, -1.53640067966101, -1.47464772083503, -1.42944447588348,
## -1.48725124924937, -1.55870797763166, -1.45993541772284, -1.53809377073995,
## -1.57321108034171, -1.48092447390449, -1.49844643513038, -1.55393453399349,
## -1.52008471650699, -1.52091312537128, -1.47582084584847, -1.49123131965263,
## -1.6642143643724, -1.51925718980053, -1.67734248825545, -1.39898347234133,
## -1.50898661988177, -1.4549636269214, -1.56132435202346, -1.72699126094039,
## -1.53640067966101, -1.5816894624428, -1.50207895410352, -1.60308420688558,
## -1.47036097520833, -1.46493818798572, -1.55653452068464, -1.64050222044834,
## -1.5830369390616, -1.49483061874586, -1.49123131965263, -1.48487278500486,
## -1.62559067609645, -1.58573917754001, -1.52548524538613, -1.47935063364482,
## -1.76360044982201, -1.58573917754001, -1.45002235376791, -1.47974380144674,
## -1.60447093386092, -1.55393453399349, -1.51636776051741, -1.50288846770294,
## -1.61612925391863, -1.43426150312693, -1.69406265936793, -1.82475510742266,
## -1.34554466088782, -1.61565936633134, -1.36173387206049, -1.72435994432552,
## -1.42723079211313, -1.47386659627565, -1.4928289806949, -1.43314735095279,
## -1.20942243559218, -1.47503857052102, -1.45002235376791, -1.47935063364482,
## -1.60911220342376, -1.50572836428609, -1.55914341511068, -1.57855472125531,
## -1.53428953643525, -1.5500510954301, -1.46185586833818, -1.48329110073087,
## -1.68681869997878, -1.48210690487441, -1.49483061874586, -1.48289617228522,
## -1.42723079211313, -1.53597798799109, -1.52715454306599, -1.59756306683032,
## -1.55696871583034, -1.34822152100929, -1.70382130009779, -1.66300999675367,
## -1.55870797763166, -1.44511114728777, -1.53134361222597, -1.62321385571701,
## -1.49925221154687, -1.53513329980447, -1.64440052484827, -1.50654165744372,
## -1.69511112194495, -1.46070304656148, -1.44097866837128, -1.54533130870895,
## -1.44699654296734, -1.48963688528217, -1.56394977145742, -1.37097832354581,
## -1.47895766029479, -1.44775193389517, -1.44210361946329, -1.5338680007895,
## -1.35225267363822, -1.44511114728777, -1.59848041238222, -1.62179145524877,
## -1.48487278500486, -1.56307362195144, -1.6602076580791, -1.54447628247046,
## -1.5114392519592, -1.50207895410352, -1.53767014868934, -1.45916852210433,
## -1.51967084302662, -1.50167451022945, -1.54319552829131, -1.5500510954301,
## -1.63807637754432, -1.34654749739611, -1.6448892914313, -1.43985527553856,
## -1.49925221154687, -1.66642765300822, -1.64391208808319, -1.60678792964066,
## -1.68221925036159, -1.64881136280715, -1.60586026106885, -1.6635617469794,
## -1.52049881052667, -1.4530595505586, -1.49764148352008, -1.52673688116944,
## -1.58213835253834, -1.69805517741032, -1.59710481795407, -1.69108309352359,
## -1.59436123275711, -1.44850803159816, -1.63807637754432, -1.50654165744372,
## -1.48013716393439, -1.68588605530976, -1.60123926203061, -1.66970988531176,
## -1.7134488980851, -1.65473472907519, -1.65721725562206, -1.57143825131019,
## -1.61801181100177, -1.61237852008231, -1.4719171273346, -1.55957910209346,
## -1.74733746712263, -1.48447706788501, -1.63614151870987, -1.42907511146058,
## -1.41262392240711, -1.54447628247046, -1.49163043179358, -1.41334812638161,
## -1.47152780472688, -1.53092368218281, -1.47582084584847, -1.55914341511068,
## -1.4660971225983, -1.59481779571206, -1.48447706788501, -1.44097866837128,
## -1.52840888279519, -1.55436724964736, -1.47817229601729, -1.56088766354308,
## -1.46185586833818, -1.50127027462458, -1.53050398030052, -1.56001503892749,
## -1.43500511941966, -1.52882744837297, -1.66481730933909, -1.42796802003095,
## -1.43537718258408, -1.38837110473461, -1.72206575085892, -1.58981090884964,
## -1.65523054910995, -1.51225851047311, -1.69905686874236, -1.5957317618608,
## -1.52673688116944, -1.56307362195144, -1.5147214601588, -1.53513329980447,
## -1.50005881482507, -1.52840888279519, -1.47935063364482, -1.60216116098467,
## -1.42355459440915, -1.61191101750064, -1.55350206373273, -1.59436123275711,
## -1.61051028430753, -1.53640067966101, -1.22152482402453, -1.40613457096515,
## -1.61659944171252, -1.72561952965551, -1.47464772083503, -1.58393660330422,
## -1.52091312537128, -1.64979521705898, -1.5466156397054, -1.46185586833818,
## -1.44586477827542, -1.47269634305255, -1.68645585838021, -1.60493375343096,
## -1.54962081859927, -1.31663899762533, -1.49563269960508, -1.69716013926355,
## -1.63904574231491, -1.50005881482507, -1.56614458542139, -1.6933296655119,
## -1.45344000592858, -1.55134338153222, -1.46455224894641, -1.49764148352008,
## -1.620844718463, -1.62511469446343, -1.59208260255658, -1.47503857052102,
## -1.43686713977629, -1.44022956699973, -1.52008471650699, -1.54747305858427,
## -1.47935063364482, -1.59253777203273, -1.48606112392627, -1.60216116098467,
## -1.52465194351403, -1.47777990462435, -1.46571062459338, -1.38661545837086,
## -1.48923877900503, -1.40505795674331, -1.5626359263642, -1.57099569380803,
## -1.60586026106885, -1.64586781621825, -1.48884087316261, -1.62749770507757,
## -1.67310899881542, -1.54149117490669, -1.52132766132703, -1.48487278500486,
## -1.55740315875384, -1.61942689900374, -1.49804385635261, -1.69045742134115,
## -1.43612181989407, -1.56526589687447, -1.55783784979882, -1.58124084060449,
## -1.48289617228522, -1.56088766354308, -1.60957793643666, -1.40973345879038,
## -1.50248360650758, -1.50857859510293, -1.53344669542821, -1.5683457783542,
## -1.50491591473139, -1.4900351922394, -1.56570511374899, -1.55393453399349,
## -1.64244875130296, -1.6308470262566, -1.51266846242613, -1.51513270923389,
## -1.58981090884964, -1.49442988431315, -1.56526589687447, -1.63132675755449,
## -1.48804566180285, -1.43129381087306, -1.59481779571206, -1.44135347866954,
## -1.52215739771971, -1.69139613830031, -1.60586026106885, -1.56176129175274,
## -1.43426150312693, -1.53597798799109, -1.46031914038324, -1.34254321262109,
## -1.56922805174919, -1.30732186646871, -1.27470462970427, -1.38766840258161,
## -1.36343818040496, -1.62702048148246, -1.59618916589, -1.41916483956829,
## -1.54876099075454, -1.4530595505586, -1.44964350687541, -1.46339554627544,
## -1.45610823795418, -1.32410051225878, -1.50857859510293, -1.47542961211584,
## -1.47777990462435, -1.39507651854449, -1.40112232198728, -1.52924624061376,
## -1.48764835579712, -1.43873343573348, -1.50572836428609, -1.42796802003095,
## -1.54106567708497, -1.49322890198397, -1.351243091246, -1.54447628247046,
## -1.41117742285008, -1.50735579633476, -1.63324884496924, -1.62749770507757,
## -1.56176129175274, -1.5321841580014, -1.46147141032506, -1.66220820932499,
## -1.62037180769071, -1.55393453399349, -1.61284631918178, -1.55696871583034,
## -1.49163043179358, -1.54064041483767, -1.62702048148246, -1.64979521705898,
## -1.52673688116944, -1.70042993006451, -1.48250144041222, -1.48131842185686,
## -1.59618916589, -1.39189353324403, -1.40183650096854, -1.55220612193591,
## -1.46803241840246, -1.24682371794616, -1.34354252185399, -1.46880785019902,
## -1.39048277340616, -1.3733916940331, -1.3231236019111, -1.57721525174517,
## -1.59985855657163, -1.39154061904023, -1.38206798475272, -1.46031914038324,
## -1.34420937914042, -1.44210361946329, -1.46958403520788, -1.52091312537128,
## -1.51595585913422, -1.48923877900503, -1.48487278500486, -1.42981400758559,
## -1.39649510187451, -1.39756063251601, -1.44323013407429, -1.46725773804559,
## -1.42318788167546, -1.46725773804559, -1.67785425471799, -1.6941150453434,
## -1.40613457096515, -1.61706993017923, -1.30508782071433, -1.43537718258408,
## -1.44548787503347, -1.38171920194798, -1.44888634599264, -1.58573917754001,
## -1.62131793415638, -1.61942689900374, -1.59390494932646, -1.53428953643525,
## -1.48963688528217, -1.54747305858427, -1.49804385635261, -1.65226073987208,
## -1.36309704347488, -1.53640067966101, -1.39578550727826, -1.39543093725709,
## -1.67097631500598, -1.39259981106491, -1.42870591413312, -1.53008450627828,
## -1.45344000592858, -1.57188106834428, -1.41516143031734, -1.48092447390449,
## -1.57944902805708, -1.44661911204314, -1.46532431312868, -1.4549636269214,
## -1.39578550727826, -1.53050398030052, -1.43314735095279, -1.4195297558875,
## -1.48844316751003, -1.49442988431315, -1.48606112392627, -1.52340366783141,
## -1.54747305858427, -1.52423562829466, -1.53555552814175, -1.60725220038208,
## -1.5440491267089, -1.65970839141552, -1.50980330884167, -1.42759932289029,
## -1.51925718980053, -1.47308623661435, -1.47308623661435, -1.54064041483767,
## -1.41552457144521, -1.60354616102531, -1.56045122596102, -1.32019949774685,
## -1.57587816190535, -1.37408250358882, -1.37477388194629, -1.45916852210433,
## -1.53134361222597, -1.71744592147873, -1.52590223262381, -1.44661911204314,
## -1.57810796637572, -1.31502542614399, -1.62749770507757, -1.42870591413312,
## -1.37581201847778, -1.52049881052667, -1.65028764768472, -1.58799851382772,
## -1.65226073987208, -1.49083240917807, -1.53640067966101, -1.47464772083503,
## -1.42944447588348, -1.48725124924937, -1.55870797763166, -1.45993541772284,
## -1.47777990462435, -1.53809377073995, -1.57321108034171, -1.48092447390449,
## -1.52008471650699, -1.48013716393439, -1.62559067609645, -1.47582084584847,
## -1.49123131965263, -1.6642143643724, -1.51925718980053, -1.67734248825545,
## -1.52715454306599, -1.39898347234133, -1.52924624061376, -1.50898661988177,
## -1.4549636269214, -1.56132435202346, -1.47817229601729, -1.53640067966101,
## -1.5816894624428, -1.60308420688558, -1.46493818798572, -1.55653452068464,
## -1.64050222044834, -1.47113867198634, -1.5830369390616, -1.49483061874586,
## -1.56176129175274, -1.43500511941966, -1.56176129175274, -1.62559067609645,
## -1.58573917754001, -1.52548524538613, -1.47935063364482, -1.76360044982201,
## -1.58573917754001, -1.4572544797648, -1.45002235376791, -1.47974380144674,
## -1.60447093386092, -1.52548524538613, -1.43873343573348, -1.55393453399349,
## -1.51636776051741, -1.50288846770294, -1.61612925391863, -1.43426150312693,
## -1.69406265936793, -1.82475510742266, -1.51925718980053, -1.61565936633134,
## -1.36173387206049, -1.72435994432552, -1.51677987989553, -1.47386659627565,
## -1.4928289806949, -1.43314735095279, -1.20942243559218, -1.47503857052102,
## -1.45002235376791, -1.47935063364482, -1.60911220342376, -1.50572836428609,
## -1.55914341511068, -1.57855472125531, -1.53428953643525, -1.59481779571206,
## -1.5500510954301, -1.42465571958716, -1.46185586833818, -1.48329110073087,
## -1.68681869997878, -1.48210690487441, -1.53597798799109, -1.52715454306599,
## -1.50898661988177, -1.59756306683032, -1.55696871583034, -1.34822152100929,
## -1.3733916940331, -1.66300999675367, -1.44511114728777, -1.53134361222597,
## -1.55653452068464, -1.62321385571701, -1.49925221154687, -1.64440052484827,
## -1.46070304656148, -1.44097866837128, -1.6137828087638, -1.54533130870895,
## -1.44699654296734, -1.48963688528217, -1.56394977145742, -1.37097832354581,
## -1.47895766029479, -1.44775193389517, -1.44210361946329, -1.5338680007895,
## -1.35225267363822, -1.44511114728777, -1.31341485905556, -1.43835983334882,
## -1.55134338153222, -1.59848041238222, -1.62179145524877, -1.42502309090337,
## -1.48487278500486, -1.6602076580791, -1.54447628247046, -1.5114392519592,
## -1.73845567122268, -1.50207895410352, -1.51967084302662, -1.50167451022945,
## -1.54319552829131, -1.5500510954301, -1.57543299233561, -1.34654749739611,
## -1.6448892914313, -1.43985527553856, -1.66642765300822, -1.64391208808319,
## -1.60678792964066, -1.68221925036159, -1.64881136280715, -1.60586026106885,
## -1.6635617469794, -1.52049881052667, -1.49764148352008, -1.52673688116944,
## -1.69805517741032, -1.59710481795407, -1.69108309352359, -1.59436123275711,
## -1.44850803159816, -1.63807637754432, -1.50654165744372, -1.48013716393439,
## -1.59390494932646, -1.68588605530976, -1.60123926203061, -1.66970988531176,
## -1.7134488980851, -1.55870797763166, -1.65721725562206, -1.57143825131019,
## -1.61801181100177, -1.50654165744372, -1.61237852008231, -1.66220820932499,
## -1.4719171273346, -1.55957910209346, -1.74733746712263, -1.42907511146058,
## -1.41262392240711, -1.54447628247046, -1.49163043179358, -1.5338680007895,
## -1.41334812638161, -1.47503857052102, -1.47152780472688, -1.53092368218281,
## -1.58393660330422, -1.44737415018464, -1.48447706788501, -1.44097866837128,
## -1.52840888279519, -1.55436724964736, -1.47817229601729, -1.56088766354308,
## -1.46185586833818, -1.50127027462458, -1.56001503892749, -1.43500511941966,
## -1.42796802003095, -1.43537718258408, -1.38837110473461, -1.72206575085892,
## -1.61612925391863, -1.50817078309823, -1.58981090884964, -1.65523054910995,
## -1.51225851047311, -1.5957317618608, -1.52673688116944, -1.51021197356396,
## -1.56307362195144, -1.5147214601588, -1.53513329980447, -1.49804385635261,
## -1.50005881482507, -1.52840888279519, -1.60216116098467, -1.57232414528024,
## -1.42355459440915, -1.61191101750064, -1.59436123275711, -1.22152482402453,
## -1.40613457096515, -1.72561952965551, -1.47464772083503, -1.58393660330422,
## -1.52091312537128, -1.64979521705898, -1.7431069913647, -1.61331441525618,
## -1.5466156397054, -1.50817078309823, -1.3810220723387, -1.44586477827542,
## -1.47269634305255, -1.63036761017151, -1.68645585838021, -1.60493375343096,
## -1.31663899762533, -1.49563269960508, -1.69716013926355, -1.58124084060449,
## -1.63904574231491, -1.50005881482507, -1.56614458542139, -1.6933296655119,
## -1.45344000592858, -1.46455224894641, -1.49764148352008, -1.620844718463,
## -1.62511469446343, -1.59208260255658, -1.43686713977629, -1.44022956699973,
## -1.52008471650699, -1.53344669542821, -1.54747305858427, -1.47935063364482,
## -1.59253777203273, -1.48606112392627, -1.60216116098467, -1.52465194351403,
## -1.47777990462435, -1.46571062459338, -1.38661545837086, -1.48923877900503,
## -1.56088766354308, -1.60586026106885, -1.48884087316261, -1.47113867198634,
## -1.62749770507757, -1.67310899881542, -1.54149117490669, -1.48487278500486,
## -1.55740315875384, -1.61942689900374, -1.49804385635261, -1.69045742134115,
## -1.56526589687447, -1.50654165744372, -1.55783784979882, -1.58124084060449,
## -1.48289617228522, -1.56088766354308, -1.60957793643666, -1.40973345879038,
## -1.50248360650758, -1.65423925053115, -1.53344669542821, -1.5683457783542,
## -1.50491591473139, -1.4900351922394, -1.56570511374899, -1.34721671179882,
## -1.55393453399349, -1.64244875130296, -1.69228385560889, -1.51513270923389,
## -1.58981090884964, -1.69427222671242, -1.50451000564169, -1.56526589687447,
## -1.63132675755449, -1.48804566180285, -1.56922805174919, -1.43129381087306,
## -1.57989658074747, -1.59481779571206, -1.44135347866954, -1.43686713977629,
## -1.66965927489485, -1.52215739771971, -1.60586026106885, -1.56176129175274,
## -1.43426150312693, -1.53597798799109, -1.48053072134206, -1.46031914038324,
## -1.59802159816531, -1.34254321262109, -1.41697874282747, -1.56922805174919,
## -1.30732186646871, -1.48447706788501, -1.38766840258161, -1.50369881751537,
## -1.36343818040496, -1.62702048148246, -1.59618916589, -1.41916483956829,
## -1.55870797763166, -1.54876099075454, -1.4530595505586, -1.44964350687541,
## -1.46339554627544, -1.32410051225878, -1.50857859510293, -1.61051028430753,
## -1.47542961211584, -1.47777990462435, -1.40112232198728, -1.52924624061376,
## -1.50572836428609, -1.42796802003095, -1.61565936633134, -1.47425706282928,
## -1.53936603838066, -1.351243091246, -1.50735579633476, -1.50939485770457,
## -1.63324884496924, -1.56176129175274, -1.5321841580014, -1.46147141032506,
## -1.56922805174919, -1.66220820932499, -1.62037180769071, -1.55393453399349,
## -1.61284631918178, -1.55696871583034, -1.49163043179358, -1.54064041483767,
## -1.62702048148246, -1.64979521705898, -1.52673688116944, -1.55091237656436,
## -1.70042993006451, -1.47856488116327, -1.48131842185686, -1.58393660330422,
## -1.59618916589, -1.39189353324403, -1.71490460342403), symmetry_worst = c(-0.948518649356509,
## -1.81385035698237, -1.32733106607223, -1.1682236591391, -1.61373662475265,
## -1.53774568838613, -1.02267961437611, -1.02683069843001, -1.68354734341879,
## -1.24784901230815, -1.54886720493838, -1.33518672982535, -1.07947517897193,
## -1.6339618107143, -1.28531705455465, -1.6655620734172, -1.54440602504168,
## -2.04061017820884, -0.927595663724632, -1.32733106607223, -1.13650734223903,
## -1.06281945915844, -2.13360799774556, -1.8096965887973, -0.898550724637681,
## -1.0606668389142, -2.48674163825318, -3.05560139165559, -1.77492901430698,
## -1.67359179185323, -0.926655171438096, -1.29109441738175, -1.24485540536667,
## -1.58921268038777, -1.20256307610174, -1.81593234259298, -2.12920069507667,
## -1.78980963790557, -1.63877023069482, -1.86694595618265, -1.47839238026982,
## -1.36288848203546, -1.28886874970487, -2.04971159487191, -1.35342087180173,
## -1.66864419123526, -1.49108728516819, -1.43857892266411, -1.7280746576794,
## -2.08248290463863, -1.07583125960433, -1.9598138139316, -1.47471570930472,
## -1.93064634475351, -1.90881552497052, -1.26555094594136, -0.711630722733202,
## -1.79389862629642, -1.80555636306181, -1.3676524573408, -2.12130294982622,
## -2.16035148134034, -1.44061358492657, -1.29020360821199, -1.63937262623419,
## -1.17981500374447, -1.66864419123526, -1.56291772787971, -1.98251531466764,
## -1.95593886064618, -2.44225128149598, -1.86477939744156, -1.75690378382672,
## -1.29287820852485, -2.23808718958129, -1.79868588198793, -1.83623558432291,
## -1.69358427576559, -1.85328560426458, -1.90583283432253, -1.76221765322067,
## -1.56518133530493, -1.6618737658522, -1.14075866336532, -1.76022231091473,
## -1.97385851785602, -2.23903903399297, -2.08418500306765, -2.09701896683472,
## -1.69736933323268, -2.05054204942691, -2.92067831305123, -1.34029955479447,
## -1.49534990859376, -0.862405173218095, -1.64178520364615, -1.52344281721127,
## -2.41941738241592, -2.11345034894864, -1.39519918732522, -1.75227257197787,
## -1.9298874594593, -1.47891857229768, -1.39568851487512, -1.75491687996779,
## -1.39031751810405, -1.8327118988321, -1.5869030194791, -2.83368244522832,
## -1.8662233651353, -1.42293173005074, -2.36228102239682, -1.58921268038777,
## -1.78505582052232, -1.35719831132296, -1.82989993080951, -0.632034671494683,
## -1.74502943136569, -1.85543287977493, -2.12568498503517, -1.47000564743879,
## -1.74044194062115, -1.22371054699841, -1.58459781164493, -1.97857340009713,
## -1.98884682155025, -1.82219881722458, -1.55334527259351, -2.04639487720259,
## -1.31360250635687, -2.19960525565808, -1.3384376262469, -1.8880789567987,
## -1.77291338952272, -2.30331482911935, -1.761552186221, -1.72419461361929,
## -1.84189375197649, -1.92837100659193, -1.91180505926984, -1.61905746683644,
## -2.17482860723433, -2.73646490874199, -1.12423733983001, -1.41145957686351,
## -2.69970686492617, -1.53222399672915, -1.9436150336255, -1.53553390593274,
## -2.0144782295185, -0.782612903765608, -2.28451216777744, -2.57748607123174,
## -1.52727651691069, -1.63696483726654, -1.60374985078224, -2.10910714179416,
## -0.900989043238912, -1.87201546331183, -1.69610635477286, -1.5366392784487,
## -1.00420884063055, -1.6429932929823, -1.42243053204966, -1.65819656951131,
## -1.49481622664415, -1.11286403182345, -2.152273992687, -1.70815718395649,
## -1.97229061149479, -2.99531908151406, -1.31953065154646, -1.4314859277946,
## -1.73456843486636, -1.83976900644465, -1.61255739919515, -1.6184651012597,
## -1.1650482901036, -1.99520862275621, -1.61432667073989, -1.6798045339746,
## -1.76221765322067, -1.42644629234223, -1.74700067253984, -1.70688312201064,
## -2.05137301508329, -1.80900561271674, -2.23903903399297, -2.60043706228236,
## -1.74437304201154, -1.97621386243772, -1.71326660034339, -1.4773407121036,
## -1.87564877129273, -1.70116605098803, -1.72484044638468, -1.89323214521904,
## -1.42544102716222, -1.87492129146064, -1.57713646794481, -1.81454397349388,
## -1.57029026747251, -1.44061358492657, -1.61079072337059, -1.76755413484372,
## -1.50392220818424, -1.49374959569404, -1.61137932729061, -1.86117654460525,
## -1.73456843486636, -1.68855556781659, -2.04391267992747, -1.33148302326385,
## -1.91855666953546, -1.82569355949001, -1.75756678635263, -1.75227257197787,
## -1.99680383488716, -2.24476359978009, -1.80831501247306, -1.94745384990834,
## -2.13891543583711, -1.50392220818424, -1.59152680872644, -1.33797263040563,
## -1.80005700128253, -2.47325440842584, -1.99600599001747, -2.0282974725538,
## -2.16215288929717, -1.67918184092775, -1.49909254054319, -2.19590679148344,
## -1.67483182287313, -1.44010458076891, -1.59152680872644, -2.10910714179416,
## -1.76421617535536, -2.32034978967903, -2.49693750413288, -1.8954469356581,
## -2.23808718958129, -1.67111549107176, -1.85973822213232, -1.48472248992878,
## -2.25147166248137, -1.51147493610312, -1.88441061089581, -1.65697329692419,
## -2.20053140083543, -2.35710200950299, -1.97621386243772, -1.94976252766682,
## -1.43502693286363, -2.56792473218354, -1.77157143202357, -1.57199813184547,
## -2.58710770259687, -1.91630224993979, -1.6184651012597, -2.09787887217882,
## -0.682691445788528, -1.87710496100081, -1.8575837490523, -1.59036918522861,
## -1.63997532105098, -1.33518672982535, -1.48578087187875, -1.95748747642702,
## -1.9050882279228, -2.05553552826906, -1.71775470803394, -1.9276134381717,
## -1.61965012343041, -1.66248771065756, -1.48102573092353, -1.14463853966039,
## -1.80349130564482, -2.05386898390929, -1.72677996249965, -1.5869030194791,
## -1.74568616603648, -1.57828133482257, -1.8270941429378, -1.06966617277021,
## -1.29243198883197, -1.88221458097702, -1.9405520311955, -2.11519149203145,
## -2.05386898390929, -2.0373157543605, -2.23238955690485, -1.89470825224657,
## -1.81801774160606, -1.8201065633589, -1.91555164097267, -1.49695242258118,
## -1.992023920279, -1.8201065633589, -0.879561418604534, -2.01044073656347,
## -1.85686639270039, -1.46948348873484, -1.56178753983291, -1.92458754139111,
## -1.85328560426458, -1.43654786582851, -1.10310698975869, -1.45908956637123,
## -1.53442956921802, -2.27276302636913, -1.7668858325529, -1.82359556450936,
## -2.01852794126098, -1.86550117901428, -1.9405520311955, -2.04473957139505,
## -1.23254091917618, -1.62202366475002, -2.48561304016257, -1.9298874594593,
## -1.79389862629642, -1.51093383510043, -2.03238919272756, -1.53167324846093,
## -2.17210263331832, -2.21543355460736, -1.79458141198112, -2.00963469882313,
## -1.61550763031094, -1.40256139153357, -1.66802714570214, -1.65270148685892,
## -2.10304969931109, -1.55615268811606, -1.51744360676264, -1.60667853866973,
## -1.73196685431038, -1.64965934300906, -1.79663198301, -1.63036797913392,
## -1.78844957698328, -1.81801774160606, -1.53774568838613, -2.12656307060115,
## -1.67297224689699, -1.73587126708281, -1.95593886064618, -1.63216356146074,
## -1.64723109301103, -1.54719228915016, -1.47471570930472, -1.83130514088461,
## -1.98964038503598, -2.32742322407915, -1.96837895066273, -2.63863607042713,
## -1.84971477076266, -1.75161236742064, -1.92307721324537, -1.66864419123526,
## -1.33333333333333, -2.02340381113284, -2.19960525565808, -2.16576273636713,
## -2.17664889448334, -2.04308629539244, -1.49748708391334, -2.2678959977632,
## -1.72419461361929, -2.16485939411725, -2.15137939524202, -1.64541308288245,
## -1.89175771526834, -1.53002254809104, -2.23049544646902, -1.74963384269795,
## -1.95053298754495, -1.61728124220584, -2.16938199225083, -1.54998513446837,
## -2.07908508224002, -1.8575837490523, -1.93673329589478, -1.64783770109722,
## -1.97307433264506, -1.47209664229407, -2.07738934943665, -2.12042821715165,
## -1.52071210083019, -1.9551652341871, -2.03649340670876, -1.49962818425708,
## -1.50769247373693, -2.18853908291695, -1.8583015058264, -2.10910714179416,
## -1.51907671538893, -1.63036797913392, -2.01609664451249, -1.30997016489089,
## -1.70879470776142, -1.6285750571483, -1.74765844497931, -1.79321620905441,
## -1.64359778867931, -1.9193090083481, -2.13980199800554, -1.34029955479447,
## -2.18670321209332, -1.61550763031094, -1.60726513354043, -1.93597088022076,
## -1.60902663692541, -1.29645450956099, -1.55953038653916, -1.74700067253984,
## -1.99840095936045, -1.6618737658522, -1.37195739086684, -1.58632630060416,
## -2.30331482911935, -1.79253415992383, -2.01771701845154, -1.53222399672915,
## -1.77560160745518, -1.8504281432826, -1.77762154604325, -1.61668974825337,
## -1.78641222831377, -2.14424337037824, -1.83341585914639, -2.02177654966643,
## -1.89102112820481, -1.86261647624009, -1.85543287977493, -2.03402937550546,
## -1.55278458694543, -2.07654227570788, -2.05303648179888, -2.10650781176591,
## -2.23903903399297, -2.2051713353118, -2.02015126103685, -2.20889437396518,
## -2.35194139889245, -3.05398695719269, -1.69547535069575, -2.40652649239232,
## -2.24667694835586, -1.12843889570337, -0.948518649356509, -1.81385035698237,
## -1.32733106607223, -0.45477319096941, -1.1682236591391, -1.61373662475265,
## -1.53774568838613, -1.02267961437611, -1.02683069843001, -1.68354734341879,
## -1.77358490566038, -1.33518672982535, -1.07947517897193, -1.6339618107143,
## -1.28531705455465, -1.80142960634853, -1.6655620734172, -1.54440602504168,
## -2.04061017820884, -0.927595663724632, -1.76488305748928, -1.13650734223903,
## -0.898550724637681, -1.36622113937971, -1.30049180992225, -1.0606668389142,
## -0.867991506742528, -1.33750782881173, -2.48674163825318, -3.05560139165559,
## -1.77492901430698, -1.6551406867881, -1.67359179185323, -1.27078700106134,
## -1.29109441738175, -1.24485540536667, -1.81593234259298, -1.7326167397057,
## -2.05470199922352, -2.12920069507667, -1.78980963790557, -1.63877023069482,
## -1.86694595618265, -1.47839238026982, -1.28886874970487, -2.04971159487191,
## -1.52453688425121, -1.35342087180173, -1.66864419123526, -1.75029300308675,
## -1.49108728516819, -1.47052804196902, -1.7280746576794, -2.08248290463863,
## -1.07583125960433, -1.9598138139316, -1.47471570930472, -1.93064634475351,
## -1.90881552497052, -1.88514344942906, -1.84189375197649, -1.26555094594136,
## -0.711630722733202, -1.79389862629642, -1.3676524573408, -2.12130294982622,
## -2.16035148134034, -1.44061358492657, -1.29020360821199, -1.63937262623419,
## -1.17981500374447, -1.66864419123526, -1.56291772787971, -1.98251531466764,
## -1.95593886064618, -2.44225128149598, -1.86477939744156, -1.75690378382672,
## -2.54780422488025, -1.79868588198793, -1.83623558432291, -1.88441061089581,
## -1.69358427576559, -1.85328560426458, -1.76221765322067, -1.56518133530493,
## -1.6618737658522, -1.14075866336532, -1.76022231091473, -1.97385851785602,
## -2.23903903399297, -2.08418500306765, -2.09701896683472, -2.05054204942691,
## -2.92067831305123, -1.34029955479447, -0.862405173218095, -1.64178520364615,
## -1.52344281721127, -1.72097048123965, -2.41941738241592, -2.11345034894864,
## -1.75227257197787, -1.9298874594593, -1.47891857229768, -1.39568851487512,
## -1.75491687996779, -1.39031751810405, -1.8327118988321, -1.5869030194791,
## -1.76022231091473, -2.83368244522832, -1.8662233651353, -1.42293173005074,
## -1.78505582052232, -1.93444737682317, -2.1702882811415, -1.82989993080951,
## -0.632034671494683, -1.74502943136569, -1.85543287977493, -2.12568498503517,
## -1.53774568838613, -1.47000564743879, -1.58748001667088, -1.74044194062115,
## -1.22371054699841, -1.58459781164493, -2.02993266542511, -1.98884682155025,
## -1.82219881722458, -2.04639487720259, -2.19960525565808, -1.3384376262469,
## -1.8880789567987, -2.37478639259807, -1.77291338952272, -2.30331482911935,
## -2.07908508224002, -1.52672807929299, -2.58590167978055, -1.84189375197649,
## -1.92837100659193, -1.91180505926984, -1.61905746683644, -2.17482860723433,
## -2.73646490874199, -1.7424059428256, -1.12423733983001, -1.41145957686351,
## -2.69970686492617, -1.54942603766446, -1.69295456230508, -1.53222399672915,
## -1.9436150336255, -1.53553390593274, -2.0144782295185, -0.782612903765608,
## -2.28451216777744, -2.57748607123174, -1.65148371670111, -1.63696483726654,
## -1.60374985078224, -2.10910714179416, -1.53940725201044, -1.87201546331183,
## -1.69610635477286, -1.5366392784487, -1.00420884063055, -1.6429932929823,
## -1.42243053204966, -1.65819656951131, -1.49481622664415, -1.11286403182345,
## -2.152273992687, -1.70815718395649, -1.97229061149479, -2.92664639082147,
## -2.99531908151406, -0.909879809896627, -1.31953065154646, -1.4314859277946,
## -1.73456843486636, -1.83976900644465, -1.99520862275621, -1.61432667073989,
## -1.91105703267169, -1.6798045339746, -1.76221765322067, -1.42644629234223,
## -1.5869030194791, -1.70688312201064, -1.80900561271674, -2.23903903399297,
## -2.22765904290076, -2.60043706228236, -1.74437304201154, -1.71326660034339,
## -1.70116605098803, -1.72484044638468, -1.80211646688469, -1.89323214521904,
## -1.42544102716222, -1.87492129146064, -1.57713646794481, -1.81454397349388,
## -1.57029026747251, -1.44061358492657, -1.61079072337059, -1.76755413484372,
## -1.50392220818424, -1.49374959569404, -1.37483653531413, -1.56291772787971,
## -2.0389619586101, -1.61137932729061, -1.86117654460525, -1.52672807929299,
## -1.73456843486636, -2.04391267992747, -1.33148302326385, -1.91855666953546,
## -2.03402937550546, -1.82569355949001, -1.99680383488716, -2.24476359978009,
## -1.80831501247306, -1.94745384990834, -1.67918184092775, -1.50392220818424,
## -1.59152680872644, -1.33797263040563, -2.47325440842584, -1.99600599001747,
## -2.0282974725538, -2.16215288929717, -1.67918184092775, -1.49909254054319,
## -2.19590679148344, -1.67483182287313, -1.59152680872644, -2.10910714179416,
## -2.32034978967903, -2.49693750413288, -1.8954469356581, -2.23808718958129,
## -1.67111549107176, -1.85973822213232, -1.48472248992878, -2.25147166248137,
## -2.25628265379374, -1.51147493610312, -1.88441061089581, -1.65697329692419,
## -2.20053140083543, -1.38691285149289, -1.97621386243772, -1.94976252766682,
## -1.43502693286363, -1.56801692100357, -2.56792473218354, -2.17664889448334,
## -1.77157143202357, -1.57199813184547, -2.58710770259687, -2.09787887217882,
## -0.682691445788528, -1.87710496100081, -1.8575837490523, -2.36435780471985,
## -1.59036918522861, -1.82359556450936, -1.63997532105098, -1.33518672982535,
## -1.76956117794169, -1.29735040698467, -1.71775470803394, -1.9276134381717,
## -1.61965012343041, -1.66248771065756, -1.48102573092353, -1.14463853966039,
## -1.80349130564482, -2.05386898390929, -1.5869030194791, -1.74568616603648,
## -1.06966617277021, -1.29243198883197, -1.88221458097702, -1.9405520311955,
## -2.34269832707947, -1.58459781164493, -2.11519149203145, -2.05386898390929,
## -2.0373157543605, -1.89470825224657, -1.81801774160606, -2.08759559656644,
## -1.8201065633589, -1.91555164097267, -1.49695242258118, -1.52563197108336,
## -1.992023920279, -1.8201065633589, -2.01044073656347, -1.82779501175476,
## -1.85686639270039, -1.46948348873484, -1.92458754139111, -1.10310698975869,
## -1.45908956637123, -2.27276302636913, -1.7668858325529, -1.82359556450936,
## -2.01852794126098, -1.86550117901428, -1.9668175187901, -2.30630635873273,
## -1.9405520311955, -1.69043892539032, -1.5427374148158, -1.23254091917618,
## -1.62202366475002, -1.98172599441121, -2.48561304016257, -1.9298874594593,
## -1.51093383510043, -2.03238919272756, -1.53167324846093, -1.48313672307081,
## -2.17210263331832, -2.21543355460736, -1.79458141198112, -2.00963469882313,
## -1.61550763031094, -1.66802714570214, -1.65270148685892, -2.10304969931109,
## -1.55615268811606, -1.51744360676264, -1.73196685431038, -1.64965934300906,
## -1.79663198301, -1.66617787553383, -1.63036797913392, -1.78844957698328,
## -1.81801774160606, -1.53774568838613, -2.12656307060115, -1.67297224689699,
## -1.73587126708281, -1.95593886064618, -1.63216356146074, -1.64723109301103,
## -1.98014876083996, -1.98964038503598, -1.96837895066273, -2,
## -2.63863607042713, -1.84971477076266, -1.75161236742064, -1.66864419123526,
## -1.33333333333333, -2.02340381113284, -2.19960525565808, -2.16576273636713,
## -2.04308629539244, -1.91780476380667, -1.49748708391334, -2.2678959977632,
## -1.72419461361929, -2.16485939411725, -2.15137939524202, -1.64541308288245,
## -1.89175771526834, -2.130081027822, -2.23049544646902, -1.74963384269795,
## -1.95053298754495, -1.61728124220584, -2.16938199225083, -1.87783367164741,
## -1.54998513446837, -2.07908508224002, -2.07484971085364, -1.64783770109722,
## -1.97307433264506, -1.86405802003958, -1.68792842242945, -2.07738934943665,
## -2.12042821715165, -1.52071210083019, -1.98567729636041, -1.9551652341871,
## -1.5185320931284, -2.03649340670876, -1.49962818425708, -1.77694787300249,
## -2.73646490874199, -1.50769247373693, -1.8583015058264, -2.10910714179416,
## -1.51907671538893, -1.63036797913392, -1.992023920279, -2.01609664451249,
## -2.28647979689047, -1.30997016489089, -1.56065842662025, -1.70879470776142,
## -1.6285750571483, -1.8426027846949, -1.79321620905441, -2.1702882811415,
## -1.64359778867931, -1.9193090083481, -2.13980199800554, -1.34029955479447,
## -1.90285696125482, -2.18670321209332, -1.61550763031094, -1.60726513354043,
## -1.93597088022076, -1.29645450956099, -1.55953038653916, -1.9551652341871,
## -1.74700067253984, -1.99840095936045, -1.37195739086684, -1.58632630060416,
## -2.01771701845154, -1.53222399672915, -1.52453688425121, -2.18029662347075,
## -1.60550620691808, -1.77762154604325, -2.14424337037824, -1.5427374148158,
## -1.83341585914639, -1.89102112820481, -1.86261647624009, -1.85543287977493,
## -1.95903791232448, -2.03402937550546, -1.55278458694543, -2.07654227570788,
## -2.05303648179888, -2.10650781176591, -2.23903903399297, -2.2051713353118,
## -2.02015126103685, -2.20889437396518, -2.35194139889245, -2.21637021355784,
## -3.05398695719269, -1.12767371557802, -2.40652649239232, -1.9436150336255,
## -2.24667694835586, -1.12843889570337, -1.7326167397057), .outcome = c(2L,
## 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L,
## 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 2L, 2L, 2L, 2L, 2L, 2L,
## 1L, 2L, 1L, 1L, 1L, 2L, 2L, 1L, 2L, 2L, 1L, 1L, 1L, 1L, 2L, 1L,
## 1L, 1L, 2L, 2L, 2L, 1L, 2L, 2L, 1L, 1L, 1L, 2L, 2L, 1L, 2L, 2L,
## 2L, 1L, 1L, 1L, 2L, 1L, 1L, 2L, 2L, 1L, 1L, 2L, 1L, 1L, 1L, 1L,
## 2L, 1L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 1L, 2L,
## 1L, 1L, 2L, 2L, 1L, 2L, 1L, 2L, 2L, 1L, 2L, 1L, 1L, 2L, 1L, 1L,
## 2L, 1L, 1L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L,
## 2L, 1L, 2L, 1L, 2L, 2L, 1L, 2L, 1L, 1L, 1L, 2L, 1L, 1L, 2L, 2L,
## 1L, 2L, 1L, 1L, 1L, 2L, 1L, 1L, 2L, 1L, 2L, 2L, 2L, 1L, 2L, 2L,
## 2L, 1L, 2L, 1L, 2L, 1L, 1L, 2L, 1L, 2L, 2L, 1L, 1L, 2L, 1L, 1L,
## 2L, 1L, 1L, 1L, 1L, 2L, 1L, 1L, 2L, 1L, 1L, 1L, 2L, 1L, 1L, 1L,
## 1L, 2L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L,
## 2L, 1L, 1L, 1L, 1L, 1L, 2L, 1L, 2L, 1L, 1L, 2L, 1L, 2L, 1L, 2L,
## 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 1L,
## 1L, 2L, 1L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 1L,
## 1L, 1L, 2L, 1L, 2L, 1L, 1L, 2L, 2L, 1L, 1L, 1L, 2L, 1L, 1L, 2L,
## 1L, 1L, 1L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 1L, 1L,
## 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 1L, 2L, 1L, 1L, 1L,
## 1L, 1L, 2L, 1L, 1L, 1L, 1L, 1L, 2L, 1L, 2L, 1L, 2L, 1L, 1L, 1L,
## 1L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 1L, 1L, 1L, 1L, 1L, 2L, 1L,
## 1L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 1L, 2L, 2L, 2L, 1L, 1L,
## 1L, 1L, 1L, 1L, 1L, 2L, 1L, 2L, 1L, 1L, 2L, 1L, 2L, 1L, 1L, 1L,
## 1L, 1L, 1L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 2L, 1L, 1L, 1L, 1L, 1L,
## 1L, 2L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 2L, 2L,
## 2L, 1L, 1L, 1L, 2L, 1L, 1L, 2L, 2L, 1L, 2L, 1L, 1L, 1L, 2L, 1L,
## 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 1L, 1L, 1L, 1L, 1L, 1L,
## 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L,
## 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L,
## 2L, 2L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 2L,
## 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 1L, 2L, 1L,
## 1L, 1L, 1L, 2L, 1L, 2L, 1L, 1L, 1L, 2L, 2L, 2L, 1L, 2L, 1L, 2L,
## 2L, 1L, 1L, 2L, 2L, 1L, 2L, 2L, 2L, 1L, 1L, 1L, 2L, 1L, 1L, 2L,
## 1L, 1L, 2L, 2L, 1L, 1L, 1L, 2L, 1L, 2L, 1L, 1L, 1L, 1L, 1L, 1L,
## 1L, 2L, 2L, 1L, 2L, 1L, 1L, 1L, 2L, 1L, 2L, 1L, 2L, 2L, 1L, 2L,
## 2L, 1L, 1L, 2L, 2L, 1L, 1L, 1L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L,
## 1L, 1L, 2L, 1L, 1L, 2L, 1L, 2L, 1L, 1L, 2L, 2L, 1L, 2L, 1L, 1L,
## 1L, 1L, 2L, 1L, 1L, 2L, 2L, 2L, 1L, 2L, 1L, 2L, 1L, 1L, 1L, 2L,
## 1L, 1L, 2L, 1L, 2L, 2L, 2L, 1L, 2L, 2L, 2L, 1L, 2L, 1L, 2L, 1L,
## 1L, 2L, 1L, 2L, 2L, 2L, 2L, 1L, 1L, 2L, 1L, 1L, 1L, 1L, 1L, 2L,
## 2L, 1L, 1L, 1L, 2L, 1L, 2L, 1L, 2L, 1L, 1L, 1L, 1L, 1L, 2L, 2L,
## 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L,
## 1L, 1L, 2L, 1L, 1L, 2L, 1L, 2L, 1L, 2L, 1L, 1L, 1L, 1L, 1L, 1L,
## 1L, 1L, 1L, 1L, 1L, 2L, 1L, 1L, 2L, 1L, 2L, 1L, 1L, 1L, 1L, 1L,
## 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 1L, 1L, 1L, 2L, 1L, 1L, 1L,
## 2L, 2L, 2L, 1L, 1L, 2L, 1L, 2L, 1L, 1L, 1L, 2L, 1L, 1L, 1L, 1L,
## 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 1L,
## 2L, 2L, 1L, 2L, 2L, 1L, 1L, 2L, 1L, 1L, 1L, 1L, 2L, 1L, 1L, 1L,
## 2L, 1L, 2L, 2L, 1L, 1L, 1L, 1L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L,
## 2L, 1L, 1L, 1L, 1L, 2L, 1L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L,
## 2L, 1L, 2L, 2L, 1L, 1L, 1L, 2L, 1L, 1L, 2L, 2L, 1L, 1L, 2L, 1L,
## 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 2L, 1L,
## 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 1L,
## 1L, 2L, 1L, 1L, 1L, 1L, 2L, 2L, 1L, 2L, 1L, 2L, 1L, 1L, 1L, 1L,
## 2L, 1L, 1L, 2L, 1L, 2L, 1L, 2L, 1L, 1L, 2L, 1L, 1L, 1L, 1L, 1L,
## 1L, 1L, 2L, 1L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L,
## 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 1L)),
## mfinal = 100, coeflearn = "Breiman", control = list(minsplit = 0,
## minbucket = 0, cp = -1, maxcompete = 4L, maxsurrogate = 5L,
## usesurrogate = 2L, surrogatestyle = 0L, maxdepth = 6,
## xval = 0))
##
## $xNames
## [1] "texture_mean" "smoothness_mean" "compactness_se" "texture_worst"
## [5] "smoothness_worst" "symmetry_worst"
##
## $problemType
## [1] "Classification"
##
## $tuneValue
## mfinal maxdepth coeflearn
## 6 100 6 Breiman
##
## $obsLevels
## [1] "B" "M"
## attr(,"ordered")
## [1] FALSE
##
## $param
## list()
##
## attr(,"vardep.summary")
## B M
## 572 340
## attr(,"class")
## [1] "boosting"
# Displaying the resampled performance metrics for every adaptive boosting
# hyperparameter combination evaluated during tuning.
MBS_AB_Tune$results
## coeflearn maxdepth mfinal ROC Sens Spec ROCSD SensSD
## 1 Breiman 4 50 0.9539212 0.9454523 0.8682353 0.02278794 0.02792403
## 3 Breiman 5 50 0.9624131 0.9531137 0.9011765 0.01779028 0.02435207
## 5 Breiman 6 50 0.9666688 0.9496323 0.8982353 0.01854771 0.02567639
## 2 Breiman 4 100 0.9600671 0.9506972 0.9017647 0.02131985 0.02653148
## 4 Breiman 5 100 0.9671294 0.9506636 0.9000000 0.01843939 0.02577619
## 6 Breiman 6 100 0.9710985 0.9527750 0.8964706 0.01750303 0.02587865
## SpecSD
## 1 0.04502338
## 3 0.03737725
## 5 0.04744784
## 2 0.04281576
## 4 0.03422608
## 6 0.04561985
# Extracting the cross-validated AUROC of the best adaptive boosting model:
# the single resampling-results row whose hyperparameters (mfinal, maxdepth,
# coeflearn) match the optimal tuning values. The outer parentheses make the
# assignment auto-print its value.
(MBS_AB_Train_AUROC <- MBS_AB_Tune$results[MBS_AB_Tune$results$mfinal == MBS_AB_Tune$bestTune$mfinal &
                                             MBS_AB_Tune$results$maxdepth == MBS_AB_Tune$bestTune$maxdepth &
                                             MBS_AB_Tune$results$coeflearn == MBS_AB_Tune$bestTune$coeflearn,
                                           c("ROC")])
## [1] 0.9710985
##################################
# Identifying and plotting the
# best model predictors
##################################
# Computing scaled (0-100) variable importance for the tuned adaptive
# boosting model and plotting the top 6 predictors.
MBS_AB_VarImp <- varImp(MBS_AB_Tune, scale = TRUE)
plot(MBS_AB_VarImp,
     top = 6,
     scales = list(y = list(cex = .95)),
     main = "Ranked Variable Importance : Adaptive Boosting",
     xlab = "Scaled Variable Importance Metrics",
     ylab = "Predictors",
     cex = 2,
     origin = 0,
     alpha = 0.45)
##################################
# Independently evaluating the model
# on the test set
##################################
# Scoring the held-out test set with the tuned adaptive boosting model:
# observed diagnosis labels alongside predicted class probabilities
# (the diagnosis column is excluded from the predictor matrix).
MBS_AB_Test <- data.frame(MBS_AB_Test_Observed = MA_Test$diagnosis,
                          MBS_AB_Test_Predicted = predict(MBS_AB_Tune,
                                                          MA_Test[, !names(MA_Test) %in% c("diagnosis")],
                                                          type = "prob"))
##################################
# Reporting the independent evaluation results
# for the test set
##################################
# Building the test-set ROC curve from the predicted probability of the
# malignant ("M") class; levels are reversed so pROC treats "M" as the
# positive class. The outer parentheses auto-print the resulting AUROC.
MBS_AB_Test_ROC <- roc(response = MBS_AB_Test$MBS_AB_Test_Observed,
                       predictor = MBS_AB_Test$MBS_AB_Test_Predicted.M,
                       levels = rev(levels(MBS_AB_Test$MBS_AB_Test_Observed)))
(MBS_AB_Test_AUROC <- auc(MBS_AB_Test_ROC)[1])
## [1] 0.9956405
##################################
# Setting the conditions
# for hyperparameter tuning
##################################
# Defining the stochastic gradient boosting tuning grid:
# a fixed number of trees crossed with 3 depths x 3 shrinkage rates x
# 3 minimum-node sizes (27 candidate hyperparameter combinations).
GBM_Grid <- expand.grid(n.trees = 500,
                        interaction.depth = c(4, 5, 6),
                        shrinkage = c(0.1, 0.01, 0.001),
                        n.minobsinnode = c(5, 10, 15))
##################################
# Running the stochastic gradient boosting model
# by setting the caret method to 'gbm'
##################################
# Fitting the stochastic gradient boosting model via caret (method = "gbm"),
# tuning over GBM_Grid and selecting by cross-validated ROC; the seed makes
# the resampling reproducible. RKFold_Control is the repeated k-fold
# trainControl object defined earlier in the file.
set.seed(12345678)
MBS_GBM_Tune <- train(x = MA_Train[, !names(MA_Train) %in% c("diagnosis")],
                      y = MA_Train$diagnosis,
                      method = "gbm",
                      tuneGrid = GBM_Grid,
                      metric = "ROC",
                      trControl = RKFold_Control)
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3196 nan 0.0010 0.0004
## 3 1.3187 nan 0.0010 0.0004
## 4 1.3178 nan 0.0010 0.0004
## 5 1.3170 nan 0.0010 0.0004
## 6 1.3162 nan 0.0010 0.0004
## 7 1.3152 nan 0.0010 0.0004
## 8 1.3144 nan 0.0010 0.0004
## 9 1.3135 nan 0.0010 0.0004
## 10 1.3127 nan 0.0010 0.0004
## 20 1.3043 nan 0.0010 0.0004
## 40 1.2881 nan 0.0010 0.0003
## 60 1.2723 nan 0.0010 0.0003
## 80 1.2570 nan 0.0010 0.0004
## 100 1.2426 nan 0.0010 0.0003
## 120 1.2286 nan 0.0010 0.0003
## 140 1.2148 nan 0.0010 0.0003
## 160 1.2015 nan 0.0010 0.0003
## 180 1.1885 nan 0.0010 0.0003
## 200 1.1760 nan 0.0010 0.0002
## 220 1.1635 nan 0.0010 0.0003
## 240 1.1518 nan 0.0010 0.0002
## 260 1.1404 nan 0.0010 0.0002
## 280 1.1296 nan 0.0010 0.0002
## 300 1.1187 nan 0.0010 0.0003
## 320 1.1085 nan 0.0010 0.0002
## 340 1.0984 nan 0.0010 0.0003
## 360 1.0884 nan 0.0010 0.0002
## 380 1.0787 nan 0.0010 0.0002
## 400 1.0691 nan 0.0010 0.0002
## 420 1.0600 nan 0.0010 0.0002
## 440 1.0514 nan 0.0010 0.0002
## 460 1.0430 nan 0.0010 0.0002
## 480 1.0347 nan 0.0010 0.0002
## 500 1.0265 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3195 nan 0.0010 0.0004
## 3 1.3186 nan 0.0010 0.0004
## 4 1.3177 nan 0.0010 0.0004
## 5 1.3168 nan 0.0010 0.0004
## 6 1.3160 nan 0.0010 0.0004
## 7 1.3152 nan 0.0010 0.0003
## 8 1.3143 nan 0.0010 0.0004
## 9 1.3135 nan 0.0010 0.0004
## 10 1.3127 nan 0.0010 0.0003
## 20 1.3044 nan 0.0010 0.0004
## 40 1.2883 nan 0.0010 0.0004
## 60 1.2725 nan 0.0010 0.0003
## 80 1.2574 nan 0.0010 0.0003
## 100 1.2426 nan 0.0010 0.0003
## 120 1.2288 nan 0.0010 0.0004
## 140 1.2151 nan 0.0010 0.0003
## 160 1.2019 nan 0.0010 0.0003
## 180 1.1892 nan 0.0010 0.0003
## 200 1.1769 nan 0.0010 0.0003
## 220 1.1648 nan 0.0010 0.0003
## 240 1.1532 nan 0.0010 0.0003
## 260 1.1419 nan 0.0010 0.0003
## 280 1.1308 nan 0.0010 0.0002
## 300 1.1201 nan 0.0010 0.0003
## 320 1.1097 nan 0.0010 0.0002
## 340 1.0997 nan 0.0010 0.0002
## 360 1.0899 nan 0.0010 0.0002
## 380 1.0800 nan 0.0010 0.0002
## 400 1.0707 nan 0.0010 0.0002
## 420 1.0617 nan 0.0010 0.0002
## 440 1.0529 nan 0.0010 0.0002
## 460 1.0443 nan 0.0010 0.0002
## 480 1.0359 nan 0.0010 0.0002
## 500 1.0276 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3204 nan 0.0010 0.0004
## 2 1.3196 nan 0.0010 0.0004
## 3 1.3187 nan 0.0010 0.0003
## 4 1.3179 nan 0.0010 0.0004
## 5 1.3170 nan 0.0010 0.0004
## 6 1.3161 nan 0.0010 0.0004
## 7 1.3153 nan 0.0010 0.0004
## 8 1.3145 nan 0.0010 0.0004
## 9 1.3137 nan 0.0010 0.0004
## 10 1.3128 nan 0.0010 0.0004
## 20 1.3044 nan 0.0010 0.0004
## 40 1.2885 nan 0.0010 0.0004
## 60 1.2731 nan 0.0010 0.0004
## 80 1.2581 nan 0.0010 0.0003
## 100 1.2435 nan 0.0010 0.0003
## 120 1.2296 nan 0.0010 0.0003
## 140 1.2161 nan 0.0010 0.0003
## 160 1.2029 nan 0.0010 0.0003
## 180 1.1903 nan 0.0010 0.0003
## 200 1.1781 nan 0.0010 0.0003
## 220 1.1659 nan 0.0010 0.0003
## 240 1.1542 nan 0.0010 0.0003
## 260 1.1428 nan 0.0010 0.0003
## 280 1.1317 nan 0.0010 0.0002
## 300 1.1211 nan 0.0010 0.0002
## 320 1.1107 nan 0.0010 0.0002
## 340 1.1008 nan 0.0010 0.0002
## 360 1.0911 nan 0.0010 0.0002
## 380 1.0815 nan 0.0010 0.0002
## 400 1.0723 nan 0.0010 0.0002
## 420 1.0633 nan 0.0010 0.0002
## 440 1.0543 nan 0.0010 0.0002
## 460 1.0457 nan 0.0010 0.0002
## 480 1.0373 nan 0.0010 0.0002
## 500 1.0290 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3193 nan 0.0010 0.0004
## 3 1.3185 nan 0.0010 0.0003
## 4 1.3176 nan 0.0010 0.0004
## 5 1.3167 nan 0.0010 0.0004
## 6 1.3158 nan 0.0010 0.0004
## 7 1.3149 nan 0.0010 0.0004
## 8 1.3140 nan 0.0010 0.0004
## 9 1.3132 nan 0.0010 0.0004
## 10 1.3123 nan 0.0010 0.0004
## 20 1.3035 nan 0.0010 0.0004
## 40 1.2861 nan 0.0010 0.0004
## 60 1.2695 nan 0.0010 0.0004
## 80 1.2535 nan 0.0010 0.0003
## 100 1.2381 nan 0.0010 0.0003
## 120 1.2231 nan 0.0010 0.0003
## 140 1.2084 nan 0.0010 0.0003
## 160 1.1946 nan 0.0010 0.0003
## 180 1.1814 nan 0.0010 0.0003
## 200 1.1680 nan 0.0010 0.0003
## 220 1.1549 nan 0.0010 0.0003
## 240 1.1424 nan 0.0010 0.0002
## 260 1.1304 nan 0.0010 0.0002
## 280 1.1188 nan 0.0010 0.0003
## 300 1.1077 nan 0.0010 0.0002
## 320 1.0965 nan 0.0010 0.0003
## 340 1.0859 nan 0.0010 0.0003
## 360 1.0755 nan 0.0010 0.0002
## 380 1.0654 nan 0.0010 0.0002
## 400 1.0556 nan 0.0010 0.0002
## 420 1.0459 nan 0.0010 0.0002
## 440 1.0366 nan 0.0010 0.0002
## 460 1.0276 nan 0.0010 0.0002
## 480 1.0186 nan 0.0010 0.0002
## 500 1.0099 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3202 nan 0.0010 0.0004
## 2 1.3194 nan 0.0010 0.0003
## 3 1.3185 nan 0.0010 0.0004
## 4 1.3176 nan 0.0010 0.0004
## 5 1.3167 nan 0.0010 0.0004
## 6 1.3158 nan 0.0010 0.0004
## 7 1.3150 nan 0.0010 0.0004
## 8 1.3141 nan 0.0010 0.0004
## 9 1.3132 nan 0.0010 0.0004
## 10 1.3124 nan 0.0010 0.0004
## 20 1.3035 nan 0.0010 0.0004
## 40 1.2863 nan 0.0010 0.0004
## 60 1.2698 nan 0.0010 0.0004
## 80 1.2541 nan 0.0010 0.0004
## 100 1.2389 nan 0.0010 0.0003
## 120 1.2243 nan 0.0010 0.0003
## 140 1.2096 nan 0.0010 0.0003
## 160 1.1959 nan 0.0010 0.0003
## 180 1.1823 nan 0.0010 0.0003
## 200 1.1690 nan 0.0010 0.0003
## 220 1.1562 nan 0.0010 0.0002
## 240 1.1439 nan 0.0010 0.0003
## 260 1.1320 nan 0.0010 0.0002
## 280 1.1205 nan 0.0010 0.0002
## 300 1.1093 nan 0.0010 0.0003
## 320 1.0985 nan 0.0010 0.0002
## 340 1.0875 nan 0.0010 0.0002
## 360 1.0772 nan 0.0010 0.0002
## 380 1.0671 nan 0.0010 0.0002
## 400 1.0574 nan 0.0010 0.0002
## 420 1.0476 nan 0.0010 0.0002
## 440 1.0381 nan 0.0010 0.0002
## 460 1.0290 nan 0.0010 0.0002
## 480 1.0200 nan 0.0010 0.0002
## 500 1.0115 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3195 nan 0.0010 0.0004
## 3 1.3186 nan 0.0010 0.0004
## 4 1.3177 nan 0.0010 0.0004
## 5 1.3168 nan 0.0010 0.0004
## 6 1.3160 nan 0.0010 0.0004
## 7 1.3151 nan 0.0010 0.0004
## 8 1.3142 nan 0.0010 0.0004
## 9 1.3134 nan 0.0010 0.0004
## 10 1.3125 nan 0.0010 0.0004
## 20 1.3038 nan 0.0010 0.0004
## 40 1.2870 nan 0.0010 0.0004
## 60 1.2704 nan 0.0010 0.0004
## 80 1.2549 nan 0.0010 0.0004
## 100 1.2395 nan 0.0010 0.0003
## 120 1.2247 nan 0.0010 0.0004
## 140 1.2106 nan 0.0010 0.0003
## 160 1.1967 nan 0.0010 0.0003
## 180 1.1835 nan 0.0010 0.0002
## 200 1.1703 nan 0.0010 0.0003
## 220 1.1578 nan 0.0010 0.0002
## 240 1.1457 nan 0.0010 0.0003
## 260 1.1340 nan 0.0010 0.0002
## 280 1.1225 nan 0.0010 0.0002
## 300 1.1112 nan 0.0010 0.0002
## 320 1.1006 nan 0.0010 0.0002
## 340 1.0901 nan 0.0010 0.0002
## 360 1.0797 nan 0.0010 0.0002
## 380 1.0698 nan 0.0010 0.0002
## 400 1.0601 nan 0.0010 0.0002
## 420 1.0507 nan 0.0010 0.0002
## 440 1.0418 nan 0.0010 0.0002
## 460 1.0327 nan 0.0010 0.0002
## 480 1.0243 nan 0.0010 0.0001
## 500 1.0157 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3202 nan 0.0010 0.0005
## 2 1.3192 nan 0.0010 0.0004
## 3 1.3183 nan 0.0010 0.0004
## 4 1.3173 nan 0.0010 0.0005
## 5 1.3163 nan 0.0010 0.0004
## 6 1.3154 nan 0.0010 0.0004
## 7 1.3145 nan 0.0010 0.0004
## 8 1.3136 nan 0.0010 0.0004
## 9 1.3127 nan 0.0010 0.0004
## 10 1.3118 nan 0.0010 0.0004
## 20 1.3026 nan 0.0010 0.0004
## 40 1.2848 nan 0.0010 0.0004
## 60 1.2675 nan 0.0010 0.0004
## 80 1.2509 nan 0.0010 0.0003
## 100 1.2346 nan 0.0010 0.0003
## 120 1.2190 nan 0.0010 0.0003
## 140 1.2040 nan 0.0010 0.0002
## 160 1.1895 nan 0.0010 0.0003
## 180 1.1753 nan 0.0010 0.0003
## 200 1.1617 nan 0.0010 0.0003
## 220 1.1482 nan 0.0010 0.0003
## 240 1.1352 nan 0.0010 0.0003
## 260 1.1224 nan 0.0010 0.0003
## 280 1.1105 nan 0.0010 0.0002
## 300 1.0987 nan 0.0010 0.0002
## 320 1.0874 nan 0.0010 0.0003
## 340 1.0762 nan 0.0010 0.0002
## 360 1.0651 nan 0.0010 0.0002
## 380 1.0547 nan 0.0010 0.0002
## 400 1.0446 nan 0.0010 0.0002
## 420 1.0347 nan 0.0010 0.0002
## 440 1.0249 nan 0.0010 0.0001
## 460 1.0154 nan 0.0010 0.0002
## 480 1.0062 nan 0.0010 0.0002
## 500 0.9972 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3202 nan 0.0010 0.0004
## 2 1.3193 nan 0.0010 0.0004
## 3 1.3184 nan 0.0010 0.0004
## 4 1.3174 nan 0.0010 0.0004
## 5 1.3165 nan 0.0010 0.0004
## 6 1.3156 nan 0.0010 0.0004
## 7 1.3147 nan 0.0010 0.0004
## 8 1.3138 nan 0.0010 0.0004
## 9 1.3129 nan 0.0010 0.0004
## 10 1.3120 nan 0.0010 0.0004
## 20 1.3028 nan 0.0010 0.0004
## 40 1.2853 nan 0.0010 0.0004
## 60 1.2684 nan 0.0010 0.0004
## 80 1.2519 nan 0.0010 0.0004
## 100 1.2361 nan 0.0010 0.0004
## 120 1.2207 nan 0.0010 0.0003
## 140 1.2057 nan 0.0010 0.0003
## 160 1.1915 nan 0.0010 0.0003
## 180 1.1778 nan 0.0010 0.0003
## 200 1.1639 nan 0.0010 0.0002
## 220 1.1507 nan 0.0010 0.0003
## 240 1.1382 nan 0.0010 0.0002
## 260 1.1257 nan 0.0010 0.0003
## 280 1.1136 nan 0.0010 0.0003
## 300 1.1019 nan 0.0010 0.0002
## 320 1.0904 nan 0.0010 0.0002
## 340 1.0794 nan 0.0010 0.0002
## 360 1.0687 nan 0.0010 0.0002
## 380 1.0585 nan 0.0010 0.0002
## 400 1.0482 nan 0.0010 0.0002
## 420 1.0384 nan 0.0010 0.0002
## 440 1.0288 nan 0.0010 0.0002
## 460 1.0194 nan 0.0010 0.0002
## 480 1.0103 nan 0.0010 0.0002
## 500 1.0013 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0005
## 2 1.3194 nan 0.0010 0.0004
## 3 1.3184 nan 0.0010 0.0004
## 4 1.3175 nan 0.0010 0.0004
## 5 1.3166 nan 0.0010 0.0004
## 6 1.3157 nan 0.0010 0.0004
## 7 1.3147 nan 0.0010 0.0004
## 8 1.3137 nan 0.0010 0.0004
## 9 1.3128 nan 0.0010 0.0004
## 10 1.3119 nan 0.0010 0.0004
## 20 1.3027 nan 0.0010 0.0004
## 40 1.2853 nan 0.0010 0.0004
## 60 1.2686 nan 0.0010 0.0004
## 80 1.2523 nan 0.0010 0.0004
## 100 1.2366 nan 0.0010 0.0003
## 120 1.2214 nan 0.0010 0.0003
## 140 1.2067 nan 0.0010 0.0003
## 160 1.1923 nan 0.0010 0.0003
## 180 1.1783 nan 0.0010 0.0003
## 200 1.1651 nan 0.0010 0.0003
## 220 1.1522 nan 0.0010 0.0003
## 240 1.1396 nan 0.0010 0.0003
## 260 1.1275 nan 0.0010 0.0003
## 280 1.1156 nan 0.0010 0.0003
## 300 1.1043 nan 0.0010 0.0002
## 320 1.0932 nan 0.0010 0.0002
## 340 1.0822 nan 0.0010 0.0003
## 360 1.0718 nan 0.0010 0.0002
## 380 1.0615 nan 0.0010 0.0002
## 400 1.0514 nan 0.0010 0.0002
## 420 1.0419 nan 0.0010 0.0002
## 440 1.0322 nan 0.0010 0.0002
## 460 1.0229 nan 0.0010 0.0002
## 480 1.0139 nan 0.0010 0.0002
## 500 1.0051 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3134 nan 0.0100 0.0032
## 2 1.3061 nan 0.0100 0.0036
## 3 1.2981 nan 0.0100 0.0037
## 4 1.2896 nan 0.0100 0.0039
## 5 1.2823 nan 0.0100 0.0034
## 6 1.2742 nan 0.0100 0.0038
## 7 1.2666 nan 0.0100 0.0038
## 8 1.2592 nan 0.0100 0.0034
## 9 1.2525 nan 0.0100 0.0028
## 10 1.2453 nan 0.0100 0.0035
## 20 1.1779 nan 0.0100 0.0024
## 40 1.0706 nan 0.0100 0.0020
## 60 0.9874 nan 0.0100 0.0017
## 80 0.9243 nan 0.0100 0.0011
## 100 0.8736 nan 0.0100 0.0009
## 120 0.8316 nan 0.0100 0.0006
## 140 0.7966 nan 0.0100 0.0004
## 160 0.7688 nan 0.0100 0.0003
## 180 0.7435 nan 0.0100 0.0004
## 200 0.7220 nan 0.0100 0.0004
## 220 0.7034 nan 0.0100 0.0000
## 240 0.6859 nan 0.0100 0.0002
## 260 0.6707 nan 0.0100 0.0002
## 280 0.6570 nan 0.0100 0.0000
## 300 0.6436 nan 0.0100 -0.0001
## 320 0.6315 nan 0.0100 -0.0000
## 340 0.6205 nan 0.0100 -0.0001
## 360 0.6104 nan 0.0100 -0.0001
## 380 0.6008 nan 0.0100 0.0000
## 400 0.5915 nan 0.0100 0.0000
## 420 0.5823 nan 0.0100 -0.0000
## 440 0.5728 nan 0.0100 -0.0000
## 460 0.5640 nan 0.0100 -0.0002
## 480 0.5559 nan 0.0100 -0.0000
## 500 0.5473 nan 0.0100 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3127 nan 0.0100 0.0039
## 2 1.3047 nan 0.0100 0.0033
## 3 1.2961 nan 0.0100 0.0038
## 4 1.2873 nan 0.0100 0.0038
## 5 1.2791 nan 0.0100 0.0036
## 6 1.2715 nan 0.0100 0.0035
## 7 1.2634 nan 0.0100 0.0038
## 8 1.2560 nan 0.0100 0.0031
## 9 1.2494 nan 0.0100 0.0033
## 10 1.2424 nan 0.0100 0.0034
## 20 1.1766 nan 0.0100 0.0031
## 40 1.0713 nan 0.0100 0.0020
## 60 0.9897 nan 0.0100 0.0015
## 80 0.9261 nan 0.0100 0.0013
## 100 0.8752 nan 0.0100 0.0008
## 120 0.8357 nan 0.0100 0.0006
## 140 0.8024 nan 0.0100 0.0005
## 160 0.7738 nan 0.0100 0.0002
## 180 0.7496 nan 0.0100 0.0004
## 200 0.7287 nan 0.0100 0.0003
## 220 0.7106 nan 0.0100 0.0001
## 240 0.6941 nan 0.0100 -0.0001
## 260 0.6788 nan 0.0100 0.0001
## 280 0.6645 nan 0.0100 -0.0000
## 300 0.6519 nan 0.0100 -0.0001
## 320 0.6390 nan 0.0100 -0.0001
## 340 0.6282 nan 0.0100 0.0000
## 360 0.6176 nan 0.0100 0.0000
## 380 0.6072 nan 0.0100 -0.0001
## 400 0.5979 nan 0.0100 -0.0001
## 420 0.5891 nan 0.0100 -0.0002
## 440 0.5804 nan 0.0100 -0.0001
## 460 0.5718 nan 0.0100 0.0000
## 480 0.5639 nan 0.0100 -0.0001
## 500 0.5554 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3136 nan 0.0100 0.0034
## 2 1.3048 nan 0.0100 0.0040
## 3 1.2971 nan 0.0100 0.0036
## 4 1.2891 nan 0.0100 0.0035
## 5 1.2807 nan 0.0100 0.0036
## 6 1.2730 nan 0.0100 0.0037
## 7 1.2658 nan 0.0100 0.0034
## 8 1.2581 nan 0.0100 0.0033
## 9 1.2513 nan 0.0100 0.0030
## 10 1.2438 nan 0.0100 0.0034
## 20 1.1781 nan 0.0100 0.0028
## 40 1.0717 nan 0.0100 0.0023
## 60 0.9920 nan 0.0100 0.0014
## 80 0.9291 nan 0.0100 0.0011
## 100 0.8785 nan 0.0100 0.0007
## 120 0.8378 nan 0.0100 0.0006
## 140 0.8042 nan 0.0100 0.0007
## 160 0.7757 nan 0.0100 0.0003
## 180 0.7518 nan 0.0100 0.0001
## 200 0.7304 nan 0.0100 0.0002
## 220 0.7123 nan 0.0100 0.0002
## 240 0.6969 nan 0.0100 0.0000
## 260 0.6819 nan 0.0100 -0.0001
## 280 0.6686 nan 0.0100 0.0001
## 300 0.6569 nan 0.0100 0.0000
## 320 0.6462 nan 0.0100 0.0001
## 340 0.6362 nan 0.0100 -0.0000
## 360 0.6266 nan 0.0100 -0.0001
## 380 0.6164 nan 0.0100 -0.0001
## 400 0.6069 nan 0.0100 -0.0002
## 420 0.5976 nan 0.0100 -0.0001
## 440 0.5901 nan 0.0100 -0.0000
## 460 0.5825 nan 0.0100 -0.0001
## 480 0.5750 nan 0.0100 -0.0000
## 500 0.5667 nan 0.0100 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3121 nan 0.0100 0.0044
## 2 1.3032 nan 0.0100 0.0037
## 3 1.2944 nan 0.0100 0.0044
## 4 1.2855 nan 0.0100 0.0041
## 5 1.2774 nan 0.0100 0.0037
## 6 1.2688 nan 0.0100 0.0037
## 7 1.2606 nan 0.0100 0.0039
## 8 1.2535 nan 0.0100 0.0032
## 9 1.2460 nan 0.0100 0.0035
## 10 1.2380 nan 0.0100 0.0041
## 20 1.1689 nan 0.0100 0.0028
## 40 1.0555 nan 0.0100 0.0022
## 60 0.9721 nan 0.0100 0.0014
## 80 0.9061 nan 0.0100 0.0011
## 100 0.8504 nan 0.0100 0.0010
## 120 0.8079 nan 0.0100 0.0005
## 140 0.7702 nan 0.0100 0.0007
## 160 0.7390 nan 0.0100 0.0003
## 180 0.7130 nan 0.0100 0.0003
## 200 0.6894 nan 0.0100 0.0003
## 220 0.6694 nan 0.0100 0.0003
## 240 0.6516 nan 0.0100 0.0001
## 260 0.6354 nan 0.0100 0.0001
## 280 0.6195 nan 0.0100 0.0000
## 300 0.6049 nan 0.0100 -0.0001
## 320 0.5915 nan 0.0100 0.0001
## 340 0.5794 nan 0.0100 0.0000
## 360 0.5680 nan 0.0100 0.0000
## 380 0.5565 nan 0.0100 0.0001
## 400 0.5450 nan 0.0100 0.0001
## 420 0.5357 nan 0.0100 0.0001
## 440 0.5258 nan 0.0100 0.0000
## 460 0.5165 nan 0.0100 0.0001
## 480 0.5064 nan 0.0100 -0.0000
## 500 0.4965 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3121 nan 0.0100 0.0041
## 2 1.3037 nan 0.0100 0.0040
## 3 1.2952 nan 0.0100 0.0038
## 4 1.2863 nan 0.0100 0.0039
## 5 1.2785 nan 0.0100 0.0037
## 6 1.2698 nan 0.0100 0.0037
## 7 1.2611 nan 0.0100 0.0038
## 8 1.2531 nan 0.0100 0.0034
## 9 1.2454 nan 0.0100 0.0032
## 10 1.2371 nan 0.0100 0.0034
## 20 1.1674 nan 0.0100 0.0025
## 40 1.0547 nan 0.0100 0.0024
## 60 0.9700 nan 0.0100 0.0016
## 80 0.9049 nan 0.0100 0.0010
## 100 0.8541 nan 0.0100 0.0010
## 120 0.8108 nan 0.0100 0.0005
## 140 0.7761 nan 0.0100 0.0004
## 160 0.7462 nan 0.0100 0.0001
## 180 0.7212 nan 0.0100 0.0002
## 200 0.6981 nan 0.0100 0.0001
## 220 0.6768 nan 0.0100 0.0002
## 240 0.6580 nan 0.0100 0.0002
## 260 0.6416 nan 0.0100 0.0002
## 280 0.6260 nan 0.0100 0.0001
## 300 0.6123 nan 0.0100 -0.0002
## 320 0.5989 nan 0.0100 0.0001
## 340 0.5864 nan 0.0100 -0.0000
## 360 0.5741 nan 0.0100 0.0000
## 380 0.5626 nan 0.0100 -0.0001
## 400 0.5518 nan 0.0100 -0.0000
## 420 0.5420 nan 0.0100 -0.0001
## 440 0.5321 nan 0.0100 -0.0001
## 460 0.5228 nan 0.0100 -0.0001
## 480 0.5139 nan 0.0100 0.0001
## 500 0.5049 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3124 nan 0.0100 0.0042
## 2 1.3047 nan 0.0100 0.0035
## 3 1.2955 nan 0.0100 0.0038
## 4 1.2870 nan 0.0100 0.0037
## 5 1.2789 nan 0.0100 0.0033
## 6 1.2705 nan 0.0100 0.0039
## 7 1.2626 nan 0.0100 0.0036
## 8 1.2543 nan 0.0100 0.0034
## 9 1.2459 nan 0.0100 0.0035
## 10 1.2385 nan 0.0100 0.0035
## 20 1.1693 nan 0.0100 0.0027
## 40 1.0591 nan 0.0100 0.0021
## 60 0.9756 nan 0.0100 0.0013
## 80 0.9092 nan 0.0100 0.0012
## 100 0.8561 nan 0.0100 0.0008
## 120 0.8139 nan 0.0100 0.0007
## 140 0.7796 nan 0.0100 0.0003
## 160 0.7506 nan 0.0100 0.0002
## 180 0.7266 nan 0.0100 0.0001
## 200 0.7041 nan 0.0100 0.0003
## 220 0.6853 nan 0.0100 0.0001
## 240 0.6680 nan 0.0100 -0.0000
## 260 0.6518 nan 0.0100 0.0002
## 280 0.6374 nan 0.0100 0.0002
## 300 0.6238 nan 0.0100 0.0000
## 320 0.6107 nan 0.0100 0.0000
## 340 0.5979 nan 0.0100 0.0000
## 360 0.5871 nan 0.0100 0.0000
## 380 0.5770 nan 0.0100 0.0000
## 400 0.5663 nan 0.0100 -0.0003
## 420 0.5559 nan 0.0100 -0.0000
## 440 0.5472 nan 0.0100 0.0000
## 460 0.5392 nan 0.0100 0.0000
## 480 0.5303 nan 0.0100 0.0002
## 500 0.5215 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3117 nan 0.0100 0.0043
## 2 1.3026 nan 0.0100 0.0043
## 3 1.2939 nan 0.0100 0.0042
## 4 1.2854 nan 0.0100 0.0041
## 5 1.2768 nan 0.0100 0.0042
## 6 1.2688 nan 0.0100 0.0038
## 7 1.2603 nan 0.0100 0.0039
## 8 1.2520 nan 0.0100 0.0036
## 9 1.2441 nan 0.0100 0.0033
## 10 1.2356 nan 0.0100 0.0037
## 20 1.1623 nan 0.0100 0.0028
## 40 1.0439 nan 0.0100 0.0023
## 60 0.9554 nan 0.0100 0.0018
## 80 0.8847 nan 0.0100 0.0012
## 100 0.8284 nan 0.0100 0.0009
## 120 0.7836 nan 0.0100 0.0007
## 140 0.7450 nan 0.0100 0.0006
## 160 0.7140 nan 0.0100 0.0001
## 180 0.6869 nan 0.0100 -0.0000
## 200 0.6627 nan 0.0100 0.0003
## 220 0.6416 nan 0.0100 0.0001
## 240 0.6212 nan 0.0100 0.0000
## 260 0.6030 nan 0.0100 0.0000
## 280 0.5858 nan 0.0100 0.0000
## 300 0.5689 nan 0.0100 0.0000
## 320 0.5532 nan 0.0100 0.0001
## 340 0.5396 nan 0.0100 0.0001
## 360 0.5266 nan 0.0100 -0.0000
## 380 0.5149 nan 0.0100 0.0000
## 400 0.5027 nan 0.0100 0.0001
## 420 0.4912 nan 0.0100 -0.0001
## 440 0.4808 nan 0.0100 -0.0001
## 460 0.4690 nan 0.0100 0.0000
## 480 0.4587 nan 0.0100 -0.0001
## 500 0.4493 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3119 nan 0.0100 0.0042
## 2 1.3030 nan 0.0100 0.0042
## 3 1.2941 nan 0.0100 0.0040
## 4 1.2848 nan 0.0100 0.0041
## 5 1.2761 nan 0.0100 0.0040
## 6 1.2675 nan 0.0100 0.0039
## 7 1.2588 nan 0.0100 0.0038
## 8 1.2501 nan 0.0100 0.0037
## 9 1.2421 nan 0.0100 0.0035
## 10 1.2340 nan 0.0100 0.0033
## 20 1.1626 nan 0.0100 0.0028
## 40 1.0480 nan 0.0100 0.0020
## 60 0.9600 nan 0.0100 0.0017
## 80 0.8915 nan 0.0100 0.0015
## 100 0.8368 nan 0.0100 0.0008
## 120 0.7910 nan 0.0100 0.0007
## 140 0.7548 nan 0.0100 0.0001
## 160 0.7234 nan 0.0100 0.0004
## 180 0.6963 nan 0.0100 0.0003
## 200 0.6730 nan 0.0100 0.0002
## 220 0.6522 nan 0.0100 -0.0001
## 240 0.6330 nan 0.0100 0.0001
## 260 0.6153 nan 0.0100 0.0000
## 280 0.5998 nan 0.0100 0.0001
## 300 0.5851 nan 0.0100 -0.0001
## 320 0.5707 nan 0.0100 -0.0000
## 340 0.5577 nan 0.0100 -0.0000
## 360 0.5448 nan 0.0100 0.0000
## 380 0.5321 nan 0.0100 -0.0000
## 400 0.5199 nan 0.0100 0.0000
## 420 0.5083 nan 0.0100 0.0000
## 440 0.4969 nan 0.0100 -0.0000
## 460 0.4864 nan 0.0100 -0.0001
## 480 0.4757 nan 0.0100 -0.0000
## 500 0.4651 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3118 nan 0.0100 0.0038
## 2 1.3033 nan 0.0100 0.0041
## 3 1.2947 nan 0.0100 0.0041
## 4 1.2861 nan 0.0100 0.0035
## 5 1.2776 nan 0.0100 0.0044
## 6 1.2691 nan 0.0100 0.0036
## 7 1.2615 nan 0.0100 0.0033
## 8 1.2539 nan 0.0100 0.0037
## 9 1.2465 nan 0.0100 0.0032
## 10 1.2383 nan 0.0100 0.0034
## 20 1.1672 nan 0.0100 0.0029
## 40 1.0512 nan 0.0100 0.0022
## 60 0.9623 nan 0.0100 0.0015
## 80 0.8945 nan 0.0100 0.0008
## 100 0.8415 nan 0.0100 0.0009
## 120 0.7982 nan 0.0100 0.0006
## 140 0.7614 nan 0.0100 0.0006
## 160 0.7308 nan 0.0100 0.0003
## 180 0.7046 nan 0.0100 0.0003
## 200 0.6809 nan 0.0100 0.0001
## 220 0.6604 nan 0.0100 0.0000
## 240 0.6406 nan 0.0100 0.0001
## 260 0.6228 nan 0.0100 0.0000
## 280 0.6082 nan 0.0100 0.0001
## 300 0.5930 nan 0.0100 0.0001
## 320 0.5789 nan 0.0100 -0.0000
## 340 0.5658 nan 0.0100 0.0001
## 360 0.5527 nan 0.0100 -0.0001
## 380 0.5415 nan 0.0100 0.0000
## 400 0.5293 nan 0.0100 0.0001
## 420 0.5185 nan 0.0100 -0.0000
## 440 0.5076 nan 0.0100 0.0000
## 460 0.4968 nan 0.0100 0.0000
## 480 0.4876 nan 0.0100 -0.0001
## 500 0.4776 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2438 nan 0.1000 0.0383
## 2 1.1707 nan 0.1000 0.0330
## 3 1.1182 nan 0.1000 0.0240
## 4 1.0655 nan 0.1000 0.0228
## 5 1.0191 nan 0.1000 0.0200
## 6 0.9854 nan 0.1000 0.0136
## 7 0.9504 nan 0.1000 0.0142
## 8 0.9220 nan 0.1000 0.0090
## 9 0.8946 nan 0.1000 0.0103
## 10 0.8712 nan 0.1000 0.0084
## 20 0.7244 nan 0.1000 0.0033
## 40 0.5936 nan 0.1000 0.0002
## 60 0.5104 nan 0.1000 -0.0012
## 80 0.4405 nan 0.1000 0.0006
## 100 0.3898 nan 0.1000 -0.0004
## 120 0.3544 nan 0.1000 -0.0006
## 140 0.3150 nan 0.1000 -0.0018
## 160 0.2823 nan 0.1000 -0.0010
## 180 0.2540 nan 0.1000 0.0001
## 200 0.2296 nan 0.1000 -0.0002
## 220 0.2081 nan 0.1000 -0.0005
## 240 0.1905 nan 0.1000 0.0000
## 260 0.1739 nan 0.1000 -0.0002
## 280 0.1585 nan 0.1000 -0.0003
## 300 0.1463 nan 0.1000 -0.0004
## 320 0.1352 nan 0.1000 -0.0001
## 340 0.1247 nan 0.1000 -0.0006
## 360 0.1144 nan 0.1000 -0.0005
## 380 0.1074 nan 0.1000 -0.0003
## 400 0.0996 nan 0.1000 -0.0002
## 420 0.0910 nan 0.1000 -0.0001
## 440 0.0832 nan 0.1000 -0.0001
## 460 0.0769 nan 0.1000 -0.0002
## 480 0.0710 nan 0.1000 -0.0002
## 500 0.0663 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2379 nan 0.1000 0.0374
## 2 1.1677 nan 0.1000 0.0303
## 3 1.1124 nan 0.1000 0.0259
## 4 1.0638 nan 0.1000 0.0228
## 5 1.0229 nan 0.1000 0.0164
## 6 0.9838 nan 0.1000 0.0154
## 7 0.9534 nan 0.1000 0.0143
## 8 0.9247 nan 0.1000 0.0104
## 9 0.8986 nan 0.1000 0.0084
## 10 0.8740 nan 0.1000 0.0081
## 20 0.7318 nan 0.1000 0.0032
## 40 0.6121 nan 0.1000 -0.0002
## 60 0.5309 nan 0.1000 -0.0001
## 80 0.4740 nan 0.1000 -0.0011
## 100 0.4186 nan 0.1000 -0.0003
## 120 0.3693 nan 0.1000 -0.0011
## 140 0.3328 nan 0.1000 -0.0008
## 160 0.3001 nan 0.1000 -0.0018
## 180 0.2721 nan 0.1000 0.0000
## 200 0.2482 nan 0.1000 -0.0005
## 220 0.2270 nan 0.1000 -0.0007
## 240 0.2053 nan 0.1000 -0.0001
## 260 0.1865 nan 0.1000 -0.0006
## 280 0.1702 nan 0.1000 -0.0004
## 300 0.1572 nan 0.1000 -0.0004
## 320 0.1444 nan 0.1000 -0.0005
## 340 0.1336 nan 0.1000 0.0001
## 360 0.1238 nan 0.1000 -0.0006
## 380 0.1136 nan 0.1000 -0.0003
## 400 0.1048 nan 0.1000 -0.0003
## 420 0.0974 nan 0.1000 -0.0003
## 440 0.0899 nan 0.1000 -0.0004
## 460 0.0828 nan 0.1000 -0.0002
## 480 0.0770 nan 0.1000 -0.0001
## 500 0.0720 nan 0.1000 -0.0004
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2459 nan 0.1000 0.0395
## 2 1.1778 nan 0.1000 0.0291
## 3 1.1190 nan 0.1000 0.0258
## 4 1.0676 nan 0.1000 0.0212
## 5 1.0289 nan 0.1000 0.0170
## 6 0.9952 nan 0.1000 0.0141
## 7 0.9601 nan 0.1000 0.0149
## 8 0.9289 nan 0.1000 0.0136
## 9 0.9034 nan 0.1000 0.0099
## 10 0.8773 nan 0.1000 0.0107
## 20 0.7349 nan 0.1000 0.0003
## 40 0.6162 nan 0.1000 -0.0006
## 60 0.5400 nan 0.1000 -0.0025
## 80 0.4767 nan 0.1000 0.0001
## 100 0.4273 nan 0.1000 -0.0028
## 120 0.3853 nan 0.1000 -0.0009
## 140 0.3566 nan 0.1000 -0.0017
## 160 0.3254 nan 0.1000 -0.0010
## 180 0.2943 nan 0.1000 -0.0010
## 200 0.2693 nan 0.1000 -0.0007
## 220 0.2459 nan 0.1000 -0.0014
## 240 0.2260 nan 0.1000 -0.0007
## 260 0.2093 nan 0.1000 -0.0007
## 280 0.1942 nan 0.1000 -0.0006
## 300 0.1776 nan 0.1000 -0.0003
## 320 0.1642 nan 0.1000 -0.0005
## 340 0.1525 nan 0.1000 -0.0005
## 360 0.1398 nan 0.1000 -0.0004
## 380 0.1311 nan 0.1000 -0.0003
## 400 0.1201 nan 0.1000 -0.0002
## 420 0.1099 nan 0.1000 -0.0001
## 440 0.1024 nan 0.1000 -0.0005
## 460 0.0957 nan 0.1000 -0.0004
## 480 0.0888 nan 0.1000 -0.0002
## 500 0.0836 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2284 nan 0.1000 0.0379
## 2 1.1599 nan 0.1000 0.0297
## 3 1.1071 nan 0.1000 0.0195
## 4 1.0564 nan 0.1000 0.0226
## 5 1.0111 nan 0.1000 0.0192
## 6 0.9641 nan 0.1000 0.0170
## 7 0.9317 nan 0.1000 0.0117
## 8 0.9026 nan 0.1000 0.0111
## 9 0.8794 nan 0.1000 0.0071
## 10 0.8517 nan 0.1000 0.0100
## 20 0.6992 nan 0.1000 0.0012
## 40 0.5544 nan 0.1000 -0.0001
## 60 0.4582 nan 0.1000 -0.0014
## 80 0.3939 nan 0.1000 -0.0006
## 100 0.3414 nan 0.1000 -0.0012
## 120 0.2963 nan 0.1000 -0.0006
## 140 0.2623 nan 0.1000 -0.0004
## 160 0.2305 nan 0.1000 -0.0008
## 180 0.2022 nan 0.1000 -0.0003
## 200 0.1801 nan 0.1000 0.0002
## 220 0.1611 nan 0.1000 -0.0008
## 240 0.1442 nan 0.1000 -0.0001
## 260 0.1318 nan 0.1000 -0.0005
## 280 0.1179 nan 0.1000 -0.0003
## 300 0.1061 nan 0.1000 -0.0003
## 320 0.0953 nan 0.1000 -0.0003
## 340 0.0864 nan 0.1000 -0.0001
## 360 0.0783 nan 0.1000 -0.0001
## 380 0.0708 nan 0.1000 -0.0002
## 400 0.0648 nan 0.1000 -0.0002
## 420 0.0588 nan 0.1000 -0.0001
## 440 0.0527 nan 0.1000 -0.0001
## 460 0.0478 nan 0.1000 -0.0001
## 480 0.0439 nan 0.1000 -0.0003
## 500 0.0403 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2358 nan 0.1000 0.0378
## 2 1.1608 nan 0.1000 0.0326
## 3 1.0972 nan 0.1000 0.0271
## 4 1.0472 nan 0.1000 0.0215
## 5 1.0044 nan 0.1000 0.0168
## 6 0.9633 nan 0.1000 0.0154
## 7 0.9292 nan 0.1000 0.0143
## 8 0.9005 nan 0.1000 0.0120
## 9 0.8746 nan 0.1000 0.0085
## 10 0.8469 nan 0.1000 0.0090
## 20 0.6939 nan 0.1000 0.0008
## 40 0.5697 nan 0.1000 -0.0018
## 60 0.4851 nan 0.1000 -0.0010
## 80 0.4147 nan 0.1000 0.0004
## 100 0.3594 nan 0.1000 -0.0009
## 120 0.3118 nan 0.1000 -0.0005
## 140 0.2779 nan 0.1000 -0.0009
## 160 0.2470 nan 0.1000 -0.0003
## 180 0.2180 nan 0.1000 -0.0000
## 200 0.1946 nan 0.1000 -0.0012
## 220 0.1708 nan 0.1000 -0.0000
## 240 0.1526 nan 0.1000 -0.0006
## 260 0.1358 nan 0.1000 -0.0004
## 280 0.1219 nan 0.1000 -0.0005
## 300 0.1105 nan 0.1000 -0.0004
## 320 0.1000 nan 0.1000 -0.0001
## 340 0.0907 nan 0.1000 -0.0003
## 360 0.0815 nan 0.1000 -0.0004
## 380 0.0742 nan 0.1000 -0.0002
## 400 0.0663 nan 0.1000 0.0000
## 420 0.0597 nan 0.1000 -0.0001
## 440 0.0542 nan 0.1000 -0.0001
## 460 0.0498 nan 0.1000 -0.0001
## 480 0.0456 nan 0.1000 -0.0001
## 500 0.0413 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2346 nan 0.1000 0.0379
## 2 1.1630 nan 0.1000 0.0327
## 3 1.1014 nan 0.1000 0.0281
## 4 1.0501 nan 0.1000 0.0208
## 5 1.0016 nan 0.1000 0.0191
## 6 0.9664 nan 0.1000 0.0155
## 7 0.9309 nan 0.1000 0.0149
## 8 0.8995 nan 0.1000 0.0124
## 9 0.8754 nan 0.1000 0.0077
## 10 0.8507 nan 0.1000 0.0091
## 20 0.7054 nan 0.1000 0.0023
## 40 0.5701 nan 0.1000 -0.0011
## 60 0.4835 nan 0.1000 -0.0015
## 80 0.4214 nan 0.1000 -0.0020
## 100 0.3666 nan 0.1000 -0.0012
## 120 0.3210 nan 0.1000 -0.0014
## 140 0.2823 nan 0.1000 -0.0003
## 160 0.2505 nan 0.1000 -0.0014
## 180 0.2189 nan 0.1000 -0.0012
## 200 0.1941 nan 0.1000 -0.0013
## 220 0.1723 nan 0.1000 -0.0004
## 240 0.1552 nan 0.1000 -0.0006
## 260 0.1388 nan 0.1000 -0.0005
## 280 0.1245 nan 0.1000 -0.0005
## 300 0.1138 nan 0.1000 -0.0006
## 320 0.1045 nan 0.1000 -0.0004
## 340 0.0934 nan 0.1000 -0.0004
## 360 0.0845 nan 0.1000 -0.0003
## 380 0.0771 nan 0.1000 -0.0002
## 400 0.0711 nan 0.1000 -0.0000
## 420 0.0647 nan 0.1000 -0.0003
## 440 0.0590 nan 0.1000 -0.0002
## 460 0.0536 nan 0.1000 -0.0002
## 480 0.0490 nan 0.1000 -0.0001
## 500 0.0448 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2375 nan 0.1000 0.0387
## 2 1.1569 nan 0.1000 0.0386
## 3 1.0856 nan 0.1000 0.0303
## 4 1.0302 nan 0.1000 0.0217
## 5 0.9813 nan 0.1000 0.0216
## 6 0.9453 nan 0.1000 0.0127
## 7 0.9138 nan 0.1000 0.0118
## 8 0.8829 nan 0.1000 0.0121
## 9 0.8554 nan 0.1000 0.0091
## 10 0.8257 nan 0.1000 0.0118
## 20 0.6587 nan 0.1000 0.0028
## 40 0.5094 nan 0.1000 -0.0010
## 60 0.4041 nan 0.1000 -0.0013
## 80 0.3264 nan 0.1000 -0.0004
## 100 0.2751 nan 0.1000 -0.0005
## 120 0.2340 nan 0.1000 -0.0008
## 140 0.2001 nan 0.1000 -0.0000
## 160 0.1719 nan 0.1000 -0.0004
## 180 0.1478 nan 0.1000 -0.0002
## 200 0.1270 nan 0.1000 -0.0000
## 220 0.1101 nan 0.1000 -0.0005
## 240 0.0973 nan 0.1000 -0.0003
## 260 0.0858 nan 0.1000 -0.0002
## 280 0.0759 nan 0.1000 -0.0003
## 300 0.0682 nan 0.1000 -0.0003
## 320 0.0604 nan 0.1000 -0.0002
## 340 0.0532 nan 0.1000 -0.0001
## 360 0.0469 nan 0.1000 -0.0002
## 380 0.0422 nan 0.1000 -0.0002
## 400 0.0376 nan 0.1000 -0.0001
## 420 0.0336 nan 0.1000 -0.0001
## 440 0.0301 nan 0.1000 -0.0000
## 460 0.0265 nan 0.1000 -0.0001
## 480 0.0237 nan 0.1000 -0.0001
## 500 0.0212 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2296 nan 0.1000 0.0396
## 2 1.1548 nan 0.1000 0.0355
## 3 1.0951 nan 0.1000 0.0256
## 4 1.0417 nan 0.1000 0.0247
## 5 0.9941 nan 0.1000 0.0204
## 6 0.9571 nan 0.1000 0.0135
## 7 0.9221 nan 0.1000 0.0119
## 8 0.8892 nan 0.1000 0.0110
## 9 0.8605 nan 0.1000 0.0115
## 10 0.8356 nan 0.1000 0.0077
## 20 0.6743 nan 0.1000 0.0006
## 40 0.5249 nan 0.1000 0.0010
## 60 0.4186 nan 0.1000 -0.0009
## 80 0.3490 nan 0.1000 0.0001
## 100 0.2951 nan 0.1000 -0.0012
## 120 0.2525 nan 0.1000 -0.0013
## 140 0.2151 nan 0.1000 -0.0007
## 160 0.1876 nan 0.1000 -0.0003
## 180 0.1643 nan 0.1000 -0.0002
## 200 0.1424 nan 0.1000 0.0002
## 220 0.1251 nan 0.1000 -0.0004
## 240 0.1101 nan 0.1000 -0.0006
## 260 0.0961 nan 0.1000 -0.0005
## 280 0.0847 nan 0.1000 -0.0004
## 300 0.0756 nan 0.1000 -0.0001
## 320 0.0677 nan 0.1000 -0.0005
## 340 0.0608 nan 0.1000 -0.0000
## 360 0.0530 nan 0.1000 -0.0002
## 380 0.0472 nan 0.1000 -0.0000
## 400 0.0420 nan 0.1000 -0.0002
## 420 0.0373 nan 0.1000 -0.0001
## 440 0.0334 nan 0.1000 -0.0002
## 460 0.0296 nan 0.1000 -0.0000
## 480 0.0263 nan 0.1000 -0.0001
## 500 0.0236 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2333 nan 0.1000 0.0414
## 2 1.1631 nan 0.1000 0.0274
## 3 1.1019 nan 0.1000 0.0251
## 4 1.0483 nan 0.1000 0.0235
## 5 1.0040 nan 0.1000 0.0178
## 6 0.9620 nan 0.1000 0.0153
## 7 0.9242 nan 0.1000 0.0151
## 8 0.8972 nan 0.1000 0.0092
## 9 0.8674 nan 0.1000 0.0108
## 10 0.8395 nan 0.1000 0.0115
## 20 0.6897 nan 0.1000 0.0021
## 40 0.5415 nan 0.1000 -0.0013
## 60 0.4573 nan 0.1000 -0.0006
## 80 0.3777 nan 0.1000 -0.0005
## 100 0.3217 nan 0.1000 -0.0028
## 120 0.2745 nan 0.1000 -0.0006
## 140 0.2333 nan 0.1000 -0.0007
## 160 0.2006 nan 0.1000 -0.0006
## 180 0.1740 nan 0.1000 -0.0009
## 200 0.1527 nan 0.1000 -0.0007
## 220 0.1325 nan 0.1000 -0.0005
## 240 0.1180 nan 0.1000 -0.0003
## 260 0.1042 nan 0.1000 -0.0004
## 280 0.0937 nan 0.1000 -0.0003
## 300 0.0823 nan 0.1000 -0.0004
## 320 0.0739 nan 0.1000 -0.0002
## 340 0.0654 nan 0.1000 -0.0002
## 360 0.0587 nan 0.1000 -0.0003
## 380 0.0529 nan 0.1000 -0.0001
## 400 0.0476 nan 0.1000 -0.0002
## 420 0.0426 nan 0.1000 -0.0003
## 440 0.0380 nan 0.1000 -0.0001
## 460 0.0342 nan 0.1000 -0.0002
## 480 0.0306 nan 0.1000 0.0000
## 500 0.0276 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3195 nan 0.0010 0.0004
## 3 1.3186 nan 0.0010 0.0004
## 4 1.3177 nan 0.0010 0.0004
## 5 1.3169 nan 0.0010 0.0003
## 6 1.3161 nan 0.0010 0.0004
## 7 1.3152 nan 0.0010 0.0004
## 8 1.3145 nan 0.0010 0.0003
## 9 1.3137 nan 0.0010 0.0004
## 10 1.3129 nan 0.0010 0.0003
## 20 1.3047 nan 0.0010 0.0003
## 40 1.2885 nan 0.0010 0.0004
## 60 1.2736 nan 0.0010 0.0003
## 80 1.2588 nan 0.0010 0.0003
## 100 1.2446 nan 0.0010 0.0003
## 120 1.2307 nan 0.0010 0.0004
## 140 1.2175 nan 0.0010 0.0003
## 160 1.2050 nan 0.0010 0.0003
## 180 1.1925 nan 0.0010 0.0002
## 200 1.1802 nan 0.0010 0.0003
## 220 1.1685 nan 0.0010 0.0002
## 240 1.1573 nan 0.0010 0.0003
## 260 1.1460 nan 0.0010 0.0002
## 280 1.1351 nan 0.0010 0.0003
## 300 1.1245 nan 0.0010 0.0002
## 320 1.1143 nan 0.0010 0.0002
## 340 1.1044 nan 0.0010 0.0002
## 360 1.0947 nan 0.0010 0.0002
## 380 1.0850 nan 0.0010 0.0002
## 400 1.0758 nan 0.0010 0.0002
## 420 1.0669 nan 0.0010 0.0002
## 440 1.0581 nan 0.0010 0.0002
## 460 1.0499 nan 0.0010 0.0002
## 480 1.0417 nan 0.0010 0.0002
## 500 1.0336 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3202 nan 0.0010 0.0004
## 2 1.3194 nan 0.0010 0.0004
## 3 1.3185 nan 0.0010 0.0004
## 4 1.3176 nan 0.0010 0.0004
## 5 1.3167 nan 0.0010 0.0004
## 6 1.3159 nan 0.0010 0.0004
## 7 1.3152 nan 0.0010 0.0003
## 8 1.3144 nan 0.0010 0.0004
## 9 1.3135 nan 0.0010 0.0004
## 10 1.3126 nan 0.0010 0.0004
## 20 1.3046 nan 0.0010 0.0003
## 40 1.2890 nan 0.0010 0.0004
## 60 1.2741 nan 0.0010 0.0004
## 80 1.2595 nan 0.0010 0.0003
## 100 1.2457 nan 0.0010 0.0003
## 120 1.2319 nan 0.0010 0.0003
## 140 1.2190 nan 0.0010 0.0002
## 160 1.2060 nan 0.0010 0.0003
## 180 1.1935 nan 0.0010 0.0003
## 200 1.1815 nan 0.0010 0.0003
## 220 1.1697 nan 0.0010 0.0003
## 240 1.1581 nan 0.0010 0.0002
## 260 1.1469 nan 0.0010 0.0003
## 280 1.1360 nan 0.0010 0.0002
## 300 1.1253 nan 0.0010 0.0002
## 320 1.1154 nan 0.0010 0.0002
## 340 1.1054 nan 0.0010 0.0003
## 360 1.0957 nan 0.0010 0.0002
## 380 1.0863 nan 0.0010 0.0002
## 400 1.0772 nan 0.0010 0.0002
## 420 1.0681 nan 0.0010 0.0002
## 440 1.0592 nan 0.0010 0.0002
## 460 1.0508 nan 0.0010 0.0002
## 480 1.0425 nan 0.0010 0.0002
## 500 1.0343 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3195 nan 0.0010 0.0004
## 3 1.3188 nan 0.0010 0.0003
## 4 1.3179 nan 0.0010 0.0004
## 5 1.3171 nan 0.0010 0.0004
## 6 1.3163 nan 0.0010 0.0004
## 7 1.3154 nan 0.0010 0.0003
## 8 1.3146 nan 0.0010 0.0003
## 9 1.3138 nan 0.0010 0.0004
## 10 1.3130 nan 0.0010 0.0003
## 20 1.3053 nan 0.0010 0.0004
## 40 1.2899 nan 0.0010 0.0003
## 60 1.2748 nan 0.0010 0.0003
## 80 1.2597 nan 0.0010 0.0003
## 100 1.2460 nan 0.0010 0.0003
## 120 1.2325 nan 0.0010 0.0003
## 140 1.2194 nan 0.0010 0.0003
## 160 1.2065 nan 0.0010 0.0003
## 180 1.1941 nan 0.0010 0.0003
## 200 1.1821 nan 0.0010 0.0002
## 220 1.1702 nan 0.0010 0.0003
## 240 1.1591 nan 0.0010 0.0002
## 260 1.1484 nan 0.0010 0.0002
## 280 1.1374 nan 0.0010 0.0002
## 300 1.1270 nan 0.0010 0.0002
## 320 1.1166 nan 0.0010 0.0002
## 340 1.1064 nan 0.0010 0.0002
## 360 1.0968 nan 0.0010 0.0002
## 380 1.0874 nan 0.0010 0.0002
## 400 1.0784 nan 0.0010 0.0002
## 420 1.0692 nan 0.0010 0.0002
## 440 1.0604 nan 0.0010 0.0002
## 460 1.0519 nan 0.0010 0.0001
## 480 1.0436 nan 0.0010 0.0002
## 500 1.0354 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3194 nan 0.0010 0.0004
## 3 1.3185 nan 0.0010 0.0004
## 4 1.3177 nan 0.0010 0.0004
## 5 1.3168 nan 0.0010 0.0004
## 6 1.3160 nan 0.0010 0.0003
## 7 1.3152 nan 0.0010 0.0004
## 8 1.3142 nan 0.0010 0.0004
## 9 1.3133 nan 0.0010 0.0004
## 10 1.3125 nan 0.0010 0.0004
## 20 1.3037 nan 0.0010 0.0004
## 40 1.2875 nan 0.0010 0.0004
## 60 1.2711 nan 0.0010 0.0004
## 80 1.2555 nan 0.0010 0.0004
## 100 1.2400 nan 0.0010 0.0003
## 120 1.2256 nan 0.0010 0.0003
## 140 1.2114 nan 0.0010 0.0003
## 160 1.1976 nan 0.0010 0.0003
## 180 1.1841 nan 0.0010 0.0003
## 200 1.1707 nan 0.0010 0.0003
## 220 1.1582 nan 0.0010 0.0003
## 240 1.1462 nan 0.0010 0.0002
## 260 1.1342 nan 0.0010 0.0003
## 280 1.1229 nan 0.0010 0.0002
## 300 1.1116 nan 0.0010 0.0002
## 320 1.1009 nan 0.0010 0.0002
## 340 1.0903 nan 0.0010 0.0003
## 360 1.0799 nan 0.0010 0.0002
## 380 1.0700 nan 0.0010 0.0002
## 400 1.0603 nan 0.0010 0.0002
## 420 1.0508 nan 0.0010 0.0002
## 440 1.0416 nan 0.0010 0.0002
## 460 1.0328 nan 0.0010 0.0002
## 480 1.0239 nan 0.0010 0.0002
## 500 1.0154 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3202 nan 0.0010 0.0005
## 2 1.3194 nan 0.0010 0.0004
## 3 1.3186 nan 0.0010 0.0004
## 4 1.3177 nan 0.0010 0.0003
## 5 1.3168 nan 0.0010 0.0004
## 6 1.3159 nan 0.0010 0.0004
## 7 1.3151 nan 0.0010 0.0003
## 8 1.3142 nan 0.0010 0.0004
## 9 1.3133 nan 0.0010 0.0004
## 10 1.3124 nan 0.0010 0.0004
## 20 1.3037 nan 0.0010 0.0004
## 40 1.2870 nan 0.0010 0.0003
## 60 1.2710 nan 0.0010 0.0003
## 80 1.2556 nan 0.0010 0.0004
## 100 1.2406 nan 0.0010 0.0003
## 120 1.2265 nan 0.0010 0.0003
## 140 1.2124 nan 0.0010 0.0003
## 160 1.1986 nan 0.0010 0.0003
## 180 1.1857 nan 0.0010 0.0003
## 200 1.1727 nan 0.0010 0.0003
## 220 1.1605 nan 0.0010 0.0003
## 240 1.1483 nan 0.0010 0.0003
## 260 1.1364 nan 0.0010 0.0003
## 280 1.1249 nan 0.0010 0.0003
## 300 1.1136 nan 0.0010 0.0002
## 320 1.1029 nan 0.0010 0.0002
## 340 1.0922 nan 0.0010 0.0002
## 360 1.0818 nan 0.0010 0.0002
## 380 1.0719 nan 0.0010 0.0002
## 400 1.0621 nan 0.0010 0.0002
## 420 1.0526 nan 0.0010 0.0002
## 440 1.0434 nan 0.0010 0.0002
## 460 1.0344 nan 0.0010 0.0002
## 480 1.0256 nan 0.0010 0.0001
## 500 1.0169 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3195 nan 0.0010 0.0003
## 3 1.3186 nan 0.0010 0.0004
## 4 1.3178 nan 0.0010 0.0004
## 5 1.3170 nan 0.0010 0.0003
## 6 1.3162 nan 0.0010 0.0004
## 7 1.3153 nan 0.0010 0.0004
## 8 1.3145 nan 0.0010 0.0003
## 9 1.3137 nan 0.0010 0.0004
## 10 1.3129 nan 0.0010 0.0004
## 20 1.3044 nan 0.0010 0.0003
## 40 1.2882 nan 0.0010 0.0003
## 60 1.2722 nan 0.0010 0.0004
## 80 1.2571 nan 0.0010 0.0004
## 100 1.2420 nan 0.0010 0.0003
## 120 1.2275 nan 0.0010 0.0003
## 140 1.2135 nan 0.0010 0.0003
## 160 1.1998 nan 0.0010 0.0003
## 180 1.1868 nan 0.0010 0.0003
## 200 1.1743 nan 0.0010 0.0003
## 220 1.1622 nan 0.0010 0.0002
## 240 1.1500 nan 0.0010 0.0003
## 260 1.1382 nan 0.0010 0.0003
## 280 1.1267 nan 0.0010 0.0002
## 300 1.1157 nan 0.0010 0.0002
## 320 1.1048 nan 0.0010 0.0002
## 340 1.0943 nan 0.0010 0.0002
## 360 1.0842 nan 0.0010 0.0002
## 380 1.0743 nan 0.0010 0.0002
## 400 1.0647 nan 0.0010 0.0002
## 420 1.0555 nan 0.0010 0.0002
## 440 1.0466 nan 0.0010 0.0002
## 460 1.0375 nan 0.0010 0.0002
## 480 1.0287 nan 0.0010 0.0002
## 500 1.0203 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3194 nan 0.0010 0.0003
## 3 1.3185 nan 0.0010 0.0004
## 4 1.3175 nan 0.0010 0.0004
## 5 1.3165 nan 0.0010 0.0004
## 6 1.3155 nan 0.0010 0.0004
## 7 1.3146 nan 0.0010 0.0004
## 8 1.3138 nan 0.0010 0.0004
## 9 1.3129 nan 0.0010 0.0004
## 10 1.3120 nan 0.0010 0.0005
## 20 1.3031 nan 0.0010 0.0004
## 40 1.2854 nan 0.0010 0.0004
## 60 1.2686 nan 0.0010 0.0004
## 80 1.2522 nan 0.0010 0.0004
## 100 1.2366 nan 0.0010 0.0003
## 120 1.2212 nan 0.0010 0.0003
## 140 1.2068 nan 0.0010 0.0003
## 160 1.1925 nan 0.0010 0.0003
## 180 1.1785 nan 0.0010 0.0004
## 200 1.1649 nan 0.0010 0.0003
## 220 1.1522 nan 0.0010 0.0003
## 240 1.1395 nan 0.0010 0.0002
## 260 1.1272 nan 0.0010 0.0002
## 280 1.1153 nan 0.0010 0.0003
## 300 1.1037 nan 0.0010 0.0002
## 320 1.0925 nan 0.0010 0.0002
## 340 1.0814 nan 0.0010 0.0002
## 360 1.0706 nan 0.0010 0.0002
## 380 1.0602 nan 0.0010 0.0002
## 400 1.0503 nan 0.0010 0.0002
## 420 1.0403 nan 0.0010 0.0002
## 440 1.0308 nan 0.0010 0.0002
## 460 1.0211 nan 0.0010 0.0002
## 480 1.0120 nan 0.0010 0.0002
## 500 1.0030 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3193 nan 0.0010 0.0004
## 3 1.3183 nan 0.0010 0.0005
## 4 1.3175 nan 0.0010 0.0004
## 5 1.3167 nan 0.0010 0.0004
## 6 1.3157 nan 0.0010 0.0004
## 7 1.3148 nan 0.0010 0.0004
## 8 1.3140 nan 0.0010 0.0004
## 9 1.3129 nan 0.0010 0.0004
## 10 1.3120 nan 0.0010 0.0004
## 20 1.3031 nan 0.0010 0.0004
## 40 1.2858 nan 0.0010 0.0004
## 60 1.2690 nan 0.0010 0.0004
## 80 1.2529 nan 0.0010 0.0004
## 100 1.2370 nan 0.0010 0.0003
## 120 1.2218 nan 0.0010 0.0003
## 140 1.2074 nan 0.0010 0.0003
## 160 1.1930 nan 0.0010 0.0003
## 180 1.1791 nan 0.0010 0.0003
## 200 1.1655 nan 0.0010 0.0003
## 220 1.1525 nan 0.0010 0.0003
## 240 1.1403 nan 0.0010 0.0003
## 260 1.1283 nan 0.0010 0.0002
## 280 1.1163 nan 0.0010 0.0002
## 300 1.1045 nan 0.0010 0.0002
## 320 1.0931 nan 0.0010 0.0003
## 340 1.0821 nan 0.0010 0.0003
## 360 1.0713 nan 0.0010 0.0002
## 380 1.0610 nan 0.0010 0.0002
## 400 1.0508 nan 0.0010 0.0002
## 420 1.0410 nan 0.0010 0.0002
## 440 1.0315 nan 0.0010 0.0002
## 460 1.0223 nan 0.0010 0.0002
## 480 1.0133 nan 0.0010 0.0002
## 500 1.0043 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3195 nan 0.0010 0.0004
## 3 1.3186 nan 0.0010 0.0004
## 4 1.3178 nan 0.0010 0.0004
## 5 1.3168 nan 0.0010 0.0004
## 6 1.3159 nan 0.0010 0.0004
## 7 1.3151 nan 0.0010 0.0004
## 8 1.3142 nan 0.0010 0.0004
## 9 1.3132 nan 0.0010 0.0004
## 10 1.3124 nan 0.0010 0.0004
## 20 1.3034 nan 0.0010 0.0004
## 40 1.2866 nan 0.0010 0.0003
## 60 1.2701 nan 0.0010 0.0004
## 80 1.2541 nan 0.0010 0.0004
## 100 1.2385 nan 0.0010 0.0003
## 120 1.2236 nan 0.0010 0.0004
## 140 1.2090 nan 0.0010 0.0003
## 160 1.1945 nan 0.0010 0.0003
## 180 1.1805 nan 0.0010 0.0003
## 200 1.1673 nan 0.0010 0.0003
## 220 1.1544 nan 0.0010 0.0003
## 240 1.1418 nan 0.0010 0.0003
## 260 1.1299 nan 0.0010 0.0003
## 280 1.1185 nan 0.0010 0.0002
## 300 1.1071 nan 0.0010 0.0002
## 320 1.0961 nan 0.0010 0.0002
## 340 1.0853 nan 0.0010 0.0002
## 360 1.0748 nan 0.0010 0.0002
## 380 1.0647 nan 0.0010 0.0002
## 400 1.0545 nan 0.0010 0.0002
## 420 1.0452 nan 0.0010 0.0002
## 440 1.0358 nan 0.0010 0.0002
## 460 1.0267 nan 0.0010 0.0002
## 480 1.0177 nan 0.0010 0.0002
## 500 1.0091 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3136 nan 0.0100 0.0034
## 2 1.3047 nan 0.0100 0.0034
## 3 1.2965 nan 0.0100 0.0038
## 4 1.2901 nan 0.0100 0.0030
## 5 1.2829 nan 0.0100 0.0032
## 6 1.2749 nan 0.0100 0.0037
## 7 1.2672 nan 0.0100 0.0036
## 8 1.2599 nan 0.0100 0.0031
## 9 1.2532 nan 0.0100 0.0029
## 10 1.2462 nan 0.0100 0.0030
## 20 1.1827 nan 0.0100 0.0030
## 40 1.0806 nan 0.0100 0.0019
## 60 1.0011 nan 0.0100 0.0015
## 80 0.9360 nan 0.0100 0.0010
## 100 0.8844 nan 0.0100 0.0007
## 120 0.8427 nan 0.0100 0.0006
## 140 0.8076 nan 0.0100 0.0004
## 160 0.7778 nan 0.0100 0.0004
## 180 0.7528 nan 0.0100 0.0003
## 200 0.7307 nan 0.0100 0.0001
## 220 0.7120 nan 0.0100 0.0001
## 240 0.6950 nan 0.0100 0.0000
## 260 0.6793 nan 0.0100 -0.0001
## 280 0.6649 nan 0.0100 0.0001
## 300 0.6508 nan 0.0100 0.0001
## 320 0.6381 nan 0.0100 0.0002
## 340 0.6266 nan 0.0100 -0.0000
## 360 0.6162 nan 0.0100 0.0000
## 380 0.6064 nan 0.0100 0.0001
## 400 0.5970 nan 0.0100 -0.0001
## 420 0.5878 nan 0.0100 0.0002
## 440 0.5784 nan 0.0100 0.0001
## 460 0.5691 nan 0.0100 0.0000
## 480 0.5607 nan 0.0100 0.0000
## 500 0.5530 nan 0.0100 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3126 nan 0.0100 0.0038
## 2 1.3042 nan 0.0100 0.0036
## 3 1.2962 nan 0.0100 0.0039
## 4 1.2882 nan 0.0100 0.0032
## 5 1.2813 nan 0.0100 0.0030
## 6 1.2747 nan 0.0100 0.0032
## 7 1.2681 nan 0.0100 0.0031
## 8 1.2606 nan 0.0100 0.0033
## 9 1.2544 nan 0.0100 0.0028
## 10 1.2461 nan 0.0100 0.0037
## 20 1.1796 nan 0.0100 0.0028
## 40 1.0759 nan 0.0100 0.0020
## 60 0.9970 nan 0.0100 0.0013
## 80 0.9352 nan 0.0100 0.0010
## 100 0.8836 nan 0.0100 0.0007
## 120 0.8416 nan 0.0100 0.0005
## 140 0.8062 nan 0.0100 0.0005
## 160 0.7780 nan 0.0100 0.0003
## 180 0.7532 nan 0.0100 0.0002
## 200 0.7312 nan 0.0100 0.0003
## 220 0.7128 nan 0.0100 0.0002
## 240 0.6959 nan 0.0100 -0.0000
## 260 0.6811 nan 0.0100 0.0002
## 280 0.6669 nan 0.0100 -0.0001
## 300 0.6536 nan 0.0100 0.0003
## 320 0.6425 nan 0.0100 0.0000
## 340 0.6314 nan 0.0100 -0.0001
## 360 0.6207 nan 0.0100 -0.0001
## 380 0.6113 nan 0.0100 -0.0000
## 400 0.6013 nan 0.0100 0.0000
## 420 0.5915 nan 0.0100 -0.0001
## 440 0.5827 nan 0.0100 -0.0000
## 460 0.5743 nan 0.0100 -0.0001
## 480 0.5658 nan 0.0100 -0.0000
## 500 0.5588 nan 0.0100 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3141 nan 0.0100 0.0031
## 2 1.3061 nan 0.0100 0.0036
## 3 1.2980 nan 0.0100 0.0039
## 4 1.2907 nan 0.0100 0.0033
## 5 1.2832 nan 0.0100 0.0031
## 6 1.2756 nan 0.0100 0.0037
## 7 1.2683 nan 0.0100 0.0031
## 8 1.2616 nan 0.0100 0.0030
## 9 1.2548 nan 0.0100 0.0031
## 10 1.2481 nan 0.0100 0.0030
## 20 1.1814 nan 0.0100 0.0031
## 40 1.0765 nan 0.0100 0.0020
## 60 0.9977 nan 0.0100 0.0015
## 80 0.9354 nan 0.0100 0.0012
## 100 0.8862 nan 0.0100 0.0009
## 120 0.8443 nan 0.0100 0.0004
## 140 0.8103 nan 0.0100 0.0005
## 160 0.7818 nan 0.0100 0.0004
## 180 0.7567 nan 0.0100 0.0003
## 200 0.7343 nan 0.0100 0.0001
## 220 0.7158 nan 0.0100 0.0000
## 240 0.6990 nan 0.0100 0.0002
## 260 0.6849 nan 0.0100 0.0002
## 280 0.6708 nan 0.0100 -0.0000
## 300 0.6572 nan 0.0100 0.0000
## 320 0.6456 nan 0.0100 -0.0001
## 340 0.6350 nan 0.0100 -0.0000
## 360 0.6245 nan 0.0100 0.0002
## 380 0.6150 nan 0.0100 0.0000
## 400 0.6057 nan 0.0100 0.0001
## 420 0.5960 nan 0.0100 0.0001
## 440 0.5888 nan 0.0100 -0.0001
## 460 0.5806 nan 0.0100 -0.0000
## 480 0.5719 nan 0.0100 0.0000
## 500 0.5639 nan 0.0100 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3127 nan 0.0100 0.0037
## 2 1.3041 nan 0.0100 0.0039
## 3 1.2954 nan 0.0100 0.0038
## 4 1.2873 nan 0.0100 0.0034
## 5 1.2791 nan 0.0100 0.0038
## 6 1.2708 nan 0.0100 0.0037
## 7 1.2631 nan 0.0100 0.0035
## 8 1.2568 nan 0.0100 0.0027
## 9 1.2488 nan 0.0100 0.0036
## 10 1.2414 nan 0.0100 0.0033
## 20 1.1749 nan 0.0100 0.0030
## 40 1.0615 nan 0.0100 0.0021
## 60 0.9765 nan 0.0100 0.0017
## 80 0.9093 nan 0.0100 0.0013
## 100 0.8561 nan 0.0100 0.0008
## 120 0.8124 nan 0.0100 0.0007
## 140 0.7763 nan 0.0100 0.0003
## 160 0.7451 nan 0.0100 0.0003
## 180 0.7179 nan 0.0100 0.0003
## 200 0.6949 nan 0.0100 0.0001
## 220 0.6758 nan 0.0100 0.0002
## 240 0.6571 nan 0.0100 0.0001
## 260 0.6396 nan 0.0100 -0.0002
## 280 0.6238 nan 0.0100 0.0001
## 300 0.6086 nan 0.0100 0.0001
## 320 0.5952 nan 0.0100 0.0000
## 340 0.5811 nan 0.0100 -0.0000
## 360 0.5684 nan 0.0100 -0.0000
## 380 0.5578 nan 0.0100 -0.0002
## 400 0.5477 nan 0.0100 0.0000
## 420 0.5371 nan 0.0100 -0.0000
## 440 0.5271 nan 0.0100 -0.0002
## 460 0.5178 nan 0.0100 -0.0000
## 480 0.5089 nan 0.0100 0.0000
## 500 0.4993 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3125 nan 0.0100 0.0040
## 2 1.3039 nan 0.0100 0.0039
## 3 1.2952 nan 0.0100 0.0039
## 4 1.2871 nan 0.0100 0.0036
## 5 1.2790 nan 0.0100 0.0035
## 6 1.2706 nan 0.0100 0.0040
## 7 1.2629 nan 0.0100 0.0039
## 8 1.2557 nan 0.0100 0.0031
## 9 1.2479 nan 0.0100 0.0034
## 10 1.2394 nan 0.0100 0.0037
## 20 1.1715 nan 0.0100 0.0026
## 40 1.0632 nan 0.0100 0.0021
## 60 0.9780 nan 0.0100 0.0018
## 80 0.9127 nan 0.0100 0.0011
## 100 0.8579 nan 0.0100 0.0008
## 120 0.8153 nan 0.0100 0.0007
## 140 0.7789 nan 0.0100 0.0004
## 160 0.7475 nan 0.0100 0.0005
## 180 0.7228 nan 0.0100 0.0000
## 200 0.7005 nan 0.0100 0.0004
## 220 0.6795 nan 0.0100 0.0001
## 240 0.6597 nan 0.0100 0.0001
## 260 0.6430 nan 0.0100 0.0002
## 280 0.6275 nan 0.0100 0.0000
## 300 0.6130 nan 0.0100 0.0000
## 320 0.6014 nan 0.0100 0.0000
## 340 0.5885 nan 0.0100 0.0002
## 360 0.5769 nan 0.0100 0.0000
## 380 0.5661 nan 0.0100 0.0001
## 400 0.5549 nan 0.0100 -0.0001
## 420 0.5444 nan 0.0100 0.0000
## 440 0.5332 nan 0.0100 0.0000
## 460 0.5243 nan 0.0100 0.0001
## 480 0.5155 nan 0.0100 -0.0001
## 500 0.5059 nan 0.0100 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3112 nan 0.0100 0.0043
## 2 1.3032 nan 0.0100 0.0037
## 3 1.2943 nan 0.0100 0.0038
## 4 1.2859 nan 0.0100 0.0037
## 5 1.2772 nan 0.0100 0.0039
## 6 1.2697 nan 0.0100 0.0033
## 7 1.2620 nan 0.0100 0.0038
## 8 1.2545 nan 0.0100 0.0034
## 9 1.2469 nan 0.0100 0.0037
## 10 1.2394 nan 0.0100 0.0033
## 20 1.1725 nan 0.0100 0.0028
## 40 1.0631 nan 0.0100 0.0017
## 60 0.9781 nan 0.0100 0.0016
## 80 0.9124 nan 0.0100 0.0013
## 100 0.8603 nan 0.0100 0.0007
## 120 0.8178 nan 0.0100 0.0007
## 140 0.7817 nan 0.0100 0.0006
## 160 0.7523 nan 0.0100 0.0005
## 180 0.7269 nan 0.0100 0.0002
## 200 0.7045 nan 0.0100 0.0001
## 220 0.6860 nan 0.0100 0.0003
## 240 0.6692 nan 0.0100 0.0001
## 260 0.6529 nan 0.0100 0.0001
## 280 0.6376 nan 0.0100 0.0000
## 300 0.6238 nan 0.0100 0.0000
## 320 0.6118 nan 0.0100 -0.0001
## 340 0.6000 nan 0.0100 -0.0000
## 360 0.5880 nan 0.0100 -0.0000
## 380 0.5773 nan 0.0100 -0.0000
## 400 0.5671 nan 0.0100 0.0000
## 420 0.5574 nan 0.0100 -0.0000
## 440 0.5479 nan 0.0100 -0.0001
## 460 0.5383 nan 0.0100 -0.0000
## 480 0.5301 nan 0.0100 -0.0000
## 500 0.5207 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3125 nan 0.0100 0.0039
## 2 1.3030 nan 0.0100 0.0045
## 3 1.2939 nan 0.0100 0.0043
## 4 1.2849 nan 0.0100 0.0042
## 5 1.2763 nan 0.0100 0.0034
## 6 1.2683 nan 0.0100 0.0033
## 7 1.2604 nan 0.0100 0.0036
## 8 1.2517 nan 0.0100 0.0037
## 9 1.2437 nan 0.0100 0.0037
## 10 1.2361 nan 0.0100 0.0034
## 20 1.1626 nan 0.0100 0.0032
## 40 1.0481 nan 0.0100 0.0017
## 60 0.9607 nan 0.0100 0.0014
## 80 0.8911 nan 0.0100 0.0009
## 100 0.8375 nan 0.0100 0.0007
## 120 0.7922 nan 0.0100 0.0009
## 140 0.7550 nan 0.0100 0.0005
## 160 0.7230 nan 0.0100 0.0003
## 180 0.6937 nan 0.0100 0.0004
## 200 0.6686 nan 0.0100 0.0004
## 220 0.6469 nan 0.0100 0.0004
## 240 0.6263 nan 0.0100 0.0001
## 260 0.6075 nan 0.0100 0.0000
## 280 0.5902 nan 0.0100 0.0001
## 300 0.5749 nan 0.0100 -0.0000
## 320 0.5615 nan 0.0100 0.0001
## 340 0.5475 nan 0.0100 0.0001
## 360 0.5342 nan 0.0100 -0.0000
## 380 0.5219 nan 0.0100 -0.0000
## 400 0.5105 nan 0.0100 0.0000
## 420 0.4989 nan 0.0100 0.0000
## 440 0.4873 nan 0.0100 -0.0000
## 460 0.4768 nan 0.0100 -0.0001
## 480 0.4659 nan 0.0100 0.0000
## 500 0.4565 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3114 nan 0.0100 0.0045
## 2 1.3033 nan 0.0100 0.0036
## 3 1.2941 nan 0.0100 0.0042
## 4 1.2861 nan 0.0100 0.0037
## 5 1.2777 nan 0.0100 0.0036
## 6 1.2684 nan 0.0100 0.0040
## 7 1.2614 nan 0.0100 0.0030
## 8 1.2537 nan 0.0100 0.0036
## 9 1.2454 nan 0.0100 0.0033
## 10 1.2377 nan 0.0100 0.0031
## 20 1.1657 nan 0.0100 0.0033
## 40 1.0509 nan 0.0100 0.0021
## 60 0.9630 nan 0.0100 0.0013
## 80 0.8950 nan 0.0100 0.0012
## 100 0.8401 nan 0.0100 0.0008
## 120 0.7958 nan 0.0100 0.0009
## 140 0.7568 nan 0.0100 0.0005
## 160 0.7248 nan 0.0100 0.0006
## 180 0.6968 nan 0.0100 0.0002
## 200 0.6722 nan 0.0100 0.0002
## 220 0.6508 nan 0.0100 0.0002
## 240 0.6321 nan 0.0100 0.0000
## 260 0.6141 nan 0.0100 0.0000
## 280 0.5973 nan 0.0100 -0.0000
## 300 0.5815 nan 0.0100 0.0001
## 320 0.5670 nan 0.0100 0.0001
## 340 0.5540 nan 0.0100 -0.0001
## 360 0.5416 nan 0.0100 -0.0001
## 380 0.5295 nan 0.0100 0.0000
## 400 0.5174 nan 0.0100 0.0001
## 420 0.5068 nan 0.0100 -0.0000
## 440 0.4967 nan 0.0100 0.0000
## 460 0.4873 nan 0.0100 0.0000
## 480 0.4779 nan 0.0100 -0.0000
## 500 0.4693 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3123 nan 0.0100 0.0039
## 2 1.3034 nan 0.0100 0.0041
## 3 1.2943 nan 0.0100 0.0042
## 4 1.2857 nan 0.0100 0.0036
## 5 1.2774 nan 0.0100 0.0038
## 6 1.2693 nan 0.0100 0.0035
## 7 1.2610 nan 0.0100 0.0036
## 8 1.2536 nan 0.0100 0.0034
## 9 1.2452 nan 0.0100 0.0038
## 10 1.2369 nan 0.0100 0.0037
## 20 1.1664 nan 0.0100 0.0029
## 40 1.0525 nan 0.0100 0.0022
## 60 0.9675 nan 0.0100 0.0013
## 80 0.8987 nan 0.0100 0.0011
## 100 0.8449 nan 0.0100 0.0008
## 120 0.8013 nan 0.0100 0.0005
## 140 0.7625 nan 0.0100 0.0004
## 160 0.7318 nan 0.0100 0.0002
## 180 0.7061 nan 0.0100 0.0002
## 200 0.6831 nan 0.0100 0.0001
## 220 0.6619 nan 0.0100 0.0002
## 240 0.6425 nan 0.0100 0.0000
## 260 0.6248 nan 0.0100 0.0000
## 280 0.6079 nan 0.0100 -0.0000
## 300 0.5938 nan 0.0100 0.0000
## 320 0.5795 nan 0.0100 0.0001
## 340 0.5670 nan 0.0100 -0.0003
## 360 0.5551 nan 0.0100 -0.0000
## 380 0.5438 nan 0.0100 0.0001
## 400 0.5326 nan 0.0100 -0.0001
## 420 0.5208 nan 0.0100 -0.0001
## 440 0.5114 nan 0.0100 -0.0001
## 460 0.5008 nan 0.0100 -0.0000
## 480 0.4912 nan 0.0100 -0.0001
## 500 0.4823 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2458 nan 0.1000 0.0331
## 2 1.1915 nan 0.1000 0.0239
## 3 1.1326 nan 0.1000 0.0215
## 4 1.0847 nan 0.1000 0.0220
## 5 1.0401 nan 0.1000 0.0200
## 6 1.0028 nan 0.1000 0.0147
## 7 0.9636 nan 0.1000 0.0166
## 8 0.9380 nan 0.1000 0.0081
## 9 0.9121 nan 0.1000 0.0100
## 10 0.8861 nan 0.1000 0.0112
## 20 0.7392 nan 0.1000 0.0011
## 40 0.6112 nan 0.1000 -0.0021
## 60 0.5321 nan 0.1000 -0.0006
## 80 0.4658 nan 0.1000 -0.0008
## 100 0.4114 nan 0.1000 -0.0011
## 120 0.3715 nan 0.1000 -0.0007
## 140 0.3334 nan 0.1000 -0.0009
## 160 0.3005 nan 0.1000 -0.0003
## 180 0.2700 nan 0.1000 -0.0003
## 200 0.2428 nan 0.1000 -0.0006
## 220 0.2190 nan 0.1000 -0.0008
## 240 0.1988 nan 0.1000 -0.0013
## 260 0.1821 nan 0.1000 -0.0004
## 280 0.1647 nan 0.1000 -0.0003
## 300 0.1493 nan 0.1000 -0.0003
## 320 0.1360 nan 0.1000 -0.0008
## 340 0.1258 nan 0.1000 -0.0003
## 360 0.1150 nan 0.1000 -0.0007
## 380 0.1057 nan 0.1000 -0.0003
## 400 0.0976 nan 0.1000 -0.0001
## 420 0.0901 nan 0.1000 -0.0001
## 440 0.0828 nan 0.1000 -0.0001
## 460 0.0766 nan 0.1000 -0.0003
## 480 0.0710 nan 0.1000 -0.0002
## 500 0.0651 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2535 nan 0.1000 0.0320
## 2 1.1850 nan 0.1000 0.0282
## 3 1.1256 nan 0.1000 0.0279
## 4 1.0712 nan 0.1000 0.0233
## 5 1.0271 nan 0.1000 0.0156
## 6 0.9910 nan 0.1000 0.0130
## 7 0.9561 nan 0.1000 0.0140
## 8 0.9245 nan 0.1000 0.0119
## 9 0.8980 nan 0.1000 0.0083
## 10 0.8757 nan 0.1000 0.0082
## 20 0.7350 nan 0.1000 0.0021
## 40 0.6115 nan 0.1000 -0.0012
## 60 0.5290 nan 0.1000 -0.0010
## 80 0.4697 nan 0.1000 -0.0000
## 100 0.4256 nan 0.1000 -0.0015
## 120 0.3852 nan 0.1000 -0.0008
## 140 0.3450 nan 0.1000 -0.0006
## 160 0.3136 nan 0.1000 -0.0000
## 180 0.2833 nan 0.1000 -0.0004
## 200 0.2565 nan 0.1000 -0.0008
## 220 0.2350 nan 0.1000 -0.0005
## 240 0.2163 nan 0.1000 -0.0005
## 260 0.1990 nan 0.1000 -0.0005
## 280 0.1830 nan 0.1000 -0.0006
## 300 0.1683 nan 0.1000 -0.0011
## 320 0.1547 nan 0.1000 -0.0008
## 340 0.1418 nan 0.1000 -0.0004
## 360 0.1305 nan 0.1000 -0.0002
## 380 0.1194 nan 0.1000 -0.0004
## 400 0.1099 nan 0.1000 -0.0002
## 420 0.1004 nan 0.1000 -0.0004
## 440 0.0922 nan 0.1000 -0.0001
## 460 0.0855 nan 0.1000 -0.0001
## 480 0.0789 nan 0.1000 -0.0001
## 500 0.0728 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2415 nan 0.1000 0.0382
## 2 1.1776 nan 0.1000 0.0283
## 3 1.1214 nan 0.1000 0.0259
## 4 1.0725 nan 0.1000 0.0210
## 5 1.0299 nan 0.1000 0.0177
## 6 0.9931 nan 0.1000 0.0134
## 7 0.9608 nan 0.1000 0.0137
## 8 0.9339 nan 0.1000 0.0102
## 9 0.9052 nan 0.1000 0.0102
## 10 0.8818 nan 0.1000 0.0083
## 20 0.7271 nan 0.1000 0.0031
## 40 0.6087 nan 0.1000 -0.0004
## 60 0.5356 nan 0.1000 0.0000
## 80 0.4652 nan 0.1000 -0.0004
## 100 0.4191 nan 0.1000 -0.0016
## 120 0.3786 nan 0.1000 -0.0015
## 140 0.3416 nan 0.1000 -0.0004
## 160 0.3122 nan 0.1000 -0.0008
## 180 0.2824 nan 0.1000 -0.0010
## 200 0.2584 nan 0.1000 -0.0008
## 220 0.2391 nan 0.1000 -0.0008
## 240 0.2207 nan 0.1000 -0.0011
## 260 0.2030 nan 0.1000 0.0000
## 280 0.1869 nan 0.1000 -0.0005
## 300 0.1727 nan 0.1000 -0.0010
## 320 0.1582 nan 0.1000 -0.0002
## 340 0.1471 nan 0.1000 -0.0005
## 360 0.1369 nan 0.1000 -0.0006
## 380 0.1278 nan 0.1000 -0.0004
## 400 0.1192 nan 0.1000 -0.0005
## 420 0.1105 nan 0.1000 -0.0003
## 440 0.1020 nan 0.1000 -0.0003
## 460 0.0949 nan 0.1000 -0.0002
## 480 0.0881 nan 0.1000 -0.0003
## 500 0.0820 nan 0.1000 -0.0004
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2399 nan 0.1000 0.0318
## 2 1.1655 nan 0.1000 0.0299
## 3 1.1067 nan 0.1000 0.0240
## 4 1.0539 nan 0.1000 0.0217
## 5 1.0117 nan 0.1000 0.0181
## 6 0.9719 nan 0.1000 0.0166
## 7 0.9391 nan 0.1000 0.0122
## 8 0.9043 nan 0.1000 0.0131
## 9 0.8815 nan 0.1000 0.0088
## 10 0.8559 nan 0.1000 0.0098
## 20 0.7031 nan 0.1000 0.0016
## 40 0.5556 nan 0.1000 -0.0005
## 60 0.4734 nan 0.1000 -0.0016
## 80 0.4132 nan 0.1000 -0.0007
## 100 0.3566 nan 0.1000 -0.0014
## 120 0.3107 nan 0.1000 -0.0002
## 140 0.2686 nan 0.1000 -0.0002
## 160 0.2381 nan 0.1000 -0.0005
## 180 0.2076 nan 0.1000 -0.0004
## 200 0.1825 nan 0.1000 0.0003
## 220 0.1632 nan 0.1000 -0.0002
## 240 0.1468 nan 0.1000 -0.0006
## 260 0.1334 nan 0.1000 -0.0004
## 280 0.1201 nan 0.1000 -0.0004
## 300 0.1084 nan 0.1000 -0.0000
## 320 0.0971 nan 0.1000 -0.0003
## 340 0.0881 nan 0.1000 -0.0004
## 360 0.0798 nan 0.1000 -0.0003
## 380 0.0726 nan 0.1000 -0.0003
## 400 0.0657 nan 0.1000 -0.0001
## 420 0.0587 nan 0.1000 -0.0001
## 440 0.0529 nan 0.1000 -0.0002
## 460 0.0478 nan 0.1000 -0.0001
## 480 0.0427 nan 0.1000 -0.0000
## 500 0.0391 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2376 nan 0.1000 0.0343
## 2 1.1673 nan 0.1000 0.0325
## 3 1.1066 nan 0.1000 0.0252
## 4 1.0550 nan 0.1000 0.0191
## 5 1.0080 nan 0.1000 0.0191
## 6 0.9714 nan 0.1000 0.0147
## 7 0.9380 nan 0.1000 0.0138
## 8 0.9039 nan 0.1000 0.0128
## 9 0.8781 nan 0.1000 0.0086
## 10 0.8514 nan 0.1000 0.0101
## 20 0.6975 nan 0.1000 0.0031
## 40 0.5671 nan 0.1000 0.0004
## 60 0.4781 nan 0.1000 -0.0004
## 80 0.4137 nan 0.1000 -0.0002
## 100 0.3559 nan 0.1000 0.0004
## 120 0.3116 nan 0.1000 -0.0008
## 140 0.2702 nan 0.1000 -0.0005
## 160 0.2402 nan 0.1000 -0.0011
## 180 0.2082 nan 0.1000 -0.0008
## 200 0.1842 nan 0.1000 -0.0003
## 220 0.1649 nan 0.1000 -0.0003
## 240 0.1479 nan 0.1000 -0.0002
## 260 0.1323 nan 0.1000 -0.0003
## 280 0.1189 nan 0.1000 -0.0004
## 300 0.1075 nan 0.1000 -0.0002
## 320 0.0973 nan 0.1000 -0.0002
## 340 0.0871 nan 0.1000 -0.0003
## 360 0.0788 nan 0.1000 -0.0002
## 380 0.0726 nan 0.1000 -0.0005
## 400 0.0658 nan 0.1000 -0.0002
## 420 0.0600 nan 0.1000 0.0000
## 440 0.0545 nan 0.1000 -0.0002
## 460 0.0495 nan 0.1000 -0.0001
## 480 0.0446 nan 0.1000 0.0000
## 500 0.0407 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2380 nan 0.1000 0.0365
## 2 1.1705 nan 0.1000 0.0299
## 3 1.1123 nan 0.1000 0.0228
## 4 1.0602 nan 0.1000 0.0191
## 5 1.0095 nan 0.1000 0.0216
## 6 0.9681 nan 0.1000 0.0152
## 7 0.9358 nan 0.1000 0.0131
## 8 0.9065 nan 0.1000 0.0111
## 9 0.8777 nan 0.1000 0.0125
## 10 0.8551 nan 0.1000 0.0090
## 20 0.7100 nan 0.1000 0.0009
## 40 0.5780 nan 0.1000 0.0002
## 60 0.4880 nan 0.1000 -0.0006
## 80 0.4229 nan 0.1000 -0.0010
## 100 0.3682 nan 0.1000 -0.0016
## 120 0.3230 nan 0.1000 -0.0009
## 140 0.2857 nan 0.1000 -0.0016
## 160 0.2553 nan 0.1000 -0.0010
## 180 0.2271 nan 0.1000 -0.0006
## 200 0.2035 nan 0.1000 -0.0013
## 220 0.1778 nan 0.1000 -0.0001
## 240 0.1612 nan 0.1000 -0.0009
## 260 0.1441 nan 0.1000 -0.0002
## 280 0.1308 nan 0.1000 -0.0006
## 300 0.1192 nan 0.1000 -0.0003
## 320 0.1083 nan 0.1000 -0.0004
## 340 0.0991 nan 0.1000 -0.0001
## 360 0.0898 nan 0.1000 -0.0004
## 380 0.0813 nan 0.1000 -0.0003
## 400 0.0749 nan 0.1000 -0.0002
## 420 0.0679 nan 0.1000 -0.0001
## 440 0.0628 nan 0.1000 -0.0002
## 460 0.0571 nan 0.1000 -0.0002
## 480 0.0520 nan 0.1000 -0.0002
## 500 0.0468 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2388 nan 0.1000 0.0357
## 2 1.1737 nan 0.1000 0.0258
## 3 1.1102 nan 0.1000 0.0226
## 4 1.0559 nan 0.1000 0.0240
## 5 1.0070 nan 0.1000 0.0215
## 6 0.9601 nan 0.1000 0.0199
## 7 0.9237 nan 0.1000 0.0147
## 8 0.8940 nan 0.1000 0.0103
## 9 0.8633 nan 0.1000 0.0120
## 10 0.8355 nan 0.1000 0.0088
## 20 0.6673 nan 0.1000 -0.0001
## 40 0.5067 nan 0.1000 -0.0007
## 60 0.4133 nan 0.1000 -0.0000
## 80 0.3447 nan 0.1000 -0.0013
## 100 0.2920 nan 0.1000 -0.0005
## 120 0.2507 nan 0.1000 -0.0003
## 140 0.2152 nan 0.1000 -0.0001
## 160 0.1868 nan 0.1000 -0.0004
## 180 0.1606 nan 0.1000 -0.0009
## 200 0.1396 nan 0.1000 -0.0005
## 220 0.1223 nan 0.1000 -0.0004
## 240 0.1080 nan 0.1000 0.0000
## 260 0.0949 nan 0.1000 0.0001
## 280 0.0832 nan 0.1000 -0.0001
## 300 0.0739 nan 0.1000 0.0000
## 320 0.0649 nan 0.1000 -0.0001
## 340 0.0571 nan 0.1000 -0.0002
## 360 0.0503 nan 0.1000 -0.0002
## 380 0.0447 nan 0.1000 0.0000
## 400 0.0396 nan 0.1000 -0.0000
## 420 0.0358 nan 0.1000 -0.0001
## 440 0.0315 nan 0.1000 -0.0000
## 460 0.0280 nan 0.1000 -0.0001
## 480 0.0249 nan 0.1000 -0.0000
## 500 0.0221 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2388 nan 0.1000 0.0387
## 2 1.1634 nan 0.1000 0.0341
## 3 1.1048 nan 0.1000 0.0244
## 4 1.0513 nan 0.1000 0.0248
## 5 1.0047 nan 0.1000 0.0189
## 6 0.9565 nan 0.1000 0.0210
## 7 0.9196 nan 0.1000 0.0135
## 8 0.8867 nan 0.1000 0.0131
## 9 0.8601 nan 0.1000 0.0095
## 10 0.8350 nan 0.1000 0.0099
## 20 0.6728 nan 0.1000 0.0012
## 40 0.5167 nan 0.1000 0.0006
## 60 0.4234 nan 0.1000 -0.0020
## 80 0.3590 nan 0.1000 -0.0002
## 100 0.3009 nan 0.1000 -0.0014
## 120 0.2592 nan 0.1000 -0.0005
## 140 0.2240 nan 0.1000 0.0010
## 160 0.1923 nan 0.1000 -0.0005
## 180 0.1654 nan 0.1000 -0.0004
## 200 0.1433 nan 0.1000 -0.0004
## 220 0.1255 nan 0.1000 -0.0001
## 240 0.1114 nan 0.1000 -0.0002
## 260 0.0975 nan 0.1000 -0.0006
## 280 0.0860 nan 0.1000 -0.0003
## 300 0.0758 nan 0.1000 -0.0002
## 320 0.0674 nan 0.1000 -0.0004
## 340 0.0595 nan 0.1000 -0.0002
## 360 0.0524 nan 0.1000 -0.0001
## 380 0.0464 nan 0.1000 -0.0001
## 400 0.0412 nan 0.1000 0.0001
## 420 0.0367 nan 0.1000 -0.0000
## 440 0.0324 nan 0.1000 -0.0001
## 460 0.0293 nan 0.1000 -0.0001
## 480 0.0256 nan 0.1000 -0.0000
## 500 0.0225 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2323 nan 0.1000 0.0387
## 2 1.1627 nan 0.1000 0.0312
## 3 1.1052 nan 0.1000 0.0231
## 4 1.0532 nan 0.1000 0.0191
## 5 1.0070 nan 0.1000 0.0201
## 6 0.9681 nan 0.1000 0.0154
## 7 0.9332 nan 0.1000 0.0155
## 8 0.9038 nan 0.1000 0.0117
## 9 0.8751 nan 0.1000 0.0104
## 10 0.8500 nan 0.1000 0.0080
## 20 0.6781 nan 0.1000 0.0047
## 40 0.5396 nan 0.1000 0.0002
## 60 0.4520 nan 0.1000 -0.0017
## 80 0.3819 nan 0.1000 -0.0006
## 100 0.3242 nan 0.1000 -0.0017
## 120 0.2768 nan 0.1000 -0.0019
## 140 0.2398 nan 0.1000 -0.0009
## 160 0.2088 nan 0.1000 -0.0005
## 180 0.1826 nan 0.1000 -0.0004
## 200 0.1572 nan 0.1000 -0.0008
## 220 0.1384 nan 0.1000 -0.0007
## 240 0.1211 nan 0.1000 -0.0002
## 260 0.1075 nan 0.1000 -0.0007
## 280 0.0962 nan 0.1000 -0.0005
## 300 0.0834 nan 0.1000 -0.0002
## 320 0.0736 nan 0.1000 -0.0003
## 340 0.0656 nan 0.1000 -0.0002
## 360 0.0589 nan 0.1000 -0.0003
## 380 0.0528 nan 0.1000 -0.0002
## 400 0.0472 nan 0.1000 -0.0003
## 420 0.0421 nan 0.1000 -0.0001
## 440 0.0376 nan 0.1000 -0.0001
## 460 0.0336 nan 0.1000 -0.0002
## 480 0.0297 nan 0.1000 -0.0001
## 500 0.0267 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3180 nan 0.0010 0.0004
## 4 1.3172 nan 0.0010 0.0003
## 5 1.3163 nan 0.0010 0.0004
## 6 1.3154 nan 0.0010 0.0003
## 7 1.3147 nan 0.0010 0.0003
## 8 1.3139 nan 0.0010 0.0004
## 9 1.3130 nan 0.0010 0.0004
## 10 1.3122 nan 0.0010 0.0004
## 20 1.3041 nan 0.0010 0.0003
## 40 1.2878 nan 0.0010 0.0003
## 60 1.2725 nan 0.0010 0.0004
## 80 1.2575 nan 0.0010 0.0003
## 100 1.2425 nan 0.0010 0.0003
## 120 1.2282 nan 0.0010 0.0003
## 140 1.2145 nan 0.0010 0.0003
## 160 1.2009 nan 0.0010 0.0003
## 180 1.1880 nan 0.0010 0.0003
## 200 1.1753 nan 0.0010 0.0003
## 220 1.1634 nan 0.0010 0.0003
## 240 1.1514 nan 0.0010 0.0002
## 260 1.1396 nan 0.0010 0.0002
## 280 1.1282 nan 0.0010 0.0003
## 300 1.1172 nan 0.0010 0.0002
## 320 1.1065 nan 0.0010 0.0002
## 340 1.0961 nan 0.0010 0.0002
## 360 1.0860 nan 0.0010 0.0002
## 380 1.0760 nan 0.0010 0.0002
## 400 1.0666 nan 0.0010 0.0001
## 420 1.0571 nan 0.0010 0.0002
## 440 1.0480 nan 0.0010 0.0002
## 460 1.0389 nan 0.0010 0.0002
## 480 1.0305 nan 0.0010 0.0002
## 500 1.0222 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0003
## 2 1.3190 nan 0.0010 0.0004
## 3 1.3182 nan 0.0010 0.0004
## 4 1.3173 nan 0.0010 0.0004
## 5 1.3165 nan 0.0010 0.0004
## 6 1.3157 nan 0.0010 0.0004
## 7 1.3149 nan 0.0010 0.0004
## 8 1.3141 nan 0.0010 0.0004
## 9 1.3132 nan 0.0010 0.0004
## 10 1.3123 nan 0.0010 0.0004
## 20 1.3041 nan 0.0010 0.0004
## 40 1.2878 nan 0.0010 0.0003
## 60 1.2726 nan 0.0010 0.0003
## 80 1.2572 nan 0.0010 0.0003
## 100 1.2427 nan 0.0010 0.0003
## 120 1.2282 nan 0.0010 0.0003
## 140 1.2145 nan 0.0010 0.0003
## 160 1.2012 nan 0.0010 0.0003
## 180 1.1885 nan 0.0010 0.0003
## 200 1.1759 nan 0.0010 0.0003
## 220 1.1639 nan 0.0010 0.0003
## 240 1.1520 nan 0.0010 0.0003
## 260 1.1406 nan 0.0010 0.0002
## 280 1.1293 nan 0.0010 0.0003
## 300 1.1184 nan 0.0010 0.0002
## 320 1.1078 nan 0.0010 0.0002
## 340 1.0973 nan 0.0010 0.0002
## 360 1.0870 nan 0.0010 0.0002
## 380 1.0770 nan 0.0010 0.0002
## 400 1.0672 nan 0.0010 0.0002
## 420 1.0578 nan 0.0010 0.0002
## 440 1.0487 nan 0.0010 0.0002
## 460 1.0400 nan 0.0010 0.0002
## 480 1.0310 nan 0.0010 0.0002
## 500 1.0224 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3181 nan 0.0010 0.0004
## 4 1.3173 nan 0.0010 0.0004
## 5 1.3165 nan 0.0010 0.0004
## 6 1.3157 nan 0.0010 0.0003
## 7 1.3148 nan 0.0010 0.0004
## 8 1.3139 nan 0.0010 0.0004
## 9 1.3131 nan 0.0010 0.0004
## 10 1.3123 nan 0.0010 0.0003
## 20 1.3038 nan 0.0010 0.0004
## 40 1.2880 nan 0.0010 0.0004
## 60 1.2729 nan 0.0010 0.0003
## 80 1.2580 nan 0.0010 0.0003
## 100 1.2435 nan 0.0010 0.0004
## 120 1.2296 nan 0.0010 0.0003
## 140 1.2160 nan 0.0010 0.0003
## 160 1.2028 nan 0.0010 0.0003
## 180 1.1897 nan 0.0010 0.0003
## 200 1.1770 nan 0.0010 0.0003
## 220 1.1647 nan 0.0010 0.0003
## 240 1.1528 nan 0.0010 0.0002
## 260 1.1415 nan 0.0010 0.0003
## 280 1.1303 nan 0.0010 0.0002
## 300 1.1192 nan 0.0010 0.0003
## 320 1.1084 nan 0.0010 0.0002
## 340 1.0978 nan 0.0010 0.0003
## 360 1.0877 nan 0.0010 0.0002
## 380 1.0779 nan 0.0010 0.0002
## 400 1.0681 nan 0.0010 0.0002
## 420 1.0590 nan 0.0010 0.0002
## 440 1.0500 nan 0.0010 0.0002
## 460 1.0411 nan 0.0010 0.0002
## 480 1.0327 nan 0.0010 0.0002
## 500 1.0241 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3188 nan 0.0010 0.0004
## 3 1.3179 nan 0.0010 0.0004
## 4 1.3170 nan 0.0010 0.0004
## 5 1.3161 nan 0.0010 0.0004
## 6 1.3152 nan 0.0010 0.0004
## 7 1.3144 nan 0.0010 0.0004
## 8 1.3135 nan 0.0010 0.0004
## 9 1.3126 nan 0.0010 0.0004
## 10 1.3117 nan 0.0010 0.0004
## 20 1.3030 nan 0.0010 0.0004
## 40 1.2860 nan 0.0010 0.0004
## 60 1.2693 nan 0.0010 0.0004
## 80 1.2532 nan 0.0010 0.0004
## 100 1.2372 nan 0.0010 0.0004
## 120 1.2222 nan 0.0010 0.0003
## 140 1.2077 nan 0.0010 0.0003
## 160 1.1934 nan 0.0010 0.0003
## 180 1.1796 nan 0.0010 0.0004
## 200 1.1663 nan 0.0010 0.0003
## 220 1.1532 nan 0.0010 0.0002
## 240 1.1401 nan 0.0010 0.0003
## 260 1.1280 nan 0.0010 0.0003
## 280 1.1160 nan 0.0010 0.0003
## 300 1.1040 nan 0.0010 0.0003
## 320 1.0928 nan 0.0010 0.0003
## 340 1.0817 nan 0.0010 0.0002
## 360 1.0709 nan 0.0010 0.0002
## 380 1.0606 nan 0.0010 0.0002
## 400 1.0506 nan 0.0010 0.0002
## 420 1.0410 nan 0.0010 0.0002
## 440 1.0313 nan 0.0010 0.0002
## 460 1.0219 nan 0.0010 0.0002
## 480 1.0127 nan 0.0010 0.0002
## 500 1.0038 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3197 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0003
## 3 1.3181 nan 0.0010 0.0004
## 4 1.3172 nan 0.0010 0.0004
## 5 1.3163 nan 0.0010 0.0004
## 6 1.3154 nan 0.0010 0.0004
## 7 1.3145 nan 0.0010 0.0004
## 8 1.3135 nan 0.0010 0.0004
## 9 1.3126 nan 0.0010 0.0004
## 10 1.3117 nan 0.0010 0.0004
## 20 1.3030 nan 0.0010 0.0004
## 40 1.2862 nan 0.0010 0.0004
## 60 1.2698 nan 0.0010 0.0003
## 80 1.2536 nan 0.0010 0.0004
## 100 1.2380 nan 0.0010 0.0003
## 120 1.2227 nan 0.0010 0.0004
## 140 1.2080 nan 0.0010 0.0003
## 160 1.1938 nan 0.0010 0.0003
## 180 1.1802 nan 0.0010 0.0003
## 200 1.1670 nan 0.0010 0.0003
## 220 1.1537 nan 0.0010 0.0003
## 240 1.1411 nan 0.0010 0.0003
## 260 1.1289 nan 0.0010 0.0003
## 280 1.1170 nan 0.0010 0.0003
## 300 1.1055 nan 0.0010 0.0003
## 320 1.0941 nan 0.0010 0.0002
## 340 1.0831 nan 0.0010 0.0003
## 360 1.0726 nan 0.0010 0.0002
## 380 1.0622 nan 0.0010 0.0002
## 400 1.0518 nan 0.0010 0.0002
## 420 1.0422 nan 0.0010 0.0002
## 440 1.0325 nan 0.0010 0.0002
## 460 1.0232 nan 0.0010 0.0002
## 480 1.0142 nan 0.0010 0.0002
## 500 1.0052 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3180 nan 0.0010 0.0004
## 4 1.3171 nan 0.0010 0.0004
## 5 1.3163 nan 0.0010 0.0004
## 6 1.3154 nan 0.0010 0.0004
## 7 1.3145 nan 0.0010 0.0004
## 8 1.3136 nan 0.0010 0.0004
## 9 1.3126 nan 0.0010 0.0004
## 10 1.3118 nan 0.0010 0.0004
## 20 1.3031 nan 0.0010 0.0004
## 40 1.2861 nan 0.0010 0.0003
## 60 1.2694 nan 0.0010 0.0003
## 80 1.2532 nan 0.0010 0.0004
## 100 1.2379 nan 0.0010 0.0004
## 120 1.2232 nan 0.0010 0.0003
## 140 1.2084 nan 0.0010 0.0003
## 160 1.1945 nan 0.0010 0.0003
## 180 1.1808 nan 0.0010 0.0003
## 200 1.1678 nan 0.0010 0.0003
## 220 1.1548 nan 0.0010 0.0003
## 240 1.1423 nan 0.0010 0.0003
## 260 1.1301 nan 0.0010 0.0003
## 280 1.1181 nan 0.0010 0.0002
## 300 1.1064 nan 0.0010 0.0002
## 320 1.0952 nan 0.0010 0.0002
## 340 1.0841 nan 0.0010 0.0003
## 360 1.0737 nan 0.0010 0.0002
## 380 1.0634 nan 0.0010 0.0002
## 400 1.0534 nan 0.0010 0.0002
## 420 1.0435 nan 0.0010 0.0002
## 440 1.0339 nan 0.0010 0.0002
## 460 1.0247 nan 0.0010 0.0002
## 480 1.0157 nan 0.0010 0.0002
## 500 1.0068 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3197 nan 0.0010 0.0004
## 2 1.3187 nan 0.0010 0.0004
## 3 1.3178 nan 0.0010 0.0004
## 4 1.3168 nan 0.0010 0.0004
## 5 1.3157 nan 0.0010 0.0004
## 6 1.3149 nan 0.0010 0.0003
## 7 1.3139 nan 0.0010 0.0004
## 8 1.3129 nan 0.0010 0.0004
## 9 1.3119 nan 0.0010 0.0004
## 10 1.3110 nan 0.0010 0.0004
## 20 1.3017 nan 0.0010 0.0004
## 40 1.2836 nan 0.0010 0.0004
## 60 1.2662 nan 0.0010 0.0004
## 80 1.2495 nan 0.0010 0.0004
## 100 1.2333 nan 0.0010 0.0003
## 120 1.2174 nan 0.0010 0.0003
## 140 1.2019 nan 0.0010 0.0003
## 160 1.1870 nan 0.0010 0.0004
## 180 1.1725 nan 0.0010 0.0003
## 200 1.1583 nan 0.0010 0.0003
## 220 1.1446 nan 0.0010 0.0003
## 240 1.1313 nan 0.0010 0.0003
## 260 1.1185 nan 0.0010 0.0003
## 280 1.1060 nan 0.0010 0.0003
## 300 1.0939 nan 0.0010 0.0003
## 320 1.0823 nan 0.0010 0.0002
## 340 1.0710 nan 0.0010 0.0003
## 360 1.0599 nan 0.0010 0.0002
## 380 1.0493 nan 0.0010 0.0002
## 400 1.0390 nan 0.0010 0.0002
## 420 1.0288 nan 0.0010 0.0002
## 440 1.0189 nan 0.0010 0.0002
## 460 1.0091 nan 0.0010 0.0002
## 480 0.9995 nan 0.0010 0.0002
## 500 0.9904 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3197 nan 0.0010 0.0004
## 2 1.3188 nan 0.0010 0.0004
## 3 1.3179 nan 0.0010 0.0004
## 4 1.3169 nan 0.0010 0.0004
## 5 1.3159 nan 0.0010 0.0004
## 6 1.3150 nan 0.0010 0.0004
## 7 1.3141 nan 0.0010 0.0004
## 8 1.3132 nan 0.0010 0.0004
## 9 1.3122 nan 0.0010 0.0004
## 10 1.3113 nan 0.0010 0.0004
## 20 1.3021 nan 0.0010 0.0004
## 40 1.2841 nan 0.0010 0.0004
## 60 1.2665 nan 0.0010 0.0004
## 80 1.2494 nan 0.0010 0.0003
## 100 1.2331 nan 0.0010 0.0003
## 120 1.2175 nan 0.0010 0.0003
## 140 1.2021 nan 0.0010 0.0003
## 160 1.1870 nan 0.0010 0.0003
## 180 1.1732 nan 0.0010 0.0003
## 200 1.1592 nan 0.0010 0.0003
## 220 1.1459 nan 0.0010 0.0003
## 240 1.1329 nan 0.0010 0.0003
## 260 1.1200 nan 0.0010 0.0003
## 280 1.1076 nan 0.0010 0.0003
## 300 1.0958 nan 0.0010 0.0003
## 320 1.0842 nan 0.0010 0.0002
## 340 1.0728 nan 0.0010 0.0002
## 360 1.0616 nan 0.0010 0.0002
## 380 1.0511 nan 0.0010 0.0002
## 400 1.0406 nan 0.0010 0.0002
## 420 1.0300 nan 0.0010 0.0002
## 440 1.0201 nan 0.0010 0.0002
## 460 1.0103 nan 0.0010 0.0002
## 480 1.0006 nan 0.0010 0.0002
## 500 0.9917 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3188 nan 0.0010 0.0005
## 3 1.3179 nan 0.0010 0.0004
## 4 1.3170 nan 0.0010 0.0004
## 5 1.3161 nan 0.0010 0.0004
## 6 1.3152 nan 0.0010 0.0004
## 7 1.3143 nan 0.0010 0.0004
## 8 1.3134 nan 0.0010 0.0004
## 9 1.3124 nan 0.0010 0.0004
## 10 1.3115 nan 0.0010 0.0003
## 20 1.3023 nan 0.0010 0.0004
## 40 1.2847 nan 0.0010 0.0004
## 60 1.2675 nan 0.0010 0.0004
## 80 1.2509 nan 0.0010 0.0003
## 100 1.2349 nan 0.0010 0.0003
## 120 1.2193 nan 0.0010 0.0004
## 140 1.2042 nan 0.0010 0.0004
## 160 1.1895 nan 0.0010 0.0003
## 180 1.1752 nan 0.0010 0.0003
## 200 1.1615 nan 0.0010 0.0003
## 220 1.1483 nan 0.0010 0.0003
## 240 1.1350 nan 0.0010 0.0003
## 260 1.1224 nan 0.0010 0.0002
## 280 1.1101 nan 0.0010 0.0003
## 300 1.0982 nan 0.0010 0.0002
## 320 1.0866 nan 0.0010 0.0002
## 340 1.0753 nan 0.0010 0.0002
## 360 1.0643 nan 0.0010 0.0003
## 380 1.0536 nan 0.0010 0.0002
## 400 1.0434 nan 0.0010 0.0002
## 420 1.0336 nan 0.0010 0.0002
## 440 1.0239 nan 0.0010 0.0002
## 460 1.0144 nan 0.0010 0.0002
## 480 1.0050 nan 0.0010 0.0002
## 500 0.9959 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3127 nan 0.0100 0.0036
## 2 1.3037 nan 0.0100 0.0036
## 3 1.2950 nan 0.0100 0.0047
## 4 1.2871 nan 0.0100 0.0038
## 5 1.2791 nan 0.0100 0.0036
## 6 1.2711 nan 0.0100 0.0037
## 7 1.2638 nan 0.0100 0.0037
## 8 1.2565 nan 0.0100 0.0031
## 9 1.2486 nan 0.0100 0.0035
## 10 1.2414 nan 0.0100 0.0033
## 20 1.1747 nan 0.0100 0.0025
## 40 1.0650 nan 0.0100 0.0020
## 60 0.9803 nan 0.0100 0.0015
## 80 0.9146 nan 0.0100 0.0011
## 100 0.8627 nan 0.0100 0.0005
## 120 0.8193 nan 0.0100 0.0008
## 140 0.7835 nan 0.0100 0.0003
## 160 0.7538 nan 0.0100 0.0003
## 180 0.7285 nan 0.0100 0.0002
## 200 0.7060 nan 0.0100 0.0002
## 220 0.6863 nan 0.0100 0.0003
## 240 0.6683 nan 0.0100 0.0001
## 260 0.6531 nan 0.0100 0.0001
## 280 0.6381 nan 0.0100 0.0003
## 300 0.6245 nan 0.0100 -0.0001
## 320 0.6113 nan 0.0100 -0.0000
## 340 0.5998 nan 0.0100 0.0001
## 360 0.5892 nan 0.0100 0.0001
## 380 0.5787 nan 0.0100 0.0001
## 400 0.5686 nan 0.0100 0.0001
## 420 0.5603 nan 0.0100 -0.0000
## 440 0.5521 nan 0.0100 -0.0001
## 460 0.5429 nan 0.0100 -0.0001
## 480 0.5348 nan 0.0100 -0.0001
## 500 0.5275 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3132 nan 0.0100 0.0036
## 2 1.3049 nan 0.0100 0.0032
## 3 1.2969 nan 0.0100 0.0034
## 4 1.2893 nan 0.0100 0.0033
## 5 1.2810 nan 0.0100 0.0041
## 6 1.2729 nan 0.0100 0.0038
## 7 1.2654 nan 0.0100 0.0035
## 8 1.2584 nan 0.0100 0.0029
## 9 1.2512 nan 0.0100 0.0033
## 10 1.2438 nan 0.0100 0.0030
## 20 1.1751 nan 0.0100 0.0028
## 40 1.0659 nan 0.0100 0.0024
## 60 0.9816 nan 0.0100 0.0014
## 80 0.9172 nan 0.0100 0.0013
## 100 0.8641 nan 0.0100 0.0007
## 120 0.8197 nan 0.0100 0.0006
## 140 0.7844 nan 0.0100 0.0006
## 160 0.7541 nan 0.0100 0.0004
## 180 0.7280 nan 0.0100 0.0002
## 200 0.7066 nan 0.0100 0.0002
## 220 0.6881 nan 0.0100 0.0003
## 240 0.6726 nan 0.0100 -0.0001
## 260 0.6572 nan 0.0100 0.0001
## 280 0.6429 nan 0.0100 -0.0000
## 300 0.6313 nan 0.0100 0.0000
## 320 0.6200 nan 0.0100 0.0000
## 340 0.6089 nan 0.0100 -0.0001
## 360 0.5982 nan 0.0100 -0.0000
## 380 0.5883 nan 0.0100 -0.0001
## 400 0.5783 nan 0.0100 -0.0001
## 420 0.5692 nan 0.0100 0.0000
## 440 0.5615 nan 0.0100 -0.0001
## 460 0.5535 nan 0.0100 -0.0001
## 480 0.5457 nan 0.0100 -0.0001
## 500 0.5384 nan 0.0100 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3116 nan 0.0100 0.0042
## 2 1.3030 nan 0.0100 0.0035
## 3 1.2948 nan 0.0100 0.0037
## 4 1.2870 nan 0.0100 0.0035
## 5 1.2787 nan 0.0100 0.0039
## 6 1.2710 nan 0.0100 0.0037
## 7 1.2634 nan 0.0100 0.0030
## 8 1.2557 nan 0.0100 0.0035
## 9 1.2482 nan 0.0100 0.0036
## 10 1.2406 nan 0.0100 0.0031
## 20 1.1743 nan 0.0100 0.0028
## 40 1.0683 nan 0.0100 0.0018
## 60 0.9859 nan 0.0100 0.0015
## 80 0.9186 nan 0.0100 0.0009
## 100 0.8658 nan 0.0100 0.0008
## 120 0.8226 nan 0.0100 0.0007
## 140 0.7865 nan 0.0100 0.0008
## 160 0.7565 nan 0.0100 0.0003
## 180 0.7313 nan 0.0100 0.0003
## 200 0.7100 nan 0.0100 0.0002
## 220 0.6914 nan 0.0100 0.0003
## 240 0.6746 nan 0.0100 0.0001
## 260 0.6604 nan 0.0100 0.0001
## 280 0.6474 nan 0.0100 0.0001
## 300 0.6358 nan 0.0100 0.0000
## 320 0.6246 nan 0.0100 -0.0001
## 340 0.6142 nan 0.0100 0.0000
## 360 0.6057 nan 0.0100 -0.0001
## 380 0.5960 nan 0.0100 -0.0002
## 400 0.5872 nan 0.0100 -0.0000
## 420 0.5788 nan 0.0100 0.0000
## 440 0.5706 nan 0.0100 -0.0002
## 460 0.5635 nan 0.0100 -0.0001
## 480 0.5561 nan 0.0100 -0.0001
## 500 0.5488 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3118 nan 0.0100 0.0042
## 2 1.3027 nan 0.0100 0.0041
## 3 1.2939 nan 0.0100 0.0038
## 4 1.2848 nan 0.0100 0.0037
## 5 1.2762 nan 0.0100 0.0034
## 6 1.2692 nan 0.0100 0.0032
## 7 1.2610 nan 0.0100 0.0037
## 8 1.2532 nan 0.0100 0.0035
## 9 1.2455 nan 0.0100 0.0036
## 10 1.2376 nan 0.0100 0.0035
## 20 1.1687 nan 0.0100 0.0027
## 40 1.0519 nan 0.0100 0.0021
## 60 0.9627 nan 0.0100 0.0016
## 80 0.8926 nan 0.0100 0.0010
## 100 0.8357 nan 0.0100 0.0011
## 120 0.7902 nan 0.0100 0.0008
## 140 0.7525 nan 0.0100 0.0005
## 160 0.7217 nan 0.0100 0.0002
## 180 0.6946 nan 0.0100 0.0003
## 200 0.6723 nan 0.0100 0.0001
## 220 0.6517 nan 0.0100 0.0001
## 240 0.6326 nan 0.0100 0.0002
## 260 0.6165 nan 0.0100 0.0001
## 280 0.6007 nan 0.0100 -0.0000
## 300 0.5861 nan 0.0100 0.0001
## 320 0.5726 nan 0.0100 0.0000
## 340 0.5610 nan 0.0100 0.0000
## 360 0.5490 nan 0.0100 -0.0001
## 380 0.5377 nan 0.0100 -0.0000
## 400 0.5269 nan 0.0100 -0.0000
## 420 0.5162 nan 0.0100 -0.0001
## 440 0.5060 nan 0.0100 -0.0000
## 460 0.4965 nan 0.0100 -0.0000
## 480 0.4874 nan 0.0100 -0.0000
## 500 0.4795 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3112 nan 0.0100 0.0041
## 2 1.3019 nan 0.0100 0.0040
## 3 1.2936 nan 0.0100 0.0037
## 4 1.2848 nan 0.0100 0.0042
## 5 1.2760 nan 0.0100 0.0036
## 6 1.2673 nan 0.0100 0.0043
## 7 1.2588 nan 0.0100 0.0038
## 8 1.2516 nan 0.0100 0.0034
## 9 1.2441 nan 0.0100 0.0034
## 10 1.2356 nan 0.0100 0.0035
## 20 1.1643 nan 0.0100 0.0026
## 40 1.0514 nan 0.0100 0.0025
## 60 0.9639 nan 0.0100 0.0018
## 80 0.8931 nan 0.0100 0.0012
## 100 0.8405 nan 0.0100 0.0008
## 120 0.7959 nan 0.0100 0.0009
## 140 0.7584 nan 0.0100 0.0006
## 160 0.7291 nan 0.0100 0.0001
## 180 0.7019 nan 0.0100 0.0002
## 200 0.6788 nan 0.0100 0.0003
## 220 0.6579 nan 0.0100 0.0003
## 240 0.6410 nan 0.0100 0.0001
## 260 0.6245 nan 0.0100 0.0001
## 280 0.6095 nan 0.0100 0.0001
## 300 0.5962 nan 0.0100 -0.0001
## 320 0.5832 nan 0.0100 0.0001
## 340 0.5708 nan 0.0100 0.0002
## 360 0.5595 nan 0.0100 0.0001
## 380 0.5487 nan 0.0100 0.0001
## 400 0.5383 nan 0.0100 -0.0000
## 420 0.5300 nan 0.0100 -0.0001
## 440 0.5196 nan 0.0100 -0.0001
## 460 0.5102 nan 0.0100 0.0000
## 480 0.5015 nan 0.0100 -0.0001
## 500 0.4928 nan 0.0100 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3116 nan 0.0100 0.0040
## 2 1.3025 nan 0.0100 0.0038
## 3 1.2941 nan 0.0100 0.0041
## 4 1.2855 nan 0.0100 0.0039
## 5 1.2771 nan 0.0100 0.0029
## 6 1.2694 nan 0.0100 0.0039
## 7 1.2610 nan 0.0100 0.0038
## 8 1.2539 nan 0.0100 0.0030
## 9 1.2460 nan 0.0100 0.0034
## 10 1.2386 nan 0.0100 0.0035
## 20 1.1679 nan 0.0100 0.0031
## 40 1.0532 nan 0.0100 0.0023
## 60 0.9656 nan 0.0100 0.0016
## 80 0.8977 nan 0.0100 0.0010
## 100 0.8423 nan 0.0100 0.0008
## 120 0.7984 nan 0.0100 0.0007
## 140 0.7608 nan 0.0100 0.0006
## 160 0.7311 nan 0.0100 0.0003
## 180 0.7045 nan 0.0100 0.0000
## 200 0.6838 nan 0.0100 0.0003
## 220 0.6636 nan 0.0100 0.0001
## 240 0.6464 nan 0.0100 0.0000
## 260 0.6301 nan 0.0100 0.0000
## 280 0.6150 nan 0.0100 -0.0000
## 300 0.6023 nan 0.0100 0.0002
## 320 0.5905 nan 0.0100 0.0000
## 340 0.5788 nan 0.0100 -0.0001
## 360 0.5686 nan 0.0100 -0.0000
## 380 0.5582 nan 0.0100 0.0001
## 400 0.5483 nan 0.0100 -0.0000
## 420 0.5397 nan 0.0100 0.0000
## 440 0.5309 nan 0.0100 -0.0002
## 460 0.5221 nan 0.0100 0.0000
## 480 0.5130 nan 0.0100 -0.0000
## 500 0.5048 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3112 nan 0.0100 0.0041
## 2 1.3020 nan 0.0100 0.0039
## 3 1.2932 nan 0.0100 0.0039
## 4 1.2838 nan 0.0100 0.0042
## 5 1.2756 nan 0.0100 0.0033
## 6 1.2665 nan 0.0100 0.0042
## 7 1.2574 nan 0.0100 0.0038
## 8 1.2492 nan 0.0100 0.0035
## 9 1.2413 nan 0.0100 0.0031
## 10 1.2331 nan 0.0100 0.0035
## 20 1.1593 nan 0.0100 0.0031
## 40 1.0388 nan 0.0100 0.0020
## 60 0.9485 nan 0.0100 0.0016
## 80 0.8750 nan 0.0100 0.0010
## 100 0.8158 nan 0.0100 0.0008
## 120 0.7692 nan 0.0100 0.0005
## 140 0.7297 nan 0.0100 0.0002
## 160 0.6965 nan 0.0100 0.0004
## 180 0.6671 nan 0.0100 0.0004
## 200 0.6430 nan 0.0100 0.0003
## 220 0.6215 nan 0.0100 0.0003
## 240 0.5998 nan 0.0100 0.0001
## 260 0.5815 nan 0.0100 0.0000
## 280 0.5644 nan 0.0100 0.0001
## 300 0.5486 nan 0.0100 0.0002
## 320 0.5339 nan 0.0100 -0.0001
## 340 0.5202 nan 0.0100 0.0001
## 360 0.5078 nan 0.0100 -0.0001
## 380 0.4959 nan 0.0100 0.0001
## 400 0.4841 nan 0.0100 -0.0001
## 420 0.4738 nan 0.0100 0.0001
## 440 0.4645 nan 0.0100 -0.0001
## 460 0.4550 nan 0.0100 -0.0001
## 480 0.4458 nan 0.0100 -0.0002
## 500 0.4347 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3109 nan 0.0100 0.0040
## 2 1.3012 nan 0.0100 0.0049
## 3 1.2917 nan 0.0100 0.0043
## 4 1.2831 nan 0.0100 0.0039
## 5 1.2740 nan 0.0100 0.0043
## 6 1.2647 nan 0.0100 0.0038
## 7 1.2565 nan 0.0100 0.0037
## 8 1.2473 nan 0.0100 0.0040
## 9 1.2394 nan 0.0100 0.0032
## 10 1.2313 nan 0.0100 0.0036
## 20 1.1574 nan 0.0100 0.0031
## 40 1.0384 nan 0.0100 0.0023
## 60 0.9492 nan 0.0100 0.0016
## 80 0.8776 nan 0.0100 0.0014
## 100 0.8208 nan 0.0100 0.0010
## 120 0.7748 nan 0.0100 0.0008
## 140 0.7377 nan 0.0100 0.0005
## 160 0.7062 nan 0.0100 0.0004
## 180 0.6780 nan 0.0100 0.0004
## 200 0.6539 nan 0.0100 0.0003
## 220 0.6320 nan 0.0100 0.0002
## 240 0.6120 nan 0.0100 0.0002
## 260 0.5942 nan 0.0100 0.0001
## 280 0.5777 nan 0.0100 -0.0000
## 300 0.5636 nan 0.0100 0.0002
## 320 0.5500 nan 0.0100 -0.0001
## 340 0.5372 nan 0.0100 -0.0001
## 360 0.5248 nan 0.0100 -0.0001
## 380 0.5116 nan 0.0100 0.0000
## 400 0.5000 nan 0.0100 -0.0001
## 420 0.4887 nan 0.0100 -0.0001
## 440 0.4780 nan 0.0100 0.0000
## 460 0.4677 nan 0.0100 -0.0000
## 480 0.4579 nan 0.0100 -0.0000
## 500 0.4494 nan 0.0100 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3113 nan 0.0100 0.0045
## 2 1.3022 nan 0.0100 0.0041
## 3 1.2929 nan 0.0100 0.0038
## 4 1.2845 nan 0.0100 0.0038
## 5 1.2760 nan 0.0100 0.0034
## 6 1.2664 nan 0.0100 0.0040
## 7 1.2579 nan 0.0100 0.0038
## 8 1.2494 nan 0.0100 0.0032
## 9 1.2414 nan 0.0100 0.0033
## 10 1.2325 nan 0.0100 0.0036
## 20 1.1595 nan 0.0100 0.0034
## 40 1.0427 nan 0.0100 0.0021
## 60 0.9541 nan 0.0100 0.0016
## 80 0.8835 nan 0.0100 0.0009
## 100 0.8257 nan 0.0100 0.0009
## 120 0.7794 nan 0.0100 0.0007
## 140 0.7432 nan 0.0100 0.0003
## 160 0.7113 nan 0.0100 0.0002
## 180 0.6845 nan 0.0100 0.0002
## 200 0.6613 nan 0.0100 -0.0002
## 220 0.6413 nan 0.0100 0.0001
## 240 0.6224 nan 0.0100 0.0002
## 260 0.6039 nan 0.0100 0.0001
## 280 0.5885 nan 0.0100 0.0003
## 300 0.5736 nan 0.0100 0.0001
## 320 0.5609 nan 0.0100 -0.0001
## 340 0.5488 nan 0.0100 -0.0000
## 360 0.5367 nan 0.0100 -0.0000
## 380 0.5257 nan 0.0100 0.0000
## 400 0.5144 nan 0.0100 0.0000
## 420 0.5041 nan 0.0100 -0.0002
## 440 0.4943 nan 0.0100 -0.0002
## 460 0.4854 nan 0.0100 -0.0001
## 480 0.4757 nan 0.0100 -0.0001
## 500 0.4659 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2317 nan 0.1000 0.0382
## 2 1.1609 nan 0.1000 0.0319
## 3 1.1012 nan 0.1000 0.0230
## 4 1.0546 nan 0.1000 0.0210
## 5 1.0142 nan 0.1000 0.0170
## 6 0.9734 nan 0.1000 0.0182
## 7 0.9376 nan 0.1000 0.0143
## 8 0.9118 nan 0.1000 0.0101
## 9 0.8851 nan 0.1000 0.0104
## 10 0.8641 nan 0.1000 0.0071
## 20 0.7084 nan 0.1000 0.0030
## 40 0.5717 nan 0.1000 -0.0011
## 60 0.4943 nan 0.1000 -0.0011
## 80 0.4376 nan 0.1000 0.0000
## 100 0.3901 nan 0.1000 -0.0008
## 120 0.3501 nan 0.1000 -0.0012
## 140 0.3118 nan 0.1000 0.0001
## 160 0.2833 nan 0.1000 -0.0009
## 180 0.2542 nan 0.1000 -0.0003
## 200 0.2290 nan 0.1000 -0.0002
## 220 0.2093 nan 0.1000 -0.0004
## 240 0.1895 nan 0.1000 0.0004
## 260 0.1729 nan 0.1000 -0.0000
## 280 0.1572 nan 0.1000 -0.0006
## 300 0.1450 nan 0.1000 -0.0003
## 320 0.1334 nan 0.1000 -0.0000
## 340 0.1226 nan 0.1000 -0.0004
## 360 0.1114 nan 0.1000 -0.0002
## 380 0.1016 nan 0.1000 -0.0001
## 400 0.0934 nan 0.1000 -0.0002
## 420 0.0858 nan 0.1000 -0.0002
## 440 0.0800 nan 0.1000 -0.0001
## 460 0.0735 nan 0.1000 -0.0001
## 480 0.0680 nan 0.1000 -0.0004
## 500 0.0628 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2424 nan 0.1000 0.0370
## 2 1.1698 nan 0.1000 0.0316
## 3 1.1115 nan 0.1000 0.0271
## 4 1.0587 nan 0.1000 0.0226
## 5 1.0135 nan 0.1000 0.0223
## 6 0.9786 nan 0.1000 0.0128
## 7 0.9431 nan 0.1000 0.0156
## 8 0.9120 nan 0.1000 0.0121
## 9 0.8836 nan 0.1000 0.0115
## 10 0.8566 nan 0.1000 0.0102
## 20 0.7126 nan 0.1000 0.0022
## 40 0.5808 nan 0.1000 -0.0009
## 60 0.5091 nan 0.1000 -0.0011
## 80 0.4531 nan 0.1000 -0.0004
## 100 0.3989 nan 0.1000 -0.0014
## 120 0.3558 nan 0.1000 -0.0004
## 140 0.3250 nan 0.1000 -0.0003
## 160 0.2956 nan 0.1000 -0.0004
## 180 0.2698 nan 0.1000 -0.0007
## 200 0.2452 nan 0.1000 -0.0005
## 220 0.2246 nan 0.1000 -0.0002
## 240 0.2073 nan 0.1000 0.0000
## 260 0.1909 nan 0.1000 -0.0005
## 280 0.1737 nan 0.1000 -0.0003
## 300 0.1593 nan 0.1000 -0.0005
## 320 0.1460 nan 0.1000 -0.0003
## 340 0.1337 nan 0.1000 -0.0002
## 360 0.1238 nan 0.1000 0.0003
## 380 0.1138 nan 0.1000 -0.0006
## 400 0.1053 nan 0.1000 -0.0001
## 420 0.0971 nan 0.1000 -0.0004
## 440 0.0898 nan 0.1000 -0.0001
## 460 0.0830 nan 0.1000 -0.0002
## 480 0.0770 nan 0.1000 -0.0002
## 500 0.0718 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2406 nan 0.1000 0.0359
## 2 1.1777 nan 0.1000 0.0309
## 3 1.1210 nan 0.1000 0.0250
## 4 1.0721 nan 0.1000 0.0212
## 5 1.0292 nan 0.1000 0.0181
## 6 0.9870 nan 0.1000 0.0182
## 7 0.9525 nan 0.1000 0.0141
## 8 0.9197 nan 0.1000 0.0135
## 9 0.8914 nan 0.1000 0.0118
## 10 0.8641 nan 0.1000 0.0101
## 20 0.7186 nan 0.1000 0.0029
## 40 0.5927 nan 0.1000 -0.0012
## 60 0.5180 nan 0.1000 0.0002
## 80 0.4649 nan 0.1000 -0.0010
## 100 0.4102 nan 0.1000 -0.0007
## 120 0.3739 nan 0.1000 -0.0016
## 140 0.3376 nan 0.1000 -0.0007
## 160 0.3065 nan 0.1000 -0.0008
## 180 0.2809 nan 0.1000 -0.0013
## 200 0.2587 nan 0.1000 -0.0008
## 220 0.2378 nan 0.1000 -0.0010
## 240 0.2173 nan 0.1000 -0.0005
## 260 0.1983 nan 0.1000 -0.0003
## 280 0.1837 nan 0.1000 -0.0007
## 300 0.1711 nan 0.1000 -0.0005
## 320 0.1589 nan 0.1000 -0.0008
## 340 0.1484 nan 0.1000 -0.0006
## 360 0.1374 nan 0.1000 -0.0003
## 380 0.1268 nan 0.1000 -0.0002
## 400 0.1173 nan 0.1000 -0.0001
## 420 0.1086 nan 0.1000 -0.0002
## 440 0.1006 nan 0.1000 -0.0003
## 460 0.0927 nan 0.1000 -0.0005
## 480 0.0854 nan 0.1000 -0.0002
## 500 0.0789 nan 0.1000 -0.0004
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2268 nan 0.1000 0.0457
## 2 1.1601 nan 0.1000 0.0300
## 3 1.0936 nan 0.1000 0.0286
## 4 1.0402 nan 0.1000 0.0235
## 5 0.9945 nan 0.1000 0.0185
## 6 0.9552 nan 0.1000 0.0135
## 7 0.9243 nan 0.1000 0.0111
## 8 0.8939 nan 0.1000 0.0100
## 9 0.8679 nan 0.1000 0.0102
## 10 0.8462 nan 0.1000 0.0060
## 20 0.6707 nan 0.1000 -0.0001
## 40 0.5270 nan 0.1000 0.0014
## 60 0.4319 nan 0.1000 0.0002
## 80 0.3796 nan 0.1000 -0.0016
## 100 0.3275 nan 0.1000 0.0005
## 120 0.2838 nan 0.1000 -0.0002
## 140 0.2486 nan 0.1000 -0.0002
## 160 0.2166 nan 0.1000 -0.0005
## 180 0.1921 nan 0.1000 -0.0008
## 200 0.1700 nan 0.1000 -0.0009
## 220 0.1512 nan 0.1000 -0.0005
## 240 0.1339 nan 0.1000 -0.0001
## 260 0.1189 nan 0.1000 -0.0002
## 280 0.1069 nan 0.1000 -0.0003
## 300 0.0966 nan 0.1000 -0.0002
## 320 0.0873 nan 0.1000 -0.0002
## 340 0.0789 nan 0.1000 -0.0001
## 360 0.0716 nan 0.1000 -0.0001
## 380 0.0649 nan 0.1000 -0.0002
## 400 0.0594 nan 0.1000 0.0000
## 420 0.0539 nan 0.1000 -0.0001
## 440 0.0485 nan 0.1000 -0.0000
## 460 0.0441 nan 0.1000 -0.0002
## 480 0.0402 nan 0.1000 -0.0001
## 500 0.0365 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2358 nan 0.1000 0.0413
## 2 1.1594 nan 0.1000 0.0339
## 3 1.0990 nan 0.1000 0.0283
## 4 1.0473 nan 0.1000 0.0208
## 5 0.9968 nan 0.1000 0.0204
## 6 0.9544 nan 0.1000 0.0176
## 7 0.9158 nan 0.1000 0.0160
## 8 0.8864 nan 0.1000 0.0091
## 9 0.8577 nan 0.1000 0.0120
## 10 0.8328 nan 0.1000 0.0091
## 20 0.6825 nan 0.1000 0.0028
## 40 0.5438 nan 0.1000 -0.0020
## 60 0.4611 nan 0.1000 -0.0011
## 80 0.3940 nan 0.1000 -0.0005
## 100 0.3326 nan 0.1000 -0.0005
## 120 0.2868 nan 0.1000 -0.0015
## 140 0.2489 nan 0.1000 -0.0004
## 160 0.2187 nan 0.1000 -0.0010
## 180 0.1934 nan 0.1000 -0.0010
## 200 0.1716 nan 0.1000 -0.0004
## 220 0.1516 nan 0.1000 -0.0006
## 240 0.1351 nan 0.1000 -0.0008
## 260 0.1208 nan 0.1000 -0.0001
## 280 0.1083 nan 0.1000 -0.0004
## 300 0.0968 nan 0.1000 -0.0002
## 320 0.0879 nan 0.1000 -0.0006
## 340 0.0797 nan 0.1000 -0.0002
## 360 0.0716 nan 0.1000 -0.0001
## 380 0.0647 nan 0.1000 -0.0002
## 400 0.0590 nan 0.1000 -0.0002
## 420 0.0530 nan 0.1000 -0.0003
## 440 0.0487 nan 0.1000 -0.0001
## 460 0.0447 nan 0.1000 -0.0002
## 480 0.0410 nan 0.1000 -0.0001
## 500 0.0371 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2352 nan 0.1000 0.0386
## 2 1.1654 nan 0.1000 0.0314
## 3 1.0990 nan 0.1000 0.0295
## 4 1.0449 nan 0.1000 0.0228
## 5 0.9996 nan 0.1000 0.0200
## 6 0.9577 nan 0.1000 0.0178
## 7 0.9249 nan 0.1000 0.0118
## 8 0.8991 nan 0.1000 0.0091
## 9 0.8657 nan 0.1000 0.0110
## 10 0.8410 nan 0.1000 0.0092
## 20 0.6850 nan 0.1000 0.0001
## 40 0.5577 nan 0.1000 -0.0003
## 60 0.4830 nan 0.1000 -0.0013
## 80 0.4130 nan 0.1000 -0.0021
## 100 0.3626 nan 0.1000 0.0006
## 120 0.3188 nan 0.1000 -0.0015
## 140 0.2815 nan 0.1000 -0.0006
## 160 0.2493 nan 0.1000 -0.0013
## 180 0.2202 nan 0.1000 -0.0013
## 200 0.1964 nan 0.1000 -0.0002
## 220 0.1770 nan 0.1000 -0.0005
## 240 0.1611 nan 0.1000 -0.0007
## 260 0.1443 nan 0.1000 -0.0005
## 280 0.1300 nan 0.1000 -0.0008
## 300 0.1168 nan 0.1000 -0.0003
## 320 0.1069 nan 0.1000 -0.0005
## 340 0.0964 nan 0.1000 -0.0001
## 360 0.0870 nan 0.1000 -0.0003
## 380 0.0800 nan 0.1000 -0.0005
## 400 0.0729 nan 0.1000 -0.0002
## 420 0.0662 nan 0.1000 -0.0004
## 440 0.0608 nan 0.1000 -0.0004
## 460 0.0548 nan 0.1000 -0.0003
## 480 0.0496 nan 0.1000 -0.0001
## 500 0.0455 nan 0.1000 -0.0003
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2355 nan 0.1000 0.0332
## 2 1.1565 nan 0.1000 0.0372
## 3 1.0930 nan 0.1000 0.0277
## 4 1.0372 nan 0.1000 0.0231
## 5 0.9927 nan 0.1000 0.0176
## 6 0.9480 nan 0.1000 0.0155
## 7 0.9091 nan 0.1000 0.0170
## 8 0.8783 nan 0.1000 0.0124
## 9 0.8468 nan 0.1000 0.0109
## 10 0.8215 nan 0.1000 0.0096
## 20 0.6506 nan 0.1000 0.0017
## 40 0.4879 nan 0.1000 -0.0004
## 60 0.4003 nan 0.1000 -0.0001
## 80 0.3321 nan 0.1000 -0.0004
## 100 0.2853 nan 0.1000 0.0004
## 120 0.2420 nan 0.1000 -0.0002
## 140 0.2057 nan 0.1000 0.0001
## 160 0.1755 nan 0.1000 -0.0002
## 180 0.1505 nan 0.1000 -0.0002
## 200 0.1311 nan 0.1000 -0.0001
## 220 0.1144 nan 0.1000 0.0000
## 240 0.0999 nan 0.1000 -0.0004
## 260 0.0880 nan 0.1000 -0.0001
## 280 0.0772 nan 0.1000 -0.0002
## 300 0.0682 nan 0.1000 -0.0001
## 320 0.0602 nan 0.1000 -0.0002
## 340 0.0527 nan 0.1000 -0.0001
## 360 0.0466 nan 0.1000 -0.0001
## 380 0.0411 nan 0.1000 -0.0001
## 400 0.0369 nan 0.1000 -0.0001
## 420 0.0321 nan 0.1000 -0.0001
## 440 0.0287 nan 0.1000 -0.0001
## 460 0.0255 nan 0.1000 -0.0002
## 480 0.0227 nan 0.1000 -0.0001
## 500 0.0204 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2183 nan 0.1000 0.0402
## 2 1.1477 nan 0.1000 0.0283
## 3 1.0855 nan 0.1000 0.0331
## 4 1.0329 nan 0.1000 0.0230
## 5 0.9858 nan 0.1000 0.0181
## 6 0.9398 nan 0.1000 0.0156
## 7 0.9030 nan 0.1000 0.0142
## 8 0.8684 nan 0.1000 0.0106
## 9 0.8368 nan 0.1000 0.0115
## 10 0.8114 nan 0.1000 0.0090
## 20 0.6492 nan 0.1000 0.0032
## 40 0.5044 nan 0.1000 0.0003
## 60 0.4109 nan 0.1000 -0.0007
## 80 0.3421 nan 0.1000 -0.0004
## 100 0.2889 nan 0.1000 -0.0003
## 120 0.2409 nan 0.1000 -0.0005
## 140 0.2003 nan 0.1000 -0.0002
## 160 0.1719 nan 0.1000 -0.0003
## 180 0.1501 nan 0.1000 -0.0004
## 200 0.1312 nan 0.1000 -0.0004
## 220 0.1140 nan 0.1000 -0.0002
## 240 0.1001 nan 0.1000 -0.0004
## 260 0.0882 nan 0.1000 -0.0003
## 280 0.0783 nan 0.1000 -0.0002
## 300 0.0683 nan 0.1000 -0.0001
## 320 0.0609 nan 0.1000 -0.0003
## 340 0.0541 nan 0.1000 -0.0001
## 360 0.0480 nan 0.1000 0.0001
## 380 0.0429 nan 0.1000 -0.0001
## 400 0.0381 nan 0.1000 -0.0002
## 420 0.0342 nan 0.1000 -0.0001
## 440 0.0305 nan 0.1000 -0.0001
## 460 0.0271 nan 0.1000 -0.0001
## 480 0.0246 nan 0.1000 -0.0001
## 500 0.0218 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2368 nan 0.1000 0.0358
## 2 1.1576 nan 0.1000 0.0361
## 3 1.0924 nan 0.1000 0.0305
## 4 1.0330 nan 0.1000 0.0239
## 5 0.9790 nan 0.1000 0.0233
## 6 0.9393 nan 0.1000 0.0156
## 7 0.9022 nan 0.1000 0.0147
## 8 0.8726 nan 0.1000 0.0102
## 9 0.8453 nan 0.1000 0.0084
## 10 0.8206 nan 0.1000 0.0120
## 20 0.6593 nan 0.1000 0.0028
## 40 0.5233 nan 0.1000 0.0006
## 60 0.4412 nan 0.1000 -0.0017
## 80 0.3734 nan 0.1000 -0.0010
## 100 0.3172 nan 0.1000 -0.0006
## 120 0.2729 nan 0.1000 -0.0011
## 140 0.2367 nan 0.1000 -0.0009
## 160 0.2019 nan 0.1000 -0.0012
## 180 0.1769 nan 0.1000 -0.0003
## 200 0.1559 nan 0.1000 -0.0007
## 220 0.1388 nan 0.1000 -0.0005
## 240 0.1220 nan 0.1000 -0.0004
## 260 0.1062 nan 0.1000 -0.0002
## 280 0.0942 nan 0.1000 -0.0004
## 300 0.0836 nan 0.1000 -0.0002
## 320 0.0740 nan 0.1000 -0.0001
## 340 0.0664 nan 0.1000 -0.0003
## 360 0.0597 nan 0.1000 -0.0002
## 380 0.0534 nan 0.1000 -0.0003
## 400 0.0474 nan 0.1000 -0.0002
## 420 0.0423 nan 0.1000 -0.0002
## 440 0.0385 nan 0.1000 -0.0001
## 460 0.0344 nan 0.1000 -0.0002
## 480 0.0305 nan 0.1000 -0.0001
## 500 0.0274 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3199 nan 0.0010 0.0003
## 2 1.3190 nan 0.0010 0.0004
## 3 1.3182 nan 0.0010 0.0004
## 4 1.3174 nan 0.0010 0.0004
## 5 1.3166 nan 0.0010 0.0003
## 6 1.3159 nan 0.0010 0.0003
## 7 1.3151 nan 0.0010 0.0003
## 8 1.3143 nan 0.0010 0.0004
## 9 1.3136 nan 0.0010 0.0003
## 10 1.3128 nan 0.0010 0.0004
## 20 1.3050 nan 0.0010 0.0004
## 40 1.2895 nan 0.0010 0.0003
## 60 1.2745 nan 0.0010 0.0004
## 80 1.2603 nan 0.0010 0.0003
## 100 1.2462 nan 0.0010 0.0003
## 120 1.2325 nan 0.0010 0.0003
## 140 1.2193 nan 0.0010 0.0003
## 160 1.2065 nan 0.0010 0.0003
## 180 1.1942 nan 0.0010 0.0002
## 200 1.1820 nan 0.0010 0.0002
## 220 1.1702 nan 0.0010 0.0002
## 240 1.1586 nan 0.0010 0.0003
## 260 1.1473 nan 0.0010 0.0002
## 280 1.1364 nan 0.0010 0.0003
## 300 1.1263 nan 0.0010 0.0002
## 320 1.1164 nan 0.0010 0.0002
## 340 1.1066 nan 0.0010 0.0002
## 360 1.0969 nan 0.0010 0.0002
## 380 1.0874 nan 0.0010 0.0002
## 400 1.0780 nan 0.0010 0.0002
## 420 1.0690 nan 0.0010 0.0002
## 440 1.0604 nan 0.0010 0.0002
## 460 1.0520 nan 0.0010 0.0002
## 480 1.0439 nan 0.0010 0.0001
## 500 1.0359 nan 0.0010 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3191 nan 0.0010 0.0003
## 3 1.3183 nan 0.0010 0.0003
## 4 1.3175 nan 0.0010 0.0004
## 5 1.3167 nan 0.0010 0.0003
## 6 1.3159 nan 0.0010 0.0004
## 7 1.3152 nan 0.0010 0.0003
## 8 1.3144 nan 0.0010 0.0003
## 9 1.3136 nan 0.0010 0.0004
## 10 1.3127 nan 0.0010 0.0004
## 20 1.3051 nan 0.0010 0.0003
## 40 1.2898 nan 0.0010 0.0004
## 60 1.2748 nan 0.0010 0.0004
## 80 1.2602 nan 0.0010 0.0003
## 100 1.2463 nan 0.0010 0.0003
## 120 1.2327 nan 0.0010 0.0003
## 140 1.2200 nan 0.0010 0.0003
## 160 1.2070 nan 0.0010 0.0003
## 180 1.1945 nan 0.0010 0.0003
## 200 1.1826 nan 0.0010 0.0003
## 220 1.1712 nan 0.0010 0.0002
## 240 1.1602 nan 0.0010 0.0002
## 260 1.1493 nan 0.0010 0.0002
## 280 1.1384 nan 0.0010 0.0003
## 300 1.1277 nan 0.0010 0.0002
## 320 1.1175 nan 0.0010 0.0003
## 340 1.1079 nan 0.0010 0.0002
## 360 1.0983 nan 0.0010 0.0002
## 380 1.0892 nan 0.0010 0.0002
## 400 1.0801 nan 0.0010 0.0002
## 420 1.0710 nan 0.0010 0.0002
## 440 1.0624 nan 0.0010 0.0002
## 460 1.0539 nan 0.0010 0.0002
## 480 1.0456 nan 0.0010 0.0002
## 500 1.0376 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3199 nan 0.0010 0.0004
## 2 1.3191 nan 0.0010 0.0004
## 3 1.3182 nan 0.0010 0.0004
## 4 1.3174 nan 0.0010 0.0004
## 5 1.3166 nan 0.0010 0.0004
## 6 1.3158 nan 0.0010 0.0004
## 7 1.3150 nan 0.0010 0.0004
## 8 1.3142 nan 0.0010 0.0004
## 9 1.3133 nan 0.0010 0.0003
## 10 1.3124 nan 0.0010 0.0004
## 20 1.3044 nan 0.0010 0.0003
## 40 1.2889 nan 0.0010 0.0004
## 60 1.2740 nan 0.0010 0.0003
## 80 1.2595 nan 0.0010 0.0003
## 100 1.2457 nan 0.0010 0.0003
## 120 1.2322 nan 0.0010 0.0003
## 140 1.2190 nan 0.0010 0.0003
## 160 1.2062 nan 0.0010 0.0003
## 180 1.1941 nan 0.0010 0.0003
## 200 1.1822 nan 0.0010 0.0003
## 220 1.1704 nan 0.0010 0.0002
## 240 1.1592 nan 0.0010 0.0002
## 260 1.1486 nan 0.0010 0.0002
## 280 1.1379 nan 0.0010 0.0002
## 300 1.1273 nan 0.0010 0.0002
## 320 1.1173 nan 0.0010 0.0002
## 340 1.1076 nan 0.0010 0.0002
## 360 1.0977 nan 0.0010 0.0002
## 380 1.0885 nan 0.0010 0.0002
## 400 1.0795 nan 0.0010 0.0002
## 420 1.0707 nan 0.0010 0.0002
## 440 1.0623 nan 0.0010 0.0002
## 460 1.0541 nan 0.0010 0.0002
## 480 1.0459 nan 0.0010 0.0002
## 500 1.0379 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3180 nan 0.0010 0.0004
## 4 1.3171 nan 0.0010 0.0004
## 5 1.3162 nan 0.0010 0.0004
## 6 1.3154 nan 0.0010 0.0004
## 7 1.3146 nan 0.0010 0.0004
## 8 1.3137 nan 0.0010 0.0004
## 9 1.3129 nan 0.0010 0.0004
## 10 1.3121 nan 0.0010 0.0004
## 20 1.3035 nan 0.0010 0.0004
## 40 1.2869 nan 0.0010 0.0004
## 60 1.2708 nan 0.0010 0.0004
## 80 1.2554 nan 0.0010 0.0003
## 100 1.2401 nan 0.0010 0.0003
## 120 1.2259 nan 0.0010 0.0003
## 140 1.2121 nan 0.0010 0.0002
## 160 1.1985 nan 0.0010 0.0003
## 180 1.1850 nan 0.0010 0.0003
## 200 1.1721 nan 0.0010 0.0003
## 220 1.1596 nan 0.0010 0.0003
## 240 1.1477 nan 0.0010 0.0003
## 260 1.1362 nan 0.0010 0.0002
## 280 1.1251 nan 0.0010 0.0002
## 300 1.1140 nan 0.0010 0.0002
## 320 1.1033 nan 0.0010 0.0002
## 340 1.0930 nan 0.0010 0.0002
## 360 1.0828 nan 0.0010 0.0002
## 380 1.0729 nan 0.0010 0.0002
## 400 1.0631 nan 0.0010 0.0002
## 420 1.0537 nan 0.0010 0.0002
## 440 1.0444 nan 0.0010 0.0002
## 460 1.0355 nan 0.0010 0.0002
## 480 1.0268 nan 0.0010 0.0002
## 500 1.0185 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3190 nan 0.0010 0.0004
## 3 1.3181 nan 0.0010 0.0004
## 4 1.3172 nan 0.0010 0.0004
## 5 1.3163 nan 0.0010 0.0004
## 6 1.3154 nan 0.0010 0.0004
## 7 1.3145 nan 0.0010 0.0004
## 8 1.3136 nan 0.0010 0.0004
## 9 1.3127 nan 0.0010 0.0004
## 10 1.3118 nan 0.0010 0.0004
## 20 1.3032 nan 0.0010 0.0004
## 40 1.2869 nan 0.0010 0.0004
## 60 1.2707 nan 0.0010 0.0004
## 80 1.2554 nan 0.0010 0.0003
## 100 1.2404 nan 0.0010 0.0003
## 120 1.2262 nan 0.0010 0.0003
## 140 1.2121 nan 0.0010 0.0003
## 160 1.1988 nan 0.0010 0.0003
## 180 1.1857 nan 0.0010 0.0002
## 200 1.1726 nan 0.0010 0.0003
## 220 1.1602 nan 0.0010 0.0003
## 240 1.1483 nan 0.0010 0.0003
## 260 1.1366 nan 0.0010 0.0002
## 280 1.1254 nan 0.0010 0.0002
## 300 1.1143 nan 0.0010 0.0002
## 320 1.1036 nan 0.0010 0.0002
## 340 1.0935 nan 0.0010 0.0002
## 360 1.0838 nan 0.0010 0.0002
## 380 1.0739 nan 0.0010 0.0002
## 400 1.0642 nan 0.0010 0.0002
## 420 1.0550 nan 0.0010 0.0002
## 440 1.0459 nan 0.0010 0.0002
## 460 1.0372 nan 0.0010 0.0002
## 480 1.0285 nan 0.0010 0.0002
## 500 1.0201 nan 0.0010 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3180 nan 0.0010 0.0004
## 4 1.3171 nan 0.0010 0.0004
## 5 1.3162 nan 0.0010 0.0004
## 6 1.3154 nan 0.0010 0.0004
## 7 1.3146 nan 0.0010 0.0004
## 8 1.3137 nan 0.0010 0.0004
## 9 1.3128 nan 0.0010 0.0004
## 10 1.3120 nan 0.0010 0.0004
## 20 1.3036 nan 0.0010 0.0003
## 40 1.2873 nan 0.0010 0.0003
## 60 1.2715 nan 0.0010 0.0004
## 80 1.2560 nan 0.0010 0.0003
## 100 1.2417 nan 0.0010 0.0004
## 120 1.2273 nan 0.0010 0.0003
## 140 1.2136 nan 0.0010 0.0003
## 160 1.2003 nan 0.0010 0.0003
## 180 1.1874 nan 0.0010 0.0003
## 200 1.1748 nan 0.0010 0.0003
## 220 1.1624 nan 0.0010 0.0003
## 240 1.1505 nan 0.0010 0.0003
## 260 1.1388 nan 0.0010 0.0002
## 280 1.1274 nan 0.0010 0.0003
## 300 1.1166 nan 0.0010 0.0002
## 320 1.1061 nan 0.0010 0.0002
## 340 1.0960 nan 0.0010 0.0002
## 360 1.0858 nan 0.0010 0.0002
## 380 1.0760 nan 0.0010 0.0002
## 400 1.0665 nan 0.0010 0.0002
## 420 1.0573 nan 0.0010 0.0002
## 440 1.0482 nan 0.0010 0.0002
## 460 1.0395 nan 0.0010 0.0002
## 480 1.0309 nan 0.0010 0.0002
## 500 1.0226 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3180 nan 0.0010 0.0004
## 4 1.3170 nan 0.0010 0.0004
## 5 1.3162 nan 0.0010 0.0004
## 6 1.3153 nan 0.0010 0.0004
## 7 1.3145 nan 0.0010 0.0004
## 8 1.3136 nan 0.0010 0.0004
## 9 1.3127 nan 0.0010 0.0004
## 10 1.3118 nan 0.0010 0.0004
## 20 1.3027 nan 0.0010 0.0004
## 40 1.2856 nan 0.0010 0.0004
## 60 1.2686 nan 0.0010 0.0004
## 80 1.2522 nan 0.0010 0.0003
## 100 1.2366 nan 0.0010 0.0004
## 120 1.2214 nan 0.0010 0.0003
## 140 1.2068 nan 0.0010 0.0003
## 160 1.1928 nan 0.0010 0.0003
## 180 1.1792 nan 0.0010 0.0003
## 200 1.1658 nan 0.0010 0.0003
## 220 1.1529 nan 0.0010 0.0003
## 240 1.1404 nan 0.0010 0.0003
## 260 1.1281 nan 0.0010 0.0002
## 280 1.1162 nan 0.0010 0.0002
## 300 1.1046 nan 0.0010 0.0003
## 320 1.0934 nan 0.0010 0.0002
## 340 1.0825 nan 0.0010 0.0002
## 360 1.0717 nan 0.0010 0.0002
## 380 1.0613 nan 0.0010 0.0001
## 400 1.0516 nan 0.0010 0.0002
## 420 1.0419 nan 0.0010 0.0002
## 440 1.0322 nan 0.0010 0.0002
## 460 1.0229 nan 0.0010 0.0002
## 480 1.0139 nan 0.0010 0.0001
## 500 1.0050 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3179 nan 0.0010 0.0004
## 4 1.3170 nan 0.0010 0.0004
## 5 1.3161 nan 0.0010 0.0004
## 6 1.3151 nan 0.0010 0.0004
## 7 1.3143 nan 0.0010 0.0004
## 8 1.3134 nan 0.0010 0.0004
## 9 1.3125 nan 0.0010 0.0005
## 10 1.3116 nan 0.0010 0.0004
## 20 1.3028 nan 0.0010 0.0003
## 40 1.2858 nan 0.0010 0.0004
## 60 1.2697 nan 0.0010 0.0003
## 80 1.2538 nan 0.0010 0.0004
## 100 1.2381 nan 0.0010 0.0003
## 120 1.2230 nan 0.0010 0.0003
## 140 1.2086 nan 0.0010 0.0003
## 160 1.1942 nan 0.0010 0.0003
## 180 1.1806 nan 0.0010 0.0003
## 200 1.1674 nan 0.0010 0.0003
## 220 1.1547 nan 0.0010 0.0003
## 240 1.1425 nan 0.0010 0.0002
## 260 1.1302 nan 0.0010 0.0003
## 280 1.1180 nan 0.0010 0.0002
## 300 1.1068 nan 0.0010 0.0002
## 320 1.0959 nan 0.0010 0.0002
## 340 1.0848 nan 0.0010 0.0002
## 360 1.0741 nan 0.0010 0.0002
## 380 1.0640 nan 0.0010 0.0002
## 400 1.0543 nan 0.0010 0.0002
## 420 1.0445 nan 0.0010 0.0002
## 440 1.0351 nan 0.0010 0.0002
## 460 1.0259 nan 0.0010 0.0002
## 480 1.0169 nan 0.0010 0.0002
## 500 1.0082 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3180 nan 0.0010 0.0004
## 4 1.3171 nan 0.0010 0.0004
## 5 1.3162 nan 0.0010 0.0004
## 6 1.3153 nan 0.0010 0.0004
## 7 1.3145 nan 0.0010 0.0004
## 8 1.3135 nan 0.0010 0.0004
## 9 1.3126 nan 0.0010 0.0004
## 10 1.3117 nan 0.0010 0.0004
## 20 1.3030 nan 0.0010 0.0004
## 40 1.2861 nan 0.0010 0.0004
## 60 1.2697 nan 0.0010 0.0004
## 80 1.2534 nan 0.0010 0.0004
## 100 1.2380 nan 0.0010 0.0003
## 120 1.2234 nan 0.0010 0.0003
## 140 1.2090 nan 0.0010 0.0003
## 160 1.1950 nan 0.0010 0.0003
## 180 1.1816 nan 0.0010 0.0003
## 200 1.1686 nan 0.0010 0.0003
## 220 1.1557 nan 0.0010 0.0003
## 240 1.1434 nan 0.0010 0.0002
## 260 1.1318 nan 0.0010 0.0003
## 280 1.1200 nan 0.0010 0.0003
## 300 1.1087 nan 0.0010 0.0002
## 320 1.0975 nan 0.0010 0.0003
## 340 1.0867 nan 0.0010 0.0003
## 360 1.0761 nan 0.0010 0.0002
## 380 1.0659 nan 0.0010 0.0002
## 400 1.0561 nan 0.0010 0.0002
## 420 1.0466 nan 0.0010 0.0002
## 440 1.0373 nan 0.0010 0.0002
## 460 1.0279 nan 0.0010 0.0002
## 480 1.0190 nan 0.0010 0.0002
## 500 1.0103 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3128 nan 0.0100 0.0038
## 2 1.3052 nan 0.0100 0.0036
## 3 1.2974 nan 0.0100 0.0036
## 4 1.2897 nan 0.0100 0.0034
## 5 1.2819 nan 0.0100 0.0037
## 6 1.2739 nan 0.0100 0.0037
## 7 1.2659 nan 0.0100 0.0036
## 8 1.2584 nan 0.0100 0.0035
## 9 1.2515 nan 0.0100 0.0034
## 10 1.2443 nan 0.0100 0.0030
## 20 1.1811 nan 0.0100 0.0025
## 40 1.0754 nan 0.0100 0.0017
## 60 0.9959 nan 0.0100 0.0013
## 80 0.9353 nan 0.0100 0.0008
## 100 0.8837 nan 0.0100 0.0008
## 120 0.8427 nan 0.0100 0.0003
## 140 0.8080 nan 0.0100 0.0003
## 160 0.7799 nan 0.0100 0.0003
## 180 0.7555 nan 0.0100 0.0003
## 200 0.7334 nan 0.0100 0.0003
## 220 0.7147 nan 0.0100 0.0001
## 240 0.6973 nan 0.0100 0.0001
## 260 0.6810 nan 0.0100 0.0001
## 280 0.6678 nan 0.0100 0.0001
## 300 0.6556 nan 0.0100 0.0002
## 320 0.6433 nan 0.0100 0.0000
## 340 0.6311 nan 0.0100 0.0002
## 360 0.6207 nan 0.0100 0.0001
## 380 0.6096 nan 0.0100 -0.0000
## 400 0.6000 nan 0.0100 0.0000
## 420 0.5906 nan 0.0100 -0.0001
## 440 0.5808 nan 0.0100 0.0001
## 460 0.5723 nan 0.0100 0.0001
## 480 0.5635 nan 0.0100 -0.0000
## 500 0.5548 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3135 nan 0.0100 0.0032
## 2 1.3058 nan 0.0100 0.0038
## 3 1.2980 nan 0.0100 0.0035
## 4 1.2902 nan 0.0100 0.0036
## 5 1.2817 nan 0.0100 0.0036
## 6 1.2751 nan 0.0100 0.0031
## 7 1.2677 nan 0.0100 0.0034
## 8 1.2601 nan 0.0100 0.0033
## 9 1.2533 nan 0.0100 0.0029
## 10 1.2463 nan 0.0100 0.0029
## 20 1.1820 nan 0.0100 0.0025
## 40 1.0800 nan 0.0100 0.0019
## 60 0.9970 nan 0.0100 0.0016
## 80 0.9344 nan 0.0100 0.0009
## 100 0.8853 nan 0.0100 0.0007
## 120 0.8445 nan 0.0100 0.0005
## 140 0.8113 nan 0.0100 0.0006
## 160 0.7832 nan 0.0100 0.0002
## 180 0.7594 nan 0.0100 0.0001
## 200 0.7395 nan 0.0100 0.0002
## 220 0.7208 nan 0.0100 0.0003
## 240 0.7033 nan 0.0100 0.0002
## 260 0.6879 nan 0.0100 -0.0000
## 280 0.6730 nan 0.0100 0.0001
## 300 0.6607 nan 0.0100 0.0000
## 320 0.6489 nan 0.0100 0.0001
## 340 0.6386 nan 0.0100 0.0000
## 360 0.6280 nan 0.0100 -0.0002
## 380 0.6186 nan 0.0100 -0.0001
## 400 0.6090 nan 0.0100 0.0001
## 420 0.6000 nan 0.0100 -0.0002
## 440 0.5924 nan 0.0100 -0.0000
## 460 0.5829 nan 0.0100 0.0002
## 480 0.5741 nan 0.0100 -0.0001
## 500 0.5660 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3126 nan 0.0100 0.0035
## 2 1.3041 nan 0.0100 0.0037
## 3 1.2970 nan 0.0100 0.0032
## 4 1.2893 nan 0.0100 0.0039
## 5 1.2825 nan 0.0100 0.0031
## 6 1.2747 nan 0.0100 0.0037
## 7 1.2665 nan 0.0100 0.0037
## 8 1.2594 nan 0.0100 0.0035
## 9 1.2533 nan 0.0100 0.0027
## 10 1.2467 nan 0.0100 0.0029
## 20 1.1839 nan 0.0100 0.0025
## 40 1.0796 nan 0.0100 0.0019
## 60 0.9999 nan 0.0100 0.0015
## 80 0.9400 nan 0.0100 0.0009
## 100 0.8907 nan 0.0100 0.0009
## 120 0.8505 nan 0.0100 0.0005
## 140 0.8168 nan 0.0100 0.0007
## 160 0.7882 nan 0.0100 0.0004
## 180 0.7641 nan 0.0100 0.0003
## 200 0.7431 nan 0.0100 0.0002
## 220 0.7259 nan 0.0100 0.0002
## 240 0.7097 nan 0.0100 0.0001
## 260 0.6952 nan 0.0100 0.0002
## 280 0.6821 nan 0.0100 0.0001
## 300 0.6697 nan 0.0100 -0.0001
## 320 0.6581 nan 0.0100 0.0001
## 340 0.6482 nan 0.0100 -0.0001
## 360 0.6377 nan 0.0100 -0.0001
## 380 0.6274 nan 0.0100 0.0001
## 400 0.6185 nan 0.0100 -0.0001
## 420 0.6089 nan 0.0100 -0.0001
## 440 0.6002 nan 0.0100 -0.0000
## 460 0.5922 nan 0.0100 0.0001
## 480 0.5840 nan 0.0100 -0.0000
## 500 0.5757 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3114 nan 0.0100 0.0042
## 2 1.3026 nan 0.0100 0.0041
## 3 1.2950 nan 0.0100 0.0034
## 4 1.2868 nan 0.0100 0.0039
## 5 1.2790 nan 0.0100 0.0035
## 6 1.2711 nan 0.0100 0.0034
## 7 1.2635 nan 0.0100 0.0036
## 8 1.2561 nan 0.0100 0.0031
## 9 1.2481 nan 0.0100 0.0031
## 10 1.2400 nan 0.0100 0.0038
## 20 1.1704 nan 0.0100 0.0026
## 40 1.0598 nan 0.0100 0.0022
## 60 0.9768 nan 0.0100 0.0014
## 80 0.9089 nan 0.0100 0.0010
## 100 0.8563 nan 0.0100 0.0010
## 120 0.8150 nan 0.0100 0.0003
## 140 0.7799 nan 0.0100 0.0005
## 160 0.7492 nan 0.0100 0.0001
## 180 0.7242 nan 0.0100 0.0000
## 200 0.7012 nan 0.0100 0.0001
## 220 0.6804 nan 0.0100 0.0001
## 240 0.6607 nan 0.0100 0.0003
## 260 0.6427 nan 0.0100 0.0001
## 280 0.6266 nan 0.0100 0.0001
## 300 0.6126 nan 0.0100 0.0000
## 320 0.5976 nan 0.0100 0.0001
## 340 0.5842 nan 0.0100 0.0000
## 360 0.5708 nan 0.0100 0.0001
## 380 0.5585 nan 0.0100 0.0001
## 400 0.5472 nan 0.0100 -0.0001
## 420 0.5363 nan 0.0100 -0.0001
## 440 0.5272 nan 0.0100 0.0000
## 460 0.5165 nan 0.0100 -0.0001
## 480 0.5074 nan 0.0100 -0.0001
## 500 0.4982 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3123 nan 0.0100 0.0038
## 2 1.3037 nan 0.0100 0.0034
## 3 1.2949 nan 0.0100 0.0038
## 4 1.2866 nan 0.0100 0.0039
## 5 1.2783 nan 0.0100 0.0039
## 6 1.2699 nan 0.0100 0.0039
## 7 1.2625 nan 0.0100 0.0034
## 8 1.2547 nan 0.0100 0.0035
## 9 1.2470 nan 0.0100 0.0034
## 10 1.2395 nan 0.0100 0.0036
## 20 1.1732 nan 0.0100 0.0031
## 40 1.0655 nan 0.0100 0.0020
## 60 0.9813 nan 0.0100 0.0014
## 80 0.9166 nan 0.0100 0.0010
## 100 0.8645 nan 0.0100 0.0007
## 120 0.8211 nan 0.0100 0.0006
## 140 0.7867 nan 0.0100 0.0005
## 160 0.7567 nan 0.0100 0.0006
## 180 0.7316 nan 0.0100 0.0004
## 200 0.7071 nan 0.0100 -0.0001
## 220 0.6878 nan 0.0100 0.0002
## 240 0.6693 nan 0.0100 0.0001
## 260 0.6520 nan 0.0100 0.0001
## 280 0.6367 nan 0.0100 -0.0001
## 300 0.6226 nan 0.0100 0.0001
## 320 0.6092 nan 0.0100 0.0001
## 340 0.5967 nan 0.0100 -0.0001
## 360 0.5851 nan 0.0100 0.0001
## 380 0.5741 nan 0.0100 0.0000
## 400 0.5629 nan 0.0100 -0.0000
## 420 0.5520 nan 0.0100 -0.0001
## 440 0.5424 nan 0.0100 0.0001
## 460 0.5326 nan 0.0100 -0.0001
## 480 0.5228 nan 0.0100 0.0001
## 500 0.5138 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3122 nan 0.0100 0.0037
## 2 1.3036 nan 0.0100 0.0036
## 3 1.2943 nan 0.0100 0.0041
## 4 1.2867 nan 0.0100 0.0030
## 5 1.2791 nan 0.0100 0.0036
## 6 1.2714 nan 0.0100 0.0036
## 7 1.2643 nan 0.0100 0.0034
## 8 1.2568 nan 0.0100 0.0032
## 9 1.2489 nan 0.0100 0.0037
## 10 1.2413 nan 0.0100 0.0031
## 20 1.1741 nan 0.0100 0.0027
## 40 1.0649 nan 0.0100 0.0022
## 60 0.9831 nan 0.0100 0.0016
## 80 0.9197 nan 0.0100 0.0010
## 100 0.8690 nan 0.0100 0.0007
## 120 0.8286 nan 0.0100 0.0002
## 140 0.7935 nan 0.0100 0.0004
## 160 0.7653 nan 0.0100 0.0002
## 180 0.7397 nan 0.0100 0.0002
## 200 0.7180 nan 0.0100 0.0001
## 220 0.6974 nan 0.0100 0.0001
## 240 0.6792 nan 0.0100 -0.0001
## 260 0.6639 nan 0.0100 -0.0000
## 280 0.6477 nan 0.0100 0.0001
## 300 0.6351 nan 0.0100 0.0000
## 320 0.6221 nan 0.0100 -0.0000
## 340 0.6099 nan 0.0100 0.0001
## 360 0.5975 nan 0.0100 0.0001
## 380 0.5854 nan 0.0100 0.0000
## 400 0.5751 nan 0.0100 -0.0001
## 420 0.5640 nan 0.0100 0.0001
## 440 0.5553 nan 0.0100 -0.0001
## 460 0.5457 nan 0.0100 -0.0002
## 480 0.5364 nan 0.0100 -0.0000
## 500 0.5272 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3107 nan 0.0100 0.0046
## 2 1.3020 nan 0.0100 0.0039
## 3 1.2926 nan 0.0100 0.0039
## 4 1.2833 nan 0.0100 0.0041
## 5 1.2756 nan 0.0100 0.0036
## 6 1.2675 nan 0.0100 0.0036
## 7 1.2595 nan 0.0100 0.0036
## 8 1.2513 nan 0.0100 0.0038
## 9 1.2435 nan 0.0100 0.0031
## 10 1.2362 nan 0.0100 0.0032
## 20 1.1667 nan 0.0100 0.0025
## 40 1.0518 nan 0.0100 0.0024
## 60 0.9638 nan 0.0100 0.0015
## 80 0.8947 nan 0.0100 0.0008
## 100 0.8407 nan 0.0100 0.0009
## 120 0.7938 nan 0.0100 0.0007
## 140 0.7569 nan 0.0100 0.0004
## 160 0.7239 nan 0.0100 0.0003
## 180 0.6948 nan 0.0100 0.0004
## 200 0.6710 nan 0.0100 0.0002
## 220 0.6485 nan 0.0100 0.0002
## 240 0.6293 nan 0.0100 0.0000
## 260 0.6109 nan 0.0100 0.0002
## 280 0.5931 nan 0.0100 0.0002
## 300 0.5770 nan 0.0100 -0.0001
## 320 0.5621 nan 0.0100 0.0001
## 340 0.5481 nan 0.0100 0.0002
## 360 0.5348 nan 0.0100 0.0001
## 380 0.5227 nan 0.0100 0.0000
## 400 0.5113 nan 0.0100 -0.0000
## 420 0.5002 nan 0.0100 0.0000
## 440 0.4897 nan 0.0100 0.0001
## 460 0.4786 nan 0.0100 -0.0001
## 480 0.4687 nan 0.0100 -0.0000
## 500 0.4577 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3116 nan 0.0100 0.0043
## 2 1.3029 nan 0.0100 0.0040
## 3 1.2937 nan 0.0100 0.0040
## 4 1.2847 nan 0.0100 0.0044
## 5 1.2762 nan 0.0100 0.0036
## 6 1.2680 nan 0.0100 0.0036
## 7 1.2600 nan 0.0100 0.0035
## 8 1.2523 nan 0.0100 0.0032
## 9 1.2436 nan 0.0100 0.0038
## 10 1.2360 nan 0.0100 0.0032
## 20 1.1692 nan 0.0100 0.0030
## 40 1.0548 nan 0.0100 0.0022
## 60 0.9681 nan 0.0100 0.0017
## 80 0.8999 nan 0.0100 0.0010
## 100 0.8461 nan 0.0100 0.0004
## 120 0.8019 nan 0.0100 0.0007
## 140 0.7644 nan 0.0100 0.0003
## 160 0.7324 nan 0.0100 0.0003
## 180 0.7047 nan 0.0100 0.0003
## 200 0.6793 nan 0.0100 0.0001
## 220 0.6577 nan 0.0100 0.0002
## 240 0.6387 nan 0.0100 0.0001
## 260 0.6206 nan 0.0100 -0.0000
## 280 0.6051 nan 0.0100 -0.0000
## 300 0.5896 nan 0.0100 -0.0001
## 320 0.5743 nan 0.0100 0.0004
## 340 0.5606 nan 0.0100 0.0000
## 360 0.5475 nan 0.0100 0.0000
## 380 0.5341 nan 0.0100 0.0001
## 400 0.5224 nan 0.0100 -0.0000
## 420 0.5111 nan 0.0100 0.0000
## 440 0.5003 nan 0.0100 -0.0001
## 460 0.4901 nan 0.0100 -0.0000
## 480 0.4788 nan 0.0100 0.0001
## 500 0.4684 nan 0.0100 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3110 nan 0.0100 0.0043
## 2 1.3022 nan 0.0100 0.0039
## 3 1.2932 nan 0.0100 0.0043
## 4 1.2851 nan 0.0100 0.0032
## 5 1.2771 nan 0.0100 0.0036
## 6 1.2694 nan 0.0100 0.0034
## 7 1.2612 nan 0.0100 0.0036
## 8 1.2531 nan 0.0100 0.0035
## 9 1.2452 nan 0.0100 0.0038
## 10 1.2378 nan 0.0100 0.0032
## 20 1.1686 nan 0.0100 0.0029
## 40 1.0545 nan 0.0100 0.0022
## 60 0.9691 nan 0.0100 0.0015
## 80 0.9042 nan 0.0100 0.0014
## 100 0.8516 nan 0.0100 0.0010
## 120 0.8076 nan 0.0100 0.0006
## 140 0.7730 nan 0.0100 0.0007
## 160 0.7426 nan 0.0100 0.0005
## 180 0.7149 nan 0.0100 0.0004
## 200 0.6899 nan 0.0100 0.0003
## 220 0.6697 nan 0.0100 0.0000
## 240 0.6511 nan 0.0100 -0.0001
## 260 0.6341 nan 0.0100 0.0001
## 280 0.6189 nan 0.0100 0.0002
## 300 0.6031 nan 0.0100 -0.0000
## 320 0.5894 nan 0.0100 -0.0000
## 340 0.5755 nan 0.0100 0.0000
## 360 0.5632 nan 0.0100 0.0001
## 380 0.5513 nan 0.0100 -0.0000
## 400 0.5406 nan 0.0100 0.0001
## 420 0.5299 nan 0.0100 -0.0002
## 440 0.5191 nan 0.0100 -0.0001
## 460 0.5086 nan 0.0100 -0.0000
## 480 0.4989 nan 0.0100 -0.0000
## 500 0.4892 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2377 nan 0.1000 0.0394
## 2 1.1647 nan 0.1000 0.0329
## 3 1.1150 nan 0.1000 0.0207
## 4 1.0740 nan 0.1000 0.0184
## 5 1.0333 nan 0.1000 0.0198
## 6 0.9989 nan 0.1000 0.0106
## 7 0.9726 nan 0.1000 0.0074
## 8 0.9407 nan 0.1000 0.0112
## 9 0.9115 nan 0.1000 0.0129
## 10 0.8886 nan 0.1000 0.0096
## 20 0.7392 nan 0.1000 0.0025
## 40 0.6029 nan 0.1000 -0.0002
## 60 0.5196 nan 0.1000 -0.0012
## 80 0.4498 nan 0.1000 0.0003
## 100 0.3879 nan 0.1000 0.0005
## 120 0.3468 nan 0.1000 -0.0006
## 140 0.3144 nan 0.1000 -0.0008
## 160 0.2830 nan 0.1000 -0.0001
## 180 0.2573 nan 0.1000 -0.0002
## 200 0.2325 nan 0.1000 -0.0002
## 220 0.2111 nan 0.1000 -0.0006
## 240 0.1917 nan 0.1000 -0.0000
## 260 0.1740 nan 0.1000 -0.0000
## 280 0.1597 nan 0.1000 -0.0002
## 300 0.1455 nan 0.1000 0.0001
## 320 0.1328 nan 0.1000 -0.0003
## 340 0.1231 nan 0.1000 0.0000
## 360 0.1135 nan 0.1000 -0.0003
## 380 0.1043 nan 0.1000 -0.0002
## 400 0.0957 nan 0.1000 0.0001
## 420 0.0893 nan 0.1000 -0.0002
## 440 0.0822 nan 0.1000 -0.0002
## 460 0.0763 nan 0.1000 -0.0000
## 480 0.0704 nan 0.1000 -0.0000
## 500 0.0649 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2451 nan 0.1000 0.0349
## 2 1.1749 nan 0.1000 0.0335
## 3 1.1207 nan 0.1000 0.0219
## 4 1.0786 nan 0.1000 0.0180
## 5 1.0403 nan 0.1000 0.0138
## 6 1.0012 nan 0.1000 0.0170
## 7 0.9653 nan 0.1000 0.0147
## 8 0.9329 nan 0.1000 0.0127
## 9 0.9056 nan 0.1000 0.0112
## 10 0.8802 nan 0.1000 0.0106
## 20 0.7353 nan 0.1000 0.0017
## 40 0.6077 nan 0.1000 -0.0009
## 60 0.5234 nan 0.1000 -0.0009
## 80 0.4588 nan 0.1000 -0.0001
## 100 0.4046 nan 0.1000 0.0000
## 120 0.3583 nan 0.1000 -0.0009
## 140 0.3236 nan 0.1000 -0.0011
## 160 0.2920 nan 0.1000 -0.0014
## 180 0.2670 nan 0.1000 -0.0008
## 200 0.2430 nan 0.1000 -0.0006
## 220 0.2230 nan 0.1000 -0.0004
## 240 0.2042 nan 0.1000 -0.0005
## 260 0.1858 nan 0.1000 -0.0008
## 280 0.1710 nan 0.1000 -0.0004
## 300 0.1558 nan 0.1000 -0.0003
## 320 0.1418 nan 0.1000 -0.0004
## 340 0.1305 nan 0.1000 -0.0003
## 360 0.1202 nan 0.1000 -0.0006
## 380 0.1099 nan 0.1000 -0.0005
## 400 0.1011 nan 0.1000 -0.0001
## 420 0.0932 nan 0.1000 -0.0002
## 440 0.0862 nan 0.1000 -0.0001
## 460 0.0795 nan 0.1000 -0.0003
## 480 0.0734 nan 0.1000 -0.0003
## 500 0.0683 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2538 nan 0.1000 0.0265
## 2 1.1924 nan 0.1000 0.0268
## 3 1.1294 nan 0.1000 0.0274
## 4 1.0806 nan 0.1000 0.0233
## 5 1.0365 nan 0.1000 0.0205
## 6 1.0025 nan 0.1000 0.0152
## 7 0.9715 nan 0.1000 0.0114
## 8 0.9405 nan 0.1000 0.0142
## 9 0.9168 nan 0.1000 0.0062
## 10 0.8950 nan 0.1000 0.0091
## 20 0.7542 nan 0.1000 0.0015
## 40 0.6220 nan 0.1000 0.0018
## 60 0.5430 nan 0.1000 0.0003
## 80 0.4906 nan 0.1000 -0.0014
## 100 0.4405 nan 0.1000 -0.0008
## 120 0.3928 nan 0.1000 -0.0007
## 140 0.3533 nan 0.1000 -0.0007
## 160 0.3195 nan 0.1000 -0.0010
## 180 0.2912 nan 0.1000 -0.0010
## 200 0.2643 nan 0.1000 -0.0008
## 220 0.2390 nan 0.1000 -0.0012
## 240 0.2166 nan 0.1000 -0.0001
## 260 0.1984 nan 0.1000 -0.0012
## 280 0.1845 nan 0.1000 -0.0008
## 300 0.1683 nan 0.1000 -0.0004
## 320 0.1551 nan 0.1000 -0.0008
## 340 0.1421 nan 0.1000 -0.0003
## 360 0.1317 nan 0.1000 -0.0005
## 380 0.1221 nan 0.1000 -0.0001
## 400 0.1134 nan 0.1000 -0.0006
## 420 0.1050 nan 0.1000 -0.0003
## 440 0.0983 nan 0.1000 -0.0004
## 460 0.0917 nan 0.1000 -0.0002
## 480 0.0844 nan 0.1000 -0.0003
## 500 0.0787 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2352 nan 0.1000 0.0369
## 2 1.1665 nan 0.1000 0.0337
## 3 1.1090 nan 0.1000 0.0255
## 4 1.0583 nan 0.1000 0.0245
## 5 1.0133 nan 0.1000 0.0178
## 6 0.9767 nan 0.1000 0.0132
## 7 0.9436 nan 0.1000 0.0124
## 8 0.9075 nan 0.1000 0.0136
## 9 0.8817 nan 0.1000 0.0099
## 10 0.8601 nan 0.1000 0.0087
## 20 0.7040 nan 0.1000 0.0015
## 40 0.5596 nan 0.1000 0.0003
## 60 0.4619 nan 0.1000 0.0008
## 80 0.3900 nan 0.1000 -0.0007
## 100 0.3409 nan 0.1000 -0.0001
## 120 0.2949 nan 0.1000 -0.0011
## 140 0.2570 nan 0.1000 -0.0003
## 160 0.2273 nan 0.1000 -0.0002
## 180 0.2021 nan 0.1000 0.0000
## 200 0.1783 nan 0.1000 -0.0009
## 220 0.1605 nan 0.1000 -0.0005
## 240 0.1420 nan 0.1000 -0.0002
## 260 0.1271 nan 0.1000 -0.0003
## 280 0.1138 nan 0.1000 -0.0003
## 300 0.1008 nan 0.1000 -0.0000
## 320 0.0911 nan 0.1000 -0.0001
## 340 0.0825 nan 0.1000 -0.0003
## 360 0.0745 nan 0.1000 -0.0001
## 380 0.0673 nan 0.1000 -0.0001
## 400 0.0612 nan 0.1000 -0.0001
## 420 0.0555 nan 0.1000 -0.0002
## 440 0.0500 nan 0.1000 -0.0001
## 460 0.0457 nan 0.1000 -0.0001
## 480 0.0413 nan 0.1000 -0.0001
## 500 0.0374 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2408 nan 0.1000 0.0372
## 2 1.1738 nan 0.1000 0.0248
## 3 1.1183 nan 0.1000 0.0245
## 4 1.0663 nan 0.1000 0.0234
## 5 1.0204 nan 0.1000 0.0197
## 6 0.9858 nan 0.1000 0.0140
## 7 0.9552 nan 0.1000 0.0133
## 8 0.9246 nan 0.1000 0.0126
## 9 0.8899 nan 0.1000 0.0136
## 10 0.8644 nan 0.1000 0.0112
## 20 0.7156 nan 0.1000 0.0037
## 40 0.5734 nan 0.1000 -0.0011
## 60 0.4829 nan 0.1000 0.0004
## 80 0.4091 nan 0.1000 0.0004
## 100 0.3564 nan 0.1000 -0.0016
## 120 0.3171 nan 0.1000 -0.0009
## 140 0.2751 nan 0.1000 -0.0004
## 160 0.2426 nan 0.1000 -0.0014
## 180 0.2139 nan 0.1000 -0.0003
## 200 0.1886 nan 0.1000 -0.0011
## 220 0.1678 nan 0.1000 -0.0009
## 240 0.1495 nan 0.1000 -0.0004
## 260 0.1347 nan 0.1000 -0.0006
## 280 0.1227 nan 0.1000 -0.0002
## 300 0.1109 nan 0.1000 -0.0001
## 320 0.1002 nan 0.1000 -0.0004
## 340 0.0897 nan 0.1000 -0.0002
## 360 0.0799 nan 0.1000 -0.0003
## 380 0.0723 nan 0.1000 -0.0002
## 400 0.0652 nan 0.1000 -0.0001
## 420 0.0589 nan 0.1000 -0.0001
## 440 0.0539 nan 0.1000 -0.0002
## 460 0.0491 nan 0.1000 -0.0002
## 480 0.0449 nan 0.1000 -0.0002
## 500 0.0410 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2358 nan 0.1000 0.0365
## 2 1.1739 nan 0.1000 0.0252
## 3 1.1178 nan 0.1000 0.0265
## 4 1.0743 nan 0.1000 0.0185
## 5 1.0254 nan 0.1000 0.0218
## 6 0.9864 nan 0.1000 0.0149
## 7 0.9571 nan 0.1000 0.0117
## 8 0.9241 nan 0.1000 0.0098
## 9 0.8967 nan 0.1000 0.0099
## 10 0.8720 nan 0.1000 0.0093
## 20 0.7240 nan 0.1000 0.0020
## 40 0.5901 nan 0.1000 -0.0010
## 60 0.4930 nan 0.1000 0.0008
## 80 0.4156 nan 0.1000 -0.0020
## 100 0.3582 nan 0.1000 -0.0008
## 120 0.3154 nan 0.1000 -0.0009
## 140 0.2799 nan 0.1000 -0.0004
## 160 0.2474 nan 0.1000 -0.0007
## 180 0.2168 nan 0.1000 -0.0009
## 200 0.1917 nan 0.1000 -0.0004
## 220 0.1733 nan 0.1000 -0.0006
## 240 0.1565 nan 0.1000 -0.0003
## 260 0.1399 nan 0.1000 -0.0006
## 280 0.1259 nan 0.1000 -0.0003
## 300 0.1145 nan 0.1000 -0.0004
## 320 0.1023 nan 0.1000 -0.0003
## 340 0.0922 nan 0.1000 -0.0000
## 360 0.0831 nan 0.1000 -0.0002
## 380 0.0745 nan 0.1000 -0.0003
## 400 0.0683 nan 0.1000 -0.0000
## 420 0.0619 nan 0.1000 -0.0002
## 440 0.0565 nan 0.1000 -0.0002
## 460 0.0521 nan 0.1000 -0.0002
## 480 0.0478 nan 0.1000 -0.0001
## 500 0.0433 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2381 nan 0.1000 0.0364
## 2 1.1613 nan 0.1000 0.0347
## 3 1.1067 nan 0.1000 0.0240
## 4 1.0568 nan 0.1000 0.0194
## 5 1.0098 nan 0.1000 0.0198
## 6 0.9690 nan 0.1000 0.0160
## 7 0.9290 nan 0.1000 0.0127
## 8 0.8984 nan 0.1000 0.0117
## 9 0.8679 nan 0.1000 0.0122
## 10 0.8399 nan 0.1000 0.0101
## 20 0.6780 nan 0.1000 0.0000
## 40 0.5153 nan 0.1000 -0.0009
## 60 0.4154 nan 0.1000 -0.0007
## 80 0.3412 nan 0.1000 -0.0002
## 100 0.2875 nan 0.1000 -0.0011
## 120 0.2410 nan 0.1000 -0.0007
## 140 0.2035 nan 0.1000 -0.0001
## 160 0.1758 nan 0.1000 -0.0003
## 180 0.1514 nan 0.1000 -0.0004
## 200 0.1329 nan 0.1000 -0.0004
## 220 0.1162 nan 0.1000 -0.0004
## 240 0.1002 nan 0.1000 -0.0001
## 260 0.0884 nan 0.1000 -0.0002
## 280 0.0776 nan 0.1000 -0.0002
## 300 0.0691 nan 0.1000 0.0000
## 320 0.0608 nan 0.1000 -0.0001
## 340 0.0538 nan 0.1000 -0.0000
## 360 0.0471 nan 0.1000 -0.0000
## 380 0.0419 nan 0.1000 -0.0001
## 400 0.0369 nan 0.1000 -0.0000
## 420 0.0327 nan 0.1000 -0.0001
## 440 0.0291 nan 0.1000 -0.0000
## 460 0.0258 nan 0.1000 -0.0000
## 480 0.0230 nan 0.1000 -0.0000
## 500 0.0204 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2278 nan 0.1000 0.0391
## 2 1.1651 nan 0.1000 0.0313
## 3 1.1032 nan 0.1000 0.0215
## 4 1.0494 nan 0.1000 0.0230
## 5 1.0053 nan 0.1000 0.0193
## 6 0.9649 nan 0.1000 0.0147
## 7 0.9271 nan 0.1000 0.0149
## 8 0.8952 nan 0.1000 0.0096
## 9 0.8635 nan 0.1000 0.0123
## 10 0.8401 nan 0.1000 0.0075
## 20 0.6785 nan 0.1000 0.0033
## 40 0.5274 nan 0.1000 -0.0010
## 60 0.4233 nan 0.1000 -0.0004
## 80 0.3541 nan 0.1000 -0.0008
## 100 0.2965 nan 0.1000 0.0000
## 120 0.2500 nan 0.1000 0.0002
## 140 0.2136 nan 0.1000 -0.0006
## 160 0.1833 nan 0.1000 -0.0005
## 180 0.1592 nan 0.1000 -0.0005
## 200 0.1379 nan 0.1000 -0.0001
## 220 0.1208 nan 0.1000 -0.0001
## 240 0.1051 nan 0.1000 -0.0004
## 260 0.0933 nan 0.1000 -0.0003
## 280 0.0809 nan 0.1000 -0.0001
## 300 0.0716 nan 0.1000 -0.0003
## 320 0.0631 nan 0.1000 -0.0002
## 340 0.0562 nan 0.1000 -0.0002
## 360 0.0508 nan 0.1000 -0.0002
## 380 0.0451 nan 0.1000 -0.0002
## 400 0.0401 nan 0.1000 -0.0001
## 420 0.0361 nan 0.1000 -0.0002
## 440 0.0326 nan 0.1000 -0.0001
## 460 0.0291 nan 0.1000 -0.0000
## 480 0.0257 nan 0.1000 -0.0001
## 500 0.0230 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2349 nan 0.1000 0.0392
## 2 1.1618 nan 0.1000 0.0301
## 3 1.1028 nan 0.1000 0.0239
## 4 1.0528 nan 0.1000 0.0225
## 5 1.0038 nan 0.1000 0.0205
## 6 0.9653 nan 0.1000 0.0167
## 7 0.9304 nan 0.1000 0.0145
## 8 0.9082 nan 0.1000 0.0058
## 9 0.8852 nan 0.1000 0.0084
## 10 0.8581 nan 0.1000 0.0095
## 20 0.6884 nan 0.1000 0.0019
## 40 0.5469 nan 0.1000 0.0006
## 60 0.4472 nan 0.1000 -0.0007
## 80 0.3777 nan 0.1000 0.0000
## 100 0.3151 nan 0.1000 -0.0003
## 120 0.2686 nan 0.1000 -0.0010
## 140 0.2271 nan 0.1000 -0.0006
## 160 0.1975 nan 0.1000 -0.0008
## 180 0.1728 nan 0.1000 -0.0005
## 200 0.1500 nan 0.1000 -0.0004
## 220 0.1322 nan 0.1000 -0.0002
## 240 0.1179 nan 0.1000 -0.0003
## 260 0.1036 nan 0.1000 -0.0004
## 280 0.0915 nan 0.1000 -0.0003
## 300 0.0814 nan 0.1000 -0.0003
## 320 0.0716 nan 0.1000 -0.0002
## 340 0.0644 nan 0.1000 -0.0002
## 360 0.0573 nan 0.1000 -0.0002
## 380 0.0514 nan 0.1000 -0.0001
## 400 0.0454 nan 0.1000 -0.0001
## 420 0.0405 nan 0.1000 -0.0002
## 440 0.0363 nan 0.1000 -0.0001
## 460 0.0326 nan 0.1000 -0.0001
## 480 0.0290 nan 0.1000 -0.0001
## 500 0.0260 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3199 nan 0.0010 0.0004
## 2 1.3191 nan 0.0010 0.0003
## 3 1.3182 nan 0.0010 0.0004
## 4 1.3174 nan 0.0010 0.0004
## 5 1.3166 nan 0.0010 0.0003
## 6 1.3159 nan 0.0010 0.0003
## 7 1.3150 nan 0.0010 0.0003
## 8 1.3142 nan 0.0010 0.0004
## 9 1.3134 nan 0.0010 0.0003
## 10 1.3126 nan 0.0010 0.0004
## 20 1.3048 nan 0.0010 0.0003
## 40 1.2896 nan 0.0010 0.0003
## 60 1.2746 nan 0.0010 0.0003
## 80 1.2604 nan 0.0010 0.0003
## 100 1.2463 nan 0.0010 0.0003
## 120 1.2329 nan 0.0010 0.0003
## 140 1.2196 nan 0.0010 0.0003
## 160 1.2067 nan 0.0010 0.0002
## 180 1.1942 nan 0.0010 0.0003
## 200 1.1821 nan 0.0010 0.0003
## 220 1.1701 nan 0.0010 0.0003
## 240 1.1588 nan 0.0010 0.0002
## 260 1.1479 nan 0.0010 0.0002
## 280 1.1371 nan 0.0010 0.0002
## 300 1.1267 nan 0.0010 0.0002
## 320 1.1168 nan 0.0010 0.0002
## 340 1.1071 nan 0.0010 0.0002
## 360 1.0976 nan 0.0010 0.0002
## 380 1.0880 nan 0.0010 0.0002
## 400 1.0788 nan 0.0010 0.0002
## 420 1.0699 nan 0.0010 0.0002
## 440 1.0611 nan 0.0010 0.0002
## 460 1.0526 nan 0.0010 0.0001
## 480 1.0442 nan 0.0010 0.0002
## 500 1.0362 nan 0.0010 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3181 nan 0.0010 0.0003
## 4 1.3173 nan 0.0010 0.0004
## 5 1.3165 nan 0.0010 0.0004
## 6 1.3156 nan 0.0010 0.0004
## 7 1.3147 nan 0.0010 0.0004
## 8 1.3139 nan 0.0010 0.0003
## 9 1.3131 nan 0.0010 0.0004
## 10 1.3123 nan 0.0010 0.0004
## 20 1.3040 nan 0.0010 0.0004
## 40 1.2885 nan 0.0010 0.0003
## 60 1.2735 nan 0.0010 0.0004
## 80 1.2591 nan 0.0010 0.0003
## 100 1.2450 nan 0.0010 0.0003
## 120 1.2314 nan 0.0010 0.0003
## 140 1.2183 nan 0.0010 0.0003
## 160 1.2055 nan 0.0010 0.0003
## 180 1.1934 nan 0.0010 0.0003
## 200 1.1812 nan 0.0010 0.0003
## 220 1.1694 nan 0.0010 0.0002
## 240 1.1580 nan 0.0010 0.0002
## 260 1.1470 nan 0.0010 0.0002
## 280 1.1362 nan 0.0010 0.0002
## 300 1.1257 nan 0.0010 0.0002
## 320 1.1156 nan 0.0010 0.0002
## 340 1.1057 nan 0.0010 0.0002
## 360 1.0963 nan 0.0010 0.0002
## 380 1.0870 nan 0.0010 0.0002
## 400 1.0781 nan 0.0010 0.0002
## 420 1.0691 nan 0.0010 0.0002
## 440 1.0606 nan 0.0010 0.0002
## 460 1.0524 nan 0.0010 0.0002
## 480 1.0442 nan 0.0010 0.0002
## 500 1.0360 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3190 nan 0.0010 0.0004
## 3 1.3182 nan 0.0010 0.0004
## 4 1.3174 nan 0.0010 0.0003
## 5 1.3166 nan 0.0010 0.0003
## 6 1.3158 nan 0.0010 0.0003
## 7 1.3150 nan 0.0010 0.0004
## 8 1.3141 nan 0.0010 0.0004
## 9 1.3134 nan 0.0010 0.0004
## 10 1.3126 nan 0.0010 0.0004
## 20 1.3047 nan 0.0010 0.0003
## 40 1.2892 nan 0.0010 0.0003
## 60 1.2741 nan 0.0010 0.0004
## 80 1.2597 nan 0.0010 0.0003
## 100 1.2461 nan 0.0010 0.0003
## 120 1.2325 nan 0.0010 0.0003
## 140 1.2195 nan 0.0010 0.0003
## 160 1.2072 nan 0.0010 0.0003
## 180 1.1949 nan 0.0010 0.0003
## 200 1.1830 nan 0.0010 0.0003
## 220 1.1714 nan 0.0010 0.0002
## 240 1.1601 nan 0.0010 0.0002
## 260 1.1493 nan 0.0010 0.0003
## 280 1.1386 nan 0.0010 0.0003
## 300 1.1285 nan 0.0010 0.0002
## 320 1.1183 nan 0.0010 0.0002
## 340 1.1083 nan 0.0010 0.0002
## 360 1.0986 nan 0.0010 0.0002
## 380 1.0895 nan 0.0010 0.0002
## 400 1.0805 nan 0.0010 0.0002
## 420 1.0715 nan 0.0010 0.0002
## 440 1.0629 nan 0.0010 0.0002
## 460 1.0545 nan 0.0010 0.0002
## 480 1.0463 nan 0.0010 0.0002
## 500 1.0382 nan 0.0010 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3190 nan 0.0010 0.0004
## 3 1.3181 nan 0.0010 0.0003
## 4 1.3173 nan 0.0010 0.0004
## 5 1.3164 nan 0.0010 0.0004
## 6 1.3155 nan 0.0010 0.0004
## 7 1.3146 nan 0.0010 0.0004
## 8 1.3138 nan 0.0010 0.0004
## 9 1.3130 nan 0.0010 0.0004
## 10 1.3122 nan 0.0010 0.0004
## 20 1.3037 nan 0.0010 0.0004
## 40 1.2869 nan 0.0010 0.0004
## 60 1.2707 nan 0.0010 0.0003
## 80 1.2552 nan 0.0010 0.0003
## 100 1.2403 nan 0.0010 0.0003
## 120 1.2255 nan 0.0010 0.0003
## 140 1.2113 nan 0.0010 0.0003
## 160 1.1976 nan 0.0010 0.0003
## 180 1.1847 nan 0.0010 0.0003
## 200 1.1719 nan 0.0010 0.0003
## 220 1.1594 nan 0.0010 0.0003
## 240 1.1473 nan 0.0010 0.0003
## 260 1.1358 nan 0.0010 0.0002
## 280 1.1240 nan 0.0010 0.0002
## 300 1.1128 nan 0.0010 0.0002
## 320 1.1021 nan 0.0010 0.0002
## 340 1.0916 nan 0.0010 0.0002
## 360 1.0818 nan 0.0010 0.0002
## 380 1.0721 nan 0.0010 0.0002
## 400 1.0625 nan 0.0010 0.0002
## 420 1.0531 nan 0.0010 0.0002
## 440 1.0436 nan 0.0010 0.0002
## 460 1.0349 nan 0.0010 0.0002
## 480 1.0262 nan 0.0010 0.0002
## 500 1.0175 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3181 nan 0.0010 0.0004
## 4 1.3173 nan 0.0010 0.0004
## 5 1.3164 nan 0.0010 0.0004
## 6 1.3155 nan 0.0010 0.0004
## 7 1.3147 nan 0.0010 0.0004
## 8 1.3138 nan 0.0010 0.0004
## 9 1.3129 nan 0.0010 0.0004
## 10 1.3121 nan 0.0010 0.0004
## 20 1.3036 nan 0.0010 0.0004
## 40 1.2869 nan 0.0010 0.0004
## 60 1.2708 nan 0.0010 0.0003
## 80 1.2553 nan 0.0010 0.0004
## 100 1.2406 nan 0.0010 0.0004
## 120 1.2261 nan 0.0010 0.0003
## 140 1.2121 nan 0.0010 0.0003
## 160 1.1983 nan 0.0010 0.0003
## 180 1.1853 nan 0.0010 0.0003
## 200 1.1723 nan 0.0010 0.0003
## 220 1.1598 nan 0.0010 0.0003
## 240 1.1477 nan 0.0010 0.0002
## 260 1.1360 nan 0.0010 0.0002
## 280 1.1249 nan 0.0010 0.0002
## 300 1.1138 nan 0.0010 0.0002
## 320 1.1031 nan 0.0010 0.0002
## 340 1.0928 nan 0.0010 0.0002
## 360 1.0827 nan 0.0010 0.0002
## 380 1.0726 nan 0.0010 0.0002
## 400 1.0630 nan 0.0010 0.0002
## 420 1.0537 nan 0.0010 0.0002
## 440 1.0446 nan 0.0010 0.0002
## 460 1.0358 nan 0.0010 0.0002
## 480 1.0270 nan 0.0010 0.0002
## 500 1.0185 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3190 nan 0.0010 0.0004
## 3 1.3181 nan 0.0010 0.0004
## 4 1.3173 nan 0.0010 0.0004
## 5 1.3165 nan 0.0010 0.0003
## 6 1.3156 nan 0.0010 0.0004
## 7 1.3148 nan 0.0010 0.0004
## 8 1.3138 nan 0.0010 0.0004
## 9 1.3130 nan 0.0010 0.0004
## 10 1.3121 nan 0.0010 0.0004
## 20 1.3036 nan 0.0010 0.0004
## 40 1.2873 nan 0.0010 0.0003
## 60 1.2715 nan 0.0010 0.0003
## 80 1.2562 nan 0.0010 0.0003
## 100 1.2413 nan 0.0010 0.0003
## 120 1.2268 nan 0.0010 0.0003
## 140 1.2130 nan 0.0010 0.0003
## 160 1.1995 nan 0.0010 0.0003
## 180 1.1866 nan 0.0010 0.0003
## 200 1.1741 nan 0.0010 0.0002
## 220 1.1620 nan 0.0010 0.0003
## 240 1.1503 nan 0.0010 0.0003
## 260 1.1387 nan 0.0010 0.0003
## 280 1.1274 nan 0.0010 0.0003
## 300 1.1162 nan 0.0010 0.0002
## 320 1.1054 nan 0.0010 0.0002
## 340 1.0950 nan 0.0010 0.0002
## 360 1.0849 nan 0.0010 0.0002
## 380 1.0751 nan 0.0010 0.0002
## 400 1.0656 nan 0.0010 0.0002
## 420 1.0562 nan 0.0010 0.0002
## 440 1.0474 nan 0.0010 0.0002
## 460 1.0385 nan 0.0010 0.0002
## 480 1.0297 nan 0.0010 0.0002
## 500 1.0216 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3197 nan 0.0010 0.0003
## 2 1.3188 nan 0.0010 0.0004
## 3 1.3179 nan 0.0010 0.0004
## 4 1.3169 nan 0.0010 0.0004
## 5 1.3159 nan 0.0010 0.0004
## 6 1.3151 nan 0.0010 0.0004
## 7 1.3142 nan 0.0010 0.0004
## 8 1.3133 nan 0.0010 0.0004
## 9 1.3123 nan 0.0010 0.0004
## 10 1.3114 nan 0.0010 0.0004
## 20 1.3023 nan 0.0010 0.0004
## 40 1.2849 nan 0.0010 0.0004
## 60 1.2681 nan 0.0010 0.0004
## 80 1.2519 nan 0.0010 0.0003
## 100 1.2361 nan 0.0010 0.0004
## 120 1.2208 nan 0.0010 0.0003
## 140 1.2060 nan 0.0010 0.0003
## 160 1.1916 nan 0.0010 0.0003
## 180 1.1779 nan 0.0010 0.0003
## 200 1.1643 nan 0.0010 0.0002
## 220 1.1512 nan 0.0010 0.0003
## 240 1.1386 nan 0.0010 0.0003
## 260 1.1263 nan 0.0010 0.0002
## 280 1.1144 nan 0.0010 0.0002
## 300 1.1026 nan 0.0010 0.0003
## 320 1.0912 nan 0.0010 0.0002
## 340 1.0804 nan 0.0010 0.0002
## 360 1.0698 nan 0.0010 0.0002
## 380 1.0595 nan 0.0010 0.0002
## 400 1.0494 nan 0.0010 0.0002
## 420 1.0396 nan 0.0010 0.0002
## 440 1.0299 nan 0.0010 0.0002
## 460 1.0206 nan 0.0010 0.0002
## 480 1.0113 nan 0.0010 0.0002
## 500 1.0024 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3197 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3179 nan 0.0010 0.0004
## 4 1.3171 nan 0.0010 0.0004
## 5 1.3161 nan 0.0010 0.0004
## 6 1.3152 nan 0.0010 0.0004
## 7 1.3143 nan 0.0010 0.0004
## 8 1.3134 nan 0.0010 0.0004
## 9 1.3125 nan 0.0010 0.0004
## 10 1.3116 nan 0.0010 0.0004
## 20 1.3027 nan 0.0010 0.0004
## 40 1.2855 nan 0.0010 0.0004
## 60 1.2685 nan 0.0010 0.0004
## 80 1.2523 nan 0.0010 0.0004
## 100 1.2365 nan 0.0010 0.0003
## 120 1.2215 nan 0.0010 0.0003
## 140 1.2068 nan 0.0010 0.0003
## 160 1.1927 nan 0.0010 0.0003
## 180 1.1789 nan 0.0010 0.0003
## 200 1.1655 nan 0.0010 0.0003
## 220 1.1523 nan 0.0010 0.0003
## 240 1.1398 nan 0.0010 0.0003
## 260 1.1278 nan 0.0010 0.0002
## 280 1.1157 nan 0.0010 0.0002
## 300 1.1040 nan 0.0010 0.0003
## 320 1.0929 nan 0.0010 0.0002
## 340 1.0821 nan 0.0010 0.0002
## 360 1.0714 nan 0.0010 0.0002
## 380 1.0611 nan 0.0010 0.0002
## 400 1.0512 nan 0.0010 0.0002
## 420 1.0415 nan 0.0010 0.0002
## 440 1.0321 nan 0.0010 0.0002
## 460 1.0228 nan 0.0010 0.0002
## 480 1.0138 nan 0.0010 0.0002
## 500 1.0048 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3180 nan 0.0010 0.0004
## 4 1.3171 nan 0.0010 0.0004
## 5 1.3162 nan 0.0010 0.0004
## 6 1.3154 nan 0.0010 0.0004
## 7 1.3145 nan 0.0010 0.0004
## 8 1.3135 nan 0.0010 0.0004
## 9 1.3126 nan 0.0010 0.0004
## 10 1.3117 nan 0.0010 0.0004
## 20 1.3031 nan 0.0010 0.0003
## 40 1.2858 nan 0.0010 0.0004
## 60 1.2694 nan 0.0010 0.0004
## 80 1.2532 nan 0.0010 0.0004
## 100 1.2375 nan 0.0010 0.0003
## 120 1.2225 nan 0.0010 0.0003
## 140 1.2081 nan 0.0010 0.0003
## 160 1.1942 nan 0.0010 0.0003
## 180 1.1806 nan 0.0010 0.0003
## 200 1.1675 nan 0.0010 0.0003
## 220 1.1546 nan 0.0010 0.0003
## 240 1.1422 nan 0.0010 0.0003
## 260 1.1298 nan 0.0010 0.0003
## 280 1.1180 nan 0.0010 0.0003
## 300 1.1066 nan 0.0010 0.0002
## 320 1.0957 nan 0.0010 0.0002
## 340 1.0852 nan 0.0010 0.0002
## 360 1.0746 nan 0.0010 0.0002
## 380 1.0643 nan 0.0010 0.0002
## 400 1.0543 nan 0.0010 0.0002
## 420 1.0448 nan 0.0010 0.0002
## 440 1.0356 nan 0.0010 0.0002
## 460 1.0264 nan 0.0010 0.0002
## 480 1.0171 nan 0.0010 0.0002
## 500 1.0085 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3128 nan 0.0100 0.0037
## 2 1.3062 nan 0.0100 0.0028
## 3 1.2990 nan 0.0100 0.0030
## 4 1.2919 nan 0.0100 0.0033
## 5 1.2837 nan 0.0100 0.0036
## 6 1.2764 nan 0.0100 0.0036
## 7 1.2690 nan 0.0100 0.0035
## 8 1.2610 nan 0.0100 0.0032
## 9 1.2542 nan 0.0100 0.0032
## 10 1.2468 nan 0.0100 0.0034
## 20 1.1828 nan 0.0100 0.0026
## 40 1.0788 nan 0.0100 0.0020
## 60 0.9969 nan 0.0100 0.0014
## 80 0.9348 nan 0.0100 0.0012
## 100 0.8837 nan 0.0100 0.0008
## 120 0.8408 nan 0.0100 0.0008
## 140 0.8047 nan 0.0100 0.0004
## 160 0.7742 nan 0.0100 0.0005
## 180 0.7476 nan 0.0100 0.0002
## 200 0.7254 nan 0.0100 0.0002
## 220 0.7050 nan 0.0100 0.0001
## 240 0.6875 nan 0.0100 0.0003
## 260 0.6707 nan 0.0100 0.0002
## 280 0.6562 nan 0.0100 -0.0000
## 300 0.6434 nan 0.0100 0.0001
## 320 0.6315 nan 0.0100 -0.0000
## 340 0.6201 nan 0.0100 -0.0000
## 360 0.6096 nan 0.0100 -0.0000
## 380 0.5991 nan 0.0100 -0.0002
## 400 0.5885 nan 0.0100 -0.0001
## 420 0.5777 nan 0.0100 -0.0000
## 440 0.5684 nan 0.0100 -0.0000
## 460 0.5594 nan 0.0100 -0.0001
## 480 0.5508 nan 0.0100 -0.0001
## 500 0.5429 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3123 nan 0.0100 0.0038
## 2 1.3050 nan 0.0100 0.0033
## 3 1.2978 nan 0.0100 0.0030
## 4 1.2898 nan 0.0100 0.0034
## 5 1.2827 nan 0.0100 0.0033
## 6 1.2755 nan 0.0100 0.0029
## 7 1.2674 nan 0.0100 0.0035
## 8 1.2602 nan 0.0100 0.0027
## 9 1.2535 nan 0.0100 0.0032
## 10 1.2463 nan 0.0100 0.0034
## 20 1.1830 nan 0.0100 0.0029
## 40 1.0780 nan 0.0100 0.0019
## 60 0.9977 nan 0.0100 0.0013
## 80 0.9350 nan 0.0100 0.0009
## 100 0.8841 nan 0.0100 0.0010
## 120 0.8414 nan 0.0100 0.0008
## 140 0.8068 nan 0.0100 0.0006
## 160 0.7768 nan 0.0100 0.0004
## 180 0.7510 nan 0.0100 0.0002
## 200 0.7302 nan 0.0100 0.0002
## 220 0.7119 nan 0.0100 0.0002
## 240 0.6948 nan 0.0100 0.0000
## 260 0.6799 nan 0.0100 0.0000
## 280 0.6658 nan 0.0100 0.0000
## 300 0.6530 nan 0.0100 0.0001
## 320 0.6400 nan 0.0100 0.0002
## 340 0.6286 nan 0.0100 -0.0001
## 360 0.6164 nan 0.0100 0.0001
## 380 0.6057 nan 0.0100 0.0001
## 400 0.5954 nan 0.0100 -0.0000
## 420 0.5856 nan 0.0100 0.0002
## 440 0.5763 nan 0.0100 -0.0001
## 460 0.5674 nan 0.0100 -0.0001
## 480 0.5587 nan 0.0100 -0.0003
## 500 0.5498 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3128 nan 0.0100 0.0035
## 2 1.3048 nan 0.0100 0.0035
## 3 1.2964 nan 0.0100 0.0037
## 4 1.2885 nan 0.0100 0.0034
## 5 1.2811 nan 0.0100 0.0035
## 6 1.2728 nan 0.0100 0.0037
## 7 1.2660 nan 0.0100 0.0030
## 8 1.2586 nan 0.0100 0.0035
## 9 1.2512 nan 0.0100 0.0029
## 10 1.2446 nan 0.0100 0.0029
## 20 1.1826 nan 0.0100 0.0025
## 40 1.0800 nan 0.0100 0.0023
## 60 0.9989 nan 0.0100 0.0014
## 80 0.9374 nan 0.0100 0.0010
## 100 0.8850 nan 0.0100 0.0008
## 120 0.8433 nan 0.0100 0.0007
## 140 0.8093 nan 0.0100 0.0005
## 160 0.7793 nan 0.0100 0.0002
## 180 0.7546 nan 0.0100 0.0005
## 200 0.7332 nan 0.0100 -0.0002
## 220 0.7135 nan 0.0100 0.0003
## 240 0.6972 nan 0.0100 0.0002
## 260 0.6834 nan 0.0100 0.0001
## 280 0.6700 nan 0.0100 0.0001
## 300 0.6565 nan 0.0100 0.0000
## 320 0.6452 nan 0.0100 0.0001
## 340 0.6332 nan 0.0100 0.0001
## 360 0.6229 nan 0.0100 0.0001
## 380 0.6126 nan 0.0100 -0.0000
## 400 0.6023 nan 0.0100 -0.0001
## 420 0.5921 nan 0.0100 0.0000
## 440 0.5822 nan 0.0100 0.0000
## 460 0.5735 nan 0.0100 0.0000
## 480 0.5649 nan 0.0100 0.0000
## 500 0.5563 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3122 nan 0.0100 0.0037
## 2 1.3043 nan 0.0100 0.0036
## 3 1.2965 nan 0.0100 0.0031
## 4 1.2881 nan 0.0100 0.0039
## 5 1.2796 nan 0.0100 0.0034
## 6 1.2714 nan 0.0100 0.0033
## 7 1.2641 nan 0.0100 0.0033
## 8 1.2565 nan 0.0100 0.0034
## 9 1.2480 nan 0.0100 0.0035
## 10 1.2404 nan 0.0100 0.0036
## 20 1.1730 nan 0.0100 0.0028
## 40 1.0630 nan 0.0100 0.0018
## 60 0.9777 nan 0.0100 0.0017
## 80 0.9105 nan 0.0100 0.0014
## 100 0.8556 nan 0.0100 0.0009
## 120 0.8126 nan 0.0100 0.0006
## 140 0.7756 nan 0.0100 0.0004
## 160 0.7437 nan 0.0100 0.0005
## 180 0.7173 nan 0.0100 0.0003
## 200 0.6934 nan 0.0100 0.0002
## 220 0.6715 nan 0.0100 0.0004
## 240 0.6514 nan 0.0100 0.0002
## 260 0.6343 nan 0.0100 0.0001
## 280 0.6187 nan 0.0100 0.0001
## 300 0.6028 nan 0.0100 0.0001
## 320 0.5890 nan 0.0100 -0.0001
## 340 0.5755 nan 0.0100 0.0000
## 360 0.5641 nan 0.0100 -0.0001
## 380 0.5518 nan 0.0100 0.0001
## 400 0.5400 nan 0.0100 0.0001
## 420 0.5292 nan 0.0100 0.0000
## 440 0.5200 nan 0.0100 -0.0000
## 460 0.5100 nan 0.0100 0.0000
## 480 0.5000 nan 0.0100 -0.0001
## 500 0.4904 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3119 nan 0.0100 0.0038
## 2 1.3028 nan 0.0100 0.0038
## 3 1.2946 nan 0.0100 0.0037
## 4 1.2864 nan 0.0100 0.0038
## 5 1.2781 nan 0.0100 0.0035
## 6 1.2706 nan 0.0100 0.0037
## 7 1.2631 nan 0.0100 0.0037
## 8 1.2553 nan 0.0100 0.0034
## 9 1.2475 nan 0.0100 0.0031
## 10 1.2401 nan 0.0100 0.0035
## 20 1.1728 nan 0.0100 0.0027
## 40 1.0619 nan 0.0100 0.0020
## 60 0.9766 nan 0.0100 0.0015
## 80 0.9098 nan 0.0100 0.0012
## 100 0.8556 nan 0.0100 0.0009
## 120 0.8111 nan 0.0100 0.0007
## 140 0.7742 nan 0.0100 0.0005
## 160 0.7435 nan 0.0100 0.0004
## 180 0.7155 nan 0.0100 0.0004
## 200 0.6917 nan 0.0100 0.0004
## 220 0.6707 nan 0.0100 0.0001
## 240 0.6519 nan 0.0100 0.0002
## 260 0.6347 nan 0.0100 0.0000
## 280 0.6179 nan 0.0100 0.0002
## 300 0.6031 nan 0.0100 0.0000
## 320 0.5900 nan 0.0100 0.0001
## 340 0.5776 nan 0.0100 0.0001
## 360 0.5654 nan 0.0100 -0.0000
## 380 0.5541 nan 0.0100 -0.0001
## 400 0.5438 nan 0.0100 -0.0001
## 420 0.5335 nan 0.0100 0.0001
## 440 0.5235 nan 0.0100 -0.0002
## 460 0.5138 nan 0.0100 -0.0001
## 480 0.5040 nan 0.0100 -0.0001
## 500 0.4944 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3117 nan 0.0100 0.0039
## 2 1.3031 nan 0.0100 0.0038
## 3 1.2945 nan 0.0100 0.0041
## 4 1.2866 nan 0.0100 0.0038
## 5 1.2787 nan 0.0100 0.0034
## 6 1.2709 nan 0.0100 0.0034
## 7 1.2634 nan 0.0100 0.0036
## 8 1.2560 nan 0.0100 0.0032
## 9 1.2496 nan 0.0100 0.0028
## 10 1.2417 nan 0.0100 0.0035
## 20 1.1751 nan 0.0100 0.0027
## 40 1.0653 nan 0.0100 0.0021
## 60 0.9821 nan 0.0100 0.0013
## 80 0.9158 nan 0.0100 0.0010
## 100 0.8618 nan 0.0100 0.0007
## 120 0.8177 nan 0.0100 0.0006
## 140 0.7809 nan 0.0100 0.0004
## 160 0.7513 nan 0.0100 0.0005
## 180 0.7255 nan 0.0100 0.0004
## 200 0.7017 nan 0.0100 0.0003
## 220 0.6817 nan 0.0100 0.0001
## 240 0.6638 nan 0.0100 0.0001
## 260 0.6476 nan 0.0100 0.0002
## 280 0.6325 nan 0.0100 0.0001
## 300 0.6182 nan 0.0100 -0.0002
## 320 0.6055 nan 0.0100 -0.0002
## 340 0.5931 nan 0.0100 -0.0001
## 360 0.5815 nan 0.0100 0.0001
## 380 0.5697 nan 0.0100 -0.0000
## 400 0.5589 nan 0.0100 0.0001
## 420 0.5490 nan 0.0100 -0.0001
## 440 0.5380 nan 0.0100 -0.0000
## 460 0.5296 nan 0.0100 -0.0000
## 480 0.5194 nan 0.0100 0.0000
## 500 0.5094 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3116 nan 0.0100 0.0042
## 2 1.3030 nan 0.0100 0.0040
## 3 1.2939 nan 0.0100 0.0037
## 4 1.2853 nan 0.0100 0.0041
## 5 1.2767 nan 0.0100 0.0038
## 6 1.2690 nan 0.0100 0.0031
## 7 1.2607 nan 0.0100 0.0037
## 8 1.2522 nan 0.0100 0.0035
## 9 1.2440 nan 0.0100 0.0035
## 10 1.2357 nan 0.0100 0.0034
## 20 1.1640 nan 0.0100 0.0028
## 40 1.0473 nan 0.0100 0.0020
## 60 0.9597 nan 0.0100 0.0018
## 80 0.8901 nan 0.0100 0.0011
## 100 0.8326 nan 0.0100 0.0009
## 120 0.7861 nan 0.0100 0.0008
## 140 0.7464 nan 0.0100 0.0007
## 160 0.7123 nan 0.0100 0.0008
## 180 0.6831 nan 0.0100 0.0006
## 200 0.6573 nan 0.0100 0.0003
## 220 0.6335 nan 0.0100 0.0001
## 240 0.6131 nan 0.0100 0.0003
## 260 0.5942 nan 0.0100 0.0002
## 280 0.5783 nan 0.0100 0.0001
## 300 0.5622 nan 0.0100 0.0001
## 320 0.5477 nan 0.0100 0.0000
## 340 0.5332 nan 0.0100 -0.0000
## 360 0.5196 nan 0.0100 0.0000
## 380 0.5076 nan 0.0100 -0.0001
## 400 0.4959 nan 0.0100 -0.0000
## 420 0.4847 nan 0.0100 -0.0001
## 440 0.4730 nan 0.0100 0.0000
## 460 0.4618 nan 0.0100 0.0000
## 480 0.4515 nan 0.0100 0.0001
## 500 0.4418 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3124 nan 0.0100 0.0034
## 2 1.3034 nan 0.0100 0.0038
## 3 1.2950 nan 0.0100 0.0039
## 4 1.2863 nan 0.0100 0.0035
## 5 1.2784 nan 0.0100 0.0037
## 6 1.2705 nan 0.0100 0.0033
## 7 1.2624 nan 0.0100 0.0034
## 8 1.2538 nan 0.0100 0.0037
## 9 1.2460 nan 0.0100 0.0035
## 10 1.2385 nan 0.0100 0.0031
## 20 1.1676 nan 0.0100 0.0029
## 40 1.0538 nan 0.0100 0.0021
## 60 0.9651 nan 0.0100 0.0014
## 80 0.8952 nan 0.0100 0.0010
## 100 0.8389 nan 0.0100 0.0010
## 120 0.7937 nan 0.0100 0.0007
## 140 0.7546 nan 0.0100 0.0006
## 160 0.7206 nan 0.0100 0.0005
## 180 0.6925 nan 0.0100 0.0004
## 200 0.6665 nan 0.0100 0.0000
## 220 0.6439 nan 0.0100 0.0003
## 240 0.6243 nan 0.0100 0.0002
## 260 0.6054 nan 0.0100 0.0001
## 280 0.5892 nan 0.0100 0.0001
## 300 0.5733 nan 0.0100 -0.0000
## 320 0.5578 nan 0.0100 0.0003
## 340 0.5431 nan 0.0100 -0.0000
## 360 0.5292 nan 0.0100 0.0001
## 380 0.5164 nan 0.0100 -0.0000
## 400 0.5044 nan 0.0100 -0.0002
## 420 0.4931 nan 0.0100 0.0001
## 440 0.4830 nan 0.0100 -0.0000
## 460 0.4727 nan 0.0100 -0.0001
## 480 0.4619 nan 0.0100 -0.0000
## 500 0.4511 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3119 nan 0.0100 0.0036
## 2 1.3031 nan 0.0100 0.0039
## 3 1.2937 nan 0.0100 0.0041
## 4 1.2854 nan 0.0100 0.0038
## 5 1.2764 nan 0.0100 0.0041
## 6 1.2687 nan 0.0100 0.0036
## 7 1.2611 nan 0.0100 0.0036
## 8 1.2531 nan 0.0100 0.0037
## 9 1.2450 nan 0.0100 0.0037
## 10 1.2370 nan 0.0100 0.0033
## 20 1.1674 nan 0.0100 0.0032
## 40 1.0561 nan 0.0100 0.0019
## 60 0.9680 nan 0.0100 0.0013
## 80 0.9002 nan 0.0100 0.0011
## 100 0.8444 nan 0.0100 0.0009
## 120 0.7993 nan 0.0100 0.0008
## 140 0.7628 nan 0.0100 0.0005
## 160 0.7320 nan 0.0100 0.0001
## 180 0.7051 nan 0.0100 0.0001
## 200 0.6792 nan 0.0100 0.0002
## 220 0.6578 nan 0.0100 0.0002
## 240 0.6379 nan 0.0100 0.0002
## 260 0.6197 nan 0.0100 0.0001
## 280 0.6030 nan 0.0100 0.0001
## 300 0.5867 nan 0.0100 0.0000
## 320 0.5723 nan 0.0100 -0.0000
## 340 0.5578 nan 0.0100 -0.0000
## 360 0.5450 nan 0.0100 0.0001
## 380 0.5327 nan 0.0100 -0.0001
## 400 0.5208 nan 0.0100 0.0001
## 420 0.5087 nan 0.0100 -0.0000
## 440 0.4969 nan 0.0100 -0.0000
## 460 0.4862 nan 0.0100 -0.0002
## 480 0.4755 nan 0.0100 -0.0000
## 500 0.4652 nan 0.0100 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2382 nan 0.1000 0.0361
## 2 1.1684 nan 0.1000 0.0311
## 3 1.1128 nan 0.1000 0.0241
## 4 1.0661 nan 0.1000 0.0195
## 5 1.0238 nan 0.1000 0.0180
## 6 0.9878 nan 0.1000 0.0130
## 7 0.9536 nan 0.1000 0.0123
## 8 0.9231 nan 0.1000 0.0105
## 9 0.8985 nan 0.1000 0.0096
## 10 0.8725 nan 0.1000 0.0091
## 20 0.7264 nan 0.1000 0.0026
## 40 0.5872 nan 0.1000 -0.0003
## 60 0.5086 nan 0.1000 -0.0019
## 80 0.4445 nan 0.1000 -0.0009
## 100 0.3904 nan 0.1000 -0.0013
## 120 0.3434 nan 0.1000 -0.0005
## 140 0.3068 nan 0.1000 -0.0010
## 160 0.2757 nan 0.1000 -0.0003
## 180 0.2505 nan 0.1000 -0.0015
## 200 0.2268 nan 0.1000 -0.0003
## 220 0.2060 nan 0.1000 -0.0005
## 240 0.1822 nan 0.1000 -0.0001
## 260 0.1671 nan 0.1000 -0.0006
## 280 0.1519 nan 0.1000 -0.0004
## 300 0.1393 nan 0.1000 -0.0002
## 320 0.1268 nan 0.1000 -0.0001
## 340 0.1162 nan 0.1000 -0.0001
## 360 0.1062 nan 0.1000 -0.0002
## 380 0.0981 nan 0.1000 0.0000
## 400 0.0913 nan 0.1000 -0.0002
## 420 0.0844 nan 0.1000 -0.0002
## 440 0.0780 nan 0.1000 -0.0002
## 460 0.0721 nan 0.1000 -0.0002
## 480 0.0664 nan 0.1000 -0.0003
## 500 0.0613 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2384 nan 0.1000 0.0356
## 2 1.1724 nan 0.1000 0.0302
## 3 1.1189 nan 0.1000 0.0247
## 4 1.0693 nan 0.1000 0.0221
## 5 1.0277 nan 0.1000 0.0175
## 6 0.9871 nan 0.1000 0.0116
## 7 0.9554 nan 0.1000 0.0138
## 8 0.9308 nan 0.1000 0.0091
## 9 0.9060 nan 0.1000 0.0104
## 10 0.8833 nan 0.1000 0.0083
## 20 0.7294 nan 0.1000 0.0030
## 40 0.5873 nan 0.1000 -0.0011
## 60 0.5065 nan 0.1000 -0.0010
## 80 0.4444 nan 0.1000 -0.0008
## 100 0.3967 nan 0.1000 -0.0011
## 120 0.3541 nan 0.1000 -0.0004
## 140 0.3122 nan 0.1000 0.0007
## 160 0.2858 nan 0.1000 -0.0007
## 180 0.2579 nan 0.1000 0.0002
## 200 0.2299 nan 0.1000 -0.0007
## 220 0.2077 nan 0.1000 -0.0003
## 240 0.1919 nan 0.1000 -0.0008
## 260 0.1760 nan 0.1000 -0.0011
## 280 0.1604 nan 0.1000 -0.0005
## 300 0.1482 nan 0.1000 -0.0008
## 320 0.1357 nan 0.1000 -0.0003
## 340 0.1251 nan 0.1000 -0.0004
## 360 0.1142 nan 0.1000 -0.0001
## 380 0.1049 nan 0.1000 -0.0001
## 400 0.0966 nan 0.1000 -0.0003
## 420 0.0900 nan 0.1000 -0.0002
## 440 0.0835 nan 0.1000 -0.0004
## 460 0.0769 nan 0.1000 -0.0003
## 480 0.0718 nan 0.1000 -0.0001
## 500 0.0669 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2462 nan 0.1000 0.0343
## 2 1.1869 nan 0.1000 0.0261
## 3 1.1257 nan 0.1000 0.0246
## 4 1.0800 nan 0.1000 0.0209
## 5 1.0396 nan 0.1000 0.0163
## 6 1.0001 nan 0.1000 0.0149
## 7 0.9691 nan 0.1000 0.0127
## 8 0.9380 nan 0.1000 0.0125
## 9 0.9136 nan 0.1000 0.0102
## 10 0.8842 nan 0.1000 0.0106
## 20 0.7323 nan 0.1000 0.0032
## 40 0.6031 nan 0.1000 0.0003
## 60 0.5256 nan 0.1000 -0.0001
## 80 0.4669 nan 0.1000 -0.0012
## 100 0.4190 nan 0.1000 -0.0006
## 120 0.3783 nan 0.1000 -0.0012
## 140 0.3367 nan 0.1000 -0.0003
## 160 0.3080 nan 0.1000 -0.0008
## 180 0.2777 nan 0.1000 -0.0006
## 200 0.2530 nan 0.1000 -0.0006
## 220 0.2291 nan 0.1000 -0.0006
## 240 0.2081 nan 0.1000 -0.0010
## 260 0.1922 nan 0.1000 -0.0004
## 280 0.1775 nan 0.1000 -0.0006
## 300 0.1664 nan 0.1000 -0.0006
## 320 0.1532 nan 0.1000 -0.0008
## 340 0.1405 nan 0.1000 0.0000
## 360 0.1296 nan 0.1000 -0.0004
## 380 0.1186 nan 0.1000 -0.0002
## 400 0.1088 nan 0.1000 -0.0001
## 420 0.1008 nan 0.1000 -0.0003
## 440 0.0924 nan 0.1000 -0.0002
## 460 0.0841 nan 0.1000 -0.0002
## 480 0.0780 nan 0.1000 -0.0004
## 500 0.0716 nan 0.1000 -0.0004
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2312 nan 0.1000 0.0392
## 2 1.1613 nan 0.1000 0.0294
## 3 1.1001 nan 0.1000 0.0267
## 4 1.0501 nan 0.1000 0.0223
## 5 1.0040 nan 0.1000 0.0191
## 6 0.9613 nan 0.1000 0.0161
## 7 0.9264 nan 0.1000 0.0114
## 8 0.8981 nan 0.1000 0.0096
## 9 0.8706 nan 0.1000 0.0083
## 10 0.8459 nan 0.1000 0.0080
## 20 0.6910 nan 0.1000 0.0008
## 40 0.5411 nan 0.1000 -0.0001
## 60 0.4516 nan 0.1000 -0.0009
## 80 0.3879 nan 0.1000 0.0002
## 100 0.3331 nan 0.1000 -0.0007
## 120 0.2907 nan 0.1000 -0.0006
## 140 0.2504 nan 0.1000 -0.0008
## 160 0.2178 nan 0.1000 -0.0009
## 180 0.1898 nan 0.1000 -0.0008
## 200 0.1685 nan 0.1000 -0.0003
## 220 0.1489 nan 0.1000 -0.0003
## 240 0.1326 nan 0.1000 -0.0000
## 260 0.1177 nan 0.1000 -0.0004
## 280 0.1049 nan 0.1000 -0.0003
## 300 0.0947 nan 0.1000 0.0000
## 320 0.0836 nan 0.1000 -0.0004
## 340 0.0752 nan 0.1000 -0.0002
## 360 0.0685 nan 0.1000 -0.0001
## 380 0.0613 nan 0.1000 -0.0002
## 400 0.0560 nan 0.1000 -0.0002
## 420 0.0513 nan 0.1000 -0.0002
## 440 0.0458 nan 0.1000 -0.0001
## 460 0.0410 nan 0.1000 0.0001
## 480 0.0365 nan 0.1000 -0.0000
## 500 0.0330 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2439 nan 0.1000 0.0346
## 2 1.1725 nan 0.1000 0.0301
## 3 1.1044 nan 0.1000 0.0289
## 4 1.0567 nan 0.1000 0.0199
## 5 1.0098 nan 0.1000 0.0170
## 6 0.9697 nan 0.1000 0.0174
## 7 0.9346 nan 0.1000 0.0132
## 8 0.9036 nan 0.1000 0.0118
## 9 0.8711 nan 0.1000 0.0108
## 10 0.8476 nan 0.1000 0.0088
## 20 0.6882 nan 0.1000 0.0024
## 40 0.5450 nan 0.1000 0.0001
## 60 0.4627 nan 0.1000 -0.0005
## 80 0.3992 nan 0.1000 -0.0011
## 100 0.3413 nan 0.1000 -0.0002
## 120 0.2999 nan 0.1000 -0.0005
## 140 0.2598 nan 0.1000 -0.0003
## 160 0.2304 nan 0.1000 -0.0004
## 180 0.1994 nan 0.1000 -0.0003
## 200 0.1758 nan 0.1000 -0.0003
## 220 0.1550 nan 0.1000 -0.0002
## 240 0.1377 nan 0.1000 -0.0003
## 260 0.1220 nan 0.1000 -0.0000
## 280 0.1093 nan 0.1000 -0.0003
## 300 0.0986 nan 0.1000 -0.0001
## 320 0.0895 nan 0.1000 -0.0004
## 340 0.0811 nan 0.1000 -0.0002
## 360 0.0727 nan 0.1000 -0.0002
## 380 0.0648 nan 0.1000 -0.0000
## 400 0.0582 nan 0.1000 -0.0004
## 420 0.0522 nan 0.1000 -0.0001
## 440 0.0471 nan 0.1000 -0.0002
## 460 0.0430 nan 0.1000 -0.0003
## 480 0.0387 nan 0.1000 -0.0001
## 500 0.0351 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2453 nan 0.1000 0.0374
## 2 1.1707 nan 0.1000 0.0321
## 3 1.1157 nan 0.1000 0.0240
## 4 1.0604 nan 0.1000 0.0235
## 5 1.0190 nan 0.1000 0.0173
## 6 0.9792 nan 0.1000 0.0173
## 7 0.9391 nan 0.1000 0.0169
## 8 0.9043 nan 0.1000 0.0146
## 9 0.8778 nan 0.1000 0.0100
## 10 0.8520 nan 0.1000 0.0093
## 20 0.6957 nan 0.1000 0.0003
## 40 0.5610 nan 0.1000 0.0003
## 60 0.4637 nan 0.1000 -0.0008
## 80 0.3987 nan 0.1000 -0.0007
## 100 0.3456 nan 0.1000 -0.0008
## 120 0.3036 nan 0.1000 0.0006
## 140 0.2654 nan 0.1000 -0.0019
## 160 0.2355 nan 0.1000 -0.0005
## 180 0.2096 nan 0.1000 0.0001
## 200 0.1873 nan 0.1000 -0.0007
## 220 0.1662 nan 0.1000 0.0001
## 240 0.1492 nan 0.1000 -0.0013
## 260 0.1349 nan 0.1000 -0.0003
## 280 0.1220 nan 0.1000 -0.0004
## 300 0.1091 nan 0.1000 -0.0008
## 320 0.0974 nan 0.1000 -0.0005
## 340 0.0871 nan 0.1000 -0.0005
## 360 0.0780 nan 0.1000 -0.0003
## 380 0.0707 nan 0.1000 -0.0002
## 400 0.0638 nan 0.1000 -0.0002
## 420 0.0577 nan 0.1000 -0.0002
## 440 0.0524 nan 0.1000 -0.0002
## 460 0.0476 nan 0.1000 -0.0002
## 480 0.0436 nan 0.1000 -0.0002
## 500 0.0402 nan 0.1000 -0.0003
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2334 nan 0.1000 0.0357
## 2 1.1634 nan 0.1000 0.0333
## 3 1.0990 nan 0.1000 0.0266
## 4 1.0466 nan 0.1000 0.0235
## 5 0.9980 nan 0.1000 0.0184
## 6 0.9624 nan 0.1000 0.0128
## 7 0.9263 nan 0.1000 0.0155
## 8 0.8882 nan 0.1000 0.0108
## 9 0.8584 nan 0.1000 0.0092
## 10 0.8294 nan 0.1000 0.0105
## 20 0.6632 nan 0.1000 0.0021
## 40 0.5049 nan 0.1000 -0.0004
## 60 0.4016 nan 0.1000 -0.0003
## 80 0.3368 nan 0.1000 -0.0001
## 100 0.2807 nan 0.1000 0.0007
## 120 0.2411 nan 0.1000 -0.0009
## 140 0.2101 nan 0.1000 -0.0010
## 160 0.1790 nan 0.1000 -0.0011
## 180 0.1535 nan 0.1000 -0.0002
## 200 0.1304 nan 0.1000 -0.0003
## 220 0.1146 nan 0.1000 -0.0005
## 240 0.0986 nan 0.1000 -0.0001
## 260 0.0856 nan 0.1000 0.0000
## 280 0.0750 nan 0.1000 -0.0002
## 300 0.0662 nan 0.1000 -0.0002
## 320 0.0578 nan 0.1000 -0.0001
## 340 0.0511 nan 0.1000 -0.0002
## 360 0.0452 nan 0.1000 -0.0002
## 380 0.0398 nan 0.1000 -0.0000
## 400 0.0350 nan 0.1000 -0.0001
## 420 0.0311 nan 0.1000 -0.0000
## 440 0.0272 nan 0.1000 -0.0000
## 460 0.0243 nan 0.1000 -0.0001
## 480 0.0216 nan 0.1000 -0.0001
## 500 0.0191 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2334 nan 0.1000 0.0404
## 2 1.1633 nan 0.1000 0.0305
## 3 1.1051 nan 0.1000 0.0221
## 4 1.0559 nan 0.1000 0.0218
## 5 1.0143 nan 0.1000 0.0155
## 6 0.9736 nan 0.1000 0.0180
## 7 0.9393 nan 0.1000 0.0125
## 8 0.9097 nan 0.1000 0.0109
## 9 0.8770 nan 0.1000 0.0125
## 10 0.8458 nan 0.1000 0.0093
## 20 0.6754 nan 0.1000 0.0022
## 40 0.5126 nan 0.1000 0.0006
## 60 0.4212 nan 0.1000 -0.0014
## 80 0.3528 nan 0.1000 -0.0023
## 100 0.2925 nan 0.1000 -0.0015
## 120 0.2528 nan 0.1000 -0.0008
## 140 0.2138 nan 0.1000 -0.0000
## 160 0.1849 nan 0.1000 -0.0006
## 180 0.1569 nan 0.1000 -0.0003
## 200 0.1363 nan 0.1000 -0.0005
## 220 0.1185 nan 0.1000 -0.0001
## 240 0.1035 nan 0.1000 -0.0004
## 260 0.0900 nan 0.1000 -0.0005
## 280 0.0788 nan 0.1000 -0.0003
## 300 0.0686 nan 0.1000 -0.0004
## 320 0.0611 nan 0.1000 -0.0002
## 340 0.0540 nan 0.1000 -0.0001
## 360 0.0478 nan 0.1000 -0.0003
## 380 0.0420 nan 0.1000 -0.0002
## 400 0.0376 nan 0.1000 -0.0002
## 420 0.0336 nan 0.1000 -0.0002
## 440 0.0303 nan 0.1000 -0.0001
## 460 0.0271 nan 0.1000 -0.0001
## 480 0.0239 nan 0.1000 -0.0001
## 500 0.0211 nan 0.1000 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2317 nan 0.1000 0.0425
## 2 1.1614 nan 0.1000 0.0332
## 3 1.0980 nan 0.1000 0.0281
## 4 1.0501 nan 0.1000 0.0224
## 5 1.0042 nan 0.1000 0.0166
## 6 0.9622 nan 0.1000 0.0166
## 7 0.9258 nan 0.1000 0.0145
## 8 0.8920 nan 0.1000 0.0108
## 9 0.8620 nan 0.1000 0.0111
## 10 0.8406 nan 0.1000 0.0068
## 20 0.6752 nan 0.1000 0.0010
## 40 0.5242 nan 0.1000 -0.0005
## 60 0.4385 nan 0.1000 -0.0010
## 80 0.3599 nan 0.1000 -0.0008
## 100 0.3034 nan 0.1000 -0.0004
## 120 0.2601 nan 0.1000 -0.0004
## 140 0.2176 nan 0.1000 -0.0013
## 160 0.1902 nan 0.1000 -0.0009
## 180 0.1639 nan 0.1000 -0.0003
## 200 0.1444 nan 0.1000 -0.0005
## 220 0.1264 nan 0.1000 -0.0006
## 240 0.1121 nan 0.1000 -0.0005
## 260 0.0983 nan 0.1000 -0.0003
## 280 0.0862 nan 0.1000 -0.0004
## 300 0.0766 nan 0.1000 -0.0003
## 320 0.0678 nan 0.1000 -0.0003
## 340 0.0608 nan 0.1000 -0.0001
## 360 0.0535 nan 0.1000 -0.0001
## 380 0.0483 nan 0.1000 -0.0002
## 400 0.0429 nan 0.1000 -0.0000
## 420 0.0386 nan 0.1000 -0.0002
## 440 0.0340 nan 0.1000 -0.0001
## 460 0.0302 nan 0.1000 -0.0001
## 480 0.0268 nan 0.1000 -0.0002
## 500 0.0237 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3191 nan 0.0010 0.0003
## 3 1.3184 nan 0.0010 0.0003
## 4 1.3176 nan 0.0010 0.0004
## 5 1.3167 nan 0.0010 0.0004
## 6 1.3160 nan 0.0010 0.0004
## 7 1.3151 nan 0.0010 0.0004
## 8 1.3144 nan 0.0010 0.0004
## 9 1.3137 nan 0.0010 0.0003
## 10 1.3129 nan 0.0010 0.0003
## 20 1.3048 nan 0.0010 0.0003
## 40 1.2894 nan 0.0010 0.0003
## 60 1.2741 nan 0.0010 0.0004
## 80 1.2597 nan 0.0010 0.0004
## 100 1.2452 nan 0.0010 0.0004
## 120 1.2315 nan 0.0010 0.0003
## 140 1.2181 nan 0.0010 0.0003
## 160 1.2051 nan 0.0010 0.0003
## 180 1.1929 nan 0.0010 0.0003
## 200 1.1810 nan 0.0010 0.0002
## 220 1.1694 nan 0.0010 0.0003
## 240 1.1581 nan 0.0010 0.0002
## 260 1.1469 nan 0.0010 0.0002
## 280 1.1362 nan 0.0010 0.0002
## 300 1.1258 nan 0.0010 0.0002
## 320 1.1155 nan 0.0010 0.0002
## 340 1.1057 nan 0.0010 0.0002
## 360 1.0962 nan 0.0010 0.0002
## 380 1.0870 nan 0.0010 0.0002
## 400 1.0783 nan 0.0010 0.0002
## 420 1.0694 nan 0.0010 0.0002
## 440 1.0609 nan 0.0010 0.0002
## 460 1.0525 nan 0.0010 0.0002
## 480 1.0444 nan 0.0010 0.0002
## 500 1.0364 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3199 nan 0.0010 0.0004
## 2 1.3191 nan 0.0010 0.0004
## 3 1.3182 nan 0.0010 0.0004
## 4 1.3174 nan 0.0010 0.0003
## 5 1.3165 nan 0.0010 0.0004
## 6 1.3158 nan 0.0010 0.0003
## 7 1.3150 nan 0.0010 0.0003
## 8 1.3141 nan 0.0010 0.0004
## 9 1.3134 nan 0.0010 0.0003
## 10 1.3125 nan 0.0010 0.0004
## 20 1.3044 nan 0.0010 0.0004
## 40 1.2891 nan 0.0010 0.0003
## 60 1.2739 nan 0.0010 0.0003
## 80 1.2597 nan 0.0010 0.0003
## 100 1.2459 nan 0.0010 0.0003
## 120 1.2321 nan 0.0010 0.0003
## 140 1.2190 nan 0.0010 0.0003
## 160 1.2062 nan 0.0010 0.0003
## 180 1.1937 nan 0.0010 0.0002
## 200 1.1815 nan 0.0010 0.0003
## 220 1.1699 nan 0.0010 0.0003
## 240 1.1587 nan 0.0010 0.0003
## 260 1.1476 nan 0.0010 0.0002
## 280 1.1365 nan 0.0010 0.0002
## 300 1.1259 nan 0.0010 0.0002
## 320 1.1159 nan 0.0010 0.0002
## 340 1.1064 nan 0.0010 0.0002
## 360 1.0965 nan 0.0010 0.0002
## 380 1.0871 nan 0.0010 0.0002
## 400 1.0780 nan 0.0010 0.0002
## 420 1.0696 nan 0.0010 0.0001
## 440 1.0610 nan 0.0010 0.0002
## 460 1.0526 nan 0.0010 0.0002
## 480 1.0446 nan 0.0010 0.0001
## 500 1.0367 nan 0.0010 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0003
## 2 1.3191 nan 0.0010 0.0003
## 3 1.3182 nan 0.0010 0.0004
## 4 1.3173 nan 0.0010 0.0004
## 5 1.3166 nan 0.0010 0.0004
## 6 1.3158 nan 0.0010 0.0004
## 7 1.3150 nan 0.0010 0.0004
## 8 1.3142 nan 0.0010 0.0004
## 9 1.3134 nan 0.0010 0.0004
## 10 1.3126 nan 0.0010 0.0004
## 20 1.3046 nan 0.0010 0.0004
## 40 1.2892 nan 0.0010 0.0003
## 60 1.2741 nan 0.0010 0.0003
## 80 1.2598 nan 0.0010 0.0003
## 100 1.2460 nan 0.0010 0.0004
## 120 1.2328 nan 0.0010 0.0003
## 140 1.2199 nan 0.0010 0.0003
## 160 1.2071 nan 0.0010 0.0003
## 180 1.1948 nan 0.0010 0.0002
## 200 1.1829 nan 0.0010 0.0003
## 220 1.1712 nan 0.0010 0.0003
## 240 1.1601 nan 0.0010 0.0002
## 260 1.1492 nan 0.0010 0.0003
## 280 1.1382 nan 0.0010 0.0002
## 300 1.1281 nan 0.0010 0.0002
## 320 1.1179 nan 0.0010 0.0002
## 340 1.1082 nan 0.0010 0.0002
## 360 1.0988 nan 0.0010 0.0002
## 380 1.0897 nan 0.0010 0.0002
## 400 1.0804 nan 0.0010 0.0002
## 420 1.0716 nan 0.0010 0.0001
## 440 1.0630 nan 0.0010 0.0002
## 460 1.0547 nan 0.0010 0.0002
## 480 1.0463 nan 0.0010 0.0002
## 500 1.0383 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3180 nan 0.0010 0.0004
## 4 1.3172 nan 0.0010 0.0004
## 5 1.3164 nan 0.0010 0.0004
## 6 1.3155 nan 0.0010 0.0004
## 7 1.3146 nan 0.0010 0.0004
## 8 1.3137 nan 0.0010 0.0004
## 9 1.3128 nan 0.0010 0.0004
## 10 1.3119 nan 0.0010 0.0004
## 20 1.3033 nan 0.0010 0.0004
## 40 1.2867 nan 0.0010 0.0003
## 60 1.2710 nan 0.0010 0.0003
## 80 1.2558 nan 0.0010 0.0003
## 100 1.2410 nan 0.0010 0.0004
## 120 1.2265 nan 0.0010 0.0003
## 140 1.2127 nan 0.0010 0.0003
## 160 1.1990 nan 0.0010 0.0003
## 180 1.1856 nan 0.0010 0.0003
## 200 1.1731 nan 0.0010 0.0002
## 220 1.1606 nan 0.0010 0.0002
## 240 1.1486 nan 0.0010 0.0003
## 260 1.1369 nan 0.0010 0.0002
## 280 1.1254 nan 0.0010 0.0002
## 300 1.1144 nan 0.0010 0.0002
## 320 1.1038 nan 0.0010 0.0002
## 340 1.0931 nan 0.0010 0.0003
## 360 1.0830 nan 0.0010 0.0002
## 380 1.0732 nan 0.0010 0.0002
## 400 1.0634 nan 0.0010 0.0002
## 420 1.0541 nan 0.0010 0.0002
## 440 1.0448 nan 0.0010 0.0002
## 460 1.0362 nan 0.0010 0.0002
## 480 1.0276 nan 0.0010 0.0002
## 500 1.0193 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3190 nan 0.0010 0.0004
## 3 1.3181 nan 0.0010 0.0004
## 4 1.3172 nan 0.0010 0.0004
## 5 1.3164 nan 0.0010 0.0003
## 6 1.3155 nan 0.0010 0.0004
## 7 1.3146 nan 0.0010 0.0004
## 8 1.3138 nan 0.0010 0.0004
## 9 1.3129 nan 0.0010 0.0004
## 10 1.3121 nan 0.0010 0.0004
## 20 1.3035 nan 0.0010 0.0004
## 40 1.2869 nan 0.0010 0.0004
## 60 1.2711 nan 0.0010 0.0004
## 80 1.2560 nan 0.0010 0.0003
## 100 1.2412 nan 0.0010 0.0003
## 120 1.2268 nan 0.0010 0.0003
## 140 1.2128 nan 0.0010 0.0003
## 160 1.1993 nan 0.0010 0.0003
## 180 1.1862 nan 0.0010 0.0003
## 200 1.1738 nan 0.0010 0.0003
## 220 1.1615 nan 0.0010 0.0003
## 240 1.1494 nan 0.0010 0.0002
## 260 1.1377 nan 0.0010 0.0002
## 280 1.1261 nan 0.0010 0.0003
## 300 1.1151 nan 0.0010 0.0002
## 320 1.1048 nan 0.0010 0.0003
## 340 1.0943 nan 0.0010 0.0002
## 360 1.0842 nan 0.0010 0.0002
## 380 1.0742 nan 0.0010 0.0002
## 400 1.0648 nan 0.0010 0.0002
## 420 1.0553 nan 0.0010 0.0002
## 440 1.0460 nan 0.0010 0.0002
## 460 1.0372 nan 0.0010 0.0002
## 480 1.0287 nan 0.0010 0.0002
## 500 1.0202 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3181 nan 0.0010 0.0003
## 4 1.3172 nan 0.0010 0.0004
## 5 1.3164 nan 0.0010 0.0004
## 6 1.3156 nan 0.0010 0.0004
## 7 1.3147 nan 0.0010 0.0004
## 8 1.3139 nan 0.0010 0.0004
## 9 1.3131 nan 0.0010 0.0004
## 10 1.3122 nan 0.0010 0.0004
## 20 1.3038 nan 0.0010 0.0004
## 40 1.2875 nan 0.0010 0.0004
## 60 1.2716 nan 0.0010 0.0004
## 80 1.2565 nan 0.0010 0.0004
## 100 1.2420 nan 0.0010 0.0003
## 120 1.2278 nan 0.0010 0.0003
## 140 1.2139 nan 0.0010 0.0003
## 160 1.2005 nan 0.0010 0.0003
## 180 1.1876 nan 0.0010 0.0003
## 200 1.1748 nan 0.0010 0.0003
## 220 1.1625 nan 0.0010 0.0002
## 240 1.1507 nan 0.0010 0.0003
## 260 1.1392 nan 0.0010 0.0003
## 280 1.1282 nan 0.0010 0.0002
## 300 1.1176 nan 0.0010 0.0002
## 320 1.1069 nan 0.0010 0.0002
## 340 1.0964 nan 0.0010 0.0002
## 360 1.0863 nan 0.0010 0.0002
## 380 1.0766 nan 0.0010 0.0002
## 400 1.0671 nan 0.0010 0.0002
## 420 1.0581 nan 0.0010 0.0002
## 440 1.0490 nan 0.0010 0.0002
## 460 1.0402 nan 0.0010 0.0002
## 480 1.0317 nan 0.0010 0.0002
## 500 1.0232 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3197 nan 0.0010 0.0004
## 2 1.3188 nan 0.0010 0.0004
## 3 1.3178 nan 0.0010 0.0004
## 4 1.3169 nan 0.0010 0.0004
## 5 1.3160 nan 0.0010 0.0003
## 6 1.3151 nan 0.0010 0.0004
## 7 1.3142 nan 0.0010 0.0004
## 8 1.3132 nan 0.0010 0.0004
## 9 1.3124 nan 0.0010 0.0004
## 10 1.3115 nan 0.0010 0.0003
## 20 1.3027 nan 0.0010 0.0004
## 40 1.2857 nan 0.0010 0.0003
## 60 1.2694 nan 0.0010 0.0004
## 80 1.2536 nan 0.0010 0.0004
## 100 1.2382 nan 0.0010 0.0004
## 120 1.2227 nan 0.0010 0.0003
## 140 1.2082 nan 0.0010 0.0003
## 160 1.1944 nan 0.0010 0.0003
## 180 1.1809 nan 0.0010 0.0002
## 200 1.1677 nan 0.0010 0.0003
## 220 1.1545 nan 0.0010 0.0003
## 240 1.1418 nan 0.0010 0.0003
## 260 1.1293 nan 0.0010 0.0003
## 280 1.1174 nan 0.0010 0.0003
## 300 1.1059 nan 0.0010 0.0003
## 320 1.0946 nan 0.0010 0.0003
## 340 1.0838 nan 0.0010 0.0003
## 360 1.0732 nan 0.0010 0.0002
## 380 1.0630 nan 0.0010 0.0002
## 400 1.0532 nan 0.0010 0.0002
## 420 1.0436 nan 0.0010 0.0002
## 440 1.0343 nan 0.0010 0.0002
## 460 1.0252 nan 0.0010 0.0002
## 480 1.0159 nan 0.0010 0.0002
## 500 1.0071 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3188 nan 0.0010 0.0004
## 3 1.3179 nan 0.0010 0.0004
## 4 1.3170 nan 0.0010 0.0005
## 5 1.3161 nan 0.0010 0.0004
## 6 1.3153 nan 0.0010 0.0004
## 7 1.3144 nan 0.0010 0.0004
## 8 1.3136 nan 0.0010 0.0004
## 9 1.3127 nan 0.0010 0.0004
## 10 1.3118 nan 0.0010 0.0004
## 20 1.3030 nan 0.0010 0.0004
## 40 1.2857 nan 0.0010 0.0004
## 60 1.2692 nan 0.0010 0.0003
## 80 1.2532 nan 0.0010 0.0004
## 100 1.2381 nan 0.0010 0.0004
## 120 1.2231 nan 0.0010 0.0003
## 140 1.2085 nan 0.0010 0.0003
## 160 1.1942 nan 0.0010 0.0003
## 180 1.1804 nan 0.0010 0.0003
## 200 1.1671 nan 0.0010 0.0003
## 220 1.1540 nan 0.0010 0.0003
## 240 1.1418 nan 0.0010 0.0003
## 260 1.1298 nan 0.0010 0.0003
## 280 1.1181 nan 0.0010 0.0002
## 300 1.1066 nan 0.0010 0.0002
## 320 1.0954 nan 0.0010 0.0002
## 340 1.0846 nan 0.0010 0.0003
## 360 1.0742 nan 0.0010 0.0002
## 380 1.0642 nan 0.0010 0.0002
## 400 1.0543 nan 0.0010 0.0002
## 420 1.0447 nan 0.0010 0.0002
## 440 1.0354 nan 0.0010 0.0002
## 460 1.0261 nan 0.0010 0.0002
## 480 1.0171 nan 0.0010 0.0002
## 500 1.0084 nan 0.0010 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3180 nan 0.0010 0.0004
## 4 1.3172 nan 0.0010 0.0004
## 5 1.3163 nan 0.0010 0.0004
## 6 1.3154 nan 0.0010 0.0004
## 7 1.3145 nan 0.0010 0.0004
## 8 1.3136 nan 0.0010 0.0004
## 9 1.3127 nan 0.0010 0.0004
## 10 1.3118 nan 0.0010 0.0004
## 20 1.3033 nan 0.0010 0.0004
## 40 1.2865 nan 0.0010 0.0004
## 60 1.2704 nan 0.0010 0.0004
## 80 1.2544 nan 0.0010 0.0003
## 100 1.2390 nan 0.0010 0.0004
## 120 1.2241 nan 0.0010 0.0003
## 140 1.2096 nan 0.0010 0.0003
## 160 1.1959 nan 0.0010 0.0003
## 180 1.1825 nan 0.0010 0.0003
## 200 1.1696 nan 0.0010 0.0003
## 220 1.1571 nan 0.0010 0.0003
## 240 1.1448 nan 0.0010 0.0003
## 260 1.1326 nan 0.0010 0.0003
## 280 1.1210 nan 0.0010 0.0002
## 300 1.1095 nan 0.0010 0.0002
## 320 1.0986 nan 0.0010 0.0002
## 340 1.0880 nan 0.0010 0.0002
## 360 1.0777 nan 0.0010 0.0002
## 380 1.0675 nan 0.0010 0.0002
## 400 1.0576 nan 0.0010 0.0002
## 420 1.0481 nan 0.0010 0.0001
## 440 1.0386 nan 0.0010 0.0002
## 460 1.0295 nan 0.0010 0.0002
## 480 1.0207 nan 0.0010 0.0002
## 500 1.0119 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3125 nan 0.0100 0.0041
## 2 1.3044 nan 0.0100 0.0036
## 3 1.2964 nan 0.0100 0.0036
## 4 1.2887 nan 0.0100 0.0034
## 5 1.2813 nan 0.0100 0.0035
## 6 1.2748 nan 0.0100 0.0028
## 7 1.2670 nan 0.0100 0.0030
## 8 1.2603 nan 0.0100 0.0028
## 9 1.2535 nan 0.0100 0.0029
## 10 1.2459 nan 0.0100 0.0032
## 20 1.1812 nan 0.0100 0.0026
## 40 1.0792 nan 0.0100 0.0019
## 60 1.0000 nan 0.0100 0.0013
## 80 0.9375 nan 0.0100 0.0012
## 100 0.8883 nan 0.0100 0.0005
## 120 0.8462 nan 0.0100 0.0004
## 140 0.8108 nan 0.0100 0.0004
## 160 0.7828 nan 0.0100 0.0004
## 180 0.7581 nan 0.0100 0.0001
## 200 0.7366 nan 0.0100 0.0004
## 220 0.7172 nan 0.0100 0.0002
## 240 0.6985 nan 0.0100 0.0003
## 260 0.6828 nan 0.0100 0.0001
## 280 0.6685 nan 0.0100 -0.0001
## 300 0.6559 nan 0.0100 0.0000
## 320 0.6439 nan 0.0100 -0.0001
## 340 0.6323 nan 0.0100 -0.0002
## 360 0.6216 nan 0.0100 0.0000
## 380 0.6109 nan 0.0100 -0.0001
## 400 0.6008 nan 0.0100 0.0001
## 420 0.5915 nan 0.0100 -0.0001
## 440 0.5823 nan 0.0100 0.0000
## 460 0.5730 nan 0.0100 -0.0000
## 480 0.5642 nan 0.0100 -0.0000
## 500 0.5553 nan 0.0100 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3127 nan 0.0100 0.0033
## 2 1.3045 nan 0.0100 0.0037
## 3 1.2965 nan 0.0100 0.0035
## 4 1.2886 nan 0.0100 0.0039
## 5 1.2818 nan 0.0100 0.0026
## 6 1.2736 nan 0.0100 0.0038
## 7 1.2661 nan 0.0100 0.0034
## 8 1.2588 nan 0.0100 0.0032
## 9 1.2518 nan 0.0100 0.0031
## 10 1.2446 nan 0.0100 0.0031
## 20 1.1801 nan 0.0100 0.0021
## 40 1.0792 nan 0.0100 0.0020
## 60 1.0022 nan 0.0100 0.0014
## 80 0.9391 nan 0.0100 0.0010
## 100 0.8896 nan 0.0100 0.0005
## 120 0.8474 nan 0.0100 0.0008
## 140 0.8140 nan 0.0100 0.0006
## 160 0.7849 nan 0.0100 0.0004
## 180 0.7586 nan 0.0100 0.0003
## 200 0.7381 nan 0.0100 0.0002
## 220 0.7196 nan 0.0100 0.0001
## 240 0.7037 nan 0.0100 0.0002
## 260 0.6891 nan 0.0100 0.0001
## 280 0.6754 nan 0.0100 0.0002
## 300 0.6636 nan 0.0100 -0.0000
## 320 0.6515 nan 0.0100 0.0001
## 340 0.6405 nan 0.0100 -0.0000
## 360 0.6307 nan 0.0100 0.0000
## 380 0.6207 nan 0.0100 0.0000
## 400 0.6106 nan 0.0100 -0.0002
## 420 0.6013 nan 0.0100 -0.0000
## 440 0.5928 nan 0.0100 -0.0001
## 460 0.5841 nan 0.0100 -0.0001
## 480 0.5759 nan 0.0100 0.0000
## 500 0.5677 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3127 nan 0.0100 0.0039
## 2 1.3043 nan 0.0100 0.0038
## 3 1.2968 nan 0.0100 0.0036
## 4 1.2891 nan 0.0100 0.0036
## 5 1.2816 nan 0.0100 0.0036
## 6 1.2744 nan 0.0100 0.0030
## 7 1.2671 nan 0.0100 0.0033
## 8 1.2601 nan 0.0100 0.0030
## 9 1.2527 nan 0.0100 0.0035
## 10 1.2455 nan 0.0100 0.0031
## 20 1.1812 nan 0.0100 0.0025
## 40 1.0788 nan 0.0100 0.0019
## 60 1.0009 nan 0.0100 0.0013
## 80 0.9399 nan 0.0100 0.0010
## 100 0.8913 nan 0.0100 0.0007
## 120 0.8513 nan 0.0100 0.0005
## 140 0.8170 nan 0.0100 0.0005
## 160 0.7886 nan 0.0100 0.0004
## 180 0.7646 nan 0.0100 0.0002
## 200 0.7441 nan 0.0100 0.0002
## 220 0.7261 nan 0.0100 0.0001
## 240 0.7106 nan 0.0100 0.0000
## 260 0.6956 nan 0.0100 -0.0001
## 280 0.6809 nan 0.0100 0.0001
## 300 0.6688 nan 0.0100 0.0001
## 320 0.6575 nan 0.0100 -0.0000
## 340 0.6470 nan 0.0100 0.0001
## 360 0.6378 nan 0.0100 0.0000
## 380 0.6274 nan 0.0100 0.0000
## 400 0.6187 nan 0.0100 -0.0002
## 420 0.6107 nan 0.0100 -0.0001
## 440 0.6030 nan 0.0100 -0.0000
## 460 0.5956 nan 0.0100 0.0001
## 480 0.5872 nan 0.0100 -0.0001
## 500 0.5786 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3123 nan 0.0100 0.0034
## 2 1.3040 nan 0.0100 0.0040
## 3 1.2956 nan 0.0100 0.0035
## 4 1.2876 nan 0.0100 0.0033
## 5 1.2786 nan 0.0100 0.0038
## 6 1.2706 nan 0.0100 0.0034
## 7 1.2634 nan 0.0100 0.0031
## 8 1.2557 nan 0.0100 0.0034
## 9 1.2486 nan 0.0100 0.0029
## 10 1.2406 nan 0.0100 0.0035
## 20 1.1723 nan 0.0100 0.0029
## 40 1.0649 nan 0.0100 0.0017
## 60 0.9798 nan 0.0100 0.0017
## 80 0.9136 nan 0.0100 0.0014
## 100 0.8610 nan 0.0100 0.0007
## 120 0.8169 nan 0.0100 0.0005
## 140 0.7817 nan 0.0100 0.0003
## 160 0.7513 nan 0.0100 0.0003
## 180 0.7240 nan 0.0100 0.0003
## 200 0.7009 nan 0.0100 0.0002
## 220 0.6820 nan 0.0100 0.0002
## 240 0.6646 nan 0.0100 0.0000
## 260 0.6470 nan 0.0100 0.0001
## 280 0.6327 nan 0.0100 0.0001
## 300 0.6192 nan 0.0100 -0.0000
## 320 0.6050 nan 0.0100 -0.0000
## 340 0.5908 nan 0.0100 0.0001
## 360 0.5787 nan 0.0100 -0.0001
## 380 0.5683 nan 0.0100 -0.0001
## 400 0.5578 nan 0.0100 -0.0000
## 420 0.5474 nan 0.0100 -0.0001
## 440 0.5371 nan 0.0100 -0.0000
## 460 0.5280 nan 0.0100 -0.0001
## 480 0.5183 nan 0.0100 0.0001
## 500 0.5087 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3117 nan 0.0100 0.0041
## 2 1.3028 nan 0.0100 0.0038
## 3 1.2935 nan 0.0100 0.0037
## 4 1.2845 nan 0.0100 0.0038
## 5 1.2767 nan 0.0100 0.0033
## 6 1.2686 nan 0.0100 0.0036
## 7 1.2618 nan 0.0100 0.0030
## 8 1.2551 nan 0.0100 0.0032
## 9 1.2479 nan 0.0100 0.0034
## 10 1.2405 nan 0.0100 0.0029
## 20 1.1725 nan 0.0100 0.0030
## 40 1.0641 nan 0.0100 0.0022
## 60 0.9802 nan 0.0100 0.0014
## 80 0.9140 nan 0.0100 0.0011
## 100 0.8620 nan 0.0100 0.0009
## 120 0.8196 nan 0.0100 0.0006
## 140 0.7856 nan 0.0100 0.0003
## 160 0.7581 nan 0.0100 0.0004
## 180 0.7323 nan 0.0100 0.0004
## 200 0.7082 nan 0.0100 0.0003
## 220 0.6874 nan 0.0100 0.0001
## 240 0.6686 nan 0.0100 0.0003
## 260 0.6519 nan 0.0100 0.0002
## 280 0.6372 nan 0.0100 0.0003
## 300 0.6228 nan 0.0100 0.0001
## 320 0.6089 nan 0.0100 -0.0001
## 340 0.5961 nan 0.0100 0.0000
## 360 0.5835 nan 0.0100 0.0000
## 380 0.5727 nan 0.0100 -0.0000
## 400 0.5626 nan 0.0100 -0.0001
## 420 0.5530 nan 0.0100 -0.0001
## 440 0.5424 nan 0.0100 -0.0002
## 460 0.5329 nan 0.0100 -0.0001
## 480 0.5239 nan 0.0100 0.0000
## 500 0.5149 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3124 nan 0.0100 0.0038
## 2 1.3041 nan 0.0100 0.0038
## 3 1.2963 nan 0.0100 0.0038
## 4 1.2878 nan 0.0100 0.0039
## 5 1.2796 nan 0.0100 0.0037
## 6 1.2718 nan 0.0100 0.0036
## 7 1.2645 nan 0.0100 0.0032
## 8 1.2566 nan 0.0100 0.0034
## 9 1.2494 nan 0.0100 0.0029
## 10 1.2418 nan 0.0100 0.0036
## 20 1.1770 nan 0.0100 0.0024
## 40 1.0683 nan 0.0100 0.0019
## 60 0.9846 nan 0.0100 0.0016
## 80 0.9175 nan 0.0100 0.0011
## 100 0.8666 nan 0.0100 0.0008
## 120 0.8255 nan 0.0100 0.0005
## 140 0.7905 nan 0.0100 0.0004
## 160 0.7615 nan 0.0100 0.0003
## 180 0.7373 nan 0.0100 0.0004
## 200 0.7140 nan 0.0100 0.0003
## 220 0.6935 nan 0.0100 0.0001
## 240 0.6758 nan 0.0100 -0.0001
## 260 0.6600 nan 0.0100 0.0001
## 280 0.6453 nan 0.0100 0.0001
## 300 0.6319 nan 0.0100 0.0001
## 320 0.6198 nan 0.0100 -0.0002
## 340 0.6086 nan 0.0100 -0.0000
## 360 0.5968 nan 0.0100 -0.0001
## 380 0.5871 nan 0.0100 -0.0000
## 400 0.5770 nan 0.0100 0.0000
## 420 0.5665 nan 0.0100 0.0000
## 440 0.5574 nan 0.0100 0.0001
## 460 0.5480 nan 0.0100 -0.0001
## 480 0.5384 nan 0.0100 -0.0000
## 500 0.5293 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3105 nan 0.0100 0.0044
## 2 1.3022 nan 0.0100 0.0037
## 3 1.2935 nan 0.0100 0.0038
## 4 1.2844 nan 0.0100 0.0041
## 5 1.2761 nan 0.0100 0.0037
## 6 1.2687 nan 0.0100 0.0031
## 7 1.2603 nan 0.0100 0.0035
## 8 1.2520 nan 0.0100 0.0035
## 9 1.2441 nan 0.0100 0.0038
## 10 1.2357 nan 0.0100 0.0039
## 20 1.1630 nan 0.0100 0.0029
## 40 1.0504 nan 0.0100 0.0016
## 60 0.9637 nan 0.0100 0.0013
## 80 0.8949 nan 0.0100 0.0012
## 100 0.8394 nan 0.0100 0.0007
## 120 0.7945 nan 0.0100 0.0004
## 140 0.7555 nan 0.0100 0.0005
## 160 0.7230 nan 0.0100 0.0003
## 180 0.6945 nan 0.0100 0.0004
## 200 0.6703 nan 0.0100 0.0003
## 220 0.6479 nan 0.0100 0.0001
## 240 0.6282 nan 0.0100 0.0002
## 260 0.6104 nan 0.0100 0.0001
## 280 0.5936 nan 0.0100 0.0001
## 300 0.5779 nan 0.0100 0.0002
## 320 0.5639 nan 0.0100 0.0001
## 340 0.5512 nan 0.0100 0.0001
## 360 0.5383 nan 0.0100 -0.0001
## 380 0.5268 nan 0.0100 0.0000
## 400 0.5153 nan 0.0100 -0.0001
## 420 0.5033 nan 0.0100 -0.0001
## 440 0.4925 nan 0.0100 0.0002
## 460 0.4821 nan 0.0100 -0.0000
## 480 0.4721 nan 0.0100 0.0000
## 500 0.4618 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3106 nan 0.0100 0.0044
## 2 1.3019 nan 0.0100 0.0042
## 3 1.2929 nan 0.0100 0.0040
## 4 1.2843 nan 0.0100 0.0036
## 5 1.2753 nan 0.0100 0.0037
## 6 1.2671 nan 0.0100 0.0038
## 7 1.2589 nan 0.0100 0.0035
## 8 1.2503 nan 0.0100 0.0037
## 9 1.2429 nan 0.0100 0.0032
## 10 1.2352 nan 0.0100 0.0037
## 20 1.1642 nan 0.0100 0.0031
## 40 1.0505 nan 0.0100 0.0017
## 60 0.9653 nan 0.0100 0.0016
## 80 0.8967 nan 0.0100 0.0011
## 100 0.8434 nan 0.0100 0.0010
## 120 0.7990 nan 0.0100 0.0007
## 140 0.7615 nan 0.0100 0.0005
## 160 0.7302 nan 0.0100 0.0003
## 180 0.7034 nan 0.0100 0.0004
## 200 0.6799 nan 0.0100 0.0001
## 220 0.6590 nan 0.0100 -0.0000
## 240 0.6402 nan 0.0100 -0.0001
## 260 0.6225 nan 0.0100 0.0001
## 280 0.6061 nan 0.0100 -0.0000
## 300 0.5907 nan 0.0100 -0.0001
## 320 0.5765 nan 0.0100 -0.0003
## 340 0.5634 nan 0.0100 -0.0000
## 360 0.5512 nan 0.0100 0.0001
## 380 0.5380 nan 0.0100 -0.0000
## 400 0.5266 nan 0.0100 0.0001
## 420 0.5153 nan 0.0100 -0.0001
## 440 0.5045 nan 0.0100 0.0000
## 460 0.4939 nan 0.0100 -0.0001
## 480 0.4832 nan 0.0100 0.0000
## 500 0.4740 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3113 nan 0.0100 0.0042
## 2 1.3020 nan 0.0100 0.0042
## 3 1.2940 nan 0.0100 0.0037
## 4 1.2857 nan 0.0100 0.0036
## 5 1.2773 nan 0.0100 0.0040
## 6 1.2692 nan 0.0100 0.0038
## 7 1.2616 nan 0.0100 0.0034
## 8 1.2535 nan 0.0100 0.0034
## 9 1.2461 nan 0.0100 0.0032
## 10 1.2390 nan 0.0100 0.0033
## 20 1.1692 nan 0.0100 0.0026
## 40 1.0585 nan 0.0100 0.0020
## 60 0.9745 nan 0.0100 0.0014
## 80 0.9082 nan 0.0100 0.0010
## 100 0.8530 nan 0.0100 0.0007
## 120 0.8104 nan 0.0100 0.0008
## 140 0.7759 nan 0.0100 0.0004
## 160 0.7442 nan 0.0100 0.0002
## 180 0.7164 nan 0.0100 0.0001
## 200 0.6920 nan 0.0100 0.0003
## 220 0.6715 nan 0.0100 0.0002
## 240 0.6533 nan 0.0100 0.0000
## 260 0.6359 nan 0.0100 0.0000
## 280 0.6199 nan 0.0100 0.0002
## 300 0.6052 nan 0.0100 0.0000
## 320 0.5914 nan 0.0100 0.0002
## 340 0.5775 nan 0.0100 0.0002
## 360 0.5645 nan 0.0100 0.0001
## 380 0.5527 nan 0.0100 -0.0000
## 400 0.5423 nan 0.0100 -0.0000
## 420 0.5320 nan 0.0100 0.0000
## 440 0.5214 nan 0.0100 0.0000
## 460 0.5113 nan 0.0100 -0.0001
## 480 0.5013 nan 0.0100 -0.0001
## 500 0.4917 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2333 nan 0.1000 0.0384
## 2 1.1751 nan 0.1000 0.0211
## 3 1.1296 nan 0.1000 0.0187
## 4 1.0791 nan 0.1000 0.0237
## 5 1.0393 nan 0.1000 0.0161
## 6 1.0005 nan 0.1000 0.0173
## 7 0.9673 nan 0.1000 0.0102
## 8 0.9372 nan 0.1000 0.0098
## 9 0.9113 nan 0.1000 0.0115
## 10 0.8892 nan 0.1000 0.0077
## 20 0.7379 nan 0.1000 0.0021
## 40 0.5983 nan 0.1000 0.0016
## 60 0.5242 nan 0.1000 -0.0009
## 80 0.4616 nan 0.1000 -0.0004
## 100 0.4131 nan 0.1000 -0.0000
## 120 0.3679 nan 0.1000 -0.0019
## 140 0.3269 nan 0.1000 0.0006
## 160 0.2903 nan 0.1000 -0.0005
## 180 0.2632 nan 0.1000 -0.0008
## 200 0.2368 nan 0.1000 -0.0010
## 220 0.2160 nan 0.1000 -0.0005
## 240 0.1955 nan 0.1000 -0.0006
## 260 0.1765 nan 0.1000 -0.0002
## 280 0.1615 nan 0.1000 -0.0007
## 300 0.1472 nan 0.1000 -0.0000
## 320 0.1369 nan 0.1000 -0.0001
## 340 0.1267 nan 0.1000 -0.0003
## 360 0.1177 nan 0.1000 0.0001
## 380 0.1101 nan 0.1000 -0.0001
## 400 0.1020 nan 0.1000 0.0001
## 420 0.0947 nan 0.1000 -0.0003
## 440 0.0883 nan 0.1000 -0.0002
## 460 0.0818 nan 0.1000 -0.0002
## 480 0.0759 nan 0.1000 -0.0001
## 500 0.0712 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2446 nan 0.1000 0.0345
## 2 1.1704 nan 0.1000 0.0320
## 3 1.1179 nan 0.1000 0.0212
## 4 1.0743 nan 0.1000 0.0183
## 5 1.0333 nan 0.1000 0.0160
## 6 0.9923 nan 0.1000 0.0170
## 7 0.9521 nan 0.1000 0.0161
## 8 0.9194 nan 0.1000 0.0147
## 9 0.8984 nan 0.1000 0.0078
## 10 0.8777 nan 0.1000 0.0065
## 20 0.7314 nan 0.1000 0.0028
## 40 0.6115 nan 0.1000 -0.0014
## 60 0.5357 nan 0.1000 -0.0002
## 80 0.4758 nan 0.1000 -0.0009
## 100 0.4222 nan 0.1000 -0.0004
## 120 0.3784 nan 0.1000 -0.0011
## 140 0.3405 nan 0.1000 -0.0015
## 160 0.3045 nan 0.1000 -0.0014
## 180 0.2780 nan 0.1000 0.0002
## 200 0.2548 nan 0.1000 -0.0007
## 220 0.2317 nan 0.1000 -0.0006
## 240 0.2135 nan 0.1000 -0.0008
## 260 0.1969 nan 0.1000 -0.0007
## 280 0.1808 nan 0.1000 -0.0003
## 300 0.1668 nan 0.1000 -0.0006
## 320 0.1532 nan 0.1000 -0.0008
## 340 0.1424 nan 0.1000 -0.0004
## 360 0.1332 nan 0.1000 -0.0003
## 380 0.1232 nan 0.1000 -0.0003
## 400 0.1147 nan 0.1000 -0.0002
## 420 0.1063 nan 0.1000 -0.0001
## 440 0.0980 nan 0.1000 -0.0006
## 460 0.0915 nan 0.1000 -0.0004
## 480 0.0844 nan 0.1000 -0.0003
## 500 0.0788 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2363 nan 0.1000 0.0388
## 2 1.1681 nan 0.1000 0.0294
## 3 1.1138 nan 0.1000 0.0262
## 4 1.0703 nan 0.1000 0.0196
## 5 1.0317 nan 0.1000 0.0184
## 6 0.9948 nan 0.1000 0.0154
## 7 0.9626 nan 0.1000 0.0124
## 8 0.9387 nan 0.1000 0.0071
## 9 0.9133 nan 0.1000 0.0126
## 10 0.8921 nan 0.1000 0.0065
## 20 0.7389 nan 0.1000 0.0059
## 40 0.6130 nan 0.1000 0.0004
## 60 0.5299 nan 0.1000 -0.0025
## 80 0.4716 nan 0.1000 -0.0004
## 100 0.4235 nan 0.1000 -0.0007
## 120 0.3777 nan 0.1000 -0.0012
## 140 0.3459 nan 0.1000 -0.0016
## 160 0.3119 nan 0.1000 -0.0007
## 180 0.2878 nan 0.1000 -0.0009
## 200 0.2633 nan 0.1000 -0.0004
## 220 0.2387 nan 0.1000 -0.0000
## 240 0.2201 nan 0.1000 -0.0010
## 260 0.2015 nan 0.1000 -0.0013
## 280 0.1856 nan 0.1000 -0.0006
## 300 0.1718 nan 0.1000 -0.0005
## 320 0.1601 nan 0.1000 -0.0005
## 340 0.1482 nan 0.1000 -0.0002
## 360 0.1378 nan 0.1000 -0.0004
## 380 0.1288 nan 0.1000 -0.0005
## 400 0.1199 nan 0.1000 -0.0004
## 420 0.1120 nan 0.1000 -0.0003
## 440 0.1057 nan 0.1000 -0.0003
## 460 0.0990 nan 0.1000 -0.0003
## 480 0.0925 nan 0.1000 -0.0006
## 500 0.0862 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2379 nan 0.1000 0.0347
## 2 1.1778 nan 0.1000 0.0266
## 3 1.1257 nan 0.1000 0.0245
## 4 1.0755 nan 0.1000 0.0198
## 5 1.0261 nan 0.1000 0.0223
## 6 0.9773 nan 0.1000 0.0158
## 7 0.9408 nan 0.1000 0.0117
## 8 0.9116 nan 0.1000 0.0132
## 9 0.8840 nan 0.1000 0.0111
## 10 0.8597 nan 0.1000 0.0093
## 20 0.7030 nan 0.1000 0.0047
## 40 0.5579 nan 0.1000 0.0002
## 60 0.4718 nan 0.1000 -0.0012
## 80 0.3988 nan 0.1000 -0.0005
## 100 0.3444 nan 0.1000 -0.0005
## 120 0.3006 nan 0.1000 -0.0008
## 140 0.2657 nan 0.1000 -0.0007
## 160 0.2350 nan 0.1000 0.0002
## 180 0.2103 nan 0.1000 -0.0009
## 200 0.1886 nan 0.1000 0.0004
## 220 0.1681 nan 0.1000 -0.0003
## 240 0.1496 nan 0.1000 -0.0002
## 260 0.1351 nan 0.1000 -0.0002
## 280 0.1229 nan 0.1000 -0.0003
## 300 0.1105 nan 0.1000 -0.0003
## 320 0.1002 nan 0.1000 -0.0001
## 340 0.0905 nan 0.1000 -0.0002
## 360 0.0816 nan 0.1000 -0.0002
## 380 0.0747 nan 0.1000 -0.0003
## 400 0.0683 nan 0.1000 -0.0001
## 420 0.0627 nan 0.1000 -0.0001
## 440 0.0577 nan 0.1000 -0.0001
## 460 0.0521 nan 0.1000 -0.0001
## 480 0.0474 nan 0.1000 -0.0002
## 500 0.0434 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2470 nan 0.1000 0.0314
## 2 1.1746 nan 0.1000 0.0324
## 3 1.1217 nan 0.1000 0.0243
## 4 1.0721 nan 0.1000 0.0212
## 5 1.0330 nan 0.1000 0.0172
## 6 0.9932 nan 0.1000 0.0155
## 7 0.9601 nan 0.1000 0.0127
## 8 0.9262 nan 0.1000 0.0124
## 9 0.8959 nan 0.1000 0.0112
## 10 0.8702 nan 0.1000 0.0100
## 20 0.7257 nan 0.1000 0.0018
## 40 0.5737 nan 0.1000 -0.0010
## 60 0.4821 nan 0.1000 0.0003
## 80 0.4135 nan 0.1000 -0.0008
## 100 0.3577 nan 0.1000 -0.0020
## 120 0.3132 nan 0.1000 -0.0004
## 140 0.2721 nan 0.1000 -0.0006
## 160 0.2387 nan 0.1000 -0.0010
## 180 0.2139 nan 0.1000 -0.0008
## 200 0.1923 nan 0.1000 -0.0012
## 220 0.1704 nan 0.1000 -0.0012
## 240 0.1517 nan 0.1000 -0.0005
## 260 0.1366 nan 0.1000 -0.0002
## 280 0.1242 nan 0.1000 -0.0006
## 300 0.1121 nan 0.1000 -0.0001
## 320 0.1016 nan 0.1000 -0.0003
## 340 0.0913 nan 0.1000 -0.0001
## 360 0.0831 nan 0.1000 -0.0003
## 380 0.0738 nan 0.1000 -0.0003
## 400 0.0674 nan 0.1000 -0.0001
## 420 0.0623 nan 0.1000 -0.0001
## 440 0.0570 nan 0.1000 -0.0001
## 460 0.0523 nan 0.1000 -0.0001
## 480 0.0485 nan 0.1000 -0.0003
## 500 0.0440 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2394 nan 0.1000 0.0396
## 2 1.1733 nan 0.1000 0.0263
## 3 1.1140 nan 0.1000 0.0244
## 4 1.0634 nan 0.1000 0.0192
## 5 1.0189 nan 0.1000 0.0175
## 6 0.9812 nan 0.1000 0.0149
## 7 0.9467 nan 0.1000 0.0137
## 8 0.9169 nan 0.1000 0.0109
## 9 0.8888 nan 0.1000 0.0112
## 10 0.8617 nan 0.1000 0.0104
## 20 0.7167 nan 0.1000 0.0022
## 40 0.5768 nan 0.1000 -0.0004
## 60 0.4887 nan 0.1000 -0.0015
## 80 0.4159 nan 0.1000 -0.0000
## 100 0.3621 nan 0.1000 -0.0006
## 120 0.3158 nan 0.1000 -0.0016
## 140 0.2845 nan 0.1000 -0.0012
## 160 0.2550 nan 0.1000 -0.0014
## 180 0.2276 nan 0.1000 -0.0007
## 200 0.2029 nan 0.1000 -0.0005
## 220 0.1823 nan 0.1000 -0.0005
## 240 0.1634 nan 0.1000 -0.0001
## 260 0.1462 nan 0.1000 -0.0008
## 280 0.1324 nan 0.1000 -0.0003
## 300 0.1203 nan 0.1000 -0.0007
## 320 0.1085 nan 0.1000 -0.0001
## 340 0.0996 nan 0.1000 -0.0007
## 360 0.0907 nan 0.1000 -0.0002
## 380 0.0828 nan 0.1000 -0.0004
## 400 0.0754 nan 0.1000 -0.0002
## 420 0.0691 nan 0.1000 -0.0003
## 440 0.0630 nan 0.1000 -0.0002
## 460 0.0579 nan 0.1000 -0.0003
## 480 0.0533 nan 0.1000 -0.0001
## 500 0.0488 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2374 nan 0.1000 0.0370
## 2 1.1697 nan 0.1000 0.0292
## 3 1.1112 nan 0.1000 0.0258
## 4 1.0585 nan 0.1000 0.0243
## 5 1.0118 nan 0.1000 0.0197
## 6 0.9717 nan 0.1000 0.0146
## 7 0.9368 nan 0.1000 0.0148
## 8 0.9058 nan 0.1000 0.0111
## 9 0.8776 nan 0.1000 0.0103
## 10 0.8560 nan 0.1000 0.0071
## 20 0.6814 nan 0.1000 0.0036
## 40 0.5277 nan 0.1000 -0.0001
## 60 0.4203 nan 0.1000 0.0001
## 80 0.3495 nan 0.1000 -0.0002
## 100 0.2927 nan 0.1000 -0.0004
## 120 0.2474 nan 0.1000 -0.0007
## 140 0.2144 nan 0.1000 -0.0012
## 160 0.1850 nan 0.1000 -0.0003
## 180 0.1610 nan 0.1000 -0.0007
## 200 0.1411 nan 0.1000 -0.0004
## 220 0.1244 nan 0.1000 -0.0004
## 240 0.1088 nan 0.1000 -0.0007
## 260 0.0960 nan 0.1000 -0.0003
## 280 0.0853 nan 0.1000 -0.0000
## 300 0.0751 nan 0.1000 -0.0002
## 320 0.0663 nan 0.1000 -0.0001
## 340 0.0589 nan 0.1000 -0.0001
## 360 0.0524 nan 0.1000 -0.0000
## 380 0.0471 nan 0.1000 -0.0001
## 400 0.0420 nan 0.1000 -0.0002
## 420 0.0376 nan 0.1000 -0.0001
## 440 0.0341 nan 0.1000 -0.0001
## 460 0.0308 nan 0.1000 -0.0001
## 480 0.0277 nan 0.1000 -0.0002
## 500 0.0248 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2349 nan 0.1000 0.0375
## 2 1.1647 nan 0.1000 0.0294
## 3 1.1003 nan 0.1000 0.0290
## 4 1.0551 nan 0.1000 0.0177
## 5 1.0070 nan 0.1000 0.0202
## 6 0.9668 nan 0.1000 0.0169
## 7 0.9333 nan 0.1000 0.0128
## 8 0.8966 nan 0.1000 0.0149
## 9 0.8691 nan 0.1000 0.0108
## 10 0.8430 nan 0.1000 0.0104
## 20 0.6799 nan 0.1000 0.0031
## 40 0.5260 nan 0.1000 -0.0003
## 60 0.4293 nan 0.1000 -0.0006
## 80 0.3606 nan 0.1000 0.0003
## 100 0.3018 nan 0.1000 -0.0011
## 120 0.2588 nan 0.1000 0.0004
## 140 0.2218 nan 0.1000 -0.0003
## 160 0.1936 nan 0.1000 -0.0010
## 180 0.1681 nan 0.1000 -0.0009
## 200 0.1477 nan 0.1000 -0.0005
## 220 0.1278 nan 0.1000 -0.0004
## 240 0.1111 nan 0.1000 -0.0005
## 260 0.0982 nan 0.1000 -0.0003
## 280 0.0870 nan 0.1000 -0.0001
## 300 0.0786 nan 0.1000 -0.0003
## 320 0.0706 nan 0.1000 -0.0001
## 340 0.0627 nan 0.1000 -0.0001
## 360 0.0560 nan 0.1000 -0.0001
## 380 0.0489 nan 0.1000 -0.0001
## 400 0.0439 nan 0.1000 -0.0002
## 420 0.0397 nan 0.1000 -0.0001
## 440 0.0358 nan 0.1000 -0.0001
## 460 0.0323 nan 0.1000 -0.0002
## 480 0.0293 nan 0.1000 -0.0001
## 500 0.0264 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2307 nan 0.1000 0.0350
## 2 1.1581 nan 0.1000 0.0325
## 3 1.0971 nan 0.1000 0.0233
## 4 1.0422 nan 0.1000 0.0222
## 5 1.0060 nan 0.1000 0.0140
## 6 0.9635 nan 0.1000 0.0171
## 7 0.9296 nan 0.1000 0.0145
## 8 0.9000 nan 0.1000 0.0122
## 9 0.8757 nan 0.1000 0.0089
## 10 0.8525 nan 0.1000 0.0067
## 20 0.6910 nan 0.1000 0.0026
## 40 0.5468 nan 0.1000 -0.0010
## 60 0.4605 nan 0.1000 -0.0013
## 80 0.3852 nan 0.1000 -0.0014
## 100 0.3300 nan 0.1000 -0.0007
## 120 0.2837 nan 0.1000 -0.0016
## 140 0.2448 nan 0.1000 -0.0002
## 160 0.2152 nan 0.1000 -0.0005
## 180 0.1892 nan 0.1000 -0.0004
## 200 0.1664 nan 0.1000 -0.0002
## 220 0.1452 nan 0.1000 -0.0005
## 240 0.1278 nan 0.1000 -0.0005
## 260 0.1144 nan 0.1000 -0.0009
## 280 0.1019 nan 0.1000 -0.0004
## 300 0.0899 nan 0.1000 -0.0001
## 320 0.0799 nan 0.1000 -0.0001
## 340 0.0708 nan 0.1000 -0.0005
## 360 0.0629 nan 0.1000 -0.0001
## 380 0.0559 nan 0.1000 -0.0002
## 400 0.0505 nan 0.1000 -0.0002
## 420 0.0452 nan 0.1000 -0.0002
## 440 0.0406 nan 0.1000 -0.0001
## 460 0.0365 nan 0.1000 -0.0001
## 480 0.0329 nan 0.1000 -0.0001
## 500 0.0298 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3199 nan 0.0010 0.0003
## 2 1.3191 nan 0.0010 0.0003
## 3 1.3183 nan 0.0010 0.0003
## 4 1.3174 nan 0.0010 0.0003
## 5 1.3166 nan 0.0010 0.0004
## 6 1.3157 nan 0.0010 0.0004
## 7 1.3149 nan 0.0010 0.0004
## 8 1.3141 nan 0.0010 0.0003
## 9 1.3132 nan 0.0010 0.0004
## 10 1.3123 nan 0.0010 0.0004
## 20 1.3040 nan 0.0010 0.0004
## 40 1.2881 nan 0.0010 0.0004
## 60 1.2724 nan 0.0010 0.0003
## 80 1.2573 nan 0.0010 0.0003
## 100 1.2424 nan 0.0010 0.0003
## 120 1.2283 nan 0.0010 0.0004
## 140 1.2141 nan 0.0010 0.0003
## 160 1.2006 nan 0.0010 0.0003
## 180 1.1879 nan 0.0010 0.0003
## 200 1.1753 nan 0.0010 0.0003
## 220 1.1633 nan 0.0010 0.0002
## 240 1.1514 nan 0.0010 0.0002
## 260 1.1395 nan 0.0010 0.0003
## 280 1.1283 nan 0.0010 0.0002
## 300 1.1177 nan 0.0010 0.0002
## 320 1.1074 nan 0.0010 0.0002
## 340 1.0971 nan 0.0010 0.0002
## 360 1.0872 nan 0.0010 0.0002
## 380 1.0779 nan 0.0010 0.0002
## 400 1.0682 nan 0.0010 0.0002
## 420 1.0589 nan 0.0010 0.0002
## 440 1.0497 nan 0.0010 0.0002
## 460 1.0411 nan 0.0010 0.0002
## 480 1.0327 nan 0.0010 0.0002
## 500 1.0245 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3181 nan 0.0010 0.0004
## 4 1.3173 nan 0.0010 0.0004
## 5 1.3164 nan 0.0010 0.0004
## 6 1.3156 nan 0.0010 0.0004
## 7 1.3149 nan 0.0010 0.0003
## 8 1.3140 nan 0.0010 0.0004
## 9 1.3131 nan 0.0010 0.0004
## 10 1.3123 nan 0.0010 0.0004
## 20 1.3044 nan 0.0010 0.0004
## 40 1.2886 nan 0.0010 0.0003
## 60 1.2733 nan 0.0010 0.0003
## 80 1.2581 nan 0.0010 0.0003
## 100 1.2433 nan 0.0010 0.0003
## 120 1.2301 nan 0.0010 0.0003
## 140 1.2164 nan 0.0010 0.0003
## 160 1.2029 nan 0.0010 0.0003
## 180 1.1900 nan 0.0010 0.0003
## 200 1.1774 nan 0.0010 0.0003
## 220 1.1650 nan 0.0010 0.0003
## 240 1.1531 nan 0.0010 0.0003
## 260 1.1416 nan 0.0010 0.0003
## 280 1.1304 nan 0.0010 0.0002
## 300 1.1195 nan 0.0010 0.0002
## 320 1.1092 nan 0.0010 0.0002
## 340 1.0990 nan 0.0010 0.0002
## 360 1.0889 nan 0.0010 0.0002
## 380 1.0793 nan 0.0010 0.0002
## 400 1.0697 nan 0.0010 0.0002
## 420 1.0606 nan 0.0010 0.0002
## 440 1.0517 nan 0.0010 0.0002
## 460 1.0427 nan 0.0010 0.0002
## 480 1.0343 nan 0.0010 0.0002
## 500 1.0260 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3199 nan 0.0010 0.0003
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3180 nan 0.0010 0.0004
## 4 1.3171 nan 0.0010 0.0004
## 5 1.3163 nan 0.0010 0.0004
## 6 1.3155 nan 0.0010 0.0003
## 7 1.3147 nan 0.0010 0.0004
## 8 1.3138 nan 0.0010 0.0004
## 9 1.3129 nan 0.0010 0.0004
## 10 1.3120 nan 0.0010 0.0004
## 20 1.3038 nan 0.0010 0.0004
## 40 1.2878 nan 0.0010 0.0003
## 60 1.2728 nan 0.0010 0.0003
## 80 1.2577 nan 0.0010 0.0004
## 100 1.2433 nan 0.0010 0.0003
## 120 1.2293 nan 0.0010 0.0003
## 140 1.2155 nan 0.0010 0.0003
## 160 1.2027 nan 0.0010 0.0003
## 180 1.1899 nan 0.0010 0.0003
## 200 1.1775 nan 0.0010 0.0003
## 220 1.1653 nan 0.0010 0.0003
## 240 1.1535 nan 0.0010 0.0003
## 260 1.1419 nan 0.0010 0.0003
## 280 1.1306 nan 0.0010 0.0002
## 300 1.1196 nan 0.0010 0.0002
## 320 1.1091 nan 0.0010 0.0002
## 340 1.0992 nan 0.0010 0.0002
## 360 1.0894 nan 0.0010 0.0002
## 380 1.0796 nan 0.0010 0.0002
## 400 1.0702 nan 0.0010 0.0002
## 420 1.0610 nan 0.0010 0.0002
## 440 1.0520 nan 0.0010 0.0002
## 460 1.0432 nan 0.0010 0.0002
## 480 1.0347 nan 0.0010 0.0002
## 500 1.0264 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3188 nan 0.0010 0.0004
## 3 1.3180 nan 0.0010 0.0004
## 4 1.3170 nan 0.0010 0.0004
## 5 1.3160 nan 0.0010 0.0005
## 6 1.3151 nan 0.0010 0.0004
## 7 1.3143 nan 0.0010 0.0004
## 8 1.3135 nan 0.0010 0.0004
## 9 1.3126 nan 0.0010 0.0004
## 10 1.3118 nan 0.0010 0.0004
## 20 1.3029 nan 0.0010 0.0003
## 40 1.2852 nan 0.0010 0.0004
## 60 1.2689 nan 0.0010 0.0003
## 80 1.2529 nan 0.0010 0.0003
## 100 1.2371 nan 0.0010 0.0003
## 120 1.2218 nan 0.0010 0.0004
## 140 1.2071 nan 0.0010 0.0003
## 160 1.1928 nan 0.0010 0.0003
## 180 1.1789 nan 0.0010 0.0004
## 200 1.1655 nan 0.0010 0.0003
## 220 1.1529 nan 0.0010 0.0003
## 240 1.1405 nan 0.0010 0.0003
## 260 1.1281 nan 0.0010 0.0002
## 280 1.1164 nan 0.0010 0.0002
## 300 1.1049 nan 0.0010 0.0002
## 320 1.0937 nan 0.0010 0.0002
## 340 1.0832 nan 0.0010 0.0002
## 360 1.0728 nan 0.0010 0.0002
## 380 1.0624 nan 0.0010 0.0002
## 400 1.0525 nan 0.0010 0.0002
## 420 1.0427 nan 0.0010 0.0002
## 440 1.0331 nan 0.0010 0.0002
## 460 1.0238 nan 0.0010 0.0002
## 480 1.0150 nan 0.0010 0.0002
## 500 1.0065 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3197 nan 0.0010 0.0005
## 2 1.3188 nan 0.0010 0.0004
## 3 1.3180 nan 0.0010 0.0004
## 4 1.3171 nan 0.0010 0.0004
## 5 1.3163 nan 0.0010 0.0004
## 6 1.3154 nan 0.0010 0.0004
## 7 1.3145 nan 0.0010 0.0004
## 8 1.3136 nan 0.0010 0.0004
## 9 1.3127 nan 0.0010 0.0004
## 10 1.3118 nan 0.0010 0.0004
## 20 1.3031 nan 0.0010 0.0004
## 40 1.2859 nan 0.0010 0.0004
## 60 1.2694 nan 0.0010 0.0003
## 80 1.2534 nan 0.0010 0.0004
## 100 1.2380 nan 0.0010 0.0003
## 120 1.2230 nan 0.0010 0.0004
## 140 1.2084 nan 0.0010 0.0003
## 160 1.1946 nan 0.0010 0.0003
## 180 1.1809 nan 0.0010 0.0003
## 200 1.1675 nan 0.0010 0.0003
## 220 1.1542 nan 0.0010 0.0003
## 240 1.1417 nan 0.0010 0.0002
## 260 1.1295 nan 0.0010 0.0003
## 280 1.1178 nan 0.0010 0.0003
## 300 1.1064 nan 0.0010 0.0002
## 320 1.0953 nan 0.0010 0.0003
## 340 1.0844 nan 0.0010 0.0002
## 360 1.0741 nan 0.0010 0.0002
## 380 1.0639 nan 0.0010 0.0002
## 400 1.0539 nan 0.0010 0.0002
## 420 1.0442 nan 0.0010 0.0002
## 440 1.0347 nan 0.0010 0.0002
## 460 1.0255 nan 0.0010 0.0002
## 480 1.0168 nan 0.0010 0.0002
## 500 1.0081 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3197 nan 0.0010 0.0004
## 2 1.3188 nan 0.0010 0.0004
## 3 1.3179 nan 0.0010 0.0004
## 4 1.3170 nan 0.0010 0.0005
## 5 1.3161 nan 0.0010 0.0003
## 6 1.3153 nan 0.0010 0.0004
## 7 1.3144 nan 0.0010 0.0004
## 8 1.3136 nan 0.0010 0.0004
## 9 1.3127 nan 0.0010 0.0004
## 10 1.3119 nan 0.0010 0.0004
## 20 1.3033 nan 0.0010 0.0004
## 40 1.2858 nan 0.0010 0.0004
## 60 1.2695 nan 0.0010 0.0004
## 80 1.2535 nan 0.0010 0.0003
## 100 1.2380 nan 0.0010 0.0003
## 120 1.2234 nan 0.0010 0.0003
## 140 1.2087 nan 0.0010 0.0004
## 160 1.1949 nan 0.0010 0.0003
## 180 1.1814 nan 0.0010 0.0002
## 200 1.1684 nan 0.0010 0.0003
## 220 1.1554 nan 0.0010 0.0003
## 240 1.1431 nan 0.0010 0.0002
## 260 1.1313 nan 0.0010 0.0003
## 280 1.1198 nan 0.0010 0.0002
## 300 1.1087 nan 0.0010 0.0003
## 320 1.0974 nan 0.0010 0.0003
## 340 1.0868 nan 0.0010 0.0002
## 360 1.0763 nan 0.0010 0.0002
## 380 1.0664 nan 0.0010 0.0002
## 400 1.0567 nan 0.0010 0.0002
## 420 1.0470 nan 0.0010 0.0002
## 440 1.0374 nan 0.0010 0.0002
## 460 1.0282 nan 0.0010 0.0002
## 480 1.0193 nan 0.0010 0.0002
## 500 1.0106 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3196 nan 0.0010 0.0005
## 2 1.3186 nan 0.0010 0.0004
## 3 1.3178 nan 0.0010 0.0004
## 4 1.3168 nan 0.0010 0.0004
## 5 1.3158 nan 0.0010 0.0005
## 6 1.3149 nan 0.0010 0.0004
## 7 1.3139 nan 0.0010 0.0004
## 8 1.3129 nan 0.0010 0.0005
## 9 1.3120 nan 0.0010 0.0004
## 10 1.3111 nan 0.0010 0.0004
## 20 1.3018 nan 0.0010 0.0004
## 40 1.2841 nan 0.0010 0.0004
## 60 1.2666 nan 0.0010 0.0004
## 80 1.2497 nan 0.0010 0.0003
## 100 1.2335 nan 0.0010 0.0004
## 120 1.2178 nan 0.0010 0.0003
## 140 1.2027 nan 0.0010 0.0003
## 160 1.1878 nan 0.0010 0.0003
## 180 1.1736 nan 0.0010 0.0004
## 200 1.1596 nan 0.0010 0.0003
## 220 1.1462 nan 0.0010 0.0003
## 240 1.1332 nan 0.0010 0.0003
## 260 1.1208 nan 0.0010 0.0003
## 280 1.1087 nan 0.0010 0.0002
## 300 1.0967 nan 0.0010 0.0003
## 320 1.0849 nan 0.0010 0.0003
## 340 1.0735 nan 0.0010 0.0003
## 360 1.0626 nan 0.0010 0.0002
## 380 1.0520 nan 0.0010 0.0002
## 400 1.0415 nan 0.0010 0.0002
## 420 1.0314 nan 0.0010 0.0002
## 440 1.0217 nan 0.0010 0.0002
## 460 1.0120 nan 0.0010 0.0002
## 480 1.0027 nan 0.0010 0.0002
## 500 0.9936 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3188 nan 0.0010 0.0004
## 3 1.3178 nan 0.0010 0.0005
## 4 1.3168 nan 0.0010 0.0004
## 5 1.3160 nan 0.0010 0.0004
## 6 1.3151 nan 0.0010 0.0004
## 7 1.3141 nan 0.0010 0.0005
## 8 1.3131 nan 0.0010 0.0004
## 9 1.3122 nan 0.0010 0.0004
## 10 1.3113 nan 0.0010 0.0004
## 20 1.3019 nan 0.0010 0.0004
## 40 1.2842 nan 0.0010 0.0004
## 60 1.2671 nan 0.0010 0.0004
## 80 1.2504 nan 0.0010 0.0004
## 100 1.2342 nan 0.0010 0.0003
## 120 1.2185 nan 0.0010 0.0004
## 140 1.2034 nan 0.0010 0.0003
## 160 1.1888 nan 0.0010 0.0003
## 180 1.1747 nan 0.0010 0.0003
## 200 1.1609 nan 0.0010 0.0003
## 220 1.1474 nan 0.0010 0.0003
## 240 1.1344 nan 0.0010 0.0003
## 260 1.1219 nan 0.0010 0.0003
## 280 1.1101 nan 0.0010 0.0002
## 300 1.0985 nan 0.0010 0.0003
## 320 1.0868 nan 0.0010 0.0002
## 340 1.0755 nan 0.0010 0.0002
## 360 1.0648 nan 0.0010 0.0002
## 380 1.0542 nan 0.0010 0.0002
## 400 1.0440 nan 0.0010 0.0002
## 420 1.0341 nan 0.0010 0.0002
## 440 1.0243 nan 0.0010 0.0002
## 460 1.0146 nan 0.0010 0.0002
## 480 1.0055 nan 0.0010 0.0002
## 500 0.9964 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3197 nan 0.0010 0.0004
## 2 1.3187 nan 0.0010 0.0004
## 3 1.3179 nan 0.0010 0.0004
## 4 1.3170 nan 0.0010 0.0004
## 5 1.3162 nan 0.0010 0.0004
## 6 1.3153 nan 0.0010 0.0004
## 7 1.3145 nan 0.0010 0.0004
## 8 1.3135 nan 0.0010 0.0004
## 9 1.3127 nan 0.0010 0.0004
## 10 1.3118 nan 0.0010 0.0004
## 20 1.3028 nan 0.0010 0.0004
## 40 1.2850 nan 0.0010 0.0004
## 60 1.2678 nan 0.0010 0.0004
## 80 1.2512 nan 0.0010 0.0003
## 100 1.2352 nan 0.0010 0.0003
## 120 1.2195 nan 0.0010 0.0003
## 140 1.2044 nan 0.0010 0.0003
## 160 1.1901 nan 0.0010 0.0003
## 180 1.1764 nan 0.0010 0.0003
## 200 1.1627 nan 0.0010 0.0003
## 220 1.1494 nan 0.0010 0.0003
## 240 1.1367 nan 0.0010 0.0003
## 260 1.1245 nan 0.0010 0.0003
## 280 1.1121 nan 0.0010 0.0003
## 300 1.1006 nan 0.0010 0.0002
## 320 1.0895 nan 0.0010 0.0002
## 340 1.0783 nan 0.0010 0.0002
## 360 1.0674 nan 0.0010 0.0002
## 380 1.0570 nan 0.0010 0.0002
## 400 1.0469 nan 0.0010 0.0002
## 420 1.0367 nan 0.0010 0.0002
## 440 1.0270 nan 0.0010 0.0002
## 460 1.0178 nan 0.0010 0.0002
## 480 1.0086 nan 0.0010 0.0002
## 500 0.9997 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3124 nan 0.0100 0.0042
## 2 1.3038 nan 0.0100 0.0040
## 3 1.2965 nan 0.0100 0.0032
## 4 1.2887 nan 0.0100 0.0035
## 5 1.2804 nan 0.0100 0.0039
## 6 1.2725 nan 0.0100 0.0036
## 7 1.2645 nan 0.0100 0.0037
## 8 1.2567 nan 0.0100 0.0035
## 9 1.2495 nan 0.0100 0.0035
## 10 1.2420 nan 0.0100 0.0033
## 20 1.1724 nan 0.0100 0.0029
## 40 1.0642 nan 0.0100 0.0018
## 60 0.9809 nan 0.0100 0.0017
## 80 0.9198 nan 0.0100 0.0011
## 100 0.8680 nan 0.0100 0.0009
## 120 0.8262 nan 0.0100 0.0007
## 140 0.7906 nan 0.0100 0.0004
## 160 0.7623 nan 0.0100 0.0004
## 180 0.7373 nan 0.0100 0.0004
## 200 0.7152 nan 0.0100 0.0001
## 220 0.6952 nan 0.0100 0.0002
## 240 0.6768 nan 0.0100 0.0003
## 260 0.6622 nan 0.0100 -0.0001
## 280 0.6481 nan 0.0100 0.0001
## 300 0.6342 nan 0.0100 0.0001
## 320 0.6217 nan 0.0100 -0.0000
## 340 0.6095 nan 0.0100 0.0001
## 360 0.5997 nan 0.0100 0.0000
## 380 0.5898 nan 0.0100 0.0000
## 400 0.5784 nan 0.0100 0.0001
## 420 0.5686 nan 0.0100 -0.0001
## 440 0.5595 nan 0.0100 -0.0000
## 460 0.5505 nan 0.0100 -0.0000
## 480 0.5425 nan 0.0100 -0.0001
## 500 0.5346 nan 0.0100 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3131 nan 0.0100 0.0031
## 2 1.3045 nan 0.0100 0.0040
## 3 1.2956 nan 0.0100 0.0039
## 4 1.2878 nan 0.0100 0.0033
## 5 1.2795 nan 0.0100 0.0039
## 6 1.2715 nan 0.0100 0.0037
## 7 1.2625 nan 0.0100 0.0038
## 8 1.2553 nan 0.0100 0.0031
## 9 1.2480 nan 0.0100 0.0031
## 10 1.2405 nan 0.0100 0.0033
## 20 1.1748 nan 0.0100 0.0032
## 40 1.0669 nan 0.0100 0.0017
## 60 0.9851 nan 0.0100 0.0015
## 80 0.9210 nan 0.0100 0.0013
## 100 0.8678 nan 0.0100 0.0007
## 120 0.8270 nan 0.0100 0.0006
## 140 0.7920 nan 0.0100 0.0005
## 160 0.7628 nan 0.0100 0.0004
## 180 0.7391 nan 0.0100 0.0003
## 200 0.7192 nan 0.0100 0.0001
## 220 0.7020 nan 0.0100 0.0002
## 240 0.6855 nan 0.0100 -0.0000
## 260 0.6701 nan 0.0100 -0.0001
## 280 0.6567 nan 0.0100 0.0001
## 300 0.6447 nan 0.0100 -0.0000
## 320 0.6318 nan 0.0100 0.0001
## 340 0.6208 nan 0.0100 0.0001
## 360 0.6102 nan 0.0100 0.0001
## 380 0.6001 nan 0.0100 -0.0003
## 400 0.5915 nan 0.0100 -0.0002
## 420 0.5816 nan 0.0100 0.0001
## 440 0.5720 nan 0.0100 -0.0001
## 460 0.5627 nan 0.0100 0.0000
## 480 0.5541 nan 0.0100 0.0001
## 500 0.5460 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3118 nan 0.0100 0.0039
## 2 1.3042 nan 0.0100 0.0034
## 3 1.2958 nan 0.0100 0.0040
## 4 1.2885 nan 0.0100 0.0033
## 5 1.2807 nan 0.0100 0.0035
## 6 1.2735 nan 0.0100 0.0029
## 7 1.2657 nan 0.0100 0.0034
## 8 1.2580 nan 0.0100 0.0031
## 9 1.2503 nan 0.0100 0.0034
## 10 1.2428 nan 0.0100 0.0034
## 20 1.1773 nan 0.0100 0.0031
## 40 1.0717 nan 0.0100 0.0021
## 60 0.9886 nan 0.0100 0.0013
## 80 0.9234 nan 0.0100 0.0011
## 100 0.8695 nan 0.0100 0.0007
## 120 0.8274 nan 0.0100 0.0006
## 140 0.7929 nan 0.0100 0.0004
## 160 0.7656 nan 0.0100 0.0002
## 180 0.7412 nan 0.0100 0.0004
## 200 0.7212 nan 0.0100 0.0003
## 220 0.7028 nan 0.0100 0.0003
## 240 0.6872 nan 0.0100 -0.0000
## 260 0.6746 nan 0.0100 -0.0000
## 280 0.6611 nan 0.0100 -0.0000
## 300 0.6496 nan 0.0100 0.0000
## 320 0.6388 nan 0.0100 0.0002
## 340 0.6283 nan 0.0100 -0.0001
## 360 0.6180 nan 0.0100 -0.0000
## 380 0.6087 nan 0.0100 0.0000
## 400 0.5992 nan 0.0100 -0.0001
## 420 0.5906 nan 0.0100 -0.0000
## 440 0.5816 nan 0.0100 -0.0001
## 460 0.5734 nan 0.0100 -0.0001
## 480 0.5654 nan 0.0100 -0.0001
## 500 0.5579 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3112 nan 0.0100 0.0039
## 2 1.3021 nan 0.0100 0.0041
## 3 1.2928 nan 0.0100 0.0045
## 4 1.2846 nan 0.0100 0.0036
## 5 1.2771 nan 0.0100 0.0036
## 6 1.2690 nan 0.0100 0.0031
## 7 1.2615 nan 0.0100 0.0032
## 8 1.2529 nan 0.0100 0.0037
## 9 1.2452 nan 0.0100 0.0035
## 10 1.2376 nan 0.0100 0.0031
## 20 1.1674 nan 0.0100 0.0025
## 40 1.0558 nan 0.0100 0.0020
## 60 0.9697 nan 0.0100 0.0016
## 80 0.9015 nan 0.0100 0.0014
## 100 0.8470 nan 0.0100 0.0008
## 120 0.8022 nan 0.0100 0.0006
## 140 0.7655 nan 0.0100 0.0006
## 160 0.7343 nan 0.0100 0.0004
## 180 0.7078 nan 0.0100 0.0003
## 200 0.6829 nan 0.0100 0.0001
## 220 0.6617 nan 0.0100 0.0002
## 240 0.6424 nan 0.0100 0.0002
## 260 0.6246 nan 0.0100 0.0001
## 280 0.6086 nan 0.0100 0.0002
## 300 0.5938 nan 0.0100 0.0001
## 320 0.5793 nan 0.0100 0.0002
## 340 0.5647 nan 0.0100 -0.0000
## 360 0.5524 nan 0.0100 0.0000
## 380 0.5408 nan 0.0100 0.0000
## 400 0.5306 nan 0.0100 -0.0000
## 420 0.5206 nan 0.0100 -0.0001
## 440 0.5109 nan 0.0100 0.0000
## 460 0.5008 nan 0.0100 -0.0000
## 480 0.4917 nan 0.0100 0.0000
## 500 0.4826 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3112 nan 0.0100 0.0041
## 2 1.3027 nan 0.0100 0.0034
## 3 1.2940 nan 0.0100 0.0045
## 4 1.2853 nan 0.0100 0.0040
## 5 1.2772 nan 0.0100 0.0036
## 6 1.2674 nan 0.0100 0.0044
## 7 1.2592 nan 0.0100 0.0037
## 8 1.2511 nan 0.0100 0.0034
## 9 1.2430 nan 0.0100 0.0039
## 10 1.2354 nan 0.0100 0.0031
## 20 1.1653 nan 0.0100 0.0029
## 40 1.0527 nan 0.0100 0.0022
## 60 0.9651 nan 0.0100 0.0015
## 80 0.8987 nan 0.0100 0.0014
## 100 0.8451 nan 0.0100 0.0009
## 120 0.8004 nan 0.0100 0.0007
## 140 0.7659 nan 0.0100 0.0003
## 160 0.7360 nan 0.0100 0.0004
## 180 0.7122 nan 0.0100 0.0003
## 200 0.6893 nan 0.0100 0.0003
## 220 0.6696 nan 0.0100 0.0004
## 240 0.6508 nan 0.0100 -0.0000
## 260 0.6343 nan 0.0100 0.0001
## 280 0.6192 nan 0.0100 0.0002
## 300 0.6058 nan 0.0100 0.0002
## 320 0.5921 nan 0.0100 -0.0000
## 340 0.5792 nan 0.0100 0.0000
## 360 0.5668 nan 0.0100 -0.0001
## 380 0.5548 nan 0.0100 0.0000
## 400 0.5427 nan 0.0100 -0.0001
## 420 0.5314 nan 0.0100 0.0000
## 440 0.5220 nan 0.0100 -0.0001
## 460 0.5131 nan 0.0100 -0.0001
## 480 0.5033 nan 0.0100 0.0002
## 500 0.4940 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3114 nan 0.0100 0.0044
## 2 1.3025 nan 0.0100 0.0042
## 3 1.2939 nan 0.0100 0.0037
## 4 1.2853 nan 0.0100 0.0037
## 5 1.2766 nan 0.0100 0.0039
## 6 1.2685 nan 0.0100 0.0036
## 7 1.2605 nan 0.0100 0.0037
## 8 1.2528 nan 0.0100 0.0034
## 9 1.2461 nan 0.0100 0.0030
## 10 1.2388 nan 0.0100 0.0036
## 20 1.1659 nan 0.0100 0.0030
## 40 1.0519 nan 0.0100 0.0019
## 60 0.9672 nan 0.0100 0.0015
## 80 0.9000 nan 0.0100 0.0014
## 100 0.8488 nan 0.0100 0.0008
## 120 0.8058 nan 0.0100 0.0007
## 140 0.7708 nan 0.0100 0.0004
## 160 0.7417 nan 0.0100 0.0003
## 180 0.7161 nan 0.0100 0.0001
## 200 0.6932 nan 0.0100 0.0004
## 220 0.6740 nan 0.0100 0.0002
## 240 0.6568 nan 0.0100 0.0004
## 260 0.6400 nan 0.0100 0.0002
## 280 0.6259 nan 0.0100 -0.0001
## 300 0.6121 nan 0.0100 0.0000
## 320 0.6001 nan 0.0100 -0.0000
## 340 0.5883 nan 0.0100 -0.0000
## 360 0.5770 nan 0.0100 -0.0002
## 380 0.5666 nan 0.0100 0.0001
## 400 0.5544 nan 0.0100 -0.0001
## 420 0.5442 nan 0.0100 -0.0001
## 440 0.5350 nan 0.0100 0.0000
## 460 0.5255 nan 0.0100 0.0000
## 480 0.5167 nan 0.0100 0.0000
## 500 0.5084 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3114 nan 0.0100 0.0043
## 2 1.3023 nan 0.0100 0.0045
## 3 1.2928 nan 0.0100 0.0040
## 4 1.2840 nan 0.0100 0.0038
## 5 1.2748 nan 0.0100 0.0044
## 6 1.2663 nan 0.0100 0.0039
## 7 1.2577 nan 0.0100 0.0041
## 8 1.2491 nan 0.0100 0.0037
## 9 1.2409 nan 0.0100 0.0035
## 10 1.2330 nan 0.0100 0.0036
## 20 1.1597 nan 0.0100 0.0032
## 40 1.0406 nan 0.0100 0.0019
## 60 0.9508 nan 0.0100 0.0014
## 80 0.8823 nan 0.0100 0.0014
## 100 0.8259 nan 0.0100 0.0009
## 120 0.7811 nan 0.0100 0.0005
## 140 0.7417 nan 0.0100 0.0006
## 160 0.7085 nan 0.0100 0.0005
## 180 0.6798 nan 0.0100 0.0004
## 200 0.6544 nan 0.0100 0.0001
## 220 0.6312 nan 0.0100 0.0002
## 240 0.6096 nan 0.0100 0.0001
## 260 0.5909 nan 0.0100 0.0002
## 280 0.5720 nan 0.0100 0.0001
## 300 0.5562 nan 0.0100 0.0001
## 320 0.5413 nan 0.0100 0.0000
## 340 0.5274 nan 0.0100 0.0001
## 360 0.5129 nan 0.0100 -0.0000
## 380 0.4998 nan 0.0100 -0.0000
## 400 0.4885 nan 0.0100 0.0000
## 420 0.4771 nan 0.0100 -0.0002
## 440 0.4657 nan 0.0100 0.0001
## 460 0.4553 nan 0.0100 0.0000
## 480 0.4452 nan 0.0100 -0.0001
## 500 0.4352 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3114 nan 0.0100 0.0041
## 2 1.3021 nan 0.0100 0.0043
## 3 1.2927 nan 0.0100 0.0043
## 4 1.2842 nan 0.0100 0.0035
## 5 1.2748 nan 0.0100 0.0042
## 6 1.2670 nan 0.0100 0.0034
## 7 1.2581 nan 0.0100 0.0040
## 8 1.2505 nan 0.0100 0.0031
## 9 1.2429 nan 0.0100 0.0036
## 10 1.2351 nan 0.0100 0.0033
## 20 1.1603 nan 0.0100 0.0030
## 40 1.0435 nan 0.0100 0.0019
## 60 0.9539 nan 0.0100 0.0013
## 80 0.8859 nan 0.0100 0.0010
## 100 0.8298 nan 0.0100 0.0008
## 120 0.7844 nan 0.0100 0.0007
## 140 0.7478 nan 0.0100 0.0005
## 160 0.7164 nan 0.0100 0.0002
## 180 0.6889 nan 0.0100 0.0005
## 200 0.6635 nan 0.0100 0.0004
## 220 0.6430 nan 0.0100 0.0001
## 240 0.6227 nan 0.0100 0.0003
## 260 0.6039 nan 0.0100 0.0001
## 280 0.5874 nan 0.0100 -0.0001
## 300 0.5726 nan 0.0100 0.0000
## 320 0.5587 nan 0.0100 -0.0000
## 340 0.5448 nan 0.0100 0.0001
## 360 0.5319 nan 0.0100 0.0000
## 380 0.5193 nan 0.0100 -0.0000
## 400 0.5068 nan 0.0100 0.0001
## 420 0.4956 nan 0.0100 -0.0001
## 440 0.4841 nan 0.0100 -0.0001
## 460 0.4740 nan 0.0100 -0.0002
## 480 0.4644 nan 0.0100 -0.0002
## 500 0.4545 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3119 nan 0.0100 0.0041
## 2 1.3033 nan 0.0100 0.0033
## 3 1.2939 nan 0.0100 0.0043
## 4 1.2847 nan 0.0100 0.0042
## 5 1.2761 nan 0.0100 0.0039
## 6 1.2669 nan 0.0100 0.0042
## 7 1.2576 nan 0.0100 0.0042
## 8 1.2501 nan 0.0100 0.0032
## 9 1.2425 nan 0.0100 0.0034
## 10 1.2339 nan 0.0100 0.0041
## 20 1.1614 nan 0.0100 0.0032
## 40 1.0457 nan 0.0100 0.0023
## 60 0.9563 nan 0.0100 0.0018
## 80 0.8896 nan 0.0100 0.0013
## 100 0.8342 nan 0.0100 0.0009
## 120 0.7914 nan 0.0100 0.0006
## 140 0.7544 nan 0.0100 0.0006
## 160 0.7226 nan 0.0100 0.0006
## 180 0.6943 nan 0.0100 0.0006
## 200 0.6708 nan 0.0100 0.0001
## 220 0.6502 nan 0.0100 -0.0000
## 240 0.6302 nan 0.0100 0.0002
## 260 0.6137 nan 0.0100 -0.0001
## 280 0.5969 nan 0.0100 -0.0001
## 300 0.5823 nan 0.0100 0.0000
## 320 0.5691 nan 0.0100 -0.0000
## 340 0.5561 nan 0.0100 0.0000
## 360 0.5440 nan 0.0100 -0.0001
## 380 0.5322 nan 0.0100 -0.0001
## 400 0.5206 nan 0.0100 -0.0001
## 420 0.5083 nan 0.0100 -0.0001
## 440 0.4979 nan 0.0100 -0.0000
## 460 0.4871 nan 0.0100 -0.0001
## 480 0.4771 nan 0.0100 -0.0001
## 500 0.4677 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2501 nan 0.1000 0.0289
## 2 1.1745 nan 0.1000 0.0341
## 3 1.1301 nan 0.1000 0.0171
## 4 1.0727 nan 0.1000 0.0228
## 5 1.0225 nan 0.1000 0.0227
## 6 0.9845 nan 0.1000 0.0151
## 7 0.9489 nan 0.1000 0.0132
## 8 0.9224 nan 0.1000 0.0102
## 9 0.8961 nan 0.1000 0.0099
## 10 0.8693 nan 0.1000 0.0116
## 20 0.7129 nan 0.1000 0.0043
## 40 0.5715 nan 0.1000 -0.0003
## 60 0.4917 nan 0.1000 -0.0014
## 80 0.4298 nan 0.1000 0.0007
## 100 0.3799 nan 0.1000 -0.0016
## 120 0.3331 nan 0.1000 -0.0004
## 140 0.2960 nan 0.1000 -0.0003
## 160 0.2665 nan 0.1000 -0.0005
## 180 0.2431 nan 0.1000 0.0005
## 200 0.2176 nan 0.1000 -0.0004
## 220 0.1952 nan 0.1000 0.0000
## 240 0.1757 nan 0.1000 -0.0004
## 260 0.1599 nan 0.1000 0.0000
## 280 0.1461 nan 0.1000 -0.0003
## 300 0.1348 nan 0.1000 -0.0003
## 320 0.1230 nan 0.1000 -0.0003
## 340 0.1128 nan 0.1000 -0.0001
## 360 0.1034 nan 0.1000 -0.0001
## 380 0.0949 nan 0.1000 -0.0003
## 400 0.0872 nan 0.1000 -0.0002
## 420 0.0810 nan 0.1000 -0.0002
## 440 0.0750 nan 0.1000 -0.0001
## 460 0.0689 nan 0.1000 -0.0003
## 480 0.0633 nan 0.1000 -0.0001
## 500 0.0585 nan 0.1000 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2389 nan 0.1000 0.0376
## 2 1.1674 nan 0.1000 0.0334
## 3 1.1051 nan 0.1000 0.0279
## 4 1.0553 nan 0.1000 0.0222
## 5 1.0120 nan 0.1000 0.0189
## 6 0.9776 nan 0.1000 0.0133
## 7 0.9485 nan 0.1000 0.0117
## 8 0.9161 nan 0.1000 0.0140
## 9 0.8881 nan 0.1000 0.0113
## 10 0.8651 nan 0.1000 0.0107
## 20 0.7188 nan 0.1000 0.0038
## 40 0.5994 nan 0.1000 -0.0009
## 60 0.5107 nan 0.1000 0.0001
## 80 0.4574 nan 0.1000 -0.0015
## 100 0.4027 nan 0.1000 -0.0016
## 120 0.3570 nan 0.1000 0.0000
## 140 0.3235 nan 0.1000 -0.0008
## 160 0.2957 nan 0.1000 -0.0006
## 180 0.2625 nan 0.1000 -0.0010
## 200 0.2363 nan 0.1000 -0.0002
## 220 0.2176 nan 0.1000 -0.0008
## 240 0.2013 nan 0.1000 0.0001
## 260 0.1834 nan 0.1000 -0.0001
## 280 0.1668 nan 0.1000 -0.0010
## 300 0.1521 nan 0.1000 -0.0005
## 320 0.1394 nan 0.1000 -0.0002
## 340 0.1283 nan 0.1000 -0.0002
## 360 0.1172 nan 0.1000 -0.0002
## 380 0.1080 nan 0.1000 -0.0001
## 400 0.0987 nan 0.1000 -0.0001
## 420 0.0907 nan 0.1000 -0.0002
## 440 0.0841 nan 0.1000 -0.0002
## 460 0.0782 nan 0.1000 -0.0001
## 480 0.0726 nan 0.1000 -0.0004
## 500 0.0672 nan 0.1000 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2340 nan 0.1000 0.0380
## 2 1.1709 nan 0.1000 0.0277
## 3 1.1142 nan 0.1000 0.0230
## 4 1.0646 nan 0.1000 0.0205
## 5 1.0194 nan 0.1000 0.0212
## 6 0.9825 nan 0.1000 0.0159
## 7 0.9459 nan 0.1000 0.0134
## 8 0.9166 nan 0.1000 0.0121
## 9 0.8876 nan 0.1000 0.0116
## 10 0.8640 nan 0.1000 0.0104
## 20 0.7158 nan 0.1000 0.0032
## 40 0.5890 nan 0.1000 -0.0014
## 60 0.5133 nan 0.1000 -0.0005
## 80 0.4494 nan 0.1000 -0.0008
## 100 0.4031 nan 0.1000 -0.0008
## 120 0.3605 nan 0.1000 0.0001
## 140 0.3306 nan 0.1000 0.0000
## 160 0.2955 nan 0.1000 -0.0002
## 180 0.2705 nan 0.1000 -0.0008
## 200 0.2442 nan 0.1000 0.0000
## 220 0.2234 nan 0.1000 -0.0001
## 240 0.2045 nan 0.1000 -0.0006
## 260 0.1877 nan 0.1000 -0.0000
## 280 0.1704 nan 0.1000 -0.0004
## 300 0.1551 nan 0.1000 -0.0003
## 320 0.1437 nan 0.1000 -0.0002
## 340 0.1304 nan 0.1000 -0.0004
## 360 0.1212 nan 0.1000 -0.0003
## 380 0.1117 nan 0.1000 0.0001
## 400 0.1043 nan 0.1000 0.0001
## 420 0.0961 nan 0.1000 -0.0002
## 440 0.0886 nan 0.1000 -0.0004
## 460 0.0817 nan 0.1000 -0.0003
## 480 0.0757 nan 0.1000 -0.0003
## 500 0.0710 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2329 nan 0.1000 0.0415
## 2 1.1613 nan 0.1000 0.0309
## 3 1.0943 nan 0.1000 0.0281
## 4 1.0443 nan 0.1000 0.0207
## 5 1.0003 nan 0.1000 0.0192
## 6 0.9699 nan 0.1000 0.0097
## 7 0.9334 nan 0.1000 0.0156
## 8 0.8994 nan 0.1000 0.0125
## 9 0.8727 nan 0.1000 0.0074
## 10 0.8466 nan 0.1000 0.0076
## 20 0.6920 nan 0.1000 0.0010
## 40 0.5355 nan 0.1000 -0.0003
## 60 0.4382 nan 0.1000 -0.0012
## 80 0.3686 nan 0.1000 -0.0000
## 100 0.3146 nan 0.1000 -0.0009
## 120 0.2742 nan 0.1000 -0.0005
## 140 0.2395 nan 0.1000 -0.0011
## 160 0.2074 nan 0.1000 -0.0005
## 180 0.1845 nan 0.1000 -0.0010
## 200 0.1623 nan 0.1000 -0.0002
## 220 0.1437 nan 0.1000 -0.0001
## 240 0.1276 nan 0.1000 -0.0001
## 260 0.1152 nan 0.1000 -0.0006
## 280 0.1036 nan 0.1000 -0.0003
## 300 0.0929 nan 0.1000 0.0000
## 320 0.0828 nan 0.1000 0.0000
## 340 0.0734 nan 0.1000 -0.0001
## 360 0.0661 nan 0.1000 -0.0001
## 380 0.0596 nan 0.1000 -0.0002
## 400 0.0532 nan 0.1000 -0.0001
## 420 0.0482 nan 0.1000 -0.0002
## 440 0.0438 nan 0.1000 -0.0001
## 460 0.0395 nan 0.1000 -0.0001
## 480 0.0357 nan 0.1000 -0.0002
## 500 0.0321 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2383 nan 0.1000 0.0367
## 2 1.1624 nan 0.1000 0.0313
## 3 1.1016 nan 0.1000 0.0281
## 4 1.0516 nan 0.1000 0.0233
## 5 1.0105 nan 0.1000 0.0179
## 6 0.9675 nan 0.1000 0.0178
## 7 0.9301 nan 0.1000 0.0157
## 8 0.8989 nan 0.1000 0.0115
## 9 0.8703 nan 0.1000 0.0116
## 10 0.8465 nan 0.1000 0.0090
## 20 0.6862 nan 0.1000 0.0035
## 40 0.5443 nan 0.1000 0.0001
## 60 0.4581 nan 0.1000 -0.0013
## 80 0.3891 nan 0.1000 -0.0003
## 100 0.3367 nan 0.1000 -0.0008
## 120 0.2916 nan 0.1000 0.0000
## 140 0.2570 nan 0.1000 -0.0006
## 160 0.2230 nan 0.1000 0.0001
## 180 0.1988 nan 0.1000 -0.0007
## 200 0.1752 nan 0.1000 -0.0003
## 220 0.1520 nan 0.1000 -0.0002
## 240 0.1340 nan 0.1000 -0.0005
## 260 0.1213 nan 0.1000 -0.0004
## 280 0.1091 nan 0.1000 -0.0002
## 300 0.0974 nan 0.1000 -0.0002
## 320 0.0874 nan 0.1000 -0.0001
## 340 0.0780 nan 0.1000 -0.0003
## 360 0.0711 nan 0.1000 -0.0002
## 380 0.0634 nan 0.1000 -0.0003
## 400 0.0567 nan 0.1000 -0.0002
## 420 0.0512 nan 0.1000 -0.0001
## 440 0.0467 nan 0.1000 -0.0002
## 460 0.0418 nan 0.1000 -0.0001
## 480 0.0376 nan 0.1000 -0.0000
## 500 0.0339 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2438 nan 0.1000 0.0347
## 2 1.1778 nan 0.1000 0.0290
## 3 1.1053 nan 0.1000 0.0303
## 4 1.0515 nan 0.1000 0.0249
## 5 0.9984 nan 0.1000 0.0225
## 6 0.9579 nan 0.1000 0.0162
## 7 0.9215 nan 0.1000 0.0152
## 8 0.8907 nan 0.1000 0.0112
## 9 0.8666 nan 0.1000 0.0098
## 10 0.8424 nan 0.1000 0.0095
## 20 0.6922 nan 0.1000 0.0016
## 40 0.5648 nan 0.1000 -0.0021
## 60 0.4738 nan 0.1000 -0.0022
## 80 0.4131 nan 0.1000 -0.0021
## 100 0.3609 nan 0.1000 0.0000
## 120 0.3197 nan 0.1000 -0.0014
## 140 0.2821 nan 0.1000 -0.0010
## 160 0.2502 nan 0.1000 -0.0005
## 180 0.2185 nan 0.1000 -0.0007
## 200 0.1952 nan 0.1000 -0.0005
## 220 0.1743 nan 0.1000 -0.0003
## 240 0.1561 nan 0.1000 -0.0005
## 260 0.1421 nan 0.1000 -0.0006
## 280 0.1280 nan 0.1000 -0.0003
## 300 0.1146 nan 0.1000 -0.0000
## 320 0.1021 nan 0.1000 -0.0001
## 340 0.0914 nan 0.1000 -0.0002
## 360 0.0829 nan 0.1000 -0.0002
## 380 0.0753 nan 0.1000 -0.0003
## 400 0.0688 nan 0.1000 -0.0001
## 420 0.0628 nan 0.1000 -0.0001
## 440 0.0564 nan 0.1000 -0.0001
## 460 0.0519 nan 0.1000 -0.0001
## 480 0.0472 nan 0.1000 -0.0002
## 500 0.0433 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2283 nan 0.1000 0.0395
## 2 1.1577 nan 0.1000 0.0303
## 3 1.0896 nan 0.1000 0.0310
## 4 1.0343 nan 0.1000 0.0242
## 5 0.9949 nan 0.1000 0.0148
## 6 0.9516 nan 0.1000 0.0172
## 7 0.9121 nan 0.1000 0.0131
## 8 0.8814 nan 0.1000 0.0116
## 9 0.8507 nan 0.1000 0.0112
## 10 0.8199 nan 0.1000 0.0122
## 20 0.6605 nan 0.1000 0.0029
## 40 0.4998 nan 0.1000 -0.0014
## 60 0.3944 nan 0.1000 -0.0010
## 80 0.3314 nan 0.1000 -0.0017
## 100 0.2782 nan 0.1000 -0.0002
## 120 0.2316 nan 0.1000 -0.0008
## 140 0.1982 nan 0.1000 -0.0009
## 160 0.1693 nan 0.1000 -0.0005
## 180 0.1457 nan 0.1000 -0.0005
## 200 0.1258 nan 0.1000 0.0002
## 220 0.1106 nan 0.1000 -0.0005
## 240 0.0967 nan 0.1000 -0.0003
## 260 0.0851 nan 0.1000 -0.0001
## 280 0.0734 nan 0.1000 -0.0001
## 300 0.0645 nan 0.1000 -0.0000
## 320 0.0557 nan 0.1000 -0.0001
## 340 0.0492 nan 0.1000 -0.0001
## 360 0.0432 nan 0.1000 -0.0000
## 380 0.0379 nan 0.1000 -0.0001
## 400 0.0337 nan 0.1000 -0.0003
## 420 0.0296 nan 0.1000 -0.0001
## 440 0.0260 nan 0.1000 -0.0000
## 460 0.0228 nan 0.1000 0.0000
## 480 0.0203 nan 0.1000 -0.0001
## 500 0.0179 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2350 nan 0.1000 0.0420
## 2 1.1697 nan 0.1000 0.0292
## 3 1.1099 nan 0.1000 0.0268
## 4 1.0557 nan 0.1000 0.0250
## 5 1.0031 nan 0.1000 0.0240
## 6 0.9662 nan 0.1000 0.0163
## 7 0.9270 nan 0.1000 0.0179
## 8 0.8885 nan 0.1000 0.0157
## 9 0.8603 nan 0.1000 0.0101
## 10 0.8333 nan 0.1000 0.0087
## 20 0.6693 nan 0.1000 0.0026
## 40 0.5194 nan 0.1000 -0.0015
## 60 0.4271 nan 0.1000 -0.0005
## 80 0.3539 nan 0.1000 -0.0009
## 100 0.2923 nan 0.1000 0.0005
## 120 0.2431 nan 0.1000 -0.0014
## 140 0.2067 nan 0.1000 -0.0004
## 160 0.1772 nan 0.1000 0.0000
## 180 0.1513 nan 0.1000 -0.0006
## 200 0.1303 nan 0.1000 -0.0002
## 220 0.1140 nan 0.1000 -0.0004
## 240 0.0996 nan 0.1000 -0.0002
## 260 0.0880 nan 0.1000 -0.0003
## 280 0.0781 nan 0.1000 -0.0002
## 300 0.0690 nan 0.1000 -0.0002
## 320 0.0609 nan 0.1000 -0.0001
## 340 0.0540 nan 0.1000 -0.0003
## 360 0.0483 nan 0.1000 -0.0001
## 380 0.0423 nan 0.1000 -0.0001
## 400 0.0368 nan 0.1000 -0.0000
## 420 0.0327 nan 0.1000 0.0000
## 440 0.0285 nan 0.1000 -0.0001
## 460 0.0253 nan 0.1000 -0.0001
## 480 0.0223 nan 0.1000 -0.0001
## 500 0.0197 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2371 nan 0.1000 0.0391
## 2 1.1684 nan 0.1000 0.0290
## 3 1.1038 nan 0.1000 0.0277
## 4 1.0436 nan 0.1000 0.0255
## 5 1.0002 nan 0.1000 0.0151
## 6 0.9588 nan 0.1000 0.0161
## 7 0.9208 nan 0.1000 0.0170
## 8 0.8884 nan 0.1000 0.0160
## 9 0.8605 nan 0.1000 0.0126
## 10 0.8355 nan 0.1000 0.0074
## 20 0.6695 nan 0.1000 0.0054
## 40 0.5187 nan 0.1000 -0.0009
## 60 0.4299 nan 0.1000 -0.0001
## 80 0.3569 nan 0.1000 -0.0002
## 100 0.2994 nan 0.1000 -0.0000
## 120 0.2529 nan 0.1000 -0.0010
## 140 0.2174 nan 0.1000 -0.0010
## 160 0.1856 nan 0.1000 -0.0005
## 180 0.1611 nan 0.1000 0.0000
## 200 0.1405 nan 0.1000 -0.0000
## 220 0.1228 nan 0.1000 -0.0006
## 240 0.1060 nan 0.1000 -0.0003
## 260 0.0929 nan 0.1000 -0.0005
## 280 0.0824 nan 0.1000 -0.0002
## 300 0.0719 nan 0.1000 -0.0002
## 320 0.0628 nan 0.1000 -0.0002
## 340 0.0554 nan 0.1000 -0.0001
## 360 0.0496 nan 0.1000 -0.0001
## 380 0.0449 nan 0.1000 -0.0003
## 400 0.0399 nan 0.1000 -0.0002
## 420 0.0353 nan 0.1000 -0.0002
## 440 0.0314 nan 0.1000 -0.0001
## 460 0.0280 nan 0.1000 -0.0002
## 480 0.0249 nan 0.1000 -0.0000
## 500 0.0218 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3199 nan 0.0010 0.0003
## 2 1.3191 nan 0.0010 0.0003
## 3 1.3182 nan 0.0010 0.0004
## 4 1.3172 nan 0.0010 0.0004
## 5 1.3164 nan 0.0010 0.0004
## 6 1.3156 nan 0.0010 0.0003
## 7 1.3147 nan 0.0010 0.0004
## 8 1.3139 nan 0.0010 0.0004
## 9 1.3131 nan 0.0010 0.0004
## 10 1.3122 nan 0.0010 0.0003
## 20 1.3039 nan 0.0010 0.0004
## 40 1.2877 nan 0.0010 0.0004
## 60 1.2719 nan 0.0010 0.0004
## 80 1.2568 nan 0.0010 0.0004
## 100 1.2420 nan 0.0010 0.0003
## 120 1.2281 nan 0.0010 0.0003
## 140 1.2145 nan 0.0010 0.0003
## 160 1.2010 nan 0.0010 0.0003
## 180 1.1882 nan 0.0010 0.0003
## 200 1.1758 nan 0.0010 0.0002
## 220 1.1636 nan 0.0010 0.0002
## 240 1.1521 nan 0.0010 0.0002
## 260 1.1405 nan 0.0010 0.0002
## 280 1.1291 nan 0.0010 0.0002
## 300 1.1178 nan 0.0010 0.0002
## 320 1.1069 nan 0.0010 0.0002
## 340 1.0964 nan 0.0010 0.0003
## 360 1.0865 nan 0.0010 0.0002
## 380 1.0768 nan 0.0010 0.0002
## 400 1.0672 nan 0.0010 0.0002
## 420 1.0578 nan 0.0010 0.0002
## 440 1.0488 nan 0.0010 0.0002
## 460 1.0403 nan 0.0010 0.0002
## 480 1.0318 nan 0.0010 0.0002
## 500 1.0236 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3199 nan 0.0010 0.0003
## 2 1.3190 nan 0.0010 0.0004
## 3 1.3182 nan 0.0010 0.0004
## 4 1.3173 nan 0.0010 0.0004
## 5 1.3164 nan 0.0010 0.0004
## 6 1.3156 nan 0.0010 0.0004
## 7 1.3147 nan 0.0010 0.0004
## 8 1.3138 nan 0.0010 0.0004
## 9 1.3129 nan 0.0010 0.0004
## 10 1.3121 nan 0.0010 0.0004
## 20 1.3038 nan 0.0010 0.0004
## 40 1.2876 nan 0.0010 0.0003
## 60 1.2719 nan 0.0010 0.0003
## 80 1.2565 nan 0.0010 0.0003
## 100 1.2419 nan 0.0010 0.0003
## 120 1.2274 nan 0.0010 0.0003
## 140 1.2140 nan 0.0010 0.0003
## 160 1.2009 nan 0.0010 0.0003
## 180 1.1880 nan 0.0010 0.0003
## 200 1.1753 nan 0.0010 0.0003
## 220 1.1633 nan 0.0010 0.0003
## 240 1.1516 nan 0.0010 0.0003
## 260 1.1400 nan 0.0010 0.0002
## 280 1.1286 nan 0.0010 0.0002
## 300 1.1179 nan 0.0010 0.0002
## 320 1.1074 nan 0.0010 0.0002
## 340 1.0972 nan 0.0010 0.0002
## 360 1.0873 nan 0.0010 0.0002
## 380 1.0775 nan 0.0010 0.0002
## 400 1.0680 nan 0.0010 0.0002
## 420 1.0589 nan 0.0010 0.0002
## 440 1.0500 nan 0.0010 0.0002
## 460 1.0412 nan 0.0010 0.0002
## 480 1.0327 nan 0.0010 0.0002
## 500 1.0244 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3180 nan 0.0010 0.0004
## 4 1.3173 nan 0.0010 0.0003
## 5 1.3163 nan 0.0010 0.0004
## 6 1.3155 nan 0.0010 0.0004
## 7 1.3147 nan 0.0010 0.0003
## 8 1.3139 nan 0.0010 0.0004
## 9 1.3131 nan 0.0010 0.0003
## 10 1.3122 nan 0.0010 0.0004
## 20 1.3041 nan 0.0010 0.0003
## 40 1.2884 nan 0.0010 0.0003
## 60 1.2732 nan 0.0010 0.0003
## 80 1.2589 nan 0.0010 0.0003
## 100 1.2445 nan 0.0010 0.0003
## 120 1.2303 nan 0.0010 0.0003
## 140 1.2168 nan 0.0010 0.0003
## 160 1.2036 nan 0.0010 0.0003
## 180 1.1909 nan 0.0010 0.0003
## 200 1.1778 nan 0.0010 0.0003
## 220 1.1655 nan 0.0010 0.0003
## 240 1.1537 nan 0.0010 0.0002
## 260 1.1425 nan 0.0010 0.0003
## 280 1.1311 nan 0.0010 0.0003
## 300 1.1202 nan 0.0010 0.0002
## 320 1.1097 nan 0.0010 0.0002
## 340 1.0996 nan 0.0010 0.0002
## 360 1.0897 nan 0.0010 0.0002
## 380 1.0801 nan 0.0010 0.0002
## 400 1.0708 nan 0.0010 0.0002
## 420 1.0618 nan 0.0010 0.0002
## 440 1.0529 nan 0.0010 0.0002
## 460 1.0442 nan 0.0010 0.0002
## 480 1.0360 nan 0.0010 0.0002
## 500 1.0277 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0003
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3180 nan 0.0010 0.0004
## 4 1.3170 nan 0.0010 0.0004
## 5 1.3161 nan 0.0010 0.0004
## 6 1.3151 nan 0.0010 0.0005
## 7 1.3142 nan 0.0010 0.0004
## 8 1.3134 nan 0.0010 0.0004
## 9 1.3125 nan 0.0010 0.0004
## 10 1.3116 nan 0.0010 0.0004
## 20 1.3028 nan 0.0010 0.0004
## 40 1.2854 nan 0.0010 0.0004
## 60 1.2686 nan 0.0010 0.0004
## 80 1.2524 nan 0.0010 0.0003
## 100 1.2364 nan 0.0010 0.0003
## 120 1.2214 nan 0.0010 0.0004
## 140 1.2069 nan 0.0010 0.0003
## 160 1.1928 nan 0.0010 0.0003
## 180 1.1791 nan 0.0010 0.0003
## 200 1.1656 nan 0.0010 0.0003
## 220 1.1529 nan 0.0010 0.0003
## 240 1.1404 nan 0.0010 0.0002
## 260 1.1284 nan 0.0010 0.0003
## 280 1.1166 nan 0.0010 0.0003
## 300 1.1052 nan 0.0010 0.0002
## 320 1.0939 nan 0.0010 0.0002
## 340 1.0831 nan 0.0010 0.0002
## 360 1.0723 nan 0.0010 0.0002
## 380 1.0622 nan 0.0010 0.0002
## 400 1.0524 nan 0.0010 0.0002
## 420 1.0425 nan 0.0010 0.0002
## 440 1.0331 nan 0.0010 0.0002
## 460 1.0242 nan 0.0010 0.0002
## 480 1.0155 nan 0.0010 0.0002
## 500 1.0069 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3188 nan 0.0010 0.0004
## 3 1.3179 nan 0.0010 0.0004
## 4 1.3171 nan 0.0010 0.0004
## 5 1.3161 nan 0.0010 0.0004
## 6 1.3153 nan 0.0010 0.0004
## 7 1.3143 nan 0.0010 0.0004
## 8 1.3135 nan 0.0010 0.0004
## 9 1.3126 nan 0.0010 0.0004
## 10 1.3118 nan 0.0010 0.0004
## 20 1.3031 nan 0.0010 0.0004
## 40 1.2858 nan 0.0010 0.0004
## 60 1.2692 nan 0.0010 0.0004
## 80 1.2533 nan 0.0010 0.0003
## 100 1.2379 nan 0.0010 0.0004
## 120 1.2230 nan 0.0010 0.0003
## 140 1.2083 nan 0.0010 0.0003
## 160 1.1943 nan 0.0010 0.0003
## 180 1.1809 nan 0.0010 0.0003
## 200 1.1678 nan 0.0010 0.0003
## 220 1.1551 nan 0.0010 0.0003
## 240 1.1427 nan 0.0010 0.0002
## 260 1.1307 nan 0.0010 0.0003
## 280 1.1189 nan 0.0010 0.0002
## 300 1.1077 nan 0.0010 0.0003
## 320 1.0970 nan 0.0010 0.0002
## 340 1.0861 nan 0.0010 0.0003
## 360 1.0756 nan 0.0010 0.0002
## 380 1.0655 nan 0.0010 0.0002
## 400 1.0557 nan 0.0010 0.0002
## 420 1.0462 nan 0.0010 0.0002
## 440 1.0369 nan 0.0010 0.0002
## 460 1.0276 nan 0.0010 0.0002
## 480 1.0188 nan 0.0010 0.0002
## 500 1.0102 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3197 nan 0.0010 0.0004
## 2 1.3188 nan 0.0010 0.0004
## 3 1.3180 nan 0.0010 0.0004
## 4 1.3171 nan 0.0010 0.0004
## 5 1.3162 nan 0.0010 0.0004
## 6 1.3153 nan 0.0010 0.0004
## 7 1.3144 nan 0.0010 0.0004
## 8 1.3135 nan 0.0010 0.0004
## 9 1.3126 nan 0.0010 0.0004
## 10 1.3118 nan 0.0010 0.0004
## 20 1.3031 nan 0.0010 0.0004
## 40 1.2863 nan 0.0010 0.0004
## 60 1.2702 nan 0.0010 0.0004
## 80 1.2548 nan 0.0010 0.0003
## 100 1.2393 nan 0.0010 0.0003
## 120 1.2246 nan 0.0010 0.0003
## 140 1.2106 nan 0.0010 0.0003
## 160 1.1965 nan 0.0010 0.0004
## 180 1.1829 nan 0.0010 0.0003
## 200 1.1699 nan 0.0010 0.0003
## 220 1.1573 nan 0.0010 0.0003
## 240 1.1450 nan 0.0010 0.0002
## 260 1.1333 nan 0.0010 0.0003
## 280 1.1219 nan 0.0010 0.0002
## 300 1.1106 nan 0.0010 0.0003
## 320 1.0999 nan 0.0010 0.0002
## 340 1.0893 nan 0.0010 0.0002
## 360 1.0792 nan 0.0010 0.0002
## 380 1.0690 nan 0.0010 0.0002
## 400 1.0590 nan 0.0010 0.0002
## 420 1.0496 nan 0.0010 0.0002
## 440 1.0403 nan 0.0010 0.0002
## 460 1.0312 nan 0.0010 0.0002
## 480 1.0224 nan 0.0010 0.0002
## 500 1.0141 nan 0.0010 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3197 nan 0.0010 0.0004
## 2 1.3187 nan 0.0010 0.0005
## 3 1.3179 nan 0.0010 0.0004
## 4 1.3169 nan 0.0010 0.0004
## 5 1.3159 nan 0.0010 0.0005
## 6 1.3150 nan 0.0010 0.0004
## 7 1.3140 nan 0.0010 0.0005
## 8 1.3130 nan 0.0010 0.0004
## 9 1.3121 nan 0.0010 0.0004
## 10 1.3112 nan 0.0010 0.0004
## 20 1.3020 nan 0.0010 0.0004
## 40 1.2840 nan 0.0010 0.0004
## 60 1.2666 nan 0.0010 0.0004
## 80 1.2498 nan 0.0010 0.0004
## 100 1.2335 nan 0.0010 0.0004
## 120 1.2176 nan 0.0010 0.0004
## 140 1.2019 nan 0.0010 0.0003
## 160 1.1872 nan 0.0010 0.0003
## 180 1.1732 nan 0.0010 0.0003
## 200 1.1595 nan 0.0010 0.0003
## 220 1.1460 nan 0.0010 0.0003
## 240 1.1329 nan 0.0010 0.0003
## 260 1.1203 nan 0.0010 0.0003
## 280 1.1081 nan 0.0010 0.0002
## 300 1.0960 nan 0.0010 0.0002
## 320 1.0844 nan 0.0010 0.0003
## 340 1.0732 nan 0.0010 0.0002
## 360 1.0625 nan 0.0010 0.0002
## 380 1.0517 nan 0.0010 0.0002
## 400 1.0414 nan 0.0010 0.0002
## 420 1.0312 nan 0.0010 0.0002
## 440 1.0215 nan 0.0010 0.0002
## 460 1.0119 nan 0.0010 0.0002
## 480 1.0024 nan 0.0010 0.0002
## 500 0.9934 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3197 nan 0.0010 0.0004
## 2 1.3187 nan 0.0010 0.0004
## 3 1.3178 nan 0.0010 0.0004
## 4 1.3169 nan 0.0010 0.0004
## 5 1.3160 nan 0.0010 0.0004
## 6 1.3150 nan 0.0010 0.0005
## 7 1.3141 nan 0.0010 0.0004
## 8 1.3132 nan 0.0010 0.0004
## 9 1.3122 nan 0.0010 0.0004
## 10 1.3114 nan 0.0010 0.0004
## 20 1.3019 nan 0.0010 0.0004
## 40 1.2839 nan 0.0010 0.0004
## 60 1.2668 nan 0.0010 0.0004
## 80 1.2502 nan 0.0010 0.0004
## 100 1.2339 nan 0.0010 0.0004
## 120 1.2184 nan 0.0010 0.0003
## 140 1.2034 nan 0.0010 0.0003
## 160 1.1888 nan 0.0010 0.0003
## 180 1.1748 nan 0.0010 0.0003
## 200 1.1608 nan 0.0010 0.0003
## 220 1.1476 nan 0.0010 0.0003
## 240 1.1349 nan 0.0010 0.0003
## 260 1.1224 nan 0.0010 0.0002
## 280 1.1101 nan 0.0010 0.0002
## 300 1.0982 nan 0.0010 0.0003
## 320 1.0868 nan 0.0010 0.0002
## 340 1.0756 nan 0.0010 0.0002
## 360 1.0648 nan 0.0010 0.0002
## 380 1.0543 nan 0.0010 0.0002
## 400 1.0441 nan 0.0010 0.0002
## 420 1.0343 nan 0.0010 0.0002
## 440 1.0244 nan 0.0010 0.0002
## 460 1.0152 nan 0.0010 0.0002
## 480 1.0061 nan 0.0010 0.0002
## 500 0.9973 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3197 nan 0.0010 0.0004
## 2 1.3188 nan 0.0010 0.0004
## 3 1.3178 nan 0.0010 0.0004
## 4 1.3169 nan 0.0010 0.0004
## 5 1.3160 nan 0.0010 0.0004
## 6 1.3152 nan 0.0010 0.0004
## 7 1.3142 nan 0.0010 0.0004
## 8 1.3133 nan 0.0010 0.0004
## 9 1.3125 nan 0.0010 0.0004
## 10 1.3115 nan 0.0010 0.0004
## 20 1.3023 nan 0.0010 0.0004
## 40 1.2853 nan 0.0010 0.0003
## 60 1.2687 nan 0.0010 0.0004
## 80 1.2522 nan 0.0010 0.0003
## 100 1.2365 nan 0.0010 0.0003
## 120 1.2210 nan 0.0010 0.0003
## 140 1.2060 nan 0.0010 0.0003
## 160 1.1918 nan 0.0010 0.0003
## 180 1.1780 nan 0.0010 0.0003
## 200 1.1646 nan 0.0010 0.0003
## 220 1.1514 nan 0.0010 0.0003
## 240 1.1388 nan 0.0010 0.0003
## 260 1.1264 nan 0.0010 0.0003
## 280 1.1146 nan 0.0010 0.0003
## 300 1.1029 nan 0.0010 0.0002
## 320 1.0915 nan 0.0010 0.0002
## 340 1.0806 nan 0.0010 0.0002
## 360 1.0698 nan 0.0010 0.0002
## 380 1.0594 nan 0.0010 0.0002
## 400 1.0495 nan 0.0010 0.0002
## 420 1.0397 nan 0.0010 0.0002
## 440 1.0301 nan 0.0010 0.0002
## 460 1.0206 nan 0.0010 0.0002
## 480 1.0116 nan 0.0010 0.0002
## 500 1.0027 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3122 nan 0.0100 0.0036
## 2 1.3038 nan 0.0100 0.0037
## 3 1.2960 nan 0.0100 0.0034
## 4 1.2884 nan 0.0100 0.0035
## 5 1.2803 nan 0.0100 0.0036
## 6 1.2720 nan 0.0100 0.0038
## 7 1.2643 nan 0.0100 0.0033
## 8 1.2570 nan 0.0100 0.0035
## 9 1.2498 nan 0.0100 0.0029
## 10 1.2417 nan 0.0100 0.0036
## 20 1.1747 nan 0.0100 0.0030
## 40 1.0670 nan 0.0100 0.0021
## 60 0.9837 nan 0.0100 0.0018
## 80 0.9198 nan 0.0100 0.0010
## 100 0.8685 nan 0.0100 0.0009
## 120 0.8269 nan 0.0100 0.0007
## 140 0.7915 nan 0.0100 0.0005
## 160 0.7628 nan 0.0100 0.0002
## 180 0.7358 nan 0.0100 0.0005
## 200 0.7114 nan 0.0100 0.0002
## 220 0.6916 nan 0.0100 0.0001
## 240 0.6741 nan 0.0100 0.0001
## 260 0.6581 nan 0.0100 0.0002
## 280 0.6433 nan 0.0100 -0.0000
## 300 0.6294 nan 0.0100 0.0000
## 320 0.6169 nan 0.0100 -0.0000
## 340 0.6052 nan 0.0100 -0.0001
## 360 0.5934 nan 0.0100 0.0001
## 380 0.5837 nan 0.0100 -0.0000
## 400 0.5734 nan 0.0100 -0.0000
## 420 0.5641 nan 0.0100 -0.0001
## 440 0.5547 nan 0.0100 0.0000
## 460 0.5451 nan 0.0100 -0.0000
## 480 0.5372 nan 0.0100 -0.0001
## 500 0.5292 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3133 nan 0.0100 0.0034
## 2 1.3039 nan 0.0100 0.0041
## 3 1.2963 nan 0.0100 0.0032
## 4 1.2879 nan 0.0100 0.0037
## 5 1.2806 nan 0.0100 0.0032
## 6 1.2725 nan 0.0100 0.0038
## 7 1.2641 nan 0.0100 0.0038
## 8 1.2572 nan 0.0100 0.0031
## 9 1.2504 nan 0.0100 0.0034
## 10 1.2431 nan 0.0100 0.0035
## 20 1.1769 nan 0.0100 0.0029
## 40 1.0682 nan 0.0100 0.0022
## 60 0.9872 nan 0.0100 0.0014
## 80 0.9245 nan 0.0100 0.0013
## 100 0.8722 nan 0.0100 0.0009
## 120 0.8307 nan 0.0100 0.0006
## 140 0.7954 nan 0.0100 0.0005
## 160 0.7672 nan 0.0100 0.0005
## 180 0.7428 nan 0.0100 0.0003
## 200 0.7204 nan 0.0100 0.0004
## 220 0.7011 nan 0.0100 0.0003
## 240 0.6837 nan 0.0100 0.0002
## 260 0.6673 nan 0.0100 -0.0001
## 280 0.6529 nan 0.0100 0.0000
## 300 0.6397 nan 0.0100 -0.0000
## 320 0.6271 nan 0.0100 0.0000
## 340 0.6151 nan 0.0100 0.0001
## 360 0.6049 nan 0.0100 0.0001
## 380 0.5941 nan 0.0100 -0.0001
## 400 0.5840 nan 0.0100 -0.0000
## 420 0.5748 nan 0.0100 -0.0000
## 440 0.5652 nan 0.0100 0.0000
## 460 0.5568 nan 0.0100 -0.0000
## 480 0.5482 nan 0.0100 -0.0001
## 500 0.5404 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3126 nan 0.0100 0.0037
## 2 1.3036 nan 0.0100 0.0042
## 3 1.2958 nan 0.0100 0.0036
## 4 1.2875 nan 0.0100 0.0037
## 5 1.2791 nan 0.0100 0.0038
## 6 1.2713 nan 0.0100 0.0034
## 7 1.2632 nan 0.0100 0.0037
## 8 1.2553 nan 0.0100 0.0036
## 9 1.2479 nan 0.0100 0.0029
## 10 1.2405 nan 0.0100 0.0033
## 20 1.1749 nan 0.0100 0.0028
## 40 1.0678 nan 0.0100 0.0020
## 60 0.9855 nan 0.0100 0.0017
## 80 0.9229 nan 0.0100 0.0011
## 100 0.8716 nan 0.0100 0.0007
## 120 0.8301 nan 0.0100 0.0008
## 140 0.7946 nan 0.0100 0.0005
## 160 0.7657 nan 0.0100 0.0002
## 180 0.7414 nan 0.0100 0.0003
## 200 0.7204 nan 0.0100 0.0002
## 220 0.7026 nan 0.0100 0.0002
## 240 0.6851 nan 0.0100 0.0002
## 260 0.6700 nan 0.0100 0.0003
## 280 0.6553 nan 0.0100 0.0001
## 300 0.6434 nan 0.0100 -0.0001
## 320 0.6308 nan 0.0100 0.0000
## 340 0.6203 nan 0.0100 0.0000
## 360 0.6105 nan 0.0100 -0.0000
## 380 0.6006 nan 0.0100 -0.0001
## 400 0.5918 nan 0.0100 0.0000
## 420 0.5830 nan 0.0100 -0.0002
## 440 0.5745 nan 0.0100 -0.0000
## 460 0.5656 nan 0.0100 -0.0001
## 480 0.5576 nan 0.0100 -0.0002
## 500 0.5493 nan 0.0100 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3110 nan 0.0100 0.0045
## 2 1.3022 nan 0.0100 0.0038
## 3 1.2931 nan 0.0100 0.0043
## 4 1.2855 nan 0.0100 0.0037
## 5 1.2771 nan 0.0100 0.0038
## 6 1.2693 nan 0.0100 0.0038
## 7 1.2609 nan 0.0100 0.0037
## 8 1.2527 nan 0.0100 0.0039
## 9 1.2455 nan 0.0100 0.0032
## 10 1.2378 nan 0.0100 0.0033
## 20 1.1681 nan 0.0100 0.0029
## 40 1.0547 nan 0.0100 0.0023
## 60 0.9663 nan 0.0100 0.0014
## 80 0.8995 nan 0.0100 0.0011
## 100 0.8454 nan 0.0100 0.0010
## 120 0.8030 nan 0.0100 0.0006
## 140 0.7654 nan 0.0100 0.0006
## 160 0.7340 nan 0.0100 0.0004
## 180 0.7076 nan 0.0100 0.0003
## 200 0.6850 nan 0.0100 0.0001
## 220 0.6634 nan 0.0100 0.0001
## 240 0.6442 nan 0.0100 0.0001
## 260 0.6258 nan 0.0100 0.0000
## 280 0.6092 nan 0.0100 0.0001
## 300 0.5944 nan 0.0100 0.0000
## 320 0.5813 nan 0.0100 0.0001
## 340 0.5680 nan 0.0100 0.0000
## 360 0.5558 nan 0.0100 -0.0001
## 380 0.5440 nan 0.0100 -0.0000
## 400 0.5321 nan 0.0100 0.0001
## 420 0.5210 nan 0.0100 -0.0000
## 440 0.5099 nan 0.0100 -0.0001
## 460 0.5001 nan 0.0100 -0.0000
## 480 0.4910 nan 0.0100 -0.0000
## 500 0.4821 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3116 nan 0.0100 0.0042
## 2 1.3028 nan 0.0100 0.0040
## 3 1.2928 nan 0.0100 0.0043
## 4 1.2842 nan 0.0100 0.0043
## 5 1.2757 nan 0.0100 0.0036
## 6 1.2675 nan 0.0100 0.0037
## 7 1.2592 nan 0.0100 0.0033
## 8 1.2512 nan 0.0100 0.0037
## 9 1.2430 nan 0.0100 0.0037
## 10 1.2355 nan 0.0100 0.0039
## 20 1.1673 nan 0.0100 0.0029
## 40 1.0541 nan 0.0100 0.0022
## 60 0.9683 nan 0.0100 0.0016
## 80 0.9019 nan 0.0100 0.0009
## 100 0.8498 nan 0.0100 0.0009
## 120 0.8057 nan 0.0100 0.0006
## 140 0.7697 nan 0.0100 0.0006
## 160 0.7377 nan 0.0100 0.0004
## 180 0.7108 nan 0.0100 0.0004
## 200 0.6880 nan 0.0100 0.0002
## 220 0.6676 nan 0.0100 -0.0002
## 240 0.6500 nan 0.0100 0.0001
## 260 0.6326 nan 0.0100 0.0001
## 280 0.6173 nan 0.0100 0.0002
## 300 0.6022 nan 0.0100 0.0002
## 320 0.5882 nan 0.0100 0.0001
## 340 0.5749 nan 0.0100 0.0000
## 360 0.5627 nan 0.0100 -0.0000
## 380 0.5520 nan 0.0100 0.0001
## 400 0.5400 nan 0.0100 0.0000
## 420 0.5297 nan 0.0100 0.0001
## 440 0.5195 nan 0.0100 -0.0000
## 460 0.5103 nan 0.0100 -0.0001
## 480 0.5013 nan 0.0100 0.0001
## 500 0.4909 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3111 nan 0.0100 0.0046
## 2 1.3021 nan 0.0100 0.0036
## 3 1.2937 nan 0.0100 0.0037
## 4 1.2848 nan 0.0100 0.0037
## 5 1.2762 nan 0.0100 0.0037
## 6 1.2687 nan 0.0100 0.0033
## 7 1.2605 nan 0.0100 0.0037
## 8 1.2518 nan 0.0100 0.0037
## 9 1.2438 nan 0.0100 0.0034
## 10 1.2362 nan 0.0100 0.0033
## 20 1.1664 nan 0.0100 0.0032
## 40 1.0565 nan 0.0100 0.0018
## 60 0.9696 nan 0.0100 0.0014
## 80 0.9049 nan 0.0100 0.0009
## 100 0.8521 nan 0.0100 0.0009
## 120 0.8097 nan 0.0100 0.0007
## 140 0.7744 nan 0.0100 0.0005
## 160 0.7443 nan 0.0100 0.0003
## 180 0.7184 nan 0.0100 0.0002
## 200 0.6965 nan 0.0100 0.0001
## 220 0.6762 nan 0.0100 -0.0001
## 240 0.6575 nan 0.0100 -0.0000
## 260 0.6396 nan 0.0100 0.0001
## 280 0.6256 nan 0.0100 0.0002
## 300 0.6119 nan 0.0100 0.0001
## 320 0.5991 nan 0.0100 0.0001
## 340 0.5864 nan 0.0100 -0.0001
## 360 0.5746 nan 0.0100 -0.0000
## 380 0.5632 nan 0.0100 0.0001
## 400 0.5522 nan 0.0100 0.0001
## 420 0.5430 nan 0.0100 -0.0000
## 440 0.5326 nan 0.0100 0.0002
## 460 0.5232 nan 0.0100 -0.0000
## 480 0.5143 nan 0.0100 0.0001
## 500 0.5051 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3119 nan 0.0100 0.0040
## 2 1.3027 nan 0.0100 0.0042
## 3 1.2939 nan 0.0100 0.0038
## 4 1.2847 nan 0.0100 0.0035
## 5 1.2766 nan 0.0100 0.0032
## 6 1.2675 nan 0.0100 0.0043
## 7 1.2592 nan 0.0100 0.0038
## 8 1.2508 nan 0.0100 0.0037
## 9 1.2420 nan 0.0100 0.0039
## 10 1.2335 nan 0.0100 0.0035
## 20 1.1587 nan 0.0100 0.0030
## 40 1.0391 nan 0.0100 0.0025
## 60 0.9497 nan 0.0100 0.0019
## 80 0.8793 nan 0.0100 0.0011
## 100 0.8224 nan 0.0100 0.0009
## 120 0.7742 nan 0.0100 0.0006
## 140 0.7332 nan 0.0100 0.0005
## 160 0.7006 nan 0.0100 0.0003
## 180 0.6726 nan 0.0100 0.0001
## 200 0.6479 nan 0.0100 0.0002
## 220 0.6244 nan 0.0100 0.0002
## 240 0.6051 nan 0.0100 0.0002
## 260 0.5864 nan 0.0100 0.0001
## 280 0.5685 nan 0.0100 0.0001
## 300 0.5527 nan 0.0100 0.0000
## 320 0.5371 nan 0.0100 0.0001
## 340 0.5226 nan 0.0100 0.0001
## 360 0.5091 nan 0.0100 0.0001
## 380 0.4966 nan 0.0100 0.0000
## 400 0.4843 nan 0.0100 0.0000
## 420 0.4735 nan 0.0100 -0.0000
## 440 0.4624 nan 0.0100 0.0001
## 460 0.4518 nan 0.0100 -0.0000
## 480 0.4417 nan 0.0100 -0.0002
## 500 0.4322 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3118 nan 0.0100 0.0038
## 2 1.3026 nan 0.0100 0.0042
## 3 1.2935 nan 0.0100 0.0037
## 4 1.2846 nan 0.0100 0.0040
## 5 1.2768 nan 0.0100 0.0034
## 6 1.2683 nan 0.0100 0.0033
## 7 1.2595 nan 0.0100 0.0041
## 8 1.2514 nan 0.0100 0.0036
## 9 1.2432 nan 0.0100 0.0040
## 10 1.2351 nan 0.0100 0.0038
## 20 1.1611 nan 0.0100 0.0031
## 40 1.0456 nan 0.0100 0.0022
## 60 0.9545 nan 0.0100 0.0016
## 80 0.8841 nan 0.0100 0.0014
## 100 0.8279 nan 0.0100 0.0010
## 120 0.7838 nan 0.0100 0.0006
## 140 0.7453 nan 0.0100 0.0006
## 160 0.7126 nan 0.0100 0.0004
## 180 0.6853 nan 0.0100 0.0002
## 200 0.6596 nan 0.0100 0.0003
## 220 0.6380 nan 0.0100 0.0001
## 240 0.6178 nan 0.0100 0.0001
## 260 0.5989 nan 0.0100 0.0002
## 280 0.5829 nan 0.0100 0.0000
## 300 0.5661 nan 0.0100 0.0002
## 320 0.5505 nan 0.0100 -0.0001
## 340 0.5364 nan 0.0100 -0.0001
## 360 0.5238 nan 0.0100 0.0000
## 380 0.5118 nan 0.0100 0.0000
## 400 0.4997 nan 0.0100 -0.0001
## 420 0.4889 nan 0.0100 -0.0001
## 440 0.4784 nan 0.0100 -0.0000
## 460 0.4678 nan 0.0100 -0.0001
## 480 0.4578 nan 0.0100 -0.0001
## 500 0.4486 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3107 nan 0.0100 0.0044
## 2 1.3014 nan 0.0100 0.0044
## 3 1.2937 nan 0.0100 0.0035
## 4 1.2852 nan 0.0100 0.0039
## 5 1.2767 nan 0.0100 0.0034
## 6 1.2679 nan 0.0100 0.0039
## 7 1.2596 nan 0.0100 0.0033
## 8 1.2517 nan 0.0100 0.0034
## 9 1.2429 nan 0.0100 0.0040
## 10 1.2347 nan 0.0100 0.0036
## 20 1.1628 nan 0.0100 0.0028
## 40 1.0485 nan 0.0100 0.0021
## 60 0.9613 nan 0.0100 0.0015
## 80 0.8934 nan 0.0100 0.0013
## 100 0.8411 nan 0.0100 0.0010
## 120 0.7944 nan 0.0100 0.0007
## 140 0.7574 nan 0.0100 0.0003
## 160 0.7240 nan 0.0100 0.0007
## 180 0.6959 nan 0.0100 0.0003
## 200 0.6736 nan 0.0100 0.0002
## 220 0.6520 nan 0.0100 0.0000
## 240 0.6326 nan 0.0100 0.0000
## 260 0.6137 nan 0.0100 0.0003
## 280 0.5977 nan 0.0100 0.0001
## 300 0.5817 nan 0.0100 -0.0001
## 320 0.5662 nan 0.0100 0.0002
## 340 0.5537 nan 0.0100 -0.0000
## 360 0.5408 nan 0.0100 -0.0001
## 380 0.5270 nan 0.0100 -0.0001
## 400 0.5152 nan 0.0100 0.0001
## 420 0.5041 nan 0.0100 -0.0001
## 440 0.4944 nan 0.0100 -0.0000
## 460 0.4848 nan 0.0100 -0.0001
## 480 0.4744 nan 0.0100 -0.0000
## 500 0.4657 nan 0.0100 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2442 nan 0.1000 0.0379
## 2 1.1770 nan 0.1000 0.0305
## 3 1.1243 nan 0.1000 0.0251
## 4 1.0739 nan 0.1000 0.0218
## 5 1.0295 nan 0.1000 0.0198
## 6 0.9918 nan 0.1000 0.0157
## 7 0.9542 nan 0.1000 0.0156
## 8 0.9259 nan 0.1000 0.0115
## 9 0.9003 nan 0.1000 0.0098
## 10 0.8788 nan 0.1000 0.0073
## 20 0.7228 nan 0.1000 0.0025
## 40 0.5800 nan 0.1000 -0.0001
## 60 0.5008 nan 0.1000 -0.0002
## 80 0.4411 nan 0.1000 0.0000
## 100 0.3915 nan 0.1000 -0.0010
## 120 0.3509 nan 0.1000 -0.0005
## 140 0.3083 nan 0.1000 -0.0005
## 160 0.2763 nan 0.1000 -0.0011
## 180 0.2475 nan 0.1000 -0.0008
## 200 0.2241 nan 0.1000 -0.0007
## 220 0.1994 nan 0.1000 -0.0002
## 240 0.1804 nan 0.1000 0.0001
## 260 0.1650 nan 0.1000 0.0000
## 280 0.1495 nan 0.1000 -0.0001
## 300 0.1354 nan 0.1000 -0.0002
## 320 0.1235 nan 0.1000 -0.0003
## 340 0.1135 nan 0.1000 -0.0003
## 360 0.1043 nan 0.1000 -0.0003
## 380 0.0966 nan 0.1000 -0.0002
## 400 0.0892 nan 0.1000 -0.0002
## 420 0.0824 nan 0.1000 -0.0003
## 440 0.0758 nan 0.1000 -0.0002
## 460 0.0708 nan 0.1000 -0.0001
## 480 0.0656 nan 0.1000 -0.0002
## 500 0.0607 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2429 nan 0.1000 0.0330
## 2 1.1750 nan 0.1000 0.0307
## 3 1.1208 nan 0.1000 0.0242
## 4 1.0707 nan 0.1000 0.0239
## 5 1.0276 nan 0.1000 0.0166
## 6 0.9881 nan 0.1000 0.0183
## 7 0.9527 nan 0.1000 0.0144
## 8 0.9250 nan 0.1000 0.0113
## 9 0.8982 nan 0.1000 0.0093
## 10 0.8723 nan 0.1000 0.0105
## 20 0.7249 nan 0.1000 0.0010
## 40 0.5830 nan 0.1000 -0.0010
## 60 0.5021 nan 0.1000 -0.0024
## 80 0.4461 nan 0.1000 -0.0003
## 100 0.3945 nan 0.1000 -0.0006
## 120 0.3555 nan 0.1000 -0.0005
## 140 0.3146 nan 0.1000 -0.0013
## 160 0.2810 nan 0.1000 -0.0004
## 180 0.2562 nan 0.1000 -0.0006
## 200 0.2311 nan 0.1000 -0.0005
## 220 0.2104 nan 0.1000 -0.0008
## 240 0.1903 nan 0.1000 -0.0005
## 260 0.1761 nan 0.1000 -0.0004
## 280 0.1599 nan 0.1000 -0.0005
## 300 0.1454 nan 0.1000 -0.0005
## 320 0.1334 nan 0.1000 -0.0003
## 340 0.1246 nan 0.1000 -0.0003
## 360 0.1137 nan 0.1000 -0.0003
## 380 0.1048 nan 0.1000 -0.0004
## 400 0.0965 nan 0.1000 -0.0001
## 420 0.0887 nan 0.1000 -0.0001
## 440 0.0825 nan 0.1000 -0.0004
## 460 0.0765 nan 0.1000 -0.0001
## 480 0.0709 nan 0.1000 -0.0002
## 500 0.0665 nan 0.1000 -0.0003
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2477 nan 0.1000 0.0355
## 2 1.1845 nan 0.1000 0.0327
## 3 1.1242 nan 0.1000 0.0276
## 4 1.0688 nan 0.1000 0.0214
## 5 1.0243 nan 0.1000 0.0193
## 6 0.9920 nan 0.1000 0.0126
## 7 0.9621 nan 0.1000 0.0146
## 8 0.9329 nan 0.1000 0.0119
## 9 0.9035 nan 0.1000 0.0116
## 10 0.8815 nan 0.1000 0.0067
## 20 0.7298 nan 0.1000 0.0030
## 40 0.5922 nan 0.1000 -0.0008
## 60 0.5056 nan 0.1000 -0.0000
## 80 0.4498 nan 0.1000 -0.0007
## 100 0.4063 nan 0.1000 0.0001
## 120 0.3590 nan 0.1000 0.0000
## 140 0.3287 nan 0.1000 -0.0010
## 160 0.2967 nan 0.1000 -0.0008
## 180 0.2689 nan 0.1000 -0.0011
## 200 0.2460 nan 0.1000 -0.0012
## 220 0.2259 nan 0.1000 -0.0015
## 240 0.2077 nan 0.1000 -0.0007
## 260 0.1912 nan 0.1000 -0.0006
## 280 0.1758 nan 0.1000 -0.0006
## 300 0.1629 nan 0.1000 -0.0005
## 320 0.1497 nan 0.1000 -0.0003
## 340 0.1371 nan 0.1000 -0.0008
## 360 0.1261 nan 0.1000 -0.0007
## 380 0.1173 nan 0.1000 -0.0004
## 400 0.1095 nan 0.1000 -0.0005
## 420 0.1019 nan 0.1000 -0.0006
## 440 0.0948 nan 0.1000 -0.0002
## 460 0.0881 nan 0.1000 -0.0002
## 480 0.0815 nan 0.1000 -0.0002
## 500 0.0759 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2369 nan 0.1000 0.0332
## 2 1.1576 nan 0.1000 0.0338
## 3 1.0979 nan 0.1000 0.0232
## 4 1.0453 nan 0.1000 0.0233
## 5 1.0016 nan 0.1000 0.0179
## 6 0.9618 nan 0.1000 0.0156
## 7 0.9269 nan 0.1000 0.0127
## 8 0.8961 nan 0.1000 0.0116
## 9 0.8737 nan 0.1000 0.0097
## 10 0.8492 nan 0.1000 0.0099
## 20 0.6857 nan 0.1000 0.0027
## 40 0.5318 nan 0.1000 -0.0011
## 60 0.4432 nan 0.1000 -0.0004
## 80 0.3800 nan 0.1000 -0.0002
## 100 0.3256 nan 0.1000 -0.0010
## 120 0.2817 nan 0.1000 -0.0003
## 140 0.2483 nan 0.1000 -0.0010
## 160 0.2197 nan 0.1000 -0.0008
## 180 0.1928 nan 0.1000 -0.0011
## 200 0.1686 nan 0.1000 -0.0003
## 220 0.1497 nan 0.1000 -0.0003
## 240 0.1326 nan 0.1000 -0.0004
## 260 0.1188 nan 0.1000 -0.0002
## 280 0.1077 nan 0.1000 -0.0005
## 300 0.0979 nan 0.1000 -0.0000
## 320 0.0872 nan 0.1000 -0.0004
## 340 0.0789 nan 0.1000 -0.0002
## 360 0.0712 nan 0.1000 -0.0002
## 380 0.0639 nan 0.1000 -0.0001
## 400 0.0574 nan 0.1000 -0.0000
## 420 0.0523 nan 0.1000 -0.0002
## 440 0.0472 nan 0.1000 -0.0000
## 460 0.0432 nan 0.1000 -0.0001
## 480 0.0383 nan 0.1000 -0.0000
## 500 0.0345 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2300 nan 0.1000 0.0407
## 2 1.1688 nan 0.1000 0.0266
## 3 1.1066 nan 0.1000 0.0272
## 4 1.0567 nan 0.1000 0.0208
## 5 1.0101 nan 0.1000 0.0190
## 6 0.9738 nan 0.1000 0.0163
## 7 0.9391 nan 0.1000 0.0129
## 8 0.9063 nan 0.1000 0.0119
## 9 0.8750 nan 0.1000 0.0119
## 10 0.8487 nan 0.1000 0.0084
## 20 0.6857 nan 0.1000 0.0026
## 40 0.5574 nan 0.1000 -0.0001
## 60 0.4652 nan 0.1000 -0.0023
## 80 0.3967 nan 0.1000 -0.0012
## 100 0.3432 nan 0.1000 -0.0014
## 120 0.3005 nan 0.1000 -0.0007
## 140 0.2629 nan 0.1000 -0.0017
## 160 0.2294 nan 0.1000 -0.0005
## 180 0.2034 nan 0.1000 -0.0010
## 200 0.1855 nan 0.1000 -0.0010
## 220 0.1670 nan 0.1000 -0.0002
## 240 0.1464 nan 0.1000 -0.0000
## 260 0.1307 nan 0.1000 -0.0006
## 280 0.1170 nan 0.1000 -0.0002
## 300 0.1057 nan 0.1000 -0.0003
## 320 0.0946 nan 0.1000 -0.0003
## 340 0.0847 nan 0.1000 -0.0004
## 360 0.0758 nan 0.1000 -0.0003
## 380 0.0697 nan 0.1000 -0.0003
## 400 0.0623 nan 0.1000 -0.0003
## 420 0.0560 nan 0.1000 -0.0002
## 440 0.0508 nan 0.1000 -0.0002
## 460 0.0456 nan 0.1000 -0.0001
## 480 0.0411 nan 0.1000 -0.0002
## 500 0.0373 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2341 nan 0.1000 0.0414
## 2 1.1665 nan 0.1000 0.0306
## 3 1.1110 nan 0.1000 0.0224
## 4 1.0585 nan 0.1000 0.0170
## 5 1.0111 nan 0.1000 0.0212
## 6 0.9705 nan 0.1000 0.0180
## 7 0.9375 nan 0.1000 0.0140
## 8 0.9051 nan 0.1000 0.0123
## 9 0.8798 nan 0.1000 0.0091
## 10 0.8530 nan 0.1000 0.0108
## 20 0.6957 nan 0.1000 0.0040
## 40 0.5621 nan 0.1000 0.0015
## 60 0.4668 nan 0.1000 0.0001
## 80 0.3989 nan 0.1000 0.0003
## 100 0.3425 nan 0.1000 -0.0001
## 120 0.3014 nan 0.1000 -0.0008
## 140 0.2658 nan 0.1000 -0.0017
## 160 0.2348 nan 0.1000 -0.0011
## 180 0.2115 nan 0.1000 -0.0009
## 200 0.1875 nan 0.1000 -0.0007
## 220 0.1662 nan 0.1000 -0.0005
## 240 0.1502 nan 0.1000 -0.0006
## 260 0.1347 nan 0.1000 -0.0008
## 280 0.1219 nan 0.1000 -0.0004
## 300 0.1099 nan 0.1000 -0.0004
## 320 0.1003 nan 0.1000 -0.0005
## 340 0.0902 nan 0.1000 -0.0003
## 360 0.0818 nan 0.1000 -0.0002
## 380 0.0742 nan 0.1000 -0.0002
## 400 0.0669 nan 0.1000 -0.0003
## 420 0.0613 nan 0.1000 -0.0002
## 440 0.0554 nan 0.1000 -0.0002
## 460 0.0505 nan 0.1000 -0.0002
## 480 0.0462 nan 0.1000 -0.0001
## 500 0.0420 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2292 nan 0.1000 0.0391
## 2 1.1599 nan 0.1000 0.0313
## 3 1.0946 nan 0.1000 0.0278
## 4 1.0397 nan 0.1000 0.0216
## 5 0.9902 nan 0.1000 0.0193
## 6 0.9422 nan 0.1000 0.0178
## 7 0.9064 nan 0.1000 0.0132
## 8 0.8781 nan 0.1000 0.0085
## 9 0.8510 nan 0.1000 0.0098
## 10 0.8261 nan 0.1000 0.0092
## 20 0.6554 nan 0.1000 0.0022
## 40 0.5017 nan 0.1000 0.0003
## 60 0.4048 nan 0.1000 -0.0002
## 80 0.3334 nan 0.1000 -0.0009
## 100 0.2762 nan 0.1000 -0.0002
## 120 0.2322 nan 0.1000 -0.0009
## 140 0.1962 nan 0.1000 -0.0008
## 160 0.1680 nan 0.1000 -0.0002
## 180 0.1464 nan 0.1000 -0.0002
## 200 0.1274 nan 0.1000 -0.0003
## 220 0.1103 nan 0.1000 -0.0003
## 240 0.0968 nan 0.1000 -0.0000
## 260 0.0847 nan 0.1000 -0.0002
## 280 0.0744 nan 0.1000 -0.0003
## 300 0.0659 nan 0.1000 -0.0001
## 320 0.0579 nan 0.1000 -0.0000
## 340 0.0513 nan 0.1000 -0.0003
## 360 0.0455 nan 0.1000 -0.0001
## 380 0.0402 nan 0.1000 -0.0001
## 400 0.0351 nan 0.1000 -0.0001
## 420 0.0309 nan 0.1000 0.0000
## 440 0.0272 nan 0.1000 0.0000
## 460 0.0240 nan 0.1000 -0.0001
## 480 0.0213 nan 0.1000 -0.0001
## 500 0.0190 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2314 nan 0.1000 0.0401
## 2 1.1590 nan 0.1000 0.0310
## 3 1.0965 nan 0.1000 0.0233
## 4 1.0397 nan 0.1000 0.0248
## 5 0.9911 nan 0.1000 0.0205
## 6 0.9499 nan 0.1000 0.0188
## 7 0.9164 nan 0.1000 0.0140
## 8 0.8847 nan 0.1000 0.0111
## 9 0.8560 nan 0.1000 0.0128
## 10 0.8278 nan 0.1000 0.0112
## 20 0.6675 nan 0.1000 0.0014
## 40 0.5026 nan 0.1000 -0.0004
## 60 0.4144 nan 0.1000 0.0001
## 80 0.3421 nan 0.1000 -0.0014
## 100 0.2897 nan 0.1000 -0.0018
## 120 0.2494 nan 0.1000 -0.0007
## 140 0.2130 nan 0.1000 -0.0004
## 160 0.1828 nan 0.1000 -0.0003
## 180 0.1554 nan 0.1000 -0.0006
## 200 0.1312 nan 0.1000 -0.0003
## 220 0.1127 nan 0.1000 -0.0005
## 240 0.0997 nan 0.1000 -0.0006
## 260 0.0868 nan 0.1000 -0.0002
## 280 0.0762 nan 0.1000 -0.0003
## 300 0.0679 nan 0.1000 -0.0001
## 320 0.0591 nan 0.1000 -0.0003
## 340 0.0526 nan 0.1000 -0.0001
## 360 0.0471 nan 0.1000 -0.0001
## 380 0.0411 nan 0.1000 -0.0001
## 400 0.0366 nan 0.1000 -0.0002
## 420 0.0320 nan 0.1000 -0.0000
## 440 0.0287 nan 0.1000 -0.0002
## 460 0.0252 nan 0.1000 -0.0001
## 480 0.0223 nan 0.1000 -0.0000
## 500 0.0199 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2391 nan 0.1000 0.0368
## 2 1.1585 nan 0.1000 0.0343
## 3 1.0956 nan 0.1000 0.0289
## 4 1.0422 nan 0.1000 0.0208
## 5 0.9990 nan 0.1000 0.0172
## 6 0.9563 nan 0.1000 0.0165
## 7 0.9228 nan 0.1000 0.0135
## 8 0.8905 nan 0.1000 0.0129
## 9 0.8628 nan 0.1000 0.0121
## 10 0.8342 nan 0.1000 0.0097
## 20 0.6641 nan 0.1000 0.0013
## 40 0.5125 nan 0.1000 -0.0001
## 60 0.4160 nan 0.1000 0.0012
## 80 0.3433 nan 0.1000 -0.0007
## 100 0.2933 nan 0.1000 -0.0009
## 120 0.2498 nan 0.1000 -0.0004
## 140 0.2126 nan 0.1000 -0.0003
## 160 0.1828 nan 0.1000 -0.0002
## 180 0.1615 nan 0.1000 -0.0010
## 200 0.1411 nan 0.1000 -0.0002
## 220 0.1237 nan 0.1000 -0.0003
## 240 0.1100 nan 0.1000 -0.0003
## 260 0.0970 nan 0.1000 -0.0004
## 280 0.0858 nan 0.1000 -0.0002
## 300 0.0763 nan 0.1000 -0.0003
## 320 0.0673 nan 0.1000 -0.0002
## 340 0.0593 nan 0.1000 -0.0003
## 360 0.0523 nan 0.1000 -0.0001
## 380 0.0462 nan 0.1000 -0.0001
## 400 0.0409 nan 0.1000 -0.0002
## 420 0.0369 nan 0.1000 -0.0000
## 440 0.0328 nan 0.1000 -0.0001
## 460 0.0294 nan 0.1000 -0.0002
## 480 0.0262 nan 0.1000 -0.0001
## 500 0.0233 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3204 nan 0.0010 0.0004
## 2 1.3196 nan 0.0010 0.0004
## 3 1.3188 nan 0.0010 0.0003
## 4 1.3181 nan 0.0010 0.0004
## 5 1.3172 nan 0.0010 0.0004
## 6 1.3166 nan 0.0010 0.0003
## 7 1.3158 nan 0.0010 0.0003
## 8 1.3150 nan 0.0010 0.0003
## 9 1.3143 nan 0.0010 0.0003
## 10 1.3135 nan 0.0010 0.0003
## 20 1.3060 nan 0.0010 0.0004
## 40 1.2910 nan 0.0010 0.0003
## 60 1.2765 nan 0.0010 0.0003
## 80 1.2623 nan 0.0010 0.0003
## 100 1.2486 nan 0.0010 0.0003
## 120 1.2354 nan 0.0010 0.0002
## 140 1.2223 nan 0.0010 0.0003
## 160 1.2097 nan 0.0010 0.0003
## 180 1.1977 nan 0.0010 0.0003
## 200 1.1858 nan 0.0010 0.0002
## 220 1.1744 nan 0.0010 0.0002
## 240 1.1632 nan 0.0010 0.0002
## 260 1.1525 nan 0.0010 0.0001
## 280 1.1418 nan 0.0010 0.0002
## 300 1.1315 nan 0.0010 0.0003
## 320 1.1213 nan 0.0010 0.0002
## 340 1.1111 nan 0.0010 0.0002
## 360 1.1015 nan 0.0010 0.0002
## 380 1.0922 nan 0.0010 0.0002
## 400 1.0827 nan 0.0010 0.0002
## 420 1.0738 nan 0.0010 0.0002
## 440 1.0652 nan 0.0010 0.0002
## 460 1.0567 nan 0.0010 0.0002
## 480 1.0485 nan 0.0010 0.0002
## 500 1.0404 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3204 nan 0.0010 0.0003
## 2 1.3196 nan 0.0010 0.0004
## 3 1.3188 nan 0.0010 0.0004
## 4 1.3179 nan 0.0010 0.0004
## 5 1.3171 nan 0.0010 0.0004
## 6 1.3163 nan 0.0010 0.0003
## 7 1.3155 nan 0.0010 0.0004
## 8 1.3147 nan 0.0010 0.0003
## 9 1.3140 nan 0.0010 0.0004
## 10 1.3132 nan 0.0010 0.0003
## 20 1.3053 nan 0.0010 0.0004
## 40 1.2903 nan 0.0010 0.0004
## 60 1.2759 nan 0.0010 0.0003
## 80 1.2617 nan 0.0010 0.0003
## 100 1.2480 nan 0.0010 0.0003
## 120 1.2345 nan 0.0010 0.0003
## 140 1.2215 nan 0.0010 0.0003
## 160 1.2092 nan 0.0010 0.0003
## 180 1.1971 nan 0.0010 0.0002
## 200 1.1852 nan 0.0010 0.0002
## 220 1.1738 nan 0.0010 0.0002
## 240 1.1625 nan 0.0010 0.0002
## 260 1.1516 nan 0.0010 0.0002
## 280 1.1412 nan 0.0010 0.0002
## 300 1.1309 nan 0.0010 0.0002
## 320 1.1210 nan 0.0010 0.0002
## 340 1.1113 nan 0.0010 0.0002
## 360 1.1018 nan 0.0010 0.0002
## 380 1.0925 nan 0.0010 0.0002
## 400 1.0832 nan 0.0010 0.0002
## 420 1.0744 nan 0.0010 0.0002
## 440 1.0659 nan 0.0010 0.0002
## 460 1.0575 nan 0.0010 0.0002
## 480 1.0491 nan 0.0010 0.0002
## 500 1.0413 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3204 nan 0.0010 0.0004
## 2 1.3197 nan 0.0010 0.0003
## 3 1.3189 nan 0.0010 0.0004
## 4 1.3181 nan 0.0010 0.0004
## 5 1.3173 nan 0.0010 0.0003
## 6 1.3165 nan 0.0010 0.0003
## 7 1.3158 nan 0.0010 0.0004
## 8 1.3149 nan 0.0010 0.0004
## 9 1.3141 nan 0.0010 0.0004
## 10 1.3133 nan 0.0010 0.0004
## 20 1.3057 nan 0.0010 0.0003
## 40 1.2909 nan 0.0010 0.0003
## 60 1.2766 nan 0.0010 0.0003
## 80 1.2624 nan 0.0010 0.0003
## 100 1.2490 nan 0.0010 0.0003
## 120 1.2356 nan 0.0010 0.0003
## 140 1.2227 nan 0.0010 0.0003
## 160 1.2103 nan 0.0010 0.0003
## 180 1.1983 nan 0.0010 0.0003
## 200 1.1865 nan 0.0010 0.0003
## 220 1.1750 nan 0.0010 0.0002
## 240 1.1639 nan 0.0010 0.0002
## 260 1.1528 nan 0.0010 0.0003
## 280 1.1422 nan 0.0010 0.0002
## 300 1.1320 nan 0.0010 0.0002
## 320 1.1221 nan 0.0010 0.0002
## 340 1.1122 nan 0.0010 0.0002
## 360 1.1027 nan 0.0010 0.0002
## 380 1.0936 nan 0.0010 0.0002
## 400 1.0847 nan 0.0010 0.0002
## 420 1.0758 nan 0.0010 0.0002
## 440 1.0671 nan 0.0010 0.0002
## 460 1.0587 nan 0.0010 0.0002
## 480 1.0505 nan 0.0010 0.0002
## 500 1.0423 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3194 nan 0.0010 0.0004
## 3 1.3186 nan 0.0010 0.0004
## 4 1.3178 nan 0.0010 0.0003
## 5 1.3169 nan 0.0010 0.0003
## 6 1.3160 nan 0.0010 0.0004
## 7 1.3152 nan 0.0010 0.0004
## 8 1.3143 nan 0.0010 0.0004
## 9 1.3135 nan 0.0010 0.0003
## 10 1.3127 nan 0.0010 0.0004
## 20 1.3042 nan 0.0010 0.0004
## 40 1.2879 nan 0.0010 0.0004
## 60 1.2719 nan 0.0010 0.0004
## 80 1.2570 nan 0.0010 0.0003
## 100 1.2424 nan 0.0010 0.0003
## 120 1.2284 nan 0.0010 0.0003
## 140 1.2142 nan 0.0010 0.0002
## 160 1.2009 nan 0.0010 0.0003
## 180 1.1879 nan 0.0010 0.0003
## 200 1.1752 nan 0.0010 0.0003
## 220 1.1626 nan 0.0010 0.0003
## 240 1.1506 nan 0.0010 0.0003
## 260 1.1388 nan 0.0010 0.0003
## 280 1.1275 nan 0.0010 0.0002
## 300 1.1164 nan 0.0010 0.0003
## 320 1.1055 nan 0.0010 0.0003
## 340 1.0950 nan 0.0010 0.0002
## 360 1.0849 nan 0.0010 0.0002
## 380 1.0746 nan 0.0010 0.0002
## 400 1.0650 nan 0.0010 0.0002
## 420 1.0558 nan 0.0010 0.0002
## 440 1.0468 nan 0.0010 0.0001
## 460 1.0378 nan 0.0010 0.0002
## 480 1.0291 nan 0.0010 0.0002
## 500 1.0207 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3195 nan 0.0010 0.0004
## 3 1.3186 nan 0.0010 0.0004
## 4 1.3178 nan 0.0010 0.0003
## 5 1.3169 nan 0.0010 0.0004
## 6 1.3161 nan 0.0010 0.0004
## 7 1.3152 nan 0.0010 0.0003
## 8 1.3143 nan 0.0010 0.0004
## 9 1.3135 nan 0.0010 0.0004
## 10 1.3127 nan 0.0010 0.0003
## 20 1.3045 nan 0.0010 0.0004
## 40 1.2883 nan 0.0010 0.0004
## 60 1.2727 nan 0.0010 0.0003
## 80 1.2575 nan 0.0010 0.0003
## 100 1.2429 nan 0.0010 0.0003
## 120 1.2287 nan 0.0010 0.0003
## 140 1.2150 nan 0.0010 0.0003
## 160 1.2015 nan 0.0010 0.0003
## 180 1.1885 nan 0.0010 0.0003
## 200 1.1757 nan 0.0010 0.0003
## 220 1.1636 nan 0.0010 0.0002
## 240 1.1516 nan 0.0010 0.0002
## 260 1.1399 nan 0.0010 0.0002
## 280 1.1287 nan 0.0010 0.0003
## 300 1.1176 nan 0.0010 0.0002
## 320 1.1069 nan 0.0010 0.0002
## 340 1.0964 nan 0.0010 0.0002
## 360 1.0863 nan 0.0010 0.0002
## 380 1.0762 nan 0.0010 0.0001
## 400 1.0666 nan 0.0010 0.0002
## 420 1.0570 nan 0.0010 0.0002
## 440 1.0481 nan 0.0010 0.0002
## 460 1.0394 nan 0.0010 0.0002
## 480 1.0306 nan 0.0010 0.0002
## 500 1.0223 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3194 nan 0.0010 0.0004
## 3 1.3185 nan 0.0010 0.0004
## 4 1.3177 nan 0.0010 0.0004
## 5 1.3168 nan 0.0010 0.0004
## 6 1.3160 nan 0.0010 0.0003
## 7 1.3153 nan 0.0010 0.0004
## 8 1.3145 nan 0.0010 0.0003
## 9 1.3137 nan 0.0010 0.0004
## 10 1.3128 nan 0.0010 0.0004
## 20 1.3045 nan 0.0010 0.0003
## 40 1.2882 nan 0.0010 0.0004
## 60 1.2726 nan 0.0010 0.0003
## 80 1.2572 nan 0.0010 0.0004
## 100 1.2427 nan 0.0010 0.0002
## 120 1.2286 nan 0.0010 0.0003
## 140 1.2150 nan 0.0010 0.0003
## 160 1.2014 nan 0.0010 0.0003
## 180 1.1885 nan 0.0010 0.0003
## 200 1.1760 nan 0.0010 0.0002
## 220 1.1637 nan 0.0010 0.0003
## 240 1.1520 nan 0.0010 0.0003
## 260 1.1403 nan 0.0010 0.0003
## 280 1.1289 nan 0.0010 0.0002
## 300 1.1176 nan 0.0010 0.0002
## 320 1.1069 nan 0.0010 0.0002
## 340 1.0966 nan 0.0010 0.0002
## 360 1.0867 nan 0.0010 0.0002
## 380 1.0770 nan 0.0010 0.0002
## 400 1.0674 nan 0.0010 0.0002
## 420 1.0581 nan 0.0010 0.0002
## 440 1.0491 nan 0.0010 0.0002
## 460 1.0401 nan 0.0010 0.0002
## 480 1.0316 nan 0.0010 0.0002
## 500 1.0230 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3194 nan 0.0010 0.0004
## 3 1.3184 nan 0.0010 0.0004
## 4 1.3176 nan 0.0010 0.0004
## 5 1.3166 nan 0.0010 0.0004
## 6 1.3158 nan 0.0010 0.0004
## 7 1.3148 nan 0.0010 0.0004
## 8 1.3139 nan 0.0010 0.0004
## 9 1.3130 nan 0.0010 0.0004
## 10 1.3121 nan 0.0010 0.0004
## 20 1.3033 nan 0.0010 0.0004
## 40 1.2860 nan 0.0010 0.0004
## 60 1.2693 nan 0.0010 0.0003
## 80 1.2532 nan 0.0010 0.0003
## 100 1.2376 nan 0.0010 0.0004
## 120 1.2227 nan 0.0010 0.0003
## 140 1.2079 nan 0.0010 0.0003
## 160 1.1940 nan 0.0010 0.0003
## 180 1.1803 nan 0.0010 0.0003
## 200 1.1670 nan 0.0010 0.0003
## 220 1.1542 nan 0.0010 0.0003
## 240 1.1417 nan 0.0010 0.0002
## 260 1.1294 nan 0.0010 0.0003
## 280 1.1176 nan 0.0010 0.0003
## 300 1.1064 nan 0.0010 0.0002
## 320 1.0953 nan 0.0010 0.0002
## 340 1.0845 nan 0.0010 0.0002
## 360 1.0740 nan 0.0010 0.0002
## 380 1.0637 nan 0.0010 0.0002
## 400 1.0536 nan 0.0010 0.0002
## 420 1.0440 nan 0.0010 0.0002
## 440 1.0345 nan 0.0010 0.0002
## 460 1.0250 nan 0.0010 0.0002
## 480 1.0161 nan 0.0010 0.0002
## 500 1.0072 nan 0.0010 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3194 nan 0.0010 0.0004
## 3 1.3184 nan 0.0010 0.0004
## 4 1.3175 nan 0.0010 0.0004
## 5 1.3166 nan 0.0010 0.0004
## 6 1.3157 nan 0.0010 0.0004
## 7 1.3148 nan 0.0010 0.0004
## 8 1.3140 nan 0.0010 0.0004
## 9 1.3131 nan 0.0010 0.0004
## 10 1.3122 nan 0.0010 0.0004
## 20 1.3037 nan 0.0010 0.0004
## 40 1.2865 nan 0.0010 0.0004
## 60 1.2699 nan 0.0010 0.0004
## 80 1.2536 nan 0.0010 0.0004
## 100 1.2380 nan 0.0010 0.0003
## 120 1.2230 nan 0.0010 0.0003
## 140 1.2085 nan 0.0010 0.0003
## 160 1.1942 nan 0.0010 0.0003
## 180 1.1805 nan 0.0010 0.0003
## 200 1.1675 nan 0.0010 0.0003
## 220 1.1547 nan 0.0010 0.0003
## 240 1.1421 nan 0.0010 0.0003
## 260 1.1300 nan 0.0010 0.0003
## 280 1.1184 nan 0.0010 0.0002
## 300 1.1070 nan 0.0010 0.0002
## 320 1.0961 nan 0.0010 0.0002
## 340 1.0853 nan 0.0010 0.0002
## 360 1.0746 nan 0.0010 0.0003
## 380 1.0641 nan 0.0010 0.0002
## 400 1.0543 nan 0.0010 0.0002
## 420 1.0445 nan 0.0010 0.0002
## 440 1.0351 nan 0.0010 0.0002
## 460 1.0260 nan 0.0010 0.0002
## 480 1.0170 nan 0.0010 0.0002
## 500 1.0082 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3194 nan 0.0010 0.0004
## 3 1.3184 nan 0.0010 0.0004
## 4 1.3175 nan 0.0010 0.0004
## 5 1.3166 nan 0.0010 0.0004
## 6 1.3157 nan 0.0010 0.0004
## 7 1.3149 nan 0.0010 0.0004
## 8 1.3141 nan 0.0010 0.0003
## 9 1.3132 nan 0.0010 0.0004
## 10 1.3124 nan 0.0010 0.0004
## 20 1.3037 nan 0.0010 0.0004
## 40 1.2863 nan 0.0010 0.0004
## 60 1.2697 nan 0.0010 0.0004
## 80 1.2540 nan 0.0010 0.0004
## 100 1.2387 nan 0.0010 0.0003
## 120 1.2238 nan 0.0010 0.0003
## 140 1.2094 nan 0.0010 0.0003
## 160 1.1958 nan 0.0010 0.0003
## 180 1.1822 nan 0.0010 0.0003
## 200 1.1691 nan 0.0010 0.0003
## 220 1.1564 nan 0.0010 0.0003
## 240 1.1442 nan 0.0010 0.0003
## 260 1.1320 nan 0.0010 0.0003
## 280 1.1205 nan 0.0010 0.0003
## 300 1.1091 nan 0.0010 0.0002
## 320 1.0980 nan 0.0010 0.0003
## 340 1.0873 nan 0.0010 0.0002
## 360 1.0768 nan 0.0010 0.0002
## 380 1.0665 nan 0.0010 0.0002
## 400 1.0566 nan 0.0010 0.0002
## 420 1.0468 nan 0.0010 0.0002
## 440 1.0373 nan 0.0010 0.0002
## 460 1.0282 nan 0.0010 0.0002
## 480 1.0194 nan 0.0010 0.0002
## 500 1.0107 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3135 nan 0.0100 0.0037
## 2 1.3055 nan 0.0100 0.0039
## 3 1.2977 nan 0.0100 0.0037
## 4 1.2900 nan 0.0100 0.0036
## 5 1.2822 nan 0.0100 0.0032
## 6 1.2750 nan 0.0100 0.0033
## 7 1.2680 nan 0.0100 0.0031
## 8 1.2606 nan 0.0100 0.0032
## 9 1.2535 nan 0.0100 0.0033
## 10 1.2468 nan 0.0100 0.0031
## 20 1.1838 nan 0.0100 0.0025
## 40 1.0827 nan 0.0100 0.0019
## 60 1.0027 nan 0.0100 0.0017
## 80 0.9396 nan 0.0100 0.0008
## 100 0.8883 nan 0.0100 0.0010
## 120 0.8468 nan 0.0100 0.0006
## 140 0.8127 nan 0.0100 0.0005
## 160 0.7827 nan 0.0100 0.0005
## 180 0.7576 nan 0.0100 0.0004
## 200 0.7359 nan 0.0100 0.0003
## 220 0.7154 nan 0.0100 0.0002
## 240 0.6983 nan 0.0100 0.0002
## 260 0.6822 nan 0.0100 0.0002
## 280 0.6681 nan 0.0100 0.0001
## 300 0.6546 nan 0.0100 0.0000
## 320 0.6419 nan 0.0100 -0.0000
## 340 0.6304 nan 0.0100 0.0001
## 360 0.6197 nan 0.0100 0.0001
## 380 0.6095 nan 0.0100 0.0001
## 400 0.5991 nan 0.0100 0.0000
## 420 0.5895 nan 0.0100 -0.0001
## 440 0.5800 nan 0.0100 0.0000
## 460 0.5714 nan 0.0100 -0.0001
## 480 0.5628 nan 0.0100 -0.0001
## 500 0.5537 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3135 nan 0.0100 0.0035
## 2 1.3056 nan 0.0100 0.0035
## 3 1.2981 nan 0.0100 0.0036
## 4 1.2902 nan 0.0100 0.0031
## 5 1.2824 nan 0.0100 0.0033
## 6 1.2756 nan 0.0100 0.0028
## 7 1.2688 nan 0.0100 0.0031
## 8 1.2612 nan 0.0100 0.0032
## 9 1.2544 nan 0.0100 0.0033
## 10 1.2476 nan 0.0100 0.0028
## 20 1.1852 nan 0.0100 0.0024
## 40 1.0832 nan 0.0100 0.0020
## 60 1.0038 nan 0.0100 0.0016
## 80 0.9420 nan 0.0100 0.0010
## 100 0.8911 nan 0.0100 0.0009
## 120 0.8499 nan 0.0100 0.0006
## 140 0.8160 nan 0.0100 0.0006
## 160 0.7882 nan 0.0100 0.0005
## 180 0.7632 nan 0.0100 0.0003
## 200 0.7414 nan 0.0100 0.0002
## 220 0.7223 nan 0.0100 0.0003
## 240 0.7051 nan 0.0100 0.0002
## 260 0.6903 nan 0.0100 0.0000
## 280 0.6754 nan 0.0100 -0.0000
## 300 0.6620 nan 0.0100 0.0001
## 320 0.6498 nan 0.0100 -0.0001
## 340 0.6382 nan 0.0100 -0.0001
## 360 0.6286 nan 0.0100 0.0001
## 380 0.6177 nan 0.0100 0.0001
## 400 0.6078 nan 0.0100 -0.0001
## 420 0.5987 nan 0.0100 -0.0001
## 440 0.5898 nan 0.0100 -0.0001
## 460 0.5810 nan 0.0100 -0.0001
## 480 0.5723 nan 0.0100 -0.0000
## 500 0.5636 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3131 nan 0.0100 0.0035
## 2 1.3056 nan 0.0100 0.0032
## 3 1.2978 nan 0.0100 0.0033
## 4 1.2898 nan 0.0100 0.0035
## 5 1.2829 nan 0.0100 0.0031
## 6 1.2756 nan 0.0100 0.0034
## 7 1.2681 nan 0.0100 0.0033
## 8 1.2603 nan 0.0100 0.0033
## 9 1.2540 nan 0.0100 0.0029
## 10 1.2465 nan 0.0100 0.0033
## 20 1.1833 nan 0.0100 0.0024
## 40 1.0830 nan 0.0100 0.0018
## 60 1.0031 nan 0.0100 0.0015
## 80 0.9411 nan 0.0100 0.0011
## 100 0.8904 nan 0.0100 0.0009
## 120 0.8498 nan 0.0100 0.0005
## 140 0.8159 nan 0.0100 0.0004
## 160 0.7867 nan 0.0100 0.0003
## 180 0.7624 nan 0.0100 0.0001
## 200 0.7416 nan 0.0100 0.0003
## 220 0.7227 nan 0.0100 0.0003
## 240 0.7057 nan 0.0100 0.0001
## 260 0.6905 nan 0.0100 0.0001
## 280 0.6769 nan 0.0100 -0.0000
## 300 0.6638 nan 0.0100 0.0000
## 320 0.6519 nan 0.0100 0.0000
## 340 0.6409 nan 0.0100 -0.0002
## 360 0.6302 nan 0.0100 0.0000
## 380 0.6210 nan 0.0100 0.0001
## 400 0.6110 nan 0.0100 -0.0000
## 420 0.6026 nan 0.0100 -0.0000
## 440 0.5937 nan 0.0100 -0.0002
## 460 0.5847 nan 0.0100 -0.0000
## 480 0.5769 nan 0.0100 -0.0001
## 500 0.5678 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3132 nan 0.0100 0.0034
## 2 1.3049 nan 0.0100 0.0035
## 3 1.2968 nan 0.0100 0.0036
## 4 1.2891 nan 0.0100 0.0036
## 5 1.2803 nan 0.0100 0.0038
## 6 1.2729 nan 0.0100 0.0033
## 7 1.2647 nan 0.0100 0.0035
## 8 1.2568 nan 0.0100 0.0036
## 9 1.2492 nan 0.0100 0.0036
## 10 1.2418 nan 0.0100 0.0037
## 20 1.1762 nan 0.0100 0.0025
## 40 1.0678 nan 0.0100 0.0020
## 60 0.9818 nan 0.0100 0.0016
## 80 0.9160 nan 0.0100 0.0008
## 100 0.8624 nan 0.0100 0.0009
## 120 0.8192 nan 0.0100 0.0009
## 140 0.7836 nan 0.0100 0.0005
## 160 0.7540 nan 0.0100 0.0004
## 180 0.7264 nan 0.0100 0.0004
## 200 0.7029 nan 0.0100 0.0002
## 220 0.6826 nan 0.0100 0.0001
## 240 0.6641 nan 0.0100 0.0002
## 260 0.6459 nan 0.0100 0.0000
## 280 0.6298 nan 0.0100 0.0000
## 300 0.6153 nan 0.0100 0.0001
## 320 0.6013 nan 0.0100 0.0003
## 340 0.5887 nan 0.0100 0.0001
## 360 0.5764 nan 0.0100 0.0000
## 380 0.5642 nan 0.0100 -0.0001
## 400 0.5524 nan 0.0100 -0.0000
## 420 0.5415 nan 0.0100 -0.0001
## 440 0.5309 nan 0.0100 -0.0002
## 460 0.5211 nan 0.0100 -0.0000
## 480 0.5115 nan 0.0100 -0.0001
## 500 0.5030 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3126 nan 0.0100 0.0038
## 2 1.3036 nan 0.0100 0.0038
## 3 1.2950 nan 0.0100 0.0038
## 4 1.2880 nan 0.0100 0.0034
## 5 1.2804 nan 0.0100 0.0034
## 6 1.2721 nan 0.0100 0.0038
## 7 1.2640 nan 0.0100 0.0036
## 8 1.2565 nan 0.0100 0.0030
## 9 1.2489 nan 0.0100 0.0028
## 10 1.2414 nan 0.0100 0.0031
## 20 1.1726 nan 0.0100 0.0023
## 40 1.0647 nan 0.0100 0.0018
## 60 0.9801 nan 0.0100 0.0016
## 80 0.9127 nan 0.0100 0.0011
## 100 0.8602 nan 0.0100 0.0009
## 120 0.8187 nan 0.0100 0.0006
## 140 0.7839 nan 0.0100 0.0004
## 160 0.7528 nan 0.0100 0.0005
## 180 0.7271 nan 0.0100 0.0005
## 200 0.7042 nan 0.0100 0.0002
## 220 0.6840 nan 0.0100 0.0002
## 240 0.6663 nan 0.0100 -0.0000
## 260 0.6499 nan 0.0100 -0.0000
## 280 0.6341 nan 0.0100 0.0001
## 300 0.6196 nan 0.0100 0.0001
## 320 0.6053 nan 0.0100 0.0000
## 340 0.5934 nan 0.0100 -0.0000
## 360 0.5815 nan 0.0100 -0.0001
## 380 0.5703 nan 0.0100 -0.0001
## 400 0.5603 nan 0.0100 0.0000
## 420 0.5497 nan 0.0100 -0.0000
## 440 0.5402 nan 0.0100 -0.0001
## 460 0.5309 nan 0.0100 -0.0001
## 480 0.5218 nan 0.0100 -0.0001
## 500 0.5125 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3123 nan 0.0100 0.0036
## 2 1.3043 nan 0.0100 0.0035
## 3 1.2963 nan 0.0100 0.0034
## 4 1.2889 nan 0.0100 0.0035
## 5 1.2809 nan 0.0100 0.0037
## 6 1.2734 nan 0.0100 0.0029
## 7 1.2651 nan 0.0100 0.0038
## 8 1.2569 nan 0.0100 0.0031
## 9 1.2497 nan 0.0100 0.0030
## 10 1.2421 nan 0.0100 0.0029
## 20 1.1741 nan 0.0100 0.0027
## 40 1.0652 nan 0.0100 0.0018
## 60 0.9831 nan 0.0100 0.0016
## 80 0.9192 nan 0.0100 0.0011
## 100 0.8675 nan 0.0100 0.0009
## 120 0.8244 nan 0.0100 0.0007
## 140 0.7901 nan 0.0100 0.0003
## 160 0.7603 nan 0.0100 0.0003
## 180 0.7354 nan 0.0100 0.0004
## 200 0.7140 nan 0.0100 0.0001
## 220 0.6933 nan 0.0100 0.0000
## 240 0.6766 nan 0.0100 0.0001
## 260 0.6607 nan 0.0100 0.0000
## 280 0.6461 nan 0.0100 -0.0001
## 300 0.6314 nan 0.0100 0.0000
## 320 0.6180 nan 0.0100 0.0000
## 340 0.6056 nan 0.0100 -0.0000
## 360 0.5938 nan 0.0100 0.0000
## 380 0.5819 nan 0.0100 -0.0000
## 400 0.5711 nan 0.0100 0.0001
## 420 0.5618 nan 0.0100 -0.0001
## 440 0.5515 nan 0.0100 0.0000
## 460 0.5412 nan 0.0100 -0.0001
## 480 0.5322 nan 0.0100 0.0000
## 500 0.5241 nan 0.0100 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3117 nan 0.0100 0.0043
## 2 1.3031 nan 0.0100 0.0039
## 3 1.2945 nan 0.0100 0.0037
## 4 1.2861 nan 0.0100 0.0033
## 5 1.2777 nan 0.0100 0.0034
## 6 1.2692 nan 0.0100 0.0036
## 7 1.2619 nan 0.0100 0.0036
## 8 1.2535 nan 0.0100 0.0037
## 9 1.2462 nan 0.0100 0.0030
## 10 1.2374 nan 0.0100 0.0036
## 20 1.1674 nan 0.0100 0.0029
## 40 1.0518 nan 0.0100 0.0022
## 60 0.9638 nan 0.0100 0.0018
## 80 0.8971 nan 0.0100 0.0008
## 100 0.8418 nan 0.0100 0.0006
## 120 0.7957 nan 0.0100 0.0004
## 140 0.7585 nan 0.0100 0.0005
## 160 0.7251 nan 0.0100 0.0006
## 180 0.6958 nan 0.0100 0.0003
## 200 0.6710 nan 0.0100 0.0001
## 220 0.6492 nan 0.0100 0.0002
## 240 0.6281 nan 0.0100 0.0002
## 260 0.6091 nan 0.0100 0.0001
## 280 0.5913 nan 0.0100 -0.0001
## 300 0.5746 nan 0.0100 0.0001
## 320 0.5598 nan 0.0100 0.0000
## 340 0.5466 nan 0.0100 0.0000
## 360 0.5331 nan 0.0100 0.0000
## 380 0.5205 nan 0.0100 0.0001
## 400 0.5086 nan 0.0100 -0.0001
## 420 0.4964 nan 0.0100 0.0000
## 440 0.4852 nan 0.0100 0.0000
## 460 0.4747 nan 0.0100 0.0001
## 480 0.4644 nan 0.0100 0.0000
## 500 0.4553 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3117 nan 0.0100 0.0045
## 2 1.3025 nan 0.0100 0.0042
## 3 1.2938 nan 0.0100 0.0042
## 4 1.2847 nan 0.0100 0.0040
## 5 1.2765 nan 0.0100 0.0037
## 6 1.2685 nan 0.0100 0.0034
## 7 1.2599 nan 0.0100 0.0037
## 8 1.2522 nan 0.0100 0.0035
## 9 1.2449 nan 0.0100 0.0030
## 10 1.2375 nan 0.0100 0.0032
## 20 1.1653 nan 0.0100 0.0029
## 40 1.0506 nan 0.0100 0.0024
## 60 0.9663 nan 0.0100 0.0010
## 80 0.8980 nan 0.0100 0.0011
## 100 0.8430 nan 0.0100 0.0009
## 120 0.7990 nan 0.0100 0.0007
## 140 0.7608 nan 0.0100 0.0005
## 160 0.7295 nan 0.0100 0.0004
## 180 0.7018 nan 0.0100 0.0004
## 200 0.6776 nan 0.0100 0.0001
## 220 0.6554 nan 0.0100 0.0002
## 240 0.6366 nan 0.0100 0.0002
## 260 0.6192 nan 0.0100 0.0000
## 280 0.6032 nan 0.0100 0.0001
## 300 0.5875 nan 0.0100 0.0002
## 320 0.5735 nan 0.0100 0.0000
## 340 0.5602 nan 0.0100 0.0001
## 360 0.5451 nan 0.0100 -0.0001
## 380 0.5322 nan 0.0100 -0.0001
## 400 0.5201 nan 0.0100 0.0000
## 420 0.5083 nan 0.0100 -0.0000
## 440 0.4969 nan 0.0100 -0.0001
## 460 0.4858 nan 0.0100 0.0002
## 480 0.4752 nan 0.0100 -0.0001
## 500 0.4658 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3123 nan 0.0100 0.0042
## 2 1.3042 nan 0.0100 0.0035
## 3 1.2952 nan 0.0100 0.0042
## 4 1.2866 nan 0.0100 0.0039
## 5 1.2790 nan 0.0100 0.0035
## 6 1.2709 nan 0.0100 0.0034
## 7 1.2630 nan 0.0100 0.0034
## 8 1.2557 nan 0.0100 0.0033
## 9 1.2481 nan 0.0100 0.0034
## 10 1.2406 nan 0.0100 0.0031
## 20 1.1706 nan 0.0100 0.0027
## 40 1.0581 nan 0.0100 0.0024
## 60 0.9715 nan 0.0100 0.0015
## 80 0.9044 nan 0.0100 0.0011
## 100 0.8508 nan 0.0100 0.0010
## 120 0.8074 nan 0.0100 0.0004
## 140 0.7699 nan 0.0100 0.0003
## 160 0.7382 nan 0.0100 0.0004
## 180 0.7108 nan 0.0100 0.0002
## 200 0.6869 nan 0.0100 0.0002
## 220 0.6654 nan 0.0100 -0.0001
## 240 0.6466 nan 0.0100 0.0002
## 260 0.6298 nan 0.0100 -0.0001
## 280 0.6143 nan 0.0100 -0.0001
## 300 0.5997 nan 0.0100 0.0000
## 320 0.5851 nan 0.0100 0.0002
## 340 0.5705 nan 0.0100 0.0001
## 360 0.5571 nan 0.0100 -0.0000
## 380 0.5451 nan 0.0100 -0.0002
## 400 0.5329 nan 0.0100 -0.0001
## 420 0.5221 nan 0.0100 -0.0001
## 440 0.5116 nan 0.0100 -0.0000
## 460 0.5013 nan 0.0100 -0.0001
## 480 0.4914 nan 0.0100 0.0001
## 500 0.4822 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2455 nan 0.1000 0.0356
## 2 1.1801 nan 0.1000 0.0263
## 3 1.1242 nan 0.1000 0.0253
## 4 1.0772 nan 0.1000 0.0209
## 5 1.0382 nan 0.1000 0.0162
## 6 1.0034 nan 0.1000 0.0128
## 7 0.9698 nan 0.1000 0.0117
## 8 0.9359 nan 0.1000 0.0129
## 9 0.9077 nan 0.1000 0.0123
## 10 0.8843 nan 0.1000 0.0081
## 20 0.7317 nan 0.1000 0.0035
## 40 0.5999 nan 0.1000 0.0002
## 60 0.5085 nan 0.1000 0.0007
## 80 0.4494 nan 0.1000 0.0002
## 100 0.3962 nan 0.1000 -0.0003
## 120 0.3496 nan 0.1000 -0.0010
## 140 0.3158 nan 0.1000 -0.0001
## 160 0.2825 nan 0.1000 -0.0003
## 180 0.2582 nan 0.1000 -0.0002
## 200 0.2334 nan 0.1000 -0.0006
## 220 0.2125 nan 0.1000 -0.0007
## 240 0.1941 nan 0.1000 -0.0002
## 260 0.1784 nan 0.1000 -0.0002
## 280 0.1641 nan 0.1000 -0.0001
## 300 0.1498 nan 0.1000 -0.0001
## 320 0.1383 nan 0.1000 -0.0003
## 340 0.1280 nan 0.1000 0.0001
## 360 0.1197 nan 0.1000 -0.0005
## 380 0.1109 nan 0.1000 -0.0002
## 400 0.1028 nan 0.1000 -0.0001
## 420 0.0950 nan 0.1000 -0.0003
## 440 0.0884 nan 0.1000 -0.0003
## 460 0.0819 nan 0.1000 -0.0002
## 480 0.0758 nan 0.1000 -0.0002
## 500 0.0703 nan 0.1000 -0.0003
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2370 nan 0.1000 0.0368
## 2 1.1663 nan 0.1000 0.0293
## 3 1.1083 nan 0.1000 0.0237
## 4 1.0638 nan 0.1000 0.0183
## 5 1.0288 nan 0.1000 0.0136
## 6 0.9911 nan 0.1000 0.0144
## 7 0.9591 nan 0.1000 0.0123
## 8 0.9286 nan 0.1000 0.0132
## 9 0.9045 nan 0.1000 0.0085
## 10 0.8808 nan 0.1000 0.0059
## 20 0.7413 nan 0.1000 0.0020
## 40 0.6143 nan 0.1000 -0.0013
## 60 0.5282 nan 0.1000 -0.0001
## 80 0.4724 nan 0.1000 -0.0002
## 100 0.4165 nan 0.1000 -0.0010
## 120 0.3702 nan 0.1000 -0.0009
## 140 0.3289 nan 0.1000 -0.0000
## 160 0.2963 nan 0.1000 -0.0008
## 180 0.2704 nan 0.1000 -0.0007
## 200 0.2479 nan 0.1000 -0.0004
## 220 0.2257 nan 0.1000 -0.0011
## 240 0.2049 nan 0.1000 -0.0002
## 260 0.1877 nan 0.1000 0.0000
## 280 0.1729 nan 0.1000 -0.0005
## 300 0.1591 nan 0.1000 -0.0003
## 320 0.1472 nan 0.1000 -0.0004
## 340 0.1351 nan 0.1000 -0.0003
## 360 0.1247 nan 0.1000 -0.0003
## 380 0.1154 nan 0.1000 -0.0005
## 400 0.1068 nan 0.1000 -0.0002
## 420 0.1005 nan 0.1000 -0.0003
## 440 0.0928 nan 0.1000 -0.0001
## 460 0.0858 nan 0.1000 -0.0000
## 480 0.0805 nan 0.1000 -0.0002
## 500 0.0742 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2426 nan 0.1000 0.0295
## 2 1.1792 nan 0.1000 0.0242
## 3 1.1208 nan 0.1000 0.0249
## 4 1.0708 nan 0.1000 0.0195
## 5 1.0384 nan 0.1000 0.0133
## 6 0.9981 nan 0.1000 0.0146
## 7 0.9640 nan 0.1000 0.0134
## 8 0.9349 nan 0.1000 0.0123
## 9 0.9080 nan 0.1000 0.0108
## 10 0.8859 nan 0.1000 0.0091
## 20 0.7442 nan 0.1000 0.0022
## 40 0.6243 nan 0.1000 -0.0008
## 60 0.5509 nan 0.1000 -0.0011
## 80 0.4842 nan 0.1000 -0.0008
## 100 0.4358 nan 0.1000 -0.0007
## 120 0.3898 nan 0.1000 -0.0005
## 140 0.3544 nan 0.1000 -0.0004
## 160 0.3225 nan 0.1000 -0.0004
## 180 0.2961 nan 0.1000 -0.0011
## 200 0.2712 nan 0.1000 -0.0004
## 220 0.2462 nan 0.1000 -0.0016
## 240 0.2246 nan 0.1000 -0.0005
## 260 0.2074 nan 0.1000 -0.0005
## 280 0.1913 nan 0.1000 -0.0011
## 300 0.1766 nan 0.1000 -0.0002
## 320 0.1628 nan 0.1000 -0.0002
## 340 0.1493 nan 0.1000 -0.0002
## 360 0.1358 nan 0.1000 -0.0003
## 380 0.1262 nan 0.1000 -0.0004
## 400 0.1159 nan 0.1000 -0.0004
## 420 0.1086 nan 0.1000 -0.0002
## 440 0.1013 nan 0.1000 -0.0004
## 460 0.0950 nan 0.1000 -0.0005
## 480 0.0884 nan 0.1000 -0.0002
## 500 0.0830 nan 0.1000 -0.0003
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2426 nan 0.1000 0.0350
## 2 1.1684 nan 0.1000 0.0326
## 3 1.1100 nan 0.1000 0.0224
## 4 1.0600 nan 0.1000 0.0204
## 5 1.0126 nan 0.1000 0.0188
## 6 0.9727 nan 0.1000 0.0154
## 7 0.9395 nan 0.1000 0.0138
## 8 0.9096 nan 0.1000 0.0129
## 9 0.8808 nan 0.1000 0.0112
## 10 0.8576 nan 0.1000 0.0094
## 20 0.7002 nan 0.1000 0.0010
## 40 0.5580 nan 0.1000 -0.0006
## 60 0.4611 nan 0.1000 0.0006
## 80 0.3901 nan 0.1000 -0.0001
## 100 0.3328 nan 0.1000 0.0001
## 120 0.2872 nan 0.1000 0.0000
## 140 0.2487 nan 0.1000 -0.0000
## 160 0.2204 nan 0.1000 0.0001
## 180 0.1932 nan 0.1000 -0.0003
## 200 0.1696 nan 0.1000 -0.0002
## 220 0.1504 nan 0.1000 -0.0001
## 240 0.1337 nan 0.1000 0.0001
## 260 0.1189 nan 0.1000 -0.0003
## 280 0.1071 nan 0.1000 -0.0000
## 300 0.0964 nan 0.1000 -0.0001
## 320 0.0858 nan 0.1000 -0.0001
## 340 0.0778 nan 0.1000 -0.0001
## 360 0.0698 nan 0.1000 -0.0003
## 380 0.0631 nan 0.1000 -0.0000
## 400 0.0575 nan 0.1000 -0.0002
## 420 0.0522 nan 0.1000 -0.0002
## 440 0.0475 nan 0.1000 -0.0001
## 460 0.0430 nan 0.1000 -0.0001
## 480 0.0392 nan 0.1000 -0.0001
## 500 0.0358 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2444 nan 0.1000 0.0326
## 2 1.1729 nan 0.1000 0.0338
## 3 1.1099 nan 0.1000 0.0232
## 4 1.0545 nan 0.1000 0.0227
## 5 1.0113 nan 0.1000 0.0188
## 6 0.9757 nan 0.1000 0.0143
## 7 0.9388 nan 0.1000 0.0148
## 8 0.9118 nan 0.1000 0.0114
## 9 0.8860 nan 0.1000 0.0070
## 10 0.8628 nan 0.1000 0.0081
## 20 0.7133 nan 0.1000 0.0011
## 40 0.5673 nan 0.1000 0.0010
## 60 0.4748 nan 0.1000 -0.0009
## 80 0.4060 nan 0.1000 -0.0003
## 100 0.3530 nan 0.1000 -0.0012
## 120 0.3069 nan 0.1000 0.0009
## 140 0.2654 nan 0.1000 -0.0006
## 160 0.2336 nan 0.1000 -0.0002
## 180 0.2064 nan 0.1000 -0.0010
## 200 0.1848 nan 0.1000 -0.0001
## 220 0.1643 nan 0.1000 -0.0006
## 240 0.1472 nan 0.1000 -0.0009
## 260 0.1332 nan 0.1000 -0.0002
## 280 0.1202 nan 0.1000 -0.0005
## 300 0.1084 nan 0.1000 -0.0003
## 320 0.0980 nan 0.1000 -0.0002
## 340 0.0878 nan 0.1000 -0.0004
## 360 0.0788 nan 0.1000 -0.0001
## 380 0.0708 nan 0.1000 -0.0003
## 400 0.0645 nan 0.1000 -0.0001
## 420 0.0582 nan 0.1000 -0.0002
## 440 0.0530 nan 0.1000 -0.0001
## 460 0.0487 nan 0.1000 -0.0002
## 480 0.0442 nan 0.1000 -0.0001
## 500 0.0399 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2418 nan 0.1000 0.0329
## 2 1.1758 nan 0.1000 0.0265
## 3 1.1156 nan 0.1000 0.0274
## 4 1.0612 nan 0.1000 0.0214
## 5 1.0198 nan 0.1000 0.0153
## 6 0.9773 nan 0.1000 0.0156
## 7 0.9396 nan 0.1000 0.0141
## 8 0.9162 nan 0.1000 0.0081
## 9 0.8891 nan 0.1000 0.0098
## 10 0.8622 nan 0.1000 0.0099
## 20 0.7152 nan 0.1000 0.0027
## 40 0.5785 nan 0.1000 0.0006
## 60 0.4802 nan 0.1000 -0.0008
## 80 0.4102 nan 0.1000 0.0002
## 100 0.3653 nan 0.1000 -0.0013
## 120 0.3241 nan 0.1000 -0.0006
## 140 0.2857 nan 0.1000 -0.0010
## 160 0.2534 nan 0.1000 -0.0010
## 180 0.2257 nan 0.1000 -0.0003
## 200 0.2037 nan 0.1000 -0.0009
## 220 0.1854 nan 0.1000 -0.0007
## 240 0.1687 nan 0.1000 -0.0001
## 260 0.1526 nan 0.1000 -0.0009
## 280 0.1391 nan 0.1000 -0.0006
## 300 0.1262 nan 0.1000 -0.0003
## 320 0.1155 nan 0.1000 -0.0006
## 340 0.1060 nan 0.1000 -0.0003
## 360 0.0959 nan 0.1000 -0.0003
## 380 0.0884 nan 0.1000 -0.0003
## 400 0.0805 nan 0.1000 -0.0001
## 420 0.0732 nan 0.1000 -0.0003
## 440 0.0670 nan 0.1000 -0.0003
## 460 0.0611 nan 0.1000 -0.0002
## 480 0.0554 nan 0.1000 -0.0002
## 500 0.0505 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2315 nan 0.1000 0.0413
## 2 1.1582 nan 0.1000 0.0317
## 3 1.0991 nan 0.1000 0.0269
## 4 1.0496 nan 0.1000 0.0224
## 5 0.9992 nan 0.1000 0.0201
## 6 0.9581 nan 0.1000 0.0162
## 7 0.9226 nan 0.1000 0.0138
## 8 0.8906 nan 0.1000 0.0145
## 9 0.8584 nan 0.1000 0.0124
## 10 0.8314 nan 0.1000 0.0117
## 20 0.6734 nan 0.1000 0.0038
## 40 0.5177 nan 0.1000 0.0007
## 60 0.4140 nan 0.1000 -0.0007
## 80 0.3421 nan 0.1000 -0.0010
## 100 0.2886 nan 0.1000 -0.0004
## 120 0.2463 nan 0.1000 -0.0001
## 140 0.2108 nan 0.1000 -0.0009
## 160 0.1812 nan 0.1000 -0.0004
## 180 0.1557 nan 0.1000 -0.0008
## 200 0.1374 nan 0.1000 -0.0000
## 220 0.1213 nan 0.1000 -0.0003
## 240 0.1050 nan 0.1000 -0.0002
## 260 0.0934 nan 0.1000 -0.0004
## 280 0.0836 nan 0.1000 -0.0001
## 300 0.0738 nan 0.1000 -0.0002
## 320 0.0654 nan 0.1000 -0.0001
## 340 0.0575 nan 0.1000 -0.0002
## 360 0.0512 nan 0.1000 -0.0001
## 380 0.0453 nan 0.1000 -0.0001
## 400 0.0405 nan 0.1000 -0.0001
## 420 0.0359 nan 0.1000 -0.0001
## 440 0.0321 nan 0.1000 -0.0001
## 460 0.0286 nan 0.1000 -0.0001
## 480 0.0255 nan 0.1000 -0.0001
## 500 0.0224 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2322 nan 0.1000 0.0404
## 2 1.1642 nan 0.1000 0.0292
## 3 1.1028 nan 0.1000 0.0285
## 4 1.0488 nan 0.1000 0.0236
## 5 1.0046 nan 0.1000 0.0179
## 6 0.9639 nan 0.1000 0.0167
## 7 0.9286 nan 0.1000 0.0146
## 8 0.8978 nan 0.1000 0.0104
## 9 0.8741 nan 0.1000 0.0075
## 10 0.8455 nan 0.1000 0.0110
## 20 0.6748 nan 0.1000 0.0056
## 40 0.5235 nan 0.1000 -0.0004
## 60 0.4239 nan 0.1000 -0.0000
## 80 0.3485 nan 0.1000 -0.0009
## 100 0.2880 nan 0.1000 -0.0005
## 120 0.2442 nan 0.1000 -0.0004
## 140 0.2070 nan 0.1000 -0.0001
## 160 0.1786 nan 0.1000 -0.0001
## 180 0.1549 nan 0.1000 -0.0006
## 200 0.1348 nan 0.1000 -0.0008
## 220 0.1202 nan 0.1000 -0.0008
## 240 0.1048 nan 0.1000 -0.0002
## 260 0.0931 nan 0.1000 -0.0004
## 280 0.0828 nan 0.1000 -0.0003
## 300 0.0726 nan 0.1000 -0.0002
## 320 0.0642 nan 0.1000 -0.0002
## 340 0.0570 nan 0.1000 -0.0003
## 360 0.0499 nan 0.1000 -0.0002
## 380 0.0447 nan 0.1000 -0.0001
## 400 0.0395 nan 0.1000 -0.0000
## 420 0.0353 nan 0.1000 -0.0001
## 440 0.0316 nan 0.1000 -0.0001
## 460 0.0281 nan 0.1000 -0.0001
## 480 0.0251 nan 0.1000 -0.0001
## 500 0.0223 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2313 nan 0.1000 0.0392
## 2 1.1544 nan 0.1000 0.0350
## 3 1.0936 nan 0.1000 0.0268
## 4 1.0483 nan 0.1000 0.0147
## 5 1.0049 nan 0.1000 0.0198
## 6 0.9646 nan 0.1000 0.0160
## 7 0.9304 nan 0.1000 0.0132
## 8 0.8987 nan 0.1000 0.0105
## 9 0.8686 nan 0.1000 0.0117
## 10 0.8415 nan 0.1000 0.0093
## 20 0.6871 nan 0.1000 0.0019
## 40 0.5390 nan 0.1000 -0.0007
## 60 0.4441 nan 0.1000 -0.0012
## 80 0.3725 nan 0.1000 -0.0011
## 100 0.3192 nan 0.1000 0.0002
## 120 0.2683 nan 0.1000 -0.0011
## 140 0.2307 nan 0.1000 -0.0011
## 160 0.1985 nan 0.1000 -0.0004
## 180 0.1735 nan 0.1000 -0.0005
## 200 0.1535 nan 0.1000 -0.0007
## 220 0.1351 nan 0.1000 -0.0007
## 240 0.1190 nan 0.1000 -0.0001
## 260 0.1048 nan 0.1000 -0.0006
## 280 0.0928 nan 0.1000 -0.0001
## 300 0.0813 nan 0.1000 -0.0001
## 320 0.0722 nan 0.1000 -0.0004
## 340 0.0644 nan 0.1000 -0.0004
## 360 0.0581 nan 0.1000 -0.0001
## 380 0.0522 nan 0.1000 -0.0002
## 400 0.0472 nan 0.1000 -0.0002
## 420 0.0421 nan 0.1000 -0.0002
## 440 0.0376 nan 0.1000 -0.0002
## 460 0.0336 nan 0.1000 -0.0001
## 480 0.0303 nan 0.1000 -0.0002
## 500 0.0272 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3204 nan 0.0010 0.0004
## 2 1.3195 nan 0.0010 0.0004
## 3 1.3186 nan 0.0010 0.0004
## 4 1.3177 nan 0.0010 0.0004
## 5 1.3169 nan 0.0010 0.0004
## 6 1.3161 nan 0.0010 0.0003
## 7 1.3153 nan 0.0010 0.0004
## 8 1.3145 nan 0.0010 0.0003
## 9 1.3138 nan 0.0010 0.0003
## 10 1.3129 nan 0.0010 0.0004
## 20 1.3049 nan 0.0010 0.0003
## 40 1.2892 nan 0.0010 0.0003
## 60 1.2740 nan 0.0010 0.0004
## 80 1.2591 nan 0.0010 0.0003
## 100 1.2448 nan 0.0010 0.0003
## 120 1.2310 nan 0.0010 0.0003
## 140 1.2173 nan 0.0010 0.0003
## 160 1.2040 nan 0.0010 0.0003
## 180 1.1913 nan 0.0010 0.0003
## 200 1.1789 nan 0.0010 0.0002
## 220 1.1666 nan 0.0010 0.0002
## 240 1.1550 nan 0.0010 0.0002
## 260 1.1434 nan 0.0010 0.0003
## 280 1.1326 nan 0.0010 0.0002
## 300 1.1220 nan 0.0010 0.0002
## 320 1.1114 nan 0.0010 0.0002
## 340 1.1013 nan 0.0010 0.0002
## 360 1.0913 nan 0.0010 0.0002
## 380 1.0815 nan 0.0010 0.0002
## 400 1.0721 nan 0.0010 0.0002
## 420 1.0630 nan 0.0010 0.0001
## 440 1.0539 nan 0.0010 0.0002
## 460 1.0453 nan 0.0010 0.0002
## 480 1.0369 nan 0.0010 0.0002
## 500 1.0284 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3194 nan 0.0010 0.0004
## 3 1.3186 nan 0.0010 0.0004
## 4 1.3177 nan 0.0010 0.0004
## 5 1.3169 nan 0.0010 0.0003
## 6 1.3161 nan 0.0010 0.0003
## 7 1.3153 nan 0.0010 0.0004
## 8 1.3145 nan 0.0010 0.0004
## 9 1.3138 nan 0.0010 0.0003
## 10 1.3130 nan 0.0010 0.0003
## 20 1.3047 nan 0.0010 0.0004
## 40 1.2891 nan 0.0010 0.0003
## 60 1.2736 nan 0.0010 0.0004
## 80 1.2590 nan 0.0010 0.0003
## 100 1.2446 nan 0.0010 0.0003
## 120 1.2310 nan 0.0010 0.0003
## 140 1.2176 nan 0.0010 0.0003
## 160 1.2043 nan 0.0010 0.0003
## 180 1.1915 nan 0.0010 0.0002
## 200 1.1792 nan 0.0010 0.0003
## 220 1.1673 nan 0.0010 0.0003
## 240 1.1555 nan 0.0010 0.0002
## 260 1.1442 nan 0.0010 0.0002
## 280 1.1334 nan 0.0010 0.0002
## 300 1.1226 nan 0.0010 0.0002
## 320 1.1119 nan 0.0010 0.0002
## 340 1.1018 nan 0.0010 0.0002
## 360 1.0922 nan 0.0010 0.0002
## 380 1.0825 nan 0.0010 0.0002
## 400 1.0729 nan 0.0010 0.0002
## 420 1.0636 nan 0.0010 0.0002
## 440 1.0546 nan 0.0010 0.0002
## 460 1.0457 nan 0.0010 0.0002
## 480 1.0372 nan 0.0010 0.0002
## 500 1.0288 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3204 nan 0.0010 0.0004
## 2 1.3195 nan 0.0010 0.0004
## 3 1.3188 nan 0.0010 0.0004
## 4 1.3180 nan 0.0010 0.0003
## 5 1.3172 nan 0.0010 0.0004
## 6 1.3164 nan 0.0010 0.0004
## 7 1.3156 nan 0.0010 0.0003
## 8 1.3148 nan 0.0010 0.0004
## 9 1.3139 nan 0.0010 0.0004
## 10 1.3131 nan 0.0010 0.0004
## 20 1.3050 nan 0.0010 0.0004
## 40 1.2891 nan 0.0010 0.0004
## 60 1.2739 nan 0.0010 0.0003
## 80 1.2591 nan 0.0010 0.0003
## 100 1.2449 nan 0.0010 0.0003
## 120 1.2310 nan 0.0010 0.0003
## 140 1.2175 nan 0.0010 0.0003
## 160 1.2043 nan 0.0010 0.0003
## 180 1.1916 nan 0.0010 0.0003
## 200 1.1794 nan 0.0010 0.0003
## 220 1.1674 nan 0.0010 0.0002
## 240 1.1556 nan 0.0010 0.0002
## 260 1.1443 nan 0.0010 0.0002
## 280 1.1333 nan 0.0010 0.0002
## 300 1.1225 nan 0.0010 0.0002
## 320 1.1123 nan 0.0010 0.0002
## 340 1.1022 nan 0.0010 0.0002
## 360 1.0923 nan 0.0010 0.0002
## 380 1.0826 nan 0.0010 0.0002
## 400 1.0733 nan 0.0010 0.0002
## 420 1.0643 nan 0.0010 0.0002
## 440 1.0553 nan 0.0010 0.0002
## 460 1.0464 nan 0.0010 0.0002
## 480 1.0381 nan 0.0010 0.0002
## 500 1.0298 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3194 nan 0.0010 0.0004
## 3 1.3186 nan 0.0010 0.0004
## 4 1.3177 nan 0.0010 0.0004
## 5 1.3168 nan 0.0010 0.0004
## 6 1.3159 nan 0.0010 0.0004
## 7 1.3150 nan 0.0010 0.0004
## 8 1.3141 nan 0.0010 0.0004
## 9 1.3132 nan 0.0010 0.0004
## 10 1.3124 nan 0.0010 0.0004
## 20 1.3037 nan 0.0010 0.0004
## 40 1.2864 nan 0.0010 0.0004
## 60 1.2697 nan 0.0010 0.0004
## 80 1.2541 nan 0.0010 0.0003
## 100 1.2390 nan 0.0010 0.0004
## 120 1.2241 nan 0.0010 0.0003
## 140 1.2097 nan 0.0010 0.0003
## 160 1.1960 nan 0.0010 0.0003
## 180 1.1826 nan 0.0010 0.0003
## 200 1.1696 nan 0.0010 0.0003
## 220 1.1569 nan 0.0010 0.0003
## 240 1.1445 nan 0.0010 0.0003
## 260 1.1325 nan 0.0010 0.0002
## 280 1.1210 nan 0.0010 0.0002
## 300 1.1095 nan 0.0010 0.0003
## 320 1.0986 nan 0.0010 0.0002
## 340 1.0878 nan 0.0010 0.0002
## 360 1.0774 nan 0.0010 0.0002
## 380 1.0673 nan 0.0010 0.0002
## 400 1.0572 nan 0.0010 0.0002
## 420 1.0476 nan 0.0010 0.0002
## 440 1.0382 nan 0.0010 0.0002
## 460 1.0291 nan 0.0010 0.0002
## 480 1.0203 nan 0.0010 0.0002
## 500 1.0117 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3194 nan 0.0010 0.0004
## 3 1.3184 nan 0.0010 0.0004
## 4 1.3175 nan 0.0010 0.0004
## 5 1.3167 nan 0.0010 0.0004
## 6 1.3158 nan 0.0010 0.0004
## 7 1.3150 nan 0.0010 0.0004
## 8 1.3140 nan 0.0010 0.0004
## 9 1.3131 nan 0.0010 0.0004
## 10 1.3123 nan 0.0010 0.0003
## 20 1.3037 nan 0.0010 0.0003
## 40 1.2867 nan 0.0010 0.0004
## 60 1.2701 nan 0.0010 0.0004
## 80 1.2544 nan 0.0010 0.0003
## 100 1.2389 nan 0.0010 0.0004
## 120 1.2239 nan 0.0010 0.0003
## 140 1.2097 nan 0.0010 0.0003
## 160 1.1956 nan 0.0010 0.0003
## 180 1.1821 nan 0.0010 0.0003
## 200 1.1690 nan 0.0010 0.0003
## 220 1.1564 nan 0.0010 0.0003
## 240 1.1442 nan 0.0010 0.0002
## 260 1.1326 nan 0.0010 0.0002
## 280 1.1206 nan 0.0010 0.0002
## 300 1.1094 nan 0.0010 0.0002
## 320 1.0984 nan 0.0010 0.0003
## 340 1.0878 nan 0.0010 0.0002
## 360 1.0774 nan 0.0010 0.0002
## 380 1.0671 nan 0.0010 0.0002
## 400 1.0571 nan 0.0010 0.0002
## 420 1.0474 nan 0.0010 0.0002
## 440 1.0378 nan 0.0010 0.0002
## 460 1.0287 nan 0.0010 0.0002
## 480 1.0197 nan 0.0010 0.0002
## 500 1.0108 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3194 nan 0.0010 0.0004
## 3 1.3185 nan 0.0010 0.0004
## 4 1.3177 nan 0.0010 0.0003
## 5 1.3168 nan 0.0010 0.0004
## 6 1.3160 nan 0.0010 0.0003
## 7 1.3152 nan 0.0010 0.0004
## 8 1.3143 nan 0.0010 0.0004
## 9 1.3134 nan 0.0010 0.0004
## 10 1.3126 nan 0.0010 0.0004
## 20 1.3041 nan 0.0010 0.0004
## 40 1.2869 nan 0.0010 0.0003
## 60 1.2711 nan 0.0010 0.0004
## 80 1.2555 nan 0.0010 0.0004
## 100 1.2405 nan 0.0010 0.0004
## 120 1.2255 nan 0.0010 0.0003
## 140 1.2114 nan 0.0010 0.0003
## 160 1.1980 nan 0.0010 0.0003
## 180 1.1843 nan 0.0010 0.0003
## 200 1.1715 nan 0.0010 0.0003
## 220 1.1592 nan 0.0010 0.0003
## 240 1.1469 nan 0.0010 0.0003
## 260 1.1349 nan 0.0010 0.0002
## 280 1.1235 nan 0.0010 0.0002
## 300 1.1120 nan 0.0010 0.0003
## 320 1.1009 nan 0.0010 0.0003
## 340 1.0901 nan 0.0010 0.0003
## 360 1.0797 nan 0.0010 0.0002
## 380 1.0697 nan 0.0010 0.0002
## 400 1.0598 nan 0.0010 0.0002
## 420 1.0503 nan 0.0010 0.0002
## 440 1.0410 nan 0.0010 0.0002
## 460 1.0321 nan 0.0010 0.0002
## 480 1.0234 nan 0.0010 0.0002
## 500 1.0149 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3193 nan 0.0010 0.0004
## 3 1.3185 nan 0.0010 0.0004
## 4 1.3175 nan 0.0010 0.0005
## 5 1.3166 nan 0.0010 0.0004
## 6 1.3156 nan 0.0010 0.0004
## 7 1.3147 nan 0.0010 0.0004
## 8 1.3138 nan 0.0010 0.0004
## 9 1.3129 nan 0.0010 0.0004
## 10 1.3121 nan 0.0010 0.0004
## 20 1.3029 nan 0.0010 0.0004
## 40 1.2850 nan 0.0010 0.0004
## 60 1.2680 nan 0.0010 0.0003
## 80 1.2513 nan 0.0010 0.0003
## 100 1.2354 nan 0.0010 0.0003
## 120 1.2197 nan 0.0010 0.0004
## 140 1.2049 nan 0.0010 0.0003
## 160 1.1902 nan 0.0010 0.0003
## 180 1.1759 nan 0.0010 0.0003
## 200 1.1623 nan 0.0010 0.0003
## 220 1.1489 nan 0.0010 0.0003
## 240 1.1358 nan 0.0010 0.0003
## 260 1.1231 nan 0.0010 0.0002
## 280 1.1106 nan 0.0010 0.0002
## 300 1.0985 nan 0.0010 0.0003
## 320 1.0871 nan 0.0010 0.0002
## 340 1.0756 nan 0.0010 0.0003
## 360 1.0647 nan 0.0010 0.0002
## 380 1.0538 nan 0.0010 0.0002
## 400 1.0433 nan 0.0010 0.0002
## 420 1.0333 nan 0.0010 0.0002
## 440 1.0235 nan 0.0010 0.0002
## 460 1.0139 nan 0.0010 0.0002
## 480 1.0045 nan 0.0010 0.0002
## 500 0.9957 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3202 nan 0.0010 0.0004
## 2 1.3192 nan 0.0010 0.0004
## 3 1.3183 nan 0.0010 0.0004
## 4 1.3174 nan 0.0010 0.0004
## 5 1.3166 nan 0.0010 0.0003
## 6 1.3157 nan 0.0010 0.0004
## 7 1.3148 nan 0.0010 0.0003
## 8 1.3138 nan 0.0010 0.0005
## 9 1.3129 nan 0.0010 0.0004
## 10 1.3121 nan 0.0010 0.0004
## 20 1.3029 nan 0.0010 0.0004
## 40 1.2851 nan 0.0010 0.0004
## 60 1.2683 nan 0.0010 0.0004
## 80 1.2518 nan 0.0010 0.0003
## 100 1.2359 nan 0.0010 0.0004
## 120 1.2201 nan 0.0010 0.0004
## 140 1.2055 nan 0.0010 0.0003
## 160 1.1912 nan 0.0010 0.0003
## 180 1.1770 nan 0.0010 0.0003
## 200 1.1632 nan 0.0010 0.0003
## 220 1.1499 nan 0.0010 0.0002
## 240 1.1372 nan 0.0010 0.0003
## 260 1.1244 nan 0.0010 0.0003
## 280 1.1120 nan 0.0010 0.0002
## 300 1.1001 nan 0.0010 0.0003
## 320 1.0886 nan 0.0010 0.0002
## 340 1.0774 nan 0.0010 0.0002
## 360 1.0666 nan 0.0010 0.0002
## 380 1.0559 nan 0.0010 0.0002
## 400 1.0458 nan 0.0010 0.0002
## 420 1.0358 nan 0.0010 0.0002
## 440 1.0260 nan 0.0010 0.0002
## 460 1.0167 nan 0.0010 0.0002
## 480 1.0074 nan 0.0010 0.0002
## 500 0.9983 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3193 nan 0.0010 0.0004
## 3 1.3184 nan 0.0010 0.0005
## 4 1.3175 nan 0.0010 0.0004
## 5 1.3166 nan 0.0010 0.0004
## 6 1.3157 nan 0.0010 0.0004
## 7 1.3149 nan 0.0010 0.0004
## 8 1.3140 nan 0.0010 0.0004
## 9 1.3131 nan 0.0010 0.0004
## 10 1.3122 nan 0.0010 0.0004
## 20 1.3034 nan 0.0010 0.0003
## 40 1.2859 nan 0.0010 0.0003
## 60 1.2691 nan 0.0010 0.0004
## 80 1.2526 nan 0.0010 0.0004
## 100 1.2367 nan 0.0010 0.0003
## 120 1.2216 nan 0.0010 0.0003
## 140 1.2065 nan 0.0010 0.0003
## 160 1.1922 nan 0.0010 0.0003
## 180 1.1781 nan 0.0010 0.0003
## 200 1.1647 nan 0.0010 0.0003
## 220 1.1516 nan 0.0010 0.0003
## 240 1.1391 nan 0.0010 0.0002
## 260 1.1266 nan 0.0010 0.0003
## 280 1.1146 nan 0.0010 0.0003
## 300 1.1030 nan 0.0010 0.0003
## 320 1.0915 nan 0.0010 0.0002
## 340 1.0804 nan 0.0010 0.0002
## 360 1.0697 nan 0.0010 0.0003
## 380 1.0592 nan 0.0010 0.0002
## 400 1.0486 nan 0.0010 0.0002
## 420 1.0387 nan 0.0010 0.0002
## 440 1.0292 nan 0.0010 0.0002
## 460 1.0198 nan 0.0010 0.0002
## 480 1.0108 nan 0.0010 0.0002
## 500 1.0018 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3129 nan 0.0100 0.0038
## 2 1.3039 nan 0.0100 0.0041
## 3 1.2957 nan 0.0100 0.0033
## 4 1.2876 nan 0.0100 0.0038
## 5 1.2794 nan 0.0100 0.0036
## 6 1.2721 nan 0.0100 0.0030
## 7 1.2647 nan 0.0100 0.0035
## 8 1.2572 nan 0.0100 0.0032
## 9 1.2496 nan 0.0100 0.0035
## 10 1.2423 nan 0.0100 0.0031
## 20 1.1775 nan 0.0100 0.0029
## 40 1.0695 nan 0.0100 0.0018
## 60 0.9858 nan 0.0100 0.0017
## 80 0.9211 nan 0.0100 0.0013
## 100 0.8687 nan 0.0100 0.0008
## 120 0.8256 nan 0.0100 0.0007
## 140 0.7887 nan 0.0100 0.0004
## 160 0.7598 nan 0.0100 0.0002
## 180 0.7347 nan 0.0100 0.0003
## 200 0.7117 nan 0.0100 0.0004
## 220 0.6918 nan 0.0100 0.0003
## 240 0.6739 nan 0.0100 0.0003
## 260 0.6581 nan 0.0100 0.0000
## 280 0.6438 nan 0.0100 -0.0000
## 300 0.6315 nan 0.0100 0.0000
## 320 0.6193 nan 0.0100 0.0001
## 340 0.6080 nan 0.0100 0.0000
## 360 0.5974 nan 0.0100 -0.0000
## 380 0.5866 nan 0.0100 0.0001
## 400 0.5771 nan 0.0100 0.0001
## 420 0.5684 nan 0.0100 -0.0001
## 440 0.5595 nan 0.0100 -0.0001
## 460 0.5508 nan 0.0100 0.0000
## 480 0.5428 nan 0.0100 -0.0000
## 500 0.5352 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3130 nan 0.0100 0.0033
## 2 1.3046 nan 0.0100 0.0038
## 3 1.2967 nan 0.0100 0.0037
## 4 1.2890 nan 0.0100 0.0033
## 5 1.2808 nan 0.0100 0.0036
## 6 1.2730 nan 0.0100 0.0037
## 7 1.2651 nan 0.0100 0.0033
## 8 1.2568 nan 0.0100 0.0035
## 9 1.2502 nan 0.0100 0.0026
## 10 1.2434 nan 0.0100 0.0034
## 20 1.1772 nan 0.0100 0.0027
## 40 1.0715 nan 0.0100 0.0021
## 60 0.9882 nan 0.0100 0.0015
## 80 0.9234 nan 0.0100 0.0007
## 100 0.8708 nan 0.0100 0.0009
## 120 0.8280 nan 0.0100 0.0008
## 140 0.7928 nan 0.0100 0.0005
## 160 0.7653 nan 0.0100 0.0002
## 180 0.7394 nan 0.0100 0.0003
## 200 0.7171 nan 0.0100 0.0003
## 220 0.6971 nan 0.0100 0.0004
## 240 0.6796 nan 0.0100 -0.0000
## 260 0.6647 nan 0.0100 0.0002
## 280 0.6499 nan 0.0100 0.0001
## 300 0.6369 nan 0.0100 0.0000
## 320 0.6245 nan 0.0100 0.0002
## 340 0.6129 nan 0.0100 -0.0001
## 360 0.6020 nan 0.0100 0.0000
## 380 0.5913 nan 0.0100 -0.0001
## 400 0.5807 nan 0.0100 -0.0000
## 420 0.5726 nan 0.0100 -0.0001
## 440 0.5647 nan 0.0100 0.0001
## 460 0.5566 nan 0.0100 0.0000
## 480 0.5477 nan 0.0100 0.0000
## 500 0.5395 nan 0.0100 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3126 nan 0.0100 0.0037
## 2 1.3045 nan 0.0100 0.0037
## 3 1.2965 nan 0.0100 0.0038
## 4 1.2887 nan 0.0100 0.0038
## 5 1.2812 nan 0.0100 0.0032
## 6 1.2726 nan 0.0100 0.0038
## 7 1.2652 nan 0.0100 0.0031
## 8 1.2574 nan 0.0100 0.0037
## 9 1.2501 nan 0.0100 0.0035
## 10 1.2431 nan 0.0100 0.0034
## 20 1.1776 nan 0.0100 0.0027
## 40 1.0724 nan 0.0100 0.0019
## 60 0.9923 nan 0.0100 0.0012
## 80 0.9264 nan 0.0100 0.0014
## 100 0.8725 nan 0.0100 0.0010
## 120 0.8298 nan 0.0100 0.0008
## 140 0.7940 nan 0.0100 0.0006
## 160 0.7655 nan 0.0100 0.0004
## 180 0.7403 nan 0.0100 0.0003
## 200 0.7195 nan 0.0100 0.0002
## 220 0.7011 nan 0.0100 0.0001
## 240 0.6843 nan 0.0100 0.0002
## 260 0.6682 nan 0.0100 0.0001
## 280 0.6550 nan 0.0100 0.0001
## 300 0.6420 nan 0.0100 0.0002
## 320 0.6299 nan 0.0100 -0.0001
## 340 0.6194 nan 0.0100 0.0000
## 360 0.6087 nan 0.0100 0.0000
## 380 0.5984 nan 0.0100 -0.0001
## 400 0.5893 nan 0.0100 -0.0000
## 420 0.5810 nan 0.0100 -0.0000
## 440 0.5725 nan 0.0100 -0.0000
## 460 0.5638 nan 0.0100 -0.0000
## 480 0.5557 nan 0.0100 -0.0001
## 500 0.5471 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3124 nan 0.0100 0.0042
## 2 1.3041 nan 0.0100 0.0036
## 3 1.2960 nan 0.0100 0.0035
## 4 1.2878 nan 0.0100 0.0037
## 5 1.2794 nan 0.0100 0.0038
## 6 1.2714 nan 0.0100 0.0036
## 7 1.2641 nan 0.0100 0.0025
## 8 1.2562 nan 0.0100 0.0032
## 9 1.2479 nan 0.0100 0.0035
## 10 1.2406 nan 0.0100 0.0028
## 20 1.1724 nan 0.0100 0.0026
## 40 1.0587 nan 0.0100 0.0020
## 60 0.9728 nan 0.0100 0.0015
## 80 0.9050 nan 0.0100 0.0010
## 100 0.8496 nan 0.0100 0.0006
## 120 0.8043 nan 0.0100 0.0008
## 140 0.7656 nan 0.0100 0.0006
## 160 0.7324 nan 0.0100 0.0004
## 180 0.7058 nan 0.0100 0.0004
## 200 0.6827 nan 0.0100 0.0000
## 220 0.6614 nan 0.0100 0.0003
## 240 0.6412 nan 0.0100 0.0002
## 260 0.6241 nan 0.0100 0.0000
## 280 0.6073 nan 0.0100 0.0001
## 300 0.5923 nan 0.0100 0.0000
## 320 0.5786 nan 0.0100 0.0000
## 340 0.5666 nan 0.0100 -0.0001
## 360 0.5539 nan 0.0100 -0.0000
## 380 0.5429 nan 0.0100 0.0000
## 400 0.5320 nan 0.0100 0.0001
## 420 0.5223 nan 0.0100 0.0002
## 440 0.5125 nan 0.0100 -0.0002
## 460 0.5028 nan 0.0100 -0.0000
## 480 0.4935 nan 0.0100 0.0001
## 500 0.4846 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3121 nan 0.0100 0.0041
## 2 1.3029 nan 0.0100 0.0040
## 3 1.2957 nan 0.0100 0.0036
## 4 1.2867 nan 0.0100 0.0039
## 5 1.2780 nan 0.0100 0.0039
## 6 1.2698 nan 0.0100 0.0035
## 7 1.2620 nan 0.0100 0.0034
## 8 1.2540 nan 0.0100 0.0038
## 9 1.2463 nan 0.0100 0.0031
## 10 1.2386 nan 0.0100 0.0032
## 20 1.1691 nan 0.0100 0.0029
## 40 1.0573 nan 0.0100 0.0022
## 60 0.9698 nan 0.0100 0.0018
## 80 0.9020 nan 0.0100 0.0013
## 100 0.8456 nan 0.0100 0.0007
## 120 0.8014 nan 0.0100 0.0007
## 140 0.7648 nan 0.0100 0.0004
## 160 0.7340 nan 0.0100 0.0004
## 180 0.7069 nan 0.0100 0.0004
## 200 0.6836 nan 0.0100 0.0001
## 220 0.6630 nan 0.0100 0.0000
## 240 0.6455 nan 0.0100 0.0004
## 260 0.6297 nan 0.0100 0.0001
## 280 0.6143 nan 0.0100 0.0001
## 300 0.5993 nan 0.0100 0.0001
## 320 0.5862 nan 0.0100 0.0001
## 340 0.5733 nan 0.0100 -0.0001
## 360 0.5616 nan 0.0100 0.0000
## 380 0.5498 nan 0.0100 -0.0001
## 400 0.5388 nan 0.0100 0.0001
## 420 0.5285 nan 0.0100 -0.0001
## 440 0.5188 nan 0.0100 0.0001
## 460 0.5090 nan 0.0100 -0.0002
## 480 0.4998 nan 0.0100 -0.0001
## 500 0.4917 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3124 nan 0.0100 0.0040
## 2 1.3042 nan 0.0100 0.0037
## 3 1.2956 nan 0.0100 0.0042
## 4 1.2869 nan 0.0100 0.0040
## 5 1.2791 nan 0.0100 0.0032
## 6 1.2708 nan 0.0100 0.0036
## 7 1.2621 nan 0.0100 0.0037
## 8 1.2542 nan 0.0100 0.0032
## 9 1.2466 nan 0.0100 0.0035
## 10 1.2391 nan 0.0100 0.0034
## 20 1.1701 nan 0.0100 0.0026
## 40 1.0582 nan 0.0100 0.0023
## 60 0.9739 nan 0.0100 0.0016
## 80 0.9054 nan 0.0100 0.0014
## 100 0.8511 nan 0.0100 0.0008
## 120 0.8076 nan 0.0100 0.0008
## 140 0.7709 nan 0.0100 0.0005
## 160 0.7385 nan 0.0100 0.0005
## 180 0.7134 nan 0.0100 0.0002
## 200 0.6907 nan 0.0100 0.0002
## 220 0.6713 nan 0.0100 0.0001
## 240 0.6536 nan 0.0100 0.0002
## 260 0.6371 nan 0.0100 0.0001
## 280 0.6221 nan 0.0100 0.0002
## 300 0.6085 nan 0.0100 0.0000
## 320 0.5953 nan 0.0100 0.0001
## 340 0.5832 nan 0.0100 -0.0001
## 360 0.5715 nan 0.0100 -0.0001
## 380 0.5613 nan 0.0100 -0.0001
## 400 0.5516 nan 0.0100 0.0001
## 420 0.5415 nan 0.0100 -0.0000
## 440 0.5321 nan 0.0100 -0.0001
## 460 0.5228 nan 0.0100 -0.0000
## 480 0.5141 nan 0.0100 -0.0001
## 500 0.5052 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3113 nan 0.0100 0.0043
## 2 1.3012 nan 0.0100 0.0042
## 3 1.2923 nan 0.0100 0.0038
## 4 1.2841 nan 0.0100 0.0035
## 5 1.2747 nan 0.0100 0.0044
## 6 1.2657 nan 0.0100 0.0036
## 7 1.2570 nan 0.0100 0.0036
## 8 1.2494 nan 0.0100 0.0034
## 9 1.2415 nan 0.0100 0.0032
## 10 1.2338 nan 0.0100 0.0033
## 20 1.1613 nan 0.0100 0.0030
## 40 1.0438 nan 0.0100 0.0024
## 60 0.9528 nan 0.0100 0.0016
## 80 0.8806 nan 0.0100 0.0012
## 100 0.8236 nan 0.0100 0.0009
## 120 0.7763 nan 0.0100 0.0008
## 140 0.7362 nan 0.0100 0.0005
## 160 0.7033 nan 0.0100 0.0004
## 180 0.6748 nan 0.0100 0.0003
## 200 0.6492 nan 0.0100 0.0001
## 220 0.6259 nan 0.0100 0.0003
## 240 0.6061 nan 0.0100 0.0002
## 260 0.5882 nan 0.0100 0.0002
## 280 0.5716 nan 0.0100 0.0003
## 300 0.5564 nan 0.0100 0.0000
## 320 0.5421 nan 0.0100 0.0001
## 340 0.5290 nan 0.0100 -0.0001
## 360 0.5165 nan 0.0100 -0.0000
## 380 0.5045 nan 0.0100 0.0001
## 400 0.4916 nan 0.0100 -0.0002
## 420 0.4799 nan 0.0100 0.0001
## 440 0.4695 nan 0.0100 -0.0000
## 460 0.4592 nan 0.0100 0.0002
## 480 0.4485 nan 0.0100 -0.0001
## 500 0.4390 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3115 nan 0.0100 0.0046
## 2 1.3027 nan 0.0100 0.0041
## 3 1.2939 nan 0.0100 0.0040
## 4 1.2850 nan 0.0100 0.0041
## 5 1.2759 nan 0.0100 0.0041
## 6 1.2679 nan 0.0100 0.0035
## 7 1.2594 nan 0.0100 0.0036
## 8 1.2509 nan 0.0100 0.0038
## 9 1.2429 nan 0.0100 0.0036
## 10 1.2358 nan 0.0100 0.0030
## 20 1.1641 nan 0.0100 0.0025
## 40 1.0456 nan 0.0100 0.0019
## 60 0.9581 nan 0.0100 0.0016
## 80 0.8863 nan 0.0100 0.0012
## 100 0.8303 nan 0.0100 0.0010
## 120 0.7833 nan 0.0100 0.0007
## 140 0.7439 nan 0.0100 0.0003
## 160 0.7102 nan 0.0100 0.0004
## 180 0.6819 nan 0.0100 0.0001
## 200 0.6567 nan 0.0100 0.0002
## 220 0.6353 nan 0.0100 0.0002
## 240 0.6160 nan 0.0100 0.0001
## 260 0.5978 nan 0.0100 0.0003
## 280 0.5816 nan 0.0100 0.0001
## 300 0.5665 nan 0.0100 0.0001
## 320 0.5525 nan 0.0100 0.0000
## 340 0.5388 nan 0.0100 -0.0000
## 360 0.5261 nan 0.0100 0.0000
## 380 0.5140 nan 0.0100 0.0001
## 400 0.5033 nan 0.0100 -0.0001
## 420 0.4915 nan 0.0100 0.0001
## 440 0.4801 nan 0.0100 0.0000
## 460 0.4702 nan 0.0100 0.0000
## 480 0.4605 nan 0.0100 -0.0000
## 500 0.4508 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3114 nan 0.0100 0.0044
## 2 1.3021 nan 0.0100 0.0039
## 3 1.2928 nan 0.0100 0.0042
## 4 1.2839 nan 0.0100 0.0037
## 5 1.2762 nan 0.0100 0.0036
## 6 1.2678 nan 0.0100 0.0037
## 7 1.2590 nan 0.0100 0.0036
## 8 1.2509 nan 0.0100 0.0034
## 9 1.2428 nan 0.0100 0.0032
## 10 1.2354 nan 0.0100 0.0031
## 20 1.1614 nan 0.0100 0.0031
## 40 1.0442 nan 0.0100 0.0022
## 60 0.9536 nan 0.0100 0.0016
## 80 0.8846 nan 0.0100 0.0013
## 100 0.8279 nan 0.0100 0.0009
## 120 0.7819 nan 0.0100 0.0007
## 140 0.7438 nan 0.0100 0.0005
## 160 0.7118 nan 0.0100 0.0005
## 180 0.6847 nan 0.0100 0.0004
## 200 0.6611 nan 0.0100 0.0002
## 220 0.6399 nan 0.0100 0.0001
## 240 0.6211 nan 0.0100 0.0001
## 260 0.6034 nan 0.0100 0.0001
## 280 0.5879 nan 0.0100 -0.0001
## 300 0.5723 nan 0.0100 0.0000
## 320 0.5579 nan 0.0100 -0.0003
## 340 0.5438 nan 0.0100 -0.0001
## 360 0.5306 nan 0.0100 -0.0001
## 380 0.5192 nan 0.0100 -0.0000
## 400 0.5084 nan 0.0100 -0.0001
## 420 0.4967 nan 0.0100 0.0000
## 440 0.4864 nan 0.0100 0.0000
## 460 0.4762 nan 0.0100 -0.0000
## 480 0.4664 nan 0.0100 -0.0001
## 500 0.4570 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2371 nan 0.1000 0.0350
## 2 1.1688 nan 0.1000 0.0332
## 3 1.1141 nan 0.1000 0.0253
## 4 1.0668 nan 0.1000 0.0198
## 5 1.0290 nan 0.1000 0.0138
## 6 0.9913 nan 0.1000 0.0153
## 7 0.9629 nan 0.1000 0.0119
## 8 0.9345 nan 0.1000 0.0119
## 9 0.9094 nan 0.1000 0.0087
## 10 0.8836 nan 0.1000 0.0086
## 20 0.7168 nan 0.1000 0.0001
## 40 0.5774 nan 0.1000 -0.0001
## 60 0.4981 nan 0.1000 0.0000
## 80 0.4358 nan 0.1000 0.0002
## 100 0.3820 nan 0.1000 -0.0009
## 120 0.3404 nan 0.1000 -0.0007
## 140 0.3062 nan 0.1000 -0.0012
## 160 0.2757 nan 0.1000 -0.0002
## 180 0.2470 nan 0.1000 0.0000
## 200 0.2205 nan 0.1000 -0.0001
## 220 0.2013 nan 0.1000 -0.0007
## 240 0.1807 nan 0.1000 -0.0004
## 260 0.1646 nan 0.1000 -0.0003
## 280 0.1515 nan 0.1000 -0.0003
## 300 0.1379 nan 0.1000 -0.0004
## 320 0.1255 nan 0.1000 -0.0004
## 340 0.1129 nan 0.1000 -0.0002
## 360 0.1018 nan 0.1000 -0.0001
## 380 0.0933 nan 0.1000 -0.0002
## 400 0.0859 nan 0.1000 -0.0000
## 420 0.0788 nan 0.1000 -0.0003
## 440 0.0722 nan 0.1000 -0.0002
## 460 0.0678 nan 0.1000 -0.0002
## 480 0.0612 nan 0.1000 -0.0002
## 500 0.0567 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2409 nan 0.1000 0.0362
## 2 1.1744 nan 0.1000 0.0340
## 3 1.1116 nan 0.1000 0.0251
## 4 1.0595 nan 0.1000 0.0213
## 5 1.0150 nan 0.1000 0.0202
## 6 0.9805 nan 0.1000 0.0126
## 7 0.9478 nan 0.1000 0.0136
## 8 0.9183 nan 0.1000 0.0127
## 9 0.8890 nan 0.1000 0.0106
## 10 0.8645 nan 0.1000 0.0108
## 20 0.7184 nan 0.1000 0.0028
## 40 0.5998 nan 0.1000 0.0001
## 60 0.5173 nan 0.1000 -0.0009
## 80 0.4584 nan 0.1000 -0.0010
## 100 0.4009 nan 0.1000 -0.0004
## 120 0.3563 nan 0.1000 -0.0014
## 140 0.3193 nan 0.1000 0.0000
## 160 0.2880 nan 0.1000 -0.0009
## 180 0.2582 nan 0.1000 -0.0004
## 200 0.2347 nan 0.1000 -0.0007
## 220 0.2129 nan 0.1000 -0.0005
## 240 0.1932 nan 0.1000 -0.0003
## 260 0.1771 nan 0.1000 -0.0008
## 280 0.1614 nan 0.1000 -0.0005
## 300 0.1475 nan 0.1000 -0.0002
## 320 0.1348 nan 0.1000 -0.0005
## 340 0.1235 nan 0.1000 -0.0001
## 360 0.1147 nan 0.1000 -0.0008
## 380 0.1056 nan 0.1000 -0.0002
## 400 0.0978 nan 0.1000 -0.0003
## 420 0.0905 nan 0.1000 -0.0003
## 440 0.0834 nan 0.1000 -0.0000
## 460 0.0771 nan 0.1000 -0.0004
## 480 0.0716 nan 0.1000 -0.0003
## 500 0.0664 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2397 nan 0.1000 0.0378
## 2 1.1726 nan 0.1000 0.0310
## 3 1.1083 nan 0.1000 0.0290
## 4 1.0607 nan 0.1000 0.0212
## 5 1.0227 nan 0.1000 0.0179
## 6 0.9898 nan 0.1000 0.0134
## 7 0.9604 nan 0.1000 0.0117
## 8 0.9328 nan 0.1000 0.0101
## 9 0.9033 nan 0.1000 0.0128
## 10 0.8784 nan 0.1000 0.0105
## 20 0.7264 nan 0.1000 0.0015
## 40 0.6068 nan 0.1000 -0.0003
## 60 0.5203 nan 0.1000 -0.0013
## 80 0.4656 nan 0.1000 -0.0013
## 100 0.4127 nan 0.1000 -0.0009
## 120 0.3703 nan 0.1000 -0.0010
## 140 0.3309 nan 0.1000 -0.0006
## 160 0.2965 nan 0.1000 -0.0005
## 180 0.2645 nan 0.1000 -0.0003
## 200 0.2384 nan 0.1000 -0.0008
## 220 0.2177 nan 0.1000 -0.0011
## 240 0.1992 nan 0.1000 -0.0008
## 260 0.1804 nan 0.1000 -0.0004
## 280 0.1646 nan 0.1000 -0.0002
## 300 0.1511 nan 0.1000 -0.0005
## 320 0.1383 nan 0.1000 -0.0000
## 340 0.1274 nan 0.1000 -0.0006
## 360 0.1176 nan 0.1000 -0.0001
## 380 0.1084 nan 0.1000 -0.0002
## 400 0.1001 nan 0.1000 -0.0002
## 420 0.0927 nan 0.1000 -0.0003
## 440 0.0865 nan 0.1000 -0.0004
## 460 0.0797 nan 0.1000 -0.0002
## 480 0.0743 nan 0.1000 -0.0002
## 500 0.0694 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2401 nan 0.1000 0.0378
## 2 1.1714 nan 0.1000 0.0290
## 3 1.1112 nan 0.1000 0.0239
## 4 1.0593 nan 0.1000 0.0235
## 5 1.0093 nan 0.1000 0.0236
## 6 0.9703 nan 0.1000 0.0138
## 7 0.9365 nan 0.1000 0.0120
## 8 0.9054 nan 0.1000 0.0145
## 9 0.8767 nan 0.1000 0.0092
## 10 0.8507 nan 0.1000 0.0102
## 20 0.6876 nan 0.1000 0.0005
## 40 0.5419 nan 0.1000 -0.0012
## 60 0.4595 nan 0.1000 -0.0028
## 80 0.3933 nan 0.1000 -0.0011
## 100 0.3328 nan 0.1000 -0.0008
## 120 0.2879 nan 0.1000 -0.0012
## 140 0.2499 nan 0.1000 -0.0007
## 160 0.2202 nan 0.1000 -0.0001
## 180 0.1921 nan 0.1000 -0.0005
## 200 0.1693 nan 0.1000 -0.0007
## 220 0.1517 nan 0.1000 -0.0000
## 240 0.1347 nan 0.1000 -0.0004
## 260 0.1200 nan 0.1000 -0.0001
## 280 0.1075 nan 0.1000 -0.0001
## 300 0.0971 nan 0.1000 -0.0002
## 320 0.0867 nan 0.1000 -0.0004
## 340 0.0773 nan 0.1000 -0.0002
## 360 0.0700 nan 0.1000 -0.0002
## 380 0.0623 nan 0.1000 -0.0001
## 400 0.0569 nan 0.1000 -0.0001
## 420 0.0518 nan 0.1000 -0.0001
## 440 0.0468 nan 0.1000 -0.0002
## 460 0.0426 nan 0.1000 -0.0001
## 480 0.0384 nan 0.1000 -0.0001
## 500 0.0349 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2369 nan 0.1000 0.0407
## 2 1.1608 nan 0.1000 0.0340
## 3 1.1053 nan 0.1000 0.0279
## 4 1.0493 nan 0.1000 0.0243
## 5 1.0053 nan 0.1000 0.0171
## 6 0.9656 nan 0.1000 0.0180
## 7 0.9311 nan 0.1000 0.0160
## 8 0.9021 nan 0.1000 0.0109
## 9 0.8743 nan 0.1000 0.0088
## 10 0.8493 nan 0.1000 0.0094
## 20 0.6853 nan 0.1000 0.0018
## 40 0.5473 nan 0.1000 0.0001
## 60 0.4505 nan 0.1000 -0.0007
## 80 0.3848 nan 0.1000 -0.0016
## 100 0.3370 nan 0.1000 -0.0000
## 120 0.2907 nan 0.1000 -0.0008
## 140 0.2559 nan 0.1000 -0.0001
## 160 0.2248 nan 0.1000 -0.0003
## 180 0.1974 nan 0.1000 -0.0005
## 200 0.1760 nan 0.1000 -0.0009
## 220 0.1571 nan 0.1000 -0.0003
## 240 0.1405 nan 0.1000 -0.0003
## 260 0.1264 nan 0.1000 -0.0003
## 280 0.1121 nan 0.1000 -0.0003
## 300 0.1008 nan 0.1000 -0.0001
## 320 0.0904 nan 0.1000 -0.0000
## 340 0.0815 nan 0.1000 -0.0002
## 360 0.0735 nan 0.1000 -0.0002
## 380 0.0662 nan 0.1000 -0.0003
## 400 0.0603 nan 0.1000 -0.0002
## 420 0.0552 nan 0.1000 -0.0002
## 440 0.0502 nan 0.1000 -0.0001
## 460 0.0452 nan 0.1000 -0.0001
## 480 0.0411 nan 0.1000 -0.0001
## 500 0.0370 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2357 nan 0.1000 0.0363
## 2 1.1726 nan 0.1000 0.0275
## 3 1.1146 nan 0.1000 0.0274
## 4 1.0586 nan 0.1000 0.0256
## 5 1.0102 nan 0.1000 0.0196
## 6 0.9701 nan 0.1000 0.0157
## 7 0.9365 nan 0.1000 0.0141
## 8 0.9029 nan 0.1000 0.0139
## 9 0.8777 nan 0.1000 0.0093
## 10 0.8535 nan 0.1000 0.0102
## 20 0.6964 nan 0.1000 0.0016
## 40 0.5495 nan 0.1000 -0.0004
## 60 0.4639 nan 0.1000 -0.0011
## 80 0.3948 nan 0.1000 -0.0004
## 100 0.3454 nan 0.1000 -0.0016
## 120 0.3027 nan 0.1000 -0.0019
## 140 0.2667 nan 0.1000 -0.0003
## 160 0.2366 nan 0.1000 -0.0009
## 180 0.2106 nan 0.1000 -0.0007
## 200 0.1891 nan 0.1000 -0.0006
## 220 0.1680 nan 0.1000 -0.0000
## 240 0.1516 nan 0.1000 -0.0005
## 260 0.1356 nan 0.1000 -0.0005
## 280 0.1214 nan 0.1000 -0.0006
## 300 0.1093 nan 0.1000 -0.0008
## 320 0.0997 nan 0.1000 -0.0003
## 340 0.0886 nan 0.1000 -0.0003
## 360 0.0809 nan 0.1000 -0.0002
## 380 0.0731 nan 0.1000 -0.0002
## 400 0.0671 nan 0.1000 -0.0002
## 420 0.0607 nan 0.1000 -0.0001
## 440 0.0554 nan 0.1000 -0.0001
## 460 0.0506 nan 0.1000 -0.0002
## 480 0.0458 nan 0.1000 -0.0002
## 500 0.0417 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2403 nan 0.1000 0.0394
## 2 1.1616 nan 0.1000 0.0363
## 3 1.1005 nan 0.1000 0.0246
## 4 1.0468 nan 0.1000 0.0219
## 5 0.9977 nan 0.1000 0.0216
## 6 0.9526 nan 0.1000 0.0192
## 7 0.9180 nan 0.1000 0.0129
## 8 0.8855 nan 0.1000 0.0127
## 9 0.8546 nan 0.1000 0.0107
## 10 0.8276 nan 0.1000 0.0092
## 20 0.6545 nan 0.1000 0.0019
## 40 0.5027 nan 0.1000 -0.0013
## 60 0.4003 nan 0.1000 -0.0016
## 80 0.3313 nan 0.1000 -0.0002
## 100 0.2815 nan 0.1000 -0.0003
## 120 0.2454 nan 0.1000 -0.0009
## 140 0.2049 nan 0.1000 -0.0004
## 160 0.1745 nan 0.1000 -0.0002
## 180 0.1491 nan 0.1000 -0.0004
## 200 0.1292 nan 0.1000 -0.0004
## 220 0.1130 nan 0.1000 -0.0003
## 240 0.0986 nan 0.1000 -0.0005
## 260 0.0882 nan 0.1000 -0.0004
## 280 0.0782 nan 0.1000 -0.0003
## 300 0.0686 nan 0.1000 -0.0002
## 320 0.0607 nan 0.1000 -0.0001
## 340 0.0539 nan 0.1000 -0.0001
## 360 0.0469 nan 0.1000 -0.0000
## 380 0.0416 nan 0.1000 -0.0001
## 400 0.0367 nan 0.1000 0.0000
## 420 0.0321 nan 0.1000 -0.0001
## 440 0.0286 nan 0.1000 -0.0001
## 460 0.0256 nan 0.1000 -0.0000
## 480 0.0229 nan 0.1000 -0.0001
## 500 0.0202 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2367 nan 0.1000 0.0389
## 2 1.1643 nan 0.1000 0.0291
## 3 1.0923 nan 0.1000 0.0311
## 4 1.0380 nan 0.1000 0.0219
## 5 0.9910 nan 0.1000 0.0197
## 6 0.9471 nan 0.1000 0.0167
## 7 0.9127 nan 0.1000 0.0150
## 8 0.8831 nan 0.1000 0.0113
## 9 0.8550 nan 0.1000 0.0114
## 10 0.8280 nan 0.1000 0.0098
## 20 0.6623 nan 0.1000 0.0030
## 40 0.5102 nan 0.1000 -0.0014
## 60 0.4213 nan 0.1000 -0.0016
## 80 0.3473 nan 0.1000 -0.0006
## 100 0.2955 nan 0.1000 -0.0000
## 120 0.2507 nan 0.1000 -0.0007
## 140 0.2121 nan 0.1000 -0.0007
## 160 0.1808 nan 0.1000 -0.0003
## 180 0.1544 nan 0.1000 -0.0005
## 200 0.1341 nan 0.1000 -0.0003
## 220 0.1168 nan 0.1000 -0.0005
## 240 0.1018 nan 0.1000 -0.0003
## 260 0.0892 nan 0.1000 -0.0002
## 280 0.0785 nan 0.1000 -0.0003
## 300 0.0687 nan 0.1000 -0.0003
## 320 0.0606 nan 0.1000 -0.0001
## 340 0.0528 nan 0.1000 -0.0001
## 360 0.0463 nan 0.1000 -0.0001
## 380 0.0406 nan 0.1000 -0.0002
## 400 0.0356 nan 0.1000 -0.0001
## 420 0.0317 nan 0.1000 -0.0000
## 440 0.0284 nan 0.1000 -0.0001
## 460 0.0248 nan 0.1000 -0.0001
## 480 0.0218 nan 0.1000 -0.0000
## 500 0.0194 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2320 nan 0.1000 0.0410
## 2 1.1620 nan 0.1000 0.0305
## 3 1.1031 nan 0.1000 0.0272
## 4 1.0519 nan 0.1000 0.0200
## 5 1.0070 nan 0.1000 0.0199
## 6 0.9596 nan 0.1000 0.0204
## 7 0.9246 nan 0.1000 0.0112
## 8 0.8866 nan 0.1000 0.0144
## 9 0.8580 nan 0.1000 0.0098
## 10 0.8330 nan 0.1000 0.0053
## 20 0.6744 nan 0.1000 0.0006
## 40 0.5217 nan 0.1000 0.0016
## 60 0.4239 nan 0.1000 -0.0004
## 80 0.3529 nan 0.1000 -0.0010
## 100 0.2978 nan 0.1000 -0.0019
## 120 0.2534 nan 0.1000 -0.0011
## 140 0.2178 nan 0.1000 -0.0006
## 160 0.1886 nan 0.1000 -0.0009
## 180 0.1613 nan 0.1000 -0.0002
## 200 0.1413 nan 0.1000 -0.0002
## 220 0.1227 nan 0.1000 -0.0001
## 240 0.1083 nan 0.1000 -0.0002
## 260 0.0950 nan 0.1000 -0.0003
## 280 0.0845 nan 0.1000 -0.0002
## 300 0.0746 nan 0.1000 -0.0004
## 320 0.0650 nan 0.1000 -0.0002
## 340 0.0579 nan 0.1000 -0.0002
## 360 0.0515 nan 0.1000 -0.0002
## 380 0.0453 nan 0.1000 -0.0002
## 400 0.0401 nan 0.1000 -0.0003
## 420 0.0354 nan 0.1000 -0.0001
## 440 0.0315 nan 0.1000 -0.0001
## 460 0.0279 nan 0.1000 -0.0002
## 480 0.0247 nan 0.1000 -0.0001
## 500 0.0219 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3199 nan 0.0010 0.0003
## 2 1.3192 nan 0.0010 0.0004
## 3 1.3184 nan 0.0010 0.0003
## 4 1.3176 nan 0.0010 0.0004
## 5 1.3168 nan 0.0010 0.0003
## 6 1.3160 nan 0.0010 0.0004
## 7 1.3152 nan 0.0010 0.0003
## 8 1.3145 nan 0.0010 0.0004
## 9 1.3137 nan 0.0010 0.0003
## 10 1.3129 nan 0.0010 0.0003
## 20 1.3048 nan 0.0010 0.0004
## 40 1.2896 nan 0.0010 0.0003
## 60 1.2745 nan 0.0010 0.0004
## 80 1.2602 nan 0.0010 0.0003
## 100 1.2461 nan 0.0010 0.0003
## 120 1.2327 nan 0.0010 0.0003
## 140 1.2200 nan 0.0010 0.0003
## 160 1.2071 nan 0.0010 0.0003
## 180 1.1946 nan 0.0010 0.0002
## 200 1.1826 nan 0.0010 0.0003
## 220 1.1711 nan 0.0010 0.0003
## 240 1.1598 nan 0.0010 0.0003
## 260 1.1486 nan 0.0010 0.0002
## 280 1.1381 nan 0.0010 0.0002
## 300 1.1274 nan 0.0010 0.0002
## 320 1.1173 nan 0.0010 0.0002
## 340 1.1074 nan 0.0010 0.0002
## 360 1.0979 nan 0.0010 0.0002
## 380 1.0886 nan 0.0010 0.0002
## 400 1.0790 nan 0.0010 0.0002
## 420 1.0701 nan 0.0010 0.0002
## 440 1.0614 nan 0.0010 0.0002
## 460 1.0530 nan 0.0010 0.0001
## 480 1.0448 nan 0.0010 0.0002
## 500 1.0369 nan 0.0010 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3191 nan 0.0010 0.0003
## 3 1.3183 nan 0.0010 0.0003
## 4 1.3175 nan 0.0010 0.0004
## 5 1.3167 nan 0.0010 0.0004
## 6 1.3159 nan 0.0010 0.0004
## 7 1.3150 nan 0.0010 0.0004
## 8 1.3142 nan 0.0010 0.0003
## 9 1.3135 nan 0.0010 0.0003
## 10 1.3126 nan 0.0010 0.0004
## 20 1.3049 nan 0.0010 0.0004
## 40 1.2895 nan 0.0010 0.0004
## 60 1.2749 nan 0.0010 0.0003
## 80 1.2603 nan 0.0010 0.0003
## 100 1.2465 nan 0.0010 0.0003
## 120 1.2328 nan 0.0010 0.0003
## 140 1.2196 nan 0.0010 0.0003
## 160 1.2067 nan 0.0010 0.0003
## 180 1.1945 nan 0.0010 0.0003
## 200 1.1823 nan 0.0010 0.0002
## 220 1.1703 nan 0.0010 0.0003
## 240 1.1589 nan 0.0010 0.0003
## 260 1.1479 nan 0.0010 0.0002
## 280 1.1372 nan 0.0010 0.0002
## 300 1.1267 nan 0.0010 0.0002
## 320 1.1164 nan 0.0010 0.0002
## 340 1.1065 nan 0.0010 0.0002
## 360 1.0969 nan 0.0010 0.0002
## 380 1.0875 nan 0.0010 0.0002
## 400 1.0787 nan 0.0010 0.0002
## 420 1.0697 nan 0.0010 0.0002
## 440 1.0610 nan 0.0010 0.0002
## 460 1.0527 nan 0.0010 0.0002
## 480 1.0443 nan 0.0010 0.0002
## 500 1.0365 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3191 nan 0.0010 0.0003
## 3 1.3183 nan 0.0010 0.0004
## 4 1.3175 nan 0.0010 0.0004
## 5 1.3167 nan 0.0010 0.0004
## 6 1.3159 nan 0.0010 0.0003
## 7 1.3151 nan 0.0010 0.0003
## 8 1.3143 nan 0.0010 0.0004
## 9 1.3136 nan 0.0010 0.0004
## 10 1.3128 nan 0.0010 0.0004
## 20 1.3050 nan 0.0010 0.0004
## 40 1.2896 nan 0.0010 0.0003
## 60 1.2747 nan 0.0010 0.0003
## 80 1.2603 nan 0.0010 0.0003
## 100 1.2462 nan 0.0010 0.0003
## 120 1.2326 nan 0.0010 0.0003
## 140 1.2197 nan 0.0010 0.0003
## 160 1.2070 nan 0.0010 0.0003
## 180 1.1943 nan 0.0010 0.0003
## 200 1.1825 nan 0.0010 0.0003
## 220 1.1707 nan 0.0010 0.0003
## 240 1.1594 nan 0.0010 0.0002
## 260 1.1486 nan 0.0010 0.0002
## 280 1.1380 nan 0.0010 0.0003
## 300 1.1279 nan 0.0010 0.0002
## 320 1.1176 nan 0.0010 0.0002
## 340 1.1078 nan 0.0010 0.0002
## 360 1.0983 nan 0.0010 0.0002
## 380 1.0891 nan 0.0010 0.0002
## 400 1.0798 nan 0.0010 0.0002
## 420 1.0711 nan 0.0010 0.0002
## 440 1.0625 nan 0.0010 0.0002
## 460 1.0541 nan 0.0010 0.0002
## 480 1.0459 nan 0.0010 0.0002
## 500 1.0377 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3197 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3180 nan 0.0010 0.0004
## 4 1.3172 nan 0.0010 0.0004
## 5 1.3163 nan 0.0010 0.0004
## 6 1.3153 nan 0.0010 0.0004
## 7 1.3145 nan 0.0010 0.0004
## 8 1.3137 nan 0.0010 0.0004
## 9 1.3127 nan 0.0010 0.0004
## 10 1.3119 nan 0.0010 0.0004
## 20 1.3033 nan 0.0010 0.0004
## 40 1.2867 nan 0.0010 0.0004
## 60 1.2709 nan 0.0010 0.0004
## 80 1.2553 nan 0.0010 0.0003
## 100 1.2406 nan 0.0010 0.0003
## 120 1.2259 nan 0.0010 0.0003
## 140 1.2120 nan 0.0010 0.0003
## 160 1.1986 nan 0.0010 0.0003
## 180 1.1853 nan 0.0010 0.0003
## 200 1.1726 nan 0.0010 0.0003
## 220 1.1601 nan 0.0010 0.0003
## 240 1.1481 nan 0.0010 0.0002
## 260 1.1364 nan 0.0010 0.0002
## 280 1.1249 nan 0.0010 0.0002
## 300 1.1140 nan 0.0010 0.0002
## 320 1.1032 nan 0.0010 0.0002
## 340 1.0926 nan 0.0010 0.0002
## 360 1.0823 nan 0.0010 0.0002
## 380 1.0724 nan 0.0010 0.0002
## 400 1.0627 nan 0.0010 0.0002
## 420 1.0535 nan 0.0010 0.0002
## 440 1.0445 nan 0.0010 0.0002
## 460 1.0354 nan 0.0010 0.0002
## 480 1.0267 nan 0.0010 0.0002
## 500 1.0182 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3180 nan 0.0010 0.0004
## 4 1.3172 nan 0.0010 0.0004
## 5 1.3163 nan 0.0010 0.0004
## 6 1.3155 nan 0.0010 0.0004
## 7 1.3145 nan 0.0010 0.0004
## 8 1.3136 nan 0.0010 0.0004
## 9 1.3128 nan 0.0010 0.0004
## 10 1.3119 nan 0.0010 0.0004
## 20 1.3032 nan 0.0010 0.0004
## 40 1.2871 nan 0.0010 0.0004
## 60 1.2711 nan 0.0010 0.0004
## 80 1.2558 nan 0.0010 0.0003
## 100 1.2410 nan 0.0010 0.0003
## 120 1.2268 nan 0.0010 0.0003
## 140 1.2128 nan 0.0010 0.0003
## 160 1.1991 nan 0.0010 0.0003
## 180 1.1856 nan 0.0010 0.0003
## 200 1.1729 nan 0.0010 0.0003
## 220 1.1605 nan 0.0010 0.0003
## 240 1.1486 nan 0.0010 0.0003
## 260 1.1369 nan 0.0010 0.0003
## 280 1.1256 nan 0.0010 0.0002
## 300 1.1148 nan 0.0010 0.0002
## 320 1.1041 nan 0.0010 0.0002
## 340 1.0935 nan 0.0010 0.0002
## 360 1.0834 nan 0.0010 0.0002
## 380 1.0737 nan 0.0010 0.0002
## 400 1.0640 nan 0.0010 0.0002
## 420 1.0544 nan 0.0010 0.0002
## 440 1.0456 nan 0.0010 0.0002
## 460 1.0369 nan 0.0010 0.0002
## 480 1.0281 nan 0.0010 0.0002
## 500 1.0198 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3181 nan 0.0010 0.0004
## 4 1.3172 nan 0.0010 0.0004
## 5 1.3164 nan 0.0010 0.0004
## 6 1.3155 nan 0.0010 0.0004
## 7 1.3147 nan 0.0010 0.0003
## 8 1.3138 nan 0.0010 0.0004
## 9 1.3129 nan 0.0010 0.0004
## 10 1.3121 nan 0.0010 0.0004
## 20 1.3035 nan 0.0010 0.0003
## 40 1.2872 nan 0.0010 0.0004
## 60 1.2711 nan 0.0010 0.0003
## 80 1.2560 nan 0.0010 0.0003
## 100 1.2409 nan 0.0010 0.0003
## 120 1.2266 nan 0.0010 0.0003
## 140 1.2127 nan 0.0010 0.0003
## 160 1.1993 nan 0.0010 0.0003
## 180 1.1860 nan 0.0010 0.0003
## 200 1.1732 nan 0.0010 0.0003
## 220 1.1610 nan 0.0010 0.0002
## 240 1.1492 nan 0.0010 0.0003
## 260 1.1375 nan 0.0010 0.0003
## 280 1.1261 nan 0.0010 0.0002
## 300 1.1151 nan 0.0010 0.0002
## 320 1.1042 nan 0.0010 0.0002
## 340 1.0938 nan 0.0010 0.0002
## 360 1.0835 nan 0.0010 0.0002
## 380 1.0738 nan 0.0010 0.0002
## 400 1.0643 nan 0.0010 0.0002
## 420 1.0550 nan 0.0010 0.0002
## 440 1.0459 nan 0.0010 0.0002
## 460 1.0373 nan 0.0010 0.0002
## 480 1.0289 nan 0.0010 0.0002
## 500 1.0206 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3188 nan 0.0010 0.0004
## 3 1.3179 nan 0.0010 0.0004
## 4 1.3170 nan 0.0010 0.0004
## 5 1.3160 nan 0.0010 0.0004
## 6 1.3150 nan 0.0010 0.0004
## 7 1.3141 nan 0.0010 0.0004
## 8 1.3133 nan 0.0010 0.0004
## 9 1.3124 nan 0.0010 0.0004
## 10 1.3114 nan 0.0010 0.0004
## 20 1.3024 nan 0.0010 0.0004
## 40 1.2854 nan 0.0010 0.0003
## 60 1.2686 nan 0.0010 0.0004
## 80 1.2523 nan 0.0010 0.0003
## 100 1.2368 nan 0.0010 0.0003
## 120 1.2214 nan 0.0010 0.0003
## 140 1.2068 nan 0.0010 0.0003
## 160 1.1926 nan 0.0010 0.0003
## 180 1.1787 nan 0.0010 0.0003
## 200 1.1657 nan 0.0010 0.0003
## 220 1.1528 nan 0.0010 0.0003
## 240 1.1403 nan 0.0010 0.0002
## 260 1.1281 nan 0.0010 0.0002
## 280 1.1160 nan 0.0010 0.0003
## 300 1.1044 nan 0.0010 0.0002
## 320 1.0932 nan 0.0010 0.0002
## 340 1.0821 nan 0.0010 0.0002
## 360 1.0714 nan 0.0010 0.0002
## 380 1.0612 nan 0.0010 0.0002
## 400 1.0510 nan 0.0010 0.0002
## 420 1.0413 nan 0.0010 0.0001
## 440 1.0317 nan 0.0010 0.0002
## 460 1.0225 nan 0.0010 0.0002
## 480 1.0135 nan 0.0010 0.0002
## 500 1.0047 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0003
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3179 nan 0.0010 0.0004
## 4 1.3170 nan 0.0010 0.0004
## 5 1.3161 nan 0.0010 0.0004
## 6 1.3151 nan 0.0010 0.0004
## 7 1.3142 nan 0.0010 0.0004
## 8 1.3134 nan 0.0010 0.0004
## 9 1.3125 nan 0.0010 0.0004
## 10 1.3116 nan 0.0010 0.0004
## 20 1.3027 nan 0.0010 0.0003
## 40 1.2855 nan 0.0010 0.0004
## 60 1.2688 nan 0.0010 0.0004
## 80 1.2527 nan 0.0010 0.0003
## 100 1.2373 nan 0.0010 0.0004
## 120 1.2222 nan 0.0010 0.0003
## 140 1.2077 nan 0.0010 0.0003
## 160 1.1935 nan 0.0010 0.0003
## 180 1.1799 nan 0.0010 0.0003
## 200 1.1667 nan 0.0010 0.0003
## 220 1.1536 nan 0.0010 0.0003
## 240 1.1411 nan 0.0010 0.0002
## 260 1.1293 nan 0.0010 0.0003
## 280 1.1174 nan 0.0010 0.0002
## 300 1.1059 nan 0.0010 0.0003
## 320 1.0948 nan 0.0010 0.0002
## 340 1.0839 nan 0.0010 0.0002
## 360 1.0735 nan 0.0010 0.0002
## 380 1.0634 nan 0.0010 0.0002
## 400 1.0534 nan 0.0010 0.0002
## 420 1.0438 nan 0.0010 0.0002
## 440 1.0342 nan 0.0010 0.0002
## 460 1.0249 nan 0.0010 0.0002
## 480 1.0161 nan 0.0010 0.0002
## 500 1.0071 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3180 nan 0.0010 0.0004
## 4 1.3172 nan 0.0010 0.0004
## 5 1.3162 nan 0.0010 0.0004
## 6 1.3154 nan 0.0010 0.0004
## 7 1.3145 nan 0.0010 0.0004
## 8 1.3136 nan 0.0010 0.0004
## 9 1.3128 nan 0.0010 0.0004
## 10 1.3119 nan 0.0010 0.0004
## 20 1.3033 nan 0.0010 0.0003
## 40 1.2862 nan 0.0010 0.0004
## 60 1.2697 nan 0.0010 0.0004
## 80 1.2535 nan 0.0010 0.0004
## 100 1.2378 nan 0.0010 0.0003
## 120 1.2226 nan 0.0010 0.0003
## 140 1.2083 nan 0.0010 0.0003
## 160 1.1943 nan 0.0010 0.0003
## 180 1.1807 nan 0.0010 0.0003
## 200 1.1676 nan 0.0010 0.0003
## 220 1.1551 nan 0.0010 0.0002
## 240 1.1430 nan 0.0010 0.0003
## 260 1.1308 nan 0.0010 0.0003
## 280 1.1191 nan 0.0010 0.0002
## 300 1.1073 nan 0.0010 0.0002
## 320 1.0964 nan 0.0010 0.0002
## 340 1.0857 nan 0.0010 0.0002
## 360 1.0751 nan 0.0010 0.0002
## 380 1.0650 nan 0.0010 0.0002
## 400 1.0551 nan 0.0010 0.0002
## 420 1.0453 nan 0.0010 0.0002
## 440 1.0360 nan 0.0010 0.0002
## 460 1.0267 nan 0.0010 0.0002
## 480 1.0179 nan 0.0010 0.0002
## 500 1.0092 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3122 nan 0.0100 0.0039
## 2 1.3041 nan 0.0100 0.0037
## 3 1.2973 nan 0.0100 0.0031
## 4 1.2900 nan 0.0100 0.0031
## 5 1.2825 nan 0.0100 0.0033
## 6 1.2748 nan 0.0100 0.0035
## 7 1.2679 nan 0.0100 0.0027
## 8 1.2608 nan 0.0100 0.0032
## 9 1.2536 nan 0.0100 0.0031
## 10 1.2460 nan 0.0100 0.0031
## 20 1.1829 nan 0.0100 0.0026
## 40 1.0789 nan 0.0100 0.0019
## 60 1.0016 nan 0.0100 0.0016
## 80 0.9369 nan 0.0100 0.0009
## 100 0.8881 nan 0.0100 0.0009
## 120 0.8468 nan 0.0100 0.0008
## 140 0.8130 nan 0.0100 0.0003
## 160 0.7844 nan 0.0100 0.0004
## 180 0.7585 nan 0.0100 0.0002
## 200 0.7373 nan 0.0100 0.0003
## 220 0.7174 nan 0.0100 0.0001
## 240 0.6999 nan 0.0100 0.0001
## 260 0.6843 nan 0.0100 -0.0001
## 280 0.6696 nan 0.0100 0.0000
## 300 0.6565 nan 0.0100 0.0002
## 320 0.6440 nan 0.0100 -0.0001
## 340 0.6330 nan 0.0100 -0.0001
## 360 0.6230 nan 0.0100 -0.0001
## 380 0.6121 nan 0.0100 -0.0000
## 400 0.6027 nan 0.0100 0.0001
## 420 0.5940 nan 0.0100 -0.0001
## 440 0.5843 nan 0.0100 0.0001
## 460 0.5752 nan 0.0100 0.0000
## 480 0.5662 nan 0.0100 -0.0001
## 500 0.5573 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3131 nan 0.0100 0.0036
## 2 1.3047 nan 0.0100 0.0038
## 3 1.2964 nan 0.0100 0.0040
## 4 1.2900 nan 0.0100 0.0028
## 5 1.2830 nan 0.0100 0.0033
## 6 1.2759 nan 0.0100 0.0033
## 7 1.2686 nan 0.0100 0.0033
## 8 1.2621 nan 0.0100 0.0030
## 9 1.2546 nan 0.0100 0.0033
## 10 1.2474 nan 0.0100 0.0031
## 20 1.1838 nan 0.0100 0.0025
## 40 1.0797 nan 0.0100 0.0020
## 60 0.9986 nan 0.0100 0.0013
## 80 0.9356 nan 0.0100 0.0013
## 100 0.8861 nan 0.0100 0.0010
## 120 0.8451 nan 0.0100 0.0006
## 140 0.8116 nan 0.0100 0.0004
## 160 0.7839 nan 0.0100 0.0003
## 180 0.7599 nan 0.0100 0.0003
## 200 0.7395 nan 0.0100 0.0002
## 220 0.7213 nan 0.0100 0.0002
## 240 0.7049 nan 0.0100 0.0002
## 260 0.6900 nan 0.0100 0.0000
## 280 0.6764 nan 0.0100 0.0001
## 300 0.6629 nan 0.0100 0.0000
## 320 0.6515 nan 0.0100 0.0000
## 340 0.6394 nan 0.0100 -0.0001
## 360 0.6299 nan 0.0100 0.0000
## 380 0.6200 nan 0.0100 -0.0001
## 400 0.6103 nan 0.0100 0.0000
## 420 0.6002 nan 0.0100 -0.0001
## 440 0.5907 nan 0.0100 -0.0000
## 460 0.5826 nan 0.0100 0.0000
## 480 0.5743 nan 0.0100 -0.0001
## 500 0.5653 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3127 nan 0.0100 0.0035
## 2 1.3050 nan 0.0100 0.0032
## 3 1.2970 nan 0.0100 0.0036
## 4 1.2889 nan 0.0100 0.0036
## 5 1.2815 nan 0.0100 0.0033
## 6 1.2733 nan 0.0100 0.0035
## 7 1.2662 nan 0.0100 0.0029
## 8 1.2595 nan 0.0100 0.0026
## 9 1.2528 nan 0.0100 0.0031
## 10 1.2458 nan 0.0100 0.0028
## 20 1.1820 nan 0.0100 0.0027
## 40 1.0790 nan 0.0100 0.0018
## 60 1.0013 nan 0.0100 0.0013
## 80 0.9380 nan 0.0100 0.0009
## 100 0.8891 nan 0.0100 0.0009
## 120 0.8483 nan 0.0100 0.0007
## 140 0.8142 nan 0.0100 0.0005
## 160 0.7857 nan 0.0100 0.0004
## 180 0.7619 nan 0.0100 0.0003
## 200 0.7397 nan 0.0100 0.0003
## 220 0.7207 nan 0.0100 0.0001
## 240 0.7048 nan 0.0100 -0.0002
## 260 0.6895 nan 0.0100 -0.0000
## 280 0.6756 nan 0.0100 0.0001
## 300 0.6628 nan 0.0100 0.0000
## 320 0.6514 nan 0.0100 0.0000
## 340 0.6411 nan 0.0100 -0.0000
## 360 0.6306 nan 0.0100 -0.0002
## 380 0.6201 nan 0.0100 0.0001
## 400 0.6108 nan 0.0100 -0.0002
## 420 0.6018 nan 0.0100 -0.0002
## 440 0.5938 nan 0.0100 0.0000
## 460 0.5862 nan 0.0100 -0.0000
## 480 0.5786 nan 0.0100 -0.0002
## 500 0.5704 nan 0.0100 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3116 nan 0.0100 0.0041
## 2 1.3025 nan 0.0100 0.0042
## 3 1.2931 nan 0.0100 0.0038
## 4 1.2845 nan 0.0100 0.0034
## 5 1.2768 nan 0.0100 0.0031
## 6 1.2685 nan 0.0100 0.0035
## 7 1.2607 nan 0.0100 0.0033
## 8 1.2535 nan 0.0100 0.0033
## 9 1.2461 nan 0.0100 0.0033
## 10 1.2385 nan 0.0100 0.0034
## 20 1.1692 nan 0.0100 0.0028
## 40 1.0603 nan 0.0100 0.0016
## 60 0.9769 nan 0.0100 0.0014
## 80 0.9108 nan 0.0100 0.0011
## 100 0.8587 nan 0.0100 0.0010
## 120 0.8165 nan 0.0100 0.0008
## 140 0.7813 nan 0.0100 0.0006
## 160 0.7496 nan 0.0100 0.0003
## 180 0.7244 nan 0.0100 0.0003
## 200 0.7009 nan 0.0100 0.0001
## 220 0.6807 nan 0.0100 0.0001
## 240 0.6625 nan 0.0100 0.0002
## 260 0.6450 nan 0.0100 0.0001
## 280 0.6293 nan 0.0100 0.0000
## 300 0.6139 nan 0.0100 -0.0001
## 320 0.6011 nan 0.0100 -0.0001
## 340 0.5877 nan 0.0100 0.0001
## 360 0.5764 nan 0.0100 0.0001
## 380 0.5653 nan 0.0100 -0.0001
## 400 0.5538 nan 0.0100 -0.0000
## 420 0.5425 nan 0.0100 0.0001
## 440 0.5327 nan 0.0100 0.0000
## 460 0.5231 nan 0.0100 -0.0000
## 480 0.5141 nan 0.0100 -0.0000
## 500 0.5056 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3122 nan 0.0100 0.0039
## 2 1.3034 nan 0.0100 0.0042
## 3 1.2946 nan 0.0100 0.0037
## 4 1.2860 nan 0.0100 0.0040
## 5 1.2780 nan 0.0100 0.0035
## 6 1.2703 nan 0.0100 0.0032
## 7 1.2623 nan 0.0100 0.0036
## 8 1.2548 nan 0.0100 0.0038
## 9 1.2468 nan 0.0100 0.0033
## 10 1.2393 nan 0.0100 0.0034
## 20 1.1718 nan 0.0100 0.0030
## 40 1.0631 nan 0.0100 0.0018
## 60 0.9811 nan 0.0100 0.0016
## 80 0.9158 nan 0.0100 0.0013
## 100 0.8616 nan 0.0100 0.0007
## 120 0.8181 nan 0.0100 0.0008
## 140 0.7836 nan 0.0100 0.0005
## 160 0.7548 nan 0.0100 0.0003
## 180 0.7285 nan 0.0100 0.0002
## 200 0.7048 nan 0.0100 -0.0000
## 220 0.6849 nan 0.0100 0.0002
## 240 0.6669 nan 0.0100 0.0004
## 260 0.6499 nan 0.0100 0.0000
## 280 0.6351 nan 0.0100 0.0001
## 300 0.6209 nan 0.0100 0.0003
## 320 0.6076 nan 0.0100 -0.0001
## 340 0.5954 nan 0.0100 -0.0001
## 360 0.5834 nan 0.0100 0.0001
## 380 0.5723 nan 0.0100 -0.0002
## 400 0.5621 nan 0.0100 -0.0002
## 420 0.5522 nan 0.0100 -0.0001
## 440 0.5425 nan 0.0100 -0.0000
## 460 0.5326 nan 0.0100 -0.0002
## 480 0.5228 nan 0.0100 -0.0001
## 500 0.5139 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3121 nan 0.0100 0.0039
## 2 1.3036 nan 0.0100 0.0040
## 3 1.2949 nan 0.0100 0.0039
## 4 1.2866 nan 0.0100 0.0036
## 5 1.2793 nan 0.0100 0.0033
## 6 1.2711 nan 0.0100 0.0035
## 7 1.2630 nan 0.0100 0.0037
## 8 1.2557 nan 0.0100 0.0033
## 9 1.2489 nan 0.0100 0.0031
## 10 1.2412 nan 0.0100 0.0034
## 20 1.1745 nan 0.0100 0.0025
## 40 1.0647 nan 0.0100 0.0019
## 60 0.9813 nan 0.0100 0.0014
## 80 0.9169 nan 0.0100 0.0012
## 100 0.8654 nan 0.0100 0.0007
## 120 0.8232 nan 0.0100 0.0005
## 140 0.7892 nan 0.0100 0.0004
## 160 0.7596 nan 0.0100 0.0005
## 180 0.7351 nan 0.0100 0.0003
## 200 0.7133 nan 0.0100 0.0004
## 220 0.6940 nan 0.0100 0.0001
## 240 0.6772 nan 0.0100 0.0001
## 260 0.6611 nan 0.0100 0.0000
## 280 0.6461 nan 0.0100 0.0001
## 300 0.6314 nan 0.0100 0.0001
## 320 0.6191 nan 0.0100 0.0001
## 340 0.6066 nan 0.0100 -0.0001
## 360 0.5946 nan 0.0100 -0.0000
## 380 0.5846 nan 0.0100 -0.0000
## 400 0.5734 nan 0.0100 -0.0000
## 420 0.5626 nan 0.0100 -0.0001
## 440 0.5530 nan 0.0100 -0.0001
## 460 0.5429 nan 0.0100 0.0001
## 480 0.5335 nan 0.0100 0.0001
## 500 0.5245 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3115 nan 0.0100 0.0043
## 2 1.3027 nan 0.0100 0.0040
## 3 1.2937 nan 0.0100 0.0044
## 4 1.2856 nan 0.0100 0.0036
## 5 1.2775 nan 0.0100 0.0036
## 6 1.2688 nan 0.0100 0.0040
## 7 1.2604 nan 0.0100 0.0037
## 8 1.2515 nan 0.0100 0.0036
## 9 1.2430 nan 0.0100 0.0038
## 10 1.2348 nan 0.0100 0.0035
## 20 1.1648 nan 0.0100 0.0030
## 40 1.0496 nan 0.0100 0.0019
## 60 0.9615 nan 0.0100 0.0014
## 80 0.8931 nan 0.0100 0.0012
## 100 0.8387 nan 0.0100 0.0009
## 120 0.7926 nan 0.0100 0.0004
## 140 0.7536 nan 0.0100 0.0006
## 160 0.7215 nan 0.0100 0.0003
## 180 0.6953 nan 0.0100 0.0002
## 200 0.6712 nan 0.0100 0.0003
## 220 0.6479 nan 0.0100 0.0003
## 240 0.6290 nan 0.0100 0.0004
## 260 0.6100 nan 0.0100 0.0001
## 280 0.5923 nan 0.0100 0.0001
## 300 0.5751 nan 0.0100 -0.0000
## 320 0.5596 nan 0.0100 0.0002
## 340 0.5462 nan 0.0100 -0.0000
## 360 0.5321 nan 0.0100 -0.0001
## 380 0.5202 nan 0.0100 0.0000
## 400 0.5079 nan 0.0100 -0.0000
## 420 0.4969 nan 0.0100 0.0001
## 440 0.4855 nan 0.0100 0.0000
## 460 0.4751 nan 0.0100 -0.0001
## 480 0.4643 nan 0.0100 0.0000
## 500 0.4540 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3114 nan 0.0100 0.0040
## 2 1.3026 nan 0.0100 0.0039
## 3 1.2939 nan 0.0100 0.0040
## 4 1.2851 nan 0.0100 0.0035
## 5 1.2769 nan 0.0100 0.0039
## 6 1.2690 nan 0.0100 0.0033
## 7 1.2614 nan 0.0100 0.0034
## 8 1.2537 nan 0.0100 0.0033
## 9 1.2460 nan 0.0100 0.0034
## 10 1.2380 nan 0.0100 0.0036
## 20 1.1663 nan 0.0100 0.0030
## 40 1.0530 nan 0.0100 0.0021
## 60 0.9661 nan 0.0100 0.0014
## 80 0.8985 nan 0.0100 0.0009
## 100 0.8440 nan 0.0100 0.0010
## 120 0.8002 nan 0.0100 0.0007
## 140 0.7635 nan 0.0100 0.0003
## 160 0.7314 nan 0.0100 0.0005
## 180 0.7040 nan 0.0100 0.0002
## 200 0.6803 nan 0.0100 0.0002
## 220 0.6604 nan 0.0100 0.0000
## 240 0.6406 nan 0.0100 -0.0002
## 260 0.6221 nan 0.0100 0.0001
## 280 0.6048 nan 0.0100 0.0001
## 300 0.5898 nan 0.0100 -0.0001
## 320 0.5758 nan 0.0100 -0.0000
## 340 0.5621 nan 0.0100 0.0001
## 360 0.5477 nan 0.0100 0.0000
## 380 0.5344 nan 0.0100 0.0001
## 400 0.5220 nan 0.0100 0.0002
## 420 0.5098 nan 0.0100 0.0001
## 440 0.4994 nan 0.0100 0.0000
## 460 0.4896 nan 0.0100 -0.0000
## 480 0.4797 nan 0.0100 0.0000
## 500 0.4691 nan 0.0100 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3110 nan 0.0100 0.0045
## 2 1.3017 nan 0.0100 0.0042
## 3 1.2932 nan 0.0100 0.0036
## 4 1.2844 nan 0.0100 0.0039
## 5 1.2761 nan 0.0100 0.0037
## 6 1.2681 nan 0.0100 0.0034
## 7 1.2597 nan 0.0100 0.0040
## 8 1.2520 nan 0.0100 0.0034
## 9 1.2437 nan 0.0100 0.0040
## 10 1.2363 nan 0.0100 0.0032
## 20 1.1660 nan 0.0100 0.0028
## 40 1.0544 nan 0.0100 0.0016
## 60 0.9678 nan 0.0100 0.0017
## 80 0.9021 nan 0.0100 0.0011
## 100 0.8469 nan 0.0100 0.0009
## 120 0.8047 nan 0.0100 0.0006
## 140 0.7676 nan 0.0100 0.0004
## 160 0.7369 nan 0.0100 0.0004
## 180 0.7115 nan 0.0100 0.0003
## 200 0.6884 nan 0.0100 0.0003
## 220 0.6680 nan 0.0100 -0.0000
## 240 0.6493 nan 0.0100 0.0002
## 260 0.6327 nan 0.0100 0.0000
## 280 0.6159 nan 0.0100 0.0000
## 300 0.6003 nan 0.0100 0.0001
## 320 0.5873 nan 0.0100 -0.0001
## 340 0.5746 nan 0.0100 0.0002
## 360 0.5615 nan 0.0100 -0.0001
## 380 0.5495 nan 0.0100 -0.0000
## 400 0.5384 nan 0.0100 0.0000
## 420 0.5272 nan 0.0100 -0.0001
## 440 0.5167 nan 0.0100 -0.0000
## 460 0.5059 nan 0.0100 -0.0001
## 480 0.4962 nan 0.0100 -0.0000
## 500 0.4873 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2468 nan 0.1000 0.0341
## 2 1.1834 nan 0.1000 0.0235
## 3 1.1221 nan 0.1000 0.0258
## 4 1.0735 nan 0.1000 0.0201
## 5 1.0316 nan 0.1000 0.0182
## 6 0.9951 nan 0.1000 0.0140
## 7 0.9626 nan 0.1000 0.0130
## 8 0.9320 nan 0.1000 0.0107
## 9 0.9045 nan 0.1000 0.0100
## 10 0.8812 nan 0.1000 0.0096
## 20 0.7335 nan 0.1000 0.0020
## 40 0.6046 nan 0.1000 -0.0006
## 60 0.5233 nan 0.1000 -0.0010
## 80 0.4591 nan 0.1000 -0.0008
## 100 0.4024 nan 0.1000 0.0003
## 120 0.3538 nan 0.1000 -0.0002
## 140 0.3194 nan 0.1000 -0.0005
## 160 0.2860 nan 0.1000 -0.0001
## 180 0.2575 nan 0.1000 -0.0004
## 200 0.2326 nan 0.1000 -0.0002
## 220 0.2085 nan 0.1000 -0.0007
## 240 0.1906 nan 0.1000 -0.0006
## 260 0.1731 nan 0.1000 -0.0006
## 280 0.1591 nan 0.1000 -0.0004
## 300 0.1439 nan 0.1000 -0.0004
## 320 0.1325 nan 0.1000 0.0001
## 340 0.1205 nan 0.1000 -0.0003
## 360 0.1101 nan 0.1000 -0.0005
## 380 0.1013 nan 0.1000 -0.0003
## 400 0.0928 nan 0.1000 -0.0001
## 420 0.0851 nan 0.1000 -0.0001
## 440 0.0790 nan 0.1000 -0.0001
## 460 0.0729 nan 0.1000 0.0000
## 480 0.0671 nan 0.1000 -0.0001
## 500 0.0615 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2478 nan 0.1000 0.0328
## 2 1.1834 nan 0.1000 0.0296
## 3 1.1244 nan 0.1000 0.0260
## 4 1.0736 nan 0.1000 0.0217
## 5 1.0293 nan 0.1000 0.0180
## 6 0.9950 nan 0.1000 0.0137
## 7 0.9640 nan 0.1000 0.0121
## 8 0.9377 nan 0.1000 0.0106
## 9 0.9100 nan 0.1000 0.0110
## 10 0.8833 nan 0.1000 0.0101
## 20 0.7386 nan 0.1000 0.0030
## 40 0.6094 nan 0.1000 -0.0001
## 60 0.5323 nan 0.1000 -0.0005
## 80 0.4671 nan 0.1000 -0.0010
## 100 0.4143 nan 0.1000 -0.0012
## 120 0.3653 nan 0.1000 -0.0006
## 140 0.3244 nan 0.1000 -0.0011
## 160 0.2955 nan 0.1000 -0.0012
## 180 0.2676 nan 0.1000 -0.0004
## 200 0.2401 nan 0.1000 -0.0002
## 220 0.2191 nan 0.1000 -0.0007
## 240 0.1991 nan 0.1000 -0.0007
## 260 0.1814 nan 0.1000 -0.0000
## 280 0.1689 nan 0.1000 -0.0004
## 300 0.1563 nan 0.1000 -0.0002
## 320 0.1424 nan 0.1000 -0.0004
## 340 0.1301 nan 0.1000 -0.0006
## 360 0.1205 nan 0.1000 -0.0005
## 380 0.1111 nan 0.1000 -0.0004
## 400 0.1014 nan 0.1000 -0.0001
## 420 0.0937 nan 0.1000 -0.0000
## 440 0.0859 nan 0.1000 -0.0002
## 460 0.0788 nan 0.1000 -0.0002
## 480 0.0730 nan 0.1000 -0.0003
## 500 0.0677 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2446 nan 0.1000 0.0350
## 2 1.1811 nan 0.1000 0.0283
## 3 1.1292 nan 0.1000 0.0240
## 4 1.0800 nan 0.1000 0.0215
## 5 1.0386 nan 0.1000 0.0182
## 6 1.0017 nan 0.1000 0.0153
## 7 0.9710 nan 0.1000 0.0142
## 8 0.9389 nan 0.1000 0.0145
## 9 0.9153 nan 0.1000 0.0107
## 10 0.8911 nan 0.1000 0.0101
## 20 0.7474 nan 0.1000 0.0032
## 40 0.6205 nan 0.1000 -0.0004
## 60 0.5439 nan 0.1000 -0.0012
## 80 0.4863 nan 0.1000 -0.0010
## 100 0.4269 nan 0.1000 -0.0006
## 120 0.3867 nan 0.1000 -0.0016
## 140 0.3477 nan 0.1000 0.0001
## 160 0.3125 nan 0.1000 -0.0013
## 180 0.2802 nan 0.1000 -0.0002
## 200 0.2550 nan 0.1000 -0.0004
## 220 0.2323 nan 0.1000 -0.0009
## 240 0.2127 nan 0.1000 -0.0005
## 260 0.1960 nan 0.1000 -0.0008
## 280 0.1811 nan 0.1000 -0.0005
## 300 0.1674 nan 0.1000 -0.0006
## 320 0.1543 nan 0.1000 -0.0005
## 340 0.1430 nan 0.1000 -0.0003
## 360 0.1310 nan 0.1000 -0.0004
## 380 0.1211 nan 0.1000 -0.0002
## 400 0.1125 nan 0.1000 -0.0004
## 420 0.1037 nan 0.1000 -0.0003
## 440 0.0960 nan 0.1000 -0.0005
## 460 0.0887 nan 0.1000 -0.0002
## 480 0.0825 nan 0.1000 -0.0002
## 500 0.0774 nan 0.1000 -0.0004
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2368 nan 0.1000 0.0357
## 2 1.1694 nan 0.1000 0.0287
## 3 1.1077 nan 0.1000 0.0287
## 4 1.0546 nan 0.1000 0.0229
## 5 1.0169 nan 0.1000 0.0160
## 6 0.9760 nan 0.1000 0.0156
## 7 0.9377 nan 0.1000 0.0141
## 8 0.9068 nan 0.1000 0.0076
## 9 0.8787 nan 0.1000 0.0096
## 10 0.8570 nan 0.1000 0.0079
## 20 0.6945 nan 0.1000 0.0024
## 40 0.5458 nan 0.1000 -0.0004
## 60 0.4578 nan 0.1000 0.0005
## 80 0.3871 nan 0.1000 0.0000
## 100 0.3310 nan 0.1000 0.0001
## 120 0.2872 nan 0.1000 -0.0011
## 140 0.2503 nan 0.1000 -0.0005
## 160 0.2213 nan 0.1000 -0.0007
## 180 0.1978 nan 0.1000 -0.0008
## 200 0.1754 nan 0.1000 -0.0004
## 220 0.1563 nan 0.1000 -0.0004
## 240 0.1402 nan 0.1000 -0.0004
## 260 0.1252 nan 0.1000 0.0000
## 280 0.1135 nan 0.1000 -0.0003
## 300 0.1004 nan 0.1000 -0.0001
## 320 0.0902 nan 0.1000 -0.0002
## 340 0.0817 nan 0.1000 -0.0004
## 360 0.0737 nan 0.1000 -0.0001
## 380 0.0662 nan 0.1000 -0.0003
## 400 0.0595 nan 0.1000 -0.0001
## 420 0.0535 nan 0.1000 -0.0002
## 440 0.0486 nan 0.1000 -0.0002
## 460 0.0439 nan 0.1000 -0.0001
## 480 0.0396 nan 0.1000 -0.0001
## 500 0.0359 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2405 nan 0.1000 0.0368
## 2 1.1686 nan 0.1000 0.0346
## 3 1.1090 nan 0.1000 0.0293
## 4 1.0579 nan 0.1000 0.0215
## 5 1.0141 nan 0.1000 0.0159
## 6 0.9773 nan 0.1000 0.0153
## 7 0.9434 nan 0.1000 0.0136
## 8 0.9140 nan 0.1000 0.0110
## 9 0.8802 nan 0.1000 0.0124
## 10 0.8554 nan 0.1000 0.0092
## 20 0.7003 nan 0.1000 0.0010
## 40 0.5591 nan 0.1000 -0.0008
## 60 0.4735 nan 0.1000 -0.0003
## 80 0.4054 nan 0.1000 -0.0005
## 100 0.3474 nan 0.1000 0.0001
## 120 0.3021 nan 0.1000 -0.0001
## 140 0.2605 nan 0.1000 -0.0008
## 160 0.2302 nan 0.1000 -0.0004
## 180 0.2027 nan 0.1000 -0.0008
## 200 0.1811 nan 0.1000 -0.0006
## 220 0.1598 nan 0.1000 0.0001
## 240 0.1415 nan 0.1000 -0.0010
## 260 0.1265 nan 0.1000 -0.0006
## 280 0.1140 nan 0.1000 -0.0002
## 300 0.1007 nan 0.1000 -0.0001
## 320 0.0910 nan 0.1000 -0.0004
## 340 0.0821 nan 0.1000 -0.0001
## 360 0.0745 nan 0.1000 -0.0002
## 380 0.0684 nan 0.1000 -0.0001
## 400 0.0608 nan 0.1000 -0.0002
## 420 0.0552 nan 0.1000 -0.0002
## 440 0.0500 nan 0.1000 -0.0002
## 460 0.0454 nan 0.1000 -0.0002
## 480 0.0413 nan 0.1000 -0.0001
## 500 0.0376 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2418 nan 0.1000 0.0347
## 2 1.1762 nan 0.1000 0.0328
## 3 1.1105 nan 0.1000 0.0273
## 4 1.0616 nan 0.1000 0.0185
## 5 1.0153 nan 0.1000 0.0215
## 6 0.9722 nan 0.1000 0.0160
## 7 0.9378 nan 0.1000 0.0139
## 8 0.9142 nan 0.1000 0.0090
## 9 0.8889 nan 0.1000 0.0109
## 10 0.8607 nan 0.1000 0.0118
## 20 0.7068 nan 0.1000 0.0024
## 40 0.5751 nan 0.1000 -0.0014
## 60 0.4903 nan 0.1000 -0.0009
## 80 0.4284 nan 0.1000 -0.0004
## 100 0.3769 nan 0.1000 -0.0012
## 120 0.3304 nan 0.1000 -0.0006
## 140 0.2922 nan 0.1000 -0.0016
## 160 0.2558 nan 0.1000 -0.0007
## 180 0.2266 nan 0.1000 -0.0005
## 200 0.2031 nan 0.1000 -0.0004
## 220 0.1808 nan 0.1000 -0.0009
## 240 0.1618 nan 0.1000 -0.0006
## 260 0.1451 nan 0.1000 -0.0007
## 280 0.1310 nan 0.1000 -0.0003
## 300 0.1188 nan 0.1000 -0.0003
## 320 0.1069 nan 0.1000 -0.0006
## 340 0.0962 nan 0.1000 -0.0003
## 360 0.0876 nan 0.1000 -0.0003
## 380 0.0793 nan 0.1000 -0.0002
## 400 0.0717 nan 0.1000 -0.0002
## 420 0.0647 nan 0.1000 -0.0001
## 440 0.0581 nan 0.1000 -0.0002
## 460 0.0534 nan 0.1000 -0.0002
## 480 0.0487 nan 0.1000 -0.0001
## 500 0.0445 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2376 nan 0.1000 0.0362
## 2 1.1634 nan 0.1000 0.0316
## 3 1.0971 nan 0.1000 0.0288
## 4 1.0398 nan 0.1000 0.0234
## 5 0.9940 nan 0.1000 0.0191
## 6 0.9582 nan 0.1000 0.0151
## 7 0.9272 nan 0.1000 0.0109
## 8 0.8945 nan 0.1000 0.0132
## 9 0.8684 nan 0.1000 0.0098
## 10 0.8423 nan 0.1000 0.0090
## 20 0.6711 nan 0.1000 0.0029
## 40 0.5180 nan 0.1000 -0.0003
## 60 0.4239 nan 0.1000 -0.0016
## 80 0.3567 nan 0.1000 -0.0004
## 100 0.2943 nan 0.1000 -0.0004
## 120 0.2479 nan 0.1000 -0.0003
## 140 0.2162 nan 0.1000 -0.0005
## 160 0.1842 nan 0.1000 -0.0002
## 180 0.1600 nan 0.1000 -0.0001
## 200 0.1396 nan 0.1000 -0.0000
## 220 0.1228 nan 0.1000 -0.0002
## 240 0.1077 nan 0.1000 -0.0008
## 260 0.0947 nan 0.1000 -0.0001
## 280 0.0821 nan 0.1000 -0.0001
## 300 0.0720 nan 0.1000 -0.0000
## 320 0.0632 nan 0.1000 0.0001
## 340 0.0563 nan 0.1000 -0.0002
## 360 0.0490 nan 0.1000 -0.0000
## 380 0.0433 nan 0.1000 0.0000
## 400 0.0385 nan 0.1000 0.0000
## 420 0.0341 nan 0.1000 -0.0000
## 440 0.0304 nan 0.1000 -0.0001
## 460 0.0265 nan 0.1000 -0.0000
## 480 0.0235 nan 0.1000 -0.0001
## 500 0.0210 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2376 nan 0.1000 0.0374
## 2 1.1699 nan 0.1000 0.0318
## 3 1.1079 nan 0.1000 0.0257
## 4 1.0550 nan 0.1000 0.0245
## 5 1.0074 nan 0.1000 0.0199
## 6 0.9664 nan 0.1000 0.0160
## 7 0.9312 nan 0.1000 0.0127
## 8 0.8967 nan 0.1000 0.0130
## 9 0.8722 nan 0.1000 0.0100
## 10 0.8477 nan 0.1000 0.0076
## 20 0.6880 nan 0.1000 0.0027
## 40 0.5311 nan 0.1000 0.0009
## 60 0.4227 nan 0.1000 -0.0002
## 80 0.3560 nan 0.1000 -0.0009
## 100 0.2962 nan 0.1000 -0.0004
## 120 0.2492 nan 0.1000 -0.0004
## 140 0.2099 nan 0.1000 -0.0007
## 160 0.1786 nan 0.1000 -0.0005
## 180 0.1545 nan 0.1000 -0.0004
## 200 0.1317 nan 0.1000 -0.0006
## 220 0.1143 nan 0.1000 -0.0004
## 240 0.0994 nan 0.1000 -0.0003
## 260 0.0886 nan 0.1000 -0.0005
## 280 0.0778 nan 0.1000 -0.0002
## 300 0.0686 nan 0.1000 -0.0002
## 320 0.0607 nan 0.1000 -0.0001
## 340 0.0545 nan 0.1000 -0.0001
## 360 0.0484 nan 0.1000 -0.0002
## 380 0.0433 nan 0.1000 -0.0002
## 400 0.0386 nan 0.1000 -0.0002
## 420 0.0340 nan 0.1000 -0.0000
## 440 0.0304 nan 0.1000 -0.0000
## 460 0.0272 nan 0.1000 -0.0000
## 480 0.0245 nan 0.1000 -0.0001
## 500 0.0216 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2352 nan 0.1000 0.0333
## 2 1.1661 nan 0.1000 0.0334
## 3 1.1136 nan 0.1000 0.0236
## 4 1.0588 nan 0.1000 0.0245
## 5 1.0065 nan 0.1000 0.0203
## 6 0.9671 nan 0.1000 0.0162
## 7 0.9315 nan 0.1000 0.0130
## 8 0.8992 nan 0.1000 0.0125
## 9 0.8720 nan 0.1000 0.0109
## 10 0.8491 nan 0.1000 0.0077
## 20 0.6896 nan 0.1000 0.0010
## 40 0.5499 nan 0.1000 -0.0014
## 60 0.4583 nan 0.1000 -0.0016
## 80 0.3850 nan 0.1000 -0.0011
## 100 0.3288 nan 0.1000 -0.0004
## 120 0.2830 nan 0.1000 -0.0013
## 140 0.2413 nan 0.1000 -0.0011
## 160 0.2059 nan 0.1000 -0.0006
## 180 0.1801 nan 0.1000 -0.0004
## 200 0.1578 nan 0.1000 -0.0004
## 220 0.1384 nan 0.1000 -0.0005
## 240 0.1228 nan 0.1000 -0.0001
## 260 0.1086 nan 0.1000 0.0000
## 280 0.0964 nan 0.1000 -0.0002
## 300 0.0845 nan 0.1000 -0.0001
## 320 0.0742 nan 0.1000 -0.0003
## 340 0.0655 nan 0.1000 -0.0001
## 360 0.0585 nan 0.1000 -0.0003
## 380 0.0519 nan 0.1000 -0.0003
## 400 0.0462 nan 0.1000 -0.0002
## 420 0.0416 nan 0.1000 -0.0000
## 440 0.0371 nan 0.1000 -0.0002
## 460 0.0331 nan 0.1000 -0.0001
## 480 0.0301 nan 0.1000 -0.0002
## 500 0.0269 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3197 nan 0.0010 0.0005
## 2 1.3189 nan 0.0010 0.0003
## 3 1.3180 nan 0.0010 0.0004
## 4 1.3172 nan 0.0010 0.0004
## 5 1.3163 nan 0.0010 0.0004
## 6 1.3154 nan 0.0010 0.0004
## 7 1.3146 nan 0.0010 0.0004
## 8 1.3137 nan 0.0010 0.0004
## 9 1.3128 nan 0.0010 0.0004
## 10 1.3119 nan 0.0010 0.0004
## 20 1.3034 nan 0.0010 0.0004
## 40 1.2869 nan 0.0010 0.0004
## 60 1.2711 nan 0.0010 0.0004
## 80 1.2558 nan 0.0010 0.0004
## 100 1.2406 nan 0.0010 0.0003
## 120 1.2266 nan 0.0010 0.0003
## 140 1.2127 nan 0.0010 0.0003
## 160 1.1993 nan 0.0010 0.0002
## 180 1.1867 nan 0.0010 0.0002
## 200 1.1740 nan 0.0010 0.0003
## 220 1.1617 nan 0.0010 0.0002
## 240 1.1496 nan 0.0010 0.0002
## 260 1.1382 nan 0.0010 0.0002
## 280 1.1270 nan 0.0010 0.0002
## 300 1.1160 nan 0.0010 0.0002
## 320 1.1055 nan 0.0010 0.0002
## 340 1.0952 nan 0.0010 0.0002
## 360 1.0855 nan 0.0010 0.0002
## 380 1.0759 nan 0.0010 0.0002
## 400 1.0666 nan 0.0010 0.0002
## 420 1.0574 nan 0.0010 0.0002
## 440 1.0484 nan 0.0010 0.0002
## 460 1.0394 nan 0.0010 0.0002
## 480 1.0309 nan 0.0010 0.0002
## 500 1.0226 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3190 nan 0.0010 0.0004
## 3 1.3181 nan 0.0010 0.0004
## 4 1.3173 nan 0.0010 0.0004
## 5 1.3165 nan 0.0010 0.0003
## 6 1.3158 nan 0.0010 0.0003
## 7 1.3150 nan 0.0010 0.0003
## 8 1.3142 nan 0.0010 0.0004
## 9 1.3134 nan 0.0010 0.0003
## 10 1.3126 nan 0.0010 0.0004
## 20 1.3042 nan 0.0010 0.0004
## 40 1.2878 nan 0.0010 0.0003
## 60 1.2719 nan 0.0010 0.0003
## 80 1.2566 nan 0.0010 0.0003
## 100 1.2417 nan 0.0010 0.0003
## 120 1.2275 nan 0.0010 0.0003
## 140 1.2136 nan 0.0010 0.0003
## 160 1.2001 nan 0.0010 0.0003
## 180 1.1875 nan 0.0010 0.0003
## 200 1.1752 nan 0.0010 0.0002
## 220 1.1629 nan 0.0010 0.0003
## 240 1.1511 nan 0.0010 0.0002
## 260 1.1399 nan 0.0010 0.0003
## 280 1.1287 nan 0.0010 0.0002
## 300 1.1178 nan 0.0010 0.0003
## 320 1.1071 nan 0.0010 0.0002
## 340 1.0970 nan 0.0010 0.0002
## 360 1.0870 nan 0.0010 0.0003
## 380 1.0774 nan 0.0010 0.0002
## 400 1.0679 nan 0.0010 0.0002
## 420 1.0588 nan 0.0010 0.0002
## 440 1.0498 nan 0.0010 0.0002
## 460 1.0411 nan 0.0010 0.0002
## 480 1.0323 nan 0.0010 0.0002
## 500 1.0240 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3190 nan 0.0010 0.0004
## 3 1.3183 nan 0.0010 0.0003
## 4 1.3172 nan 0.0010 0.0004
## 5 1.3164 nan 0.0010 0.0004
## 6 1.3156 nan 0.0010 0.0004
## 7 1.3149 nan 0.0010 0.0003
## 8 1.3140 nan 0.0010 0.0004
## 9 1.3132 nan 0.0010 0.0004
## 10 1.3123 nan 0.0010 0.0004
## 20 1.3040 nan 0.0010 0.0004
## 40 1.2875 nan 0.0010 0.0003
## 60 1.2718 nan 0.0010 0.0004
## 80 1.2571 nan 0.0010 0.0003
## 100 1.2424 nan 0.0010 0.0002
## 120 1.2283 nan 0.0010 0.0003
## 140 1.2150 nan 0.0010 0.0003
## 160 1.2018 nan 0.0010 0.0002
## 180 1.1888 nan 0.0010 0.0003
## 200 1.1763 nan 0.0010 0.0003
## 220 1.1642 nan 0.0010 0.0003
## 240 1.1525 nan 0.0010 0.0002
## 260 1.1411 nan 0.0010 0.0002
## 280 1.1302 nan 0.0010 0.0002
## 300 1.1195 nan 0.0010 0.0002
## 320 1.1091 nan 0.0010 0.0002
## 340 1.0993 nan 0.0010 0.0002
## 360 1.0894 nan 0.0010 0.0002
## 380 1.0794 nan 0.0010 0.0002
## 400 1.0701 nan 0.0010 0.0002
## 420 1.0610 nan 0.0010 0.0002
## 440 1.0524 nan 0.0010 0.0002
## 460 1.0435 nan 0.0010 0.0002
## 480 1.0351 nan 0.0010 0.0002
## 500 1.0271 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3180 nan 0.0010 0.0004
## 4 1.3171 nan 0.0010 0.0004
## 5 1.3162 nan 0.0010 0.0004
## 6 1.3152 nan 0.0010 0.0004
## 7 1.3143 nan 0.0010 0.0004
## 8 1.3134 nan 0.0010 0.0004
## 9 1.3125 nan 0.0010 0.0004
## 10 1.3115 nan 0.0010 0.0004
## 20 1.3024 nan 0.0010 0.0004
## 40 1.2849 nan 0.0010 0.0004
## 60 1.2680 nan 0.0010 0.0004
## 80 1.2520 nan 0.0010 0.0003
## 100 1.2361 nan 0.0010 0.0003
## 120 1.2211 nan 0.0010 0.0003
## 140 1.2067 nan 0.0010 0.0003
## 160 1.1927 nan 0.0010 0.0003
## 180 1.1788 nan 0.0010 0.0003
## 200 1.1657 nan 0.0010 0.0003
## 220 1.1528 nan 0.0010 0.0002
## 240 1.1403 nan 0.0010 0.0002
## 260 1.1281 nan 0.0010 0.0003
## 280 1.1160 nan 0.0010 0.0003
## 300 1.1041 nan 0.0010 0.0002
## 320 1.0929 nan 0.0010 0.0002
## 340 1.0818 nan 0.0010 0.0002
## 360 1.0712 nan 0.0010 0.0002
## 380 1.0610 nan 0.0010 0.0002
## 400 1.0509 nan 0.0010 0.0002
## 420 1.0411 nan 0.0010 0.0002
## 440 1.0320 nan 0.0010 0.0002
## 460 1.0226 nan 0.0010 0.0002
## 480 1.0134 nan 0.0010 0.0002
## 500 1.0048 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3188 nan 0.0010 0.0004
## 3 1.3180 nan 0.0010 0.0003
## 4 1.3172 nan 0.0010 0.0004
## 5 1.3162 nan 0.0010 0.0004
## 6 1.3153 nan 0.0010 0.0004
## 7 1.3143 nan 0.0010 0.0004
## 8 1.3135 nan 0.0010 0.0004
## 9 1.3126 nan 0.0010 0.0004
## 10 1.3116 nan 0.0010 0.0004
## 20 1.3026 nan 0.0010 0.0003
## 40 1.2855 nan 0.0010 0.0004
## 60 1.2686 nan 0.0010 0.0004
## 80 1.2527 nan 0.0010 0.0004
## 100 1.2373 nan 0.0010 0.0003
## 120 1.2223 nan 0.0010 0.0004
## 140 1.2078 nan 0.0010 0.0003
## 160 1.1932 nan 0.0010 0.0003
## 180 1.1795 nan 0.0010 0.0003
## 200 1.1662 nan 0.0010 0.0003
## 220 1.1531 nan 0.0010 0.0003
## 240 1.1400 nan 0.0010 0.0003
## 260 1.1279 nan 0.0010 0.0002
## 280 1.1163 nan 0.0010 0.0003
## 300 1.1049 nan 0.0010 0.0002
## 320 1.0937 nan 0.0010 0.0003
## 340 1.0829 nan 0.0010 0.0002
## 360 1.0723 nan 0.0010 0.0002
## 380 1.0623 nan 0.0010 0.0002
## 400 1.0525 nan 0.0010 0.0002
## 420 1.0427 nan 0.0010 0.0002
## 440 1.0333 nan 0.0010 0.0002
## 460 1.0238 nan 0.0010 0.0002
## 480 1.0148 nan 0.0010 0.0002
## 500 1.0060 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3199 nan 0.0010 0.0004
## 2 1.3190 nan 0.0010 0.0004
## 3 1.3181 nan 0.0010 0.0004
## 4 1.3173 nan 0.0010 0.0004
## 5 1.3164 nan 0.0010 0.0004
## 6 1.3154 nan 0.0010 0.0004
## 7 1.3146 nan 0.0010 0.0004
## 8 1.3137 nan 0.0010 0.0004
## 9 1.3129 nan 0.0010 0.0004
## 10 1.3120 nan 0.0010 0.0004
## 20 1.3033 nan 0.0010 0.0004
## 40 1.2865 nan 0.0010 0.0004
## 60 1.2702 nan 0.0010 0.0003
## 80 1.2544 nan 0.0010 0.0004
## 100 1.2393 nan 0.0010 0.0003
## 120 1.2247 nan 0.0010 0.0003
## 140 1.2106 nan 0.0010 0.0003
## 160 1.1966 nan 0.0010 0.0003
## 180 1.1831 nan 0.0010 0.0003
## 200 1.1702 nan 0.0010 0.0003
## 220 1.1572 nan 0.0010 0.0003
## 240 1.1448 nan 0.0010 0.0003
## 260 1.1326 nan 0.0010 0.0003
## 280 1.1209 nan 0.0010 0.0002
## 300 1.1096 nan 0.0010 0.0003
## 320 1.0985 nan 0.0010 0.0002
## 340 1.0878 nan 0.0010 0.0002
## 360 1.0774 nan 0.0010 0.0002
## 380 1.0673 nan 0.0010 0.0002
## 400 1.0575 nan 0.0010 0.0002
## 420 1.0480 nan 0.0010 0.0002
## 440 1.0388 nan 0.0010 0.0002
## 460 1.0298 nan 0.0010 0.0002
## 480 1.0210 nan 0.0010 0.0002
## 500 1.0124 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3197 nan 0.0010 0.0004
## 2 1.3186 nan 0.0010 0.0005
## 3 1.3177 nan 0.0010 0.0004
## 4 1.3168 nan 0.0010 0.0004
## 5 1.3159 nan 0.0010 0.0004
## 6 1.3149 nan 0.0010 0.0005
## 7 1.3140 nan 0.0010 0.0004
## 8 1.3131 nan 0.0010 0.0004
## 9 1.3121 nan 0.0010 0.0004
## 10 1.3112 nan 0.0010 0.0004
## 20 1.3021 nan 0.0010 0.0005
## 40 1.2835 nan 0.0010 0.0003
## 60 1.2662 nan 0.0010 0.0003
## 80 1.2494 nan 0.0010 0.0004
## 100 1.2330 nan 0.0010 0.0004
## 120 1.2172 nan 0.0010 0.0003
## 140 1.2022 nan 0.0010 0.0003
## 160 1.1872 nan 0.0010 0.0003
## 180 1.1729 nan 0.0010 0.0003
## 200 1.1589 nan 0.0010 0.0003
## 220 1.1457 nan 0.0010 0.0003
## 240 1.1324 nan 0.0010 0.0003
## 260 1.1195 nan 0.0010 0.0003
## 280 1.1070 nan 0.0010 0.0003
## 300 1.0951 nan 0.0010 0.0002
## 320 1.0835 nan 0.0010 0.0003
## 340 1.0722 nan 0.0010 0.0003
## 360 1.0611 nan 0.0010 0.0002
## 380 1.0499 nan 0.0010 0.0003
## 400 1.0395 nan 0.0010 0.0002
## 420 1.0292 nan 0.0010 0.0002
## 440 1.0191 nan 0.0010 0.0003
## 460 1.0093 nan 0.0010 0.0002
## 480 1.0000 nan 0.0010 0.0002
## 500 0.9909 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3197 nan 0.0010 0.0005
## 2 1.3186 nan 0.0010 0.0004
## 3 1.3177 nan 0.0010 0.0005
## 4 1.3168 nan 0.0010 0.0004
## 5 1.3159 nan 0.0010 0.0004
## 6 1.3149 nan 0.0010 0.0004
## 7 1.3139 nan 0.0010 0.0005
## 8 1.3130 nan 0.0010 0.0004
## 9 1.3121 nan 0.0010 0.0005
## 10 1.3111 nan 0.0010 0.0004
## 20 1.3021 nan 0.0010 0.0004
## 40 1.2841 nan 0.0010 0.0004
## 60 1.2663 nan 0.0010 0.0004
## 80 1.2495 nan 0.0010 0.0004
## 100 1.2330 nan 0.0010 0.0003
## 120 1.2176 nan 0.0010 0.0003
## 140 1.2025 nan 0.0010 0.0003
## 160 1.1878 nan 0.0010 0.0003
## 180 1.1736 nan 0.0010 0.0003
## 200 1.1598 nan 0.0010 0.0003
## 220 1.1460 nan 0.0010 0.0003
## 240 1.1329 nan 0.0010 0.0003
## 260 1.1207 nan 0.0010 0.0003
## 280 1.1084 nan 0.0010 0.0003
## 300 1.0965 nan 0.0010 0.0003
## 320 1.0851 nan 0.0010 0.0003
## 340 1.0738 nan 0.0010 0.0002
## 360 1.0628 nan 0.0010 0.0003
## 380 1.0520 nan 0.0010 0.0002
## 400 1.0418 nan 0.0010 0.0002
## 420 1.0314 nan 0.0010 0.0002
## 440 1.0218 nan 0.0010 0.0002
## 460 1.0122 nan 0.0010 0.0002
## 480 1.0027 nan 0.0010 0.0002
## 500 0.9935 nan 0.0010 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3197 nan 0.0010 0.0004
## 2 1.3188 nan 0.0010 0.0004
## 3 1.3179 nan 0.0010 0.0004
## 4 1.3170 nan 0.0010 0.0004
## 5 1.3160 nan 0.0010 0.0004
## 6 1.3152 nan 0.0010 0.0004
## 7 1.3144 nan 0.0010 0.0004
## 8 1.3134 nan 0.0010 0.0005
## 9 1.3124 nan 0.0010 0.0004
## 10 1.3115 nan 0.0010 0.0005
## 20 1.3025 nan 0.0010 0.0004
## 40 1.2852 nan 0.0010 0.0004
## 60 1.2685 nan 0.0010 0.0004
## 80 1.2521 nan 0.0010 0.0003
## 100 1.2363 nan 0.0010 0.0003
## 120 1.2210 nan 0.0010 0.0004
## 140 1.2063 nan 0.0010 0.0003
## 160 1.1920 nan 0.0010 0.0003
## 180 1.1780 nan 0.0010 0.0004
## 200 1.1644 nan 0.0010 0.0002
## 220 1.1515 nan 0.0010 0.0003
## 240 1.1387 nan 0.0010 0.0003
## 260 1.1261 nan 0.0010 0.0003
## 280 1.1139 nan 0.0010 0.0003
## 300 1.1020 nan 0.0010 0.0002
## 320 1.0909 nan 0.0010 0.0002
## 340 1.0802 nan 0.0010 0.0002
## 360 1.0694 nan 0.0010 0.0002
## 380 1.0590 nan 0.0010 0.0002
## 400 1.0489 nan 0.0010 0.0002
## 420 1.0388 nan 0.0010 0.0002
## 440 1.0290 nan 0.0010 0.0002
## 460 1.0198 nan 0.0010 0.0002
## 480 1.0105 nan 0.0010 0.0002
## 500 1.0017 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3120 nan 0.0100 0.0039
## 2 1.3030 nan 0.0100 0.0045
## 3 1.2944 nan 0.0100 0.0040
## 4 1.2858 nan 0.0100 0.0041
## 5 1.2784 nan 0.0100 0.0033
## 6 1.2714 nan 0.0100 0.0033
## 7 1.2640 nan 0.0100 0.0035
## 8 1.2570 nan 0.0100 0.0031
## 9 1.2494 nan 0.0100 0.0033
## 10 1.2425 nan 0.0100 0.0032
## 20 1.1734 nan 0.0100 0.0027
## 40 1.0635 nan 0.0100 0.0021
## 60 0.9836 nan 0.0100 0.0012
## 80 0.9199 nan 0.0100 0.0012
## 100 0.8683 nan 0.0100 0.0009
## 120 0.8257 nan 0.0100 0.0006
## 140 0.7911 nan 0.0100 0.0005
## 160 0.7615 nan 0.0100 0.0005
## 180 0.7354 nan 0.0100 0.0006
## 200 0.7126 nan 0.0100 0.0000
## 220 0.6926 nan 0.0100 0.0003
## 240 0.6745 nan 0.0100 -0.0001
## 260 0.6584 nan 0.0100 0.0002
## 280 0.6429 nan 0.0100 0.0002
## 300 0.6301 nan 0.0100 0.0001
## 320 0.6170 nan 0.0100 -0.0000
## 340 0.6053 nan 0.0100 0.0001
## 360 0.5939 nan 0.0100 -0.0001
## 380 0.5828 nan 0.0100 -0.0001
## 400 0.5719 nan 0.0100 0.0001
## 420 0.5614 nan 0.0100 -0.0000
## 440 0.5515 nan 0.0100 -0.0001
## 460 0.5412 nan 0.0100 0.0000
## 480 0.5328 nan 0.0100 -0.0001
## 500 0.5244 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3127 nan 0.0100 0.0039
## 2 1.3038 nan 0.0100 0.0043
## 3 1.2962 nan 0.0100 0.0032
## 4 1.2890 nan 0.0100 0.0032
## 5 1.2803 nan 0.0100 0.0042
## 6 1.2736 nan 0.0100 0.0028
## 7 1.2661 nan 0.0100 0.0031
## 8 1.2580 nan 0.0100 0.0037
## 9 1.2500 nan 0.0100 0.0037
## 10 1.2415 nan 0.0100 0.0035
## 20 1.1760 nan 0.0100 0.0028
## 40 1.0679 nan 0.0100 0.0022
## 60 0.9845 nan 0.0100 0.0013
## 80 0.9205 nan 0.0100 0.0011
## 100 0.8690 nan 0.0100 0.0008
## 120 0.8275 nan 0.0100 0.0008
## 140 0.7927 nan 0.0100 0.0001
## 160 0.7623 nan 0.0100 0.0005
## 180 0.7376 nan 0.0100 0.0001
## 200 0.7156 nan 0.0100 0.0003
## 220 0.6973 nan 0.0100 0.0001
## 240 0.6792 nan 0.0100 0.0001
## 260 0.6636 nan 0.0100 0.0003
## 280 0.6505 nan 0.0100 0.0001
## 300 0.6368 nan 0.0100 0.0000
## 320 0.6243 nan 0.0100 0.0000
## 340 0.6126 nan 0.0100 0.0001
## 360 0.6005 nan 0.0100 0.0001
## 380 0.5895 nan 0.0100 -0.0001
## 400 0.5799 nan 0.0100 0.0000
## 420 0.5712 nan 0.0100 -0.0000
## 440 0.5618 nan 0.0100 -0.0001
## 460 0.5529 nan 0.0100 -0.0001
## 480 0.5445 nan 0.0100 0.0000
## 500 0.5362 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3134 nan 0.0100 0.0032
## 2 1.3047 nan 0.0100 0.0038
## 3 1.2957 nan 0.0100 0.0040
## 4 1.2885 nan 0.0100 0.0033
## 5 1.2806 nan 0.0100 0.0035
## 6 1.2735 nan 0.0100 0.0030
## 7 1.2656 nan 0.0100 0.0037
## 8 1.2584 nan 0.0100 0.0034
## 9 1.2513 nan 0.0100 0.0033
## 10 1.2448 nan 0.0100 0.0027
## 20 1.1773 nan 0.0100 0.0030
## 40 1.0688 nan 0.0100 0.0022
## 60 0.9880 nan 0.0100 0.0014
## 80 0.9246 nan 0.0100 0.0009
## 100 0.8732 nan 0.0100 0.0007
## 120 0.8293 nan 0.0100 0.0007
## 140 0.7938 nan 0.0100 0.0006
## 160 0.7646 nan 0.0100 0.0004
## 180 0.7403 nan 0.0100 0.0002
## 200 0.7203 nan 0.0100 0.0002
## 220 0.7012 nan 0.0100 0.0003
## 240 0.6848 nan 0.0100 0.0002
## 260 0.6701 nan 0.0100 0.0002
## 280 0.6556 nan 0.0100 0.0001
## 300 0.6426 nan 0.0100 0.0001
## 320 0.6316 nan 0.0100 0.0002
## 340 0.6197 nan 0.0100 -0.0001
## 360 0.6093 nan 0.0100 -0.0000
## 380 0.6002 nan 0.0100 -0.0001
## 400 0.5907 nan 0.0100 0.0000
## 420 0.5809 nan 0.0100 0.0000
## 440 0.5722 nan 0.0100 0.0001
## 460 0.5641 nan 0.0100 0.0001
## 480 0.5565 nan 0.0100 -0.0003
## 500 0.5487 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3117 nan 0.0100 0.0042
## 2 1.3016 nan 0.0100 0.0047
## 3 1.2922 nan 0.0100 0.0041
## 4 1.2833 nan 0.0100 0.0040
## 5 1.2750 nan 0.0100 0.0040
## 6 1.2663 nan 0.0100 0.0042
## 7 1.2584 nan 0.0100 0.0036
## 8 1.2500 nan 0.0100 0.0038
## 9 1.2417 nan 0.0100 0.0040
## 10 1.2331 nan 0.0100 0.0040
## 20 1.1612 nan 0.0100 0.0031
## 40 1.0468 nan 0.0100 0.0024
## 60 0.9597 nan 0.0100 0.0017
## 80 0.8938 nan 0.0100 0.0012
## 100 0.8389 nan 0.0100 0.0011
## 120 0.7941 nan 0.0100 0.0006
## 140 0.7574 nan 0.0100 0.0006
## 160 0.7263 nan 0.0100 0.0006
## 180 0.6990 nan 0.0100 0.0004
## 200 0.6737 nan 0.0100 0.0004
## 220 0.6522 nan 0.0100 0.0002
## 240 0.6344 nan 0.0100 0.0001
## 260 0.6155 nan 0.0100 0.0002
## 280 0.5981 nan 0.0100 0.0001
## 300 0.5833 nan 0.0100 -0.0000
## 320 0.5685 nan 0.0100 0.0001
## 340 0.5547 nan 0.0100 -0.0000
## 360 0.5422 nan 0.0100 0.0001
## 380 0.5304 nan 0.0100 0.0000
## 400 0.5200 nan 0.0100 -0.0001
## 420 0.5087 nan 0.0100 0.0000
## 440 0.4992 nan 0.0100 -0.0000
## 460 0.4892 nan 0.0100 -0.0000
## 480 0.4796 nan 0.0100 -0.0001
## 500 0.4702 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3125 nan 0.0100 0.0038
## 2 1.3040 nan 0.0100 0.0037
## 3 1.2958 nan 0.0100 0.0039
## 4 1.2876 nan 0.0100 0.0036
## 5 1.2803 nan 0.0100 0.0033
## 6 1.2714 nan 0.0100 0.0041
## 7 1.2623 nan 0.0100 0.0038
## 8 1.2542 nan 0.0100 0.0036
## 9 1.2458 nan 0.0100 0.0037
## 10 1.2384 nan 0.0100 0.0034
## 20 1.1701 nan 0.0100 0.0028
## 40 1.0541 nan 0.0100 0.0024
## 60 0.9668 nan 0.0100 0.0015
## 80 0.8997 nan 0.0100 0.0011
## 100 0.8442 nan 0.0100 0.0011
## 120 0.7998 nan 0.0100 0.0007
## 140 0.7630 nan 0.0100 0.0007
## 160 0.7311 nan 0.0100 0.0004
## 180 0.7050 nan 0.0100 0.0002
## 200 0.6820 nan 0.0100 0.0002
## 220 0.6614 nan 0.0100 0.0002
## 240 0.6421 nan 0.0100 0.0002
## 260 0.6255 nan 0.0100 0.0003
## 280 0.6095 nan 0.0100 0.0002
## 300 0.5954 nan 0.0100 0.0001
## 320 0.5819 nan 0.0100 -0.0001
## 340 0.5689 nan 0.0100 0.0001
## 360 0.5577 nan 0.0100 -0.0001
## 380 0.5454 nan 0.0100 -0.0001
## 400 0.5350 nan 0.0100 -0.0001
## 420 0.5249 nan 0.0100 -0.0000
## 440 0.5145 nan 0.0100 0.0001
## 460 0.5047 nan 0.0100 -0.0001
## 480 0.4945 nan 0.0100 0.0002
## 500 0.4841 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3116 nan 0.0100 0.0043
## 2 1.3024 nan 0.0100 0.0041
## 3 1.2934 nan 0.0100 0.0038
## 4 1.2847 nan 0.0100 0.0035
## 5 1.2761 nan 0.0100 0.0041
## 6 1.2678 nan 0.0100 0.0036
## 7 1.2601 nan 0.0100 0.0034
## 8 1.2517 nan 0.0100 0.0038
## 9 1.2441 nan 0.0100 0.0033
## 10 1.2370 nan 0.0100 0.0031
## 20 1.1675 nan 0.0100 0.0026
## 40 1.0543 nan 0.0100 0.0021
## 60 0.9699 nan 0.0100 0.0017
## 80 0.9021 nan 0.0100 0.0012
## 100 0.8491 nan 0.0100 0.0008
## 120 0.8053 nan 0.0100 0.0006
## 140 0.7691 nan 0.0100 0.0005
## 160 0.7391 nan 0.0100 0.0003
## 180 0.7136 nan 0.0100 0.0005
## 200 0.6912 nan 0.0100 0.0003
## 220 0.6708 nan 0.0100 0.0002
## 240 0.6526 nan 0.0100 0.0001
## 260 0.6351 nan 0.0100 0.0001
## 280 0.6206 nan 0.0100 -0.0000
## 300 0.6073 nan 0.0100 -0.0000
## 320 0.5936 nan 0.0100 -0.0001
## 340 0.5819 nan 0.0100 -0.0000
## 360 0.5703 nan 0.0100 -0.0001
## 380 0.5592 nan 0.0100 -0.0002
## 400 0.5482 nan 0.0100 0.0000
## 420 0.5377 nan 0.0100 -0.0001
## 440 0.5273 nan 0.0100 -0.0000
## 460 0.5173 nan 0.0100 -0.0001
## 480 0.5079 nan 0.0100 -0.0001
## 500 0.4992 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3110 nan 0.0100 0.0041
## 2 1.3007 nan 0.0100 0.0044
## 3 1.2917 nan 0.0100 0.0042
## 4 1.2819 nan 0.0100 0.0038
## 5 1.2731 nan 0.0100 0.0041
## 6 1.2645 nan 0.0100 0.0037
## 7 1.2556 nan 0.0100 0.0041
## 8 1.2471 nan 0.0100 0.0034
## 9 1.2393 nan 0.0100 0.0035
## 10 1.2313 nan 0.0100 0.0032
## 20 1.1589 nan 0.0100 0.0033
## 40 1.0411 nan 0.0100 0.0019
## 60 0.9510 nan 0.0100 0.0016
## 80 0.8790 nan 0.0100 0.0014
## 100 0.8228 nan 0.0100 0.0007
## 120 0.7763 nan 0.0100 0.0009
## 140 0.7370 nan 0.0100 0.0004
## 160 0.7035 nan 0.0100 0.0005
## 180 0.6743 nan 0.0100 0.0003
## 200 0.6475 nan 0.0100 0.0004
## 220 0.6237 nan 0.0100 0.0002
## 240 0.6038 nan 0.0100 0.0001
## 260 0.5846 nan 0.0100 0.0001
## 280 0.5670 nan 0.0100 0.0003
## 300 0.5500 nan 0.0100 0.0001
## 320 0.5345 nan 0.0100 -0.0000
## 340 0.5197 nan 0.0100 -0.0000
## 360 0.5059 nan 0.0100 0.0000
## 380 0.4928 nan 0.0100 -0.0000
## 400 0.4799 nan 0.0100 -0.0000
## 420 0.4683 nan 0.0100 0.0000
## 440 0.4578 nan 0.0100 0.0000
## 460 0.4473 nan 0.0100 -0.0000
## 480 0.4381 nan 0.0100 -0.0002
## 500 0.4280 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3116 nan 0.0100 0.0040
## 2 1.3029 nan 0.0100 0.0039
## 3 1.2934 nan 0.0100 0.0044
## 4 1.2845 nan 0.0100 0.0041
## 5 1.2764 nan 0.0100 0.0035
## 6 1.2686 nan 0.0100 0.0034
## 7 1.2598 nan 0.0100 0.0036
## 8 1.2507 nan 0.0100 0.0039
## 9 1.2430 nan 0.0100 0.0037
## 10 1.2349 nan 0.0100 0.0037
## 20 1.1607 nan 0.0100 0.0027
## 40 1.0433 nan 0.0100 0.0024
## 60 0.9543 nan 0.0100 0.0016
## 80 0.8825 nan 0.0100 0.0011
## 100 0.8263 nan 0.0100 0.0007
## 120 0.7811 nan 0.0100 0.0004
## 140 0.7441 nan 0.0100 0.0006
## 160 0.7119 nan 0.0100 0.0003
## 180 0.6820 nan 0.0100 0.0003
## 200 0.6566 nan 0.0100 0.0003
## 220 0.6336 nan 0.0100 0.0004
## 240 0.6135 nan 0.0100 0.0000
## 260 0.5946 nan 0.0100 0.0003
## 280 0.5783 nan 0.0100 -0.0001
## 300 0.5631 nan 0.0100 0.0002
## 320 0.5482 nan 0.0100 0.0000
## 340 0.5338 nan 0.0100 0.0001
## 360 0.5197 nan 0.0100 0.0001
## 380 0.5073 nan 0.0100 -0.0000
## 400 0.4944 nan 0.0100 0.0001
## 420 0.4835 nan 0.0100 0.0000
## 440 0.4728 nan 0.0100 -0.0001
## 460 0.4609 nan 0.0100 -0.0001
## 480 0.4505 nan 0.0100 -0.0001
## 500 0.4405 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3112 nan 0.0100 0.0046
## 2 1.3018 nan 0.0100 0.0044
## 3 1.2936 nan 0.0100 0.0038
## 4 1.2850 nan 0.0100 0.0041
## 5 1.2757 nan 0.0100 0.0038
## 6 1.2671 nan 0.0100 0.0040
## 7 1.2588 nan 0.0100 0.0039
## 8 1.2507 nan 0.0100 0.0034
## 9 1.2426 nan 0.0100 0.0035
## 10 1.2353 nan 0.0100 0.0031
## 20 1.1636 nan 0.0100 0.0033
## 40 1.0490 nan 0.0100 0.0019
## 60 0.9603 nan 0.0100 0.0016
## 80 0.8908 nan 0.0100 0.0012
## 100 0.8381 nan 0.0100 0.0008
## 120 0.7909 nan 0.0100 0.0005
## 140 0.7527 nan 0.0100 0.0004
## 160 0.7205 nan 0.0100 0.0005
## 180 0.6913 nan 0.0100 0.0004
## 200 0.6678 nan 0.0100 0.0002
## 220 0.6460 nan 0.0100 0.0003
## 240 0.6260 nan 0.0100 0.0001
## 260 0.6082 nan 0.0100 0.0001
## 280 0.5914 nan 0.0100 0.0001
## 300 0.5763 nan 0.0100 0.0001
## 320 0.5615 nan 0.0100 0.0003
## 340 0.5478 nan 0.0100 0.0001
## 360 0.5355 nan 0.0100 0.0000
## 380 0.5236 nan 0.0100 -0.0002
## 400 0.5112 nan 0.0100 0.0000
## 420 0.4992 nan 0.0100 -0.0000
## 440 0.4892 nan 0.0100 0.0001
## 460 0.4791 nan 0.0100 -0.0002
## 480 0.4679 nan 0.0100 0.0000
## 500 0.4579 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2442 nan 0.1000 0.0350
## 2 1.1672 nan 0.1000 0.0338
## 3 1.1129 nan 0.1000 0.0205
## 4 1.0685 nan 0.1000 0.0204
## 5 1.0245 nan 0.1000 0.0168
## 6 0.9790 nan 0.1000 0.0172
## 7 0.9461 nan 0.1000 0.0153
## 8 0.9129 nan 0.1000 0.0113
## 9 0.8890 nan 0.1000 0.0102
## 10 0.8611 nan 0.1000 0.0106
## 20 0.7108 nan 0.1000 0.0017
## 40 0.5714 nan 0.1000 -0.0001
## 60 0.4883 nan 0.1000 -0.0004
## 80 0.4195 nan 0.1000 -0.0011
## 100 0.3627 nan 0.1000 -0.0005
## 120 0.3222 nan 0.1000 0.0001
## 140 0.2855 nan 0.1000 -0.0000
## 160 0.2563 nan 0.1000 -0.0004
## 180 0.2325 nan 0.1000 -0.0006
## 200 0.2111 nan 0.1000 -0.0008
## 220 0.1932 nan 0.1000 -0.0011
## 240 0.1754 nan 0.1000 -0.0001
## 260 0.1594 nan 0.1000 -0.0002
## 280 0.1459 nan 0.1000 -0.0006
## 300 0.1326 nan 0.1000 -0.0006
## 320 0.1207 nan 0.1000 -0.0001
## 340 0.1108 nan 0.1000 -0.0004
## 360 0.1023 nan 0.1000 -0.0001
## 380 0.0944 nan 0.1000 -0.0000
## 400 0.0869 nan 0.1000 -0.0001
## 420 0.0797 nan 0.1000 -0.0000
## 440 0.0731 nan 0.1000 -0.0000
## 460 0.0675 nan 0.1000 -0.0001
## 480 0.0626 nan 0.1000 -0.0001
## 500 0.0581 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2398 nan 0.1000 0.0374
## 2 1.1704 nan 0.1000 0.0286
## 3 1.1170 nan 0.1000 0.0271
## 4 1.0679 nan 0.1000 0.0211
## 5 1.0298 nan 0.1000 0.0174
## 6 0.9952 nan 0.1000 0.0137
## 7 0.9633 nan 0.1000 0.0127
## 8 0.9386 nan 0.1000 0.0106
## 9 0.9126 nan 0.1000 0.0096
## 10 0.8903 nan 0.1000 0.0086
## 20 0.7290 nan 0.1000 0.0014
## 40 0.5817 nan 0.1000 -0.0006
## 60 0.5004 nan 0.1000 0.0004
## 80 0.4446 nan 0.1000 0.0000
## 100 0.3866 nan 0.1000 -0.0014
## 120 0.3444 nan 0.1000 -0.0006
## 140 0.3085 nan 0.1000 -0.0006
## 160 0.2740 nan 0.1000 -0.0003
## 180 0.2460 nan 0.1000 0.0002
## 200 0.2222 nan 0.1000 -0.0002
## 220 0.2019 nan 0.1000 -0.0007
## 240 0.1832 nan 0.1000 -0.0005
## 260 0.1657 nan 0.1000 -0.0003
## 280 0.1506 nan 0.1000 -0.0007
## 300 0.1382 nan 0.1000 -0.0002
## 320 0.1269 nan 0.1000 -0.0006
## 340 0.1162 nan 0.1000 -0.0000
## 360 0.1057 nan 0.1000 -0.0003
## 380 0.0961 nan 0.1000 -0.0001
## 400 0.0888 nan 0.1000 -0.0001
## 420 0.0816 nan 0.1000 -0.0002
## 440 0.0750 nan 0.1000 -0.0002
## 460 0.0693 nan 0.1000 -0.0001
## 480 0.0641 nan 0.1000 -0.0005
## 500 0.0595 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2502 nan 0.1000 0.0305
## 2 1.1767 nan 0.1000 0.0361
## 3 1.1154 nan 0.1000 0.0278
## 4 1.0663 nan 0.1000 0.0206
## 5 1.0214 nan 0.1000 0.0170
## 6 0.9842 nan 0.1000 0.0178
## 7 0.9530 nan 0.1000 0.0122
## 8 0.9224 nan 0.1000 0.0107
## 9 0.8952 nan 0.1000 0.0117
## 10 0.8760 nan 0.1000 0.0076
## 20 0.7186 nan 0.1000 0.0024
## 40 0.5998 nan 0.1000 -0.0018
## 60 0.5117 nan 0.1000 0.0005
## 80 0.4578 nan 0.1000 0.0002
## 100 0.3997 nan 0.1000 -0.0014
## 120 0.3590 nan 0.1000 -0.0009
## 140 0.3242 nan 0.1000 -0.0007
## 160 0.2943 nan 0.1000 -0.0011
## 180 0.2647 nan 0.1000 -0.0005
## 200 0.2412 nan 0.1000 -0.0005
## 220 0.2182 nan 0.1000 -0.0008
## 240 0.2005 nan 0.1000 -0.0004
## 260 0.1840 nan 0.1000 -0.0003
## 280 0.1690 nan 0.1000 -0.0005
## 300 0.1553 nan 0.1000 -0.0003
## 320 0.1425 nan 0.1000 -0.0004
## 340 0.1331 nan 0.1000 -0.0004
## 360 0.1218 nan 0.1000 -0.0002
## 380 0.1130 nan 0.1000 -0.0001
## 400 0.1039 nan 0.1000 -0.0002
## 420 0.0962 nan 0.1000 -0.0007
## 440 0.0886 nan 0.1000 -0.0002
## 460 0.0819 nan 0.1000 -0.0001
## 480 0.0753 nan 0.1000 -0.0001
## 500 0.0691 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2283 nan 0.1000 0.0373
## 2 1.1616 nan 0.1000 0.0255
## 3 1.1045 nan 0.1000 0.0259
## 4 1.0567 nan 0.1000 0.0207
## 5 1.0165 nan 0.1000 0.0175
## 6 0.9714 nan 0.1000 0.0188
## 7 0.9325 nan 0.1000 0.0163
## 8 0.9044 nan 0.1000 0.0103
## 9 0.8700 nan 0.1000 0.0122
## 10 0.8442 nan 0.1000 0.0092
## 20 0.6885 nan 0.1000 0.0017
## 40 0.5407 nan 0.1000 -0.0020
## 60 0.4496 nan 0.1000 -0.0011
## 80 0.3733 nan 0.1000 -0.0006
## 100 0.3170 nan 0.1000 -0.0001
## 120 0.2772 nan 0.1000 -0.0002
## 140 0.2432 nan 0.1000 -0.0005
## 160 0.2133 nan 0.1000 -0.0003
## 180 0.1877 nan 0.1000 -0.0004
## 200 0.1641 nan 0.1000 -0.0002
## 220 0.1440 nan 0.1000 -0.0004
## 240 0.1287 nan 0.1000 -0.0002
## 260 0.1151 nan 0.1000 -0.0002
## 280 0.1022 nan 0.1000 -0.0001
## 300 0.0917 nan 0.1000 -0.0003
## 320 0.0833 nan 0.1000 -0.0001
## 340 0.0742 nan 0.1000 -0.0004
## 360 0.0660 nan 0.1000 -0.0004
## 380 0.0599 nan 0.1000 0.0001
## 400 0.0541 nan 0.1000 -0.0001
## 420 0.0491 nan 0.1000 -0.0002
## 440 0.0442 nan 0.1000 -0.0000
## 460 0.0401 nan 0.1000 -0.0002
## 480 0.0364 nan 0.1000 -0.0001
## 500 0.0330 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2299 nan 0.1000 0.0411
## 2 1.1613 nan 0.1000 0.0251
## 3 1.0973 nan 0.1000 0.0251
## 4 1.0495 nan 0.1000 0.0221
## 5 1.0059 nan 0.1000 0.0201
## 6 0.9681 nan 0.1000 0.0136
## 7 0.9314 nan 0.1000 0.0159
## 8 0.8969 nan 0.1000 0.0126
## 9 0.8674 nan 0.1000 0.0110
## 10 0.8417 nan 0.1000 0.0121
## 20 0.6834 nan 0.1000 0.0030
## 40 0.5452 nan 0.1000 -0.0005
## 60 0.4552 nan 0.1000 0.0004
## 80 0.3789 nan 0.1000 -0.0005
## 100 0.3265 nan 0.1000 -0.0003
## 120 0.2820 nan 0.1000 -0.0002
## 140 0.2438 nan 0.1000 -0.0008
## 160 0.2139 nan 0.1000 -0.0006
## 180 0.1902 nan 0.1000 -0.0003
## 200 0.1707 nan 0.1000 -0.0008
## 220 0.1532 nan 0.1000 -0.0008
## 240 0.1356 nan 0.1000 -0.0007
## 260 0.1206 nan 0.1000 -0.0002
## 280 0.1074 nan 0.1000 -0.0004
## 300 0.0959 nan 0.1000 -0.0004
## 320 0.0869 nan 0.1000 -0.0003
## 340 0.0793 nan 0.1000 -0.0003
## 360 0.0708 nan 0.1000 -0.0001
## 380 0.0641 nan 0.1000 -0.0002
## 400 0.0574 nan 0.1000 -0.0003
## 420 0.0514 nan 0.1000 -0.0001
## 440 0.0458 nan 0.1000 -0.0002
## 460 0.0413 nan 0.1000 -0.0001
## 480 0.0374 nan 0.1000 -0.0001
## 500 0.0343 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2354 nan 0.1000 0.0382
## 2 1.1588 nan 0.1000 0.0357
## 3 1.0930 nan 0.1000 0.0272
## 4 1.0394 nan 0.1000 0.0214
## 5 0.9959 nan 0.1000 0.0165
## 6 0.9526 nan 0.1000 0.0172
## 7 0.9148 nan 0.1000 0.0153
## 8 0.8851 nan 0.1000 0.0115
## 9 0.8574 nan 0.1000 0.0129
## 10 0.8322 nan 0.1000 0.0087
## 20 0.6779 nan 0.1000 0.0021
## 40 0.5410 nan 0.1000 0.0004
## 60 0.4543 nan 0.1000 -0.0032
## 80 0.3930 nan 0.1000 -0.0009
## 100 0.3434 nan 0.1000 0.0001
## 120 0.3011 nan 0.1000 -0.0007
## 140 0.2612 nan 0.1000 -0.0003
## 160 0.2283 nan 0.1000 -0.0008
## 180 0.2007 nan 0.1000 -0.0000
## 200 0.1775 nan 0.1000 0.0000
## 220 0.1594 nan 0.1000 -0.0009
## 240 0.1444 nan 0.1000 -0.0005
## 260 0.1288 nan 0.1000 -0.0003
## 280 0.1164 nan 0.1000 -0.0003
## 300 0.1051 nan 0.1000 -0.0000
## 320 0.0944 nan 0.1000 -0.0004
## 340 0.0847 nan 0.1000 -0.0001
## 360 0.0761 nan 0.1000 -0.0002
## 380 0.0687 nan 0.1000 -0.0003
## 400 0.0629 nan 0.1000 -0.0002
## 420 0.0558 nan 0.1000 -0.0001
## 440 0.0508 nan 0.1000 -0.0001
## 460 0.0455 nan 0.1000 -0.0001
## 480 0.0416 nan 0.1000 -0.0001
## 500 0.0380 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2379 nan 0.1000 0.0398
## 2 1.1642 nan 0.1000 0.0283
## 3 1.0981 nan 0.1000 0.0325
## 4 1.0438 nan 0.1000 0.0238
## 5 0.9983 nan 0.1000 0.0178
## 6 0.9545 nan 0.1000 0.0139
## 7 0.9171 nan 0.1000 0.0132
## 8 0.8861 nan 0.1000 0.0096
## 9 0.8525 nan 0.1000 0.0125
## 10 0.8252 nan 0.1000 0.0080
## 20 0.6495 nan 0.1000 0.0046
## 40 0.4886 nan 0.1000 -0.0004
## 60 0.3988 nan 0.1000 -0.0011
## 80 0.3304 nan 0.1000 -0.0014
## 100 0.2740 nan 0.1000 -0.0001
## 120 0.2298 nan 0.1000 -0.0007
## 140 0.1954 nan 0.1000 -0.0003
## 160 0.1701 nan 0.1000 -0.0011
## 180 0.1457 nan 0.1000 -0.0005
## 200 0.1260 nan 0.1000 -0.0003
## 220 0.1099 nan 0.1000 -0.0001
## 240 0.0954 nan 0.1000 -0.0002
## 260 0.0835 nan 0.1000 0.0000
## 280 0.0726 nan 0.1000 -0.0001
## 300 0.0647 nan 0.1000 0.0002
## 320 0.0568 nan 0.1000 -0.0000
## 340 0.0495 nan 0.1000 -0.0000
## 360 0.0442 nan 0.1000 -0.0001
## 380 0.0386 nan 0.1000 -0.0001
## 400 0.0344 nan 0.1000 -0.0001
## 420 0.0306 nan 0.1000 0.0000
## 440 0.0270 nan 0.1000 -0.0001
## 460 0.0237 nan 0.1000 -0.0000
## 480 0.0211 nan 0.1000 0.0000
## 500 0.0186 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2288 nan 0.1000 0.0421
## 2 1.1489 nan 0.1000 0.0369
## 3 1.0851 nan 0.1000 0.0245
## 4 1.0306 nan 0.1000 0.0224
## 5 0.9900 nan 0.1000 0.0171
## 6 0.9497 nan 0.1000 0.0173
## 7 0.9080 nan 0.1000 0.0158
## 8 0.8758 nan 0.1000 0.0115
## 9 0.8437 nan 0.1000 0.0116
## 10 0.8139 nan 0.1000 0.0118
## 20 0.6454 nan 0.1000 0.0038
## 40 0.4964 nan 0.1000 0.0002
## 60 0.4013 nan 0.1000 0.0009
## 80 0.3267 nan 0.1000 -0.0001
## 100 0.2770 nan 0.1000 -0.0010
## 120 0.2318 nan 0.1000 -0.0003
## 140 0.1973 nan 0.1000 -0.0004
## 160 0.1694 nan 0.1000 -0.0008
## 180 0.1441 nan 0.1000 -0.0001
## 200 0.1238 nan 0.1000 -0.0006
## 220 0.1066 nan 0.1000 -0.0004
## 240 0.0922 nan 0.1000 -0.0002
## 260 0.0808 nan 0.1000 -0.0003
## 280 0.0708 nan 0.1000 0.0000
## 300 0.0630 nan 0.1000 -0.0002
## 320 0.0554 nan 0.1000 0.0001
## 340 0.0489 nan 0.1000 -0.0002
## 360 0.0434 nan 0.1000 -0.0002
## 380 0.0384 nan 0.1000 -0.0001
## 400 0.0338 nan 0.1000 -0.0001
## 420 0.0297 nan 0.1000 -0.0002
## 440 0.0263 nan 0.1000 -0.0001
## 460 0.0232 nan 0.1000 -0.0002
## 480 0.0207 nan 0.1000 -0.0001
## 500 0.0184 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2273 nan 0.1000 0.0428
## 2 1.1555 nan 0.1000 0.0329
## 3 1.0949 nan 0.1000 0.0236
## 4 1.0385 nan 0.1000 0.0248
## 5 0.9936 nan 0.1000 0.0166
## 6 0.9543 nan 0.1000 0.0146
## 7 0.9211 nan 0.1000 0.0119
## 8 0.8906 nan 0.1000 0.0123
## 9 0.8556 nan 0.1000 0.0121
## 10 0.8282 nan 0.1000 0.0086
## 20 0.6629 nan 0.1000 0.0024
## 40 0.5129 nan 0.1000 -0.0026
## 60 0.4177 nan 0.1000 -0.0004
## 80 0.3508 nan 0.1000 -0.0002
## 100 0.2918 nan 0.1000 -0.0008
## 120 0.2493 nan 0.1000 -0.0005
## 140 0.2121 nan 0.1000 -0.0010
## 160 0.1817 nan 0.1000 -0.0004
## 180 0.1585 nan 0.1000 -0.0001
## 200 0.1396 nan 0.1000 -0.0006
## 220 0.1209 nan 0.1000 -0.0002
## 240 0.1056 nan 0.1000 -0.0003
## 260 0.0920 nan 0.1000 -0.0004
## 280 0.0817 nan 0.1000 -0.0002
## 300 0.0719 nan 0.1000 -0.0002
## 320 0.0637 nan 0.1000 -0.0003
## 340 0.0557 nan 0.1000 -0.0001
## 360 0.0493 nan 0.1000 -0.0002
## 380 0.0439 nan 0.1000 -0.0002
## 400 0.0394 nan 0.1000 -0.0001
## 420 0.0349 nan 0.1000 -0.0001
## 440 0.0309 nan 0.1000 -0.0001
## 460 0.0274 nan 0.1000 -0.0001
## 480 0.0241 nan 0.1000 -0.0000
## 500 0.0215 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3205 nan 0.0010 0.0003
## 2 1.3196 nan 0.0010 0.0004
## 3 1.3187 nan 0.0010 0.0004
## 4 1.3178 nan 0.0010 0.0004
## 5 1.3170 nan 0.0010 0.0003
## 6 1.3162 nan 0.0010 0.0004
## 7 1.3155 nan 0.0010 0.0004
## 8 1.3146 nan 0.0010 0.0004
## 9 1.3138 nan 0.0010 0.0004
## 10 1.3130 nan 0.0010 0.0004
## 20 1.3054 nan 0.0010 0.0003
## 40 1.2900 nan 0.0010 0.0003
## 60 1.2755 nan 0.0010 0.0003
## 80 1.2611 nan 0.0010 0.0003
## 100 1.2469 nan 0.0010 0.0004
## 120 1.2337 nan 0.0010 0.0003
## 140 1.2211 nan 0.0010 0.0002
## 160 1.2084 nan 0.0010 0.0002
## 180 1.1962 nan 0.0010 0.0002
## 200 1.1846 nan 0.0010 0.0002
## 220 1.1730 nan 0.0010 0.0002
## 240 1.1621 nan 0.0010 0.0003
## 260 1.1512 nan 0.0010 0.0002
## 280 1.1406 nan 0.0010 0.0003
## 300 1.1302 nan 0.0010 0.0002
## 320 1.1205 nan 0.0010 0.0002
## 340 1.1110 nan 0.0010 0.0002
## 360 1.1012 nan 0.0010 0.0002
## 380 1.0921 nan 0.0010 0.0002
## 400 1.0833 nan 0.0010 0.0002
## 420 1.0746 nan 0.0010 0.0002
## 440 1.0660 nan 0.0010 0.0002
## 460 1.0575 nan 0.0010 0.0002
## 480 1.0493 nan 0.0010 0.0002
## 500 1.0413 nan 0.0010 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3204 nan 0.0010 0.0003
## 2 1.3196 nan 0.0010 0.0004
## 3 1.3189 nan 0.0010 0.0003
## 4 1.3181 nan 0.0010 0.0003
## 5 1.3174 nan 0.0010 0.0003
## 6 1.3166 nan 0.0010 0.0003
## 7 1.3158 nan 0.0010 0.0004
## 8 1.3151 nan 0.0010 0.0003
## 9 1.3143 nan 0.0010 0.0004
## 10 1.3135 nan 0.0010 0.0003
## 20 1.3059 nan 0.0010 0.0003
## 40 1.2910 nan 0.0010 0.0004
## 60 1.2758 nan 0.0010 0.0004
## 80 1.2619 nan 0.0010 0.0003
## 100 1.2480 nan 0.0010 0.0003
## 120 1.2353 nan 0.0010 0.0003
## 140 1.2223 nan 0.0010 0.0002
## 160 1.2097 nan 0.0010 0.0003
## 180 1.1975 nan 0.0010 0.0002
## 200 1.1856 nan 0.0010 0.0003
## 220 1.1743 nan 0.0010 0.0002
## 240 1.1634 nan 0.0010 0.0002
## 260 1.1528 nan 0.0010 0.0002
## 280 1.1425 nan 0.0010 0.0002
## 300 1.1322 nan 0.0010 0.0002
## 320 1.1225 nan 0.0010 0.0002
## 340 1.1128 nan 0.0010 0.0002
## 360 1.1036 nan 0.0010 0.0002
## 380 1.0945 nan 0.0010 0.0002
## 400 1.0857 nan 0.0010 0.0002
## 420 1.0769 nan 0.0010 0.0002
## 440 1.0685 nan 0.0010 0.0002
## 460 1.0602 nan 0.0010 0.0002
## 480 1.0521 nan 0.0010 0.0002
## 500 1.0441 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3204 nan 0.0010 0.0004
## 2 1.3197 nan 0.0010 0.0003
## 3 1.3189 nan 0.0010 0.0003
## 4 1.3181 nan 0.0010 0.0003
## 5 1.3174 nan 0.0010 0.0003
## 6 1.3166 nan 0.0010 0.0003
## 7 1.3159 nan 0.0010 0.0004
## 8 1.3152 nan 0.0010 0.0003
## 9 1.3144 nan 0.0010 0.0003
## 10 1.3137 nan 0.0010 0.0003
## 20 1.3058 nan 0.0010 0.0003
## 40 1.2906 nan 0.0010 0.0003
## 60 1.2761 nan 0.0010 0.0004
## 80 1.2617 nan 0.0010 0.0003
## 100 1.2481 nan 0.0010 0.0003
## 120 1.2344 nan 0.0010 0.0003
## 140 1.2218 nan 0.0010 0.0003
## 160 1.2094 nan 0.0010 0.0003
## 180 1.1971 nan 0.0010 0.0002
## 200 1.1855 nan 0.0010 0.0003
## 220 1.1741 nan 0.0010 0.0003
## 240 1.1629 nan 0.0010 0.0003
## 260 1.1523 nan 0.0010 0.0002
## 280 1.1421 nan 0.0010 0.0003
## 300 1.1319 nan 0.0010 0.0002
## 320 1.1221 nan 0.0010 0.0002
## 340 1.1125 nan 0.0010 0.0002
## 360 1.1028 nan 0.0010 0.0002
## 380 1.0935 nan 0.0010 0.0002
## 400 1.0845 nan 0.0010 0.0002
## 420 1.0759 nan 0.0010 0.0002
## 440 1.0677 nan 0.0010 0.0002
## 460 1.0595 nan 0.0010 0.0002
## 480 1.0515 nan 0.0010 0.0002
## 500 1.0436 nan 0.0010 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3195 nan 0.0010 0.0004
## 3 1.3187 nan 0.0010 0.0004
## 4 1.3178 nan 0.0010 0.0004
## 5 1.3170 nan 0.0010 0.0004
## 6 1.3162 nan 0.0010 0.0004
## 7 1.3154 nan 0.0010 0.0004
## 8 1.3146 nan 0.0010 0.0003
## 9 1.3138 nan 0.0010 0.0004
## 10 1.3129 nan 0.0010 0.0004
## 20 1.3046 nan 0.0010 0.0004
## 40 1.2885 nan 0.0010 0.0004
## 60 1.2727 nan 0.0010 0.0004
## 80 1.2578 nan 0.0010 0.0003
## 100 1.2431 nan 0.0010 0.0003
## 120 1.2291 nan 0.0010 0.0003
## 140 1.2155 nan 0.0010 0.0003
## 160 1.2022 nan 0.0010 0.0003
## 180 1.1894 nan 0.0010 0.0003
## 200 1.1768 nan 0.0010 0.0003
## 220 1.1644 nan 0.0010 0.0002
## 240 1.1526 nan 0.0010 0.0003
## 260 1.1409 nan 0.0010 0.0002
## 280 1.1296 nan 0.0010 0.0003
## 300 1.1186 nan 0.0010 0.0002
## 320 1.1081 nan 0.0010 0.0002
## 340 1.0977 nan 0.0010 0.0002
## 360 1.0877 nan 0.0010 0.0002
## 380 1.0781 nan 0.0010 0.0002
## 400 1.0686 nan 0.0010 0.0002
## 420 1.0593 nan 0.0010 0.0002
## 440 1.0501 nan 0.0010 0.0002
## 460 1.0412 nan 0.0010 0.0002
## 480 1.0327 nan 0.0010 0.0001
## 500 1.0244 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3204 nan 0.0010 0.0003
## 2 1.3195 nan 0.0010 0.0003
## 3 1.3187 nan 0.0010 0.0004
## 4 1.3178 nan 0.0010 0.0004
## 5 1.3170 nan 0.0010 0.0004
## 6 1.3161 nan 0.0010 0.0004
## 7 1.3153 nan 0.0010 0.0004
## 8 1.3145 nan 0.0010 0.0004
## 9 1.3137 nan 0.0010 0.0004
## 10 1.3128 nan 0.0010 0.0004
## 20 1.3046 nan 0.0010 0.0003
## 40 1.2882 nan 0.0010 0.0003
## 60 1.2725 nan 0.0010 0.0004
## 80 1.2575 nan 0.0010 0.0003
## 100 1.2429 nan 0.0010 0.0003
## 120 1.2286 nan 0.0010 0.0003
## 140 1.2151 nan 0.0010 0.0003
## 160 1.2017 nan 0.0010 0.0003
## 180 1.1889 nan 0.0010 0.0003
## 200 1.1764 nan 0.0010 0.0002
## 220 1.1647 nan 0.0010 0.0003
## 240 1.1526 nan 0.0010 0.0003
## 260 1.1410 nan 0.0010 0.0003
## 280 1.1299 nan 0.0010 0.0002
## 300 1.1190 nan 0.0010 0.0002
## 320 1.1085 nan 0.0010 0.0002
## 340 1.0985 nan 0.0010 0.0002
## 360 1.0886 nan 0.0010 0.0002
## 380 1.0791 nan 0.0010 0.0002
## 400 1.0698 nan 0.0010 0.0002
## 420 1.0606 nan 0.0010 0.0002
## 440 1.0517 nan 0.0010 0.0002
## 460 1.0430 nan 0.0010 0.0002
## 480 1.0346 nan 0.0010 0.0001
## 500 1.0266 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0003
## 2 1.3195 nan 0.0010 0.0004
## 3 1.3186 nan 0.0010 0.0004
## 4 1.3177 nan 0.0010 0.0004
## 5 1.3169 nan 0.0010 0.0004
## 6 1.3160 nan 0.0010 0.0004
## 7 1.3151 nan 0.0010 0.0004
## 8 1.3143 nan 0.0010 0.0004
## 9 1.3136 nan 0.0010 0.0004
## 10 1.3128 nan 0.0010 0.0004
## 20 1.3046 nan 0.0010 0.0004
## 40 1.2887 nan 0.0010 0.0003
## 60 1.2732 nan 0.0010 0.0004
## 80 1.2580 nan 0.0010 0.0004
## 100 1.2435 nan 0.0010 0.0003
## 120 1.2293 nan 0.0010 0.0003
## 140 1.2157 nan 0.0010 0.0003
## 160 1.2029 nan 0.0010 0.0003
## 180 1.1903 nan 0.0010 0.0003
## 200 1.1777 nan 0.0010 0.0003
## 220 1.1654 nan 0.0010 0.0002
## 240 1.1536 nan 0.0010 0.0003
## 260 1.1422 nan 0.0010 0.0002
## 280 1.1313 nan 0.0010 0.0003
## 300 1.1205 nan 0.0010 0.0002
## 320 1.1102 nan 0.0010 0.0002
## 340 1.0999 nan 0.0010 0.0002
## 360 1.0899 nan 0.0010 0.0002
## 380 1.0801 nan 0.0010 0.0002
## 400 1.0708 nan 0.0010 0.0002
## 420 1.0616 nan 0.0010 0.0002
## 440 1.0529 nan 0.0010 0.0002
## 460 1.0444 nan 0.0010 0.0002
## 480 1.0360 nan 0.0010 0.0002
## 500 1.0277 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3194 nan 0.0010 0.0004
## 3 1.3184 nan 0.0010 0.0004
## 4 1.3176 nan 0.0010 0.0004
## 5 1.3166 nan 0.0010 0.0005
## 6 1.3158 nan 0.0010 0.0004
## 7 1.3148 nan 0.0010 0.0004
## 8 1.3140 nan 0.0010 0.0004
## 9 1.3130 nan 0.0010 0.0004
## 10 1.3122 nan 0.0010 0.0004
## 20 1.3036 nan 0.0010 0.0004
## 40 1.2869 nan 0.0010 0.0004
## 60 1.2706 nan 0.0010 0.0003
## 80 1.2545 nan 0.0010 0.0003
## 100 1.2389 nan 0.0010 0.0003
## 120 1.2238 nan 0.0010 0.0004
## 140 1.2089 nan 0.0010 0.0003
## 160 1.1952 nan 0.0010 0.0003
## 180 1.1816 nan 0.0010 0.0003
## 200 1.1685 nan 0.0010 0.0003
## 220 1.1558 nan 0.0010 0.0003
## 240 1.1434 nan 0.0010 0.0003
## 260 1.1315 nan 0.0010 0.0002
## 280 1.1198 nan 0.0010 0.0003
## 300 1.1085 nan 0.0010 0.0003
## 320 1.0976 nan 0.0010 0.0002
## 340 1.0867 nan 0.0010 0.0002
## 360 1.0760 nan 0.0010 0.0002
## 380 1.0659 nan 0.0010 0.0002
## 400 1.0561 nan 0.0010 0.0002
## 420 1.0468 nan 0.0010 0.0002
## 440 1.0373 nan 0.0010 0.0002
## 460 1.0282 nan 0.0010 0.0002
## 480 1.0192 nan 0.0010 0.0002
## 500 1.0105 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3195 nan 0.0010 0.0004
## 3 1.3186 nan 0.0010 0.0004
## 4 1.3178 nan 0.0010 0.0003
## 5 1.3169 nan 0.0010 0.0004
## 6 1.3161 nan 0.0010 0.0004
## 7 1.3152 nan 0.0010 0.0004
## 8 1.3143 nan 0.0010 0.0004
## 9 1.3134 nan 0.0010 0.0004
## 10 1.3126 nan 0.0010 0.0004
## 20 1.3037 nan 0.0010 0.0004
## 40 1.2863 nan 0.0010 0.0003
## 60 1.2702 nan 0.0010 0.0003
## 80 1.2543 nan 0.0010 0.0004
## 100 1.2393 nan 0.0010 0.0004
## 120 1.2247 nan 0.0010 0.0003
## 140 1.2104 nan 0.0010 0.0003
## 160 1.1966 nan 0.0010 0.0003
## 180 1.1830 nan 0.0010 0.0003
## 200 1.1703 nan 0.0010 0.0003
## 220 1.1575 nan 0.0010 0.0002
## 240 1.1454 nan 0.0010 0.0002
## 260 1.1334 nan 0.0010 0.0002
## 280 1.1220 nan 0.0010 0.0003
## 300 1.1107 nan 0.0010 0.0003
## 320 1.0998 nan 0.0010 0.0002
## 340 1.0890 nan 0.0010 0.0002
## 360 1.0785 nan 0.0010 0.0002
## 380 1.0683 nan 0.0010 0.0002
## 400 1.0586 nan 0.0010 0.0002
## 420 1.0492 nan 0.0010 0.0002
## 440 1.0397 nan 0.0010 0.0002
## 460 1.0305 nan 0.0010 0.0002
## 480 1.0216 nan 0.0010 0.0002
## 500 1.0128 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3202 nan 0.0010 0.0004
## 2 1.3194 nan 0.0010 0.0004
## 3 1.3185 nan 0.0010 0.0004
## 4 1.3176 nan 0.0010 0.0004
## 5 1.3168 nan 0.0010 0.0004
## 6 1.3159 nan 0.0010 0.0004
## 7 1.3150 nan 0.0010 0.0004
## 8 1.3141 nan 0.0010 0.0004
## 9 1.3133 nan 0.0010 0.0004
## 10 1.3125 nan 0.0010 0.0004
## 20 1.3038 nan 0.0010 0.0004
## 40 1.2872 nan 0.0010 0.0004
## 60 1.2712 nan 0.0010 0.0004
## 80 1.2555 nan 0.0010 0.0003
## 100 1.2407 nan 0.0010 0.0003
## 120 1.2259 nan 0.0010 0.0003
## 140 1.2119 nan 0.0010 0.0003
## 160 1.1977 nan 0.0010 0.0003
## 180 1.1847 nan 0.0010 0.0003
## 200 1.1715 nan 0.0010 0.0003
## 220 1.1592 nan 0.0010 0.0003
## 240 1.1470 nan 0.0010 0.0002
## 260 1.1352 nan 0.0010 0.0003
## 280 1.1236 nan 0.0010 0.0002
## 300 1.1124 nan 0.0010 0.0003
## 320 1.1015 nan 0.0010 0.0002
## 340 1.0910 nan 0.0010 0.0002
## 360 1.0805 nan 0.0010 0.0002
## 380 1.0705 nan 0.0010 0.0002
## 400 1.0608 nan 0.0010 0.0002
## 420 1.0512 nan 0.0010 0.0002
## 440 1.0421 nan 0.0010 0.0002
## 460 1.0332 nan 0.0010 0.0002
## 480 1.0245 nan 0.0010 0.0002
## 500 1.0158 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3128 nan 0.0100 0.0038
## 2 1.3059 nan 0.0100 0.0032
## 3 1.2981 nan 0.0100 0.0034
## 4 1.2908 nan 0.0100 0.0033
## 5 1.2839 nan 0.0100 0.0028
## 6 1.2776 nan 0.0100 0.0028
## 7 1.2700 nan 0.0100 0.0034
## 8 1.2632 nan 0.0100 0.0029
## 9 1.2557 nan 0.0100 0.0034
## 10 1.2492 nan 0.0100 0.0029
## 20 1.1862 nan 0.0100 0.0023
## 40 1.0848 nan 0.0100 0.0017
## 60 1.0071 nan 0.0100 0.0014
## 80 0.9464 nan 0.0100 0.0013
## 100 0.8968 nan 0.0100 0.0008
## 120 0.8567 nan 0.0100 0.0007
## 140 0.8218 nan 0.0100 0.0005
## 160 0.7931 nan 0.0100 0.0001
## 180 0.7674 nan 0.0100 0.0002
## 200 0.7446 nan 0.0100 0.0004
## 220 0.7246 nan 0.0100 0.0002
## 240 0.7085 nan 0.0100 0.0002
## 260 0.6920 nan 0.0100 0.0002
## 280 0.6777 nan 0.0100 -0.0000
## 300 0.6643 nan 0.0100 -0.0000
## 320 0.6515 nan 0.0100 0.0000
## 340 0.6403 nan 0.0100 0.0000
## 360 0.6281 nan 0.0100 0.0000
## 380 0.6168 nan 0.0100 0.0001
## 400 0.6059 nan 0.0100 -0.0001
## 420 0.5968 nan 0.0100 0.0000
## 440 0.5873 nan 0.0100 0.0000
## 460 0.5785 nan 0.0100 0.0000
## 480 0.5693 nan 0.0100 -0.0000
## 500 0.5602 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3133 nan 0.0100 0.0038
## 2 1.3045 nan 0.0100 0.0036
## 3 1.2971 nan 0.0100 0.0031
## 4 1.2903 nan 0.0100 0.0030
## 5 1.2839 nan 0.0100 0.0024
## 6 1.2766 nan 0.0100 0.0034
## 7 1.2695 nan 0.0100 0.0034
## 8 1.2625 nan 0.0100 0.0033
## 9 1.2548 nan 0.0100 0.0034
## 10 1.2484 nan 0.0100 0.0030
## 20 1.1850 nan 0.0100 0.0024
## 40 1.0839 nan 0.0100 0.0019
## 60 1.0052 nan 0.0100 0.0017
## 80 0.9431 nan 0.0100 0.0010
## 100 0.8931 nan 0.0100 0.0011
## 120 0.8510 nan 0.0100 0.0007
## 140 0.8172 nan 0.0100 0.0004
## 160 0.7894 nan 0.0100 0.0002
## 180 0.7651 nan 0.0100 0.0005
## 200 0.7446 nan 0.0100 0.0001
## 220 0.7253 nan 0.0100 0.0003
## 240 0.7077 nan 0.0100 0.0001
## 260 0.6922 nan 0.0100 0.0001
## 280 0.6770 nan 0.0100 0.0001
## 300 0.6634 nan 0.0100 0.0000
## 320 0.6516 nan 0.0100 -0.0001
## 340 0.6397 nan 0.0100 -0.0000
## 360 0.6292 nan 0.0100 -0.0000
## 380 0.6181 nan 0.0100 0.0001
## 400 0.6079 nan 0.0100 -0.0003
## 420 0.5974 nan 0.0100 -0.0001
## 440 0.5887 nan 0.0100 0.0001
## 460 0.5796 nan 0.0100 -0.0001
## 480 0.5713 nan 0.0100 0.0001
## 500 0.5632 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3134 nan 0.0100 0.0038
## 2 1.3050 nan 0.0100 0.0037
## 3 1.2972 nan 0.0100 0.0035
## 4 1.2903 nan 0.0100 0.0031
## 5 1.2829 nan 0.0100 0.0035
## 6 1.2757 nan 0.0100 0.0035
## 7 1.2682 nan 0.0100 0.0034
## 8 1.2609 nan 0.0100 0.0031
## 9 1.2540 nan 0.0100 0.0030
## 10 1.2469 nan 0.0100 0.0032
## 20 1.1855 nan 0.0100 0.0025
## 40 1.0839 nan 0.0100 0.0019
## 60 1.0086 nan 0.0100 0.0015
## 80 0.9482 nan 0.0100 0.0012
## 100 0.9001 nan 0.0100 0.0008
## 120 0.8588 nan 0.0100 0.0008
## 140 0.8246 nan 0.0100 0.0005
## 160 0.7976 nan 0.0100 0.0003
## 180 0.7735 nan 0.0100 0.0003
## 200 0.7512 nan 0.0100 0.0003
## 220 0.7317 nan 0.0100 0.0002
## 240 0.7149 nan 0.0100 0.0002
## 260 0.6993 nan 0.0100 0.0000
## 280 0.6845 nan 0.0100 0.0000
## 300 0.6725 nan 0.0100 0.0001
## 320 0.6602 nan 0.0100 0.0001
## 340 0.6495 nan 0.0100 0.0001
## 360 0.6386 nan 0.0100 0.0001
## 380 0.6292 nan 0.0100 0.0001
## 400 0.6188 nan 0.0100 0.0001
## 420 0.6094 nan 0.0100 -0.0002
## 440 0.6006 nan 0.0100 -0.0001
## 460 0.5920 nan 0.0100 0.0000
## 480 0.5836 nan 0.0100 -0.0000
## 500 0.5758 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3124 nan 0.0100 0.0038
## 2 1.3037 nan 0.0100 0.0039
## 3 1.2955 nan 0.0100 0.0040
## 4 1.2875 nan 0.0100 0.0037
## 5 1.2793 nan 0.0100 0.0038
## 6 1.2712 nan 0.0100 0.0034
## 7 1.2636 nan 0.0100 0.0032
## 8 1.2557 nan 0.0100 0.0036
## 9 1.2484 nan 0.0100 0.0034
## 10 1.2409 nan 0.0100 0.0033
## 20 1.1745 nan 0.0100 0.0028
## 40 1.0678 nan 0.0100 0.0020
## 60 0.9852 nan 0.0100 0.0013
## 80 0.9193 nan 0.0100 0.0012
## 100 0.8652 nan 0.0100 0.0009
## 120 0.8243 nan 0.0100 0.0006
## 140 0.7875 nan 0.0100 0.0006
## 160 0.7571 nan 0.0100 0.0005
## 180 0.7311 nan 0.0100 0.0003
## 200 0.7057 nan 0.0100 0.0002
## 220 0.6846 nan 0.0100 0.0000
## 240 0.6667 nan 0.0100 0.0002
## 260 0.6490 nan 0.0100 -0.0001
## 280 0.6348 nan 0.0100 0.0001
## 300 0.6196 nan 0.0100 0.0001
## 320 0.6046 nan 0.0100 -0.0002
## 340 0.5912 nan 0.0100 0.0000
## 360 0.5782 nan 0.0100 -0.0001
## 380 0.5663 nan 0.0100 -0.0001
## 400 0.5540 nan 0.0100 -0.0001
## 420 0.5415 nan 0.0100 0.0000
## 440 0.5306 nan 0.0100 0.0000
## 460 0.5202 nan 0.0100 0.0000
## 480 0.5108 nan 0.0100 -0.0001
## 500 0.5007 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3122 nan 0.0100 0.0040
## 2 1.3034 nan 0.0100 0.0039
## 3 1.2955 nan 0.0100 0.0033
## 4 1.2881 nan 0.0100 0.0034
## 5 1.2806 nan 0.0100 0.0035
## 6 1.2728 nan 0.0100 0.0032
## 7 1.2655 nan 0.0100 0.0032
## 8 1.2582 nan 0.0100 0.0031
## 9 1.2504 nan 0.0100 0.0032
## 10 1.2430 nan 0.0100 0.0034
## 20 1.1770 nan 0.0100 0.0025
## 40 1.0716 nan 0.0100 0.0018
## 60 0.9892 nan 0.0100 0.0016
## 80 0.9247 nan 0.0100 0.0013
## 100 0.8732 nan 0.0100 0.0006
## 120 0.8312 nan 0.0100 0.0007
## 140 0.7950 nan 0.0100 0.0006
## 160 0.7641 nan 0.0100 0.0001
## 180 0.7369 nan 0.0100 0.0005
## 200 0.7130 nan 0.0100 0.0001
## 220 0.6921 nan 0.0100 0.0002
## 240 0.6729 nan 0.0100 0.0002
## 260 0.6563 nan 0.0100 0.0001
## 280 0.6414 nan 0.0100 0.0001
## 300 0.6254 nan 0.0100 0.0001
## 320 0.6109 nan 0.0100 -0.0001
## 340 0.5981 nan 0.0100 0.0002
## 360 0.5851 nan 0.0100 -0.0001
## 380 0.5743 nan 0.0100 0.0001
## 400 0.5628 nan 0.0100 -0.0001
## 420 0.5523 nan 0.0100 0.0001
## 440 0.5417 nan 0.0100 -0.0001
## 460 0.5314 nan 0.0100 -0.0000
## 480 0.5224 nan 0.0100 0.0001
## 500 0.5118 nan 0.0100 -0.0003
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3128 nan 0.0100 0.0041
## 2 1.3052 nan 0.0100 0.0038
## 3 1.2974 nan 0.0100 0.0035
## 4 1.2895 nan 0.0100 0.0036
## 5 1.2815 nan 0.0100 0.0036
## 6 1.2736 nan 0.0100 0.0038
## 7 1.2657 nan 0.0100 0.0037
## 8 1.2589 nan 0.0100 0.0032
## 9 1.2510 nan 0.0100 0.0035
## 10 1.2442 nan 0.0100 0.0028
## 20 1.1779 nan 0.0100 0.0029
## 40 1.0712 nan 0.0100 0.0022
## 60 0.9899 nan 0.0100 0.0012
## 80 0.9283 nan 0.0100 0.0012
## 100 0.8787 nan 0.0100 0.0008
## 120 0.8359 nan 0.0100 0.0007
## 140 0.8008 nan 0.0100 0.0005
## 160 0.7711 nan 0.0100 0.0002
## 180 0.7451 nan 0.0100 0.0002
## 200 0.7209 nan 0.0100 0.0001
## 220 0.6999 nan 0.0100 0.0002
## 240 0.6811 nan 0.0100 0.0001
## 260 0.6637 nan 0.0100 0.0003
## 280 0.6485 nan 0.0100 0.0001
## 300 0.6342 nan 0.0100 0.0001
## 320 0.6199 nan 0.0100 0.0000
## 340 0.6070 nan 0.0100 -0.0001
## 360 0.5946 nan 0.0100 0.0000
## 380 0.5827 nan 0.0100 -0.0001
## 400 0.5725 nan 0.0100 -0.0000
## 420 0.5617 nan 0.0100 -0.0001
## 440 0.5512 nan 0.0100 0.0000
## 460 0.5413 nan 0.0100 -0.0002
## 480 0.5318 nan 0.0100 -0.0001
## 500 0.5227 nan 0.0100 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3124 nan 0.0100 0.0038
## 2 1.3035 nan 0.0100 0.0041
## 3 1.2945 nan 0.0100 0.0043
## 4 1.2857 nan 0.0100 0.0038
## 5 1.2776 nan 0.0100 0.0037
## 6 1.2696 nan 0.0100 0.0037
## 7 1.2618 nan 0.0100 0.0032
## 8 1.2545 nan 0.0100 0.0034
## 9 1.2466 nan 0.0100 0.0038
## 10 1.2393 nan 0.0100 0.0031
## 20 1.1703 nan 0.0100 0.0029
## 40 1.0568 nan 0.0100 0.0025
## 60 0.9682 nan 0.0100 0.0017
## 80 0.9012 nan 0.0100 0.0012
## 100 0.8445 nan 0.0100 0.0011
## 120 0.7997 nan 0.0100 0.0007
## 140 0.7602 nan 0.0100 0.0004
## 160 0.7270 nan 0.0100 0.0004
## 180 0.7005 nan 0.0100 0.0003
## 200 0.6745 nan 0.0100 0.0003
## 220 0.6525 nan 0.0100 0.0004
## 240 0.6311 nan 0.0100 0.0000
## 260 0.6117 nan 0.0100 0.0003
## 280 0.5935 nan 0.0100 0.0001
## 300 0.5765 nan 0.0100 -0.0000
## 320 0.5615 nan 0.0100 0.0001
## 340 0.5463 nan 0.0100 -0.0001
## 360 0.5329 nan 0.0100 0.0000
## 380 0.5206 nan 0.0100 -0.0001
## 400 0.5075 nan 0.0100 0.0002
## 420 0.4956 nan 0.0100 -0.0001
## 440 0.4844 nan 0.0100 -0.0000
## 460 0.4732 nan 0.0100 0.0001
## 480 0.4637 nan 0.0100 -0.0000
## 500 0.4531 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3123 nan 0.0100 0.0037
## 2 1.3039 nan 0.0100 0.0034
## 3 1.2956 nan 0.0100 0.0034
## 4 1.2873 nan 0.0100 0.0038
## 5 1.2791 nan 0.0100 0.0036
## 6 1.2705 nan 0.0100 0.0038
## 7 1.2624 nan 0.0100 0.0035
## 8 1.2542 nan 0.0100 0.0034
## 9 1.2470 nan 0.0100 0.0032
## 10 1.2389 nan 0.0100 0.0034
## 20 1.1665 nan 0.0100 0.0030
## 40 1.0568 nan 0.0100 0.0022
## 60 0.9711 nan 0.0100 0.0015
## 80 0.9028 nan 0.0100 0.0011
## 100 0.8483 nan 0.0100 0.0008
## 120 0.8049 nan 0.0100 0.0006
## 140 0.7676 nan 0.0100 0.0003
## 160 0.7343 nan 0.0100 0.0004
## 180 0.7082 nan 0.0100 0.0003
## 200 0.6841 nan 0.0100 0.0002
## 220 0.6621 nan 0.0100 0.0001
## 240 0.6396 nan 0.0100 0.0001
## 260 0.6217 nan 0.0100 -0.0000
## 280 0.6040 nan 0.0100 0.0001
## 300 0.5890 nan 0.0100 0.0001
## 320 0.5740 nan 0.0100 0.0001
## 340 0.5605 nan 0.0100 0.0000
## 360 0.5463 nan 0.0100 0.0000
## 380 0.5340 nan 0.0100 -0.0001
## 400 0.5212 nan 0.0100 0.0001
## 420 0.5089 nan 0.0100 -0.0002
## 440 0.4971 nan 0.0100 -0.0001
## 460 0.4865 nan 0.0100 -0.0000
## 480 0.4766 nan 0.0100 0.0001
## 500 0.4663 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3122 nan 0.0100 0.0040
## 2 1.3037 nan 0.0100 0.0036
## 3 1.2951 nan 0.0100 0.0038
## 4 1.2873 nan 0.0100 0.0031
## 5 1.2791 nan 0.0100 0.0035
## 6 1.2716 nan 0.0100 0.0035
## 7 1.2639 nan 0.0100 0.0037
## 8 1.2559 nan 0.0100 0.0038
## 9 1.2481 nan 0.0100 0.0036
## 10 1.2416 nan 0.0100 0.0027
## 20 1.1728 nan 0.0100 0.0026
## 40 1.0639 nan 0.0100 0.0022
## 60 0.9786 nan 0.0100 0.0013
## 80 0.9100 nan 0.0100 0.0013
## 100 0.8574 nan 0.0100 0.0009
## 120 0.8130 nan 0.0100 0.0007
## 140 0.7756 nan 0.0100 0.0006
## 160 0.7422 nan 0.0100 0.0006
## 180 0.7144 nan 0.0100 0.0002
## 200 0.6896 nan 0.0100 0.0002
## 220 0.6685 nan 0.0100 0.0001
## 240 0.6479 nan 0.0100 0.0004
## 260 0.6289 nan 0.0100 0.0000
## 280 0.6115 nan 0.0100 0.0000
## 300 0.5955 nan 0.0100 0.0001
## 320 0.5814 nan 0.0100 -0.0001
## 340 0.5675 nan 0.0100 -0.0001
## 360 0.5544 nan 0.0100 -0.0001
## 380 0.5412 nan 0.0100 -0.0001
## 400 0.5304 nan 0.0100 -0.0001
## 420 0.5192 nan 0.0100 0.0001
## 440 0.5083 nan 0.0100 0.0001
## 460 0.4986 nan 0.0100 -0.0002
## 480 0.4886 nan 0.0100 -0.0000
## 500 0.4794 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2466 nan 0.1000 0.0341
## 2 1.1851 nan 0.1000 0.0261
## 3 1.1371 nan 0.1000 0.0178
## 4 1.0881 nan 0.1000 0.0224
## 5 1.0454 nan 0.1000 0.0202
## 6 1.0081 nan 0.1000 0.0161
## 7 0.9755 nan 0.1000 0.0140
## 8 0.9444 nan 0.1000 0.0119
## 9 0.9206 nan 0.1000 0.0103
## 10 0.8935 nan 0.1000 0.0095
## 20 0.7499 nan 0.1000 0.0007
## 40 0.6096 nan 0.1000 0.0015
## 60 0.5238 nan 0.1000 0.0001
## 80 0.4565 nan 0.1000 -0.0006
## 100 0.4006 nan 0.1000 -0.0015
## 120 0.3560 nan 0.1000 -0.0008
## 140 0.3207 nan 0.1000 -0.0011
## 160 0.2900 nan 0.1000 -0.0002
## 180 0.2647 nan 0.1000 -0.0007
## 200 0.2404 nan 0.1000 0.0000
## 220 0.2179 nan 0.1000 0.0004
## 240 0.1978 nan 0.1000 -0.0004
## 260 0.1794 nan 0.1000 -0.0002
## 280 0.1641 nan 0.1000 -0.0006
## 300 0.1487 nan 0.1000 -0.0003
## 320 0.1338 nan 0.1000 -0.0001
## 340 0.1224 nan 0.1000 -0.0003
## 360 0.1124 nan 0.1000 -0.0003
## 380 0.1032 nan 0.1000 -0.0002
## 400 0.0948 nan 0.1000 -0.0000
## 420 0.0876 nan 0.1000 -0.0001
## 440 0.0810 nan 0.1000 -0.0002
## 460 0.0750 nan 0.1000 -0.0001
## 480 0.0697 nan 0.1000 -0.0002
## 500 0.0646 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2457 nan 0.1000 0.0316
## 2 1.1830 nan 0.1000 0.0266
## 3 1.1296 nan 0.1000 0.0240
## 4 1.0807 nan 0.1000 0.0174
## 5 1.0444 nan 0.1000 0.0180
## 6 1.0097 nan 0.1000 0.0120
## 7 0.9782 nan 0.1000 0.0127
## 8 0.9470 nan 0.1000 0.0109
## 9 0.9241 nan 0.1000 0.0099
## 10 0.8993 nan 0.1000 0.0084
## 20 0.7491 nan 0.1000 0.0003
## 40 0.6209 nan 0.1000 -0.0009
## 60 0.5344 nan 0.1000 -0.0001
## 80 0.4732 nan 0.1000 -0.0002
## 100 0.4241 nan 0.1000 -0.0019
## 120 0.3775 nan 0.1000 -0.0005
## 140 0.3362 nan 0.1000 -0.0004
## 160 0.2996 nan 0.1000 -0.0004
## 180 0.2694 nan 0.1000 -0.0002
## 200 0.2450 nan 0.1000 -0.0009
## 220 0.2198 nan 0.1000 0.0002
## 240 0.1993 nan 0.1000 -0.0008
## 260 0.1823 nan 0.1000 -0.0007
## 280 0.1681 nan 0.1000 -0.0006
## 300 0.1545 nan 0.1000 -0.0005
## 320 0.1432 nan 0.1000 -0.0005
## 340 0.1315 nan 0.1000 -0.0003
## 360 0.1211 nan 0.1000 -0.0004
## 380 0.1116 nan 0.1000 -0.0005
## 400 0.1029 nan 0.1000 -0.0003
## 420 0.0955 nan 0.1000 -0.0003
## 440 0.0886 nan 0.1000 -0.0002
## 460 0.0827 nan 0.1000 -0.0002
## 480 0.0761 nan 0.1000 -0.0001
## 500 0.0703 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2498 nan 0.1000 0.0301
## 2 1.1844 nan 0.1000 0.0287
## 3 1.1393 nan 0.1000 0.0164
## 4 1.0858 nan 0.1000 0.0251
## 5 1.0491 nan 0.1000 0.0147
## 6 1.0090 nan 0.1000 0.0157
## 7 0.9732 nan 0.1000 0.0146
## 8 0.9430 nan 0.1000 0.0125
## 9 0.9198 nan 0.1000 0.0078
## 10 0.8973 nan 0.1000 0.0091
## 20 0.7612 nan 0.1000 -0.0007
## 40 0.6302 nan 0.1000 -0.0000
## 60 0.5407 nan 0.1000 -0.0002
## 80 0.4783 nan 0.1000 0.0000
## 100 0.4218 nan 0.1000 -0.0010
## 120 0.3789 nan 0.1000 -0.0011
## 140 0.3393 nan 0.1000 -0.0007
## 160 0.3048 nan 0.1000 -0.0010
## 180 0.2789 nan 0.1000 -0.0014
## 200 0.2567 nan 0.1000 -0.0006
## 220 0.2373 nan 0.1000 -0.0010
## 240 0.2165 nan 0.1000 -0.0006
## 260 0.1981 nan 0.1000 -0.0009
## 280 0.1805 nan 0.1000 -0.0003
## 300 0.1658 nan 0.1000 -0.0005
## 320 0.1527 nan 0.1000 0.0000
## 340 0.1407 nan 0.1000 -0.0004
## 360 0.1301 nan 0.1000 -0.0003
## 380 0.1196 nan 0.1000 -0.0004
## 400 0.1100 nan 0.1000 -0.0002
## 420 0.1015 nan 0.1000 -0.0002
## 440 0.0933 nan 0.1000 -0.0002
## 460 0.0864 nan 0.1000 -0.0003
## 480 0.0799 nan 0.1000 -0.0002
## 500 0.0740 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2460 nan 0.1000 0.0314
## 2 1.1746 nan 0.1000 0.0307
## 3 1.1172 nan 0.1000 0.0226
## 4 1.0678 nan 0.1000 0.0215
## 5 1.0243 nan 0.1000 0.0196
## 6 0.9826 nan 0.1000 0.0169
## 7 0.9441 nan 0.1000 0.0144
## 8 0.9181 nan 0.1000 0.0092
## 9 0.8907 nan 0.1000 0.0096
## 10 0.8694 nan 0.1000 0.0071
## 20 0.7170 nan 0.1000 0.0027
## 40 0.5588 nan 0.1000 -0.0020
## 60 0.4697 nan 0.1000 -0.0002
## 80 0.3926 nan 0.1000 0.0002
## 100 0.3378 nan 0.1000 -0.0012
## 120 0.2923 nan 0.1000 -0.0004
## 140 0.2523 nan 0.1000 -0.0001
## 160 0.2209 nan 0.1000 0.0002
## 180 0.1964 nan 0.1000 -0.0004
## 200 0.1738 nan 0.1000 -0.0004
## 220 0.1548 nan 0.1000 -0.0005
## 240 0.1370 nan 0.1000 -0.0003
## 260 0.1208 nan 0.1000 -0.0001
## 280 0.1083 nan 0.1000 -0.0001
## 300 0.0981 nan 0.1000 -0.0004
## 320 0.0883 nan 0.1000 -0.0004
## 340 0.0794 nan 0.1000 -0.0003
## 360 0.0713 nan 0.1000 -0.0001
## 380 0.0649 nan 0.1000 -0.0003
## 400 0.0597 nan 0.1000 -0.0000
## 420 0.0546 nan 0.1000 -0.0002
## 440 0.0497 nan 0.1000 -0.0002
## 460 0.0452 nan 0.1000 -0.0001
## 480 0.0408 nan 0.1000 -0.0001
## 500 0.0369 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2452 nan 0.1000 0.0345
## 2 1.1755 nan 0.1000 0.0311
## 3 1.1188 nan 0.1000 0.0223
## 4 1.0779 nan 0.1000 0.0189
## 5 1.0334 nan 0.1000 0.0207
## 6 0.9928 nan 0.1000 0.0171
## 7 0.9565 nan 0.1000 0.0156
## 8 0.9266 nan 0.1000 0.0111
## 9 0.8946 nan 0.1000 0.0110
## 10 0.8696 nan 0.1000 0.0079
## 20 0.7127 nan 0.1000 0.0009
## 40 0.5664 nan 0.1000 0.0002
## 60 0.4744 nan 0.1000 -0.0013
## 80 0.4055 nan 0.1000 -0.0001
## 100 0.3510 nan 0.1000 -0.0000
## 120 0.3077 nan 0.1000 -0.0007
## 140 0.2696 nan 0.1000 -0.0003
## 160 0.2345 nan 0.1000 -0.0010
## 180 0.2083 nan 0.1000 -0.0006
## 200 0.1852 nan 0.1000 -0.0007
## 220 0.1661 nan 0.1000 -0.0012
## 240 0.1484 nan 0.1000 -0.0005
## 260 0.1336 nan 0.1000 -0.0004
## 280 0.1188 nan 0.1000 -0.0002
## 300 0.1080 nan 0.1000 -0.0003
## 320 0.0970 nan 0.1000 -0.0001
## 340 0.0881 nan 0.1000 -0.0004
## 360 0.0787 nan 0.1000 0.0000
## 380 0.0709 nan 0.1000 -0.0003
## 400 0.0642 nan 0.1000 -0.0000
## 420 0.0582 nan 0.1000 -0.0001
## 440 0.0526 nan 0.1000 -0.0000
## 460 0.0475 nan 0.1000 -0.0001
## 480 0.0433 nan 0.1000 -0.0001
## 500 0.0390 nan 0.1000 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2384 nan 0.1000 0.0374
## 2 1.1670 nan 0.1000 0.0269
## 3 1.1085 nan 0.1000 0.0227
## 4 1.0553 nan 0.1000 0.0222
## 5 1.0126 nan 0.1000 0.0172
## 6 0.9778 nan 0.1000 0.0154
## 7 0.9428 nan 0.1000 0.0126
## 8 0.9125 nan 0.1000 0.0111
## 9 0.8877 nan 0.1000 0.0097
## 10 0.8667 nan 0.1000 0.0061
## 20 0.7228 nan 0.1000 0.0027
## 40 0.5806 nan 0.1000 -0.0006
## 60 0.4966 nan 0.1000 -0.0021
## 80 0.4272 nan 0.1000 -0.0008
## 100 0.3699 nan 0.1000 -0.0017
## 120 0.3246 nan 0.1000 -0.0004
## 140 0.2824 nan 0.1000 -0.0006
## 160 0.2525 nan 0.1000 -0.0008
## 180 0.2239 nan 0.1000 -0.0007
## 200 0.2012 nan 0.1000 -0.0009
## 220 0.1792 nan 0.1000 -0.0005
## 240 0.1591 nan 0.1000 -0.0001
## 260 0.1421 nan 0.1000 -0.0007
## 280 0.1279 nan 0.1000 -0.0004
## 300 0.1145 nan 0.1000 -0.0007
## 320 0.1028 nan 0.1000 -0.0004
## 340 0.0920 nan 0.1000 -0.0003
## 360 0.0839 nan 0.1000 -0.0004
## 380 0.0769 nan 0.1000 -0.0003
## 400 0.0699 nan 0.1000 -0.0003
## 420 0.0637 nan 0.1000 -0.0004
## 440 0.0576 nan 0.1000 -0.0003
## 460 0.0527 nan 0.1000 -0.0000
## 480 0.0479 nan 0.1000 -0.0003
## 500 0.0438 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2361 nan 0.1000 0.0401
## 2 1.1665 nan 0.1000 0.0298
## 3 1.1087 nan 0.1000 0.0203
## 4 1.0538 nan 0.1000 0.0207
## 5 1.0068 nan 0.1000 0.0184
## 6 0.9611 nan 0.1000 0.0165
## 7 0.9291 nan 0.1000 0.0108
## 8 0.8982 nan 0.1000 0.0118
## 9 0.8722 nan 0.1000 0.0076
## 10 0.8465 nan 0.1000 0.0088
## 20 0.6692 nan 0.1000 0.0036
## 40 0.5076 nan 0.1000 0.0012
## 60 0.4065 nan 0.1000 0.0001
## 80 0.3323 nan 0.1000 -0.0002
## 100 0.2780 nan 0.1000 -0.0005
## 120 0.2296 nan 0.1000 -0.0002
## 140 0.1962 nan 0.1000 -0.0005
## 160 0.1701 nan 0.1000 -0.0001
## 180 0.1474 nan 0.1000 -0.0005
## 200 0.1276 nan 0.1000 -0.0003
## 220 0.1111 nan 0.1000 0.0002
## 240 0.0975 nan 0.1000 -0.0002
## 260 0.0852 nan 0.1000 -0.0002
## 280 0.0754 nan 0.1000 -0.0001
## 300 0.0661 nan 0.1000 -0.0002
## 320 0.0586 nan 0.1000 -0.0003
## 340 0.0520 nan 0.1000 -0.0002
## 360 0.0461 nan 0.1000 -0.0002
## 380 0.0408 nan 0.1000 -0.0002
## 400 0.0367 nan 0.1000 -0.0001
## 420 0.0324 nan 0.1000 -0.0001
## 440 0.0286 nan 0.1000 -0.0000
## 460 0.0255 nan 0.1000 -0.0001
## 480 0.0227 nan 0.1000 -0.0000
## 500 0.0202 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2361 nan 0.1000 0.0369
## 2 1.1675 nan 0.1000 0.0286
## 3 1.1065 nan 0.1000 0.0263
## 4 1.0605 nan 0.1000 0.0192
## 5 1.0169 nan 0.1000 0.0199
## 6 0.9747 nan 0.1000 0.0176
## 7 0.9404 nan 0.1000 0.0130
## 8 0.9103 nan 0.1000 0.0105
## 9 0.8796 nan 0.1000 0.0120
## 10 0.8524 nan 0.1000 0.0091
## 20 0.6867 nan 0.1000 0.0006
## 40 0.5318 nan 0.1000 -0.0024
## 60 0.4401 nan 0.1000 -0.0014
## 80 0.3595 nan 0.1000 -0.0005
## 100 0.3075 nan 0.1000 -0.0005
## 120 0.2630 nan 0.1000 -0.0008
## 140 0.2268 nan 0.1000 -0.0017
## 160 0.1960 nan 0.1000 -0.0004
## 180 0.1690 nan 0.1000 -0.0005
## 200 0.1475 nan 0.1000 -0.0007
## 220 0.1267 nan 0.1000 -0.0006
## 240 0.1100 nan 0.1000 -0.0003
## 260 0.0964 nan 0.1000 -0.0004
## 280 0.0844 nan 0.1000 -0.0006
## 300 0.0737 nan 0.1000 -0.0002
## 320 0.0651 nan 0.1000 -0.0002
## 340 0.0571 nan 0.1000 -0.0001
## 360 0.0504 nan 0.1000 -0.0002
## 380 0.0443 nan 0.1000 -0.0000
## 400 0.0393 nan 0.1000 -0.0001
## 420 0.0354 nan 0.1000 -0.0001
## 440 0.0315 nan 0.1000 -0.0001
## 460 0.0279 nan 0.1000 -0.0000
## 480 0.0251 nan 0.1000 -0.0001
## 500 0.0224 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2285 nan 0.1000 0.0381
## 2 1.1627 nan 0.1000 0.0329
## 3 1.1049 nan 0.1000 0.0245
## 4 1.0575 nan 0.1000 0.0187
## 5 1.0143 nan 0.1000 0.0190
## 6 0.9759 nan 0.1000 0.0167
## 7 0.9405 nan 0.1000 0.0135
## 8 0.9080 nan 0.1000 0.0151
## 9 0.8802 nan 0.1000 0.0088
## 10 0.8533 nan 0.1000 0.0118
## 20 0.6891 nan 0.1000 0.0027
## 40 0.5265 nan 0.1000 -0.0007
## 60 0.4257 nan 0.1000 -0.0006
## 80 0.3565 nan 0.1000 -0.0001
## 100 0.2999 nan 0.1000 -0.0014
## 120 0.2534 nan 0.1000 -0.0005
## 140 0.2190 nan 0.1000 -0.0002
## 160 0.1902 nan 0.1000 -0.0009
## 180 0.1665 nan 0.1000 -0.0006
## 200 0.1451 nan 0.1000 -0.0004
## 220 0.1278 nan 0.1000 -0.0002
## 240 0.1124 nan 0.1000 -0.0002
## 260 0.1000 nan 0.1000 -0.0006
## 280 0.0870 nan 0.1000 -0.0005
## 300 0.0782 nan 0.1000 -0.0003
## 320 0.0698 nan 0.1000 -0.0004
## 340 0.0623 nan 0.1000 -0.0001
## 360 0.0554 nan 0.1000 -0.0003
## 380 0.0495 nan 0.1000 -0.0003
## 400 0.0437 nan 0.1000 -0.0002
## 420 0.0390 nan 0.1000 -0.0001
## 440 0.0345 nan 0.1000 -0.0001
## 460 0.0304 nan 0.1000 -0.0001
## 480 0.0269 nan 0.1000 -0.0001
## 500 0.0242 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3204 nan 0.0010 0.0004
## 2 1.3196 nan 0.0010 0.0004
## 3 1.3187 nan 0.0010 0.0005
## 4 1.3179 nan 0.0010 0.0003
## 5 1.3171 nan 0.0010 0.0004
## 6 1.3163 nan 0.0010 0.0003
## 7 1.3155 nan 0.0010 0.0004
## 8 1.3147 nan 0.0010 0.0004
## 9 1.3138 nan 0.0010 0.0004
## 10 1.3130 nan 0.0010 0.0004
## 20 1.3050 nan 0.0010 0.0003
## 40 1.2891 nan 0.0010 0.0004
## 60 1.2738 nan 0.0010 0.0003
## 80 1.2587 nan 0.0010 0.0003
## 100 1.2442 nan 0.0010 0.0004
## 120 1.2304 nan 0.0010 0.0003
## 140 1.2170 nan 0.0010 0.0003
## 160 1.2037 nan 0.0010 0.0003
## 180 1.1911 nan 0.0010 0.0003
## 200 1.1787 nan 0.0010 0.0003
## 220 1.1667 nan 0.0010 0.0003
## 240 1.1552 nan 0.0010 0.0002
## 260 1.1441 nan 0.0010 0.0002
## 280 1.1331 nan 0.0010 0.0003
## 300 1.1224 nan 0.0010 0.0002
## 320 1.1119 nan 0.0010 0.0002
## 340 1.1017 nan 0.0010 0.0002
## 360 1.0917 nan 0.0010 0.0002
## 380 1.0824 nan 0.0010 0.0002
## 400 1.0729 nan 0.0010 0.0002
## 420 1.0638 nan 0.0010 0.0002
## 440 1.0551 nan 0.0010 0.0001
## 460 1.0464 nan 0.0010 0.0002
## 480 1.0379 nan 0.0010 0.0002
## 500 1.0296 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3204 nan 0.0010 0.0004
## 2 1.3196 nan 0.0010 0.0004
## 3 1.3189 nan 0.0010 0.0004
## 4 1.3181 nan 0.0010 0.0004
## 5 1.3173 nan 0.0010 0.0004
## 6 1.3166 nan 0.0010 0.0003
## 7 1.3157 nan 0.0010 0.0004
## 8 1.3150 nan 0.0010 0.0003
## 9 1.3141 nan 0.0010 0.0004
## 10 1.3134 nan 0.0010 0.0003
## 20 1.3051 nan 0.0010 0.0004
## 40 1.2894 nan 0.0010 0.0003
## 60 1.2741 nan 0.0010 0.0004
## 80 1.2591 nan 0.0010 0.0003
## 100 1.2447 nan 0.0010 0.0003
## 120 1.2308 nan 0.0010 0.0003
## 140 1.2177 nan 0.0010 0.0003
## 160 1.2042 nan 0.0010 0.0003
## 180 1.1913 nan 0.0010 0.0002
## 200 1.1791 nan 0.0010 0.0003
## 220 1.1673 nan 0.0010 0.0002
## 240 1.1553 nan 0.0010 0.0003
## 260 1.1440 nan 0.0010 0.0003
## 280 1.1331 nan 0.0010 0.0002
## 300 1.1224 nan 0.0010 0.0003
## 320 1.1123 nan 0.0010 0.0002
## 340 1.1023 nan 0.0010 0.0002
## 360 1.0924 nan 0.0010 0.0002
## 380 1.0829 nan 0.0010 0.0002
## 400 1.0734 nan 0.0010 0.0002
## 420 1.0639 nan 0.0010 0.0002
## 440 1.0548 nan 0.0010 0.0002
## 460 1.0464 nan 0.0010 0.0002
## 480 1.0383 nan 0.0010 0.0002
## 500 1.0303 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3204 nan 0.0010 0.0003
## 2 1.3196 nan 0.0010 0.0004
## 3 1.3188 nan 0.0010 0.0004
## 4 1.3180 nan 0.0010 0.0003
## 5 1.3173 nan 0.0010 0.0003
## 6 1.3165 nan 0.0010 0.0004
## 7 1.3157 nan 0.0010 0.0003
## 8 1.3149 nan 0.0010 0.0003
## 9 1.3141 nan 0.0010 0.0004
## 10 1.3132 nan 0.0010 0.0004
## 20 1.3050 nan 0.0010 0.0004
## 40 1.2894 nan 0.0010 0.0004
## 60 1.2740 nan 0.0010 0.0003
## 80 1.2590 nan 0.0010 0.0003
## 100 1.2452 nan 0.0010 0.0003
## 120 1.2314 nan 0.0010 0.0003
## 140 1.2180 nan 0.0010 0.0003
## 160 1.2050 nan 0.0010 0.0003
## 180 1.1922 nan 0.0010 0.0003
## 200 1.1801 nan 0.0010 0.0002
## 220 1.1679 nan 0.0010 0.0002
## 240 1.1564 nan 0.0010 0.0003
## 260 1.1451 nan 0.0010 0.0003
## 280 1.1345 nan 0.0010 0.0002
## 300 1.1240 nan 0.0010 0.0002
## 320 1.1137 nan 0.0010 0.0002
## 340 1.1037 nan 0.0010 0.0002
## 360 1.0939 nan 0.0010 0.0002
## 380 1.0845 nan 0.0010 0.0002
## 400 1.0749 nan 0.0010 0.0002
## 420 1.0655 nan 0.0010 0.0002
## 440 1.0567 nan 0.0010 0.0002
## 460 1.0480 nan 0.0010 0.0002
## 480 1.0397 nan 0.0010 0.0002
## 500 1.0313 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3194 nan 0.0010 0.0004
## 3 1.3186 nan 0.0010 0.0003
## 4 1.3178 nan 0.0010 0.0004
## 5 1.3169 nan 0.0010 0.0004
## 6 1.3160 nan 0.0010 0.0004
## 7 1.3151 nan 0.0010 0.0004
## 8 1.3142 nan 0.0010 0.0004
## 9 1.3133 nan 0.0010 0.0004
## 10 1.3125 nan 0.0010 0.0004
## 20 1.3039 nan 0.0010 0.0004
## 40 1.2873 nan 0.0010 0.0004
## 60 1.2711 nan 0.0010 0.0003
## 80 1.2554 nan 0.0010 0.0004
## 100 1.2398 nan 0.0010 0.0004
## 120 1.2251 nan 0.0010 0.0003
## 140 1.2108 nan 0.0010 0.0003
## 160 1.1971 nan 0.0010 0.0003
## 180 1.1834 nan 0.0010 0.0003
## 200 1.1702 nan 0.0010 0.0002
## 220 1.1577 nan 0.0010 0.0003
## 240 1.1456 nan 0.0010 0.0003
## 260 1.1334 nan 0.0010 0.0003
## 280 1.1218 nan 0.0010 0.0002
## 300 1.1105 nan 0.0010 0.0003
## 320 1.0998 nan 0.0010 0.0002
## 340 1.0894 nan 0.0010 0.0002
## 360 1.0791 nan 0.0010 0.0002
## 380 1.0690 nan 0.0010 0.0002
## 400 1.0591 nan 0.0010 0.0002
## 420 1.0495 nan 0.0010 0.0002
## 440 1.0399 nan 0.0010 0.0002
## 460 1.0307 nan 0.0010 0.0002
## 480 1.0217 nan 0.0010 0.0002
## 500 1.0132 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3196 nan 0.0010 0.0003
## 3 1.3187 nan 0.0010 0.0004
## 4 1.3178 nan 0.0010 0.0005
## 5 1.3169 nan 0.0010 0.0005
## 6 1.3159 nan 0.0010 0.0004
## 7 1.3151 nan 0.0010 0.0004
## 8 1.3141 nan 0.0010 0.0004
## 9 1.3132 nan 0.0010 0.0004
## 10 1.3123 nan 0.0010 0.0004
## 20 1.3035 nan 0.0010 0.0004
## 40 1.2868 nan 0.0010 0.0004
## 60 1.2703 nan 0.0010 0.0004
## 80 1.2546 nan 0.0010 0.0003
## 100 1.2395 nan 0.0010 0.0003
## 120 1.2247 nan 0.0010 0.0003
## 140 1.2106 nan 0.0010 0.0003
## 160 1.1970 nan 0.0010 0.0003
## 180 1.1836 nan 0.0010 0.0003
## 200 1.1705 nan 0.0010 0.0003
## 220 1.1579 nan 0.0010 0.0003
## 240 1.1457 nan 0.0010 0.0003
## 260 1.1337 nan 0.0010 0.0003
## 280 1.1218 nan 0.0010 0.0003
## 300 1.1103 nan 0.0010 0.0002
## 320 1.0993 nan 0.0010 0.0002
## 340 1.0885 nan 0.0010 0.0002
## 360 1.0783 nan 0.0010 0.0002
## 380 1.0683 nan 0.0010 0.0002
## 400 1.0586 nan 0.0010 0.0002
## 420 1.0491 nan 0.0010 0.0001
## 440 1.0400 nan 0.0010 0.0002
## 460 1.0309 nan 0.0010 0.0002
## 480 1.0219 nan 0.0010 0.0002
## 500 1.0134 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3195 nan 0.0010 0.0004
## 3 1.3185 nan 0.0010 0.0004
## 4 1.3176 nan 0.0010 0.0004
## 5 1.3167 nan 0.0010 0.0004
## 6 1.3158 nan 0.0010 0.0004
## 7 1.3150 nan 0.0010 0.0003
## 8 1.3141 nan 0.0010 0.0004
## 9 1.3133 nan 0.0010 0.0004
## 10 1.3125 nan 0.0010 0.0003
## 20 1.3040 nan 0.0010 0.0004
## 40 1.2872 nan 0.0010 0.0004
## 60 1.2709 nan 0.0010 0.0003
## 80 1.2551 nan 0.0010 0.0004
## 100 1.2399 nan 0.0010 0.0003
## 120 1.2256 nan 0.0010 0.0004
## 140 1.2117 nan 0.0010 0.0003
## 160 1.1979 nan 0.0010 0.0003
## 180 1.1844 nan 0.0010 0.0002
## 200 1.1718 nan 0.0010 0.0003
## 220 1.1591 nan 0.0010 0.0003
## 240 1.1470 nan 0.0010 0.0003
## 260 1.1350 nan 0.0010 0.0003
## 280 1.1233 nan 0.0010 0.0003
## 300 1.1121 nan 0.0010 0.0003
## 320 1.1013 nan 0.0010 0.0002
## 340 1.0908 nan 0.0010 0.0002
## 360 1.0806 nan 0.0010 0.0002
## 380 1.0705 nan 0.0010 0.0002
## 400 1.0609 nan 0.0010 0.0002
## 420 1.0514 nan 0.0010 0.0002
## 440 1.0420 nan 0.0010 0.0002
## 460 1.0332 nan 0.0010 0.0002
## 480 1.0244 nan 0.0010 0.0002
## 500 1.0159 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3202 nan 0.0010 0.0005
## 2 1.3192 nan 0.0010 0.0005
## 3 1.3183 nan 0.0010 0.0004
## 4 1.3174 nan 0.0010 0.0004
## 5 1.3165 nan 0.0010 0.0004
## 6 1.3155 nan 0.0010 0.0004
## 7 1.3146 nan 0.0010 0.0004
## 8 1.3137 nan 0.0010 0.0004
## 9 1.3127 nan 0.0010 0.0005
## 10 1.3118 nan 0.0010 0.0004
## 20 1.3025 nan 0.0010 0.0004
## 40 1.2846 nan 0.0010 0.0004
## 60 1.2673 nan 0.0010 0.0004
## 80 1.2505 nan 0.0010 0.0004
## 100 1.2345 nan 0.0010 0.0003
## 120 1.2189 nan 0.0010 0.0003
## 140 1.2044 nan 0.0010 0.0003
## 160 1.1896 nan 0.0010 0.0003
## 180 1.1756 nan 0.0010 0.0003
## 200 1.1624 nan 0.0010 0.0002
## 220 1.1491 nan 0.0010 0.0003
## 240 1.1362 nan 0.0010 0.0003
## 260 1.1236 nan 0.0010 0.0002
## 280 1.1116 nan 0.0010 0.0002
## 300 1.1000 nan 0.0010 0.0003
## 320 1.0885 nan 0.0010 0.0002
## 340 1.0774 nan 0.0010 0.0002
## 360 1.0665 nan 0.0010 0.0002
## 380 1.0559 nan 0.0010 0.0002
## 400 1.0457 nan 0.0010 0.0002
## 420 1.0357 nan 0.0010 0.0002
## 440 1.0261 nan 0.0010 0.0002
## 460 1.0168 nan 0.0010 0.0002
## 480 1.0075 nan 0.0010 0.0002
## 500 0.9985 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3194 nan 0.0010 0.0004
## 3 1.3185 nan 0.0010 0.0004
## 4 1.3175 nan 0.0010 0.0005
## 5 1.3166 nan 0.0010 0.0004
## 6 1.3157 nan 0.0010 0.0004
## 7 1.3148 nan 0.0010 0.0004
## 8 1.3139 nan 0.0010 0.0004
## 9 1.3130 nan 0.0010 0.0004
## 10 1.3120 nan 0.0010 0.0004
## 20 1.3027 nan 0.0010 0.0004
## 40 1.2850 nan 0.0010 0.0004
## 60 1.2680 nan 0.0010 0.0004
## 80 1.2514 nan 0.0010 0.0004
## 100 1.2358 nan 0.0010 0.0003
## 120 1.2201 nan 0.0010 0.0003
## 140 1.2049 nan 0.0010 0.0004
## 160 1.1906 nan 0.0010 0.0003
## 180 1.1767 nan 0.0010 0.0003
## 200 1.1633 nan 0.0010 0.0003
## 220 1.1502 nan 0.0010 0.0003
## 240 1.1376 nan 0.0010 0.0002
## 260 1.1254 nan 0.0010 0.0003
## 280 1.1132 nan 0.0010 0.0003
## 300 1.1014 nan 0.0010 0.0002
## 320 1.0901 nan 0.0010 0.0002
## 340 1.0790 nan 0.0010 0.0002
## 360 1.0682 nan 0.0010 0.0002
## 380 1.0575 nan 0.0010 0.0002
## 400 1.0473 nan 0.0010 0.0002
## 420 1.0373 nan 0.0010 0.0002
## 440 1.0278 nan 0.0010 0.0002
## 460 1.0183 nan 0.0010 0.0002
## 480 1.0092 nan 0.0010 0.0002
## 500 1.0003 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3194 nan 0.0010 0.0004
## 3 1.3185 nan 0.0010 0.0004
## 4 1.3176 nan 0.0010 0.0004
## 5 1.3167 nan 0.0010 0.0004
## 6 1.3158 nan 0.0010 0.0005
## 7 1.3149 nan 0.0010 0.0004
## 8 1.3140 nan 0.0010 0.0004
## 9 1.3131 nan 0.0010 0.0004
## 10 1.3121 nan 0.0010 0.0004
## 20 1.3032 nan 0.0010 0.0004
## 40 1.2855 nan 0.0010 0.0003
## 60 1.2688 nan 0.0010 0.0004
## 80 1.2525 nan 0.0010 0.0003
## 100 1.2368 nan 0.0010 0.0003
## 120 1.2217 nan 0.0010 0.0003
## 140 1.2071 nan 0.0010 0.0003
## 160 1.1930 nan 0.0010 0.0003
## 180 1.1792 nan 0.0010 0.0003
## 200 1.1657 nan 0.0010 0.0003
## 220 1.1526 nan 0.0010 0.0003
## 240 1.1400 nan 0.0010 0.0002
## 260 1.1276 nan 0.0010 0.0003
## 280 1.1158 nan 0.0010 0.0003
## 300 1.1043 nan 0.0010 0.0003
## 320 1.0928 nan 0.0010 0.0002
## 340 1.0818 nan 0.0010 0.0003
## 360 1.0714 nan 0.0010 0.0002
## 380 1.0608 nan 0.0010 0.0002
## 400 1.0504 nan 0.0010 0.0002
## 420 1.0405 nan 0.0010 0.0002
## 440 1.0310 nan 0.0010 0.0002
## 460 1.0219 nan 0.0010 0.0002
## 480 1.0127 nan 0.0010 0.0002
## 500 1.0039 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3131 nan 0.0100 0.0036
## 2 1.3056 nan 0.0100 0.0031
## 3 1.2989 nan 0.0100 0.0031
## 4 1.2915 nan 0.0100 0.0033
## 5 1.2844 nan 0.0100 0.0034
## 6 1.2760 nan 0.0100 0.0039
## 7 1.2689 nan 0.0100 0.0030
## 8 1.2617 nan 0.0100 0.0030
## 9 1.2550 nan 0.0100 0.0028
## 10 1.2476 nan 0.0100 0.0033
## 20 1.1810 nan 0.0100 0.0026
## 40 1.0770 nan 0.0100 0.0019
## 60 0.9941 nan 0.0100 0.0015
## 80 0.9291 nan 0.0100 0.0012
## 100 0.8778 nan 0.0100 0.0008
## 120 0.8359 nan 0.0100 0.0005
## 140 0.8006 nan 0.0100 0.0001
## 160 0.7702 nan 0.0100 0.0003
## 180 0.7437 nan 0.0100 0.0003
## 200 0.7208 nan 0.0100 0.0004
## 220 0.7012 nan 0.0100 0.0002
## 240 0.6837 nan 0.0100 0.0001
## 260 0.6676 nan 0.0100 0.0000
## 280 0.6541 nan 0.0100 0.0001
## 300 0.6411 nan 0.0100 0.0001
## 320 0.6286 nan 0.0100 0.0001
## 340 0.6162 nan 0.0100 -0.0001
## 360 0.6051 nan 0.0100 0.0002
## 380 0.5945 nan 0.0100 -0.0001
## 400 0.5843 nan 0.0100 0.0000
## 420 0.5745 nan 0.0100 -0.0002
## 440 0.5655 nan 0.0100 -0.0000
## 460 0.5561 nan 0.0100 -0.0001
## 480 0.5472 nan 0.0100 -0.0000
## 500 0.5394 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3135 nan 0.0100 0.0035
## 2 1.3046 nan 0.0100 0.0040
## 3 1.2961 nan 0.0100 0.0037
## 4 1.2893 nan 0.0100 0.0031
## 5 1.2815 nan 0.0100 0.0038
## 6 1.2734 nan 0.0100 0.0038
## 7 1.2659 nan 0.0100 0.0035
## 8 1.2591 nan 0.0100 0.0031
## 9 1.2524 nan 0.0100 0.0028
## 10 1.2448 nan 0.0100 0.0035
## 20 1.1812 nan 0.0100 0.0022
## 40 1.0738 nan 0.0100 0.0018
## 60 0.9950 nan 0.0100 0.0015
## 80 0.9284 nan 0.0100 0.0010
## 100 0.8757 nan 0.0100 0.0009
## 120 0.8329 nan 0.0100 0.0005
## 140 0.7977 nan 0.0100 0.0006
## 160 0.7680 nan 0.0100 0.0004
## 180 0.7428 nan 0.0100 0.0002
## 200 0.7210 nan 0.0100 0.0003
## 220 0.7031 nan 0.0100 -0.0000
## 240 0.6867 nan 0.0100 -0.0000
## 260 0.6708 nan 0.0100 0.0001
## 280 0.6578 nan 0.0100 -0.0000
## 300 0.6446 nan 0.0100 0.0001
## 320 0.6326 nan 0.0100 0.0000
## 340 0.6218 nan 0.0100 -0.0000
## 360 0.6117 nan 0.0100 0.0000
## 380 0.6011 nan 0.0100 0.0001
## 400 0.5916 nan 0.0100 -0.0001
## 420 0.5826 nan 0.0100 0.0000
## 440 0.5731 nan 0.0100 -0.0000
## 460 0.5654 nan 0.0100 -0.0000
## 480 0.5569 nan 0.0100 -0.0002
## 500 0.5484 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3123 nan 0.0100 0.0040
## 2 1.3049 nan 0.0100 0.0029
## 3 1.2962 nan 0.0100 0.0038
## 4 1.2886 nan 0.0100 0.0038
## 5 1.2812 nan 0.0100 0.0032
## 6 1.2732 nan 0.0100 0.0034
## 7 1.2653 nan 0.0100 0.0035
## 8 1.2582 nan 0.0100 0.0030
## 9 1.2510 nan 0.0100 0.0034
## 10 1.2441 nan 0.0100 0.0035
## 20 1.1805 nan 0.0100 0.0026
## 40 1.0739 nan 0.0100 0.0023
## 60 0.9941 nan 0.0100 0.0013
## 80 0.9314 nan 0.0100 0.0011
## 100 0.8796 nan 0.0100 0.0007
## 120 0.8379 nan 0.0100 0.0008
## 140 0.8038 nan 0.0100 0.0005
## 160 0.7737 nan 0.0100 0.0004
## 180 0.7497 nan 0.0100 0.0003
## 200 0.7289 nan 0.0100 0.0002
## 220 0.7098 nan 0.0100 0.0004
## 240 0.6931 nan 0.0100 0.0001
## 260 0.6774 nan 0.0100 -0.0000
## 280 0.6634 nan 0.0100 -0.0001
## 300 0.6515 nan 0.0100 0.0000
## 320 0.6408 nan 0.0100 -0.0001
## 340 0.6308 nan 0.0100 0.0001
## 360 0.6205 nan 0.0100 0.0001
## 380 0.6108 nan 0.0100 -0.0001
## 400 0.6019 nan 0.0100 0.0000
## 420 0.5924 nan 0.0100 0.0000
## 440 0.5833 nan 0.0100 -0.0000
## 460 0.5731 nan 0.0100 -0.0001
## 480 0.5653 nan 0.0100 -0.0001
## 500 0.5580 nan 0.0100 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3124 nan 0.0100 0.0043
## 2 1.3038 nan 0.0100 0.0041
## 3 1.2951 nan 0.0100 0.0039
## 4 1.2865 nan 0.0100 0.0041
## 5 1.2787 nan 0.0100 0.0037
## 6 1.2698 nan 0.0100 0.0038
## 7 1.2616 nan 0.0100 0.0037
## 8 1.2542 nan 0.0100 0.0034
## 9 1.2465 nan 0.0100 0.0037
## 10 1.2394 nan 0.0100 0.0031
## 20 1.1698 nan 0.0100 0.0030
## 40 1.0587 nan 0.0100 0.0021
## 60 0.9710 nan 0.0100 0.0017
## 80 0.9029 nan 0.0100 0.0009
## 100 0.8483 nan 0.0100 0.0009
## 120 0.8030 nan 0.0100 0.0006
## 140 0.7663 nan 0.0100 0.0004
## 160 0.7354 nan 0.0100 0.0002
## 180 0.7083 nan 0.0100 0.0002
## 200 0.6859 nan 0.0100 0.0002
## 220 0.6637 nan 0.0100 0.0002
## 240 0.6442 nan 0.0100 0.0001
## 260 0.6264 nan 0.0100 0.0002
## 280 0.6114 nan 0.0100 0.0001
## 300 0.5976 nan 0.0100 -0.0001
## 320 0.5828 nan 0.0100 0.0000
## 340 0.5696 nan 0.0100 0.0001
## 360 0.5566 nan 0.0100 0.0000
## 380 0.5453 nan 0.0100 -0.0001
## 400 0.5354 nan 0.0100 -0.0001
## 420 0.5235 nan 0.0100 -0.0001
## 440 0.5137 nan 0.0100 -0.0000
## 460 0.5040 nan 0.0100 -0.0002
## 480 0.4943 nan 0.0100 -0.0000
## 500 0.4852 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3124 nan 0.0100 0.0042
## 2 1.3042 nan 0.0100 0.0041
## 3 1.2958 nan 0.0100 0.0037
## 4 1.2875 nan 0.0100 0.0039
## 5 1.2789 nan 0.0100 0.0040
## 6 1.2705 nan 0.0100 0.0038
## 7 1.2628 nan 0.0100 0.0032
## 8 1.2549 nan 0.0100 0.0034
## 9 1.2478 nan 0.0100 0.0035
## 10 1.2396 nan 0.0100 0.0036
## 20 1.1691 nan 0.0100 0.0027
## 40 1.0552 nan 0.0100 0.0022
## 60 0.9716 nan 0.0100 0.0014
## 80 0.9060 nan 0.0100 0.0012
## 100 0.8526 nan 0.0100 0.0007
## 120 0.8094 nan 0.0100 0.0008
## 140 0.7729 nan 0.0100 0.0007
## 160 0.7428 nan 0.0100 0.0003
## 180 0.7174 nan 0.0100 0.0001
## 200 0.6948 nan 0.0100 0.0002
## 220 0.6748 nan 0.0100 0.0001
## 240 0.6551 nan 0.0100 0.0001
## 260 0.6387 nan 0.0100 0.0003
## 280 0.6239 nan 0.0100 0.0000
## 300 0.6099 nan 0.0100 0.0000
## 320 0.5974 nan 0.0100 -0.0001
## 340 0.5846 nan 0.0100 0.0001
## 360 0.5733 nan 0.0100 0.0000
## 380 0.5626 nan 0.0100 -0.0001
## 400 0.5509 nan 0.0100 -0.0001
## 420 0.5407 nan 0.0100 0.0000
## 440 0.5303 nan 0.0100 -0.0002
## 460 0.5197 nan 0.0100 -0.0001
## 480 0.5102 nan 0.0100 -0.0000
## 500 0.5011 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3119 nan 0.0100 0.0042
## 2 1.3030 nan 0.0100 0.0036
## 3 1.2947 nan 0.0100 0.0035
## 4 1.2864 nan 0.0100 0.0036
## 5 1.2790 nan 0.0100 0.0035
## 6 1.2706 nan 0.0100 0.0037
## 7 1.2632 nan 0.0100 0.0035
## 8 1.2545 nan 0.0100 0.0036
## 9 1.2467 nan 0.0100 0.0035
## 10 1.2399 nan 0.0100 0.0032
## 20 1.1702 nan 0.0100 0.0025
## 40 1.0588 nan 0.0100 0.0022
## 60 0.9763 nan 0.0100 0.0014
## 80 0.9095 nan 0.0100 0.0014
## 100 0.8547 nan 0.0100 0.0009
## 120 0.8119 nan 0.0100 0.0007
## 140 0.7765 nan 0.0100 0.0007
## 160 0.7465 nan 0.0100 0.0003
## 180 0.7196 nan 0.0100 0.0003
## 200 0.6972 nan 0.0100 0.0003
## 220 0.6778 nan 0.0100 0.0001
## 240 0.6605 nan 0.0100 0.0003
## 260 0.6453 nan 0.0100 0.0001
## 280 0.6300 nan 0.0100 0.0001
## 300 0.6160 nan 0.0100 0.0001
## 320 0.6029 nan 0.0100 -0.0002
## 340 0.5910 nan 0.0100 -0.0001
## 360 0.5787 nan 0.0100 0.0001
## 380 0.5681 nan 0.0100 -0.0001
## 400 0.5577 nan 0.0100 -0.0000
## 420 0.5475 nan 0.0100 0.0001
## 440 0.5381 nan 0.0100 -0.0001
## 460 0.5278 nan 0.0100 -0.0001
## 480 0.5184 nan 0.0100 -0.0002
## 500 0.5092 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3118 nan 0.0100 0.0039
## 2 1.3032 nan 0.0100 0.0039
## 3 1.2941 nan 0.0100 0.0041
## 4 1.2853 nan 0.0100 0.0042
## 5 1.2760 nan 0.0100 0.0037
## 6 1.2677 nan 0.0100 0.0039
## 7 1.2588 nan 0.0100 0.0039
## 8 1.2507 nan 0.0100 0.0038
## 9 1.2432 nan 0.0100 0.0036
## 10 1.2357 nan 0.0100 0.0032
## 20 1.1618 nan 0.0100 0.0028
## 40 1.0451 nan 0.0100 0.0020
## 60 0.9574 nan 0.0100 0.0015
## 80 0.8876 nan 0.0100 0.0012
## 100 0.8308 nan 0.0100 0.0009
## 120 0.7837 nan 0.0100 0.0009
## 140 0.7445 nan 0.0100 0.0005
## 160 0.7127 nan 0.0100 0.0001
## 180 0.6835 nan 0.0100 0.0003
## 200 0.6577 nan 0.0100 0.0001
## 220 0.6351 nan 0.0100 0.0003
## 240 0.6147 nan 0.0100 0.0002
## 260 0.5947 nan 0.0100 0.0002
## 280 0.5773 nan 0.0100 0.0001
## 300 0.5605 nan 0.0100 0.0000
## 320 0.5457 nan 0.0100 -0.0000
## 340 0.5309 nan 0.0100 0.0001
## 360 0.5182 nan 0.0100 -0.0000
## 380 0.5066 nan 0.0100 0.0001
## 400 0.4946 nan 0.0100 -0.0001
## 420 0.4833 nan 0.0100 -0.0001
## 440 0.4720 nan 0.0100 -0.0000
## 460 0.4603 nan 0.0100 0.0000
## 480 0.4507 nan 0.0100 -0.0001
## 500 0.4412 nan 0.0100 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3119 nan 0.0100 0.0039
## 2 1.3032 nan 0.0100 0.0036
## 3 1.2942 nan 0.0100 0.0042
## 4 1.2854 nan 0.0100 0.0043
## 5 1.2760 nan 0.0100 0.0040
## 6 1.2669 nan 0.0100 0.0041
## 7 1.2582 nan 0.0100 0.0036
## 8 1.2502 nan 0.0100 0.0036
## 9 1.2429 nan 0.0100 0.0032
## 10 1.2346 nan 0.0100 0.0037
## 20 1.1635 nan 0.0100 0.0030
## 40 1.0476 nan 0.0100 0.0022
## 60 0.9599 nan 0.0100 0.0015
## 80 0.8907 nan 0.0100 0.0013
## 100 0.8334 nan 0.0100 0.0007
## 120 0.7874 nan 0.0100 0.0005
## 140 0.7486 nan 0.0100 0.0003
## 160 0.7170 nan 0.0100 0.0003
## 180 0.6893 nan 0.0100 0.0003
## 200 0.6647 nan 0.0100 0.0002
## 220 0.6437 nan 0.0100 -0.0000
## 240 0.6235 nan 0.0100 0.0001
## 260 0.6056 nan 0.0100 0.0002
## 280 0.5885 nan 0.0100 0.0001
## 300 0.5728 nan 0.0100 -0.0001
## 320 0.5596 nan 0.0100 0.0001
## 340 0.5467 nan 0.0100 -0.0000
## 360 0.5336 nan 0.0100 0.0001
## 380 0.5210 nan 0.0100 -0.0002
## 400 0.5085 nan 0.0100 0.0000
## 420 0.4974 nan 0.0100 0.0001
## 440 0.4865 nan 0.0100 -0.0001
## 460 0.4744 nan 0.0100 0.0000
## 480 0.4642 nan 0.0100 -0.0000
## 500 0.4552 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3117 nan 0.0100 0.0042
## 2 1.3032 nan 0.0100 0.0038
## 3 1.2946 nan 0.0100 0.0039
## 4 1.2859 nan 0.0100 0.0037
## 5 1.2777 nan 0.0100 0.0033
## 6 1.2686 nan 0.0100 0.0037
## 7 1.2608 nan 0.0100 0.0035
## 8 1.2526 nan 0.0100 0.0038
## 9 1.2445 nan 0.0100 0.0035
## 10 1.2367 nan 0.0100 0.0034
## 20 1.1643 nan 0.0100 0.0031
## 40 1.0478 nan 0.0100 0.0022
## 60 0.9598 nan 0.0100 0.0017
## 80 0.8910 nan 0.0100 0.0012
## 100 0.8363 nan 0.0100 0.0006
## 120 0.7915 nan 0.0100 0.0005
## 140 0.7534 nan 0.0100 0.0005
## 160 0.7220 nan 0.0100 0.0006
## 180 0.6960 nan 0.0100 0.0002
## 200 0.6714 nan 0.0100 0.0001
## 220 0.6508 nan 0.0100 0.0001
## 240 0.6315 nan 0.0100 -0.0001
## 260 0.6138 nan 0.0100 0.0002
## 280 0.5967 nan 0.0100 0.0001
## 300 0.5815 nan 0.0100 0.0001
## 320 0.5672 nan 0.0100 -0.0001
## 340 0.5548 nan 0.0100 -0.0001
## 360 0.5425 nan 0.0100 0.0001
## 380 0.5311 nan 0.0100 -0.0001
## 400 0.5199 nan 0.0100 0.0000
## 420 0.5093 nan 0.0100 0.0000
## 440 0.4988 nan 0.0100 0.0001
## 460 0.4876 nan 0.0100 -0.0000
## 480 0.4769 nan 0.0100 -0.0000
## 500 0.4677 nan 0.0100 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2436 nan 0.1000 0.0331
## 2 1.1719 nan 0.1000 0.0327
## 3 1.1169 nan 0.1000 0.0219
## 4 1.0648 nan 0.1000 0.0199
## 5 1.0278 nan 0.1000 0.0159
## 6 0.9878 nan 0.1000 0.0180
## 7 0.9507 nan 0.1000 0.0138
## 8 0.9179 nan 0.1000 0.0117
## 9 0.8921 nan 0.1000 0.0094
## 10 0.8644 nan 0.1000 0.0113
## 20 0.7172 nan 0.1000 0.0037
## 40 0.5838 nan 0.1000 -0.0001
## 60 0.5030 nan 0.1000 -0.0006
## 80 0.4408 nan 0.1000 0.0008
## 100 0.3873 nan 0.1000 -0.0003
## 120 0.3478 nan 0.1000 -0.0019
## 140 0.3101 nan 0.1000 0.0002
## 160 0.2765 nan 0.1000 -0.0003
## 180 0.2507 nan 0.1000 -0.0009
## 200 0.2245 nan 0.1000 -0.0003
## 220 0.2047 nan 0.1000 -0.0000
## 240 0.1841 nan 0.1000 -0.0004
## 260 0.1672 nan 0.1000 -0.0003
## 280 0.1508 nan 0.1000 -0.0004
## 300 0.1365 nan 0.1000 -0.0003
## 320 0.1254 nan 0.1000 0.0001
## 340 0.1129 nan 0.1000 -0.0002
## 360 0.1040 nan 0.1000 0.0000
## 380 0.0948 nan 0.1000 0.0001
## 400 0.0860 nan 0.1000 0.0001
## 420 0.0787 nan 0.1000 -0.0001
## 440 0.0726 nan 0.1000 -0.0003
## 460 0.0664 nan 0.1000 -0.0002
## 480 0.0613 nan 0.1000 -0.0001
## 500 0.0569 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2400 nan 0.1000 0.0331
## 2 1.1701 nan 0.1000 0.0312
## 3 1.1141 nan 0.1000 0.0224
## 4 1.0693 nan 0.1000 0.0215
## 5 1.0268 nan 0.1000 0.0191
## 6 0.9905 nan 0.1000 0.0123
## 7 0.9549 nan 0.1000 0.0143
## 8 0.9303 nan 0.1000 0.0060
## 9 0.9049 nan 0.1000 0.0107
## 10 0.8813 nan 0.1000 0.0080
## 20 0.7258 nan 0.1000 0.0036
## 40 0.6031 nan 0.1000 -0.0012
## 60 0.5170 nan 0.1000 -0.0001
## 80 0.4544 nan 0.1000 -0.0010
## 100 0.4002 nan 0.1000 -0.0003
## 120 0.3515 nan 0.1000 0.0000
## 140 0.3152 nan 0.1000 -0.0005
## 160 0.2830 nan 0.1000 -0.0003
## 180 0.2551 nan 0.1000 -0.0003
## 200 0.2295 nan 0.1000 -0.0007
## 220 0.2091 nan 0.1000 -0.0006
## 240 0.1904 nan 0.1000 -0.0007
## 260 0.1727 nan 0.1000 -0.0003
## 280 0.1565 nan 0.1000 -0.0001
## 300 0.1437 nan 0.1000 -0.0004
## 320 0.1304 nan 0.1000 -0.0002
## 340 0.1208 nan 0.1000 -0.0001
## 360 0.1119 nan 0.1000 0.0001
## 380 0.1038 nan 0.1000 -0.0001
## 400 0.0951 nan 0.1000 -0.0003
## 420 0.0870 nan 0.1000 -0.0005
## 440 0.0800 nan 0.1000 -0.0002
## 460 0.0741 nan 0.1000 -0.0001
## 480 0.0693 nan 0.1000 -0.0001
## 500 0.0634 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2423 nan 0.1000 0.0317
## 2 1.1759 nan 0.1000 0.0272
## 3 1.1235 nan 0.1000 0.0233
## 4 1.0776 nan 0.1000 0.0190
## 5 1.0380 nan 0.1000 0.0173
## 6 0.9965 nan 0.1000 0.0162
## 7 0.9659 nan 0.1000 0.0141
## 8 0.9307 nan 0.1000 0.0136
## 9 0.9049 nan 0.1000 0.0098
## 10 0.8817 nan 0.1000 0.0103
## 20 0.7282 nan 0.1000 0.0036
## 40 0.5988 nan 0.1000 -0.0012
## 60 0.5198 nan 0.1000 -0.0009
## 80 0.4583 nan 0.1000 -0.0017
## 100 0.4085 nan 0.1000 -0.0007
## 120 0.3698 nan 0.1000 0.0001
## 140 0.3329 nan 0.1000 -0.0010
## 160 0.3020 nan 0.1000 -0.0005
## 180 0.2755 nan 0.1000 -0.0003
## 200 0.2487 nan 0.1000 -0.0012
## 220 0.2263 nan 0.1000 -0.0005
## 240 0.2048 nan 0.1000 -0.0005
## 260 0.1860 nan 0.1000 -0.0003
## 280 0.1703 nan 0.1000 -0.0000
## 300 0.1567 nan 0.1000 -0.0005
## 320 0.1444 nan 0.1000 -0.0004
## 340 0.1325 nan 0.1000 -0.0003
## 360 0.1220 nan 0.1000 -0.0005
## 380 0.1112 nan 0.1000 -0.0002
## 400 0.1027 nan 0.1000 -0.0001
## 420 0.0945 nan 0.1000 -0.0000
## 440 0.0878 nan 0.1000 -0.0003
## 460 0.0803 nan 0.1000 -0.0003
## 480 0.0748 nan 0.1000 -0.0003
## 500 0.0698 nan 0.1000 -0.0003
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2412 nan 0.1000 0.0374
## 2 1.1728 nan 0.1000 0.0306
## 3 1.1056 nan 0.1000 0.0285
## 4 1.0541 nan 0.1000 0.0207
## 5 1.0081 nan 0.1000 0.0212
## 6 0.9691 nan 0.1000 0.0161
## 7 0.9348 nan 0.1000 0.0141
## 8 0.9001 nan 0.1000 0.0132
## 9 0.8728 nan 0.1000 0.0106
## 10 0.8433 nan 0.1000 0.0123
## 20 0.6791 nan 0.1000 0.0036
## 40 0.5331 nan 0.1000 0.0013
## 60 0.4468 nan 0.1000 -0.0013
## 80 0.3822 nan 0.1000 -0.0006
## 100 0.3189 nan 0.1000 -0.0003
## 120 0.2758 nan 0.1000 -0.0009
## 140 0.2392 nan 0.1000 -0.0003
## 160 0.2095 nan 0.1000 -0.0004
## 180 0.1843 nan 0.1000 -0.0007
## 200 0.1630 nan 0.1000 -0.0004
## 220 0.1451 nan 0.1000 -0.0000
## 240 0.1300 nan 0.1000 -0.0003
## 260 0.1153 nan 0.1000 -0.0005
## 280 0.1010 nan 0.1000 -0.0001
## 300 0.0893 nan 0.1000 0.0001
## 320 0.0800 nan 0.1000 -0.0001
## 340 0.0721 nan 0.1000 -0.0003
## 360 0.0651 nan 0.1000 -0.0001
## 380 0.0589 nan 0.1000 -0.0001
## 400 0.0531 nan 0.1000 -0.0001
## 420 0.0481 nan 0.1000 -0.0001
## 440 0.0438 nan 0.1000 -0.0001
## 460 0.0393 nan 0.1000 -0.0001
## 480 0.0352 nan 0.1000 -0.0000
## 500 0.0319 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2403 nan 0.1000 0.0369
## 2 1.1657 nan 0.1000 0.0336
## 3 1.1088 nan 0.1000 0.0239
## 4 1.0575 nan 0.1000 0.0199
## 5 1.0073 nan 0.1000 0.0189
## 6 0.9711 nan 0.1000 0.0143
## 7 0.9346 nan 0.1000 0.0143
## 8 0.9008 nan 0.1000 0.0131
## 9 0.8750 nan 0.1000 0.0096
## 10 0.8458 nan 0.1000 0.0114
## 20 0.6868 nan 0.1000 0.0038
## 40 0.5439 nan 0.1000 -0.0001
## 60 0.4617 nan 0.1000 -0.0004
## 80 0.3869 nan 0.1000 -0.0008
## 100 0.3362 nan 0.1000 0.0002
## 120 0.2920 nan 0.1000 -0.0024
## 140 0.2517 nan 0.1000 -0.0009
## 160 0.2223 nan 0.1000 -0.0007
## 180 0.1955 nan 0.1000 -0.0004
## 200 0.1717 nan 0.1000 -0.0007
## 220 0.1521 nan 0.1000 0.0000
## 240 0.1340 nan 0.1000 -0.0002
## 260 0.1206 nan 0.1000 -0.0002
## 280 0.1088 nan 0.1000 -0.0001
## 300 0.0968 nan 0.1000 -0.0003
## 320 0.0864 nan 0.1000 -0.0002
## 340 0.0769 nan 0.1000 -0.0000
## 360 0.0690 nan 0.1000 -0.0002
## 380 0.0633 nan 0.1000 -0.0002
## 400 0.0570 nan 0.1000 -0.0002
## 420 0.0522 nan 0.1000 -0.0003
## 440 0.0470 nan 0.1000 -0.0001
## 460 0.0421 nan 0.1000 -0.0002
## 480 0.0378 nan 0.1000 -0.0001
## 500 0.0340 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2396 nan 0.1000 0.0396
## 2 1.1711 nan 0.1000 0.0288
## 3 1.1114 nan 0.1000 0.0274
## 4 1.0533 nan 0.1000 0.0265
## 5 1.0075 nan 0.1000 0.0216
## 6 0.9690 nan 0.1000 0.0170
## 7 0.9355 nan 0.1000 0.0131
## 8 0.9022 nan 0.1000 0.0121
## 9 0.8735 nan 0.1000 0.0117
## 10 0.8495 nan 0.1000 0.0109
## 20 0.7045 nan 0.1000 0.0019
## 40 0.5598 nan 0.1000 -0.0003
## 60 0.4729 nan 0.1000 -0.0021
## 80 0.4054 nan 0.1000 -0.0011
## 100 0.3555 nan 0.1000 -0.0005
## 120 0.3114 nan 0.1000 -0.0012
## 140 0.2722 nan 0.1000 -0.0008
## 160 0.2413 nan 0.1000 -0.0012
## 180 0.2133 nan 0.1000 -0.0003
## 200 0.1898 nan 0.1000 -0.0008
## 220 0.1686 nan 0.1000 -0.0006
## 240 0.1499 nan 0.1000 -0.0004
## 260 0.1341 nan 0.1000 -0.0003
## 280 0.1196 nan 0.1000 -0.0002
## 300 0.1076 nan 0.1000 -0.0002
## 320 0.0978 nan 0.1000 -0.0003
## 340 0.0871 nan 0.1000 -0.0003
## 360 0.0780 nan 0.1000 -0.0001
## 380 0.0705 nan 0.1000 -0.0002
## 400 0.0626 nan 0.1000 -0.0003
## 420 0.0566 nan 0.1000 -0.0003
## 440 0.0516 nan 0.1000 -0.0001
## 460 0.0465 nan 0.1000 -0.0001
## 480 0.0429 nan 0.1000 -0.0001
## 500 0.0388 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2250 nan 0.1000 0.0400
## 2 1.1572 nan 0.1000 0.0306
## 3 1.0904 nan 0.1000 0.0275
## 4 1.0394 nan 0.1000 0.0202
## 5 0.9936 nan 0.1000 0.0192
## 6 0.9503 nan 0.1000 0.0179
## 7 0.9198 nan 0.1000 0.0117
## 8 0.8849 nan 0.1000 0.0126
## 9 0.8546 nan 0.1000 0.0119
## 10 0.8336 nan 0.1000 0.0076
## 20 0.6669 nan 0.1000 0.0013
## 40 0.5028 nan 0.1000 -0.0004
## 60 0.4057 nan 0.1000 0.0008
## 80 0.3339 nan 0.1000 -0.0004
## 100 0.2793 nan 0.1000 -0.0008
## 120 0.2359 nan 0.1000 -0.0003
## 140 0.1971 nan 0.1000 -0.0003
## 160 0.1673 nan 0.1000 -0.0003
## 180 0.1440 nan 0.1000 -0.0001
## 200 0.1229 nan 0.1000 -0.0003
## 220 0.1066 nan 0.1000 -0.0001
## 240 0.0922 nan 0.1000 -0.0001
## 260 0.0803 nan 0.1000 -0.0001
## 280 0.0693 nan 0.1000 -0.0002
## 300 0.0607 nan 0.1000 -0.0001
## 320 0.0536 nan 0.1000 -0.0002
## 340 0.0469 nan 0.1000 0.0000
## 360 0.0407 nan 0.1000 -0.0001
## 380 0.0362 nan 0.1000 -0.0001
## 400 0.0322 nan 0.1000 -0.0000
## 420 0.0284 nan 0.1000 -0.0000
## 440 0.0250 nan 0.1000 -0.0001
## 460 0.0221 nan 0.1000 -0.0000
## 480 0.0191 nan 0.1000 -0.0000
## 500 0.0166 nan 0.1000 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2260 nan 0.1000 0.0424
## 2 1.1547 nan 0.1000 0.0328
## 3 1.0917 nan 0.1000 0.0276
## 4 1.0383 nan 0.1000 0.0232
## 5 0.9924 nan 0.1000 0.0199
## 6 0.9546 nan 0.1000 0.0165
## 7 0.9212 nan 0.1000 0.0088
## 8 0.8891 nan 0.1000 0.0127
## 9 0.8605 nan 0.1000 0.0099
## 10 0.8311 nan 0.1000 0.0102
## 20 0.6672 nan 0.1000 0.0010
## 40 0.5128 nan 0.1000 0.0010
## 60 0.4136 nan 0.1000 -0.0005
## 80 0.3448 nan 0.1000 0.0008
## 100 0.2927 nan 0.1000 -0.0013
## 120 0.2409 nan 0.1000 -0.0002
## 140 0.2067 nan 0.1000 -0.0004
## 160 0.1750 nan 0.1000 -0.0008
## 180 0.1503 nan 0.1000 -0.0005
## 200 0.1301 nan 0.1000 -0.0005
## 220 0.1137 nan 0.1000 -0.0005
## 240 0.0973 nan 0.1000 -0.0004
## 260 0.0839 nan 0.1000 -0.0003
## 280 0.0725 nan 0.1000 0.0002
## 300 0.0629 nan 0.1000 -0.0001
## 320 0.0558 nan 0.1000 -0.0000
## 340 0.0492 nan 0.1000 -0.0001
## 360 0.0437 nan 0.1000 -0.0002
## 380 0.0385 nan 0.1000 -0.0001
## 400 0.0343 nan 0.1000 -0.0002
## 420 0.0302 nan 0.1000 -0.0000
## 440 0.0266 nan 0.1000 -0.0000
## 460 0.0233 nan 0.1000 -0.0000
## 480 0.0205 nan 0.1000 -0.0000
## 500 0.0181 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2337 nan 0.1000 0.0382
## 2 1.1610 nan 0.1000 0.0312
## 3 1.0937 nan 0.1000 0.0269
## 4 1.0439 nan 0.1000 0.0224
## 5 0.9954 nan 0.1000 0.0192
## 6 0.9569 nan 0.1000 0.0150
## 7 0.9237 nan 0.1000 0.0131
## 8 0.8890 nan 0.1000 0.0119
## 9 0.8635 nan 0.1000 0.0097
## 10 0.8387 nan 0.1000 0.0087
## 20 0.6812 nan 0.1000 0.0029
## 40 0.5281 nan 0.1000 -0.0007
## 60 0.4291 nan 0.1000 -0.0010
## 80 0.3561 nan 0.1000 0.0002
## 100 0.3033 nan 0.1000 -0.0001
## 120 0.2546 nan 0.1000 -0.0004
## 140 0.2191 nan 0.1000 -0.0005
## 160 0.1897 nan 0.1000 -0.0013
## 180 0.1620 nan 0.1000 -0.0000
## 200 0.1396 nan 0.1000 0.0000
## 220 0.1208 nan 0.1000 -0.0001
## 240 0.1044 nan 0.1000 -0.0001
## 260 0.0920 nan 0.1000 -0.0004
## 280 0.0803 nan 0.1000 -0.0006
## 300 0.0710 nan 0.1000 -0.0001
## 320 0.0627 nan 0.1000 -0.0002
## 340 0.0558 nan 0.1000 -0.0001
## 360 0.0493 nan 0.1000 -0.0002
## 380 0.0434 nan 0.1000 -0.0001
## 400 0.0385 nan 0.1000 -0.0001
## 420 0.0341 nan 0.1000 -0.0000
## 440 0.0303 nan 0.1000 -0.0001
## 460 0.0267 nan 0.1000 -0.0001
## 480 0.0239 nan 0.1000 -0.0001
## 500 0.0213 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3180 nan 0.0010 0.0004
## 4 1.3171 nan 0.0010 0.0004
## 5 1.3163 nan 0.0010 0.0004
## 6 1.3154 nan 0.0010 0.0004
## 7 1.3145 nan 0.0010 0.0004
## 8 1.3137 nan 0.0010 0.0004
## 9 1.3129 nan 0.0010 0.0003
## 10 1.3119 nan 0.0010 0.0004
## 20 1.3033 nan 0.0010 0.0003
## 40 1.2868 nan 0.0010 0.0004
## 60 1.2710 nan 0.0010 0.0004
## 80 1.2558 nan 0.0010 0.0003
## 100 1.2412 nan 0.0010 0.0003
## 120 1.2271 nan 0.0010 0.0003
## 140 1.2133 nan 0.0010 0.0003
## 160 1.1999 nan 0.0010 0.0003
## 180 1.1873 nan 0.0010 0.0003
## 200 1.1747 nan 0.0010 0.0002
## 220 1.1626 nan 0.0010 0.0002
## 240 1.1507 nan 0.0010 0.0002
## 260 1.1392 nan 0.0010 0.0002
## 280 1.1281 nan 0.0010 0.0003
## 300 1.1172 nan 0.0010 0.0003
## 320 1.1064 nan 0.0010 0.0002
## 340 1.0960 nan 0.0010 0.0002
## 360 1.0858 nan 0.0010 0.0002
## 380 1.0761 nan 0.0010 0.0002
## 400 1.0664 nan 0.0010 0.0002
## 420 1.0571 nan 0.0010 0.0002
## 440 1.0481 nan 0.0010 0.0002
## 460 1.0394 nan 0.0010 0.0002
## 480 1.0308 nan 0.0010 0.0002
## 500 1.0224 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3199 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3181 nan 0.0010 0.0004
## 4 1.3172 nan 0.0010 0.0004
## 5 1.3164 nan 0.0010 0.0004
## 6 1.3155 nan 0.0010 0.0004
## 7 1.3146 nan 0.0010 0.0004
## 8 1.3138 nan 0.0010 0.0003
## 9 1.3129 nan 0.0010 0.0004
## 10 1.3121 nan 0.0010 0.0003
## 20 1.3039 nan 0.0010 0.0004
## 40 1.2877 nan 0.0010 0.0003
## 60 1.2720 nan 0.0010 0.0003
## 80 1.2568 nan 0.0010 0.0004
## 100 1.2420 nan 0.0010 0.0003
## 120 1.2275 nan 0.0010 0.0003
## 140 1.2135 nan 0.0010 0.0003
## 160 1.2000 nan 0.0010 0.0003
## 180 1.1870 nan 0.0010 0.0003
## 200 1.1744 nan 0.0010 0.0002
## 220 1.1620 nan 0.0010 0.0003
## 240 1.1499 nan 0.0010 0.0003
## 260 1.1383 nan 0.0010 0.0002
## 280 1.1269 nan 0.0010 0.0002
## 300 1.1162 nan 0.0010 0.0002
## 320 1.1057 nan 0.0010 0.0002
## 340 1.0956 nan 0.0010 0.0002
## 360 1.0855 nan 0.0010 0.0002
## 380 1.0757 nan 0.0010 0.0002
## 400 1.0661 nan 0.0010 0.0002
## 420 1.0567 nan 0.0010 0.0002
## 440 1.0476 nan 0.0010 0.0002
## 460 1.0389 nan 0.0010 0.0002
## 480 1.0302 nan 0.0010 0.0002
## 500 1.0218 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3181 nan 0.0010 0.0004
## 4 1.3172 nan 0.0010 0.0003
## 5 1.3164 nan 0.0010 0.0004
## 6 1.3156 nan 0.0010 0.0003
## 7 1.3146 nan 0.0010 0.0004
## 8 1.3138 nan 0.0010 0.0004
## 9 1.3129 nan 0.0010 0.0004
## 10 1.3121 nan 0.0010 0.0004
## 20 1.3039 nan 0.0010 0.0003
## 40 1.2876 nan 0.0010 0.0004
## 60 1.2716 nan 0.0010 0.0003
## 80 1.2564 nan 0.0010 0.0003
## 100 1.2417 nan 0.0010 0.0003
## 120 1.2274 nan 0.0010 0.0003
## 140 1.2139 nan 0.0010 0.0003
## 160 1.2007 nan 0.0010 0.0003
## 180 1.1877 nan 0.0010 0.0003
## 200 1.1752 nan 0.0010 0.0003
## 220 1.1629 nan 0.0010 0.0003
## 240 1.1512 nan 0.0010 0.0003
## 260 1.1397 nan 0.0010 0.0003
## 280 1.1286 nan 0.0010 0.0003
## 300 1.1178 nan 0.0010 0.0002
## 320 1.1073 nan 0.0010 0.0002
## 340 1.0971 nan 0.0010 0.0002
## 360 1.0870 nan 0.0010 0.0002
## 380 1.0774 nan 0.0010 0.0002
## 400 1.0681 nan 0.0010 0.0002
## 420 1.0587 nan 0.0010 0.0002
## 440 1.0496 nan 0.0010 0.0002
## 460 1.0408 nan 0.0010 0.0002
## 480 1.0324 nan 0.0010 0.0002
## 500 1.0241 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3197 nan 0.0010 0.0004
## 2 1.3188 nan 0.0010 0.0004
## 3 1.3179 nan 0.0010 0.0004
## 4 1.3170 nan 0.0010 0.0004
## 5 1.3162 nan 0.0010 0.0004
## 6 1.3153 nan 0.0010 0.0004
## 7 1.3144 nan 0.0010 0.0004
## 8 1.3135 nan 0.0010 0.0004
## 9 1.3126 nan 0.0010 0.0004
## 10 1.3116 nan 0.0010 0.0004
## 20 1.3026 nan 0.0010 0.0004
## 40 1.2850 nan 0.0010 0.0004
## 60 1.2685 nan 0.0010 0.0003
## 80 1.2525 nan 0.0010 0.0003
## 100 1.2367 nan 0.0010 0.0003
## 120 1.2215 nan 0.0010 0.0003
## 140 1.2070 nan 0.0010 0.0003
## 160 1.1928 nan 0.0010 0.0003
## 180 1.1793 nan 0.0010 0.0003
## 200 1.1656 nan 0.0010 0.0003
## 220 1.1528 nan 0.0010 0.0002
## 240 1.1403 nan 0.0010 0.0002
## 260 1.1280 nan 0.0010 0.0003
## 280 1.1162 nan 0.0010 0.0002
## 300 1.1048 nan 0.0010 0.0002
## 320 1.0933 nan 0.0010 0.0002
## 340 1.0823 nan 0.0010 0.0002
## 360 1.0717 nan 0.0010 0.0002
## 380 1.0611 nan 0.0010 0.0002
## 400 1.0514 nan 0.0010 0.0002
## 420 1.0416 nan 0.0010 0.0002
## 440 1.0319 nan 0.0010 0.0002
## 460 1.0225 nan 0.0010 0.0002
## 480 1.0133 nan 0.0010 0.0002
## 500 1.0045 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3197 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3180 nan 0.0010 0.0004
## 4 1.3170 nan 0.0010 0.0004
## 5 1.3161 nan 0.0010 0.0004
## 6 1.3153 nan 0.0010 0.0003
## 7 1.3144 nan 0.0010 0.0004
## 8 1.3134 nan 0.0010 0.0004
## 9 1.3125 nan 0.0010 0.0004
## 10 1.3117 nan 0.0010 0.0004
## 20 1.3028 nan 0.0010 0.0004
## 40 1.2858 nan 0.0010 0.0004
## 60 1.2694 nan 0.0010 0.0004
## 80 1.2532 nan 0.0010 0.0004
## 100 1.2372 nan 0.0010 0.0004
## 120 1.2222 nan 0.0010 0.0003
## 140 1.2076 nan 0.0010 0.0003
## 160 1.1936 nan 0.0010 0.0003
## 180 1.1798 nan 0.0010 0.0003
## 200 1.1666 nan 0.0010 0.0003
## 220 1.1536 nan 0.0010 0.0003
## 240 1.1409 nan 0.0010 0.0003
## 260 1.1285 nan 0.0010 0.0003
## 280 1.1169 nan 0.0010 0.0003
## 300 1.1053 nan 0.0010 0.0003
## 320 1.0941 nan 0.0010 0.0002
## 340 1.0831 nan 0.0010 0.0002
## 360 1.0724 nan 0.0010 0.0002
## 380 1.0623 nan 0.0010 0.0002
## 400 1.0524 nan 0.0010 0.0002
## 420 1.0427 nan 0.0010 0.0002
## 440 1.0334 nan 0.0010 0.0002
## 460 1.0242 nan 0.0010 0.0002
## 480 1.0153 nan 0.0010 0.0002
## 500 1.0066 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3197 nan 0.0010 0.0005
## 2 1.3188 nan 0.0010 0.0004
## 3 1.3179 nan 0.0010 0.0004
## 4 1.3170 nan 0.0010 0.0004
## 5 1.3160 nan 0.0010 0.0004
## 6 1.3151 nan 0.0010 0.0004
## 7 1.3143 nan 0.0010 0.0004
## 8 1.3134 nan 0.0010 0.0004
## 9 1.3125 nan 0.0010 0.0005
## 10 1.3116 nan 0.0010 0.0004
## 20 1.3029 nan 0.0010 0.0004
## 40 1.2858 nan 0.0010 0.0004
## 60 1.2690 nan 0.0010 0.0004
## 80 1.2531 nan 0.0010 0.0003
## 100 1.2375 nan 0.0010 0.0003
## 120 1.2225 nan 0.0010 0.0004
## 140 1.2080 nan 0.0010 0.0003
## 160 1.1939 nan 0.0010 0.0003
## 180 1.1802 nan 0.0010 0.0003
## 200 1.1671 nan 0.0010 0.0003
## 220 1.1544 nan 0.0010 0.0002
## 240 1.1420 nan 0.0010 0.0003
## 260 1.1301 nan 0.0010 0.0003
## 280 1.1181 nan 0.0010 0.0003
## 300 1.1065 nan 0.0010 0.0002
## 320 1.0954 nan 0.0010 0.0002
## 340 1.0846 nan 0.0010 0.0003
## 360 1.0741 nan 0.0010 0.0002
## 380 1.0639 nan 0.0010 0.0002
## 400 1.0540 nan 0.0010 0.0002
## 420 1.0444 nan 0.0010 0.0002
## 440 1.0350 nan 0.0010 0.0002
## 460 1.0259 nan 0.0010 0.0002
## 480 1.0170 nan 0.0010 0.0002
## 500 1.0083 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3196 nan 0.0010 0.0004
## 2 1.3187 nan 0.0010 0.0004
## 3 1.3177 nan 0.0010 0.0004
## 4 1.3168 nan 0.0010 0.0004
## 5 1.3158 nan 0.0010 0.0004
## 6 1.3148 nan 0.0010 0.0004
## 7 1.3139 nan 0.0010 0.0005
## 8 1.3130 nan 0.0010 0.0004
## 9 1.3120 nan 0.0010 0.0004
## 10 1.3111 nan 0.0010 0.0004
## 20 1.3018 nan 0.0010 0.0003
## 40 1.2835 nan 0.0010 0.0004
## 60 1.2659 nan 0.0010 0.0004
## 80 1.2492 nan 0.0010 0.0003
## 100 1.2326 nan 0.0010 0.0004
## 120 1.2166 nan 0.0010 0.0003
## 140 1.2012 nan 0.0010 0.0004
## 160 1.1864 nan 0.0010 0.0003
## 180 1.1717 nan 0.0010 0.0003
## 200 1.1579 nan 0.0010 0.0003
## 220 1.1446 nan 0.0010 0.0003
## 240 1.1313 nan 0.0010 0.0003
## 260 1.1185 nan 0.0010 0.0003
## 280 1.1062 nan 0.0010 0.0002
## 300 1.0942 nan 0.0010 0.0002
## 320 1.0824 nan 0.0010 0.0003
## 340 1.0710 nan 0.0010 0.0002
## 360 1.0597 nan 0.0010 0.0002
## 380 1.0491 nan 0.0010 0.0002
## 400 1.0387 nan 0.0010 0.0002
## 420 1.0284 nan 0.0010 0.0002
## 440 1.0186 nan 0.0010 0.0002
## 460 1.0088 nan 0.0010 0.0002
## 480 0.9992 nan 0.0010 0.0002
## 500 0.9900 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3197 nan 0.0010 0.0004
## 2 1.3188 nan 0.0010 0.0004
## 3 1.3178 nan 0.0010 0.0004
## 4 1.3169 nan 0.0010 0.0004
## 5 1.3159 nan 0.0010 0.0004
## 6 1.3150 nan 0.0010 0.0004
## 7 1.3141 nan 0.0010 0.0004
## 8 1.3132 nan 0.0010 0.0004
## 9 1.3123 nan 0.0010 0.0004
## 10 1.3114 nan 0.0010 0.0004
## 20 1.3020 nan 0.0010 0.0004
## 40 1.2840 nan 0.0010 0.0004
## 60 1.2667 nan 0.0010 0.0004
## 80 1.2501 nan 0.0010 0.0004
## 100 1.2341 nan 0.0010 0.0004
## 120 1.2186 nan 0.0010 0.0003
## 140 1.2034 nan 0.0010 0.0003
## 160 1.1885 nan 0.0010 0.0004
## 180 1.1743 nan 0.0010 0.0003
## 200 1.1607 nan 0.0010 0.0003
## 220 1.1473 nan 0.0010 0.0003
## 240 1.1342 nan 0.0010 0.0003
## 260 1.1216 nan 0.0010 0.0003
## 280 1.1093 nan 0.0010 0.0003
## 300 1.0976 nan 0.0010 0.0003
## 320 1.0861 nan 0.0010 0.0003
## 340 1.0752 nan 0.0010 0.0002
## 360 1.0644 nan 0.0010 0.0002
## 380 1.0538 nan 0.0010 0.0002
## 400 1.0434 nan 0.0010 0.0002
## 420 1.0333 nan 0.0010 0.0002
## 440 1.0234 nan 0.0010 0.0002
## 460 1.0135 nan 0.0010 0.0002
## 480 1.0042 nan 0.0010 0.0002
## 500 0.9952 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3197 nan 0.0010 0.0004
## 2 1.3188 nan 0.0010 0.0004
## 3 1.3178 nan 0.0010 0.0005
## 4 1.3169 nan 0.0010 0.0004
## 5 1.3160 nan 0.0010 0.0004
## 6 1.3150 nan 0.0010 0.0004
## 7 1.3141 nan 0.0010 0.0004
## 8 1.3131 nan 0.0010 0.0004
## 9 1.3122 nan 0.0010 0.0004
## 10 1.3113 nan 0.0010 0.0004
## 20 1.3022 nan 0.0010 0.0004
## 40 1.2842 nan 0.0010 0.0004
## 60 1.2672 nan 0.0010 0.0004
## 80 1.2506 nan 0.0010 0.0004
## 100 1.2345 nan 0.0010 0.0004
## 120 1.2189 nan 0.0010 0.0003
## 140 1.2038 nan 0.0010 0.0003
## 160 1.1891 nan 0.0010 0.0003
## 180 1.1749 nan 0.0010 0.0003
## 200 1.1612 nan 0.0010 0.0003
## 220 1.1481 nan 0.0010 0.0003
## 240 1.1354 nan 0.0010 0.0003
## 260 1.1229 nan 0.0010 0.0003
## 280 1.1108 nan 0.0010 0.0003
## 300 1.0989 nan 0.0010 0.0002
## 320 1.0874 nan 0.0010 0.0002
## 340 1.0762 nan 0.0010 0.0002
## 360 1.0654 nan 0.0010 0.0003
## 380 1.0547 nan 0.0010 0.0002
## 400 1.0444 nan 0.0010 0.0002
## 420 1.0344 nan 0.0010 0.0002
## 440 1.0245 nan 0.0010 0.0002
## 460 1.0150 nan 0.0010 0.0002
## 480 1.0057 nan 0.0010 0.0002
## 500 0.9967 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3117 nan 0.0100 0.0040
## 2 1.3035 nan 0.0100 0.0039
## 3 1.2956 nan 0.0100 0.0038
## 4 1.2875 nan 0.0100 0.0033
## 5 1.2794 nan 0.0100 0.0035
## 6 1.2715 nan 0.0100 0.0037
## 7 1.2634 nan 0.0100 0.0033
## 8 1.2567 nan 0.0100 0.0030
## 9 1.2492 nan 0.0100 0.0035
## 10 1.2422 nan 0.0100 0.0031
## 20 1.1765 nan 0.0100 0.0030
## 40 1.0656 nan 0.0100 0.0022
## 60 0.9816 nan 0.0100 0.0013
## 80 0.9162 nan 0.0100 0.0010
## 100 0.8624 nan 0.0100 0.0010
## 120 0.8194 nan 0.0100 0.0006
## 140 0.7824 nan 0.0100 0.0006
## 160 0.7522 nan 0.0100 0.0002
## 180 0.7261 nan 0.0100 0.0003
## 200 0.7033 nan 0.0100 0.0002
## 220 0.6842 nan 0.0100 0.0002
## 240 0.6664 nan 0.0100 0.0000
## 260 0.6507 nan 0.0100 0.0001
## 280 0.6369 nan 0.0100 0.0001
## 300 0.6232 nan 0.0100 0.0000
## 320 0.6113 nan 0.0100 -0.0001
## 340 0.5994 nan 0.0100 0.0001
## 360 0.5888 nan 0.0100 -0.0000
## 380 0.5790 nan 0.0100 -0.0001
## 400 0.5696 nan 0.0100 0.0000
## 420 0.5605 nan 0.0100 -0.0000
## 440 0.5518 nan 0.0100 0.0000
## 460 0.5426 nan 0.0100 -0.0001
## 480 0.5336 nan 0.0100 -0.0000
## 500 0.5255 nan 0.0100 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3126 nan 0.0100 0.0037
## 2 1.3044 nan 0.0100 0.0039
## 3 1.2961 nan 0.0100 0.0037
## 4 1.2885 nan 0.0100 0.0033
## 5 1.2804 nan 0.0100 0.0038
## 6 1.2727 nan 0.0100 0.0037
## 7 1.2648 nan 0.0100 0.0040
## 8 1.2580 nan 0.0100 0.0034
## 9 1.2504 nan 0.0100 0.0034
## 10 1.2431 nan 0.0100 0.0032
## 20 1.1762 nan 0.0100 0.0030
## 40 1.0685 nan 0.0100 0.0020
## 60 0.9842 nan 0.0100 0.0017
## 80 0.9164 nan 0.0100 0.0014
## 100 0.8655 nan 0.0100 0.0011
## 120 0.8213 nan 0.0100 0.0007
## 140 0.7872 nan 0.0100 0.0004
## 160 0.7553 nan 0.0100 0.0006
## 180 0.7306 nan 0.0100 0.0005
## 200 0.7083 nan 0.0100 0.0003
## 220 0.6891 nan 0.0100 0.0002
## 240 0.6717 nan 0.0100 0.0002
## 260 0.6561 nan 0.0100 0.0001
## 280 0.6423 nan 0.0100 0.0001
## 300 0.6290 nan 0.0100 0.0000
## 320 0.6169 nan 0.0100 0.0001
## 340 0.6054 nan 0.0100 0.0002
## 360 0.5942 nan 0.0100 0.0001
## 380 0.5841 nan 0.0100 0.0000
## 400 0.5752 nan 0.0100 -0.0000
## 420 0.5660 nan 0.0100 -0.0000
## 440 0.5575 nan 0.0100 0.0001
## 460 0.5483 nan 0.0100 -0.0001
## 480 0.5404 nan 0.0100 0.0000
## 500 0.5324 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3114 nan 0.0100 0.0039
## 2 1.3036 nan 0.0100 0.0037
## 3 1.2950 nan 0.0100 0.0040
## 4 1.2872 nan 0.0100 0.0038
## 5 1.2795 nan 0.0100 0.0035
## 6 1.2717 nan 0.0100 0.0035
## 7 1.2640 nan 0.0100 0.0038
## 8 1.2567 nan 0.0100 0.0034
## 9 1.2489 nan 0.0100 0.0034
## 10 1.2415 nan 0.0100 0.0032
## 20 1.1739 nan 0.0100 0.0022
## 40 1.0672 nan 0.0100 0.0020
## 60 0.9831 nan 0.0100 0.0016
## 80 0.9191 nan 0.0100 0.0011
## 100 0.8649 nan 0.0100 0.0009
## 120 0.8228 nan 0.0100 0.0007
## 140 0.7885 nan 0.0100 0.0006
## 160 0.7607 nan 0.0100 0.0003
## 180 0.7354 nan 0.0100 0.0002
## 200 0.7132 nan 0.0100 0.0002
## 220 0.6937 nan 0.0100 0.0001
## 240 0.6769 nan 0.0100 0.0001
## 260 0.6617 nan 0.0100 0.0000
## 280 0.6475 nan 0.0100 0.0001
## 300 0.6347 nan 0.0100 0.0000
## 320 0.6225 nan 0.0100 -0.0001
## 340 0.6121 nan 0.0100 -0.0001
## 360 0.6026 nan 0.0100 0.0001
## 380 0.5925 nan 0.0100 0.0002
## 400 0.5831 nan 0.0100 -0.0000
## 420 0.5736 nan 0.0100 0.0002
## 440 0.5654 nan 0.0100 -0.0002
## 460 0.5575 nan 0.0100 -0.0002
## 480 0.5494 nan 0.0100 -0.0001
## 500 0.5422 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3119 nan 0.0100 0.0038
## 2 1.3025 nan 0.0100 0.0042
## 3 1.2931 nan 0.0100 0.0041
## 4 1.2851 nan 0.0100 0.0036
## 5 1.2764 nan 0.0100 0.0040
## 6 1.2678 nan 0.0100 0.0038
## 7 1.2594 nan 0.0100 0.0037
## 8 1.2508 nan 0.0100 0.0037
## 9 1.2427 nan 0.0100 0.0037
## 10 1.2347 nan 0.0100 0.0034
## 20 1.1647 nan 0.0100 0.0029
## 40 1.0524 nan 0.0100 0.0020
## 60 0.9646 nan 0.0100 0.0014
## 80 0.8959 nan 0.0100 0.0010
## 100 0.8411 nan 0.0100 0.0008
## 120 0.7965 nan 0.0100 0.0006
## 140 0.7581 nan 0.0100 0.0004
## 160 0.7251 nan 0.0100 0.0006
## 180 0.6979 nan 0.0100 0.0004
## 200 0.6740 nan 0.0100 0.0005
## 220 0.6519 nan 0.0100 0.0002
## 240 0.6323 nan 0.0100 0.0001
## 260 0.6153 nan 0.0100 0.0003
## 280 0.6001 nan 0.0100 0.0001
## 300 0.5855 nan 0.0100 0.0002
## 320 0.5724 nan 0.0100 0.0001
## 340 0.5594 nan 0.0100 0.0001
## 360 0.5480 nan 0.0100 -0.0002
## 380 0.5369 nan 0.0100 -0.0000
## 400 0.5266 nan 0.0100 0.0001
## 420 0.5157 nan 0.0100 0.0001
## 440 0.5057 nan 0.0100 -0.0001
## 460 0.4958 nan 0.0100 -0.0001
## 480 0.4863 nan 0.0100 0.0000
## 500 0.4772 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3119 nan 0.0100 0.0040
## 2 1.3035 nan 0.0100 0.0037
## 3 1.2944 nan 0.0100 0.0038
## 4 1.2860 nan 0.0100 0.0040
## 5 1.2773 nan 0.0100 0.0038
## 6 1.2692 nan 0.0100 0.0037
## 7 1.2613 nan 0.0100 0.0034
## 8 1.2535 nan 0.0100 0.0035
## 9 1.2456 nan 0.0100 0.0033
## 10 1.2377 nan 0.0100 0.0036
## 20 1.1664 nan 0.0100 0.0029
## 40 1.0519 nan 0.0100 0.0022
## 60 0.9647 nan 0.0100 0.0017
## 80 0.8951 nan 0.0100 0.0012
## 100 0.8421 nan 0.0100 0.0008
## 120 0.7991 nan 0.0100 0.0006
## 140 0.7600 nan 0.0100 0.0005
## 160 0.7280 nan 0.0100 0.0002
## 180 0.7007 nan 0.0100 0.0004
## 200 0.6771 nan 0.0100 0.0002
## 220 0.6566 nan 0.0100 0.0003
## 240 0.6381 nan 0.0100 0.0001
## 260 0.6211 nan 0.0100 0.0001
## 280 0.6062 nan 0.0100 0.0001
## 300 0.5919 nan 0.0100 -0.0001
## 320 0.5793 nan 0.0100 0.0000
## 340 0.5671 nan 0.0100 -0.0000
## 360 0.5552 nan 0.0100 -0.0000
## 380 0.5431 nan 0.0100 -0.0001
## 400 0.5318 nan 0.0100 0.0001
## 420 0.5211 nan 0.0100 0.0000
## 440 0.5117 nan 0.0100 0.0000
## 460 0.5014 nan 0.0100 0.0000
## 480 0.4926 nan 0.0100 -0.0001
## 500 0.4835 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3122 nan 0.0100 0.0036
## 2 1.3031 nan 0.0100 0.0040
## 3 1.2941 nan 0.0100 0.0041
## 4 1.2857 nan 0.0100 0.0036
## 5 1.2769 nan 0.0100 0.0039
## 6 1.2687 nan 0.0100 0.0039
## 7 1.2598 nan 0.0100 0.0038
## 8 1.2520 nan 0.0100 0.0034
## 9 1.2439 nan 0.0100 0.0038
## 10 1.2357 nan 0.0100 0.0035
## 20 1.1656 nan 0.0100 0.0030
## 40 1.0531 nan 0.0100 0.0021
## 60 0.9670 nan 0.0100 0.0015
## 80 0.8999 nan 0.0100 0.0013
## 100 0.8451 nan 0.0100 0.0010
## 120 0.8003 nan 0.0100 0.0005
## 140 0.7638 nan 0.0100 0.0003
## 160 0.7325 nan 0.0100 0.0003
## 180 0.7054 nan 0.0100 0.0002
## 200 0.6831 nan 0.0100 0.0003
## 220 0.6637 nan 0.0100 0.0001
## 240 0.6457 nan 0.0100 0.0001
## 260 0.6284 nan 0.0100 0.0001
## 280 0.6136 nan 0.0100 0.0000
## 300 0.6007 nan 0.0100 0.0000
## 320 0.5875 nan 0.0100 -0.0000
## 340 0.5760 nan 0.0100 -0.0001
## 360 0.5653 nan 0.0100 0.0001
## 380 0.5542 nan 0.0100 -0.0001
## 400 0.5434 nan 0.0100 -0.0000
## 420 0.5342 nan 0.0100 -0.0000
## 440 0.5252 nan 0.0100 -0.0002
## 460 0.5167 nan 0.0100 -0.0001
## 480 0.5080 nan 0.0100 -0.0001
## 500 0.4981 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3108 nan 0.0100 0.0048
## 2 1.3014 nan 0.0100 0.0042
## 3 1.2927 nan 0.0100 0.0040
## 4 1.2835 nan 0.0100 0.0044
## 5 1.2746 nan 0.0100 0.0041
## 6 1.2660 nan 0.0100 0.0037
## 7 1.2567 nan 0.0100 0.0039
## 8 1.2481 nan 0.0100 0.0039
## 9 1.2398 nan 0.0100 0.0041
## 10 1.2316 nan 0.0100 0.0035
## 20 1.1558 nan 0.0100 0.0028
## 40 1.0371 nan 0.0100 0.0018
## 60 0.9458 nan 0.0100 0.0015
## 80 0.8743 nan 0.0100 0.0012
## 100 0.8173 nan 0.0100 0.0009
## 120 0.7714 nan 0.0100 0.0006
## 140 0.7307 nan 0.0100 0.0005
## 160 0.6976 nan 0.0100 0.0003
## 180 0.6688 nan 0.0100 0.0005
## 200 0.6428 nan 0.0100 0.0002
## 220 0.6200 nan 0.0100 0.0003
## 240 0.5990 nan 0.0100 0.0001
## 260 0.5806 nan 0.0100 0.0001
## 280 0.5635 nan 0.0100 0.0001
## 300 0.5476 nan 0.0100 0.0001
## 320 0.5320 nan 0.0100 -0.0000
## 340 0.5171 nan 0.0100 0.0000
## 360 0.5033 nan 0.0100 0.0001
## 380 0.4902 nan 0.0100 -0.0000
## 400 0.4791 nan 0.0100 -0.0001
## 420 0.4689 nan 0.0100 -0.0001
## 440 0.4590 nan 0.0100 0.0001
## 460 0.4489 nan 0.0100 -0.0001
## 480 0.4390 nan 0.0100 0.0002
## 500 0.4305 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3112 nan 0.0100 0.0042
## 2 1.3019 nan 0.0100 0.0044
## 3 1.2929 nan 0.0100 0.0038
## 4 1.2841 nan 0.0100 0.0040
## 5 1.2751 nan 0.0100 0.0037
## 6 1.2664 nan 0.0100 0.0040
## 7 1.2580 nan 0.0100 0.0036
## 8 1.2499 nan 0.0100 0.0035
## 9 1.2414 nan 0.0100 0.0037
## 10 1.2331 nan 0.0100 0.0036
## 20 1.1581 nan 0.0100 0.0032
## 40 1.0423 nan 0.0100 0.0022
## 60 0.9514 nan 0.0100 0.0016
## 80 0.8823 nan 0.0100 0.0014
## 100 0.8229 nan 0.0100 0.0009
## 120 0.7755 nan 0.0100 0.0006
## 140 0.7382 nan 0.0100 0.0004
## 160 0.7049 nan 0.0100 0.0004
## 180 0.6768 nan 0.0100 0.0003
## 200 0.6514 nan 0.0100 0.0002
## 220 0.6289 nan 0.0100 0.0002
## 240 0.6098 nan 0.0100 0.0001
## 260 0.5919 nan 0.0100 0.0002
## 280 0.5756 nan 0.0100 0.0000
## 300 0.5596 nan 0.0100 0.0001
## 320 0.5454 nan 0.0100 0.0000
## 340 0.5312 nan 0.0100 -0.0000
## 360 0.5183 nan 0.0100 -0.0000
## 380 0.5065 nan 0.0100 0.0001
## 400 0.4949 nan 0.0100 -0.0000
## 420 0.4839 nan 0.0100 -0.0001
## 440 0.4731 nan 0.0100 -0.0001
## 460 0.4636 nan 0.0100 -0.0001
## 480 0.4540 nan 0.0100 -0.0001
## 500 0.4441 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3109 nan 0.0100 0.0043
## 2 1.3020 nan 0.0100 0.0040
## 3 1.2929 nan 0.0100 0.0040
## 4 1.2845 nan 0.0100 0.0035
## 5 1.2750 nan 0.0100 0.0043
## 6 1.2665 nan 0.0100 0.0040
## 7 1.2577 nan 0.0100 0.0040
## 8 1.2496 nan 0.0100 0.0038
## 9 1.2411 nan 0.0100 0.0035
## 10 1.2325 nan 0.0100 0.0034
## 20 1.1614 nan 0.0100 0.0031
## 40 1.0446 nan 0.0100 0.0021
## 60 0.9539 nan 0.0100 0.0017
## 80 0.8837 nan 0.0100 0.0010
## 100 0.8266 nan 0.0100 0.0011
## 120 0.7802 nan 0.0100 0.0007
## 140 0.7426 nan 0.0100 0.0003
## 160 0.7102 nan 0.0100 0.0003
## 180 0.6833 nan 0.0100 0.0002
## 200 0.6600 nan 0.0100 0.0003
## 220 0.6386 nan 0.0100 0.0002
## 240 0.6185 nan 0.0100 0.0002
## 260 0.6005 nan 0.0100 0.0001
## 280 0.5840 nan 0.0100 0.0000
## 300 0.5680 nan 0.0100 0.0001
## 320 0.5540 nan 0.0100 0.0000
## 340 0.5414 nan 0.0100 0.0000
## 360 0.5294 nan 0.0100 -0.0000
## 380 0.5177 nan 0.0100 -0.0000
## 400 0.5059 nan 0.0100 -0.0001
## 420 0.4954 nan 0.0100 -0.0002
## 440 0.4849 nan 0.0100 0.0002
## 460 0.4753 nan 0.0100 -0.0001
## 480 0.4665 nan 0.0100 -0.0001
## 500 0.4575 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2377 nan 0.1000 0.0329
## 2 1.1714 nan 0.1000 0.0296
## 3 1.1159 nan 0.1000 0.0245
## 4 1.0606 nan 0.1000 0.0222
## 5 1.0134 nan 0.1000 0.0209
## 6 0.9734 nan 0.1000 0.0154
## 7 0.9407 nan 0.1000 0.0129
## 8 0.9124 nan 0.1000 0.0115
## 9 0.8865 nan 0.1000 0.0079
## 10 0.8650 nan 0.1000 0.0094
## 20 0.7155 nan 0.1000 0.0009
## 40 0.5787 nan 0.1000 -0.0012
## 60 0.4938 nan 0.1000 -0.0003
## 80 0.4313 nan 0.1000 -0.0006
## 100 0.3868 nan 0.1000 -0.0010
## 120 0.3428 nan 0.1000 -0.0001
## 140 0.3062 nan 0.1000 0.0002
## 160 0.2771 nan 0.1000 -0.0005
## 180 0.2489 nan 0.1000 -0.0001
## 200 0.2235 nan 0.1000 -0.0002
## 220 0.2005 nan 0.1000 -0.0001
## 240 0.1821 nan 0.1000 -0.0006
## 260 0.1664 nan 0.1000 -0.0007
## 280 0.1514 nan 0.1000 -0.0002
## 300 0.1385 nan 0.1000 -0.0007
## 320 0.1265 nan 0.1000 -0.0001
## 340 0.1160 nan 0.1000 -0.0006
## 360 0.1062 nan 0.1000 -0.0001
## 380 0.0975 nan 0.1000 -0.0003
## 400 0.0898 nan 0.1000 -0.0003
## 420 0.0825 nan 0.1000 -0.0001
## 440 0.0768 nan 0.1000 -0.0004
## 460 0.0706 nan 0.1000 -0.0004
## 480 0.0650 nan 0.1000 -0.0003
## 500 0.0589 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2333 nan 0.1000 0.0347
## 2 1.1655 nan 0.1000 0.0310
## 3 1.1108 nan 0.1000 0.0218
## 4 1.0556 nan 0.1000 0.0226
## 5 1.0129 nan 0.1000 0.0165
## 6 0.9762 nan 0.1000 0.0158
## 7 0.9421 nan 0.1000 0.0133
## 8 0.9158 nan 0.1000 0.0114
## 9 0.8858 nan 0.1000 0.0109
## 10 0.8624 nan 0.1000 0.0068
## 20 0.7191 nan 0.1000 0.0025
## 40 0.5778 nan 0.1000 0.0001
## 60 0.4976 nan 0.1000 -0.0018
## 80 0.4386 nan 0.1000 0.0007
## 100 0.3899 nan 0.1000 -0.0003
## 120 0.3442 nan 0.1000 -0.0008
## 140 0.3062 nan 0.1000 -0.0007
## 160 0.2768 nan 0.1000 -0.0016
## 180 0.2490 nan 0.1000 -0.0006
## 200 0.2262 nan 0.1000 -0.0005
## 220 0.2046 nan 0.1000 -0.0003
## 240 0.1867 nan 0.1000 -0.0001
## 260 0.1695 nan 0.1000 -0.0003
## 280 0.1547 nan 0.1000 -0.0000
## 300 0.1418 nan 0.1000 -0.0001
## 320 0.1294 nan 0.1000 -0.0006
## 340 0.1188 nan 0.1000 -0.0004
## 360 0.1095 nan 0.1000 -0.0002
## 380 0.1004 nan 0.1000 -0.0001
## 400 0.0928 nan 0.1000 -0.0003
## 420 0.0852 nan 0.1000 -0.0001
## 440 0.0789 nan 0.1000 -0.0004
## 460 0.0723 nan 0.1000 -0.0002
## 480 0.0671 nan 0.1000 -0.0002
## 500 0.0624 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2412 nan 0.1000 0.0365
## 2 1.1751 nan 0.1000 0.0274
## 3 1.1175 nan 0.1000 0.0233
## 4 1.0627 nan 0.1000 0.0249
## 5 1.0152 nan 0.1000 0.0206
## 6 0.9796 nan 0.1000 0.0125
## 7 0.9441 nan 0.1000 0.0156
## 8 0.9135 nan 0.1000 0.0114
## 9 0.8863 nan 0.1000 0.0108
## 10 0.8602 nan 0.1000 0.0099
## 20 0.7106 nan 0.1000 0.0013
## 40 0.5746 nan 0.1000 -0.0004
## 60 0.5084 nan 0.1000 -0.0017
## 80 0.4550 nan 0.1000 -0.0010
## 100 0.4051 nan 0.1000 -0.0013
## 120 0.3624 nan 0.1000 -0.0010
## 140 0.3214 nan 0.1000 -0.0005
## 160 0.2906 nan 0.1000 -0.0010
## 180 0.2676 nan 0.1000 -0.0013
## 200 0.2460 nan 0.1000 -0.0012
## 220 0.2267 nan 0.1000 -0.0012
## 240 0.2061 nan 0.1000 -0.0006
## 260 0.1879 nan 0.1000 -0.0005
## 280 0.1735 nan 0.1000 -0.0011
## 300 0.1608 nan 0.1000 -0.0013
## 320 0.1484 nan 0.1000 -0.0004
## 340 0.1355 nan 0.1000 -0.0004
## 360 0.1247 nan 0.1000 -0.0006
## 380 0.1127 nan 0.1000 0.0001
## 400 0.1035 nan 0.1000 -0.0004
## 420 0.0958 nan 0.1000 -0.0003
## 440 0.0889 nan 0.1000 -0.0003
## 460 0.0830 nan 0.1000 -0.0003
## 480 0.0772 nan 0.1000 -0.0002
## 500 0.0712 nan 0.1000 -0.0003
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2291 nan 0.1000 0.0394
## 2 1.1562 nan 0.1000 0.0360
## 3 1.0966 nan 0.1000 0.0242
## 4 1.0431 nan 0.1000 0.0241
## 5 1.0002 nan 0.1000 0.0164
## 6 0.9651 nan 0.1000 0.0137
## 7 0.9258 nan 0.1000 0.0170
## 8 0.8933 nan 0.1000 0.0115
## 9 0.8643 nan 0.1000 0.0101
## 10 0.8358 nan 0.1000 0.0090
## 20 0.6692 nan 0.1000 0.0053
## 40 0.5183 nan 0.1000 0.0013
## 60 0.4231 nan 0.1000 0.0001
## 80 0.3563 nan 0.1000 -0.0005
## 100 0.3125 nan 0.1000 -0.0008
## 120 0.2695 nan 0.1000 0.0003
## 140 0.2364 nan 0.1000 -0.0007
## 160 0.2052 nan 0.1000 0.0002
## 180 0.1806 nan 0.1000 -0.0004
## 200 0.1608 nan 0.1000 -0.0005
## 220 0.1415 nan 0.1000 -0.0001
## 240 0.1261 nan 0.1000 -0.0001
## 260 0.1128 nan 0.1000 -0.0004
## 280 0.1015 nan 0.1000 -0.0001
## 300 0.0920 nan 0.1000 -0.0004
## 320 0.0825 nan 0.1000 -0.0002
## 340 0.0743 nan 0.1000 -0.0003
## 360 0.0658 nan 0.1000 -0.0002
## 380 0.0592 nan 0.1000 -0.0001
## 400 0.0538 nan 0.1000 -0.0001
## 420 0.0485 nan 0.1000 -0.0002
## 440 0.0434 nan 0.1000 -0.0001
## 460 0.0391 nan 0.1000 -0.0002
## 480 0.0355 nan 0.1000 -0.0000
## 500 0.0319 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2375 nan 0.1000 0.0394
## 2 1.1607 nan 0.1000 0.0333
## 3 1.1010 nan 0.1000 0.0256
## 4 1.0490 nan 0.1000 0.0233
## 5 1.0087 nan 0.1000 0.0193
## 6 0.9665 nan 0.1000 0.0163
## 7 0.9349 nan 0.1000 0.0115
## 8 0.9056 nan 0.1000 0.0106
## 9 0.8796 nan 0.1000 0.0081
## 10 0.8581 nan 0.1000 0.0072
## 20 0.6824 nan 0.1000 -0.0005
## 40 0.5409 nan 0.1000 -0.0003
## 60 0.4578 nan 0.1000 -0.0009
## 80 0.3836 nan 0.1000 -0.0009
## 100 0.3261 nan 0.1000 -0.0006
## 120 0.2803 nan 0.1000 -0.0010
## 140 0.2475 nan 0.1000 -0.0005
## 160 0.2143 nan 0.1000 -0.0007
## 180 0.1911 nan 0.1000 -0.0005
## 200 0.1668 nan 0.1000 -0.0004
## 220 0.1497 nan 0.1000 -0.0008
## 240 0.1335 nan 0.1000 -0.0002
## 260 0.1191 nan 0.1000 -0.0004
## 280 0.1067 nan 0.1000 -0.0002
## 300 0.0951 nan 0.1000 -0.0000
## 320 0.0841 nan 0.1000 -0.0001
## 340 0.0759 nan 0.1000 -0.0000
## 360 0.0677 nan 0.1000 -0.0003
## 380 0.0612 nan 0.1000 -0.0003
## 400 0.0548 nan 0.1000 -0.0000
## 420 0.0497 nan 0.1000 -0.0001
## 440 0.0453 nan 0.1000 -0.0002
## 460 0.0410 nan 0.1000 -0.0000
## 480 0.0375 nan 0.1000 -0.0002
## 500 0.0340 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2323 nan 0.1000 0.0426
## 2 1.1593 nan 0.1000 0.0311
## 3 1.0959 nan 0.1000 0.0260
## 4 1.0466 nan 0.1000 0.0237
## 5 1.0039 nan 0.1000 0.0190
## 6 0.9678 nan 0.1000 0.0154
## 7 0.9352 nan 0.1000 0.0125
## 8 0.9030 nan 0.1000 0.0127
## 9 0.8756 nan 0.1000 0.0104
## 10 0.8536 nan 0.1000 0.0078
## 20 0.6988 nan 0.1000 -0.0001
## 40 0.5580 nan 0.1000 -0.0001
## 60 0.4674 nan 0.1000 -0.0011
## 80 0.4048 nan 0.1000 -0.0016
## 100 0.3544 nan 0.1000 -0.0011
## 120 0.3129 nan 0.1000 -0.0004
## 140 0.2743 nan 0.1000 -0.0002
## 160 0.2419 nan 0.1000 -0.0014
## 180 0.2175 nan 0.1000 -0.0014
## 200 0.1918 nan 0.1000 -0.0009
## 220 0.1705 nan 0.1000 -0.0007
## 240 0.1541 nan 0.1000 -0.0006
## 260 0.1391 nan 0.1000 -0.0006
## 280 0.1257 nan 0.1000 -0.0008
## 300 0.1126 nan 0.1000 -0.0001
## 320 0.1026 nan 0.1000 -0.0007
## 340 0.0927 nan 0.1000 -0.0005
## 360 0.0832 nan 0.1000 -0.0003
## 380 0.0760 nan 0.1000 -0.0003
## 400 0.0692 nan 0.1000 -0.0004
## 420 0.0627 nan 0.1000 -0.0000
## 440 0.0565 nan 0.1000 -0.0001
## 460 0.0509 nan 0.1000 -0.0001
## 480 0.0462 nan 0.1000 -0.0001
## 500 0.0421 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2248 nan 0.1000 0.0403
## 2 1.1497 nan 0.1000 0.0344
## 3 1.0846 nan 0.1000 0.0313
## 4 1.0259 nan 0.1000 0.0236
## 5 0.9801 nan 0.1000 0.0214
## 6 0.9396 nan 0.1000 0.0152
## 7 0.9047 nan 0.1000 0.0148
## 8 0.8753 nan 0.1000 0.0119
## 9 0.8443 nan 0.1000 0.0108
## 10 0.8201 nan 0.1000 0.0092
## 20 0.6546 nan 0.1000 0.0014
## 40 0.4964 nan 0.1000 0.0002
## 60 0.4024 nan 0.1000 -0.0012
## 80 0.3299 nan 0.1000 -0.0003
## 100 0.2741 nan 0.1000 -0.0003
## 120 0.2324 nan 0.1000 -0.0002
## 140 0.2001 nan 0.1000 -0.0005
## 160 0.1711 nan 0.1000 -0.0003
## 180 0.1460 nan 0.1000 -0.0004
## 200 0.1267 nan 0.1000 -0.0006
## 220 0.1083 nan 0.1000 -0.0001
## 240 0.0945 nan 0.1000 -0.0002
## 260 0.0824 nan 0.1000 -0.0001
## 280 0.0719 nan 0.1000 -0.0002
## 300 0.0634 nan 0.1000 -0.0003
## 320 0.0561 nan 0.1000 -0.0001
## 340 0.0494 nan 0.1000 -0.0000
## 360 0.0430 nan 0.1000 -0.0001
## 380 0.0381 nan 0.1000 -0.0000
## 400 0.0335 nan 0.1000 -0.0001
## 420 0.0297 nan 0.1000 -0.0001
## 440 0.0260 nan 0.1000 -0.0001
## 460 0.0231 nan 0.1000 0.0000
## 480 0.0204 nan 0.1000 -0.0000
## 500 0.0180 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2255 nan 0.1000 0.0443
## 2 1.1543 nan 0.1000 0.0319
## 3 1.0906 nan 0.1000 0.0276
## 4 1.0399 nan 0.1000 0.0207
## 5 0.9941 nan 0.1000 0.0200
## 6 0.9510 nan 0.1000 0.0162
## 7 0.9140 nan 0.1000 0.0155
## 8 0.8790 nan 0.1000 0.0127
## 9 0.8482 nan 0.1000 0.0111
## 10 0.8225 nan 0.1000 0.0102
## 20 0.6573 nan 0.1000 0.0012
## 40 0.5057 nan 0.1000 -0.0024
## 60 0.4092 nan 0.1000 -0.0001
## 80 0.3386 nan 0.1000 -0.0004
## 100 0.2903 nan 0.1000 -0.0009
## 120 0.2455 nan 0.1000 0.0005
## 140 0.2094 nan 0.1000 -0.0000
## 160 0.1835 nan 0.1000 -0.0010
## 180 0.1525 nan 0.1000 -0.0004
## 200 0.1326 nan 0.1000 -0.0009
## 220 0.1151 nan 0.1000 -0.0003
## 240 0.0988 nan 0.1000 -0.0004
## 260 0.0857 nan 0.1000 -0.0001
## 280 0.0765 nan 0.1000 -0.0006
## 300 0.0673 nan 0.1000 -0.0002
## 320 0.0591 nan 0.1000 0.0000
## 340 0.0523 nan 0.1000 -0.0002
## 360 0.0458 nan 0.1000 -0.0001
## 380 0.0400 nan 0.1000 -0.0002
## 400 0.0351 nan 0.1000 -0.0001
## 420 0.0311 nan 0.1000 -0.0001
## 440 0.0276 nan 0.1000 -0.0001
## 460 0.0246 nan 0.1000 -0.0000
## 480 0.0218 nan 0.1000 -0.0000
## 500 0.0193 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2282 nan 0.1000 0.0434
## 2 1.1482 nan 0.1000 0.0367
## 3 1.0902 nan 0.1000 0.0237
## 4 1.0405 nan 0.1000 0.0174
## 5 0.9875 nan 0.1000 0.0217
## 6 0.9459 nan 0.1000 0.0184
## 7 0.9103 nan 0.1000 0.0133
## 8 0.8746 nan 0.1000 0.0151
## 9 0.8450 nan 0.1000 0.0120
## 10 0.8189 nan 0.1000 0.0100
## 20 0.6598 nan 0.1000 0.0009
## 40 0.5050 nan 0.1000 -0.0001
## 60 0.4150 nan 0.1000 -0.0001
## 80 0.3510 nan 0.1000 -0.0016
## 100 0.2980 nan 0.1000 -0.0007
## 120 0.2560 nan 0.1000 -0.0002
## 140 0.2217 nan 0.1000 -0.0010
## 160 0.1919 nan 0.1000 -0.0006
## 180 0.1683 nan 0.1000 -0.0005
## 200 0.1439 nan 0.1000 -0.0004
## 220 0.1260 nan 0.1000 -0.0006
## 240 0.1105 nan 0.1000 -0.0001
## 260 0.0963 nan 0.1000 -0.0005
## 280 0.0855 nan 0.1000 -0.0003
## 300 0.0759 nan 0.1000 -0.0002
## 320 0.0676 nan 0.1000 -0.0001
## 340 0.0608 nan 0.1000 -0.0003
## 360 0.0537 nan 0.1000 -0.0001
## 380 0.0474 nan 0.1000 -0.0001
## 400 0.0420 nan 0.1000 -0.0002
## 420 0.0374 nan 0.1000 -0.0001
## 440 0.0331 nan 0.1000 -0.0001
## 460 0.0291 nan 0.1000 -0.0000
## 480 0.0260 nan 0.1000 -0.0001
## 500 0.0235 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3199 nan 0.0010 0.0003
## 2 1.3191 nan 0.0010 0.0004
## 3 1.3183 nan 0.0010 0.0004
## 4 1.3175 nan 0.0010 0.0004
## 5 1.3168 nan 0.0010 0.0003
## 6 1.3160 nan 0.0010 0.0003
## 7 1.3152 nan 0.0010 0.0003
## 8 1.3145 nan 0.0010 0.0004
## 9 1.3136 nan 0.0010 0.0003
## 10 1.3128 nan 0.0010 0.0004
## 20 1.3047 nan 0.0010 0.0004
## 40 1.2893 nan 0.0010 0.0003
## 60 1.2744 nan 0.0010 0.0003
## 80 1.2603 nan 0.0010 0.0003
## 100 1.2460 nan 0.0010 0.0003
## 120 1.2323 nan 0.0010 0.0003
## 140 1.2191 nan 0.0010 0.0003
## 160 1.2064 nan 0.0010 0.0003
## 180 1.1936 nan 0.0010 0.0002
## 200 1.1815 nan 0.0010 0.0002
## 220 1.1696 nan 0.0010 0.0003
## 240 1.1581 nan 0.0010 0.0003
## 260 1.1467 nan 0.0010 0.0002
## 280 1.1359 nan 0.0010 0.0003
## 300 1.1254 nan 0.0010 0.0002
## 320 1.1152 nan 0.0010 0.0002
## 340 1.1053 nan 0.0010 0.0002
## 360 1.0956 nan 0.0010 0.0002
## 380 1.0864 nan 0.0010 0.0002
## 400 1.0771 nan 0.0010 0.0002
## 420 1.0680 nan 0.0010 0.0002
## 440 1.0595 nan 0.0010 0.0002
## 460 1.0512 nan 0.0010 0.0001
## 480 1.0428 nan 0.0010 0.0002
## 500 1.0345 nan 0.0010 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3199 nan 0.0010 0.0004
## 2 1.3191 nan 0.0010 0.0004
## 3 1.3183 nan 0.0010 0.0004
## 4 1.3174 nan 0.0010 0.0004
## 5 1.3167 nan 0.0010 0.0003
## 6 1.3159 nan 0.0010 0.0003
## 7 1.3151 nan 0.0010 0.0004
## 8 1.3142 nan 0.0010 0.0004
## 9 1.3134 nan 0.0010 0.0004
## 10 1.3126 nan 0.0010 0.0004
## 20 1.3048 nan 0.0010 0.0004
## 40 1.2892 nan 0.0010 0.0004
## 60 1.2744 nan 0.0010 0.0003
## 80 1.2596 nan 0.0010 0.0003
## 100 1.2455 nan 0.0010 0.0003
## 120 1.2316 nan 0.0010 0.0003
## 140 1.2186 nan 0.0010 0.0003
## 160 1.2057 nan 0.0010 0.0003
## 180 1.1931 nan 0.0010 0.0003
## 200 1.1812 nan 0.0010 0.0003
## 220 1.1696 nan 0.0010 0.0003
## 240 1.1583 nan 0.0010 0.0002
## 260 1.1473 nan 0.0010 0.0003
## 280 1.1360 nan 0.0010 0.0003
## 300 1.1256 nan 0.0010 0.0002
## 320 1.1153 nan 0.0010 0.0002
## 340 1.1055 nan 0.0010 0.0002
## 360 1.0957 nan 0.0010 0.0002
## 380 1.0863 nan 0.0010 0.0002
## 400 1.0771 nan 0.0010 0.0002
## 420 1.0683 nan 0.0010 0.0002
## 440 1.0594 nan 0.0010 0.0002
## 460 1.0506 nan 0.0010 0.0002
## 480 1.0423 nan 0.0010 0.0001
## 500 1.0343 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3190 nan 0.0010 0.0004
## 3 1.3182 nan 0.0010 0.0004
## 4 1.3174 nan 0.0010 0.0004
## 5 1.3166 nan 0.0010 0.0004
## 6 1.3158 nan 0.0010 0.0003
## 7 1.3150 nan 0.0010 0.0004
## 8 1.3141 nan 0.0010 0.0003
## 9 1.3133 nan 0.0010 0.0003
## 10 1.3126 nan 0.0010 0.0003
## 20 1.3045 nan 0.0010 0.0004
## 40 1.2889 nan 0.0010 0.0003
## 60 1.2738 nan 0.0010 0.0003
## 80 1.2591 nan 0.0010 0.0003
## 100 1.2451 nan 0.0010 0.0003
## 120 1.2316 nan 0.0010 0.0003
## 140 1.2182 nan 0.0010 0.0003
## 160 1.2051 nan 0.0010 0.0003
## 180 1.1929 nan 0.0010 0.0003
## 200 1.1810 nan 0.0010 0.0002
## 220 1.1691 nan 0.0010 0.0003
## 240 1.1579 nan 0.0010 0.0003
## 260 1.1468 nan 0.0010 0.0002
## 280 1.1359 nan 0.0010 0.0002
## 300 1.1251 nan 0.0010 0.0002
## 320 1.1150 nan 0.0010 0.0002
## 340 1.1050 nan 0.0010 0.0002
## 360 1.0957 nan 0.0010 0.0002
## 380 1.0865 nan 0.0010 0.0002
## 400 1.0775 nan 0.0010 0.0002
## 420 1.0684 nan 0.0010 0.0002
## 440 1.0599 nan 0.0010 0.0002
## 460 1.0515 nan 0.0010 0.0002
## 480 1.0434 nan 0.0010 0.0002
## 500 1.0354 nan 0.0010 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3197 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3180 nan 0.0010 0.0004
## 4 1.3172 nan 0.0010 0.0004
## 5 1.3163 nan 0.0010 0.0004
## 6 1.3154 nan 0.0010 0.0004
## 7 1.3145 nan 0.0010 0.0004
## 8 1.3137 nan 0.0010 0.0003
## 9 1.3128 nan 0.0010 0.0004
## 10 1.3120 nan 0.0010 0.0004
## 20 1.3035 nan 0.0010 0.0004
## 40 1.2867 nan 0.0010 0.0004
## 60 1.2707 nan 0.0010 0.0004
## 80 1.2553 nan 0.0010 0.0003
## 100 1.2404 nan 0.0010 0.0003
## 120 1.2258 nan 0.0010 0.0003
## 140 1.2115 nan 0.0010 0.0003
## 160 1.1978 nan 0.0010 0.0003
## 180 1.1844 nan 0.0010 0.0003
## 200 1.1715 nan 0.0010 0.0003
## 220 1.1592 nan 0.0010 0.0003
## 240 1.1472 nan 0.0010 0.0003
## 260 1.1352 nan 0.0010 0.0003
## 280 1.1239 nan 0.0010 0.0003
## 300 1.1128 nan 0.0010 0.0002
## 320 1.1023 nan 0.0010 0.0002
## 340 1.0917 nan 0.0010 0.0002
## 360 1.0814 nan 0.0010 0.0002
## 380 1.0718 nan 0.0010 0.0002
## 400 1.0622 nan 0.0010 0.0002
## 420 1.0525 nan 0.0010 0.0002
## 440 1.0433 nan 0.0010 0.0002
## 460 1.0345 nan 0.0010 0.0002
## 480 1.0257 nan 0.0010 0.0002
## 500 1.0169 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3197 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0003
## 3 1.3181 nan 0.0010 0.0003
## 4 1.3172 nan 0.0010 0.0004
## 5 1.3163 nan 0.0010 0.0004
## 6 1.3154 nan 0.0010 0.0004
## 7 1.3146 nan 0.0010 0.0004
## 8 1.3136 nan 0.0010 0.0004
## 9 1.3127 nan 0.0010 0.0004
## 10 1.3119 nan 0.0010 0.0004
## 20 1.3036 nan 0.0010 0.0004
## 40 1.2870 nan 0.0010 0.0004
## 60 1.2710 nan 0.0010 0.0004
## 80 1.2555 nan 0.0010 0.0004
## 100 1.2405 nan 0.0010 0.0003
## 120 1.2261 nan 0.0010 0.0004
## 140 1.2120 nan 0.0010 0.0003
## 160 1.1984 nan 0.0010 0.0003
## 180 1.1852 nan 0.0010 0.0003
## 200 1.1725 nan 0.0010 0.0002
## 220 1.1598 nan 0.0010 0.0003
## 240 1.1479 nan 0.0010 0.0002
## 260 1.1363 nan 0.0010 0.0003
## 280 1.1249 nan 0.0010 0.0003
## 300 1.1138 nan 0.0010 0.0002
## 320 1.1032 nan 0.0010 0.0003
## 340 1.0927 nan 0.0010 0.0002
## 360 1.0825 nan 0.0010 0.0002
## 380 1.0724 nan 0.0010 0.0002
## 400 1.0626 nan 0.0010 0.0002
## 420 1.0531 nan 0.0010 0.0002
## 440 1.0439 nan 0.0010 0.0002
## 460 1.0352 nan 0.0010 0.0002
## 480 1.0264 nan 0.0010 0.0002
## 500 1.0178 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3190 nan 0.0010 0.0004
## 3 1.3181 nan 0.0010 0.0004
## 4 1.3173 nan 0.0010 0.0004
## 5 1.3165 nan 0.0010 0.0004
## 6 1.3157 nan 0.0010 0.0004
## 7 1.3149 nan 0.0010 0.0004
## 8 1.3141 nan 0.0010 0.0004
## 9 1.3133 nan 0.0010 0.0004
## 10 1.3124 nan 0.0010 0.0004
## 20 1.3041 nan 0.0010 0.0004
## 40 1.2880 nan 0.0010 0.0003
## 60 1.2721 nan 0.0010 0.0004
## 80 1.2570 nan 0.0010 0.0003
## 100 1.2419 nan 0.0010 0.0004
## 120 1.2278 nan 0.0010 0.0003
## 140 1.2141 nan 0.0010 0.0003
## 160 1.2007 nan 0.0010 0.0003
## 180 1.1874 nan 0.0010 0.0003
## 200 1.1746 nan 0.0010 0.0003
## 220 1.1620 nan 0.0010 0.0003
## 240 1.1502 nan 0.0010 0.0003
## 260 1.1383 nan 0.0010 0.0003
## 280 1.1272 nan 0.0010 0.0002
## 300 1.1163 nan 0.0010 0.0002
## 320 1.1055 nan 0.0010 0.0003
## 340 1.0951 nan 0.0010 0.0002
## 360 1.0849 nan 0.0010 0.0002
## 380 1.0753 nan 0.0010 0.0002
## 400 1.0657 nan 0.0010 0.0002
## 420 1.0563 nan 0.0010 0.0002
## 440 1.0471 nan 0.0010 0.0002
## 460 1.0383 nan 0.0010 0.0002
## 480 1.0294 nan 0.0010 0.0002
## 500 1.0210 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3190 nan 0.0010 0.0004
## 3 1.3180 nan 0.0010 0.0004
## 4 1.3171 nan 0.0010 0.0004
## 5 1.3163 nan 0.0010 0.0004
## 6 1.3154 nan 0.0010 0.0004
## 7 1.3145 nan 0.0010 0.0004
## 8 1.3136 nan 0.0010 0.0003
## 9 1.3127 nan 0.0010 0.0004
## 10 1.3117 nan 0.0010 0.0004
## 20 1.3031 nan 0.0010 0.0004
## 40 1.2855 nan 0.0010 0.0004
## 60 1.2684 nan 0.0010 0.0003
## 80 1.2521 nan 0.0010 0.0004
## 100 1.2363 nan 0.0010 0.0003
## 120 1.2210 nan 0.0010 0.0003
## 140 1.2062 nan 0.0010 0.0003
## 160 1.1920 nan 0.0010 0.0003
## 180 1.1784 nan 0.0010 0.0003
## 200 1.1650 nan 0.0010 0.0003
## 220 1.1523 nan 0.0010 0.0003
## 240 1.1396 nan 0.0010 0.0003
## 260 1.1270 nan 0.0010 0.0002
## 280 1.1151 nan 0.0010 0.0003
## 300 1.1036 nan 0.0010 0.0002
## 320 1.0923 nan 0.0010 0.0002
## 340 1.0810 nan 0.0010 0.0002
## 360 1.0705 nan 0.0010 0.0002
## 380 1.0601 nan 0.0010 0.0002
## 400 1.0501 nan 0.0010 0.0002
## 420 1.0402 nan 0.0010 0.0002
## 440 1.0305 nan 0.0010 0.0002
## 460 1.0210 nan 0.0010 0.0002
## 480 1.0120 nan 0.0010 0.0002
## 500 1.0032 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3197 nan 0.0010 0.0004
## 2 1.3188 nan 0.0010 0.0004
## 3 1.3179 nan 0.0010 0.0004
## 4 1.3170 nan 0.0010 0.0004
## 5 1.3161 nan 0.0010 0.0004
## 6 1.3153 nan 0.0010 0.0004
## 7 1.3143 nan 0.0010 0.0004
## 8 1.3134 nan 0.0010 0.0004
## 9 1.3125 nan 0.0010 0.0004
## 10 1.3117 nan 0.0010 0.0004
## 20 1.3028 nan 0.0010 0.0003
## 40 1.2855 nan 0.0010 0.0003
## 60 1.2689 nan 0.0010 0.0004
## 80 1.2530 nan 0.0010 0.0004
## 100 1.2373 nan 0.0010 0.0003
## 120 1.2222 nan 0.0010 0.0004
## 140 1.2075 nan 0.0010 0.0003
## 160 1.1931 nan 0.0010 0.0003
## 180 1.1795 nan 0.0010 0.0003
## 200 1.1662 nan 0.0010 0.0002
## 220 1.1532 nan 0.0010 0.0003
## 240 1.1406 nan 0.0010 0.0003
## 260 1.1288 nan 0.0010 0.0003
## 280 1.1169 nan 0.0010 0.0003
## 300 1.1055 nan 0.0010 0.0002
## 320 1.0942 nan 0.0010 0.0002
## 340 1.0835 nan 0.0010 0.0002
## 360 1.0729 nan 0.0010 0.0002
## 380 1.0624 nan 0.0010 0.0002
## 400 1.0523 nan 0.0010 0.0002
## 420 1.0425 nan 0.0010 0.0002
## 440 1.0328 nan 0.0010 0.0002
## 460 1.0236 nan 0.0010 0.0002
## 480 1.0150 nan 0.0010 0.0002
## 500 1.0060 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3180 nan 0.0010 0.0004
## 4 1.3171 nan 0.0010 0.0004
## 5 1.3163 nan 0.0010 0.0004
## 6 1.3154 nan 0.0010 0.0004
## 7 1.3145 nan 0.0010 0.0004
## 8 1.3136 nan 0.0010 0.0004
## 9 1.3127 nan 0.0010 0.0004
## 10 1.3118 nan 0.0010 0.0004
## 20 1.3031 nan 0.0010 0.0004
## 40 1.2863 nan 0.0010 0.0003
## 60 1.2699 nan 0.0010 0.0003
## 80 1.2537 nan 0.0010 0.0004
## 100 1.2381 nan 0.0010 0.0003
## 120 1.2230 nan 0.0010 0.0003
## 140 1.2086 nan 0.0010 0.0003
## 160 1.1948 nan 0.0010 0.0003
## 180 1.1812 nan 0.0010 0.0003
## 200 1.1681 nan 0.0010 0.0003
## 220 1.1554 nan 0.0010 0.0003
## 240 1.1430 nan 0.0010 0.0003
## 260 1.1308 nan 0.0010 0.0002
## 280 1.1193 nan 0.0010 0.0002
## 300 1.1080 nan 0.0010 0.0002
## 320 1.0970 nan 0.0010 0.0003
## 340 1.0864 nan 0.0010 0.0002
## 360 1.0760 nan 0.0010 0.0002
## 380 1.0657 nan 0.0010 0.0002
## 400 1.0558 nan 0.0010 0.0002
## 420 1.0463 nan 0.0010 0.0002
## 440 1.0371 nan 0.0010 0.0002
## 460 1.0279 nan 0.0010 0.0002
## 480 1.0191 nan 0.0010 0.0002
## 500 1.0104 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3119 nan 0.0100 0.0038
## 2 1.3037 nan 0.0100 0.0033
## 3 1.2955 nan 0.0100 0.0037
## 4 1.2878 nan 0.0100 0.0034
## 5 1.2792 nan 0.0100 0.0037
## 6 1.2717 nan 0.0100 0.0033
## 7 1.2644 nan 0.0100 0.0033
## 8 1.2568 nan 0.0100 0.0030
## 9 1.2498 nan 0.0100 0.0031
## 10 1.2435 nan 0.0100 0.0028
## 20 1.1798 nan 0.0100 0.0028
## 40 1.0760 nan 0.0100 0.0016
## 60 0.9961 nan 0.0100 0.0016
## 80 0.9339 nan 0.0100 0.0011
## 100 0.8844 nan 0.0100 0.0007
## 120 0.8438 nan 0.0100 0.0006
## 140 0.8088 nan 0.0100 0.0005
## 160 0.7789 nan 0.0100 0.0003
## 180 0.7526 nan 0.0100 0.0002
## 200 0.7313 nan 0.0100 0.0001
## 220 0.7124 nan 0.0100 0.0002
## 240 0.6943 nan 0.0100 0.0002
## 260 0.6789 nan 0.0100 0.0000
## 280 0.6636 nan 0.0100 -0.0002
## 300 0.6492 nan 0.0100 0.0001
## 320 0.6371 nan 0.0100 0.0000
## 340 0.6249 nan 0.0100 0.0001
## 360 0.6138 nan 0.0100 0.0000
## 380 0.6051 nan 0.0100 -0.0001
## 400 0.5950 nan 0.0100 -0.0001
## 420 0.5842 nan 0.0100 0.0000
## 440 0.5744 nan 0.0100 -0.0000
## 460 0.5661 nan 0.0100 -0.0001
## 480 0.5568 nan 0.0100 -0.0001
## 500 0.5487 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3121 nan 0.0100 0.0039
## 2 1.3038 nan 0.0100 0.0039
## 3 1.2957 nan 0.0100 0.0030
## 4 1.2886 nan 0.0100 0.0030
## 5 1.2805 nan 0.0100 0.0036
## 6 1.2733 nan 0.0100 0.0030
## 7 1.2656 nan 0.0100 0.0034
## 8 1.2581 nan 0.0100 0.0036
## 9 1.2509 nan 0.0100 0.0032
## 10 1.2439 nan 0.0100 0.0032
## 20 1.1807 nan 0.0100 0.0024
## 40 1.0763 nan 0.0100 0.0017
## 60 0.9954 nan 0.0100 0.0015
## 80 0.9334 nan 0.0100 0.0011
## 100 0.8846 nan 0.0100 0.0009
## 120 0.8429 nan 0.0100 0.0007
## 140 0.8085 nan 0.0100 0.0004
## 160 0.7797 nan 0.0100 0.0005
## 180 0.7549 nan 0.0100 0.0003
## 200 0.7340 nan 0.0100 0.0001
## 220 0.7147 nan 0.0100 0.0000
## 240 0.6992 nan 0.0100 0.0000
## 260 0.6839 nan 0.0100 0.0003
## 280 0.6704 nan 0.0100 0.0000
## 300 0.6580 nan 0.0100 -0.0002
## 320 0.6466 nan 0.0100 0.0001
## 340 0.6354 nan 0.0100 -0.0001
## 360 0.6250 nan 0.0100 0.0000
## 380 0.6164 nan 0.0100 -0.0001
## 400 0.6070 nan 0.0100 0.0001
## 420 0.5988 nan 0.0100 0.0001
## 440 0.5907 nan 0.0100 -0.0000
## 460 0.5821 nan 0.0100 0.0000
## 480 0.5733 nan 0.0100 0.0000
## 500 0.5653 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3121 nan 0.0100 0.0037
## 2 1.3045 nan 0.0100 0.0035
## 3 1.2965 nan 0.0100 0.0037
## 4 1.2895 nan 0.0100 0.0025
## 5 1.2816 nan 0.0100 0.0034
## 6 1.2743 nan 0.0100 0.0029
## 7 1.2673 nan 0.0100 0.0029
## 8 1.2602 nan 0.0100 0.0030
## 9 1.2527 nan 0.0100 0.0032
## 10 1.2451 nan 0.0100 0.0031
## 20 1.1789 nan 0.0100 0.0029
## 40 1.0746 nan 0.0100 0.0020
## 60 0.9955 nan 0.0100 0.0014
## 80 0.9346 nan 0.0100 0.0009
## 100 0.8837 nan 0.0100 0.0008
## 120 0.8432 nan 0.0100 0.0008
## 140 0.8104 nan 0.0100 0.0004
## 160 0.7818 nan 0.0100 0.0005
## 180 0.7579 nan 0.0100 0.0003
## 200 0.7370 nan 0.0100 0.0004
## 220 0.7186 nan 0.0100 0.0001
## 240 0.7026 nan 0.0100 0.0001
## 260 0.6871 nan 0.0100 -0.0002
## 280 0.6740 nan 0.0100 -0.0000
## 300 0.6616 nan 0.0100 -0.0000
## 320 0.6506 nan 0.0100 -0.0001
## 340 0.6405 nan 0.0100 -0.0001
## 360 0.6309 nan 0.0100 -0.0001
## 380 0.6224 nan 0.0100 -0.0000
## 400 0.6129 nan 0.0100 0.0000
## 420 0.6051 nan 0.0100 -0.0002
## 440 0.5957 nan 0.0100 0.0002
## 460 0.5874 nan 0.0100 -0.0002
## 480 0.5795 nan 0.0100 0.0000
## 500 0.5717 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3125 nan 0.0100 0.0032
## 2 1.3046 nan 0.0100 0.0036
## 3 1.2965 nan 0.0100 0.0038
## 4 1.2878 nan 0.0100 0.0041
## 5 1.2795 nan 0.0100 0.0035
## 6 1.2720 nan 0.0100 0.0032
## 7 1.2642 nan 0.0100 0.0036
## 8 1.2565 nan 0.0100 0.0034
## 9 1.2486 nan 0.0100 0.0039
## 10 1.2404 nan 0.0100 0.0035
## 20 1.1695 nan 0.0100 0.0028
## 40 1.0617 nan 0.0100 0.0022
## 60 0.9772 nan 0.0100 0.0015
## 80 0.9108 nan 0.0100 0.0013
## 100 0.8576 nan 0.0100 0.0009
## 120 0.8145 nan 0.0100 0.0008
## 140 0.7768 nan 0.0100 0.0006
## 160 0.7474 nan 0.0100 0.0001
## 180 0.7195 nan 0.0100 0.0004
## 200 0.6960 nan 0.0100 0.0004
## 220 0.6765 nan 0.0100 0.0001
## 240 0.6579 nan 0.0100 0.0003
## 260 0.6406 nan 0.0100 0.0000
## 280 0.6244 nan 0.0100 0.0002
## 300 0.6102 nan 0.0100 0.0000
## 320 0.5967 nan 0.0100 0.0001
## 340 0.5840 nan 0.0100 -0.0000
## 360 0.5724 nan 0.0100 0.0001
## 380 0.5598 nan 0.0100 0.0000
## 400 0.5493 nan 0.0100 -0.0000
## 420 0.5395 nan 0.0100 -0.0001
## 440 0.5285 nan 0.0100 -0.0000
## 460 0.5186 nan 0.0100 -0.0001
## 480 0.5091 nan 0.0100 0.0000
## 500 0.4993 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3111 nan 0.0100 0.0041
## 2 1.3023 nan 0.0100 0.0039
## 3 1.2942 nan 0.0100 0.0036
## 4 1.2856 nan 0.0100 0.0036
## 5 1.2780 nan 0.0100 0.0033
## 6 1.2701 nan 0.0100 0.0037
## 7 1.2620 nan 0.0100 0.0036
## 8 1.2547 nan 0.0100 0.0033
## 9 1.2472 nan 0.0100 0.0029
## 10 1.2394 nan 0.0100 0.0036
## 20 1.1716 nan 0.0100 0.0025
## 40 1.0618 nan 0.0100 0.0020
## 60 0.9782 nan 0.0100 0.0016
## 80 0.9135 nan 0.0100 0.0011
## 100 0.8621 nan 0.0100 0.0008
## 120 0.8185 nan 0.0100 0.0008
## 140 0.7830 nan 0.0100 0.0003
## 160 0.7519 nan 0.0100 0.0003
## 180 0.7262 nan 0.0100 0.0004
## 200 0.7015 nan 0.0100 0.0004
## 220 0.6816 nan 0.0100 0.0003
## 240 0.6636 nan 0.0100 0.0003
## 260 0.6452 nan 0.0100 0.0002
## 280 0.6291 nan 0.0100 0.0001
## 300 0.6160 nan 0.0100 0.0001
## 320 0.6030 nan 0.0100 0.0001
## 340 0.5911 nan 0.0100 -0.0001
## 360 0.5788 nan 0.0100 -0.0000
## 380 0.5676 nan 0.0100 0.0001
## 400 0.5572 nan 0.0100 -0.0001
## 420 0.5475 nan 0.0100 -0.0001
## 440 0.5374 nan 0.0100 -0.0000
## 460 0.5283 nan 0.0100 -0.0001
## 480 0.5192 nan 0.0100 -0.0001
## 500 0.5097 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3121 nan 0.0100 0.0038
## 2 1.3034 nan 0.0100 0.0041
## 3 1.2954 nan 0.0100 0.0032
## 4 1.2877 nan 0.0100 0.0035
## 5 1.2797 nan 0.0100 0.0034
## 6 1.2713 nan 0.0100 0.0035
## 7 1.2635 nan 0.0100 0.0037
## 8 1.2556 nan 0.0100 0.0036
## 9 1.2485 nan 0.0100 0.0031
## 10 1.2410 nan 0.0100 0.0029
## 20 1.1732 nan 0.0100 0.0026
## 40 1.0638 nan 0.0100 0.0021
## 60 0.9798 nan 0.0100 0.0014
## 80 0.9134 nan 0.0100 0.0011
## 100 0.8619 nan 0.0100 0.0007
## 120 0.8197 nan 0.0100 0.0006
## 140 0.7847 nan 0.0100 0.0004
## 160 0.7559 nan 0.0100 0.0002
## 180 0.7313 nan 0.0100 0.0004
## 200 0.7097 nan 0.0100 0.0002
## 220 0.6895 nan 0.0100 0.0003
## 240 0.6719 nan 0.0100 -0.0002
## 260 0.6568 nan 0.0100 0.0002
## 280 0.6424 nan 0.0100 0.0000
## 300 0.6284 nan 0.0100 0.0000
## 320 0.6148 nan 0.0100 0.0001
## 340 0.6030 nan 0.0100 0.0001
## 360 0.5909 nan 0.0100 -0.0000
## 380 0.5807 nan 0.0100 -0.0001
## 400 0.5698 nan 0.0100 -0.0001
## 420 0.5592 nan 0.0100 -0.0000
## 440 0.5490 nan 0.0100 -0.0001
## 460 0.5397 nan 0.0100 -0.0001
## 480 0.5305 nan 0.0100 0.0002
## 500 0.5222 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3117 nan 0.0100 0.0039
## 2 1.3031 nan 0.0100 0.0040
## 3 1.2942 nan 0.0100 0.0042
## 4 1.2858 nan 0.0100 0.0039
## 5 1.2770 nan 0.0100 0.0037
## 6 1.2683 nan 0.0100 0.0037
## 7 1.2601 nan 0.0100 0.0037
## 8 1.2520 nan 0.0100 0.0035
## 9 1.2432 nan 0.0100 0.0034
## 10 1.2352 nan 0.0100 0.0033
## 20 1.1631 nan 0.0100 0.0027
## 40 1.0471 nan 0.0100 0.0021
## 60 0.9591 nan 0.0100 0.0017
## 80 0.8903 nan 0.0100 0.0015
## 100 0.8349 nan 0.0100 0.0009
## 120 0.7887 nan 0.0100 0.0006
## 140 0.7507 nan 0.0100 0.0005
## 160 0.7177 nan 0.0100 0.0003
## 180 0.6902 nan 0.0100 0.0001
## 200 0.6671 nan 0.0100 0.0001
## 220 0.6453 nan 0.0100 0.0002
## 240 0.6249 nan 0.0100 0.0003
## 260 0.6048 nan 0.0100 0.0000
## 280 0.5875 nan 0.0100 0.0000
## 300 0.5702 nan 0.0100 -0.0001
## 320 0.5559 nan 0.0100 -0.0001
## 340 0.5417 nan 0.0100 0.0000
## 360 0.5287 nan 0.0100 0.0000
## 380 0.5168 nan 0.0100 0.0001
## 400 0.5056 nan 0.0100 0.0000
## 420 0.4942 nan 0.0100 -0.0001
## 440 0.4847 nan 0.0100 -0.0001
## 460 0.4742 nan 0.0100 -0.0001
## 480 0.4646 nan 0.0100 -0.0000
## 500 0.4548 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3118 nan 0.0100 0.0040
## 2 1.3027 nan 0.0100 0.0041
## 3 1.2944 nan 0.0100 0.0034
## 4 1.2856 nan 0.0100 0.0038
## 5 1.2774 nan 0.0100 0.0038
## 6 1.2688 nan 0.0100 0.0036
## 7 1.2602 nan 0.0100 0.0038
## 8 1.2522 nan 0.0100 0.0039
## 9 1.2446 nan 0.0100 0.0033
## 10 1.2370 nan 0.0100 0.0034
## 20 1.1670 nan 0.0100 0.0031
## 40 1.0506 nan 0.0100 0.0021
## 60 0.9655 nan 0.0100 0.0017
## 80 0.8963 nan 0.0100 0.0009
## 100 0.8422 nan 0.0100 0.0009
## 120 0.7967 nan 0.0100 0.0004
## 140 0.7594 nan 0.0100 0.0004
## 160 0.7273 nan 0.0100 0.0004
## 180 0.7002 nan 0.0100 0.0003
## 200 0.6755 nan 0.0100 0.0003
## 220 0.6550 nan 0.0100 -0.0001
## 240 0.6351 nan 0.0100 0.0002
## 260 0.6170 nan 0.0100 0.0002
## 280 0.6009 nan 0.0100 -0.0002
## 300 0.5860 nan 0.0100 -0.0001
## 320 0.5713 nan 0.0100 0.0000
## 340 0.5578 nan 0.0100 0.0000
## 360 0.5446 nan 0.0100 -0.0000
## 380 0.5314 nan 0.0100 0.0002
## 400 0.5199 nan 0.0100 -0.0002
## 420 0.5097 nan 0.0100 -0.0000
## 440 0.4980 nan 0.0100 0.0002
## 460 0.4870 nan 0.0100 -0.0001
## 480 0.4772 nan 0.0100 -0.0000
## 500 0.4668 nan 0.0100 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3119 nan 0.0100 0.0039
## 2 1.3033 nan 0.0100 0.0037
## 3 1.2952 nan 0.0100 0.0035
## 4 1.2870 nan 0.0100 0.0037
## 5 1.2786 nan 0.0100 0.0037
## 6 1.2701 nan 0.0100 0.0038
## 7 1.2618 nan 0.0100 0.0033
## 8 1.2534 nan 0.0100 0.0034
## 9 1.2453 nan 0.0100 0.0035
## 10 1.2376 nan 0.0100 0.0033
## 20 1.1687 nan 0.0100 0.0033
## 40 1.0562 nan 0.0100 0.0020
## 60 0.9708 nan 0.0100 0.0014
## 80 0.9047 nan 0.0100 0.0010
## 100 0.8498 nan 0.0100 0.0011
## 120 0.8061 nan 0.0100 0.0008
## 140 0.7686 nan 0.0100 0.0005
## 160 0.7369 nan 0.0100 0.0002
## 180 0.7108 nan 0.0100 0.0004
## 200 0.6863 nan 0.0100 0.0000
## 220 0.6645 nan 0.0100 0.0001
## 240 0.6462 nan 0.0100 0.0002
## 260 0.6290 nan 0.0100 0.0001
## 280 0.6123 nan 0.0100 -0.0000
## 300 0.5974 nan 0.0100 -0.0001
## 320 0.5834 nan 0.0100 -0.0001
## 340 0.5693 nan 0.0100 -0.0001
## 360 0.5561 nan 0.0100 -0.0000
## 380 0.5439 nan 0.0100 -0.0002
## 400 0.5330 nan 0.0100 -0.0000
## 420 0.5219 nan 0.0100 -0.0002
## 440 0.5122 nan 0.0100 0.0000
## 460 0.5015 nan 0.0100 0.0000
## 480 0.4916 nan 0.0100 -0.0002
## 500 0.4825 nan 0.0100 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2502 nan 0.1000 0.0310
## 2 1.1941 nan 0.1000 0.0222
## 3 1.1409 nan 0.1000 0.0222
## 4 1.0922 nan 0.1000 0.0219
## 5 1.0514 nan 0.1000 0.0165
## 6 1.0108 nan 0.1000 0.0163
## 7 0.9734 nan 0.1000 0.0130
## 8 0.9370 nan 0.1000 0.0137
## 9 0.9158 nan 0.1000 0.0071
## 10 0.8921 nan 0.1000 0.0095
## 20 0.7340 nan 0.1000 0.0037
## 40 0.6019 nan 0.1000 -0.0008
## 60 0.5167 nan 0.1000 0.0003
## 80 0.4604 nan 0.1000 -0.0008
## 100 0.4053 nan 0.1000 -0.0005
## 120 0.3595 nan 0.1000 -0.0005
## 140 0.3205 nan 0.1000 -0.0006
## 160 0.2892 nan 0.1000 0.0000
## 180 0.2606 nan 0.1000 -0.0010
## 200 0.2373 nan 0.1000 -0.0004
## 220 0.2159 nan 0.1000 -0.0000
## 240 0.1981 nan 0.1000 -0.0003
## 260 0.1798 nan 0.1000 -0.0004
## 280 0.1665 nan 0.1000 -0.0004
## 300 0.1528 nan 0.1000 -0.0002
## 320 0.1411 nan 0.1000 -0.0003
## 340 0.1296 nan 0.1000 -0.0002
## 360 0.1202 nan 0.1000 -0.0004
## 380 0.1097 nan 0.1000 -0.0004
## 400 0.0998 nan 0.1000 -0.0001
## 420 0.0923 nan 0.1000 -0.0003
## 440 0.0852 nan 0.1000 -0.0003
## 460 0.0793 nan 0.1000 -0.0001
## 480 0.0738 nan 0.1000 -0.0000
## 500 0.0681 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2359 nan 0.1000 0.0365
## 2 1.1720 nan 0.1000 0.0259
## 3 1.1154 nan 0.1000 0.0232
## 4 1.0671 nan 0.1000 0.0201
## 5 1.0294 nan 0.1000 0.0165
## 6 0.9922 nan 0.1000 0.0157
## 7 0.9595 nan 0.1000 0.0156
## 8 0.9266 nan 0.1000 0.0118
## 9 0.9019 nan 0.1000 0.0098
## 10 0.8812 nan 0.1000 0.0067
## 20 0.7315 nan 0.1000 0.0032
## 40 0.6053 nan 0.1000 -0.0018
## 60 0.5237 nan 0.1000 -0.0013
## 80 0.4635 nan 0.1000 -0.0009
## 100 0.4139 nan 0.1000 0.0006
## 120 0.3709 nan 0.1000 0.0004
## 140 0.3322 nan 0.1000 -0.0008
## 160 0.2983 nan 0.1000 -0.0005
## 180 0.2710 nan 0.1000 -0.0001
## 200 0.2446 nan 0.1000 -0.0010
## 220 0.2222 nan 0.1000 -0.0006
## 240 0.2029 nan 0.1000 -0.0008
## 260 0.1869 nan 0.1000 -0.0006
## 280 0.1721 nan 0.1000 -0.0007
## 300 0.1584 nan 0.1000 -0.0001
## 320 0.1447 nan 0.1000 -0.0003
## 340 0.1340 nan 0.1000 -0.0001
## 360 0.1229 nan 0.1000 -0.0004
## 380 0.1130 nan 0.1000 -0.0000
## 400 0.1044 nan 0.1000 -0.0002
## 420 0.0962 nan 0.1000 -0.0003
## 440 0.0894 nan 0.1000 -0.0002
## 460 0.0830 nan 0.1000 -0.0003
## 480 0.0777 nan 0.1000 -0.0003
## 500 0.0724 nan 0.1000 -0.0003
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2404 nan 0.1000 0.0370
## 2 1.1727 nan 0.1000 0.0342
## 3 1.1160 nan 0.1000 0.0276
## 4 1.0706 nan 0.1000 0.0190
## 5 1.0299 nan 0.1000 0.0162
## 6 0.9940 nan 0.1000 0.0157
## 7 0.9625 nan 0.1000 0.0115
## 8 0.9338 nan 0.1000 0.0101
## 9 0.9036 nan 0.1000 0.0126
## 10 0.8813 nan 0.1000 0.0074
## 20 0.7389 nan 0.1000 0.0046
## 40 0.6106 nan 0.1000 -0.0000
## 60 0.5407 nan 0.1000 -0.0002
## 80 0.4790 nan 0.1000 -0.0016
## 100 0.4314 nan 0.1000 -0.0013
## 120 0.3893 nan 0.1000 -0.0014
## 140 0.3459 nan 0.1000 -0.0003
## 160 0.3168 nan 0.1000 -0.0008
## 180 0.2882 nan 0.1000 -0.0005
## 200 0.2667 nan 0.1000 -0.0009
## 220 0.2456 nan 0.1000 -0.0004
## 240 0.2248 nan 0.1000 -0.0010
## 260 0.2076 nan 0.1000 -0.0006
## 280 0.1893 nan 0.1000 -0.0007
## 300 0.1744 nan 0.1000 -0.0004
## 320 0.1617 nan 0.1000 -0.0007
## 340 0.1502 nan 0.1000 0.0001
## 360 0.1397 nan 0.1000 -0.0005
## 380 0.1301 nan 0.1000 -0.0008
## 400 0.1200 nan 0.1000 -0.0003
## 420 0.1121 nan 0.1000 -0.0003
## 440 0.1038 nan 0.1000 -0.0002
## 460 0.0961 nan 0.1000 -0.0003
## 480 0.0897 nan 0.1000 -0.0001
## 500 0.0836 nan 0.1000 -0.0003
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2371 nan 0.1000 0.0378
## 2 1.1645 nan 0.1000 0.0292
## 3 1.0998 nan 0.1000 0.0284
## 4 1.0539 nan 0.1000 0.0199
## 5 1.0144 nan 0.1000 0.0153
## 6 0.9810 nan 0.1000 0.0160
## 7 0.9457 nan 0.1000 0.0143
## 8 0.9146 nan 0.1000 0.0125
## 9 0.8875 nan 0.1000 0.0109
## 10 0.8645 nan 0.1000 0.0099
## 20 0.7009 nan 0.1000 0.0041
## 40 0.5558 nan 0.1000 -0.0005
## 60 0.4610 nan 0.1000 -0.0010
## 80 0.3883 nan 0.1000 0.0003
## 100 0.3383 nan 0.1000 -0.0009
## 120 0.2935 nan 0.1000 0.0000
## 140 0.2567 nan 0.1000 -0.0007
## 160 0.2272 nan 0.1000 0.0001
## 180 0.2023 nan 0.1000 -0.0004
## 200 0.1782 nan 0.1000 -0.0001
## 220 0.1589 nan 0.1000 -0.0003
## 240 0.1424 nan 0.1000 -0.0005
## 260 0.1265 nan 0.1000 -0.0003
## 280 0.1136 nan 0.1000 -0.0003
## 300 0.1022 nan 0.1000 -0.0003
## 320 0.0933 nan 0.1000 -0.0003
## 340 0.0833 nan 0.1000 -0.0001
## 360 0.0757 nan 0.1000 -0.0002
## 380 0.0685 nan 0.1000 -0.0001
## 400 0.0619 nan 0.1000 -0.0001
## 420 0.0568 nan 0.1000 -0.0002
## 440 0.0518 nan 0.1000 0.0000
## 460 0.0479 nan 0.1000 -0.0001
## 480 0.0440 nan 0.1000 -0.0002
## 500 0.0398 nan 0.1000 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2422 nan 0.1000 0.0362
## 2 1.1708 nan 0.1000 0.0272
## 3 1.1153 nan 0.1000 0.0231
## 4 1.0612 nan 0.1000 0.0217
## 5 1.0188 nan 0.1000 0.0169
## 6 0.9747 nan 0.1000 0.0172
## 7 0.9379 nan 0.1000 0.0149
## 8 0.9057 nan 0.1000 0.0138
## 9 0.8797 nan 0.1000 0.0101
## 10 0.8546 nan 0.1000 0.0075
## 20 0.6973 nan 0.1000 0.0007
## 40 0.5569 nan 0.1000 0.0001
## 60 0.4713 nan 0.1000 -0.0013
## 80 0.4063 nan 0.1000 -0.0017
## 100 0.3488 nan 0.1000 -0.0004
## 120 0.3040 nan 0.1000 -0.0002
## 140 0.2669 nan 0.1000 -0.0011
## 160 0.2380 nan 0.1000 -0.0011
## 180 0.2116 nan 0.1000 -0.0010
## 200 0.1902 nan 0.1000 -0.0010
## 220 0.1692 nan 0.1000 -0.0004
## 240 0.1532 nan 0.1000 -0.0011
## 260 0.1376 nan 0.1000 -0.0001
## 280 0.1235 nan 0.1000 -0.0011
## 300 0.1117 nan 0.1000 -0.0004
## 320 0.0997 nan 0.1000 -0.0002
## 340 0.0918 nan 0.1000 -0.0003
## 360 0.0824 nan 0.1000 -0.0001
## 380 0.0742 nan 0.1000 -0.0002
## 400 0.0680 nan 0.1000 -0.0004
## 420 0.0623 nan 0.1000 -0.0003
## 440 0.0555 nan 0.1000 -0.0002
## 460 0.0511 nan 0.1000 -0.0002
## 480 0.0463 nan 0.1000 -0.0001
## 500 0.0423 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2399 nan 0.1000 0.0379
## 2 1.1668 nan 0.1000 0.0329
## 3 1.1041 nan 0.1000 0.0294
## 4 1.0533 nan 0.1000 0.0197
## 5 1.0094 nan 0.1000 0.0175
## 6 0.9747 nan 0.1000 0.0151
## 7 0.9389 nan 0.1000 0.0139
## 8 0.9067 nan 0.1000 0.0147
## 9 0.8793 nan 0.1000 0.0096
## 10 0.8552 nan 0.1000 0.0082
## 20 0.7078 nan 0.1000 0.0035
## 40 0.5789 nan 0.1000 -0.0006
## 60 0.4827 nan 0.1000 -0.0006
## 80 0.4181 nan 0.1000 0.0001
## 100 0.3624 nan 0.1000 0.0002
## 120 0.3163 nan 0.1000 -0.0004
## 140 0.2773 nan 0.1000 -0.0011
## 160 0.2471 nan 0.1000 -0.0007
## 180 0.2199 nan 0.1000 -0.0006
## 200 0.1985 nan 0.1000 -0.0006
## 220 0.1789 nan 0.1000 -0.0008
## 240 0.1606 nan 0.1000 -0.0006
## 260 0.1442 nan 0.1000 -0.0004
## 280 0.1294 nan 0.1000 -0.0002
## 300 0.1173 nan 0.1000 -0.0006
## 320 0.1066 nan 0.1000 -0.0004
## 340 0.0965 nan 0.1000 -0.0002
## 360 0.0877 nan 0.1000 -0.0001
## 380 0.0802 nan 0.1000 -0.0004
## 400 0.0726 nan 0.1000 -0.0002
## 420 0.0664 nan 0.1000 -0.0003
## 440 0.0604 nan 0.1000 -0.0000
## 460 0.0553 nan 0.1000 -0.0001
## 480 0.0506 nan 0.1000 -0.0002
## 500 0.0460 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2401 nan 0.1000 0.0366
## 2 1.1668 nan 0.1000 0.0334
## 3 1.1072 nan 0.1000 0.0234
## 4 1.0437 nan 0.1000 0.0276
## 5 0.9959 nan 0.1000 0.0201
## 6 0.9506 nan 0.1000 0.0165
## 7 0.9180 nan 0.1000 0.0115
## 8 0.8897 nan 0.1000 0.0103
## 9 0.8634 nan 0.1000 0.0096
## 10 0.8413 nan 0.1000 0.0080
## 20 0.6742 nan 0.1000 0.0018
## 40 0.5270 nan 0.1000 0.0000
## 60 0.4265 nan 0.1000 0.0003
## 80 0.3616 nan 0.1000 -0.0011
## 100 0.3049 nan 0.1000 -0.0001
## 120 0.2602 nan 0.1000 -0.0007
## 140 0.2228 nan 0.1000 -0.0012
## 160 0.1926 nan 0.1000 -0.0001
## 180 0.1641 nan 0.1000 -0.0006
## 200 0.1428 nan 0.1000 -0.0003
## 220 0.1253 nan 0.1000 -0.0003
## 240 0.1086 nan 0.1000 -0.0004
## 260 0.0960 nan 0.1000 0.0001
## 280 0.0857 nan 0.1000 -0.0001
## 300 0.0756 nan 0.1000 -0.0002
## 320 0.0670 nan 0.1000 -0.0003
## 340 0.0591 nan 0.1000 -0.0000
## 360 0.0522 nan 0.1000 -0.0000
## 380 0.0473 nan 0.1000 -0.0002
## 400 0.0421 nan 0.1000 -0.0001
## 420 0.0378 nan 0.1000 -0.0001
## 440 0.0335 nan 0.1000 -0.0001
## 460 0.0297 nan 0.1000 -0.0001
## 480 0.0266 nan 0.1000 -0.0001
## 500 0.0236 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2310 nan 0.1000 0.0418
## 2 1.1637 nan 0.1000 0.0273
## 3 1.0971 nan 0.1000 0.0283
## 4 1.0471 nan 0.1000 0.0224
## 5 0.9999 nan 0.1000 0.0193
## 6 0.9579 nan 0.1000 0.0168
## 7 0.9190 nan 0.1000 0.0172
## 8 0.8923 nan 0.1000 0.0094
## 9 0.8653 nan 0.1000 0.0098
## 10 0.8384 nan 0.1000 0.0105
## 20 0.6777 nan 0.1000 0.0015
## 40 0.5352 nan 0.1000 -0.0016
## 60 0.4294 nan 0.1000 -0.0012
## 80 0.3525 nan 0.1000 -0.0010
## 100 0.2941 nan 0.1000 0.0004
## 120 0.2472 nan 0.1000 0.0002
## 140 0.2127 nan 0.1000 -0.0004
## 160 0.1822 nan 0.1000 -0.0005
## 180 0.1591 nan 0.1000 -0.0003
## 200 0.1377 nan 0.1000 -0.0004
## 220 0.1190 nan 0.1000 -0.0004
## 240 0.1047 nan 0.1000 -0.0005
## 260 0.0933 nan 0.1000 -0.0001
## 280 0.0830 nan 0.1000 -0.0002
## 300 0.0735 nan 0.1000 -0.0001
## 320 0.0654 nan 0.1000 -0.0002
## 340 0.0575 nan 0.1000 -0.0002
## 360 0.0511 nan 0.1000 -0.0000
## 380 0.0457 nan 0.1000 -0.0001
## 400 0.0409 nan 0.1000 0.0000
## 420 0.0365 nan 0.1000 -0.0001
## 440 0.0332 nan 0.1000 -0.0001
## 460 0.0300 nan 0.1000 -0.0001
## 480 0.0268 nan 0.1000 -0.0001
## 500 0.0238 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2344 nan 0.1000 0.0383
## 2 1.1662 nan 0.1000 0.0292
## 3 1.1067 nan 0.1000 0.0252
## 4 1.0516 nan 0.1000 0.0204
## 5 1.0096 nan 0.1000 0.0212
## 6 0.9658 nan 0.1000 0.0149
## 7 0.9301 nan 0.1000 0.0125
## 8 0.8990 nan 0.1000 0.0132
## 9 0.8714 nan 0.1000 0.0100
## 10 0.8471 nan 0.1000 0.0080
## 20 0.6950 nan 0.1000 -0.0013
## 40 0.5500 nan 0.1000 -0.0000
## 60 0.4511 nan 0.1000 0.0005
## 80 0.3817 nan 0.1000 -0.0023
## 100 0.3240 nan 0.1000 -0.0008
## 120 0.2821 nan 0.1000 0.0002
## 140 0.2429 nan 0.1000 -0.0005
## 160 0.2107 nan 0.1000 -0.0013
## 180 0.1811 nan 0.1000 -0.0009
## 200 0.1578 nan 0.1000 0.0001
## 220 0.1388 nan 0.1000 -0.0005
## 240 0.1231 nan 0.1000 -0.0004
## 260 0.1074 nan 0.1000 -0.0004
## 280 0.0945 nan 0.1000 -0.0003
## 300 0.0839 nan 0.1000 -0.0003
## 320 0.0743 nan 0.1000 -0.0004
## 340 0.0660 nan 0.1000 -0.0003
## 360 0.0585 nan 0.1000 -0.0003
## 380 0.0523 nan 0.1000 -0.0003
## 400 0.0470 nan 0.1000 -0.0002
## 420 0.0416 nan 0.1000 -0.0002
## 440 0.0370 nan 0.1000 -0.0002
## 460 0.0332 nan 0.1000 -0.0001
## 480 0.0293 nan 0.1000 -0.0001
## 500 0.0266 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3204 nan 0.0010 0.0004
## 2 1.3195 nan 0.0010 0.0004
## 3 1.3186 nan 0.0010 0.0004
## 4 1.3179 nan 0.0010 0.0003
## 5 1.3170 nan 0.0010 0.0004
## 6 1.3162 nan 0.0010 0.0004
## 7 1.3153 nan 0.0010 0.0004
## 8 1.3144 nan 0.0010 0.0004
## 9 1.3135 nan 0.0010 0.0004
## 10 1.3127 nan 0.0010 0.0004
## 20 1.3042 nan 0.0010 0.0004
## 40 1.2875 nan 0.0010 0.0004
## 60 1.2714 nan 0.0010 0.0003
## 80 1.2561 nan 0.0010 0.0003
## 100 1.2412 nan 0.0010 0.0003
## 120 1.2269 nan 0.0010 0.0003
## 140 1.2132 nan 0.0010 0.0003
## 160 1.1995 nan 0.0010 0.0003
## 180 1.1862 nan 0.0010 0.0003
## 200 1.1735 nan 0.0010 0.0003
## 220 1.1608 nan 0.0010 0.0002
## 240 1.1487 nan 0.0010 0.0003
## 260 1.1372 nan 0.0010 0.0003
## 280 1.1258 nan 0.0010 0.0002
## 300 1.1147 nan 0.0010 0.0002
## 320 1.1039 nan 0.0010 0.0002
## 340 1.0934 nan 0.0010 0.0002
## 360 1.0832 nan 0.0010 0.0002
## 380 1.0731 nan 0.0010 0.0002
## 400 1.0636 nan 0.0010 0.0002
## 420 1.0542 nan 0.0010 0.0002
## 440 1.0450 nan 0.0010 0.0002
## 460 1.0361 nan 0.0010 0.0002
## 480 1.0272 nan 0.0010 0.0002
## 500 1.0187 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3195 nan 0.0010 0.0004
## 3 1.3187 nan 0.0010 0.0004
## 4 1.3178 nan 0.0010 0.0004
## 5 1.3169 nan 0.0010 0.0004
## 6 1.3161 nan 0.0010 0.0004
## 7 1.3152 nan 0.0010 0.0004
## 8 1.3144 nan 0.0010 0.0004
## 9 1.3136 nan 0.0010 0.0004
## 10 1.3127 nan 0.0010 0.0004
## 20 1.3041 nan 0.0010 0.0004
## 40 1.2877 nan 0.0010 0.0004
## 60 1.2719 nan 0.0010 0.0003
## 80 1.2566 nan 0.0010 0.0003
## 100 1.2418 nan 0.0010 0.0003
## 120 1.2273 nan 0.0010 0.0003
## 140 1.2135 nan 0.0010 0.0003
## 160 1.1999 nan 0.0010 0.0003
## 180 1.1869 nan 0.0010 0.0003
## 200 1.1744 nan 0.0010 0.0003
## 220 1.1621 nan 0.0010 0.0003
## 240 1.1499 nan 0.0010 0.0003
## 260 1.1383 nan 0.0010 0.0002
## 280 1.1271 nan 0.0010 0.0002
## 300 1.1157 nan 0.0010 0.0003
## 320 1.1049 nan 0.0010 0.0002
## 340 1.0942 nan 0.0010 0.0002
## 360 1.0841 nan 0.0010 0.0002
## 380 1.0742 nan 0.0010 0.0002
## 400 1.0647 nan 0.0010 0.0002
## 420 1.0551 nan 0.0010 0.0002
## 440 1.0457 nan 0.0010 0.0002
## 460 1.0367 nan 0.0010 0.0002
## 480 1.0278 nan 0.0010 0.0001
## 500 1.0193 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3195 nan 0.0010 0.0003
## 3 1.3187 nan 0.0010 0.0004
## 4 1.3178 nan 0.0010 0.0004
## 5 1.3170 nan 0.0010 0.0004
## 6 1.3161 nan 0.0010 0.0004
## 7 1.3153 nan 0.0010 0.0004
## 8 1.3145 nan 0.0010 0.0004
## 9 1.3136 nan 0.0010 0.0004
## 10 1.3127 nan 0.0010 0.0004
## 20 1.3042 nan 0.0010 0.0004
## 40 1.2880 nan 0.0010 0.0003
## 60 1.2720 nan 0.0010 0.0003
## 80 1.2568 nan 0.0010 0.0003
## 100 1.2420 nan 0.0010 0.0004
## 120 1.2274 nan 0.0010 0.0003
## 140 1.2134 nan 0.0010 0.0003
## 160 1.2001 nan 0.0010 0.0003
## 180 1.1872 nan 0.0010 0.0003
## 200 1.1741 nan 0.0010 0.0003
## 220 1.1617 nan 0.0010 0.0002
## 240 1.1497 nan 0.0010 0.0003
## 260 1.1381 nan 0.0010 0.0002
## 280 1.1266 nan 0.0010 0.0003
## 300 1.1157 nan 0.0010 0.0003
## 320 1.1049 nan 0.0010 0.0002
## 340 1.0946 nan 0.0010 0.0002
## 360 1.0843 nan 0.0010 0.0002
## 380 1.0743 nan 0.0010 0.0002
## 400 1.0647 nan 0.0010 0.0002
## 420 1.0555 nan 0.0010 0.0002
## 440 1.0463 nan 0.0010 0.0002
## 460 1.0372 nan 0.0010 0.0002
## 480 1.0285 nan 0.0010 0.0002
## 500 1.0199 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3193 nan 0.0010 0.0004
## 3 1.3184 nan 0.0010 0.0004
## 4 1.3175 nan 0.0010 0.0004
## 5 1.3167 nan 0.0010 0.0004
## 6 1.3157 nan 0.0010 0.0005
## 7 1.3149 nan 0.0010 0.0004
## 8 1.3140 nan 0.0010 0.0004
## 9 1.3131 nan 0.0010 0.0004
## 10 1.3122 nan 0.0010 0.0004
## 20 1.3033 nan 0.0010 0.0004
## 40 1.2858 nan 0.0010 0.0004
## 60 1.2685 nan 0.0010 0.0004
## 80 1.2522 nan 0.0010 0.0003
## 100 1.2364 nan 0.0010 0.0004
## 120 1.2210 nan 0.0010 0.0004
## 140 1.2064 nan 0.0010 0.0003
## 160 1.1920 nan 0.0010 0.0003
## 180 1.1781 nan 0.0010 0.0003
## 200 1.1644 nan 0.0010 0.0003
## 220 1.1514 nan 0.0010 0.0003
## 240 1.1386 nan 0.0010 0.0003
## 260 1.1262 nan 0.0010 0.0003
## 280 1.1140 nan 0.0010 0.0003
## 300 1.1023 nan 0.0010 0.0003
## 320 1.0906 nan 0.0010 0.0002
## 340 1.0794 nan 0.0010 0.0002
## 360 1.0686 nan 0.0010 0.0002
## 380 1.0582 nan 0.0010 0.0002
## 400 1.0479 nan 0.0010 0.0002
## 420 1.0381 nan 0.0010 0.0002
## 440 1.0285 nan 0.0010 0.0002
## 460 1.0190 nan 0.0010 0.0002
## 480 1.0097 nan 0.0010 0.0002
## 500 1.0006 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3202 nan 0.0010 0.0005
## 2 1.3194 nan 0.0010 0.0004
## 3 1.3185 nan 0.0010 0.0004
## 4 1.3177 nan 0.0010 0.0004
## 5 1.3168 nan 0.0010 0.0004
## 6 1.3158 nan 0.0010 0.0004
## 7 1.3149 nan 0.0010 0.0004
## 8 1.3140 nan 0.0010 0.0004
## 9 1.3131 nan 0.0010 0.0004
## 10 1.3122 nan 0.0010 0.0004
## 20 1.3034 nan 0.0010 0.0004
## 40 1.2861 nan 0.0010 0.0004
## 60 1.2694 nan 0.0010 0.0004
## 80 1.2528 nan 0.0010 0.0003
## 100 1.2374 nan 0.0010 0.0004
## 120 1.2221 nan 0.0010 0.0004
## 140 1.2072 nan 0.0010 0.0003
## 160 1.1929 nan 0.0010 0.0003
## 180 1.1789 nan 0.0010 0.0003
## 200 1.1653 nan 0.0010 0.0003
## 220 1.1523 nan 0.0010 0.0003
## 240 1.1396 nan 0.0010 0.0003
## 260 1.1272 nan 0.0010 0.0002
## 280 1.1151 nan 0.0010 0.0002
## 300 1.1034 nan 0.0010 0.0002
## 320 1.0921 nan 0.0010 0.0003
## 340 1.0810 nan 0.0010 0.0002
## 360 1.0702 nan 0.0010 0.0002
## 380 1.0597 nan 0.0010 0.0003
## 400 1.0493 nan 0.0010 0.0002
## 420 1.0394 nan 0.0010 0.0002
## 440 1.0297 nan 0.0010 0.0002
## 460 1.0203 nan 0.0010 0.0002
## 480 1.0110 nan 0.0010 0.0002
## 500 1.0021 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3193 nan 0.0010 0.0004
## 3 1.3184 nan 0.0010 0.0004
## 4 1.3174 nan 0.0010 0.0004
## 5 1.3166 nan 0.0010 0.0004
## 6 1.3157 nan 0.0010 0.0004
## 7 1.3148 nan 0.0010 0.0004
## 8 1.3138 nan 0.0010 0.0004
## 9 1.3129 nan 0.0010 0.0004
## 10 1.3120 nan 0.0010 0.0004
## 20 1.3034 nan 0.0010 0.0004
## 40 1.2858 nan 0.0010 0.0004
## 60 1.2693 nan 0.0010 0.0004
## 80 1.2530 nan 0.0010 0.0004
## 100 1.2373 nan 0.0010 0.0004
## 120 1.2219 nan 0.0010 0.0003
## 140 1.2075 nan 0.0010 0.0003
## 160 1.1932 nan 0.0010 0.0003
## 180 1.1793 nan 0.0010 0.0003
## 200 1.1658 nan 0.0010 0.0003
## 220 1.1528 nan 0.0010 0.0003
## 240 1.1404 nan 0.0010 0.0003
## 260 1.1280 nan 0.0010 0.0003
## 280 1.1162 nan 0.0010 0.0002
## 300 1.1046 nan 0.0010 0.0003
## 320 1.0934 nan 0.0010 0.0002
## 340 1.0822 nan 0.0010 0.0002
## 360 1.0716 nan 0.0010 0.0002
## 380 1.0612 nan 0.0010 0.0002
## 400 1.0510 nan 0.0010 0.0002
## 420 1.0410 nan 0.0010 0.0002
## 440 1.0313 nan 0.0010 0.0002
## 460 1.0220 nan 0.0010 0.0002
## 480 1.0129 nan 0.0010 0.0002
## 500 1.0039 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3202 nan 0.0010 0.0005
## 2 1.3192 nan 0.0010 0.0004
## 3 1.3183 nan 0.0010 0.0004
## 4 1.3173 nan 0.0010 0.0004
## 5 1.3164 nan 0.0010 0.0004
## 6 1.3155 nan 0.0010 0.0004
## 7 1.3146 nan 0.0010 0.0004
## 8 1.3136 nan 0.0010 0.0004
## 9 1.3127 nan 0.0010 0.0004
## 10 1.3118 nan 0.0010 0.0004
## 20 1.3026 nan 0.0010 0.0004
## 40 1.2843 nan 0.0010 0.0004
## 60 1.2666 nan 0.0010 0.0004
## 80 1.2497 nan 0.0010 0.0003
## 100 1.2328 nan 0.0010 0.0004
## 120 1.2166 nan 0.0010 0.0003
## 140 1.2012 nan 0.0010 0.0003
## 160 1.1862 nan 0.0010 0.0004
## 180 1.1716 nan 0.0010 0.0003
## 200 1.1575 nan 0.0010 0.0003
## 220 1.1436 nan 0.0010 0.0003
## 240 1.1304 nan 0.0010 0.0002
## 260 1.1172 nan 0.0010 0.0003
## 280 1.1046 nan 0.0010 0.0003
## 300 1.0924 nan 0.0010 0.0003
## 320 1.0805 nan 0.0010 0.0003
## 340 1.0690 nan 0.0010 0.0002
## 360 1.0576 nan 0.0010 0.0003
## 380 1.0468 nan 0.0010 0.0003
## 400 1.0361 nan 0.0010 0.0002
## 420 1.0255 nan 0.0010 0.0002
## 440 1.0154 nan 0.0010 0.0002
## 460 1.0055 nan 0.0010 0.0002
## 480 0.9959 nan 0.0010 0.0002
## 500 0.9864 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3193 nan 0.0010 0.0005
## 3 1.3183 nan 0.0010 0.0004
## 4 1.3173 nan 0.0010 0.0004
## 5 1.3164 nan 0.0010 0.0004
## 6 1.3155 nan 0.0010 0.0004
## 7 1.3146 nan 0.0010 0.0004
## 8 1.3136 nan 0.0010 0.0004
## 9 1.3126 nan 0.0010 0.0004
## 10 1.3116 nan 0.0010 0.0004
## 20 1.3024 nan 0.0010 0.0004
## 40 1.2843 nan 0.0010 0.0004
## 60 1.2666 nan 0.0010 0.0004
## 80 1.2496 nan 0.0010 0.0003
## 100 1.2332 nan 0.0010 0.0003
## 120 1.2174 nan 0.0010 0.0004
## 140 1.2019 nan 0.0010 0.0003
## 160 1.1868 nan 0.0010 0.0003
## 180 1.1720 nan 0.0010 0.0003
## 200 1.1583 nan 0.0010 0.0003
## 220 1.1446 nan 0.0010 0.0003
## 240 1.1314 nan 0.0010 0.0003
## 260 1.1183 nan 0.0010 0.0003
## 280 1.1058 nan 0.0010 0.0003
## 300 1.0936 nan 0.0010 0.0003
## 320 1.0816 nan 0.0010 0.0002
## 340 1.0703 nan 0.0010 0.0003
## 360 1.0590 nan 0.0010 0.0002
## 380 1.0480 nan 0.0010 0.0002
## 400 1.0373 nan 0.0010 0.0002
## 420 1.0269 nan 0.0010 0.0002
## 440 1.0167 nan 0.0010 0.0002
## 460 1.0071 nan 0.0010 0.0002
## 480 0.9977 nan 0.0010 0.0002
## 500 0.9885 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3202 nan 0.0010 0.0004
## 2 1.3192 nan 0.0010 0.0005
## 3 1.3183 nan 0.0010 0.0004
## 4 1.3174 nan 0.0010 0.0004
## 5 1.3164 nan 0.0010 0.0004
## 6 1.3155 nan 0.0010 0.0004
## 7 1.3145 nan 0.0010 0.0004
## 8 1.3136 nan 0.0010 0.0004
## 9 1.3126 nan 0.0010 0.0004
## 10 1.3117 nan 0.0010 0.0005
## 20 1.3023 nan 0.0010 0.0004
## 40 1.2841 nan 0.0010 0.0004
## 60 1.2667 nan 0.0010 0.0004
## 80 1.2497 nan 0.0010 0.0003
## 100 1.2334 nan 0.0010 0.0004
## 120 1.2178 nan 0.0010 0.0003
## 140 1.2024 nan 0.0010 0.0004
## 160 1.1878 nan 0.0010 0.0003
## 180 1.1734 nan 0.0010 0.0003
## 200 1.1596 nan 0.0010 0.0003
## 220 1.1459 nan 0.0010 0.0003
## 240 1.1326 nan 0.0010 0.0003
## 260 1.1198 nan 0.0010 0.0003
## 280 1.1075 nan 0.0010 0.0002
## 300 1.0955 nan 0.0010 0.0002
## 320 1.0837 nan 0.0010 0.0002
## 340 1.0722 nan 0.0010 0.0003
## 360 1.0612 nan 0.0010 0.0002
## 380 1.0503 nan 0.0010 0.0002
## 400 1.0400 nan 0.0010 0.0002
## 420 1.0298 nan 0.0010 0.0002
## 440 1.0199 nan 0.0010 0.0002
## 460 1.0102 nan 0.0010 0.0002
## 480 1.0008 nan 0.0010 0.0001
## 500 0.9917 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3119 nan 0.0100 0.0044
## 2 1.3030 nan 0.0100 0.0041
## 3 1.2956 nan 0.0100 0.0029
## 4 1.2874 nan 0.0100 0.0035
## 5 1.2789 nan 0.0100 0.0039
## 6 1.2705 nan 0.0100 0.0035
## 7 1.2629 nan 0.0100 0.0031
## 8 1.2558 nan 0.0100 0.0034
## 9 1.2474 nan 0.0100 0.0035
## 10 1.2396 nan 0.0100 0.0032
## 20 1.1712 nan 0.0100 0.0031
## 40 1.0599 nan 0.0100 0.0020
## 60 0.9776 nan 0.0100 0.0015
## 80 0.9109 nan 0.0100 0.0010
## 100 0.8571 nan 0.0100 0.0006
## 120 0.8127 nan 0.0100 0.0005
## 140 0.7744 nan 0.0100 0.0007
## 160 0.7445 nan 0.0100 0.0004
## 180 0.7185 nan 0.0100 0.0005
## 200 0.6959 nan 0.0100 0.0002
## 220 0.6767 nan 0.0100 0.0003
## 240 0.6585 nan 0.0100 0.0001
## 260 0.6422 nan 0.0100 0.0001
## 280 0.6284 nan 0.0100 -0.0000
## 300 0.6147 nan 0.0100 0.0001
## 320 0.6012 nan 0.0100 0.0001
## 340 0.5891 nan 0.0100 -0.0000
## 360 0.5773 nan 0.0100 0.0001
## 380 0.5680 nan 0.0100 0.0001
## 400 0.5583 nan 0.0100 0.0000
## 420 0.5488 nan 0.0100 -0.0000
## 440 0.5399 nan 0.0100 0.0001
## 460 0.5309 nan 0.0100 -0.0000
## 480 0.5227 nan 0.0100 -0.0001
## 500 0.5146 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3139 nan 0.0100 0.0033
## 2 1.3058 nan 0.0100 0.0035
## 3 1.2976 nan 0.0100 0.0038
## 4 1.2895 nan 0.0100 0.0037
## 5 1.2813 nan 0.0100 0.0038
## 6 1.2731 nan 0.0100 0.0039
## 7 1.2645 nan 0.0100 0.0036
## 8 1.2576 nan 0.0100 0.0031
## 9 1.2499 nan 0.0100 0.0035
## 10 1.2422 nan 0.0100 0.0035
## 20 1.1750 nan 0.0100 0.0028
## 40 1.0641 nan 0.0100 0.0017
## 60 0.9788 nan 0.0100 0.0012
## 80 0.9129 nan 0.0100 0.0012
## 100 0.8588 nan 0.0100 0.0011
## 120 0.8144 nan 0.0100 0.0007
## 140 0.7780 nan 0.0100 0.0006
## 160 0.7472 nan 0.0100 0.0005
## 180 0.7214 nan 0.0100 0.0001
## 200 0.6993 nan 0.0100 0.0003
## 220 0.6798 nan 0.0100 0.0004
## 240 0.6627 nan 0.0100 0.0001
## 260 0.6476 nan 0.0100 0.0002
## 280 0.6330 nan 0.0100 0.0002
## 300 0.6202 nan 0.0100 -0.0000
## 320 0.6086 nan 0.0100 0.0002
## 340 0.5971 nan 0.0100 0.0000
## 360 0.5868 nan 0.0100 -0.0001
## 380 0.5765 nan 0.0100 -0.0000
## 400 0.5672 nan 0.0100 0.0000
## 420 0.5582 nan 0.0100 0.0001
## 440 0.5495 nan 0.0100 -0.0000
## 460 0.5413 nan 0.0100 0.0001
## 480 0.5331 nan 0.0100 0.0000
## 500 0.5258 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3126 nan 0.0100 0.0041
## 2 1.3040 nan 0.0100 0.0039
## 3 1.2967 nan 0.0100 0.0033
## 4 1.2887 nan 0.0100 0.0034
## 5 1.2813 nan 0.0100 0.0036
## 6 1.2730 nan 0.0100 0.0040
## 7 1.2645 nan 0.0100 0.0039
## 8 1.2574 nan 0.0100 0.0034
## 9 1.2495 nan 0.0100 0.0036
## 10 1.2418 nan 0.0100 0.0033
## 20 1.1744 nan 0.0100 0.0027
## 40 1.0655 nan 0.0100 0.0020
## 60 0.9792 nan 0.0100 0.0014
## 80 0.9120 nan 0.0100 0.0009
## 100 0.8572 nan 0.0100 0.0010
## 120 0.8123 nan 0.0100 0.0006
## 140 0.7764 nan 0.0100 0.0006
## 160 0.7475 nan 0.0100 0.0004
## 180 0.7223 nan 0.0100 0.0006
## 200 0.7007 nan 0.0100 0.0001
## 220 0.6820 nan 0.0100 0.0002
## 240 0.6655 nan 0.0100 0.0002
## 260 0.6505 nan 0.0100 0.0000
## 280 0.6369 nan 0.0100 -0.0001
## 300 0.6253 nan 0.0100 -0.0001
## 320 0.6134 nan 0.0100 0.0001
## 340 0.6027 nan 0.0100 -0.0000
## 360 0.5919 nan 0.0100 -0.0001
## 380 0.5820 nan 0.0100 -0.0002
## 400 0.5730 nan 0.0100 0.0000
## 420 0.5643 nan 0.0100 -0.0000
## 440 0.5559 nan 0.0100 -0.0001
## 460 0.5475 nan 0.0100 -0.0000
## 480 0.5396 nan 0.0100 -0.0000
## 500 0.5323 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3111 nan 0.0100 0.0042
## 2 1.3021 nan 0.0100 0.0037
## 3 1.2929 nan 0.0100 0.0042
## 4 1.2840 nan 0.0100 0.0039
## 5 1.2753 nan 0.0100 0.0040
## 6 1.2666 nan 0.0100 0.0033
## 7 1.2586 nan 0.0100 0.0038
## 8 1.2502 nan 0.0100 0.0036
## 9 1.2418 nan 0.0100 0.0039
## 10 1.2342 nan 0.0100 0.0032
## 20 1.1605 nan 0.0100 0.0031
## 40 1.0454 nan 0.0100 0.0023
## 60 0.9568 nan 0.0100 0.0017
## 80 0.8879 nan 0.0100 0.0015
## 100 0.8332 nan 0.0100 0.0009
## 120 0.7855 nan 0.0100 0.0007
## 140 0.7470 nan 0.0100 0.0005
## 160 0.7158 nan 0.0100 0.0006
## 180 0.6882 nan 0.0100 0.0003
## 200 0.6640 nan 0.0100 0.0001
## 220 0.6430 nan 0.0100 0.0001
## 240 0.6240 nan 0.0100 0.0001
## 260 0.6067 nan 0.0100 0.0001
## 280 0.5907 nan 0.0100 0.0002
## 300 0.5759 nan 0.0100 -0.0000
## 320 0.5631 nan 0.0100 0.0000
## 340 0.5511 nan 0.0100 -0.0001
## 360 0.5371 nan 0.0100 0.0001
## 380 0.5259 nan 0.0100 -0.0002
## 400 0.5159 nan 0.0100 -0.0002
## 420 0.5053 nan 0.0100 0.0001
## 440 0.4950 nan 0.0100 -0.0000
## 460 0.4858 nan 0.0100 -0.0001
## 480 0.4769 nan 0.0100 -0.0000
## 500 0.4676 nan 0.0100 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3118 nan 0.0100 0.0043
## 2 1.3025 nan 0.0100 0.0039
## 3 1.2939 nan 0.0100 0.0039
## 4 1.2852 nan 0.0100 0.0038
## 5 1.2767 nan 0.0100 0.0038
## 6 1.2691 nan 0.0100 0.0032
## 7 1.2607 nan 0.0100 0.0038
## 8 1.2522 nan 0.0100 0.0037
## 9 1.2438 nan 0.0100 0.0037
## 10 1.2360 nan 0.0100 0.0033
## 20 1.1618 nan 0.0100 0.0029
## 40 1.0479 nan 0.0100 0.0020
## 60 0.9604 nan 0.0100 0.0018
## 80 0.8927 nan 0.0100 0.0011
## 100 0.8366 nan 0.0100 0.0010
## 120 0.7916 nan 0.0100 0.0006
## 140 0.7526 nan 0.0100 0.0006
## 160 0.7208 nan 0.0100 0.0002
## 180 0.6927 nan 0.0100 0.0002
## 200 0.6674 nan 0.0100 0.0001
## 220 0.6472 nan 0.0100 0.0001
## 240 0.6277 nan 0.0100 0.0001
## 260 0.6111 nan 0.0100 -0.0001
## 280 0.5954 nan 0.0100 0.0002
## 300 0.5811 nan 0.0100 -0.0000
## 320 0.5692 nan 0.0100 -0.0001
## 340 0.5559 nan 0.0100 -0.0000
## 360 0.5438 nan 0.0100 -0.0001
## 380 0.5330 nan 0.0100 -0.0000
## 400 0.5228 nan 0.0100 0.0000
## 420 0.5123 nan 0.0100 -0.0000
## 440 0.5030 nan 0.0100 -0.0002
## 460 0.4938 nan 0.0100 0.0001
## 480 0.4842 nan 0.0100 -0.0000
## 500 0.4763 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3123 nan 0.0100 0.0041
## 2 1.3031 nan 0.0100 0.0040
## 3 1.2944 nan 0.0100 0.0040
## 4 1.2862 nan 0.0100 0.0034
## 5 1.2775 nan 0.0100 0.0038
## 6 1.2697 nan 0.0100 0.0036
## 7 1.2618 nan 0.0100 0.0038
## 8 1.2538 nan 0.0100 0.0038
## 9 1.2456 nan 0.0100 0.0039
## 10 1.2378 nan 0.0100 0.0036
## 20 1.1665 nan 0.0100 0.0028
## 40 1.0516 nan 0.0100 0.0022
## 60 0.9637 nan 0.0100 0.0017
## 80 0.8946 nan 0.0100 0.0012
## 100 0.8396 nan 0.0100 0.0010
## 120 0.7942 nan 0.0100 0.0009
## 140 0.7574 nan 0.0100 0.0007
## 160 0.7251 nan 0.0100 0.0005
## 180 0.6976 nan 0.0100 0.0003
## 200 0.6744 nan 0.0100 0.0002
## 220 0.6538 nan 0.0100 0.0003
## 240 0.6349 nan 0.0100 -0.0000
## 260 0.6191 nan 0.0100 0.0002
## 280 0.6042 nan 0.0100 0.0001
## 300 0.5906 nan 0.0100 0.0000
## 320 0.5789 nan 0.0100 0.0001
## 340 0.5674 nan 0.0100 -0.0001
## 360 0.5554 nan 0.0100 0.0000
## 380 0.5440 nan 0.0100 -0.0001
## 400 0.5338 nan 0.0100 -0.0000
## 420 0.5246 nan 0.0100 -0.0002
## 440 0.5153 nan 0.0100 0.0001
## 460 0.5063 nan 0.0100 -0.0000
## 480 0.4970 nan 0.0100 -0.0001
## 500 0.4892 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3124 nan 0.0100 0.0040
## 2 1.3032 nan 0.0100 0.0040
## 3 1.2942 nan 0.0100 0.0043
## 4 1.2845 nan 0.0100 0.0044
## 5 1.2759 nan 0.0100 0.0035
## 6 1.2667 nan 0.0100 0.0041
## 7 1.2578 nan 0.0100 0.0039
## 8 1.2494 nan 0.0100 0.0039
## 9 1.2403 nan 0.0100 0.0037
## 10 1.2335 nan 0.0100 0.0026
## 20 1.1586 nan 0.0100 0.0030
## 40 1.0376 nan 0.0100 0.0023
## 60 0.9473 nan 0.0100 0.0017
## 80 0.8736 nan 0.0100 0.0013
## 100 0.8149 nan 0.0100 0.0009
## 120 0.7687 nan 0.0100 0.0007
## 140 0.7279 nan 0.0100 0.0005
## 160 0.6945 nan 0.0100 0.0004
## 180 0.6654 nan 0.0100 0.0000
## 200 0.6401 nan 0.0100 0.0003
## 220 0.6169 nan 0.0100 0.0003
## 240 0.5972 nan 0.0100 0.0001
## 260 0.5777 nan 0.0100 0.0001
## 280 0.5610 nan 0.0100 0.0000
## 300 0.5451 nan 0.0100 0.0001
## 320 0.5306 nan 0.0100 0.0001
## 340 0.5176 nan 0.0100 0.0000
## 360 0.5045 nan 0.0100 -0.0000
## 380 0.4914 nan 0.0100 -0.0001
## 400 0.4798 nan 0.0100 -0.0002
## 420 0.4683 nan 0.0100 0.0001
## 440 0.4574 nan 0.0100 0.0000
## 460 0.4469 nan 0.0100 0.0000
## 480 0.4372 nan 0.0100 0.0000
## 500 0.4284 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3116 nan 0.0100 0.0042
## 2 1.3020 nan 0.0100 0.0044
## 3 1.2931 nan 0.0100 0.0041
## 4 1.2849 nan 0.0100 0.0039
## 5 1.2758 nan 0.0100 0.0042
## 6 1.2671 nan 0.0100 0.0036
## 7 1.2588 nan 0.0100 0.0038
## 8 1.2498 nan 0.0100 0.0040
## 9 1.2414 nan 0.0100 0.0036
## 10 1.2331 nan 0.0100 0.0035
## 20 1.1606 nan 0.0100 0.0027
## 40 1.0404 nan 0.0100 0.0022
## 60 0.9457 nan 0.0100 0.0019
## 80 0.8722 nan 0.0100 0.0012
## 100 0.8138 nan 0.0100 0.0008
## 120 0.7662 nan 0.0100 0.0009
## 140 0.7269 nan 0.0100 0.0007
## 160 0.6947 nan 0.0100 0.0002
## 180 0.6665 nan 0.0100 0.0005
## 200 0.6424 nan 0.0100 0.0003
## 220 0.6205 nan 0.0100 0.0001
## 240 0.6007 nan 0.0100 0.0001
## 260 0.5812 nan 0.0100 0.0002
## 280 0.5650 nan 0.0100 0.0000
## 300 0.5497 nan 0.0100 0.0001
## 320 0.5353 nan 0.0100 -0.0000
## 340 0.5227 nan 0.0100 -0.0000
## 360 0.5105 nan 0.0100 0.0000
## 380 0.4990 nan 0.0100 0.0001
## 400 0.4881 nan 0.0100 0.0000
## 420 0.4765 nan 0.0100 0.0001
## 440 0.4659 nan 0.0100 -0.0000
## 460 0.4565 nan 0.0100 -0.0001
## 480 0.4471 nan 0.0100 0.0001
## 500 0.4380 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3115 nan 0.0100 0.0044
## 2 1.3023 nan 0.0100 0.0042
## 3 1.2928 nan 0.0100 0.0040
## 4 1.2848 nan 0.0100 0.0034
## 5 1.2759 nan 0.0100 0.0040
## 6 1.2675 nan 0.0100 0.0040
## 7 1.2590 nan 0.0100 0.0037
## 8 1.2508 nan 0.0100 0.0038
## 9 1.2424 nan 0.0100 0.0038
## 10 1.2353 nan 0.0100 0.0031
## 20 1.1625 nan 0.0100 0.0030
## 40 1.0406 nan 0.0100 0.0021
## 60 0.9489 nan 0.0100 0.0015
## 80 0.8773 nan 0.0100 0.0013
## 100 0.8196 nan 0.0100 0.0009
## 120 0.7728 nan 0.0100 0.0007
## 140 0.7353 nan 0.0100 0.0004
## 160 0.7025 nan 0.0100 0.0004
## 180 0.6747 nan 0.0100 0.0003
## 200 0.6501 nan 0.0100 0.0004
## 220 0.6299 nan 0.0100 0.0003
## 240 0.6114 nan 0.0100 0.0001
## 260 0.5942 nan 0.0100 0.0001
## 280 0.5771 nan 0.0100 0.0000
## 300 0.5618 nan 0.0100 0.0001
## 320 0.5475 nan 0.0100 -0.0000
## 340 0.5356 nan 0.0100 -0.0000
## 360 0.5231 nan 0.0100 -0.0001
## 380 0.5117 nan 0.0100 -0.0001
## 400 0.5004 nan 0.0100 -0.0002
## 420 0.4901 nan 0.0100 0.0001
## 440 0.4801 nan 0.0100 -0.0001
## 460 0.4697 nan 0.0100 -0.0001
## 480 0.4609 nan 0.0100 -0.0001
## 500 0.4522 nan 0.0100 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2405 nan 0.1000 0.0351
## 2 1.1734 nan 0.1000 0.0283
## 3 1.1110 nan 0.1000 0.0237
## 4 1.0690 nan 0.1000 0.0162
## 5 1.0245 nan 0.1000 0.0202
## 6 0.9844 nan 0.1000 0.0174
## 7 0.9471 nan 0.1000 0.0172
## 8 0.9116 nan 0.1000 0.0155
## 9 0.8835 nan 0.1000 0.0108
## 10 0.8646 nan 0.1000 0.0047
## 20 0.7063 nan 0.1000 0.0012
## 40 0.5690 nan 0.1000 0.0015
## 60 0.4826 nan 0.1000 -0.0019
## 80 0.4257 nan 0.1000 -0.0012
## 100 0.3697 nan 0.1000 -0.0001
## 120 0.3281 nan 0.1000 -0.0009
## 140 0.2925 nan 0.1000 -0.0001
## 160 0.2595 nan 0.1000 -0.0000
## 180 0.2343 nan 0.1000 -0.0004
## 200 0.2098 nan 0.1000 -0.0007
## 220 0.1909 nan 0.1000 0.0001
## 240 0.1743 nan 0.1000 -0.0002
## 260 0.1584 nan 0.1000 -0.0003
## 280 0.1459 nan 0.1000 -0.0001
## 300 0.1346 nan 0.1000 -0.0006
## 320 0.1236 nan 0.1000 -0.0002
## 340 0.1136 nan 0.1000 -0.0003
## 360 0.1041 nan 0.1000 -0.0002
## 380 0.0960 nan 0.1000 -0.0004
## 400 0.0886 nan 0.1000 -0.0000
## 420 0.0823 nan 0.1000 -0.0003
## 440 0.0763 nan 0.1000 -0.0002
## 460 0.0709 nan 0.1000 -0.0000
## 480 0.0655 nan 0.1000 -0.0001
## 500 0.0615 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2422 nan 0.1000 0.0347
## 2 1.1821 nan 0.1000 0.0275
## 3 1.1195 nan 0.1000 0.0299
## 4 1.0665 nan 0.1000 0.0206
## 5 1.0272 nan 0.1000 0.0186
## 6 0.9885 nan 0.1000 0.0165
## 7 0.9525 nan 0.1000 0.0151
## 8 0.9203 nan 0.1000 0.0143
## 9 0.8926 nan 0.1000 0.0100
## 10 0.8689 nan 0.1000 0.0082
## 20 0.7078 nan 0.1000 0.0012
## 40 0.5779 nan 0.1000 0.0004
## 60 0.4994 nan 0.1000 0.0003
## 80 0.4415 nan 0.1000 0.0002
## 100 0.3910 nan 0.1000 -0.0016
## 120 0.3499 nan 0.1000 0.0002
## 140 0.3146 nan 0.1000 -0.0006
## 160 0.2796 nan 0.1000 -0.0005
## 180 0.2527 nan 0.1000 -0.0008
## 200 0.2307 nan 0.1000 -0.0004
## 220 0.2071 nan 0.1000 -0.0000
## 240 0.1900 nan 0.1000 -0.0004
## 260 0.1737 nan 0.1000 -0.0003
## 280 0.1573 nan 0.1000 -0.0004
## 300 0.1414 nan 0.1000 -0.0004
## 320 0.1298 nan 0.1000 -0.0003
## 340 0.1197 nan 0.1000 -0.0003
## 360 0.1109 nan 0.1000 -0.0003
## 380 0.1028 nan 0.1000 -0.0004
## 400 0.0939 nan 0.1000 -0.0002
## 420 0.0867 nan 0.1000 -0.0002
## 440 0.0805 nan 0.1000 -0.0001
## 460 0.0747 nan 0.1000 -0.0003
## 480 0.0698 nan 0.1000 -0.0002
## 500 0.0650 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2388 nan 0.1000 0.0379
## 2 1.1705 nan 0.1000 0.0309
## 3 1.1107 nan 0.1000 0.0244
## 4 1.0597 nan 0.1000 0.0217
## 5 1.0178 nan 0.1000 0.0150
## 6 0.9779 nan 0.1000 0.0154
## 7 0.9437 nan 0.1000 0.0136
## 8 0.9165 nan 0.1000 0.0083
## 9 0.8901 nan 0.1000 0.0117
## 10 0.8615 nan 0.1000 0.0100
## 20 0.7104 nan 0.1000 0.0024
## 40 0.5821 nan 0.1000 -0.0007
## 60 0.5065 nan 0.1000 -0.0016
## 80 0.4468 nan 0.1000 -0.0020
## 100 0.3957 nan 0.1000 -0.0004
## 120 0.3600 nan 0.1000 -0.0002
## 140 0.3237 nan 0.1000 -0.0003
## 160 0.2895 nan 0.1000 0.0004
## 180 0.2616 nan 0.1000 -0.0012
## 200 0.2377 nan 0.1000 -0.0008
## 220 0.2179 nan 0.1000 -0.0015
## 240 0.1981 nan 0.1000 -0.0007
## 260 0.1829 nan 0.1000 -0.0008
## 280 0.1698 nan 0.1000 -0.0005
## 300 0.1558 nan 0.1000 -0.0009
## 320 0.1424 nan 0.1000 -0.0002
## 340 0.1307 nan 0.1000 -0.0004
## 360 0.1207 nan 0.1000 -0.0006
## 380 0.1120 nan 0.1000 -0.0003
## 400 0.1049 nan 0.1000 -0.0005
## 420 0.0981 nan 0.1000 -0.0004
## 440 0.0914 nan 0.1000 -0.0005
## 460 0.0840 nan 0.1000 -0.0003
## 480 0.0773 nan 0.1000 -0.0004
## 500 0.0717 nan 0.1000 -0.0005
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2286 nan 0.1000 0.0435
## 2 1.1564 nan 0.1000 0.0330
## 3 1.0911 nan 0.1000 0.0264
## 4 1.0417 nan 0.1000 0.0201
## 5 0.9884 nan 0.1000 0.0204
## 6 0.9513 nan 0.1000 0.0168
## 7 0.9103 nan 0.1000 0.0146
## 8 0.8796 nan 0.1000 0.0118
## 9 0.8496 nan 0.1000 0.0120
## 10 0.8251 nan 0.1000 0.0104
## 20 0.6624 nan 0.1000 0.0032
## 40 0.5250 nan 0.1000 -0.0001
## 60 0.4379 nan 0.1000 0.0005
## 80 0.3727 nan 0.1000 0.0006
## 100 0.3197 nan 0.1000 -0.0010
## 120 0.2714 nan 0.1000 -0.0008
## 140 0.2351 nan 0.1000 -0.0008
## 160 0.2048 nan 0.1000 -0.0004
## 180 0.1806 nan 0.1000 -0.0003
## 200 0.1613 nan 0.1000 -0.0003
## 220 0.1419 nan 0.1000 -0.0002
## 240 0.1268 nan 0.1000 -0.0002
## 260 0.1139 nan 0.1000 -0.0004
## 280 0.1021 nan 0.1000 -0.0004
## 300 0.0908 nan 0.1000 -0.0002
## 320 0.0814 nan 0.1000 -0.0003
## 340 0.0723 nan 0.1000 -0.0002
## 360 0.0658 nan 0.1000 -0.0002
## 380 0.0601 nan 0.1000 -0.0002
## 400 0.0548 nan 0.1000 -0.0002
## 420 0.0496 nan 0.1000 -0.0000
## 440 0.0449 nan 0.1000 -0.0000
## 460 0.0408 nan 0.1000 -0.0001
## 480 0.0370 nan 0.1000 -0.0000
## 500 0.0335 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2394 nan 0.1000 0.0370
## 2 1.1659 nan 0.1000 0.0330
## 3 1.1021 nan 0.1000 0.0308
## 4 1.0523 nan 0.1000 0.0207
## 5 1.0028 nan 0.1000 0.0233
## 6 0.9615 nan 0.1000 0.0190
## 7 0.9270 nan 0.1000 0.0137
## 8 0.8977 nan 0.1000 0.0139
## 9 0.8678 nan 0.1000 0.0136
## 10 0.8399 nan 0.1000 0.0120
## 20 0.6668 nan 0.1000 0.0030
## 40 0.5338 nan 0.1000 -0.0007
## 60 0.4448 nan 0.1000 0.0002
## 80 0.3828 nan 0.1000 -0.0008
## 100 0.3321 nan 0.1000 -0.0016
## 120 0.2859 nan 0.1000 -0.0016
## 140 0.2501 nan 0.1000 -0.0006
## 160 0.2191 nan 0.1000 -0.0007
## 180 0.1945 nan 0.1000 -0.0005
## 200 0.1710 nan 0.1000 -0.0001
## 220 0.1513 nan 0.1000 -0.0006
## 240 0.1342 nan 0.1000 -0.0003
## 260 0.1201 nan 0.1000 -0.0003
## 280 0.1081 nan 0.1000 -0.0004
## 300 0.0970 nan 0.1000 -0.0003
## 320 0.0873 nan 0.1000 -0.0003
## 340 0.0794 nan 0.1000 -0.0000
## 360 0.0716 nan 0.1000 -0.0002
## 380 0.0641 nan 0.1000 -0.0001
## 400 0.0583 nan 0.1000 -0.0001
## 420 0.0524 nan 0.1000 -0.0001
## 440 0.0478 nan 0.1000 -0.0001
## 460 0.0435 nan 0.1000 -0.0000
## 480 0.0395 nan 0.1000 -0.0002
## 500 0.0358 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2358 nan 0.1000 0.0402
## 2 1.1620 nan 0.1000 0.0279
## 3 1.1022 nan 0.1000 0.0288
## 4 1.0537 nan 0.1000 0.0203
## 5 1.0004 nan 0.1000 0.0212
## 6 0.9554 nan 0.1000 0.0170
## 7 0.9207 nan 0.1000 0.0130
## 8 0.8864 nan 0.1000 0.0135
## 9 0.8573 nan 0.1000 0.0100
## 10 0.8266 nan 0.1000 0.0095
## 20 0.6691 nan 0.1000 0.0009
## 40 0.5300 nan 0.1000 -0.0012
## 60 0.4469 nan 0.1000 -0.0006
## 80 0.3738 nan 0.1000 -0.0008
## 100 0.3249 nan 0.1000 -0.0012
## 120 0.2856 nan 0.1000 -0.0011
## 140 0.2545 nan 0.1000 -0.0006
## 160 0.2278 nan 0.1000 -0.0007
## 180 0.2015 nan 0.1000 -0.0002
## 200 0.1782 nan 0.1000 -0.0006
## 220 0.1585 nan 0.1000 -0.0006
## 240 0.1431 nan 0.1000 -0.0005
## 260 0.1263 nan 0.1000 -0.0003
## 280 0.1146 nan 0.1000 -0.0004
## 300 0.1026 nan 0.1000 -0.0004
## 320 0.0937 nan 0.1000 -0.0003
## 340 0.0846 nan 0.1000 -0.0002
## 360 0.0763 nan 0.1000 -0.0002
## 380 0.0690 nan 0.1000 -0.0003
## 400 0.0625 nan 0.1000 -0.0002
## 420 0.0572 nan 0.1000 -0.0003
## 440 0.0524 nan 0.1000 -0.0000
## 460 0.0477 nan 0.1000 -0.0001
## 480 0.0431 nan 0.1000 -0.0002
## 500 0.0392 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2268 nan 0.1000 0.0420
## 2 1.1527 nan 0.1000 0.0333
## 3 1.0922 nan 0.1000 0.0268
## 4 1.0354 nan 0.1000 0.0242
## 5 0.9861 nan 0.1000 0.0209
## 6 0.9434 nan 0.1000 0.0176
## 7 0.9070 nan 0.1000 0.0147
## 8 0.8712 nan 0.1000 0.0139
## 9 0.8436 nan 0.1000 0.0104
## 10 0.8167 nan 0.1000 0.0095
## 20 0.6464 nan 0.1000 0.0008
## 40 0.4898 nan 0.1000 -0.0003
## 60 0.3964 nan 0.1000 -0.0006
## 80 0.3201 nan 0.1000 -0.0016
## 100 0.2658 nan 0.1000 -0.0009
## 120 0.2207 nan 0.1000 -0.0003
## 140 0.1852 nan 0.1000 -0.0002
## 160 0.1622 nan 0.1000 -0.0002
## 180 0.1426 nan 0.1000 -0.0005
## 200 0.1238 nan 0.1000 -0.0000
## 220 0.1092 nan 0.1000 -0.0002
## 240 0.0952 nan 0.1000 -0.0005
## 260 0.0829 nan 0.1000 -0.0002
## 280 0.0749 nan 0.1000 -0.0003
## 300 0.0649 nan 0.1000 -0.0000
## 320 0.0578 nan 0.1000 -0.0002
## 340 0.0514 nan 0.1000 -0.0000
## 360 0.0451 nan 0.1000 -0.0002
## 380 0.0401 nan 0.1000 -0.0000
## 400 0.0352 nan 0.1000 -0.0000
## 420 0.0315 nan 0.1000 -0.0001
## 440 0.0281 nan 0.1000 -0.0001
## 460 0.0244 nan 0.1000 -0.0001
## 480 0.0214 nan 0.1000 -0.0000
## 500 0.0192 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2354 nan 0.1000 0.0356
## 2 1.1627 nan 0.1000 0.0282
## 3 1.1019 nan 0.1000 0.0269
## 4 1.0429 nan 0.1000 0.0265
## 5 0.9926 nan 0.1000 0.0191
## 6 0.9510 nan 0.1000 0.0163
## 7 0.9086 nan 0.1000 0.0181
## 8 0.8713 nan 0.1000 0.0137
## 9 0.8451 nan 0.1000 0.0090
## 10 0.8190 nan 0.1000 0.0095
## 20 0.6438 nan 0.1000 0.0034
## 40 0.4907 nan 0.1000 -0.0014
## 60 0.3997 nan 0.1000 0.0002
## 80 0.3386 nan 0.1000 -0.0028
## 100 0.2837 nan 0.1000 -0.0004
## 120 0.2437 nan 0.1000 -0.0013
## 140 0.2105 nan 0.1000 -0.0006
## 160 0.1805 nan 0.1000 -0.0008
## 180 0.1528 nan 0.1000 -0.0005
## 200 0.1329 nan 0.1000 -0.0002
## 220 0.1147 nan 0.1000 -0.0002
## 240 0.0997 nan 0.1000 -0.0002
## 260 0.0882 nan 0.1000 -0.0004
## 280 0.0781 nan 0.1000 -0.0003
## 300 0.0698 nan 0.1000 -0.0002
## 320 0.0620 nan 0.1000 -0.0003
## 340 0.0547 nan 0.1000 -0.0002
## 360 0.0474 nan 0.1000 -0.0001
## 380 0.0423 nan 0.1000 -0.0002
## 400 0.0372 nan 0.1000 -0.0001
## 420 0.0331 nan 0.1000 -0.0002
## 440 0.0293 nan 0.1000 -0.0001
## 460 0.0261 nan 0.1000 -0.0001
## 480 0.0234 nan 0.1000 -0.0001
## 500 0.0206 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2252 nan 0.1000 0.0465
## 2 1.1522 nan 0.1000 0.0328
## 3 1.0983 nan 0.1000 0.0223
## 4 1.0410 nan 0.1000 0.0257
## 5 0.9909 nan 0.1000 0.0198
## 6 0.9490 nan 0.1000 0.0186
## 7 0.9084 nan 0.1000 0.0168
## 8 0.8750 nan 0.1000 0.0134
## 9 0.8453 nan 0.1000 0.0108
## 10 0.8186 nan 0.1000 0.0113
## 20 0.6555 nan 0.1000 0.0002
## 40 0.5053 nan 0.1000 -0.0002
## 60 0.4196 nan 0.1000 -0.0013
## 80 0.3509 nan 0.1000 -0.0010
## 100 0.2953 nan 0.1000 -0.0009
## 120 0.2539 nan 0.1000 -0.0002
## 140 0.2168 nan 0.1000 -0.0009
## 160 0.1850 nan 0.1000 -0.0002
## 180 0.1625 nan 0.1000 -0.0007
## 200 0.1429 nan 0.1000 -0.0005
## 220 0.1257 nan 0.1000 -0.0005
## 240 0.1092 nan 0.1000 -0.0004
## 260 0.0968 nan 0.1000 -0.0004
## 280 0.0850 nan 0.1000 -0.0000
## 300 0.0758 nan 0.1000 -0.0002
## 320 0.0659 nan 0.1000 -0.0003
## 340 0.0575 nan 0.1000 -0.0001
## 360 0.0517 nan 0.1000 -0.0001
## 380 0.0460 nan 0.1000 -0.0003
## 400 0.0404 nan 0.1000 -0.0003
## 420 0.0365 nan 0.1000 -0.0002
## 440 0.0324 nan 0.1000 -0.0001
## 460 0.0290 nan 0.1000 -0.0001
## 480 0.0258 nan 0.1000 -0.0002
## 500 0.0230 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3190 nan 0.0010 0.0003
## 3 1.3182 nan 0.0010 0.0003
## 4 1.3174 nan 0.0010 0.0004
## 5 1.3165 nan 0.0010 0.0004
## 6 1.3157 nan 0.0010 0.0004
## 7 1.3149 nan 0.0010 0.0004
## 8 1.3141 nan 0.0010 0.0004
## 9 1.3132 nan 0.0010 0.0004
## 10 1.3124 nan 0.0010 0.0004
## 20 1.3045 nan 0.0010 0.0003
## 40 1.2894 nan 0.0010 0.0003
## 60 1.2746 nan 0.0010 0.0003
## 80 1.2604 nan 0.0010 0.0003
## 100 1.2461 nan 0.0010 0.0003
## 120 1.2331 nan 0.0010 0.0003
## 140 1.2201 nan 0.0010 0.0003
## 160 1.2074 nan 0.0010 0.0003
## 180 1.1952 nan 0.0010 0.0003
## 200 1.1835 nan 0.0010 0.0003
## 220 1.1718 nan 0.0010 0.0002
## 240 1.1603 nan 0.0010 0.0002
## 260 1.1492 nan 0.0010 0.0002
## 280 1.1383 nan 0.0010 0.0002
## 300 1.1278 nan 0.0010 0.0003
## 320 1.1177 nan 0.0010 0.0002
## 340 1.1078 nan 0.0010 0.0002
## 360 1.0981 nan 0.0010 0.0002
## 380 1.0891 nan 0.0010 0.0002
## 400 1.0801 nan 0.0010 0.0001
## 420 1.0711 nan 0.0010 0.0001
## 440 1.0621 nan 0.0010 0.0002
## 460 1.0537 nan 0.0010 0.0002
## 480 1.0453 nan 0.0010 0.0002
## 500 1.0374 nan 0.0010 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3190 nan 0.0010 0.0003
## 3 1.3182 nan 0.0010 0.0004
## 4 1.3173 nan 0.0010 0.0004
## 5 1.3165 nan 0.0010 0.0004
## 6 1.3158 nan 0.0010 0.0003
## 7 1.3151 nan 0.0010 0.0004
## 8 1.3143 nan 0.0010 0.0004
## 9 1.3136 nan 0.0010 0.0003
## 10 1.3127 nan 0.0010 0.0004
## 20 1.3049 nan 0.0010 0.0003
## 40 1.2898 nan 0.0010 0.0004
## 60 1.2750 nan 0.0010 0.0003
## 80 1.2608 nan 0.0010 0.0004
## 100 1.2469 nan 0.0010 0.0003
## 120 1.2336 nan 0.0010 0.0003
## 140 1.2204 nan 0.0010 0.0003
## 160 1.2075 nan 0.0010 0.0003
## 180 1.1948 nan 0.0010 0.0003
## 200 1.1828 nan 0.0010 0.0003
## 220 1.1708 nan 0.0010 0.0003
## 240 1.1596 nan 0.0010 0.0002
## 260 1.1487 nan 0.0010 0.0002
## 280 1.1380 nan 0.0010 0.0003
## 300 1.1277 nan 0.0010 0.0002
## 320 1.1180 nan 0.0010 0.0002
## 340 1.1079 nan 0.0010 0.0002
## 360 1.0983 nan 0.0010 0.0002
## 380 1.0890 nan 0.0010 0.0002
## 400 1.0799 nan 0.0010 0.0002
## 420 1.0711 nan 0.0010 0.0002
## 440 1.0623 nan 0.0010 0.0002
## 460 1.0538 nan 0.0010 0.0002
## 480 1.0454 nan 0.0010 0.0002
## 500 1.0375 nan 0.0010 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3190 nan 0.0010 0.0004
## 3 1.3183 nan 0.0010 0.0003
## 4 1.3175 nan 0.0010 0.0004
## 5 1.3166 nan 0.0010 0.0004
## 6 1.3159 nan 0.0010 0.0003
## 7 1.3151 nan 0.0010 0.0003
## 8 1.3142 nan 0.0010 0.0004
## 9 1.3134 nan 0.0010 0.0004
## 10 1.3126 nan 0.0010 0.0004
## 20 1.3049 nan 0.0010 0.0003
## 40 1.2898 nan 0.0010 0.0004
## 60 1.2750 nan 0.0010 0.0003
## 80 1.2604 nan 0.0010 0.0003
## 100 1.2464 nan 0.0010 0.0003
## 120 1.2332 nan 0.0010 0.0003
## 140 1.2203 nan 0.0010 0.0003
## 160 1.2077 nan 0.0010 0.0003
## 180 1.1955 nan 0.0010 0.0003
## 200 1.1832 nan 0.0010 0.0003
## 220 1.1718 nan 0.0010 0.0003
## 240 1.1606 nan 0.0010 0.0003
## 260 1.1499 nan 0.0010 0.0002
## 280 1.1391 nan 0.0010 0.0002
## 300 1.1288 nan 0.0010 0.0002
## 320 1.1187 nan 0.0010 0.0003
## 340 1.1089 nan 0.0010 0.0002
## 360 1.0993 nan 0.0010 0.0002
## 380 1.0901 nan 0.0010 0.0002
## 400 1.0812 nan 0.0010 0.0002
## 420 1.0724 nan 0.0010 0.0001
## 440 1.0640 nan 0.0010 0.0002
## 460 1.0557 nan 0.0010 0.0002
## 480 1.0477 nan 0.0010 0.0002
## 500 1.0395 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3181 nan 0.0010 0.0004
## 4 1.3172 nan 0.0010 0.0004
## 5 1.3164 nan 0.0010 0.0004
## 6 1.3155 nan 0.0010 0.0004
## 7 1.3147 nan 0.0010 0.0004
## 8 1.3138 nan 0.0010 0.0004
## 9 1.3130 nan 0.0010 0.0003
## 10 1.3121 nan 0.0010 0.0004
## 20 1.3034 nan 0.0010 0.0004
## 40 1.2868 nan 0.0010 0.0004
## 60 1.2709 nan 0.0010 0.0003
## 80 1.2554 nan 0.0010 0.0003
## 100 1.2407 nan 0.0010 0.0003
## 120 1.2263 nan 0.0010 0.0003
## 140 1.2124 nan 0.0010 0.0003
## 160 1.1988 nan 0.0010 0.0003
## 180 1.1857 nan 0.0010 0.0002
## 200 1.1731 nan 0.0010 0.0003
## 220 1.1609 nan 0.0010 0.0002
## 240 1.1488 nan 0.0010 0.0003
## 260 1.1372 nan 0.0010 0.0002
## 280 1.1258 nan 0.0010 0.0002
## 300 1.1148 nan 0.0010 0.0002
## 320 1.1043 nan 0.0010 0.0002
## 340 1.0938 nan 0.0010 0.0002
## 360 1.0836 nan 0.0010 0.0002
## 380 1.0737 nan 0.0010 0.0002
## 400 1.0641 nan 0.0010 0.0002
## 420 1.0546 nan 0.0010 0.0002
## 440 1.0454 nan 0.0010 0.0002
## 460 1.0366 nan 0.0010 0.0001
## 480 1.0278 nan 0.0010 0.0002
## 500 1.0195 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3197 nan 0.0010 0.0004
## 2 1.3190 nan 0.0010 0.0004
## 3 1.3181 nan 0.0010 0.0004
## 4 1.3173 nan 0.0010 0.0004
## 5 1.3165 nan 0.0010 0.0004
## 6 1.3156 nan 0.0010 0.0004
## 7 1.3148 nan 0.0010 0.0004
## 8 1.3140 nan 0.0010 0.0004
## 9 1.3131 nan 0.0010 0.0004
## 10 1.3123 nan 0.0010 0.0004
## 20 1.3040 nan 0.0010 0.0004
## 40 1.2876 nan 0.0010 0.0004
## 60 1.2717 nan 0.0010 0.0004
## 80 1.2565 nan 0.0010 0.0003
## 100 1.2417 nan 0.0010 0.0003
## 120 1.2272 nan 0.0010 0.0003
## 140 1.2130 nan 0.0010 0.0002
## 160 1.1997 nan 0.0010 0.0003
## 180 1.1863 nan 0.0010 0.0003
## 200 1.1735 nan 0.0010 0.0003
## 220 1.1614 nan 0.0010 0.0003
## 240 1.1492 nan 0.0010 0.0002
## 260 1.1377 nan 0.0010 0.0002
## 280 1.1262 nan 0.0010 0.0003
## 300 1.1151 nan 0.0010 0.0002
## 320 1.1045 nan 0.0010 0.0002
## 340 1.0944 nan 0.0010 0.0003
## 360 1.0841 nan 0.0010 0.0002
## 380 1.0744 nan 0.0010 0.0002
## 400 1.0648 nan 0.0010 0.0002
## 420 1.0555 nan 0.0010 0.0002
## 440 1.0464 nan 0.0010 0.0002
## 460 1.0374 nan 0.0010 0.0002
## 480 1.0286 nan 0.0010 0.0002
## 500 1.0204 nan 0.0010 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3181 nan 0.0010 0.0004
## 4 1.3172 nan 0.0010 0.0004
## 5 1.3164 nan 0.0010 0.0004
## 6 1.3155 nan 0.0010 0.0004
## 7 1.3146 nan 0.0010 0.0004
## 8 1.3137 nan 0.0010 0.0004
## 9 1.3129 nan 0.0010 0.0004
## 10 1.3121 nan 0.0010 0.0004
## 20 1.3039 nan 0.0010 0.0003
## 40 1.2881 nan 0.0010 0.0004
## 60 1.2724 nan 0.0010 0.0003
## 80 1.2574 nan 0.0010 0.0003
## 100 1.2428 nan 0.0010 0.0004
## 120 1.2283 nan 0.0010 0.0003
## 140 1.2145 nan 0.0010 0.0003
## 160 1.2010 nan 0.0010 0.0003
## 180 1.1883 nan 0.0010 0.0003
## 200 1.1757 nan 0.0010 0.0003
## 220 1.1636 nan 0.0010 0.0003
## 240 1.1515 nan 0.0010 0.0002
## 260 1.1399 nan 0.0010 0.0002
## 280 1.1287 nan 0.0010 0.0002
## 300 1.1179 nan 0.0010 0.0002
## 320 1.1075 nan 0.0010 0.0002
## 340 1.0972 nan 0.0010 0.0002
## 360 1.0870 nan 0.0010 0.0002
## 380 1.0772 nan 0.0010 0.0002
## 400 1.0674 nan 0.0010 0.0002
## 420 1.0582 nan 0.0010 0.0002
## 440 1.0494 nan 0.0010 0.0002
## 460 1.0406 nan 0.0010 0.0002
## 480 1.0320 nan 0.0010 0.0002
## 500 1.0237 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3181 nan 0.0010 0.0004
## 4 1.3172 nan 0.0010 0.0004
## 5 1.3162 nan 0.0010 0.0004
## 6 1.3153 nan 0.0010 0.0004
## 7 1.3143 nan 0.0010 0.0004
## 8 1.3134 nan 0.0010 0.0004
## 9 1.3125 nan 0.0010 0.0004
## 10 1.3117 nan 0.0010 0.0004
## 20 1.3030 nan 0.0010 0.0004
## 40 1.2856 nan 0.0010 0.0004
## 60 1.2687 nan 0.0010 0.0004
## 80 1.2525 nan 0.0010 0.0004
## 100 1.2371 nan 0.0010 0.0003
## 120 1.2222 nan 0.0010 0.0003
## 140 1.2076 nan 0.0010 0.0003
## 160 1.1935 nan 0.0010 0.0003
## 180 1.1800 nan 0.0010 0.0003
## 200 1.1664 nan 0.0010 0.0003
## 220 1.1533 nan 0.0010 0.0003
## 240 1.1407 nan 0.0010 0.0002
## 260 1.1286 nan 0.0010 0.0003
## 280 1.1167 nan 0.0010 0.0002
## 300 1.1056 nan 0.0010 0.0002
## 320 1.0945 nan 0.0010 0.0002
## 340 1.0836 nan 0.0010 0.0002
## 360 1.0730 nan 0.0010 0.0002
## 380 1.0628 nan 0.0010 0.0002
## 400 1.0528 nan 0.0010 0.0002
## 420 1.0430 nan 0.0010 0.0002
## 440 1.0335 nan 0.0010 0.0002
## 460 1.0243 nan 0.0010 0.0002
## 480 1.0154 nan 0.0010 0.0002
## 500 1.0067 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3197 nan 0.0010 0.0005
## 2 1.3188 nan 0.0010 0.0004
## 3 1.3179 nan 0.0010 0.0004
## 4 1.3170 nan 0.0010 0.0004
## 5 1.3161 nan 0.0010 0.0005
## 6 1.3152 nan 0.0010 0.0004
## 7 1.3144 nan 0.0010 0.0004
## 8 1.3134 nan 0.0010 0.0004
## 9 1.3124 nan 0.0010 0.0005
## 10 1.3116 nan 0.0010 0.0004
## 20 1.3027 nan 0.0010 0.0004
## 40 1.2858 nan 0.0010 0.0004
## 60 1.2692 nan 0.0010 0.0003
## 80 1.2535 nan 0.0010 0.0003
## 100 1.2382 nan 0.0010 0.0003
## 120 1.2234 nan 0.0010 0.0003
## 140 1.2085 nan 0.0010 0.0003
## 160 1.1946 nan 0.0010 0.0003
## 180 1.1810 nan 0.0010 0.0003
## 200 1.1677 nan 0.0010 0.0002
## 220 1.1549 nan 0.0010 0.0003
## 240 1.1424 nan 0.0010 0.0003
## 260 1.1303 nan 0.0010 0.0003
## 280 1.1187 nan 0.0010 0.0002
## 300 1.1073 nan 0.0010 0.0002
## 320 1.0960 nan 0.0010 0.0003
## 340 1.0850 nan 0.0010 0.0002
## 360 1.0747 nan 0.0010 0.0002
## 380 1.0646 nan 0.0010 0.0002
## 400 1.0546 nan 0.0010 0.0002
## 420 1.0449 nan 0.0010 0.0002
## 440 1.0355 nan 0.0010 0.0002
## 460 1.0263 nan 0.0010 0.0002
## 480 1.0175 nan 0.0010 0.0002
## 500 1.0087 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3180 nan 0.0010 0.0003
## 4 1.3171 nan 0.0010 0.0003
## 5 1.3163 nan 0.0010 0.0004
## 6 1.3155 nan 0.0010 0.0004
## 7 1.3147 nan 0.0010 0.0004
## 8 1.3138 nan 0.0010 0.0004
## 9 1.3128 nan 0.0010 0.0004
## 10 1.3120 nan 0.0010 0.0004
## 20 1.3034 nan 0.0010 0.0003
## 40 1.2867 nan 0.0010 0.0003
## 60 1.2703 nan 0.0010 0.0004
## 80 1.2545 nan 0.0010 0.0003
## 100 1.2394 nan 0.0010 0.0004
## 120 1.2243 nan 0.0010 0.0003
## 140 1.2099 nan 0.0010 0.0003
## 160 1.1960 nan 0.0010 0.0003
## 180 1.1828 nan 0.0010 0.0003
## 200 1.1697 nan 0.0010 0.0003
## 220 1.1571 nan 0.0010 0.0002
## 240 1.1444 nan 0.0010 0.0003
## 260 1.1324 nan 0.0010 0.0003
## 280 1.1207 nan 0.0010 0.0002
## 300 1.1093 nan 0.0010 0.0002
## 320 1.0980 nan 0.0010 0.0002
## 340 1.0874 nan 0.0010 0.0002
## 360 1.0771 nan 0.0010 0.0002
## 380 1.0668 nan 0.0010 0.0002
## 400 1.0572 nan 0.0010 0.0002
## 420 1.0475 nan 0.0010 0.0002
## 440 1.0383 nan 0.0010 0.0002
## 460 1.0292 nan 0.0010 0.0002
## 480 1.0204 nan 0.0010 0.0002
## 500 1.0118 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3138 nan 0.0100 0.0030
## 2 1.3058 nan 0.0100 0.0032
## 3 1.2970 nan 0.0100 0.0038
## 4 1.2891 nan 0.0100 0.0033
## 5 1.2813 nan 0.0100 0.0037
## 6 1.2742 nan 0.0100 0.0037
## 7 1.2672 nan 0.0100 0.0030
## 8 1.2600 nan 0.0100 0.0030
## 9 1.2523 nan 0.0100 0.0032
## 10 1.2454 nan 0.0100 0.0034
## 20 1.1813 nan 0.0100 0.0026
## 40 1.0780 nan 0.0100 0.0018
## 60 1.0008 nan 0.0100 0.0013
## 80 0.9375 nan 0.0100 0.0013
## 100 0.8866 nan 0.0100 0.0010
## 120 0.8459 nan 0.0100 0.0007
## 140 0.8108 nan 0.0100 0.0005
## 160 0.7813 nan 0.0100 0.0004
## 180 0.7562 nan 0.0100 0.0004
## 200 0.7358 nan 0.0100 0.0003
## 220 0.7167 nan 0.0100 0.0002
## 240 0.7000 nan 0.0100 0.0001
## 260 0.6848 nan 0.0100 0.0000
## 280 0.6689 nan 0.0100 0.0000
## 300 0.6552 nan 0.0100 0.0000
## 320 0.6421 nan 0.0100 0.0001
## 340 0.6303 nan 0.0100 0.0002
## 360 0.6192 nan 0.0100 -0.0001
## 380 0.6078 nan 0.0100 -0.0001
## 400 0.5981 nan 0.0100 -0.0000
## 420 0.5881 nan 0.0100 0.0000
## 440 0.5792 nan 0.0100 -0.0002
## 460 0.5700 nan 0.0100 -0.0001
## 480 0.5614 nan 0.0100 0.0000
## 500 0.5530 nan 0.0100 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3125 nan 0.0100 0.0034
## 2 1.3057 nan 0.0100 0.0030
## 3 1.2975 nan 0.0100 0.0040
## 4 1.2896 nan 0.0100 0.0038
## 5 1.2816 nan 0.0100 0.0039
## 6 1.2740 nan 0.0100 0.0031
## 7 1.2669 nan 0.0100 0.0032
## 8 1.2603 nan 0.0100 0.0031
## 9 1.2524 nan 0.0100 0.0035
## 10 1.2452 nan 0.0100 0.0030
## 20 1.1808 nan 0.0100 0.0026
## 40 1.0794 nan 0.0100 0.0019
## 60 0.9999 nan 0.0100 0.0015
## 80 0.9377 nan 0.0100 0.0013
## 100 0.8880 nan 0.0100 0.0007
## 120 0.8471 nan 0.0100 0.0007
## 140 0.8127 nan 0.0100 0.0005
## 160 0.7839 nan 0.0100 0.0004
## 180 0.7604 nan 0.0100 0.0000
## 200 0.7394 nan 0.0100 -0.0001
## 220 0.7203 nan 0.0100 0.0001
## 240 0.7033 nan 0.0100 0.0003
## 260 0.6887 nan 0.0100 -0.0000
## 280 0.6743 nan 0.0100 0.0001
## 300 0.6622 nan 0.0100 -0.0001
## 320 0.6506 nan 0.0100 0.0000
## 340 0.6395 nan 0.0100 0.0000
## 360 0.6285 nan 0.0100 -0.0001
## 380 0.6181 nan 0.0100 -0.0000
## 400 0.6090 nan 0.0100 -0.0001
## 420 0.5998 nan 0.0100 -0.0000
## 440 0.5908 nan 0.0100 0.0000
## 460 0.5812 nan 0.0100 0.0001
## 480 0.5727 nan 0.0100 -0.0001
## 500 0.5641 nan 0.0100 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3127 nan 0.0100 0.0037
## 2 1.3049 nan 0.0100 0.0035
## 3 1.2979 nan 0.0100 0.0033
## 4 1.2908 nan 0.0100 0.0034
## 5 1.2834 nan 0.0100 0.0036
## 6 1.2766 nan 0.0100 0.0032
## 7 1.2702 nan 0.0100 0.0028
## 8 1.2632 nan 0.0100 0.0027
## 9 1.2565 nan 0.0100 0.0030
## 10 1.2492 nan 0.0100 0.0031
## 20 1.1849 nan 0.0100 0.0024
## 40 1.0808 nan 0.0100 0.0021
## 60 1.0015 nan 0.0100 0.0015
## 80 0.9386 nan 0.0100 0.0013
## 100 0.8884 nan 0.0100 0.0009
## 120 0.8479 nan 0.0100 0.0007
## 140 0.8146 nan 0.0100 0.0007
## 160 0.7860 nan 0.0100 0.0004
## 180 0.7601 nan 0.0100 0.0005
## 200 0.7394 nan 0.0100 0.0000
## 220 0.7211 nan 0.0100 0.0001
## 240 0.7041 nan 0.0100 0.0001
## 260 0.6897 nan 0.0100 0.0002
## 280 0.6748 nan 0.0100 0.0001
## 300 0.6619 nan 0.0100 0.0001
## 320 0.6507 nan 0.0100 0.0000
## 340 0.6399 nan 0.0100 -0.0000
## 360 0.6294 nan 0.0100 0.0001
## 380 0.6196 nan 0.0100 0.0001
## 400 0.6104 nan 0.0100 0.0001
## 420 0.6017 nan 0.0100 -0.0002
## 440 0.5935 nan 0.0100 -0.0001
## 460 0.5856 nan 0.0100 -0.0002
## 480 0.5766 nan 0.0100 -0.0001
## 500 0.5686 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3129 nan 0.0100 0.0037
## 2 1.3038 nan 0.0100 0.0039
## 3 1.2954 nan 0.0100 0.0037
## 4 1.2877 nan 0.0100 0.0033
## 5 1.2793 nan 0.0100 0.0039
## 6 1.2718 nan 0.0100 0.0032
## 7 1.2639 nan 0.0100 0.0034
## 8 1.2562 nan 0.0100 0.0035
## 9 1.2486 nan 0.0100 0.0032
## 10 1.2416 nan 0.0100 0.0030
## 20 1.1742 nan 0.0100 0.0025
## 40 1.0658 nan 0.0100 0.0018
## 60 0.9801 nan 0.0100 0.0017
## 80 0.9141 nan 0.0100 0.0011
## 100 0.8608 nan 0.0100 0.0009
## 120 0.8177 nan 0.0100 0.0007
## 140 0.7820 nan 0.0100 0.0005
## 160 0.7509 nan 0.0100 0.0003
## 180 0.7252 nan 0.0100 0.0001
## 200 0.7014 nan 0.0100 0.0003
## 220 0.6815 nan 0.0100 0.0002
## 240 0.6628 nan 0.0100 0.0002
## 260 0.6451 nan 0.0100 0.0000
## 280 0.6285 nan 0.0100 0.0001
## 300 0.6130 nan 0.0100 -0.0000
## 320 0.5987 nan 0.0100 -0.0001
## 340 0.5872 nan 0.0100 0.0000
## 360 0.5759 nan 0.0100 -0.0002
## 380 0.5646 nan 0.0100 0.0001
## 400 0.5528 nan 0.0100 0.0000
## 420 0.5420 nan 0.0100 -0.0001
## 440 0.5310 nan 0.0100 0.0002
## 460 0.5220 nan 0.0100 -0.0002
## 480 0.5116 nan 0.0100 -0.0001
## 500 0.5015 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3123 nan 0.0100 0.0036
## 2 1.3045 nan 0.0100 0.0033
## 3 1.2965 nan 0.0100 0.0037
## 4 1.2885 nan 0.0100 0.0033
## 5 1.2807 nan 0.0100 0.0038
## 6 1.2733 nan 0.0100 0.0033
## 7 1.2649 nan 0.0100 0.0040
## 8 1.2571 nan 0.0100 0.0036
## 9 1.2489 nan 0.0100 0.0035
## 10 1.2415 nan 0.0100 0.0033
## 20 1.1739 nan 0.0100 0.0029
## 40 1.0638 nan 0.0100 0.0018
## 60 0.9822 nan 0.0100 0.0016
## 80 0.9184 nan 0.0100 0.0011
## 100 0.8651 nan 0.0100 0.0009
## 120 0.8213 nan 0.0100 0.0007
## 140 0.7851 nan 0.0100 0.0005
## 160 0.7543 nan 0.0100 0.0002
## 180 0.7278 nan 0.0100 0.0002
## 200 0.7049 nan 0.0100 0.0002
## 220 0.6850 nan 0.0100 0.0002
## 240 0.6671 nan 0.0100 0.0001
## 260 0.6485 nan 0.0100 0.0001
## 280 0.6337 nan 0.0100 0.0001
## 300 0.6179 nan 0.0100 0.0001
## 320 0.6041 nan 0.0100 0.0001
## 340 0.5927 nan 0.0100 0.0001
## 360 0.5805 nan 0.0100 -0.0000
## 380 0.5695 nan 0.0100 0.0002
## 400 0.5578 nan 0.0100 0.0002
## 420 0.5477 nan 0.0100 0.0001
## 440 0.5372 nan 0.0100 -0.0000
## 460 0.5280 nan 0.0100 -0.0001
## 480 0.5194 nan 0.0100 -0.0001
## 500 0.5102 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3121 nan 0.0100 0.0038
## 2 1.3032 nan 0.0100 0.0041
## 3 1.2952 nan 0.0100 0.0035
## 4 1.2873 nan 0.0100 0.0036
## 5 1.2788 nan 0.0100 0.0039
## 6 1.2707 nan 0.0100 0.0040
## 7 1.2632 nan 0.0100 0.0034
## 8 1.2556 nan 0.0100 0.0036
## 9 1.2478 nan 0.0100 0.0034
## 10 1.2399 nan 0.0100 0.0033
## 20 1.1724 nan 0.0100 0.0029
## 40 1.0665 nan 0.0100 0.0021
## 60 0.9839 nan 0.0100 0.0015
## 80 0.9201 nan 0.0100 0.0011
## 100 0.8672 nan 0.0100 0.0006
## 120 0.8263 nan 0.0100 0.0005
## 140 0.7909 nan 0.0100 0.0005
## 160 0.7615 nan 0.0100 0.0003
## 180 0.7346 nan 0.0100 0.0002
## 200 0.7119 nan 0.0100 0.0002
## 220 0.6924 nan 0.0100 0.0000
## 240 0.6751 nan 0.0100 0.0001
## 260 0.6596 nan 0.0100 0.0002
## 280 0.6439 nan 0.0100 0.0002
## 300 0.6297 nan 0.0100 -0.0002
## 320 0.6156 nan 0.0100 0.0001
## 340 0.6031 nan 0.0100 0.0001
## 360 0.5920 nan 0.0100 0.0000
## 380 0.5808 nan 0.0100 0.0000
## 400 0.5696 nan 0.0100 0.0002
## 420 0.5596 nan 0.0100 -0.0001
## 440 0.5491 nan 0.0100 0.0000
## 460 0.5391 nan 0.0100 0.0000
## 480 0.5302 nan 0.0100 -0.0000
## 500 0.5212 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3117 nan 0.0100 0.0042
## 2 1.3029 nan 0.0100 0.0039
## 3 1.2949 nan 0.0100 0.0037
## 4 1.2859 nan 0.0100 0.0036
## 5 1.2778 nan 0.0100 0.0038
## 6 1.2696 nan 0.0100 0.0037
## 7 1.2627 nan 0.0100 0.0033
## 8 1.2550 nan 0.0100 0.0036
## 9 1.2472 nan 0.0100 0.0033
## 10 1.2399 nan 0.0100 0.0031
## 20 1.1684 nan 0.0100 0.0029
## 40 1.0545 nan 0.0100 0.0019
## 60 0.9684 nan 0.0100 0.0015
## 80 0.9001 nan 0.0100 0.0010
## 100 0.8440 nan 0.0100 0.0008
## 120 0.7976 nan 0.0100 0.0007
## 140 0.7607 nan 0.0100 0.0005
## 160 0.7273 nan 0.0100 0.0005
## 180 0.6983 nan 0.0100 0.0003
## 200 0.6733 nan 0.0100 0.0001
## 220 0.6503 nan 0.0100 0.0003
## 240 0.6307 nan 0.0100 0.0001
## 260 0.6122 nan 0.0100 -0.0001
## 280 0.5953 nan 0.0100 0.0001
## 300 0.5799 nan 0.0100 -0.0000
## 320 0.5649 nan 0.0100 0.0001
## 340 0.5507 nan 0.0100 -0.0000
## 360 0.5379 nan 0.0100 -0.0002
## 380 0.5257 nan 0.0100 -0.0000
## 400 0.5139 nan 0.0100 0.0000
## 420 0.5022 nan 0.0100 -0.0000
## 440 0.4904 nan 0.0100 0.0001
## 460 0.4807 nan 0.0100 -0.0000
## 480 0.4701 nan 0.0100 -0.0002
## 500 0.4611 nan 0.0100 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3124 nan 0.0100 0.0037
## 2 1.3036 nan 0.0100 0.0042
## 3 1.2948 nan 0.0100 0.0040
## 4 1.2869 nan 0.0100 0.0036
## 5 1.2788 nan 0.0100 0.0037
## 6 1.2708 nan 0.0100 0.0039
## 7 1.2634 nan 0.0100 0.0034
## 8 1.2554 nan 0.0100 0.0035
## 9 1.2473 nan 0.0100 0.0036
## 10 1.2388 nan 0.0100 0.0037
## 20 1.1679 nan 0.0100 0.0031
## 40 1.0550 nan 0.0100 0.0016
## 60 0.9661 nan 0.0100 0.0017
## 80 0.8958 nan 0.0100 0.0009
## 100 0.8410 nan 0.0100 0.0009
## 120 0.7978 nan 0.0100 0.0006
## 140 0.7602 nan 0.0100 0.0007
## 160 0.7270 nan 0.0100 0.0004
## 180 0.6994 nan 0.0100 0.0001
## 200 0.6752 nan 0.0100 0.0003
## 220 0.6542 nan 0.0100 0.0001
## 240 0.6345 nan 0.0100 -0.0001
## 260 0.6175 nan 0.0100 0.0001
## 280 0.5993 nan 0.0100 0.0001
## 300 0.5841 nan 0.0100 -0.0000
## 320 0.5690 nan 0.0100 0.0003
## 340 0.5553 nan 0.0100 -0.0000
## 360 0.5424 nan 0.0100 -0.0000
## 380 0.5293 nan 0.0100 -0.0002
## 400 0.5179 nan 0.0100 -0.0000
## 420 0.5067 nan 0.0100 0.0000
## 440 0.4963 nan 0.0100 -0.0000
## 460 0.4849 nan 0.0100 0.0000
## 480 0.4751 nan 0.0100 -0.0000
## 500 0.4658 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3113 nan 0.0100 0.0045
## 2 1.3028 nan 0.0100 0.0040
## 3 1.2941 nan 0.0100 0.0040
## 4 1.2855 nan 0.0100 0.0034
## 5 1.2774 nan 0.0100 0.0032
## 6 1.2692 nan 0.0100 0.0037
## 7 1.2622 nan 0.0100 0.0032
## 8 1.2543 nan 0.0100 0.0034
## 9 1.2468 nan 0.0100 0.0033
## 10 1.2387 nan 0.0100 0.0038
## 20 1.1695 nan 0.0100 0.0029
## 40 1.0574 nan 0.0100 0.0019
## 60 0.9736 nan 0.0100 0.0015
## 80 0.9050 nan 0.0100 0.0014
## 100 0.8513 nan 0.0100 0.0008
## 120 0.8074 nan 0.0100 0.0003
## 140 0.7704 nan 0.0100 0.0005
## 160 0.7397 nan 0.0100 0.0003
## 180 0.7116 nan 0.0100 0.0002
## 200 0.6886 nan 0.0100 0.0004
## 220 0.6659 nan 0.0100 -0.0001
## 240 0.6460 nan 0.0100 0.0001
## 260 0.6283 nan 0.0100 -0.0000
## 280 0.6123 nan 0.0100 0.0003
## 300 0.5967 nan 0.0100 0.0001
## 320 0.5822 nan 0.0100 0.0001
## 340 0.5683 nan 0.0100 -0.0000
## 360 0.5549 nan 0.0100 0.0000
## 380 0.5422 nan 0.0100 0.0001
## 400 0.5310 nan 0.0100 0.0002
## 420 0.5188 nan 0.0100 -0.0001
## 440 0.5080 nan 0.0100 0.0000
## 460 0.4980 nan 0.0100 -0.0001
## 480 0.4878 nan 0.0100 -0.0001
## 500 0.4776 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2361 nan 0.1000 0.0350
## 2 1.1733 nan 0.1000 0.0280
## 3 1.1280 nan 0.1000 0.0191
## 4 1.0820 nan 0.1000 0.0184
## 5 1.0378 nan 0.1000 0.0182
## 6 1.0019 nan 0.1000 0.0148
## 7 0.9726 nan 0.1000 0.0119
## 8 0.9393 nan 0.1000 0.0129
## 9 0.9099 nan 0.1000 0.0109
## 10 0.8858 nan 0.1000 0.0105
## 20 0.7379 nan 0.1000 0.0024
## 40 0.6069 nan 0.1000 0.0001
## 60 0.5181 nan 0.1000 -0.0016
## 80 0.4547 nan 0.1000 -0.0010
## 100 0.4008 nan 0.1000 -0.0012
## 120 0.3568 nan 0.1000 -0.0003
## 140 0.3215 nan 0.1000 -0.0008
## 160 0.2887 nan 0.1000 -0.0002
## 180 0.2617 nan 0.1000 -0.0004
## 200 0.2410 nan 0.1000 -0.0005
## 220 0.2152 nan 0.1000 -0.0000
## 240 0.1949 nan 0.1000 -0.0009
## 260 0.1787 nan 0.1000 -0.0005
## 280 0.1637 nan 0.1000 -0.0002
## 300 0.1497 nan 0.1000 -0.0004
## 320 0.1382 nan 0.1000 -0.0005
## 340 0.1260 nan 0.1000 -0.0003
## 360 0.1180 nan 0.1000 -0.0000
## 380 0.1097 nan 0.1000 -0.0002
## 400 0.1026 nan 0.1000 -0.0003
## 420 0.0949 nan 0.1000 -0.0004
## 440 0.0885 nan 0.1000 -0.0001
## 460 0.0819 nan 0.1000 -0.0001
## 480 0.0756 nan 0.1000 -0.0001
## 500 0.0695 nan 0.1000 -0.0003
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2417 nan 0.1000 0.0394
## 2 1.1764 nan 0.1000 0.0282
## 3 1.1244 nan 0.1000 0.0217
## 4 1.0789 nan 0.1000 0.0211
## 5 1.0379 nan 0.1000 0.0162
## 6 1.0056 nan 0.1000 0.0129
## 7 0.9750 nan 0.1000 0.0116
## 8 0.9404 nan 0.1000 0.0139
## 9 0.9137 nan 0.1000 0.0109
## 10 0.8926 nan 0.1000 0.0062
## 20 0.7431 nan 0.1000 0.0011
## 40 0.6024 nan 0.1000 0.0011
## 60 0.5230 nan 0.1000 -0.0006
## 80 0.4642 nan 0.1000 -0.0006
## 100 0.4084 nan 0.1000 -0.0021
## 120 0.3643 nan 0.1000 0.0002
## 140 0.3284 nan 0.1000 -0.0007
## 160 0.2961 nan 0.1000 -0.0008
## 180 0.2695 nan 0.1000 -0.0010
## 200 0.2417 nan 0.1000 -0.0017
## 220 0.2231 nan 0.1000 -0.0011
## 240 0.2047 nan 0.1000 -0.0010
## 260 0.1894 nan 0.1000 -0.0005
## 280 0.1734 nan 0.1000 -0.0005
## 300 0.1592 nan 0.1000 -0.0004
## 320 0.1476 nan 0.1000 -0.0000
## 340 0.1368 nan 0.1000 -0.0002
## 360 0.1249 nan 0.1000 -0.0003
## 380 0.1170 nan 0.1000 -0.0003
## 400 0.1084 nan 0.1000 -0.0005
## 420 0.1010 nan 0.1000 -0.0004
## 440 0.0941 nan 0.1000 -0.0003
## 460 0.0885 nan 0.1000 -0.0002
## 480 0.0823 nan 0.1000 -0.0003
## 500 0.0766 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2456 nan 0.1000 0.0333
## 2 1.1762 nan 0.1000 0.0320
## 3 1.1269 nan 0.1000 0.0225
## 4 1.0732 nan 0.1000 0.0246
## 5 1.0299 nan 0.1000 0.0165
## 6 0.9871 nan 0.1000 0.0165
## 7 0.9544 nan 0.1000 0.0107
## 8 0.9259 nan 0.1000 0.0126
## 9 0.9051 nan 0.1000 0.0061
## 10 0.8778 nan 0.1000 0.0101
## 20 0.7378 nan 0.1000 0.0033
## 40 0.6086 nan 0.1000 0.0011
## 60 0.5348 nan 0.1000 0.0006
## 80 0.4750 nan 0.1000 -0.0005
## 100 0.4220 nan 0.1000 -0.0012
## 120 0.3736 nan 0.1000 -0.0026
## 140 0.3341 nan 0.1000 -0.0013
## 160 0.3035 nan 0.1000 -0.0005
## 180 0.2767 nan 0.1000 -0.0000
## 200 0.2553 nan 0.1000 -0.0009
## 220 0.2336 nan 0.1000 -0.0004
## 240 0.2150 nan 0.1000 -0.0006
## 260 0.1996 nan 0.1000 -0.0012
## 280 0.1852 nan 0.1000 -0.0007
## 300 0.1723 nan 0.1000 -0.0004
## 320 0.1591 nan 0.1000 -0.0006
## 340 0.1470 nan 0.1000 -0.0006
## 360 0.1356 nan 0.1000 -0.0003
## 380 0.1257 nan 0.1000 -0.0002
## 400 0.1172 nan 0.1000 -0.0001
## 420 0.1082 nan 0.1000 -0.0003
## 440 0.1005 nan 0.1000 -0.0001
## 460 0.0939 nan 0.1000 -0.0002
## 480 0.0864 nan 0.1000 -0.0004
## 500 0.0809 nan 0.1000 -0.0004
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2496 nan 0.1000 0.0344
## 2 1.1873 nan 0.1000 0.0292
## 3 1.1268 nan 0.1000 0.0264
## 4 1.0743 nan 0.1000 0.0230
## 5 1.0314 nan 0.1000 0.0141
## 6 0.9872 nan 0.1000 0.0195
## 7 0.9473 nan 0.1000 0.0156
## 8 0.9158 nan 0.1000 0.0142
## 9 0.8902 nan 0.1000 0.0116
## 10 0.8628 nan 0.1000 0.0108
## 20 0.7115 nan 0.1000 0.0024
## 40 0.5594 nan 0.1000 -0.0010
## 60 0.4682 nan 0.1000 0.0003
## 80 0.3936 nan 0.1000 -0.0007
## 100 0.3369 nan 0.1000 -0.0001
## 120 0.2903 nan 0.1000 -0.0009
## 140 0.2535 nan 0.1000 -0.0001
## 160 0.2195 nan 0.1000 0.0000
## 180 0.1957 nan 0.1000 -0.0003
## 200 0.1736 nan 0.1000 0.0001
## 220 0.1559 nan 0.1000 -0.0003
## 240 0.1399 nan 0.1000 -0.0003
## 260 0.1247 nan 0.1000 -0.0002
## 280 0.1114 nan 0.1000 -0.0002
## 300 0.1009 nan 0.1000 -0.0001
## 320 0.0916 nan 0.1000 -0.0004
## 340 0.0826 nan 0.1000 -0.0004
## 360 0.0750 nan 0.1000 -0.0001
## 380 0.0683 nan 0.1000 -0.0001
## 400 0.0616 nan 0.1000 -0.0002
## 420 0.0561 nan 0.1000 -0.0001
## 440 0.0512 nan 0.1000 -0.0001
## 460 0.0468 nan 0.1000 -0.0001
## 480 0.0424 nan 0.1000 -0.0000
## 500 0.0384 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2387 nan 0.1000 0.0359
## 2 1.1728 nan 0.1000 0.0249
## 3 1.1146 nan 0.1000 0.0242
## 4 1.0527 nan 0.1000 0.0260
## 5 1.0118 nan 0.1000 0.0187
## 6 0.9754 nan 0.1000 0.0137
## 7 0.9428 nan 0.1000 0.0136
## 8 0.9074 nan 0.1000 0.0120
## 9 0.8798 nan 0.1000 0.0113
## 10 0.8564 nan 0.1000 0.0076
## 20 0.7107 nan 0.1000 0.0023
## 40 0.5675 nan 0.1000 -0.0006
## 60 0.4794 nan 0.1000 -0.0004
## 80 0.4062 nan 0.1000 -0.0004
## 100 0.3545 nan 0.1000 0.0000
## 120 0.3118 nan 0.1000 -0.0007
## 140 0.2772 nan 0.1000 -0.0007
## 160 0.2393 nan 0.1000 -0.0010
## 180 0.2105 nan 0.1000 -0.0001
## 200 0.1865 nan 0.1000 -0.0004
## 220 0.1648 nan 0.1000 0.0001
## 240 0.1468 nan 0.1000 0.0002
## 260 0.1324 nan 0.1000 -0.0006
## 280 0.1193 nan 0.1000 -0.0005
## 300 0.1067 nan 0.1000 -0.0003
## 320 0.0957 nan 0.1000 -0.0000
## 340 0.0861 nan 0.1000 -0.0002
## 360 0.0791 nan 0.1000 -0.0006
## 380 0.0712 nan 0.1000 -0.0002
## 400 0.0650 nan 0.1000 -0.0004
## 420 0.0595 nan 0.1000 -0.0002
## 440 0.0533 nan 0.1000 -0.0001
## 460 0.0481 nan 0.1000 -0.0002
## 480 0.0435 nan 0.1000 -0.0002
## 500 0.0395 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2362 nan 0.1000 0.0362
## 2 1.1636 nan 0.1000 0.0313
## 3 1.1072 nan 0.1000 0.0279
## 4 1.0562 nan 0.1000 0.0209
## 5 1.0135 nan 0.1000 0.0201
## 6 0.9726 nan 0.1000 0.0179
## 7 0.9389 nan 0.1000 0.0129
## 8 0.9093 nan 0.1000 0.0112
## 9 0.8836 nan 0.1000 0.0090
## 10 0.8561 nan 0.1000 0.0105
## 20 0.7126 nan 0.1000 0.0024
## 40 0.5733 nan 0.1000 -0.0011
## 60 0.4872 nan 0.1000 -0.0016
## 80 0.4204 nan 0.1000 -0.0015
## 100 0.3672 nan 0.1000 -0.0010
## 120 0.3198 nan 0.1000 -0.0017
## 140 0.2841 nan 0.1000 -0.0007
## 160 0.2508 nan 0.1000 -0.0003
## 180 0.2228 nan 0.1000 -0.0004
## 200 0.1984 nan 0.1000 -0.0003
## 220 0.1756 nan 0.1000 -0.0008
## 240 0.1571 nan 0.1000 -0.0009
## 260 0.1421 nan 0.1000 -0.0005
## 280 0.1258 nan 0.1000 -0.0007
## 300 0.1122 nan 0.1000 -0.0006
## 320 0.1017 nan 0.1000 -0.0003
## 340 0.0932 nan 0.1000 -0.0002
## 360 0.0853 nan 0.1000 -0.0006
## 380 0.0781 nan 0.1000 -0.0003
## 400 0.0713 nan 0.1000 -0.0002
## 420 0.0647 nan 0.1000 -0.0002
## 440 0.0601 nan 0.1000 -0.0002
## 460 0.0542 nan 0.1000 -0.0001
## 480 0.0493 nan 0.1000 -0.0002
## 500 0.0454 nan 0.1000 -0.0004
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2394 nan 0.1000 0.0370
## 2 1.1615 nan 0.1000 0.0301
## 3 1.1007 nan 0.1000 0.0252
## 4 1.0483 nan 0.1000 0.0216
## 5 1.0001 nan 0.1000 0.0192
## 6 0.9619 nan 0.1000 0.0150
## 7 0.9223 nan 0.1000 0.0153
## 8 0.8907 nan 0.1000 0.0119
## 9 0.8662 nan 0.1000 0.0077
## 10 0.8386 nan 0.1000 0.0094
## 20 0.6783 nan 0.1000 0.0010
## 40 0.5148 nan 0.1000 0.0007
## 60 0.4257 nan 0.1000 -0.0013
## 80 0.3497 nan 0.1000 0.0005
## 100 0.2922 nan 0.1000 -0.0005
## 120 0.2465 nan 0.1000 0.0000
## 140 0.2112 nan 0.1000 -0.0003
## 160 0.1811 nan 0.1000 -0.0001
## 180 0.1582 nan 0.1000 -0.0005
## 200 0.1354 nan 0.1000 -0.0004
## 220 0.1167 nan 0.1000 -0.0002
## 240 0.1024 nan 0.1000 -0.0002
## 260 0.0910 nan 0.1000 -0.0003
## 280 0.0800 nan 0.1000 0.0001
## 300 0.0710 nan 0.1000 -0.0002
## 320 0.0629 nan 0.1000 -0.0000
## 340 0.0553 nan 0.1000 0.0000
## 360 0.0493 nan 0.1000 -0.0001
## 380 0.0436 nan 0.1000 0.0001
## 400 0.0386 nan 0.1000 -0.0003
## 420 0.0337 nan 0.1000 0.0001
## 440 0.0298 nan 0.1000 -0.0000
## 460 0.0263 nan 0.1000 -0.0000
## 480 0.0233 nan 0.1000 -0.0000
## 500 0.0209 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2365 nan 0.1000 0.0422
## 2 1.1624 nan 0.1000 0.0295
## 3 1.1046 nan 0.1000 0.0227
## 4 1.0556 nan 0.1000 0.0221
## 5 1.0092 nan 0.1000 0.0192
## 6 0.9697 nan 0.1000 0.0144
## 7 0.9362 nan 0.1000 0.0136
## 8 0.9014 nan 0.1000 0.0131
## 9 0.8726 nan 0.1000 0.0103
## 10 0.8496 nan 0.1000 0.0072
## 20 0.6834 nan 0.1000 0.0003
## 40 0.5183 nan 0.1000 -0.0012
## 60 0.4203 nan 0.1000 -0.0015
## 80 0.3438 nan 0.1000 0.0004
## 100 0.2925 nan 0.1000 -0.0009
## 120 0.2534 nan 0.1000 -0.0007
## 140 0.2190 nan 0.1000 -0.0007
## 160 0.1894 nan 0.1000 -0.0006
## 180 0.1637 nan 0.1000 -0.0007
## 200 0.1437 nan 0.1000 -0.0005
## 220 0.1263 nan 0.1000 -0.0005
## 240 0.1112 nan 0.1000 -0.0002
## 260 0.0978 nan 0.1000 -0.0001
## 280 0.0854 nan 0.1000 -0.0001
## 300 0.0757 nan 0.1000 -0.0003
## 320 0.0673 nan 0.1000 -0.0002
## 340 0.0602 nan 0.1000 -0.0002
## 360 0.0537 nan 0.1000 -0.0003
## 380 0.0479 nan 0.1000 -0.0001
## 400 0.0427 nan 0.1000 -0.0000
## 420 0.0380 nan 0.1000 -0.0001
## 440 0.0338 nan 0.1000 -0.0002
## 460 0.0301 nan 0.1000 -0.0001
## 480 0.0274 nan 0.1000 -0.0000
## 500 0.0242 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2353 nan 0.1000 0.0383
## 2 1.1622 nan 0.1000 0.0338
## 3 1.0967 nan 0.1000 0.0285
## 4 1.0456 nan 0.1000 0.0238
## 5 1.0050 nan 0.1000 0.0169
## 6 0.9605 nan 0.1000 0.0174
## 7 0.9284 nan 0.1000 0.0115
## 8 0.8946 nan 0.1000 0.0092
## 9 0.8681 nan 0.1000 0.0093
## 10 0.8428 nan 0.1000 0.0089
## 20 0.6936 nan 0.1000 0.0001
## 40 0.5390 nan 0.1000 -0.0014
## 60 0.4329 nan 0.1000 -0.0001
## 80 0.3642 nan 0.1000 -0.0013
## 100 0.3084 nan 0.1000 -0.0016
## 120 0.2664 nan 0.1000 -0.0009
## 140 0.2306 nan 0.1000 -0.0008
## 160 0.1980 nan 0.1000 -0.0006
## 180 0.1711 nan 0.1000 -0.0003
## 200 0.1512 nan 0.1000 -0.0006
## 220 0.1325 nan 0.1000 -0.0006
## 240 0.1179 nan 0.1000 -0.0005
## 260 0.1046 nan 0.1000 -0.0002
## 280 0.0922 nan 0.1000 -0.0003
## 300 0.0819 nan 0.1000 -0.0000
## 320 0.0736 nan 0.1000 -0.0002
## 340 0.0653 nan 0.1000 -0.0001
## 360 0.0584 nan 0.1000 -0.0002
## 380 0.0521 nan 0.1000 -0.0003
## 400 0.0460 nan 0.1000 -0.0001
## 420 0.0412 nan 0.1000 -0.0001
## 440 0.0365 nan 0.1000 -0.0002
## 460 0.0330 nan 0.1000 -0.0001
## 480 0.0296 nan 0.1000 -0.0002
## 500 0.0267 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3195 nan 0.0010 0.0004
## 3 1.3187 nan 0.0010 0.0003
## 4 1.3179 nan 0.0010 0.0004
## 5 1.3171 nan 0.0010 0.0004
## 6 1.3162 nan 0.0010 0.0004
## 7 1.3154 nan 0.0010 0.0003
## 8 1.3145 nan 0.0010 0.0004
## 9 1.3138 nan 0.0010 0.0003
## 10 1.3130 nan 0.0010 0.0003
## 20 1.3051 nan 0.0010 0.0003
## 40 1.2892 nan 0.0010 0.0004
## 60 1.2738 nan 0.0010 0.0003
## 80 1.2588 nan 0.0010 0.0004
## 100 1.2442 nan 0.0010 0.0003
## 120 1.2304 nan 0.0010 0.0003
## 140 1.2169 nan 0.0010 0.0003
## 160 1.2039 nan 0.0010 0.0003
## 180 1.1909 nan 0.0010 0.0003
## 200 1.1786 nan 0.0010 0.0003
## 220 1.1662 nan 0.0010 0.0003
## 240 1.1541 nan 0.0010 0.0003
## 260 1.1425 nan 0.0010 0.0002
## 280 1.1315 nan 0.0010 0.0002
## 300 1.1208 nan 0.0010 0.0002
## 320 1.1105 nan 0.0010 0.0003
## 340 1.1006 nan 0.0010 0.0002
## 360 1.0910 nan 0.0010 0.0002
## 380 1.0814 nan 0.0010 0.0002
## 400 1.0723 nan 0.0010 0.0002
## 420 1.0633 nan 0.0010 0.0002
## 440 1.0544 nan 0.0010 0.0002
## 460 1.0459 nan 0.0010 0.0002
## 480 1.0377 nan 0.0010 0.0002
## 500 1.0293 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3195 nan 0.0010 0.0004
## 3 1.3187 nan 0.0010 0.0003
## 4 1.3178 nan 0.0010 0.0004
## 5 1.3170 nan 0.0010 0.0004
## 6 1.3162 nan 0.0010 0.0004
## 7 1.3153 nan 0.0010 0.0004
## 8 1.3145 nan 0.0010 0.0004
## 9 1.3136 nan 0.0010 0.0004
## 10 1.3127 nan 0.0010 0.0004
## 20 1.3044 nan 0.0010 0.0004
## 40 1.2887 nan 0.0010 0.0004
## 60 1.2730 nan 0.0010 0.0004
## 80 1.2579 nan 0.0010 0.0003
## 100 1.2433 nan 0.0010 0.0003
## 120 1.2294 nan 0.0010 0.0002
## 140 1.2155 nan 0.0010 0.0003
## 160 1.2023 nan 0.0010 0.0003
## 180 1.1893 nan 0.0010 0.0003
## 200 1.1770 nan 0.0010 0.0003
## 220 1.1652 nan 0.0010 0.0002
## 240 1.1535 nan 0.0010 0.0003
## 260 1.1422 nan 0.0010 0.0003
## 280 1.1310 nan 0.0010 0.0002
## 300 1.1203 nan 0.0010 0.0002
## 320 1.1100 nan 0.0010 0.0002
## 340 1.1001 nan 0.0010 0.0002
## 360 1.0905 nan 0.0010 0.0002
## 380 1.0809 nan 0.0010 0.0002
## 400 1.0716 nan 0.0010 0.0002
## 420 1.0624 nan 0.0010 0.0002
## 440 1.0538 nan 0.0010 0.0002
## 460 1.0451 nan 0.0010 0.0002
## 480 1.0370 nan 0.0010 0.0001
## 500 1.0289 nan 0.0010 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3204 nan 0.0010 0.0004
## 2 1.3195 nan 0.0010 0.0004
## 3 1.3186 nan 0.0010 0.0004
## 4 1.3178 nan 0.0010 0.0004
## 5 1.3169 nan 0.0010 0.0004
## 6 1.3161 nan 0.0010 0.0004
## 7 1.3153 nan 0.0010 0.0003
## 8 1.3145 nan 0.0010 0.0004
## 9 1.3137 nan 0.0010 0.0004
## 10 1.3129 nan 0.0010 0.0004
## 20 1.3047 nan 0.0010 0.0004
## 40 1.2885 nan 0.0010 0.0003
## 60 1.2730 nan 0.0010 0.0003
## 80 1.2586 nan 0.0010 0.0003
## 100 1.2438 nan 0.0010 0.0003
## 120 1.2302 nan 0.0010 0.0003
## 140 1.2170 nan 0.0010 0.0003
## 160 1.2038 nan 0.0010 0.0003
## 180 1.1909 nan 0.0010 0.0003
## 200 1.1785 nan 0.0010 0.0002
## 220 1.1664 nan 0.0010 0.0002
## 240 1.1550 nan 0.0010 0.0002
## 260 1.1439 nan 0.0010 0.0002
## 280 1.1332 nan 0.0010 0.0002
## 300 1.1228 nan 0.0010 0.0003
## 320 1.1127 nan 0.0010 0.0002
## 340 1.1027 nan 0.0010 0.0002
## 360 1.0932 nan 0.0010 0.0002
## 380 1.0836 nan 0.0010 0.0002
## 400 1.0741 nan 0.0010 0.0002
## 420 1.0652 nan 0.0010 0.0002
## 440 1.0565 nan 0.0010 0.0001
## 460 1.0483 nan 0.0010 0.0002
## 480 1.0400 nan 0.0010 0.0002
## 500 1.0317 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3202 nan 0.0010 0.0005
## 2 1.3194 nan 0.0010 0.0004
## 3 1.3185 nan 0.0010 0.0004
## 4 1.3175 nan 0.0010 0.0004
## 5 1.3167 nan 0.0010 0.0004
## 6 1.3158 nan 0.0010 0.0004
## 7 1.3149 nan 0.0010 0.0004
## 8 1.3140 nan 0.0010 0.0004
## 9 1.3132 nan 0.0010 0.0003
## 10 1.3123 nan 0.0010 0.0004
## 20 1.3037 nan 0.0010 0.0003
## 40 1.2866 nan 0.0010 0.0004
## 60 1.2704 nan 0.0010 0.0004
## 80 1.2545 nan 0.0010 0.0003
## 100 1.2385 nan 0.0010 0.0004
## 120 1.2235 nan 0.0010 0.0003
## 140 1.2089 nan 0.0010 0.0003
## 160 1.1951 nan 0.0010 0.0003
## 180 1.1818 nan 0.0010 0.0003
## 200 1.1684 nan 0.0010 0.0003
## 220 1.1557 nan 0.0010 0.0003
## 240 1.1434 nan 0.0010 0.0003
## 260 1.1312 nan 0.0010 0.0003
## 280 1.1196 nan 0.0010 0.0003
## 300 1.1083 nan 0.0010 0.0002
## 320 1.0975 nan 0.0010 0.0002
## 340 1.0868 nan 0.0010 0.0002
## 360 1.0763 nan 0.0010 0.0002
## 380 1.0662 nan 0.0010 0.0002
## 400 1.0563 nan 0.0010 0.0002
## 420 1.0469 nan 0.0010 0.0002
## 440 1.0375 nan 0.0010 0.0002
## 460 1.0283 nan 0.0010 0.0002
## 480 1.0196 nan 0.0010 0.0001
## 500 1.0111 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3202 nan 0.0010 0.0004
## 2 1.3194 nan 0.0010 0.0004
## 3 1.3185 nan 0.0010 0.0004
## 4 1.3176 nan 0.0010 0.0004
## 5 1.3167 nan 0.0010 0.0004
## 6 1.3158 nan 0.0010 0.0004
## 7 1.3148 nan 0.0010 0.0004
## 8 1.3138 nan 0.0010 0.0004
## 9 1.3129 nan 0.0010 0.0004
## 10 1.3120 nan 0.0010 0.0004
## 20 1.3031 nan 0.0010 0.0004
## 40 1.2864 nan 0.0010 0.0004
## 60 1.2698 nan 0.0010 0.0004
## 80 1.2540 nan 0.0010 0.0003
## 100 1.2388 nan 0.0010 0.0003
## 120 1.2234 nan 0.0010 0.0003
## 140 1.2089 nan 0.0010 0.0003
## 160 1.1951 nan 0.0010 0.0003
## 180 1.1814 nan 0.0010 0.0003
## 200 1.1687 nan 0.0010 0.0003
## 220 1.1559 nan 0.0010 0.0003
## 240 1.1438 nan 0.0010 0.0003
## 260 1.1316 nan 0.0010 0.0002
## 280 1.1202 nan 0.0010 0.0002
## 300 1.1092 nan 0.0010 0.0002
## 320 1.0984 nan 0.0010 0.0002
## 340 1.0878 nan 0.0010 0.0002
## 360 1.0776 nan 0.0010 0.0002
## 380 1.0674 nan 0.0010 0.0002
## 400 1.0579 nan 0.0010 0.0002
## 420 1.0484 nan 0.0010 0.0002
## 440 1.0394 nan 0.0010 0.0002
## 460 1.0305 nan 0.0010 0.0002
## 480 1.0219 nan 0.0010 0.0002
## 500 1.0132 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3194 nan 0.0010 0.0004
## 3 1.3185 nan 0.0010 0.0004
## 4 1.3175 nan 0.0010 0.0004
## 5 1.3166 nan 0.0010 0.0004
## 6 1.3156 nan 0.0010 0.0004
## 7 1.3146 nan 0.0010 0.0004
## 8 1.3138 nan 0.0010 0.0004
## 9 1.3129 nan 0.0010 0.0004
## 10 1.3120 nan 0.0010 0.0004
## 20 1.3031 nan 0.0010 0.0004
## 40 1.2863 nan 0.0010 0.0004
## 60 1.2699 nan 0.0010 0.0004
## 80 1.2541 nan 0.0010 0.0003
## 100 1.2389 nan 0.0010 0.0003
## 120 1.2242 nan 0.0010 0.0003
## 140 1.2102 nan 0.0010 0.0003
## 160 1.1966 nan 0.0010 0.0002
## 180 1.1833 nan 0.0010 0.0003
## 200 1.1704 nan 0.0010 0.0003
## 220 1.1581 nan 0.0010 0.0003
## 240 1.1458 nan 0.0010 0.0003
## 260 1.1340 nan 0.0010 0.0003
## 280 1.1226 nan 0.0010 0.0003
## 300 1.1116 nan 0.0010 0.0002
## 320 1.1008 nan 0.0010 0.0002
## 340 1.0903 nan 0.0010 0.0002
## 360 1.0802 nan 0.0010 0.0002
## 380 1.0701 nan 0.0010 0.0002
## 400 1.0607 nan 0.0010 0.0002
## 420 1.0512 nan 0.0010 0.0002
## 440 1.0422 nan 0.0010 0.0002
## 460 1.0333 nan 0.0010 0.0002
## 480 1.0245 nan 0.0010 0.0002
## 500 1.0161 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3193 nan 0.0010 0.0004
## 3 1.3183 nan 0.0010 0.0005
## 4 1.3174 nan 0.0010 0.0004
## 5 1.3165 nan 0.0010 0.0004
## 6 1.3155 nan 0.0010 0.0004
## 7 1.3146 nan 0.0010 0.0004
## 8 1.3136 nan 0.0010 0.0004
## 9 1.3127 nan 0.0010 0.0004
## 10 1.3117 nan 0.0010 0.0004
## 20 1.3026 nan 0.0010 0.0004
## 40 1.2844 nan 0.0010 0.0004
## 60 1.2668 nan 0.0010 0.0004
## 80 1.2501 nan 0.0010 0.0004
## 100 1.2340 nan 0.0010 0.0004
## 120 1.2184 nan 0.0010 0.0004
## 140 1.2034 nan 0.0010 0.0003
## 160 1.1889 nan 0.0010 0.0003
## 180 1.1745 nan 0.0010 0.0003
## 200 1.1609 nan 0.0010 0.0003
## 220 1.1476 nan 0.0010 0.0003
## 240 1.1349 nan 0.0010 0.0003
## 260 1.1226 nan 0.0010 0.0002
## 280 1.1107 nan 0.0010 0.0003
## 300 1.0992 nan 0.0010 0.0002
## 320 1.0876 nan 0.0010 0.0002
## 340 1.0765 nan 0.0010 0.0002
## 360 1.0658 nan 0.0010 0.0002
## 380 1.0557 nan 0.0010 0.0002
## 400 1.0454 nan 0.0010 0.0002
## 420 1.0353 nan 0.0010 0.0002
## 440 1.0258 nan 0.0010 0.0002
## 460 1.0163 nan 0.0010 0.0002
## 480 1.0071 nan 0.0010 0.0002
## 500 0.9982 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3202 nan 0.0010 0.0004
## 2 1.3193 nan 0.0010 0.0004
## 3 1.3184 nan 0.0010 0.0004
## 4 1.3174 nan 0.0010 0.0004
## 5 1.3165 nan 0.0010 0.0004
## 6 1.3155 nan 0.0010 0.0004
## 7 1.3146 nan 0.0010 0.0004
## 8 1.3136 nan 0.0010 0.0004
## 9 1.3126 nan 0.0010 0.0004
## 10 1.3116 nan 0.0010 0.0005
## 20 1.3023 nan 0.0010 0.0004
## 40 1.2843 nan 0.0010 0.0003
## 60 1.2675 nan 0.0010 0.0004
## 80 1.2506 nan 0.0010 0.0004
## 100 1.2345 nan 0.0010 0.0004
## 120 1.2193 nan 0.0010 0.0004
## 140 1.2044 nan 0.0010 0.0003
## 160 1.1898 nan 0.0010 0.0003
## 180 1.1759 nan 0.0010 0.0003
## 200 1.1623 nan 0.0010 0.0003
## 220 1.1493 nan 0.0010 0.0003
## 240 1.1364 nan 0.0010 0.0003
## 260 1.1241 nan 0.0010 0.0003
## 280 1.1121 nan 0.0010 0.0003
## 300 1.1007 nan 0.0010 0.0002
## 320 1.0892 nan 0.0010 0.0002
## 340 1.0782 nan 0.0010 0.0003
## 360 1.0676 nan 0.0010 0.0002
## 380 1.0573 nan 0.0010 0.0002
## 400 1.0472 nan 0.0010 0.0002
## 420 1.0373 nan 0.0010 0.0002
## 440 1.0278 nan 0.0010 0.0002
## 460 1.0183 nan 0.0010 0.0002
## 480 1.0093 nan 0.0010 0.0002
## 500 1.0005 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3193 nan 0.0010 0.0005
## 3 1.3184 nan 0.0010 0.0004
## 4 1.3175 nan 0.0010 0.0004
## 5 1.3166 nan 0.0010 0.0004
## 6 1.3156 nan 0.0010 0.0004
## 7 1.3146 nan 0.0010 0.0004
## 8 1.3137 nan 0.0010 0.0004
## 9 1.3129 nan 0.0010 0.0004
## 10 1.3119 nan 0.0010 0.0004
## 20 1.3031 nan 0.0010 0.0004
## 40 1.2853 nan 0.0010 0.0004
## 60 1.2687 nan 0.0010 0.0003
## 80 1.2524 nan 0.0010 0.0004
## 100 1.2367 nan 0.0010 0.0004
## 120 1.2211 nan 0.0010 0.0004
## 140 1.2066 nan 0.0010 0.0003
## 160 1.1922 nan 0.0010 0.0003
## 180 1.1785 nan 0.0010 0.0003
## 200 1.1652 nan 0.0010 0.0003
## 220 1.1518 nan 0.0010 0.0003
## 240 1.1392 nan 0.0010 0.0003
## 260 1.1269 nan 0.0010 0.0003
## 280 1.1150 nan 0.0010 0.0003
## 300 1.1036 nan 0.0010 0.0003
## 320 1.0925 nan 0.0010 0.0003
## 340 1.0817 nan 0.0010 0.0003
## 360 1.0714 nan 0.0010 0.0002
## 380 1.0613 nan 0.0010 0.0002
## 400 1.0515 nan 0.0010 0.0002
## 420 1.0417 nan 0.0010 0.0002
## 440 1.0322 nan 0.0010 0.0002
## 460 1.0230 nan 0.0010 0.0002
## 480 1.0137 nan 0.0010 0.0002
## 500 1.0048 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3125 nan 0.0100 0.0040
## 2 1.3055 nan 0.0100 0.0027
## 3 1.2973 nan 0.0100 0.0039
## 4 1.2883 nan 0.0100 0.0041
## 5 1.2806 nan 0.0100 0.0035
## 6 1.2727 nan 0.0100 0.0039
## 7 1.2650 nan 0.0100 0.0032
## 8 1.2569 nan 0.0100 0.0036
## 9 1.2498 nan 0.0100 0.0035
## 10 1.2430 nan 0.0100 0.0032
## 20 1.1746 nan 0.0100 0.0028
## 40 1.0688 nan 0.0100 0.0020
## 60 0.9916 nan 0.0100 0.0014
## 80 0.9304 nan 0.0100 0.0012
## 100 0.8786 nan 0.0100 0.0008
## 120 0.8343 nan 0.0100 0.0007
## 140 0.8001 nan 0.0100 0.0006
## 160 0.7717 nan 0.0100 0.0003
## 180 0.7463 nan 0.0100 0.0003
## 200 0.7258 nan 0.0100 0.0002
## 220 0.7066 nan 0.0100 0.0000
## 240 0.6898 nan 0.0100 0.0001
## 260 0.6725 nan 0.0100 0.0001
## 280 0.6577 nan 0.0100 0.0001
## 300 0.6430 nan 0.0100 0.0001
## 320 0.6298 nan 0.0100 0.0001
## 340 0.6176 nan 0.0100 -0.0001
## 360 0.6067 nan 0.0100 0.0001
## 380 0.5954 nan 0.0100 -0.0001
## 400 0.5850 nan 0.0100 -0.0001
## 420 0.5757 nan 0.0100 -0.0000
## 440 0.5662 nan 0.0100 -0.0001
## 460 0.5566 nan 0.0100 -0.0002
## 480 0.5481 nan 0.0100 0.0001
## 500 0.5395 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3124 nan 0.0100 0.0037
## 2 1.3039 nan 0.0100 0.0040
## 3 1.2951 nan 0.0100 0.0036
## 4 1.2872 nan 0.0100 0.0033
## 5 1.2798 nan 0.0100 0.0032
## 6 1.2727 nan 0.0100 0.0034
## 7 1.2658 nan 0.0100 0.0030
## 8 1.2581 nan 0.0100 0.0034
## 9 1.2507 nan 0.0100 0.0034
## 10 1.2445 nan 0.0100 0.0030
## 20 1.1785 nan 0.0100 0.0026
## 40 1.0739 nan 0.0100 0.0022
## 60 0.9939 nan 0.0100 0.0017
## 80 0.9290 nan 0.0100 0.0013
## 100 0.8775 nan 0.0100 0.0008
## 120 0.8354 nan 0.0100 0.0006
## 140 0.8016 nan 0.0100 0.0004
## 160 0.7739 nan 0.0100 0.0006
## 180 0.7484 nan 0.0100 0.0004
## 200 0.7270 nan 0.0100 0.0001
## 220 0.7072 nan 0.0100 0.0001
## 240 0.6901 nan 0.0100 0.0001
## 260 0.6753 nan 0.0100 -0.0000
## 280 0.6611 nan 0.0100 0.0001
## 300 0.6486 nan 0.0100 0.0000
## 320 0.6360 nan 0.0100 0.0000
## 340 0.6233 nan 0.0100 0.0000
## 360 0.6127 nan 0.0100 0.0000
## 380 0.6021 nan 0.0100 -0.0000
## 400 0.5922 nan 0.0100 -0.0001
## 420 0.5837 nan 0.0100 -0.0001
## 440 0.5751 nan 0.0100 -0.0000
## 460 0.5665 nan 0.0100 -0.0001
## 480 0.5587 nan 0.0100 -0.0000
## 500 0.5507 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3133 nan 0.0100 0.0037
## 2 1.3048 nan 0.0100 0.0039
## 3 1.2962 nan 0.0100 0.0040
## 4 1.2882 nan 0.0100 0.0038
## 5 1.2810 nan 0.0100 0.0035
## 6 1.2741 nan 0.0100 0.0036
## 7 1.2667 nan 0.0100 0.0033
## 8 1.2587 nan 0.0100 0.0035
## 9 1.2509 nan 0.0100 0.0034
## 10 1.2432 nan 0.0100 0.0036
## 20 1.1772 nan 0.0100 0.0026
## 40 1.0734 nan 0.0100 0.0015
## 60 0.9919 nan 0.0100 0.0014
## 80 0.9288 nan 0.0100 0.0012
## 100 0.8779 nan 0.0100 0.0008
## 120 0.8372 nan 0.0100 0.0007
## 140 0.8035 nan 0.0100 0.0004
## 160 0.7753 nan 0.0100 0.0004
## 180 0.7506 nan 0.0100 0.0002
## 200 0.7292 nan 0.0100 0.0002
## 220 0.7102 nan 0.0100 0.0001
## 240 0.6939 nan 0.0100 0.0001
## 260 0.6792 nan 0.0100 0.0002
## 280 0.6664 nan 0.0100 0.0001
## 300 0.6539 nan 0.0100 0.0000
## 320 0.6426 nan 0.0100 0.0001
## 340 0.6315 nan 0.0100 -0.0001
## 360 0.6212 nan 0.0100 0.0001
## 380 0.6114 nan 0.0100 -0.0000
## 400 0.6014 nan 0.0100 -0.0002
## 420 0.5917 nan 0.0100 -0.0001
## 440 0.5825 nan 0.0100 -0.0002
## 460 0.5736 nan 0.0100 -0.0001
## 480 0.5651 nan 0.0100 -0.0000
## 500 0.5575 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3123 nan 0.0100 0.0042
## 2 1.3033 nan 0.0100 0.0042
## 3 1.2947 nan 0.0100 0.0039
## 4 1.2867 nan 0.0100 0.0033
## 5 1.2788 nan 0.0100 0.0035
## 6 1.2707 nan 0.0100 0.0037
## 7 1.2621 nan 0.0100 0.0034
## 8 1.2547 nan 0.0100 0.0032
## 9 1.2462 nan 0.0100 0.0036
## 10 1.2391 nan 0.0100 0.0031
## 20 1.1673 nan 0.0100 0.0027
## 40 1.0559 nan 0.0100 0.0021
## 60 0.9716 nan 0.0100 0.0014
## 80 0.9047 nan 0.0100 0.0012
## 100 0.8498 nan 0.0100 0.0009
## 120 0.8060 nan 0.0100 0.0006
## 140 0.7685 nan 0.0100 0.0005
## 160 0.7380 nan 0.0100 0.0005
## 180 0.7110 nan 0.0100 0.0004
## 200 0.6876 nan 0.0100 0.0002
## 220 0.6672 nan 0.0100 0.0002
## 240 0.6488 nan 0.0100 0.0002
## 260 0.6312 nan 0.0100 0.0002
## 280 0.6157 nan 0.0100 0.0001
## 300 0.6017 nan 0.0100 0.0002
## 320 0.5871 nan 0.0100 0.0001
## 340 0.5746 nan 0.0100 0.0001
## 360 0.5617 nan 0.0100 0.0000
## 380 0.5496 nan 0.0100 -0.0002
## 400 0.5383 nan 0.0100 0.0000
## 420 0.5275 nan 0.0100 0.0001
## 440 0.5172 nan 0.0100 0.0001
## 460 0.5074 nan 0.0100 -0.0001
## 480 0.4983 nan 0.0100 -0.0001
## 500 0.4882 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3123 nan 0.0100 0.0039
## 2 1.3043 nan 0.0100 0.0037
## 3 1.2954 nan 0.0100 0.0040
## 4 1.2874 nan 0.0100 0.0039
## 5 1.2795 nan 0.0100 0.0038
## 6 1.2707 nan 0.0100 0.0037
## 7 1.2630 nan 0.0100 0.0038
## 8 1.2549 nan 0.0100 0.0035
## 9 1.2472 nan 0.0100 0.0035
## 10 1.2392 nan 0.0100 0.0039
## 20 1.1699 nan 0.0100 0.0030
## 40 1.0579 nan 0.0100 0.0019
## 60 0.9747 nan 0.0100 0.0014
## 80 0.9077 nan 0.0100 0.0011
## 100 0.8550 nan 0.0100 0.0009
## 120 0.8117 nan 0.0100 0.0006
## 140 0.7761 nan 0.0100 0.0004
## 160 0.7455 nan 0.0100 0.0002
## 180 0.7189 nan 0.0100 0.0006
## 200 0.6954 nan 0.0100 0.0002
## 220 0.6759 nan 0.0100 0.0001
## 240 0.6591 nan 0.0100 0.0001
## 260 0.6420 nan 0.0100 -0.0001
## 280 0.6273 nan 0.0100 -0.0001
## 300 0.6118 nan 0.0100 0.0002
## 320 0.5979 nan 0.0100 0.0002
## 340 0.5856 nan 0.0100 0.0000
## 360 0.5737 nan 0.0100 -0.0001
## 380 0.5611 nan 0.0100 0.0001
## 400 0.5501 nan 0.0100 -0.0001
## 420 0.5393 nan 0.0100 -0.0001
## 440 0.5291 nan 0.0100 -0.0003
## 460 0.5194 nan 0.0100 -0.0001
## 480 0.5096 nan 0.0100 -0.0001
## 500 0.5009 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3123 nan 0.0100 0.0040
## 2 1.3029 nan 0.0100 0.0041
## 3 1.2945 nan 0.0100 0.0038
## 4 1.2856 nan 0.0100 0.0042
## 5 1.2775 nan 0.0100 0.0034
## 6 1.2694 nan 0.0100 0.0039
## 7 1.2613 nan 0.0100 0.0037
## 8 1.2533 nan 0.0100 0.0038
## 9 1.2454 nan 0.0100 0.0033
## 10 1.2374 nan 0.0100 0.0034
## 20 1.1705 nan 0.0100 0.0028
## 40 1.0593 nan 0.0100 0.0020
## 60 0.9740 nan 0.0100 0.0015
## 80 0.9088 nan 0.0100 0.0009
## 100 0.8558 nan 0.0100 0.0010
## 120 0.8142 nan 0.0100 0.0007
## 140 0.7791 nan 0.0100 0.0005
## 160 0.7515 nan 0.0100 0.0003
## 180 0.7271 nan 0.0100 0.0005
## 200 0.7053 nan 0.0100 0.0002
## 220 0.6859 nan 0.0100 0.0002
## 240 0.6686 nan 0.0100 0.0001
## 260 0.6516 nan 0.0100 0.0001
## 280 0.6373 nan 0.0100 -0.0001
## 300 0.6242 nan 0.0100 0.0000
## 320 0.6111 nan 0.0100 0.0001
## 340 0.5977 nan 0.0100 -0.0001
## 360 0.5855 nan 0.0100 0.0002
## 380 0.5730 nan 0.0100 0.0001
## 400 0.5622 nan 0.0100 -0.0001
## 420 0.5513 nan 0.0100 -0.0001
## 440 0.5414 nan 0.0100 0.0000
## 460 0.5321 nan 0.0100 -0.0002
## 480 0.5232 nan 0.0100 0.0000
## 500 0.5138 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3113 nan 0.0100 0.0047
## 2 1.3020 nan 0.0100 0.0043
## 3 1.2930 nan 0.0100 0.0041
## 4 1.2840 nan 0.0100 0.0038
## 5 1.2746 nan 0.0100 0.0042
## 6 1.2656 nan 0.0100 0.0040
## 7 1.2577 nan 0.0100 0.0034
## 8 1.2487 nan 0.0100 0.0042
## 9 1.2409 nan 0.0100 0.0034
## 10 1.2326 nan 0.0100 0.0034
## 20 1.1578 nan 0.0100 0.0026
## 40 1.0424 nan 0.0100 0.0021
## 60 0.9541 nan 0.0100 0.0014
## 80 0.8843 nan 0.0100 0.0012
## 100 0.8281 nan 0.0100 0.0010
## 120 0.7815 nan 0.0100 0.0007
## 140 0.7443 nan 0.0100 0.0005
## 160 0.7124 nan 0.0100 0.0002
## 180 0.6835 nan 0.0100 0.0002
## 200 0.6580 nan 0.0100 0.0002
## 220 0.6353 nan 0.0100 0.0002
## 240 0.6147 nan 0.0100 0.0000
## 260 0.5959 nan 0.0100 0.0001
## 280 0.5783 nan 0.0100 0.0001
## 300 0.5618 nan 0.0100 0.0000
## 320 0.5460 nan 0.0100 0.0000
## 340 0.5324 nan 0.0100 -0.0000
## 360 0.5190 nan 0.0100 -0.0002
## 380 0.5070 nan 0.0100 0.0000
## 400 0.4957 nan 0.0100 0.0000
## 420 0.4848 nan 0.0100 -0.0000
## 440 0.4725 nan 0.0100 0.0000
## 460 0.4621 nan 0.0100 0.0000
## 480 0.4515 nan 0.0100 0.0001
## 500 0.4415 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3118 nan 0.0100 0.0038
## 2 1.3028 nan 0.0100 0.0037
## 3 1.2939 nan 0.0100 0.0040
## 4 1.2853 nan 0.0100 0.0040
## 5 1.2764 nan 0.0100 0.0037
## 6 1.2675 nan 0.0100 0.0040
## 7 1.2579 nan 0.0100 0.0041
## 8 1.2492 nan 0.0100 0.0037
## 9 1.2415 nan 0.0100 0.0034
## 10 1.2333 nan 0.0100 0.0034
## 20 1.1605 nan 0.0100 0.0032
## 40 1.0467 nan 0.0100 0.0019
## 60 0.9590 nan 0.0100 0.0016
## 80 0.8899 nan 0.0100 0.0009
## 100 0.8349 nan 0.0100 0.0005
## 120 0.7897 nan 0.0100 0.0005
## 140 0.7527 nan 0.0100 0.0003
## 160 0.7215 nan 0.0100 0.0003
## 180 0.6941 nan 0.0100 0.0005
## 200 0.6693 nan 0.0100 0.0003
## 220 0.6472 nan 0.0100 0.0002
## 240 0.6275 nan 0.0100 0.0001
## 260 0.6089 nan 0.0100 -0.0002
## 280 0.5919 nan 0.0100 0.0001
## 300 0.5756 nan 0.0100 0.0001
## 320 0.5608 nan 0.0100 0.0002
## 340 0.5471 nan 0.0100 0.0001
## 360 0.5345 nan 0.0100 0.0001
## 380 0.5223 nan 0.0100 0.0000
## 400 0.5097 nan 0.0100 -0.0001
## 420 0.4982 nan 0.0100 0.0000
## 440 0.4871 nan 0.0100 0.0001
## 460 0.4757 nan 0.0100 0.0000
## 480 0.4659 nan 0.0100 -0.0002
## 500 0.4563 nan 0.0100 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3113 nan 0.0100 0.0045
## 2 1.3021 nan 0.0100 0.0036
## 3 1.2931 nan 0.0100 0.0043
## 4 1.2847 nan 0.0100 0.0038
## 5 1.2766 nan 0.0100 0.0040
## 6 1.2684 nan 0.0100 0.0037
## 7 1.2591 nan 0.0100 0.0040
## 8 1.2516 nan 0.0100 0.0036
## 9 1.2436 nan 0.0100 0.0037
## 10 1.2354 nan 0.0100 0.0037
## 20 1.1651 nan 0.0100 0.0025
## 40 1.0508 nan 0.0100 0.0021
## 60 0.9649 nan 0.0100 0.0016
## 80 0.8984 nan 0.0100 0.0013
## 100 0.8444 nan 0.0100 0.0010
## 120 0.7997 nan 0.0100 0.0008
## 140 0.7620 nan 0.0100 0.0004
## 160 0.7305 nan 0.0100 0.0004
## 180 0.7030 nan 0.0100 0.0003
## 200 0.6777 nan 0.0100 -0.0001
## 220 0.6568 nan 0.0100 0.0002
## 240 0.6377 nan 0.0100 0.0001
## 260 0.6195 nan 0.0100 0.0001
## 280 0.6025 nan 0.0100 0.0001
## 300 0.5872 nan 0.0100 -0.0000
## 320 0.5732 nan 0.0100 -0.0001
## 340 0.5583 nan 0.0100 0.0001
## 360 0.5465 nan 0.0100 -0.0001
## 380 0.5342 nan 0.0100 0.0001
## 400 0.5225 nan 0.0100 -0.0000
## 420 0.5114 nan 0.0100 -0.0000
## 440 0.5006 nan 0.0100 0.0000
## 460 0.4899 nan 0.0100 -0.0001
## 480 0.4800 nan 0.0100 -0.0001
## 500 0.4708 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2391 nan 0.1000 0.0378
## 2 1.1661 nan 0.1000 0.0317
## 3 1.1201 nan 0.1000 0.0183
## 4 1.0661 nan 0.1000 0.0239
## 5 1.0215 nan 0.1000 0.0195
## 6 0.9836 nan 0.1000 0.0119
## 7 0.9523 nan 0.1000 0.0145
## 8 0.9215 nan 0.1000 0.0097
## 9 0.8941 nan 0.1000 0.0103
## 10 0.8702 nan 0.1000 0.0093
## 20 0.7247 nan 0.1000 0.0021
## 40 0.5967 nan 0.1000 -0.0002
## 60 0.5048 nan 0.1000 0.0001
## 80 0.4321 nan 0.1000 0.0008
## 100 0.3812 nan 0.1000 -0.0002
## 120 0.3426 nan 0.1000 -0.0006
## 140 0.3008 nan 0.1000 -0.0001
## 160 0.2688 nan 0.1000 -0.0000
## 180 0.2423 nan 0.1000 -0.0008
## 200 0.2190 nan 0.1000 -0.0004
## 220 0.1969 nan 0.1000 -0.0007
## 240 0.1780 nan 0.1000 -0.0001
## 260 0.1600 nan 0.1000 -0.0005
## 280 0.1453 nan 0.1000 -0.0003
## 300 0.1315 nan 0.1000 -0.0002
## 320 0.1201 nan 0.1000 -0.0002
## 340 0.1099 nan 0.1000 -0.0002
## 360 0.1015 nan 0.1000 -0.0002
## 380 0.0941 nan 0.1000 -0.0003
## 400 0.0860 nan 0.1000 0.0001
## 420 0.0790 nan 0.1000 -0.0001
## 440 0.0731 nan 0.1000 -0.0002
## 460 0.0669 nan 0.1000 -0.0002
## 480 0.0620 nan 0.1000 -0.0001
## 500 0.0571 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2277 nan 0.1000 0.0409
## 2 1.1687 nan 0.1000 0.0253
## 3 1.1082 nan 0.1000 0.0242
## 4 1.0621 nan 0.1000 0.0203
## 5 1.0195 nan 0.1000 0.0167
## 6 0.9813 nan 0.1000 0.0146
## 7 0.9514 nan 0.1000 0.0130
## 8 0.9208 nan 0.1000 0.0114
## 9 0.8956 nan 0.1000 0.0100
## 10 0.8689 nan 0.1000 0.0090
## 20 0.7322 nan 0.1000 0.0018
## 40 0.5988 nan 0.1000 -0.0007
## 60 0.5237 nan 0.1000 -0.0004
## 80 0.4528 nan 0.1000 -0.0003
## 100 0.3995 nan 0.1000 -0.0004
## 120 0.3522 nan 0.1000 -0.0000
## 140 0.3144 nan 0.1000 -0.0001
## 160 0.2863 nan 0.1000 -0.0004
## 180 0.2578 nan 0.1000 -0.0007
## 200 0.2318 nan 0.1000 -0.0007
## 220 0.2083 nan 0.1000 -0.0004
## 240 0.1898 nan 0.1000 -0.0009
## 260 0.1724 nan 0.1000 -0.0005
## 280 0.1566 nan 0.1000 -0.0001
## 300 0.1435 nan 0.1000 -0.0003
## 320 0.1316 nan 0.1000 0.0001
## 340 0.1199 nan 0.1000 -0.0003
## 360 0.1104 nan 0.1000 -0.0005
## 380 0.1009 nan 0.1000 -0.0001
## 400 0.0920 nan 0.1000 -0.0003
## 420 0.0848 nan 0.1000 -0.0003
## 440 0.0782 nan 0.1000 -0.0004
## 460 0.0719 nan 0.1000 -0.0004
## 480 0.0660 nan 0.1000 -0.0002
## 500 0.0610 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2387 nan 0.1000 0.0404
## 2 1.1760 nan 0.1000 0.0281
## 3 1.1221 nan 0.1000 0.0243
## 4 1.0716 nan 0.1000 0.0198
## 5 1.0306 nan 0.1000 0.0188
## 6 0.9997 nan 0.1000 0.0122
## 7 0.9636 nan 0.1000 0.0139
## 8 0.9346 nan 0.1000 0.0127
## 9 0.9069 nan 0.1000 0.0099
## 10 0.8815 nan 0.1000 0.0064
## 20 0.7386 nan 0.1000 0.0021
## 40 0.6165 nan 0.1000 -0.0008
## 60 0.5297 nan 0.1000 0.0004
## 80 0.4759 nan 0.1000 -0.0003
## 100 0.4244 nan 0.1000 -0.0021
## 120 0.3734 nan 0.1000 -0.0014
## 140 0.3382 nan 0.1000 -0.0009
## 160 0.3072 nan 0.1000 -0.0016
## 180 0.2755 nan 0.1000 -0.0003
## 200 0.2510 nan 0.1000 -0.0007
## 220 0.2306 nan 0.1000 -0.0007
## 240 0.2104 nan 0.1000 -0.0004
## 260 0.1930 nan 0.1000 -0.0004
## 280 0.1767 nan 0.1000 -0.0000
## 300 0.1614 nan 0.1000 -0.0000
## 320 0.1483 nan 0.1000 -0.0010
## 340 0.1364 nan 0.1000 -0.0004
## 360 0.1272 nan 0.1000 -0.0007
## 380 0.1163 nan 0.1000 -0.0002
## 400 0.1070 nan 0.1000 -0.0007
## 420 0.0982 nan 0.1000 -0.0004
## 440 0.0909 nan 0.1000 -0.0002
## 460 0.0837 nan 0.1000 -0.0004
## 480 0.0782 nan 0.1000 -0.0004
## 500 0.0718 nan 0.1000 -0.0004
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2273 nan 0.1000 0.0399
## 2 1.1568 nan 0.1000 0.0319
## 3 1.1035 nan 0.1000 0.0249
## 4 1.0555 nan 0.1000 0.0217
## 5 1.0104 nan 0.1000 0.0196
## 6 0.9699 nan 0.1000 0.0157
## 7 0.9379 nan 0.1000 0.0131
## 8 0.9070 nan 0.1000 0.0127
## 9 0.8792 nan 0.1000 0.0106
## 10 0.8565 nan 0.1000 0.0102
## 20 0.6927 nan 0.1000 0.0023
## 40 0.5393 nan 0.1000 0.0003
## 60 0.4510 nan 0.1000 -0.0002
## 80 0.3787 nan 0.1000 0.0016
## 100 0.3228 nan 0.1000 -0.0005
## 120 0.2758 nan 0.1000 0.0000
## 140 0.2371 nan 0.1000 -0.0001
## 160 0.2103 nan 0.1000 -0.0001
## 180 0.1827 nan 0.1000 -0.0002
## 200 0.1595 nan 0.1000 -0.0003
## 220 0.1415 nan 0.1000 0.0000
## 240 0.1255 nan 0.1000 -0.0001
## 260 0.1109 nan 0.1000 -0.0003
## 280 0.0994 nan 0.1000 -0.0003
## 300 0.0880 nan 0.1000 -0.0003
## 320 0.0799 nan 0.1000 -0.0003
## 340 0.0719 nan 0.1000 -0.0003
## 360 0.0646 nan 0.1000 -0.0002
## 380 0.0581 nan 0.1000 -0.0002
## 400 0.0524 nan 0.1000 -0.0000
## 420 0.0472 nan 0.1000 -0.0001
## 440 0.0430 nan 0.1000 -0.0001
## 460 0.0387 nan 0.1000 -0.0001
## 480 0.0352 nan 0.1000 -0.0001
## 500 0.0316 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2338 nan 0.1000 0.0360
## 2 1.1604 nan 0.1000 0.0302
## 3 1.0991 nan 0.1000 0.0280
## 4 1.0456 nan 0.1000 0.0224
## 5 0.9960 nan 0.1000 0.0205
## 6 0.9615 nan 0.1000 0.0143
## 7 0.9245 nan 0.1000 0.0142
## 8 0.8935 nan 0.1000 0.0128
## 9 0.8660 nan 0.1000 0.0085
## 10 0.8449 nan 0.1000 0.0068
## 20 0.7004 nan 0.1000 0.0026
## 40 0.5568 nan 0.1000 0.0005
## 60 0.4691 nan 0.1000 0.0010
## 80 0.4006 nan 0.1000 -0.0008
## 100 0.3406 nan 0.1000 -0.0004
## 120 0.2947 nan 0.1000 -0.0003
## 140 0.2549 nan 0.1000 -0.0002
## 160 0.2220 nan 0.1000 -0.0004
## 180 0.1957 nan 0.1000 -0.0001
## 200 0.1729 nan 0.1000 -0.0006
## 220 0.1544 nan 0.1000 -0.0006
## 240 0.1371 nan 0.1000 -0.0007
## 260 0.1219 nan 0.1000 -0.0004
## 280 0.1090 nan 0.1000 -0.0007
## 300 0.0970 nan 0.1000 -0.0001
## 320 0.0852 nan 0.1000 -0.0002
## 340 0.0774 nan 0.1000 -0.0003
## 360 0.0702 nan 0.1000 -0.0002
## 380 0.0629 nan 0.1000 -0.0001
## 400 0.0572 nan 0.1000 -0.0002
## 420 0.0519 nan 0.1000 -0.0001
## 440 0.0465 nan 0.1000 -0.0001
## 460 0.0421 nan 0.1000 -0.0000
## 480 0.0383 nan 0.1000 -0.0001
## 500 0.0348 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2379 nan 0.1000 0.0365
## 2 1.1654 nan 0.1000 0.0354
## 3 1.1073 nan 0.1000 0.0235
## 4 1.0559 nan 0.1000 0.0171
## 5 1.0154 nan 0.1000 0.0177
## 6 0.9760 nan 0.1000 0.0169
## 7 0.9437 nan 0.1000 0.0149
## 8 0.9122 nan 0.1000 0.0110
## 9 0.8843 nan 0.1000 0.0113
## 10 0.8601 nan 0.1000 0.0082
## 20 0.7031 nan 0.1000 0.0018
## 40 0.5596 nan 0.1000 -0.0016
## 60 0.4727 nan 0.1000 -0.0000
## 80 0.4030 nan 0.1000 -0.0017
## 100 0.3501 nan 0.1000 -0.0014
## 120 0.3006 nan 0.1000 -0.0006
## 140 0.2666 nan 0.1000 -0.0004
## 160 0.2376 nan 0.1000 -0.0004
## 180 0.2120 nan 0.1000 -0.0005
## 200 0.1881 nan 0.1000 -0.0005
## 220 0.1686 nan 0.1000 -0.0004
## 240 0.1503 nan 0.1000 -0.0007
## 260 0.1350 nan 0.1000 -0.0004
## 280 0.1209 nan 0.1000 -0.0004
## 300 0.1085 nan 0.1000 -0.0004
## 320 0.0973 nan 0.1000 -0.0005
## 340 0.0888 nan 0.1000 -0.0003
## 360 0.0798 nan 0.1000 -0.0001
## 380 0.0714 nan 0.1000 -0.0000
## 400 0.0643 nan 0.1000 -0.0002
## 420 0.0586 nan 0.1000 -0.0002
## 440 0.0532 nan 0.1000 -0.0001
## 460 0.0479 nan 0.1000 -0.0001
## 480 0.0438 nan 0.1000 -0.0002
## 500 0.0394 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2405 nan 0.1000 0.0361
## 2 1.1596 nan 0.1000 0.0366
## 3 1.0962 nan 0.1000 0.0267
## 4 1.0431 nan 0.1000 0.0233
## 5 0.9928 nan 0.1000 0.0204
## 6 0.9538 nan 0.1000 0.0149
## 7 0.9199 nan 0.1000 0.0114
## 8 0.8890 nan 0.1000 0.0126
## 9 0.8552 nan 0.1000 0.0112
## 10 0.8280 nan 0.1000 0.0117
## 20 0.6651 nan 0.1000 -0.0008
## 40 0.4964 nan 0.1000 -0.0004
## 60 0.3997 nan 0.1000 -0.0006
## 80 0.3235 nan 0.1000 -0.0005
## 100 0.2724 nan 0.1000 -0.0004
## 120 0.2296 nan 0.1000 -0.0004
## 140 0.1952 nan 0.1000 -0.0007
## 160 0.1668 nan 0.1000 -0.0002
## 180 0.1417 nan 0.1000 -0.0000
## 200 0.1238 nan 0.1000 -0.0005
## 220 0.1054 nan 0.1000 -0.0000
## 240 0.0917 nan 0.1000 -0.0001
## 260 0.0795 nan 0.1000 0.0000
## 280 0.0688 nan 0.1000 -0.0002
## 300 0.0610 nan 0.1000 -0.0002
## 320 0.0537 nan 0.1000 -0.0001
## 340 0.0477 nan 0.1000 -0.0002
## 360 0.0421 nan 0.1000 -0.0002
## 380 0.0375 nan 0.1000 -0.0001
## 400 0.0333 nan 0.1000 -0.0000
## 420 0.0298 nan 0.1000 -0.0000
## 440 0.0264 nan 0.1000 -0.0000
## 460 0.0234 nan 0.1000 -0.0000
## 480 0.0207 nan 0.1000 -0.0001
## 500 0.0182 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2285 nan 0.1000 0.0436
## 2 1.1459 nan 0.1000 0.0354
## 3 1.0861 nan 0.1000 0.0239
## 4 1.0300 nan 0.1000 0.0258
## 5 0.9835 nan 0.1000 0.0208
## 6 0.9450 nan 0.1000 0.0146
## 7 0.9073 nan 0.1000 0.0138
## 8 0.8768 nan 0.1000 0.0110
## 9 0.8499 nan 0.1000 0.0078
## 10 0.8264 nan 0.1000 0.0063
## 20 0.6655 nan 0.1000 0.0021
## 40 0.5048 nan 0.1000 -0.0012
## 60 0.4093 nan 0.1000 -0.0017
## 80 0.3326 nan 0.1000 -0.0000
## 100 0.2759 nan 0.1000 0.0002
## 120 0.2327 nan 0.1000 -0.0006
## 140 0.1999 nan 0.1000 -0.0004
## 160 0.1711 nan 0.1000 -0.0003
## 180 0.1483 nan 0.1000 -0.0006
## 200 0.1263 nan 0.1000 -0.0004
## 220 0.1085 nan 0.1000 -0.0003
## 240 0.0948 nan 0.1000 -0.0002
## 260 0.0832 nan 0.1000 -0.0005
## 280 0.0727 nan 0.1000 -0.0001
## 300 0.0632 nan 0.1000 -0.0003
## 320 0.0554 nan 0.1000 -0.0001
## 340 0.0481 nan 0.1000 -0.0000
## 360 0.0424 nan 0.1000 -0.0001
## 380 0.0367 nan 0.1000 -0.0001
## 400 0.0324 nan 0.1000 -0.0001
## 420 0.0287 nan 0.1000 -0.0001
## 440 0.0250 nan 0.1000 -0.0001
## 460 0.0222 nan 0.1000 -0.0001
## 480 0.0197 nan 0.1000 -0.0001
## 500 0.0174 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2392 nan 0.1000 0.0370
## 2 1.1641 nan 0.1000 0.0323
## 3 1.1025 nan 0.1000 0.0240
## 4 1.0496 nan 0.1000 0.0207
## 5 0.9999 nan 0.1000 0.0211
## 6 0.9648 nan 0.1000 0.0155
## 7 0.9265 nan 0.1000 0.0127
## 8 0.8973 nan 0.1000 0.0097
## 9 0.8681 nan 0.1000 0.0112
## 10 0.8423 nan 0.1000 0.0107
## 20 0.6891 nan 0.1000 0.0007
## 40 0.5401 nan 0.1000 -0.0004
## 60 0.4396 nan 0.1000 -0.0016
## 80 0.3600 nan 0.1000 -0.0001
## 100 0.3052 nan 0.1000 -0.0002
## 120 0.2567 nan 0.1000 -0.0012
## 140 0.2217 nan 0.1000 -0.0007
## 160 0.1913 nan 0.1000 -0.0007
## 180 0.1674 nan 0.1000 -0.0007
## 200 0.1439 nan 0.1000 -0.0001
## 220 0.1263 nan 0.1000 -0.0010
## 240 0.1102 nan 0.1000 -0.0001
## 260 0.0970 nan 0.1000 -0.0002
## 280 0.0863 nan 0.1000 -0.0003
## 300 0.0757 nan 0.1000 -0.0004
## 320 0.0669 nan 0.1000 -0.0004
## 340 0.0586 nan 0.1000 -0.0002
## 360 0.0517 nan 0.1000 -0.0002
## 380 0.0457 nan 0.1000 -0.0001
## 400 0.0404 nan 0.1000 -0.0002
## 420 0.0357 nan 0.1000 -0.0001
## 440 0.0319 nan 0.1000 -0.0001
## 460 0.0281 nan 0.1000 -0.0001
## 480 0.0252 nan 0.1000 -0.0001
## 500 0.0220 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3199 nan 0.0010 0.0003
## 2 1.3191 nan 0.0010 0.0003
## 3 1.3183 nan 0.0010 0.0004
## 4 1.3175 nan 0.0010 0.0004
## 5 1.3167 nan 0.0010 0.0003
## 6 1.3158 nan 0.0010 0.0004
## 7 1.3149 nan 0.0010 0.0004
## 8 1.3142 nan 0.0010 0.0003
## 9 1.3134 nan 0.0010 0.0004
## 10 1.3126 nan 0.0010 0.0004
## 20 1.3048 nan 0.0010 0.0004
## 40 1.2894 nan 0.0010 0.0003
## 60 1.2747 nan 0.0010 0.0003
## 80 1.2607 nan 0.0010 0.0003
## 100 1.2471 nan 0.0010 0.0003
## 120 1.2337 nan 0.0010 0.0003
## 140 1.2205 nan 0.0010 0.0003
## 160 1.2074 nan 0.0010 0.0003
## 180 1.1951 nan 0.0010 0.0003
## 200 1.1831 nan 0.0010 0.0003
## 220 1.1715 nan 0.0010 0.0003
## 240 1.1604 nan 0.0010 0.0003
## 260 1.1494 nan 0.0010 0.0002
## 280 1.1388 nan 0.0010 0.0003
## 300 1.1285 nan 0.0010 0.0002
## 320 1.1181 nan 0.0010 0.0002
## 340 1.1083 nan 0.0010 0.0002
## 360 1.0985 nan 0.0010 0.0002
## 380 1.0890 nan 0.0010 0.0002
## 400 1.0798 nan 0.0010 0.0002
## 420 1.0711 nan 0.0010 0.0002
## 440 1.0622 nan 0.0010 0.0002
## 460 1.0538 nan 0.0010 0.0002
## 480 1.0455 nan 0.0010 0.0002
## 500 1.0373 nan 0.0010 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3191 nan 0.0010 0.0003
## 3 1.3182 nan 0.0010 0.0004
## 4 1.3174 nan 0.0010 0.0003
## 5 1.3166 nan 0.0010 0.0004
## 6 1.3158 nan 0.0010 0.0003
## 7 1.3150 nan 0.0010 0.0003
## 8 1.3143 nan 0.0010 0.0004
## 9 1.3136 nan 0.0010 0.0003
## 10 1.3127 nan 0.0010 0.0004
## 20 1.3049 nan 0.0010 0.0004
## 40 1.2898 nan 0.0010 0.0003
## 60 1.2747 nan 0.0010 0.0003
## 80 1.2603 nan 0.0010 0.0003
## 100 1.2461 nan 0.0010 0.0003
## 120 1.2326 nan 0.0010 0.0003
## 140 1.2198 nan 0.0010 0.0003
## 160 1.2070 nan 0.0010 0.0003
## 180 1.1946 nan 0.0010 0.0003
## 200 1.1825 nan 0.0010 0.0003
## 220 1.1708 nan 0.0010 0.0003
## 240 1.1595 nan 0.0010 0.0002
## 260 1.1486 nan 0.0010 0.0002
## 280 1.1380 nan 0.0010 0.0002
## 300 1.1275 nan 0.0010 0.0002
## 320 1.1175 nan 0.0010 0.0002
## 340 1.1077 nan 0.0010 0.0002
## 360 1.0981 nan 0.0010 0.0002
## 380 1.0888 nan 0.0010 0.0002
## 400 1.0797 nan 0.0010 0.0002
## 420 1.0709 nan 0.0010 0.0002
## 440 1.0622 nan 0.0010 0.0002
## 460 1.0538 nan 0.0010 0.0002
## 480 1.0458 nan 0.0010 0.0001
## 500 1.0379 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3199 nan 0.0010 0.0004
## 2 1.3191 nan 0.0010 0.0003
## 3 1.3183 nan 0.0010 0.0004
## 4 1.3174 nan 0.0010 0.0004
## 5 1.3166 nan 0.0010 0.0003
## 6 1.3158 nan 0.0010 0.0004
## 7 1.3151 nan 0.0010 0.0003
## 8 1.3143 nan 0.0010 0.0004
## 9 1.3135 nan 0.0010 0.0004
## 10 1.3126 nan 0.0010 0.0004
## 20 1.3049 nan 0.0010 0.0003
## 40 1.2899 nan 0.0010 0.0003
## 60 1.2750 nan 0.0010 0.0003
## 80 1.2606 nan 0.0010 0.0003
## 100 1.2469 nan 0.0010 0.0003
## 120 1.2333 nan 0.0010 0.0003
## 140 1.2205 nan 0.0010 0.0003
## 160 1.2080 nan 0.0010 0.0003
## 180 1.1958 nan 0.0010 0.0003
## 200 1.1841 nan 0.0010 0.0002
## 220 1.1726 nan 0.0010 0.0003
## 240 1.1615 nan 0.0010 0.0002
## 260 1.1504 nan 0.0010 0.0002
## 280 1.1397 nan 0.0010 0.0002
## 300 1.1296 nan 0.0010 0.0002
## 320 1.1198 nan 0.0010 0.0002
## 340 1.1100 nan 0.0010 0.0002
## 360 1.1006 nan 0.0010 0.0002
## 380 1.0911 nan 0.0010 0.0002
## 400 1.0821 nan 0.0010 0.0002
## 420 1.0734 nan 0.0010 0.0002
## 440 1.0650 nan 0.0010 0.0002
## 460 1.0566 nan 0.0010 0.0002
## 480 1.0482 nan 0.0010 0.0002
## 500 1.0404 nan 0.0010 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3197 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3180 nan 0.0010 0.0004
## 4 1.3171 nan 0.0010 0.0004
## 5 1.3163 nan 0.0010 0.0004
## 6 1.3154 nan 0.0010 0.0004
## 7 1.3146 nan 0.0010 0.0004
## 8 1.3137 nan 0.0010 0.0004
## 9 1.3129 nan 0.0010 0.0004
## 10 1.3120 nan 0.0010 0.0004
## 20 1.3034 nan 0.0010 0.0004
## 40 1.2870 nan 0.0010 0.0004
## 60 1.2711 nan 0.0010 0.0004
## 80 1.2560 nan 0.0010 0.0004
## 100 1.2412 nan 0.0010 0.0003
## 120 1.2268 nan 0.0010 0.0003
## 140 1.2128 nan 0.0010 0.0003
## 160 1.1992 nan 0.0010 0.0003
## 180 1.1862 nan 0.0010 0.0003
## 200 1.1735 nan 0.0010 0.0003
## 220 1.1612 nan 0.0010 0.0003
## 240 1.1492 nan 0.0010 0.0003
## 260 1.1374 nan 0.0010 0.0002
## 280 1.1261 nan 0.0010 0.0002
## 300 1.1148 nan 0.0010 0.0002
## 320 1.1041 nan 0.0010 0.0002
## 340 1.0936 nan 0.0010 0.0002
## 360 1.0836 nan 0.0010 0.0002
## 380 1.0738 nan 0.0010 0.0002
## 400 1.0641 nan 0.0010 0.0002
## 420 1.0547 nan 0.0010 0.0002
## 440 1.0455 nan 0.0010 0.0002
## 460 1.0366 nan 0.0010 0.0002
## 480 1.0282 nan 0.0010 0.0002
## 500 1.0199 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3188 nan 0.0010 0.0004
## 3 1.3180 nan 0.0010 0.0003
## 4 1.3171 nan 0.0010 0.0004
## 5 1.3162 nan 0.0010 0.0003
## 6 1.3154 nan 0.0010 0.0004
## 7 1.3145 nan 0.0010 0.0004
## 8 1.3136 nan 0.0010 0.0004
## 9 1.3128 nan 0.0010 0.0004
## 10 1.3119 nan 0.0010 0.0004
## 20 1.3036 nan 0.0010 0.0004
## 40 1.2872 nan 0.0010 0.0004
## 60 1.2715 nan 0.0010 0.0004
## 80 1.2561 nan 0.0010 0.0003
## 100 1.2410 nan 0.0010 0.0003
## 120 1.2268 nan 0.0010 0.0003
## 140 1.2130 nan 0.0010 0.0003
## 160 1.1997 nan 0.0010 0.0003
## 180 1.1869 nan 0.0010 0.0003
## 200 1.1741 nan 0.0010 0.0002
## 220 1.1619 nan 0.0010 0.0003
## 240 1.1499 nan 0.0010 0.0003
## 260 1.1382 nan 0.0010 0.0002
## 280 1.1269 nan 0.0010 0.0003
## 300 1.1159 nan 0.0010 0.0002
## 320 1.1053 nan 0.0010 0.0002
## 340 1.0950 nan 0.0010 0.0002
## 360 1.0848 nan 0.0010 0.0002
## 380 1.0748 nan 0.0010 0.0002
## 400 1.0653 nan 0.0010 0.0002
## 420 1.0561 nan 0.0010 0.0002
## 440 1.0467 nan 0.0010 0.0002
## 460 1.0379 nan 0.0010 0.0002
## 480 1.0294 nan 0.0010 0.0002
## 500 1.0209 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3190 nan 0.0010 0.0004
## 3 1.3182 nan 0.0010 0.0004
## 4 1.3174 nan 0.0010 0.0003
## 5 1.3164 nan 0.0010 0.0004
## 6 1.3156 nan 0.0010 0.0003
## 7 1.3148 nan 0.0010 0.0004
## 8 1.3139 nan 0.0010 0.0003
## 9 1.3132 nan 0.0010 0.0004
## 10 1.3124 nan 0.0010 0.0004
## 20 1.3042 nan 0.0010 0.0004
## 40 1.2878 nan 0.0010 0.0003
## 60 1.2724 nan 0.0010 0.0003
## 80 1.2573 nan 0.0010 0.0003
## 100 1.2425 nan 0.0010 0.0003
## 120 1.2282 nan 0.0010 0.0003
## 140 1.2145 nan 0.0010 0.0003
## 160 1.2012 nan 0.0010 0.0003
## 180 1.1881 nan 0.0010 0.0003
## 200 1.1753 nan 0.0010 0.0003
## 220 1.1627 nan 0.0010 0.0003
## 240 1.1505 nan 0.0010 0.0003
## 260 1.1392 nan 0.0010 0.0002
## 280 1.1277 nan 0.0010 0.0002
## 300 1.1168 nan 0.0010 0.0003
## 320 1.1061 nan 0.0010 0.0002
## 340 1.0960 nan 0.0010 0.0002
## 360 1.0861 nan 0.0010 0.0002
## 380 1.0764 nan 0.0010 0.0002
## 400 1.0670 nan 0.0010 0.0002
## 420 1.0576 nan 0.0010 0.0002
## 440 1.0485 nan 0.0010 0.0002
## 460 1.0396 nan 0.0010 0.0002
## 480 1.0313 nan 0.0010 0.0002
## 500 1.0229 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3197 nan 0.0010 0.0004
## 2 1.3187 nan 0.0010 0.0004
## 3 1.3178 nan 0.0010 0.0004
## 4 1.3168 nan 0.0010 0.0004
## 5 1.3159 nan 0.0010 0.0004
## 6 1.3150 nan 0.0010 0.0004
## 7 1.3140 nan 0.0010 0.0004
## 8 1.3132 nan 0.0010 0.0004
## 9 1.3122 nan 0.0010 0.0004
## 10 1.3114 nan 0.0010 0.0004
## 20 1.3025 nan 0.0010 0.0004
## 40 1.2850 nan 0.0010 0.0003
## 60 1.2683 nan 0.0010 0.0004
## 80 1.2520 nan 0.0010 0.0003
## 100 1.2362 nan 0.0010 0.0003
## 120 1.2211 nan 0.0010 0.0003
## 140 1.2066 nan 0.0010 0.0003
## 160 1.1926 nan 0.0010 0.0003
## 180 1.1788 nan 0.0010 0.0003
## 200 1.1653 nan 0.0010 0.0003
## 220 1.1522 nan 0.0010 0.0003
## 240 1.1399 nan 0.0010 0.0003
## 260 1.1278 nan 0.0010 0.0003
## 280 1.1157 nan 0.0010 0.0003
## 300 1.1042 nan 0.0010 0.0002
## 320 1.0929 nan 0.0010 0.0002
## 340 1.0818 nan 0.0010 0.0002
## 360 1.0711 nan 0.0010 0.0002
## 380 1.0606 nan 0.0010 0.0002
## 400 1.0506 nan 0.0010 0.0002
## 420 1.0407 nan 0.0010 0.0002
## 440 1.0313 nan 0.0010 0.0002
## 460 1.0222 nan 0.0010 0.0002
## 480 1.0133 nan 0.0010 0.0002
## 500 1.0045 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3180 nan 0.0010 0.0004
## 4 1.3171 nan 0.0010 0.0004
## 5 1.3162 nan 0.0010 0.0004
## 6 1.3153 nan 0.0010 0.0004
## 7 1.3144 nan 0.0010 0.0004
## 8 1.3135 nan 0.0010 0.0004
## 9 1.3126 nan 0.0010 0.0004
## 10 1.3117 nan 0.0010 0.0004
## 20 1.3030 nan 0.0010 0.0004
## 40 1.2860 nan 0.0010 0.0004
## 60 1.2693 nan 0.0010 0.0003
## 80 1.2531 nan 0.0010 0.0004
## 100 1.2372 nan 0.0010 0.0004
## 120 1.2222 nan 0.0010 0.0003
## 140 1.2073 nan 0.0010 0.0003
## 160 1.1931 nan 0.0010 0.0003
## 180 1.1794 nan 0.0010 0.0003
## 200 1.1662 nan 0.0010 0.0003
## 220 1.1534 nan 0.0010 0.0003
## 240 1.1410 nan 0.0010 0.0003
## 260 1.1292 nan 0.0010 0.0002
## 280 1.1174 nan 0.0010 0.0003
## 300 1.1058 nan 0.0010 0.0003
## 320 1.0947 nan 0.0010 0.0003
## 340 1.0840 nan 0.0010 0.0002
## 360 1.0733 nan 0.0010 0.0002
## 380 1.0631 nan 0.0010 0.0002
## 400 1.0533 nan 0.0010 0.0002
## 420 1.0434 nan 0.0010 0.0002
## 440 1.0340 nan 0.0010 0.0002
## 460 1.0248 nan 0.0010 0.0002
## 480 1.0159 nan 0.0010 0.0002
## 500 1.0073 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3188 nan 0.0010 0.0005
## 3 1.3180 nan 0.0010 0.0004
## 4 1.3171 nan 0.0010 0.0004
## 5 1.3162 nan 0.0010 0.0004
## 6 1.3153 nan 0.0010 0.0004
## 7 1.3144 nan 0.0010 0.0004
## 8 1.3135 nan 0.0010 0.0004
## 9 1.3127 nan 0.0010 0.0004
## 10 1.3118 nan 0.0010 0.0004
## 20 1.3032 nan 0.0010 0.0004
## 40 1.2864 nan 0.0010 0.0004
## 60 1.2701 nan 0.0010 0.0004
## 80 1.2542 nan 0.0010 0.0003
## 100 1.2388 nan 0.0010 0.0004
## 120 1.2241 nan 0.0010 0.0003
## 140 1.2097 nan 0.0010 0.0003
## 160 1.1955 nan 0.0010 0.0003
## 180 1.1821 nan 0.0010 0.0002
## 200 1.1692 nan 0.0010 0.0003
## 220 1.1564 nan 0.0010 0.0003
## 240 1.1439 nan 0.0010 0.0003
## 260 1.1321 nan 0.0010 0.0002
## 280 1.1200 nan 0.0010 0.0002
## 300 1.1089 nan 0.0010 0.0002
## 320 1.0980 nan 0.0010 0.0002
## 340 1.0873 nan 0.0010 0.0002
## 360 1.0771 nan 0.0010 0.0002
## 380 1.0669 nan 0.0010 0.0002
## 400 1.0569 nan 0.0010 0.0002
## 420 1.0472 nan 0.0010 0.0002
## 440 1.0377 nan 0.0010 0.0002
## 460 1.0287 nan 0.0010 0.0002
## 480 1.0199 nan 0.0010 0.0002
## 500 1.0112 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3122 nan 0.0100 0.0039
## 2 1.3048 nan 0.0100 0.0033
## 3 1.2977 nan 0.0100 0.0033
## 4 1.2893 nan 0.0100 0.0037
## 5 1.2820 nan 0.0100 0.0036
## 6 1.2753 nan 0.0100 0.0026
## 7 1.2692 nan 0.0100 0.0024
## 8 1.2624 nan 0.0100 0.0032
## 9 1.2555 nan 0.0100 0.0033
## 10 1.2473 nan 0.0100 0.0035
## 20 1.1856 nan 0.0100 0.0023
## 40 1.0800 nan 0.0100 0.0022
## 60 0.9989 nan 0.0100 0.0014
## 80 0.9386 nan 0.0100 0.0012
## 100 0.8885 nan 0.0100 0.0009
## 120 0.8466 nan 0.0100 0.0005
## 140 0.8122 nan 0.0100 0.0005
## 160 0.7816 nan 0.0100 0.0004
## 180 0.7561 nan 0.0100 0.0004
## 200 0.7341 nan 0.0100 0.0003
## 220 0.7159 nan 0.0100 0.0001
## 240 0.6989 nan 0.0100 0.0001
## 260 0.6830 nan 0.0100 0.0000
## 280 0.6686 nan 0.0100 0.0001
## 300 0.6545 nan 0.0100 0.0001
## 320 0.6422 nan 0.0100 -0.0000
## 340 0.6312 nan 0.0100 0.0002
## 360 0.6202 nan 0.0100 -0.0002
## 380 0.6104 nan 0.0100 0.0001
## 400 0.6010 nan 0.0100 -0.0002
## 420 0.5917 nan 0.0100 0.0000
## 440 0.5822 nan 0.0100 -0.0000
## 460 0.5736 nan 0.0100 -0.0001
## 480 0.5645 nan 0.0100 -0.0001
## 500 0.5568 nan 0.0100 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3122 nan 0.0100 0.0039
## 2 1.3044 nan 0.0100 0.0035
## 3 1.2969 nan 0.0100 0.0035
## 4 1.2897 nan 0.0100 0.0028
## 5 1.2822 nan 0.0100 0.0031
## 6 1.2753 nan 0.0100 0.0032
## 7 1.2677 nan 0.0100 0.0031
## 8 1.2606 nan 0.0100 0.0030
## 9 1.2536 nan 0.0100 0.0032
## 10 1.2466 nan 0.0100 0.0028
## 20 1.1821 nan 0.0100 0.0027
## 40 1.0803 nan 0.0100 0.0019
## 60 1.0009 nan 0.0100 0.0014
## 80 0.9373 nan 0.0100 0.0011
## 100 0.8882 nan 0.0100 0.0009
## 120 0.8481 nan 0.0100 0.0006
## 140 0.8140 nan 0.0100 0.0005
## 160 0.7853 nan 0.0100 0.0002
## 180 0.7601 nan 0.0100 0.0003
## 200 0.7395 nan 0.0100 0.0005
## 220 0.7209 nan 0.0100 0.0000
## 240 0.7038 nan 0.0100 0.0001
## 260 0.6879 nan 0.0100 0.0001
## 280 0.6731 nan 0.0100 0.0000
## 300 0.6600 nan 0.0100 0.0002
## 320 0.6470 nan 0.0100 -0.0000
## 340 0.6359 nan 0.0100 -0.0000
## 360 0.6263 nan 0.0100 0.0000
## 380 0.6162 nan 0.0100 0.0001
## 400 0.6067 nan 0.0100 0.0000
## 420 0.5966 nan 0.0100 0.0001
## 440 0.5865 nan 0.0100 0.0001
## 460 0.5768 nan 0.0100 0.0000
## 480 0.5684 nan 0.0100 -0.0002
## 500 0.5608 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3131 nan 0.0100 0.0036
## 2 1.3050 nan 0.0100 0.0037
## 3 1.2980 nan 0.0100 0.0029
## 4 1.2901 nan 0.0100 0.0033
## 5 1.2821 nan 0.0100 0.0032
## 6 1.2748 nan 0.0100 0.0033
## 7 1.2673 nan 0.0100 0.0031
## 8 1.2601 nan 0.0100 0.0033
## 9 1.2535 nan 0.0100 0.0029
## 10 1.2465 nan 0.0100 0.0031
## 20 1.1824 nan 0.0100 0.0030
## 40 1.0804 nan 0.0100 0.0020
## 60 1.0009 nan 0.0100 0.0014
## 80 0.9375 nan 0.0100 0.0012
## 100 0.8872 nan 0.0100 0.0008
## 120 0.8469 nan 0.0100 0.0007
## 140 0.8148 nan 0.0100 0.0004
## 160 0.7868 nan 0.0100 0.0004
## 180 0.7632 nan 0.0100 0.0003
## 200 0.7417 nan 0.0100 0.0003
## 220 0.7241 nan 0.0100 0.0002
## 240 0.7082 nan 0.0100 0.0001
## 260 0.6948 nan 0.0100 0.0001
## 280 0.6818 nan 0.0100 0.0002
## 300 0.6695 nan 0.0100 0.0001
## 320 0.6584 nan 0.0100 0.0000
## 340 0.6475 nan 0.0100 0.0001
## 360 0.6387 nan 0.0100 0.0000
## 380 0.6293 nan 0.0100 0.0002
## 400 0.6195 nan 0.0100 -0.0000
## 420 0.6105 nan 0.0100 -0.0003
## 440 0.6023 nan 0.0100 -0.0000
## 460 0.5936 nan 0.0100 0.0001
## 480 0.5855 nan 0.0100 -0.0001
## 500 0.5777 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3122 nan 0.0100 0.0038
## 2 1.3035 nan 0.0100 0.0036
## 3 1.2951 nan 0.0100 0.0040
## 4 1.2868 nan 0.0100 0.0035
## 5 1.2785 nan 0.0100 0.0038
## 6 1.2705 nan 0.0100 0.0033
## 7 1.2628 nan 0.0100 0.0034
## 8 1.2555 nan 0.0100 0.0034
## 9 1.2474 nan 0.0100 0.0038
## 10 1.2397 nan 0.0100 0.0031
## 20 1.1726 nan 0.0100 0.0028
## 40 1.0634 nan 0.0100 0.0021
## 60 0.9787 nan 0.0100 0.0015
## 80 0.9137 nan 0.0100 0.0011
## 100 0.8607 nan 0.0100 0.0008
## 120 0.8175 nan 0.0100 0.0005
## 140 0.7815 nan 0.0100 0.0004
## 160 0.7507 nan 0.0100 0.0002
## 180 0.7242 nan 0.0100 0.0003
## 200 0.7015 nan 0.0100 0.0003
## 220 0.6809 nan 0.0100 0.0002
## 240 0.6623 nan 0.0100 0.0002
## 260 0.6456 nan 0.0100 0.0001
## 280 0.6300 nan 0.0100 0.0002
## 300 0.6159 nan 0.0100 -0.0002
## 320 0.6014 nan 0.0100 0.0001
## 340 0.5873 nan 0.0100 0.0001
## 360 0.5756 nan 0.0100 0.0002
## 380 0.5629 nan 0.0100 0.0001
## 400 0.5509 nan 0.0100 0.0000
## 420 0.5405 nan 0.0100 -0.0000
## 440 0.5296 nan 0.0100 0.0001
## 460 0.5201 nan 0.0100 -0.0000
## 480 0.5109 nan 0.0100 0.0001
## 500 0.5014 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3121 nan 0.0100 0.0037
## 2 1.3039 nan 0.0100 0.0038
## 3 1.2960 nan 0.0100 0.0042
## 4 1.2877 nan 0.0100 0.0036
## 5 1.2793 nan 0.0100 0.0035
## 6 1.2711 nan 0.0100 0.0035
## 7 1.2640 nan 0.0100 0.0029
## 8 1.2572 nan 0.0100 0.0030
## 9 1.2499 nan 0.0100 0.0035
## 10 1.2420 nan 0.0100 0.0032
## 20 1.1745 nan 0.0100 0.0027
## 40 1.0675 nan 0.0100 0.0020
## 60 0.9838 nan 0.0100 0.0016
## 80 0.9177 nan 0.0100 0.0012
## 100 0.8658 nan 0.0100 0.0010
## 120 0.8227 nan 0.0100 0.0008
## 140 0.7875 nan 0.0100 0.0004
## 160 0.7573 nan 0.0100 0.0004
## 180 0.7316 nan 0.0100 0.0002
## 200 0.7085 nan 0.0100 0.0002
## 220 0.6881 nan 0.0100 0.0000
## 240 0.6708 nan 0.0100 -0.0000
## 260 0.6543 nan 0.0100 0.0001
## 280 0.6396 nan 0.0100 0.0000
## 300 0.6248 nan 0.0100 0.0001
## 320 0.6106 nan 0.0100 -0.0002
## 340 0.5977 nan 0.0100 -0.0002
## 360 0.5859 nan 0.0100 0.0001
## 380 0.5746 nan 0.0100 -0.0000
## 400 0.5641 nan 0.0100 -0.0001
## 420 0.5537 nan 0.0100 -0.0001
## 440 0.5438 nan 0.0100 -0.0001
## 460 0.5341 nan 0.0100 -0.0000
## 480 0.5242 nan 0.0100 -0.0002
## 500 0.5149 nan 0.0100 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3126 nan 0.0100 0.0035
## 2 1.3045 nan 0.0100 0.0040
## 3 1.2968 nan 0.0100 0.0038
## 4 1.2886 nan 0.0100 0.0038
## 5 1.2803 nan 0.0100 0.0036
## 6 1.2727 nan 0.0100 0.0033
## 7 1.2647 nan 0.0100 0.0037
## 8 1.2574 nan 0.0100 0.0033
## 9 1.2498 nan 0.0100 0.0036
## 10 1.2424 nan 0.0100 0.0033
## 20 1.1763 nan 0.0100 0.0027
## 40 1.0691 nan 0.0100 0.0019
## 60 0.9849 nan 0.0100 0.0013
## 80 0.9203 nan 0.0100 0.0011
## 100 0.8673 nan 0.0100 0.0008
## 120 0.8243 nan 0.0100 0.0007
## 140 0.7905 nan 0.0100 0.0006
## 160 0.7613 nan 0.0100 0.0003
## 180 0.7364 nan 0.0100 0.0003
## 200 0.7141 nan 0.0100 0.0003
## 220 0.6936 nan 0.0100 0.0001
## 240 0.6754 nan 0.0100 0.0002
## 260 0.6601 nan 0.0100 -0.0000
## 280 0.6444 nan 0.0100 0.0001
## 300 0.6308 nan 0.0100 -0.0000
## 320 0.6172 nan 0.0100 -0.0001
## 340 0.6046 nan 0.0100 0.0001
## 360 0.5929 nan 0.0100 -0.0001
## 380 0.5813 nan 0.0100 0.0000
## 400 0.5715 nan 0.0100 0.0001
## 420 0.5606 nan 0.0100 -0.0001
## 440 0.5507 nan 0.0100 0.0001
## 460 0.5415 nan 0.0100 -0.0000
## 480 0.5325 nan 0.0100 0.0001
## 500 0.5239 nan 0.0100 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3120 nan 0.0100 0.0036
## 2 1.3021 nan 0.0100 0.0042
## 3 1.2937 nan 0.0100 0.0036
## 4 1.2846 nan 0.0100 0.0040
## 5 1.2764 nan 0.0100 0.0039
## 6 1.2683 nan 0.0100 0.0038
## 7 1.2605 nan 0.0100 0.0037
## 8 1.2528 nan 0.0100 0.0034
## 9 1.2448 nan 0.0100 0.0034
## 10 1.2372 nan 0.0100 0.0035
## 20 1.1665 nan 0.0100 0.0026
## 40 1.0525 nan 0.0100 0.0017
## 60 0.9656 nan 0.0100 0.0012
## 80 0.8969 nan 0.0100 0.0010
## 100 0.8425 nan 0.0100 0.0010
## 120 0.7958 nan 0.0100 0.0006
## 140 0.7580 nan 0.0100 0.0004
## 160 0.7256 nan 0.0100 0.0001
## 180 0.6970 nan 0.0100 0.0003
## 200 0.6722 nan 0.0100 0.0003
## 220 0.6491 nan 0.0100 0.0003
## 240 0.6294 nan 0.0100 -0.0000
## 260 0.6102 nan 0.0100 -0.0001
## 280 0.5932 nan 0.0100 0.0001
## 300 0.5771 nan 0.0100 0.0001
## 320 0.5621 nan 0.0100 0.0001
## 340 0.5492 nan 0.0100 0.0000
## 360 0.5361 nan 0.0100 0.0001
## 380 0.5238 nan 0.0100 0.0001
## 400 0.5112 nan 0.0100 0.0000
## 420 0.4999 nan 0.0100 -0.0001
## 440 0.4886 nan 0.0100 0.0001
## 460 0.4781 nan 0.0100 0.0001
## 480 0.4681 nan 0.0100 -0.0001
## 500 0.4582 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3111 nan 0.0100 0.0039
## 2 1.3019 nan 0.0100 0.0040
## 3 1.2928 nan 0.0100 0.0039
## 4 1.2842 nan 0.0100 0.0038
## 5 1.2760 nan 0.0100 0.0036
## 6 1.2670 nan 0.0100 0.0038
## 7 1.2591 nan 0.0100 0.0036
## 8 1.2508 nan 0.0100 0.0036
## 9 1.2428 nan 0.0100 0.0035
## 10 1.2354 nan 0.0100 0.0033
## 20 1.1649 nan 0.0100 0.0028
## 40 1.0511 nan 0.0100 0.0020
## 60 0.9649 nan 0.0100 0.0016
## 80 0.8984 nan 0.0100 0.0009
## 100 0.8444 nan 0.0100 0.0008
## 120 0.7990 nan 0.0100 0.0009
## 140 0.7630 nan 0.0100 0.0005
## 160 0.7324 nan 0.0100 0.0004
## 180 0.7057 nan 0.0100 0.0003
## 200 0.6813 nan 0.0100 0.0003
## 220 0.6601 nan 0.0100 0.0001
## 240 0.6403 nan 0.0100 0.0001
## 260 0.6236 nan 0.0100 0.0001
## 280 0.6059 nan 0.0100 0.0000
## 300 0.5912 nan 0.0100 -0.0000
## 320 0.5777 nan 0.0100 0.0001
## 340 0.5631 nan 0.0100 0.0001
## 360 0.5498 nan 0.0100 -0.0001
## 380 0.5373 nan 0.0100 -0.0000
## 400 0.5265 nan 0.0100 0.0000
## 420 0.5153 nan 0.0100 -0.0001
## 440 0.5043 nan 0.0100 0.0001
## 460 0.4938 nan 0.0100 -0.0001
## 480 0.4837 nan 0.0100 -0.0000
## 500 0.4730 nan 0.0100 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3111 nan 0.0100 0.0042
## 2 1.3024 nan 0.0100 0.0040
## 3 1.2940 nan 0.0100 0.0037
## 4 1.2857 nan 0.0100 0.0036
## 5 1.2780 nan 0.0100 0.0038
## 6 1.2694 nan 0.0100 0.0036
## 7 1.2615 nan 0.0100 0.0034
## 8 1.2537 nan 0.0100 0.0035
## 9 1.2463 nan 0.0100 0.0031
## 10 1.2394 nan 0.0100 0.0031
## 20 1.1694 nan 0.0100 0.0024
## 40 1.0566 nan 0.0100 0.0019
## 60 0.9700 nan 0.0100 0.0013
## 80 0.9026 nan 0.0100 0.0011
## 100 0.8486 nan 0.0100 0.0010
## 120 0.8043 nan 0.0100 0.0006
## 140 0.7697 nan 0.0100 0.0005
## 160 0.7382 nan 0.0100 0.0003
## 180 0.7123 nan 0.0100 0.0001
## 200 0.6895 nan 0.0100 0.0005
## 220 0.6682 nan 0.0100 0.0002
## 240 0.6495 nan 0.0100 0.0000
## 260 0.6325 nan 0.0100 0.0000
## 280 0.6164 nan 0.0100 0.0002
## 300 0.6013 nan 0.0100 0.0001
## 320 0.5876 nan 0.0100 0.0001
## 340 0.5742 nan 0.0100 -0.0000
## 360 0.5609 nan 0.0100 -0.0001
## 380 0.5481 nan 0.0100 -0.0000
## 400 0.5373 nan 0.0100 0.0001
## 420 0.5261 nan 0.0100 -0.0001
## 440 0.5161 nan 0.0100 0.0000
## 460 0.5057 nan 0.0100 -0.0003
## 480 0.4960 nan 0.0100 0.0001
## 500 0.4869 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2447 nan 0.1000 0.0378
## 2 1.1822 nan 0.1000 0.0256
## 3 1.1276 nan 0.1000 0.0184
## 4 1.0808 nan 0.1000 0.0228
## 5 1.0349 nan 0.1000 0.0188
## 6 0.9951 nan 0.1000 0.0178
## 7 0.9638 nan 0.1000 0.0097
## 8 0.9309 nan 0.1000 0.0135
## 9 0.9060 nan 0.1000 0.0104
## 10 0.8842 nan 0.1000 0.0076
## 20 0.7405 nan 0.1000 0.0011
## 40 0.6010 nan 0.1000 0.0007
## 60 0.5190 nan 0.1000 -0.0002
## 80 0.4481 nan 0.1000 -0.0003
## 100 0.3962 nan 0.1000 0.0006
## 120 0.3505 nan 0.1000 -0.0004
## 140 0.3151 nan 0.1000 -0.0004
## 160 0.2835 nan 0.1000 -0.0003
## 180 0.2567 nan 0.1000 -0.0001
## 200 0.2318 nan 0.1000 0.0000
## 220 0.2095 nan 0.1000 -0.0003
## 240 0.1894 nan 0.1000 0.0000
## 260 0.1733 nan 0.1000 -0.0007
## 280 0.1566 nan 0.1000 0.0000
## 300 0.1431 nan 0.1000 -0.0006
## 320 0.1314 nan 0.1000 -0.0003
## 340 0.1205 nan 0.1000 -0.0002
## 360 0.1106 nan 0.1000 -0.0002
## 380 0.1019 nan 0.1000 -0.0002
## 400 0.0951 nan 0.1000 -0.0003
## 420 0.0881 nan 0.1000 -0.0003
## 440 0.0814 nan 0.1000 -0.0001
## 460 0.0744 nan 0.1000 -0.0002
## 480 0.0691 nan 0.1000 -0.0002
## 500 0.0644 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2445 nan 0.1000 0.0365
## 2 1.1807 nan 0.1000 0.0254
## 3 1.1280 nan 0.1000 0.0235
## 4 1.0749 nan 0.1000 0.0224
## 5 1.0383 nan 0.1000 0.0135
## 6 0.9984 nan 0.1000 0.0160
## 7 0.9635 nan 0.1000 0.0153
## 8 0.9347 nan 0.1000 0.0126
## 9 0.9055 nan 0.1000 0.0111
## 10 0.8798 nan 0.1000 0.0101
## 20 0.7339 nan 0.1000 0.0023
## 40 0.6039 nan 0.1000 -0.0003
## 60 0.5145 nan 0.1000 -0.0007
## 80 0.4507 nan 0.1000 -0.0007
## 100 0.4001 nan 0.1000 0.0002
## 120 0.3545 nan 0.1000 -0.0006
## 140 0.3230 nan 0.1000 -0.0002
## 160 0.2897 nan 0.1000 -0.0002
## 180 0.2589 nan 0.1000 -0.0003
## 200 0.2369 nan 0.1000 -0.0003
## 220 0.2175 nan 0.1000 -0.0008
## 240 0.2001 nan 0.1000 -0.0010
## 260 0.1820 nan 0.1000 -0.0007
## 280 0.1660 nan 0.1000 -0.0004
## 300 0.1541 nan 0.1000 -0.0004
## 320 0.1417 nan 0.1000 -0.0006
## 340 0.1300 nan 0.1000 -0.0003
## 360 0.1210 nan 0.1000 -0.0002
## 380 0.1117 nan 0.1000 -0.0004
## 400 0.1033 nan 0.1000 -0.0005
## 420 0.0953 nan 0.1000 -0.0003
## 440 0.0888 nan 0.1000 0.0000
## 460 0.0818 nan 0.1000 -0.0001
## 480 0.0761 nan 0.1000 -0.0003
## 500 0.0712 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2449 nan 0.1000 0.0325
## 2 1.1842 nan 0.1000 0.0288
## 3 1.1275 nan 0.1000 0.0253
## 4 1.0790 nan 0.1000 0.0195
## 5 1.0422 nan 0.1000 0.0165
## 6 1.0065 nan 0.1000 0.0164
## 7 0.9725 nan 0.1000 0.0129
## 8 0.9400 nan 0.1000 0.0117
## 9 0.9166 nan 0.1000 0.0084
## 10 0.8924 nan 0.1000 0.0089
## 20 0.7437 nan 0.1000 0.0020
## 40 0.6262 nan 0.1000 -0.0009
## 60 0.5457 nan 0.1000 0.0001
## 80 0.4854 nan 0.1000 -0.0014
## 100 0.4302 nan 0.1000 -0.0012
## 120 0.3844 nan 0.1000 -0.0009
## 140 0.3507 nan 0.1000 -0.0014
## 160 0.3188 nan 0.1000 -0.0010
## 180 0.2877 nan 0.1000 -0.0003
## 200 0.2610 nan 0.1000 -0.0006
## 220 0.2389 nan 0.1000 -0.0008
## 240 0.2196 nan 0.1000 -0.0004
## 260 0.1988 nan 0.1000 -0.0008
## 280 0.1842 nan 0.1000 -0.0010
## 300 0.1698 nan 0.1000 -0.0003
## 320 0.1573 nan 0.1000 -0.0003
## 340 0.1434 nan 0.1000 -0.0004
## 360 0.1326 nan 0.1000 -0.0006
## 380 0.1228 nan 0.1000 -0.0002
## 400 0.1120 nan 0.1000 -0.0005
## 420 0.1042 nan 0.1000 -0.0001
## 440 0.0964 nan 0.1000 -0.0003
## 460 0.0893 nan 0.1000 -0.0002
## 480 0.0827 nan 0.1000 -0.0002
## 500 0.0765 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2454 nan 0.1000 0.0347
## 2 1.1713 nan 0.1000 0.0313
## 3 1.1073 nan 0.1000 0.0284
## 4 1.0583 nan 0.1000 0.0204
## 5 1.0134 nan 0.1000 0.0212
## 6 0.9725 nan 0.1000 0.0164
## 7 0.9323 nan 0.1000 0.0148
## 8 0.9010 nan 0.1000 0.0123
## 9 0.8787 nan 0.1000 0.0079
## 10 0.8512 nan 0.1000 0.0116
## 20 0.6942 nan 0.1000 0.0013
## 40 0.5527 nan 0.1000 -0.0007
## 60 0.4664 nan 0.1000 -0.0006
## 80 0.4005 nan 0.1000 -0.0009
## 100 0.3431 nan 0.1000 -0.0006
## 120 0.2980 nan 0.1000 -0.0004
## 140 0.2612 nan 0.1000 -0.0003
## 160 0.2255 nan 0.1000 -0.0005
## 180 0.1944 nan 0.1000 -0.0000
## 200 0.1749 nan 0.1000 -0.0004
## 220 0.1534 nan 0.1000 -0.0003
## 240 0.1378 nan 0.1000 -0.0004
## 260 0.1232 nan 0.1000 0.0000
## 280 0.1097 nan 0.1000 -0.0004
## 300 0.0981 nan 0.1000 -0.0000
## 320 0.0868 nan 0.1000 -0.0002
## 340 0.0785 nan 0.1000 -0.0001
## 360 0.0709 nan 0.1000 -0.0003
## 380 0.0639 nan 0.1000 0.0001
## 400 0.0575 nan 0.1000 -0.0001
## 420 0.0520 nan 0.1000 -0.0000
## 440 0.0474 nan 0.1000 -0.0002
## 460 0.0431 nan 0.1000 -0.0001
## 480 0.0388 nan 0.1000 -0.0000
## 500 0.0352 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2328 nan 0.1000 0.0392
## 2 1.1640 nan 0.1000 0.0307
## 3 1.1044 nan 0.1000 0.0261
## 4 1.0514 nan 0.1000 0.0199
## 5 1.0102 nan 0.1000 0.0172
## 6 0.9706 nan 0.1000 0.0160
## 7 0.9341 nan 0.1000 0.0132
## 8 0.9041 nan 0.1000 0.0112
## 9 0.8800 nan 0.1000 0.0074
## 10 0.8523 nan 0.1000 0.0096
## 20 0.7001 nan 0.1000 -0.0002
## 40 0.5680 nan 0.1000 -0.0004
## 60 0.4821 nan 0.1000 -0.0006
## 80 0.4125 nan 0.1000 0.0000
## 100 0.3632 nan 0.1000 -0.0020
## 120 0.3103 nan 0.1000 -0.0012
## 140 0.2709 nan 0.1000 -0.0010
## 160 0.2377 nan 0.1000 -0.0003
## 180 0.2124 nan 0.1000 -0.0006
## 200 0.1879 nan 0.1000 -0.0008
## 220 0.1675 nan 0.1000 -0.0009
## 240 0.1494 nan 0.1000 0.0004
## 260 0.1342 nan 0.1000 0.0000
## 280 0.1200 nan 0.1000 -0.0003
## 300 0.1083 nan 0.1000 -0.0001
## 320 0.0972 nan 0.1000 -0.0000
## 340 0.0877 nan 0.1000 -0.0004
## 360 0.0793 nan 0.1000 -0.0004
## 380 0.0727 nan 0.1000 -0.0003
## 400 0.0658 nan 0.1000 -0.0002
## 420 0.0591 nan 0.1000 -0.0002
## 440 0.0535 nan 0.1000 -0.0004
## 460 0.0484 nan 0.1000 -0.0002
## 480 0.0435 nan 0.1000 -0.0001
## 500 0.0396 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2410 nan 0.1000 0.0358
## 2 1.1691 nan 0.1000 0.0323
## 3 1.1078 nan 0.1000 0.0267
## 4 1.0599 nan 0.1000 0.0206
## 5 1.0139 nan 0.1000 0.0183
## 6 0.9787 nan 0.1000 0.0150
## 7 0.9422 nan 0.1000 0.0150
## 8 0.9113 nan 0.1000 0.0121
## 9 0.8846 nan 0.1000 0.0102
## 10 0.8629 nan 0.1000 0.0078
## 20 0.6984 nan 0.1000 0.0036
## 40 0.5699 nan 0.1000 -0.0006
## 60 0.4856 nan 0.1000 -0.0012
## 80 0.4209 nan 0.1000 0.0001
## 100 0.3607 nan 0.1000 -0.0007
## 120 0.3156 nan 0.1000 -0.0006
## 140 0.2791 nan 0.1000 -0.0016
## 160 0.2484 nan 0.1000 -0.0011
## 180 0.2217 nan 0.1000 -0.0003
## 200 0.1986 nan 0.1000 -0.0007
## 220 0.1777 nan 0.1000 -0.0006
## 240 0.1617 nan 0.1000 -0.0008
## 260 0.1441 nan 0.1000 -0.0007
## 280 0.1309 nan 0.1000 -0.0006
## 300 0.1177 nan 0.1000 -0.0005
## 320 0.1063 nan 0.1000 -0.0005
## 340 0.0960 nan 0.1000 -0.0002
## 360 0.0864 nan 0.1000 -0.0005
## 380 0.0776 nan 0.1000 -0.0003
## 400 0.0701 nan 0.1000 -0.0002
## 420 0.0638 nan 0.1000 -0.0001
## 440 0.0573 nan 0.1000 -0.0002
## 460 0.0519 nan 0.1000 -0.0002
## 480 0.0475 nan 0.1000 -0.0001
## 500 0.0437 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2403 nan 0.1000 0.0366
## 2 1.1655 nan 0.1000 0.0330
## 3 1.1021 nan 0.1000 0.0267
## 4 1.0474 nan 0.1000 0.0234
## 5 0.9965 nan 0.1000 0.0231
## 6 0.9579 nan 0.1000 0.0146
## 7 0.9177 nan 0.1000 0.0150
## 8 0.8867 nan 0.1000 0.0127
## 9 0.8587 nan 0.1000 0.0101
## 10 0.8339 nan 0.1000 0.0082
## 20 0.6723 nan 0.1000 0.0021
## 40 0.5220 nan 0.1000 -0.0007
## 60 0.4266 nan 0.1000 -0.0009
## 80 0.3494 nan 0.1000 -0.0001
## 100 0.2914 nan 0.1000 0.0000
## 120 0.2527 nan 0.1000 -0.0008
## 140 0.2162 nan 0.1000 -0.0002
## 160 0.1831 nan 0.1000 -0.0011
## 180 0.1571 nan 0.1000 -0.0002
## 200 0.1345 nan 0.1000 -0.0003
## 220 0.1177 nan 0.1000 -0.0002
## 240 0.1031 nan 0.1000 -0.0002
## 260 0.0911 nan 0.1000 -0.0003
## 280 0.0804 nan 0.1000 -0.0003
## 300 0.0707 nan 0.1000 -0.0003
## 320 0.0628 nan 0.1000 -0.0003
## 340 0.0554 nan 0.1000 -0.0001
## 360 0.0483 nan 0.1000 -0.0000
## 380 0.0430 nan 0.1000 -0.0001
## 400 0.0378 nan 0.1000 -0.0001
## 420 0.0333 nan 0.1000 0.0000
## 440 0.0297 nan 0.1000 -0.0000
## 460 0.0263 nan 0.1000 -0.0001
## 480 0.0235 nan 0.1000 -0.0001
## 500 0.0212 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2443 nan 0.1000 0.0289
## 2 1.1741 nan 0.1000 0.0333
## 3 1.1095 nan 0.1000 0.0260
## 4 1.0509 nan 0.1000 0.0235
## 5 1.0031 nan 0.1000 0.0224
## 6 0.9671 nan 0.1000 0.0128
## 7 0.9309 nan 0.1000 0.0158
## 8 0.8939 nan 0.1000 0.0125
## 9 0.8638 nan 0.1000 0.0122
## 10 0.8381 nan 0.1000 0.0093
## 20 0.6806 nan 0.1000 0.0003
## 40 0.5296 nan 0.1000 0.0010
## 60 0.4321 nan 0.1000 -0.0001
## 80 0.3567 nan 0.1000 -0.0016
## 100 0.2999 nan 0.1000 -0.0005
## 120 0.2545 nan 0.1000 -0.0006
## 140 0.2169 nan 0.1000 -0.0000
## 160 0.1899 nan 0.1000 -0.0008
## 180 0.1628 nan 0.1000 -0.0010
## 200 0.1415 nan 0.1000 -0.0005
## 220 0.1219 nan 0.1000 -0.0005
## 240 0.1071 nan 0.1000 -0.0004
## 260 0.0942 nan 0.1000 -0.0002
## 280 0.0833 nan 0.1000 -0.0000
## 300 0.0731 nan 0.1000 -0.0002
## 320 0.0645 nan 0.1000 -0.0001
## 340 0.0567 nan 0.1000 -0.0001
## 360 0.0495 nan 0.1000 -0.0001
## 380 0.0440 nan 0.1000 -0.0001
## 400 0.0391 nan 0.1000 -0.0001
## 420 0.0348 nan 0.1000 -0.0000
## 440 0.0307 nan 0.1000 -0.0001
## 460 0.0274 nan 0.1000 -0.0001
## 480 0.0240 nan 0.1000 -0.0000
## 500 0.0214 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2320 nan 0.1000 0.0440
## 2 1.1611 nan 0.1000 0.0352
## 3 1.1037 nan 0.1000 0.0267
## 4 1.0492 nan 0.1000 0.0222
## 5 1.0054 nan 0.1000 0.0198
## 6 0.9642 nan 0.1000 0.0188
## 7 0.9279 nan 0.1000 0.0130
## 8 0.8977 nan 0.1000 0.0122
## 9 0.8698 nan 0.1000 0.0108
## 10 0.8452 nan 0.1000 0.0074
## 20 0.6984 nan 0.1000 -0.0003
## 40 0.5521 nan 0.1000 -0.0012
## 60 0.4518 nan 0.1000 -0.0015
## 80 0.3766 nan 0.1000 -0.0012
## 100 0.3219 nan 0.1000 -0.0004
## 120 0.2728 nan 0.1000 -0.0005
## 140 0.2308 nan 0.1000 -0.0015
## 160 0.1986 nan 0.1000 -0.0004
## 180 0.1731 nan 0.1000 -0.0005
## 200 0.1534 nan 0.1000 -0.0004
## 220 0.1331 nan 0.1000 -0.0003
## 240 0.1175 nan 0.1000 -0.0001
## 260 0.1049 nan 0.1000 -0.0006
## 280 0.0923 nan 0.1000 -0.0001
## 300 0.0820 nan 0.1000 -0.0004
## 320 0.0738 nan 0.1000 -0.0002
## 340 0.0664 nan 0.1000 -0.0002
## 360 0.0585 nan 0.1000 -0.0002
## 380 0.0524 nan 0.1000 -0.0003
## 400 0.0461 nan 0.1000 -0.0000
## 420 0.0408 nan 0.1000 -0.0002
## 440 0.0359 nan 0.1000 -0.0001
## 460 0.0323 nan 0.1000 -0.0000
## 480 0.0292 nan 0.1000 -0.0001
## 500 0.0264 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3180 nan 0.0010 0.0003
## 4 1.3171 nan 0.0010 0.0004
## 5 1.3162 nan 0.0010 0.0004
## 6 1.3153 nan 0.0010 0.0004
## 7 1.3143 nan 0.0010 0.0004
## 8 1.3134 nan 0.0010 0.0004
## 9 1.3126 nan 0.0010 0.0004
## 10 1.3117 nan 0.0010 0.0004
## 20 1.3031 nan 0.0010 0.0004
## 40 1.2867 nan 0.0010 0.0004
## 60 1.2704 nan 0.0010 0.0004
## 80 1.2549 nan 0.0010 0.0002
## 100 1.2394 nan 0.0010 0.0004
## 120 1.2245 nan 0.0010 0.0003
## 140 1.2102 nan 0.0010 0.0003
## 160 1.1963 nan 0.0010 0.0003
## 180 1.1832 nan 0.0010 0.0003
## 200 1.1704 nan 0.0010 0.0003
## 220 1.1577 nan 0.0010 0.0002
## 240 1.1456 nan 0.0010 0.0003
## 260 1.1340 nan 0.0010 0.0003
## 280 1.1222 nan 0.0010 0.0002
## 300 1.1110 nan 0.0010 0.0002
## 320 1.1005 nan 0.0010 0.0002
## 340 1.0896 nan 0.0010 0.0002
## 360 1.0793 nan 0.0010 0.0002
## 380 1.0692 nan 0.0010 0.0003
## 400 1.0593 nan 0.0010 0.0002
## 420 1.0497 nan 0.0010 0.0002
## 440 1.0407 nan 0.0010 0.0002
## 460 1.0318 nan 0.0010 0.0002
## 480 1.0231 nan 0.0010 0.0002
## 500 1.0149 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3188 nan 0.0010 0.0005
## 3 1.3179 nan 0.0010 0.0005
## 4 1.3169 nan 0.0010 0.0004
## 5 1.3160 nan 0.0010 0.0004
## 6 1.3151 nan 0.0010 0.0004
## 7 1.3143 nan 0.0010 0.0004
## 8 1.3134 nan 0.0010 0.0004
## 9 1.3125 nan 0.0010 0.0004
## 10 1.3117 nan 0.0010 0.0004
## 20 1.3028 nan 0.0010 0.0004
## 40 1.2862 nan 0.0010 0.0004
## 60 1.2695 nan 0.0010 0.0004
## 80 1.2537 nan 0.0010 0.0003
## 100 1.2386 nan 0.0010 0.0004
## 120 1.2236 nan 0.0010 0.0003
## 140 1.2095 nan 0.0010 0.0003
## 160 1.1959 nan 0.0010 0.0003
## 180 1.1827 nan 0.0010 0.0003
## 200 1.1700 nan 0.0010 0.0003
## 220 1.1577 nan 0.0010 0.0003
## 240 1.1455 nan 0.0010 0.0003
## 260 1.1338 nan 0.0010 0.0002
## 280 1.1224 nan 0.0010 0.0003
## 300 1.1113 nan 0.0010 0.0002
## 320 1.1005 nan 0.0010 0.0002
## 340 1.0901 nan 0.0010 0.0002
## 360 1.0800 nan 0.0010 0.0002
## 380 1.0701 nan 0.0010 0.0002
## 400 1.0604 nan 0.0010 0.0002
## 420 1.0512 nan 0.0010 0.0002
## 440 1.0421 nan 0.0010 0.0002
## 460 1.0332 nan 0.0010 0.0002
## 480 1.0244 nan 0.0010 0.0002
## 500 1.0158 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3197 nan 0.0010 0.0004
## 2 1.3188 nan 0.0010 0.0004
## 3 1.3179 nan 0.0010 0.0005
## 4 1.3171 nan 0.0010 0.0004
## 5 1.3161 nan 0.0010 0.0004
## 6 1.3153 nan 0.0010 0.0004
## 7 1.3144 nan 0.0010 0.0004
## 8 1.3135 nan 0.0010 0.0004
## 9 1.3126 nan 0.0010 0.0004
## 10 1.3117 nan 0.0010 0.0004
## 20 1.3031 nan 0.0010 0.0004
## 40 1.2863 nan 0.0010 0.0004
## 60 1.2705 nan 0.0010 0.0003
## 80 1.2548 nan 0.0010 0.0003
## 100 1.2398 nan 0.0010 0.0003
## 120 1.2252 nan 0.0010 0.0003
## 140 1.2110 nan 0.0010 0.0003
## 160 1.1974 nan 0.0010 0.0003
## 180 1.1840 nan 0.0010 0.0003
## 200 1.1710 nan 0.0010 0.0003
## 220 1.1585 nan 0.0010 0.0002
## 240 1.1464 nan 0.0010 0.0002
## 260 1.1348 nan 0.0010 0.0003
## 280 1.1235 nan 0.0010 0.0002
## 300 1.1125 nan 0.0010 0.0003
## 320 1.1016 nan 0.0010 0.0002
## 340 1.0914 nan 0.0010 0.0002
## 360 1.0815 nan 0.0010 0.0002
## 380 1.0714 nan 0.0010 0.0002
## 400 1.0620 nan 0.0010 0.0002
## 420 1.0530 nan 0.0010 0.0002
## 440 1.0439 nan 0.0010 0.0002
## 460 1.0350 nan 0.0010 0.0002
## 480 1.0265 nan 0.0010 0.0002
## 500 1.0183 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3197 nan 0.0010 0.0005
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3179 nan 0.0010 0.0005
## 4 1.3170 nan 0.0010 0.0004
## 5 1.3160 nan 0.0010 0.0005
## 6 1.3150 nan 0.0010 0.0004
## 7 1.3141 nan 0.0010 0.0004
## 8 1.3131 nan 0.0010 0.0005
## 9 1.3122 nan 0.0010 0.0004
## 10 1.3112 nan 0.0010 0.0004
## 20 1.3019 nan 0.0010 0.0004
## 40 1.2842 nan 0.0010 0.0004
## 60 1.2670 nan 0.0010 0.0004
## 80 1.2506 nan 0.0010 0.0004
## 100 1.2347 nan 0.0010 0.0004
## 120 1.2189 nan 0.0010 0.0004
## 140 1.2039 nan 0.0010 0.0003
## 160 1.1898 nan 0.0010 0.0004
## 180 1.1754 nan 0.0010 0.0003
## 200 1.1618 nan 0.0010 0.0003
## 220 1.1482 nan 0.0010 0.0003
## 240 1.1354 nan 0.0010 0.0002
## 260 1.1230 nan 0.0010 0.0003
## 280 1.1108 nan 0.0010 0.0003
## 300 1.0990 nan 0.0010 0.0003
## 320 1.0874 nan 0.0010 0.0002
## 340 1.0762 nan 0.0010 0.0003
## 360 1.0653 nan 0.0010 0.0002
## 380 1.0551 nan 0.0010 0.0002
## 400 1.0448 nan 0.0010 0.0002
## 420 1.0346 nan 0.0010 0.0002
## 440 1.0251 nan 0.0010 0.0002
## 460 1.0158 nan 0.0010 0.0002
## 480 1.0067 nan 0.0010 0.0002
## 500 0.9977 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3196 nan 0.0010 0.0004
## 2 1.3186 nan 0.0010 0.0004
## 3 1.3176 nan 0.0010 0.0004
## 4 1.3167 nan 0.0010 0.0004
## 5 1.3157 nan 0.0010 0.0004
## 6 1.3148 nan 0.0010 0.0004
## 7 1.3139 nan 0.0010 0.0004
## 8 1.3129 nan 0.0010 0.0004
## 9 1.3119 nan 0.0010 0.0004
## 10 1.3111 nan 0.0010 0.0004
## 20 1.3019 nan 0.0010 0.0004
## 40 1.2837 nan 0.0010 0.0004
## 60 1.2669 nan 0.0010 0.0004
## 80 1.2504 nan 0.0010 0.0004
## 100 1.2342 nan 0.0010 0.0004
## 120 1.2189 nan 0.0010 0.0004
## 140 1.2034 nan 0.0010 0.0003
## 160 1.1888 nan 0.0010 0.0003
## 180 1.1747 nan 0.0010 0.0003
## 200 1.1615 nan 0.0010 0.0003
## 220 1.1482 nan 0.0010 0.0003
## 240 1.1352 nan 0.0010 0.0003
## 260 1.1228 nan 0.0010 0.0002
## 280 1.1106 nan 0.0010 0.0002
## 300 1.0988 nan 0.0010 0.0003
## 320 1.0875 nan 0.0010 0.0003
## 340 1.0763 nan 0.0010 0.0002
## 360 1.0658 nan 0.0010 0.0002
## 380 1.0552 nan 0.0010 0.0002
## 400 1.0451 nan 0.0010 0.0002
## 420 1.0354 nan 0.0010 0.0002
## 440 1.0258 nan 0.0010 0.0002
## 460 1.0162 nan 0.0010 0.0002
## 480 1.0070 nan 0.0010 0.0002
## 500 0.9978 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3180 nan 0.0010 0.0004
## 4 1.3171 nan 0.0010 0.0005
## 5 1.3161 nan 0.0010 0.0004
## 6 1.3151 nan 0.0010 0.0004
## 7 1.3141 nan 0.0010 0.0004
## 8 1.3133 nan 0.0010 0.0004
## 9 1.3124 nan 0.0010 0.0005
## 10 1.3114 nan 0.0010 0.0005
## 20 1.3023 nan 0.0010 0.0004
## 40 1.2850 nan 0.0010 0.0004
## 60 1.2677 nan 0.0010 0.0004
## 80 1.2514 nan 0.0010 0.0004
## 100 1.2359 nan 0.0010 0.0003
## 120 1.2205 nan 0.0010 0.0003
## 140 1.2055 nan 0.0010 0.0003
## 160 1.1912 nan 0.0010 0.0003
## 180 1.1772 nan 0.0010 0.0003
## 200 1.1637 nan 0.0010 0.0003
## 220 1.1506 nan 0.0010 0.0003
## 240 1.1377 nan 0.0010 0.0003
## 260 1.1251 nan 0.0010 0.0003
## 280 1.1133 nan 0.0010 0.0003
## 300 1.1016 nan 0.0010 0.0002
## 320 1.0902 nan 0.0010 0.0003
## 340 1.0794 nan 0.0010 0.0002
## 360 1.0687 nan 0.0010 0.0002
## 380 1.0585 nan 0.0010 0.0002
## 400 1.0484 nan 0.0010 0.0002
## 420 1.0385 nan 0.0010 0.0002
## 440 1.0290 nan 0.0010 0.0002
## 460 1.0196 nan 0.0010 0.0002
## 480 1.0107 nan 0.0010 0.0002
## 500 1.0019 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3197 nan 0.0010 0.0004
## 2 1.3187 nan 0.0010 0.0005
## 3 1.3177 nan 0.0010 0.0004
## 4 1.3167 nan 0.0010 0.0005
## 5 1.3156 nan 0.0010 0.0005
## 6 1.3147 nan 0.0010 0.0004
## 7 1.3138 nan 0.0010 0.0004
## 8 1.3129 nan 0.0010 0.0005
## 9 1.3120 nan 0.0010 0.0004
## 10 1.3111 nan 0.0010 0.0004
## 20 1.3015 nan 0.0010 0.0004
## 40 1.2827 nan 0.0010 0.0004
## 60 1.2646 nan 0.0010 0.0004
## 80 1.2470 nan 0.0010 0.0004
## 100 1.2300 nan 0.0010 0.0004
## 120 1.2137 nan 0.0010 0.0004
## 140 1.1982 nan 0.0010 0.0003
## 160 1.1831 nan 0.0010 0.0004
## 180 1.1683 nan 0.0010 0.0003
## 200 1.1541 nan 0.0010 0.0003
## 220 1.1405 nan 0.0010 0.0003
## 240 1.1271 nan 0.0010 0.0003
## 260 1.1142 nan 0.0010 0.0003
## 280 1.1015 nan 0.0010 0.0002
## 300 1.0895 nan 0.0010 0.0002
## 320 1.0776 nan 0.0010 0.0003
## 340 1.0661 nan 0.0010 0.0002
## 360 1.0550 nan 0.0010 0.0002
## 380 1.0437 nan 0.0010 0.0002
## 400 1.0330 nan 0.0010 0.0002
## 420 1.0226 nan 0.0010 0.0002
## 440 1.0125 nan 0.0010 0.0002
## 460 1.0027 nan 0.0010 0.0002
## 480 0.9930 nan 0.0010 0.0002
## 500 0.9837 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3197 nan 0.0010 0.0004
## 2 1.3187 nan 0.0010 0.0004
## 3 1.3178 nan 0.0010 0.0005
## 4 1.3169 nan 0.0010 0.0004
## 5 1.3159 nan 0.0010 0.0005
## 6 1.3149 nan 0.0010 0.0005
## 7 1.3140 nan 0.0010 0.0004
## 8 1.3130 nan 0.0010 0.0004
## 9 1.3122 nan 0.0010 0.0004
## 10 1.3112 nan 0.0010 0.0004
## 20 1.3015 nan 0.0010 0.0004
## 40 1.2830 nan 0.0010 0.0004
## 60 1.2652 nan 0.0010 0.0004
## 80 1.2478 nan 0.0010 0.0004
## 100 1.2312 nan 0.0010 0.0004
## 120 1.2151 nan 0.0010 0.0003
## 140 1.1998 nan 0.0010 0.0003
## 160 1.1848 nan 0.0010 0.0004
## 180 1.1703 nan 0.0010 0.0003
## 200 1.1563 nan 0.0010 0.0003
## 220 1.1426 nan 0.0010 0.0003
## 240 1.1293 nan 0.0010 0.0003
## 260 1.1162 nan 0.0010 0.0003
## 280 1.1036 nan 0.0010 0.0003
## 300 1.0913 nan 0.0010 0.0003
## 320 1.0798 nan 0.0010 0.0003
## 340 1.0683 nan 0.0010 0.0002
## 360 1.0572 nan 0.0010 0.0003
## 380 1.0461 nan 0.0010 0.0002
## 400 1.0355 nan 0.0010 0.0002
## 420 1.0252 nan 0.0010 0.0002
## 440 1.0152 nan 0.0010 0.0002
## 460 1.0056 nan 0.0010 0.0002
## 480 0.9961 nan 0.0010 0.0002
## 500 0.9866 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3197 nan 0.0010 0.0005
## 2 1.3187 nan 0.0010 0.0004
## 3 1.3178 nan 0.0010 0.0004
## 4 1.3168 nan 0.0010 0.0005
## 5 1.3159 nan 0.0010 0.0005
## 6 1.3149 nan 0.0010 0.0004
## 7 1.3139 nan 0.0010 0.0005
## 8 1.3129 nan 0.0010 0.0004
## 9 1.3120 nan 0.0010 0.0004
## 10 1.3110 nan 0.0010 0.0004
## 20 1.3017 nan 0.0010 0.0004
## 40 1.2837 nan 0.0010 0.0005
## 60 1.2661 nan 0.0010 0.0004
## 80 1.2489 nan 0.0010 0.0003
## 100 1.2325 nan 0.0010 0.0004
## 120 1.2167 nan 0.0010 0.0003
## 140 1.2015 nan 0.0010 0.0003
## 160 1.1866 nan 0.0010 0.0003
## 180 1.1723 nan 0.0010 0.0003
## 200 1.1583 nan 0.0010 0.0003
## 220 1.1448 nan 0.0010 0.0003
## 240 1.1318 nan 0.0010 0.0003
## 260 1.1191 nan 0.0010 0.0003
## 280 1.1067 nan 0.0010 0.0002
## 300 1.0945 nan 0.0010 0.0003
## 320 1.0830 nan 0.0010 0.0002
## 340 1.0715 nan 0.0010 0.0003
## 360 1.0605 nan 0.0010 0.0002
## 380 1.0497 nan 0.0010 0.0002
## 400 1.0392 nan 0.0010 0.0002
## 420 1.0290 nan 0.0010 0.0002
## 440 1.0192 nan 0.0010 0.0002
## 460 1.0099 nan 0.0010 0.0002
## 480 1.0005 nan 0.0010 0.0002
## 500 0.9913 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3110 nan 0.0100 0.0041
## 2 1.3017 nan 0.0100 0.0041
## 3 1.2933 nan 0.0100 0.0040
## 4 1.2844 nan 0.0100 0.0043
## 5 1.2765 nan 0.0100 0.0032
## 6 1.2675 nan 0.0100 0.0038
## 7 1.2606 nan 0.0100 0.0034
## 8 1.2536 nan 0.0100 0.0028
## 9 1.2465 nan 0.0100 0.0033
## 10 1.2395 nan 0.0100 0.0031
## 20 1.1687 nan 0.0100 0.0030
## 40 1.0613 nan 0.0100 0.0022
## 60 0.9758 nan 0.0100 0.0015
## 80 0.9116 nan 0.0100 0.0011
## 100 0.8591 nan 0.0100 0.0009
## 120 0.8164 nan 0.0100 0.0008
## 140 0.7823 nan 0.0100 0.0003
## 160 0.7524 nan 0.0100 0.0004
## 180 0.7274 nan 0.0100 0.0003
## 200 0.7041 nan 0.0100 0.0002
## 220 0.6844 nan 0.0100 0.0002
## 240 0.6666 nan 0.0100 0.0002
## 260 0.6513 nan 0.0100 0.0001
## 280 0.6366 nan 0.0100 -0.0001
## 300 0.6224 nan 0.0100 0.0001
## 320 0.6100 nan 0.0100 0.0000
## 340 0.5968 nan 0.0100 0.0002
## 360 0.5860 nan 0.0100 0.0000
## 380 0.5753 nan 0.0100 0.0002
## 400 0.5649 nan 0.0100 -0.0001
## 420 0.5543 nan 0.0100 0.0001
## 440 0.5455 nan 0.0100 0.0000
## 460 0.5374 nan 0.0100 0.0000
## 480 0.5288 nan 0.0100 -0.0001
## 500 0.5209 nan 0.0100 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3120 nan 0.0100 0.0041
## 2 1.3036 nan 0.0100 0.0039
## 3 1.2956 nan 0.0100 0.0035
## 4 1.2872 nan 0.0100 0.0042
## 5 1.2796 nan 0.0100 0.0033
## 6 1.2706 nan 0.0100 0.0043
## 7 1.2623 nan 0.0100 0.0040
## 8 1.2542 nan 0.0100 0.0038
## 9 1.2468 nan 0.0100 0.0033
## 10 1.2402 nan 0.0100 0.0029
## 20 1.1726 nan 0.0100 0.0027
## 40 1.0637 nan 0.0100 0.0020
## 60 0.9799 nan 0.0100 0.0016
## 80 0.9118 nan 0.0100 0.0011
## 100 0.8572 nan 0.0100 0.0009
## 120 0.8143 nan 0.0100 0.0007
## 140 0.7795 nan 0.0100 0.0004
## 160 0.7498 nan 0.0100 0.0003
## 180 0.7246 nan 0.0100 0.0004
## 200 0.7048 nan 0.0100 0.0002
## 220 0.6851 nan 0.0100 0.0001
## 240 0.6675 nan 0.0100 0.0002
## 260 0.6525 nan 0.0100 0.0001
## 280 0.6383 nan 0.0100 0.0000
## 300 0.6254 nan 0.0100 -0.0000
## 320 0.6128 nan 0.0100 -0.0000
## 340 0.6021 nan 0.0100 0.0001
## 360 0.5920 nan 0.0100 0.0001
## 380 0.5817 nan 0.0100 -0.0001
## 400 0.5712 nan 0.0100 0.0000
## 420 0.5620 nan 0.0100 0.0001
## 440 0.5531 nan 0.0100 -0.0002
## 460 0.5450 nan 0.0100 -0.0001
## 480 0.5367 nan 0.0100 -0.0002
## 500 0.5295 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3117 nan 0.0100 0.0041
## 2 1.3024 nan 0.0100 0.0041
## 3 1.2944 nan 0.0100 0.0040
## 4 1.2859 nan 0.0100 0.0039
## 5 1.2781 nan 0.0100 0.0037
## 6 1.2705 nan 0.0100 0.0035
## 7 1.2629 nan 0.0100 0.0034
## 8 1.2553 nan 0.0100 0.0039
## 9 1.2472 nan 0.0100 0.0036
## 10 1.2395 nan 0.0100 0.0037
## 20 1.1707 nan 0.0100 0.0026
## 40 1.0607 nan 0.0100 0.0018
## 60 0.9794 nan 0.0100 0.0014
## 80 0.9123 nan 0.0100 0.0011
## 100 0.8599 nan 0.0100 0.0008
## 120 0.8173 nan 0.0100 0.0004
## 140 0.7842 nan 0.0100 0.0004
## 160 0.7543 nan 0.0100 0.0004
## 180 0.7297 nan 0.0100 0.0002
## 200 0.7078 nan 0.0100 0.0002
## 220 0.6898 nan 0.0100 0.0002
## 240 0.6735 nan 0.0100 0.0003
## 260 0.6591 nan 0.0100 0.0001
## 280 0.6457 nan 0.0100 0.0001
## 300 0.6333 nan 0.0100 0.0000
## 320 0.6219 nan 0.0100 0.0001
## 340 0.6108 nan 0.0100 0.0000
## 360 0.6007 nan 0.0100 0.0001
## 380 0.5908 nan 0.0100 0.0000
## 400 0.5809 nan 0.0100 -0.0000
## 420 0.5717 nan 0.0100 0.0000
## 440 0.5631 nan 0.0100 -0.0000
## 460 0.5553 nan 0.0100 0.0000
## 480 0.5470 nan 0.0100 -0.0000
## 500 0.5395 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3126 nan 0.0100 0.0038
## 2 1.3033 nan 0.0100 0.0043
## 3 1.2935 nan 0.0100 0.0044
## 4 1.2838 nan 0.0100 0.0042
## 5 1.2760 nan 0.0100 0.0036
## 6 1.2670 nan 0.0100 0.0039
## 7 1.2579 nan 0.0100 0.0037
## 8 1.2495 nan 0.0100 0.0037
## 9 1.2411 nan 0.0100 0.0038
## 10 1.2335 nan 0.0100 0.0033
## 20 1.1602 nan 0.0100 0.0034
## 40 1.0433 nan 0.0100 0.0023
## 60 0.9563 nan 0.0100 0.0012
## 80 0.8846 nan 0.0100 0.0010
## 100 0.8296 nan 0.0100 0.0013
## 120 0.7852 nan 0.0100 0.0008
## 140 0.7483 nan 0.0100 0.0002
## 160 0.7176 nan 0.0100 0.0005
## 180 0.6923 nan 0.0100 0.0003
## 200 0.6686 nan 0.0100 0.0001
## 220 0.6464 nan 0.0100 0.0002
## 240 0.6275 nan 0.0100 0.0002
## 260 0.6100 nan 0.0100 0.0001
## 280 0.5932 nan 0.0100 0.0001
## 300 0.5790 nan 0.0100 -0.0001
## 320 0.5644 nan 0.0100 0.0001
## 340 0.5521 nan 0.0100 -0.0002
## 360 0.5398 nan 0.0100 0.0001
## 380 0.5283 nan 0.0100 -0.0001
## 400 0.5157 nan 0.0100 -0.0000
## 420 0.5050 nan 0.0100 -0.0001
## 440 0.4955 nan 0.0100 0.0001
## 460 0.4855 nan 0.0100 -0.0001
## 480 0.4762 nan 0.0100 -0.0001
## 500 0.4674 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3108 nan 0.0100 0.0044
## 2 1.3018 nan 0.0100 0.0036
## 3 1.2922 nan 0.0100 0.0042
## 4 1.2833 nan 0.0100 0.0037
## 5 1.2753 nan 0.0100 0.0036
## 6 1.2670 nan 0.0100 0.0037
## 7 1.2592 nan 0.0100 0.0033
## 8 1.2509 nan 0.0100 0.0039
## 9 1.2421 nan 0.0100 0.0039
## 10 1.2335 nan 0.0100 0.0035
## 20 1.1636 nan 0.0100 0.0028
## 40 1.0487 nan 0.0100 0.0020
## 60 0.9613 nan 0.0100 0.0013
## 80 0.8921 nan 0.0100 0.0012
## 100 0.8384 nan 0.0100 0.0009
## 120 0.7937 nan 0.0100 0.0009
## 140 0.7561 nan 0.0100 0.0003
## 160 0.7239 nan 0.0100 0.0003
## 180 0.6975 nan 0.0100 0.0003
## 200 0.6738 nan 0.0100 0.0001
## 220 0.6537 nan 0.0100 0.0001
## 240 0.6342 nan 0.0100 0.0001
## 260 0.6168 nan 0.0100 -0.0001
## 280 0.6011 nan 0.0100 0.0000
## 300 0.5866 nan 0.0100 0.0001
## 320 0.5737 nan 0.0100 0.0001
## 340 0.5608 nan 0.0100 0.0000
## 360 0.5479 nan 0.0100 -0.0001
## 380 0.5372 nan 0.0100 -0.0000
## 400 0.5268 nan 0.0100 -0.0000
## 420 0.5161 nan 0.0100 -0.0000
## 440 0.5071 nan 0.0100 -0.0001
## 460 0.4969 nan 0.0100 0.0000
## 480 0.4882 nan 0.0100 -0.0001
## 500 0.4797 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3115 nan 0.0100 0.0038
## 2 1.3021 nan 0.0100 0.0043
## 3 1.2928 nan 0.0100 0.0043
## 4 1.2840 nan 0.0100 0.0042
## 5 1.2750 nan 0.0100 0.0038
## 6 1.2663 nan 0.0100 0.0041
## 7 1.2581 nan 0.0100 0.0033
## 8 1.2507 nan 0.0100 0.0033
## 9 1.2424 nan 0.0100 0.0037
## 10 1.2342 nan 0.0100 0.0039
## 20 1.1608 nan 0.0100 0.0030
## 40 1.0475 nan 0.0100 0.0020
## 60 0.9612 nan 0.0100 0.0017
## 80 0.8909 nan 0.0100 0.0012
## 100 0.8385 nan 0.0100 0.0009
## 120 0.7952 nan 0.0100 0.0006
## 140 0.7591 nan 0.0100 0.0005
## 160 0.7287 nan 0.0100 0.0003
## 180 0.7023 nan 0.0100 0.0002
## 200 0.6796 nan 0.0100 0.0003
## 220 0.6596 nan 0.0100 0.0001
## 240 0.6423 nan 0.0100 0.0000
## 260 0.6261 nan 0.0100 0.0002
## 280 0.6121 nan 0.0100 -0.0000
## 300 0.5978 nan 0.0100 -0.0001
## 320 0.5849 nan 0.0100 -0.0000
## 340 0.5728 nan 0.0100 -0.0000
## 360 0.5617 nan 0.0100 0.0000
## 380 0.5501 nan 0.0100 -0.0001
## 400 0.5392 nan 0.0100 0.0000
## 420 0.5298 nan 0.0100 -0.0001
## 440 0.5206 nan 0.0100 -0.0001
## 460 0.5105 nan 0.0100 -0.0001
## 480 0.5013 nan 0.0100 -0.0000
## 500 0.4925 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3104 nan 0.0100 0.0045
## 2 1.3004 nan 0.0100 0.0046
## 3 1.2911 nan 0.0100 0.0043
## 4 1.2815 nan 0.0100 0.0044
## 5 1.2729 nan 0.0100 0.0039
## 6 1.2644 nan 0.0100 0.0039
## 7 1.2549 nan 0.0100 0.0043
## 8 1.2467 nan 0.0100 0.0037
## 9 1.2377 nan 0.0100 0.0043
## 10 1.2290 nan 0.0100 0.0036
## 20 1.1518 nan 0.0100 0.0032
## 40 1.0306 nan 0.0100 0.0019
## 60 0.9393 nan 0.0100 0.0016
## 80 0.8682 nan 0.0100 0.0011
## 100 0.8121 nan 0.0100 0.0009
## 120 0.7648 nan 0.0100 0.0006
## 140 0.7246 nan 0.0100 0.0005
## 160 0.6926 nan 0.0100 0.0005
## 180 0.6649 nan 0.0100 0.0001
## 200 0.6404 nan 0.0100 0.0003
## 220 0.6172 nan 0.0100 -0.0000
## 240 0.5968 nan 0.0100 0.0001
## 260 0.5779 nan 0.0100 0.0001
## 280 0.5609 nan 0.0100 0.0002
## 300 0.5455 nan 0.0100 0.0001
## 320 0.5309 nan 0.0100 -0.0001
## 340 0.5166 nan 0.0100 0.0000
## 360 0.5037 nan 0.0100 0.0000
## 380 0.4908 nan 0.0100 -0.0002
## 400 0.4788 nan 0.0100 -0.0000
## 420 0.4674 nan 0.0100 0.0001
## 440 0.4563 nan 0.0100 -0.0000
## 460 0.4452 nan 0.0100 -0.0002
## 480 0.4347 nan 0.0100 -0.0001
## 500 0.4254 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3099 nan 0.0100 0.0049
## 2 1.3013 nan 0.0100 0.0040
## 3 1.2926 nan 0.0100 0.0038
## 4 1.2833 nan 0.0100 0.0040
## 5 1.2742 nan 0.0100 0.0043
## 6 1.2655 nan 0.0100 0.0037
## 7 1.2569 nan 0.0100 0.0039
## 8 1.2483 nan 0.0100 0.0040
## 9 1.2400 nan 0.0100 0.0039
## 10 1.2315 nan 0.0100 0.0040
## 20 1.1550 nan 0.0100 0.0033
## 40 1.0333 nan 0.0100 0.0020
## 60 0.9420 nan 0.0100 0.0016
## 80 0.8706 nan 0.0100 0.0013
## 100 0.8122 nan 0.0100 0.0009
## 120 0.7684 nan 0.0100 0.0005
## 140 0.7293 nan 0.0100 0.0006
## 160 0.6964 nan 0.0100 0.0005
## 180 0.6682 nan 0.0100 0.0005
## 200 0.6442 nan 0.0100 0.0000
## 220 0.6227 nan 0.0100 0.0003
## 240 0.6028 nan 0.0100 0.0003
## 260 0.5844 nan 0.0100 0.0001
## 280 0.5685 nan 0.0100 -0.0000
## 300 0.5529 nan 0.0100 0.0001
## 320 0.5386 nan 0.0100 0.0001
## 340 0.5251 nan 0.0100 -0.0000
## 360 0.5119 nan 0.0100 -0.0001
## 380 0.4997 nan 0.0100 -0.0001
## 400 0.4886 nan 0.0100 0.0000
## 420 0.4779 nan 0.0100 0.0001
## 440 0.4672 nan 0.0100 -0.0001
## 460 0.4565 nan 0.0100 0.0000
## 480 0.4466 nan 0.0100 0.0000
## 500 0.4378 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3098 nan 0.0100 0.0046
## 2 1.2997 nan 0.0100 0.0049
## 3 1.2918 nan 0.0100 0.0034
## 4 1.2832 nan 0.0100 0.0039
## 5 1.2732 nan 0.0100 0.0044
## 6 1.2650 nan 0.0100 0.0035
## 7 1.2562 nan 0.0100 0.0041
## 8 1.2479 nan 0.0100 0.0036
## 9 1.2395 nan 0.0100 0.0038
## 10 1.2315 nan 0.0100 0.0035
## 20 1.1594 nan 0.0100 0.0033
## 40 1.0399 nan 0.0100 0.0024
## 60 0.9481 nan 0.0100 0.0017
## 80 0.8771 nan 0.0100 0.0011
## 100 0.8226 nan 0.0100 0.0010
## 120 0.7780 nan 0.0100 0.0007
## 140 0.7403 nan 0.0100 0.0004
## 160 0.7082 nan 0.0100 0.0004
## 180 0.6797 nan 0.0100 0.0004
## 200 0.6568 nan 0.0100 0.0002
## 220 0.6354 nan 0.0100 0.0000
## 240 0.6166 nan 0.0100 0.0001
## 260 0.6004 nan 0.0100 -0.0002
## 280 0.5838 nan 0.0100 -0.0001
## 300 0.5689 nan 0.0100 0.0001
## 320 0.5549 nan 0.0100 -0.0001
## 340 0.5403 nan 0.0100 -0.0000
## 360 0.5278 nan 0.0100 0.0000
## 380 0.5161 nan 0.0100 0.0001
## 400 0.5054 nan 0.0100 -0.0000
## 420 0.4937 nan 0.0100 -0.0002
## 440 0.4825 nan 0.0100 -0.0001
## 460 0.4728 nan 0.0100 -0.0000
## 480 0.4630 nan 0.0100 -0.0001
## 500 0.4529 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2328 nan 0.1000 0.0408
## 2 1.1660 nan 0.1000 0.0300
## 3 1.1001 nan 0.1000 0.0275
## 4 1.0456 nan 0.1000 0.0216
## 5 1.0005 nan 0.1000 0.0166
## 6 0.9660 nan 0.1000 0.0144
## 7 0.9329 nan 0.1000 0.0118
## 8 0.9005 nan 0.1000 0.0144
## 9 0.8763 nan 0.1000 0.0114
## 10 0.8529 nan 0.1000 0.0075
## 20 0.7005 nan 0.1000 0.0036
## 40 0.5721 nan 0.1000 -0.0002
## 60 0.4840 nan 0.1000 -0.0017
## 80 0.4261 nan 0.1000 -0.0008
## 100 0.3813 nan 0.1000 -0.0008
## 120 0.3308 nan 0.1000 -0.0005
## 140 0.2943 nan 0.1000 -0.0002
## 160 0.2635 nan 0.1000 -0.0005
## 180 0.2389 nan 0.1000 -0.0004
## 200 0.2161 nan 0.1000 0.0000
## 220 0.1952 nan 0.1000 -0.0002
## 240 0.1752 nan 0.1000 -0.0004
## 260 0.1591 nan 0.1000 -0.0004
## 280 0.1435 nan 0.1000 -0.0004
## 300 0.1322 nan 0.1000 -0.0006
## 320 0.1218 nan 0.1000 -0.0001
## 340 0.1104 nan 0.1000 -0.0002
## 360 0.1006 nan 0.1000 -0.0001
## 380 0.0921 nan 0.1000 -0.0001
## 400 0.0851 nan 0.1000 0.0000
## 420 0.0790 nan 0.1000 -0.0003
## 440 0.0724 nan 0.1000 -0.0002
## 460 0.0673 nan 0.1000 -0.0003
## 480 0.0619 nan 0.1000 -0.0000
## 500 0.0571 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2348 nan 0.1000 0.0406
## 2 1.1695 nan 0.1000 0.0286
## 3 1.1119 nan 0.1000 0.0244
## 4 1.0572 nan 0.1000 0.0246
## 5 1.0078 nan 0.1000 0.0236
## 6 0.9701 nan 0.1000 0.0157
## 7 0.9364 nan 0.1000 0.0133
## 8 0.9032 nan 0.1000 0.0113
## 9 0.8738 nan 0.1000 0.0100
## 10 0.8444 nan 0.1000 0.0122
## 20 0.7007 nan 0.1000 0.0022
## 40 0.5764 nan 0.1000 -0.0007
## 60 0.4957 nan 0.1000 -0.0014
## 80 0.4310 nan 0.1000 -0.0001
## 100 0.3822 nan 0.1000 0.0003
## 120 0.3372 nan 0.1000 -0.0007
## 140 0.3013 nan 0.1000 -0.0005
## 160 0.2728 nan 0.1000 -0.0006
## 180 0.2486 nan 0.1000 -0.0003
## 200 0.2242 nan 0.1000 -0.0014
## 220 0.2013 nan 0.1000 -0.0008
## 240 0.1806 nan 0.1000 -0.0004
## 260 0.1662 nan 0.1000 -0.0002
## 280 0.1541 nan 0.1000 -0.0006
## 300 0.1406 nan 0.1000 -0.0006
## 320 0.1293 nan 0.1000 -0.0001
## 340 0.1188 nan 0.1000 -0.0003
## 360 0.1099 nan 0.1000 -0.0004
## 380 0.1019 nan 0.1000 -0.0005
## 400 0.0943 nan 0.1000 -0.0002
## 420 0.0867 nan 0.1000 -0.0003
## 440 0.0799 nan 0.1000 -0.0003
## 460 0.0735 nan 0.1000 -0.0002
## 480 0.0688 nan 0.1000 -0.0002
## 500 0.0640 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2353 nan 0.1000 0.0386
## 2 1.1638 nan 0.1000 0.0321
## 3 1.1038 nan 0.1000 0.0252
## 4 1.0540 nan 0.1000 0.0192
## 5 1.0129 nan 0.1000 0.0173
## 6 0.9681 nan 0.1000 0.0189
## 7 0.9338 nan 0.1000 0.0163
## 8 0.9038 nan 0.1000 0.0149
## 9 0.8784 nan 0.1000 0.0092
## 10 0.8547 nan 0.1000 0.0098
## 20 0.7055 nan 0.1000 0.0048
## 40 0.5862 nan 0.1000 0.0001
## 60 0.5104 nan 0.1000 -0.0004
## 80 0.4554 nan 0.1000 -0.0011
## 100 0.4058 nan 0.1000 -0.0002
## 120 0.3648 nan 0.1000 -0.0008
## 140 0.3302 nan 0.1000 -0.0002
## 160 0.2930 nan 0.1000 -0.0007
## 180 0.2653 nan 0.1000 -0.0001
## 200 0.2366 nan 0.1000 -0.0007
## 220 0.2172 nan 0.1000 -0.0008
## 240 0.1992 nan 0.1000 -0.0006
## 260 0.1811 nan 0.1000 -0.0005
## 280 0.1654 nan 0.1000 -0.0003
## 300 0.1522 nan 0.1000 -0.0005
## 320 0.1407 nan 0.1000 -0.0006
## 340 0.1281 nan 0.1000 -0.0002
## 360 0.1175 nan 0.1000 -0.0001
## 380 0.1086 nan 0.1000 -0.0003
## 400 0.0990 nan 0.1000 -0.0002
## 420 0.0919 nan 0.1000 -0.0002
## 440 0.0853 nan 0.1000 -0.0004
## 460 0.0794 nan 0.1000 -0.0002
## 480 0.0731 nan 0.1000 -0.0002
## 500 0.0677 nan 0.1000 -0.0003
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2358 nan 0.1000 0.0398
## 2 1.1656 nan 0.1000 0.0285
## 3 1.1089 nan 0.1000 0.0229
## 4 1.0461 nan 0.1000 0.0266
## 5 0.9991 nan 0.1000 0.0205
## 6 0.9572 nan 0.1000 0.0159
## 7 0.9212 nan 0.1000 0.0146
## 8 0.8797 nan 0.1000 0.0146
## 9 0.8492 nan 0.1000 0.0101
## 10 0.8277 nan 0.1000 0.0048
## 20 0.6697 nan 0.1000 0.0034
## 40 0.5286 nan 0.1000 -0.0003
## 60 0.4327 nan 0.1000 -0.0005
## 80 0.3633 nan 0.1000 0.0002
## 100 0.3117 nan 0.1000 -0.0005
## 120 0.2727 nan 0.1000 -0.0012
## 140 0.2385 nan 0.1000 -0.0003
## 160 0.2063 nan 0.1000 -0.0007
## 180 0.1782 nan 0.1000 -0.0001
## 200 0.1584 nan 0.1000 -0.0004
## 220 0.1405 nan 0.1000 -0.0004
## 240 0.1263 nan 0.1000 -0.0003
## 260 0.1121 nan 0.1000 -0.0002
## 280 0.0993 nan 0.1000 -0.0005
## 300 0.0888 nan 0.1000 -0.0003
## 320 0.0788 nan 0.1000 -0.0002
## 340 0.0712 nan 0.1000 -0.0002
## 360 0.0641 nan 0.1000 -0.0002
## 380 0.0574 nan 0.1000 -0.0000
## 400 0.0521 nan 0.1000 -0.0001
## 420 0.0469 nan 0.1000 -0.0002
## 440 0.0422 nan 0.1000 -0.0000
## 460 0.0383 nan 0.1000 -0.0001
## 480 0.0348 nan 0.1000 -0.0001
## 500 0.0317 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2315 nan 0.1000 0.0415
## 2 1.1550 nan 0.1000 0.0314
## 3 1.0971 nan 0.1000 0.0223
## 4 1.0462 nan 0.1000 0.0220
## 5 1.0048 nan 0.1000 0.0195
## 6 0.9576 nan 0.1000 0.0204
## 7 0.9168 nan 0.1000 0.0180
## 8 0.8856 nan 0.1000 0.0128
## 9 0.8572 nan 0.1000 0.0129
## 10 0.8286 nan 0.1000 0.0094
## 20 0.6789 nan 0.1000 0.0025
## 40 0.5419 nan 0.1000 -0.0015
## 60 0.4533 nan 0.1000 -0.0012
## 80 0.3828 nan 0.1000 -0.0005
## 100 0.3252 nan 0.1000 -0.0002
## 120 0.2845 nan 0.1000 -0.0001
## 140 0.2459 nan 0.1000 -0.0010
## 160 0.2146 nan 0.1000 -0.0011
## 180 0.1909 nan 0.1000 -0.0009
## 200 0.1687 nan 0.1000 -0.0005
## 220 0.1500 nan 0.1000 -0.0004
## 240 0.1346 nan 0.1000 -0.0005
## 260 0.1189 nan 0.1000 -0.0002
## 280 0.1077 nan 0.1000 -0.0003
## 300 0.0974 nan 0.1000 -0.0002
## 320 0.0885 nan 0.1000 -0.0004
## 340 0.0794 nan 0.1000 -0.0005
## 360 0.0719 nan 0.1000 -0.0004
## 380 0.0644 nan 0.1000 -0.0004
## 400 0.0575 nan 0.1000 -0.0001
## 420 0.0519 nan 0.1000 -0.0001
## 440 0.0470 nan 0.1000 -0.0001
## 460 0.0422 nan 0.1000 -0.0001
## 480 0.0379 nan 0.1000 -0.0002
## 500 0.0346 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2231 nan 0.1000 0.0407
## 2 1.1473 nan 0.1000 0.0323
## 3 1.0870 nan 0.1000 0.0279
## 4 1.0345 nan 0.1000 0.0240
## 5 0.9764 nan 0.1000 0.0220
## 6 0.9393 nan 0.1000 0.0176
## 7 0.9049 nan 0.1000 0.0132
## 8 0.8746 nan 0.1000 0.0113
## 9 0.8463 nan 0.1000 0.0107
## 10 0.8229 nan 0.1000 0.0095
## 20 0.6721 nan 0.1000 0.0024
## 40 0.5339 nan 0.1000 -0.0005
## 60 0.4523 nan 0.1000 -0.0009
## 80 0.3861 nan 0.1000 -0.0019
## 100 0.3333 nan 0.1000 -0.0006
## 120 0.2895 nan 0.1000 -0.0014
## 140 0.2570 nan 0.1000 -0.0011
## 160 0.2229 nan 0.1000 -0.0003
## 180 0.1966 nan 0.1000 -0.0009
## 200 0.1756 nan 0.1000 -0.0007
## 220 0.1562 nan 0.1000 -0.0009
## 240 0.1428 nan 0.1000 -0.0002
## 260 0.1286 nan 0.1000 -0.0004
## 280 0.1168 nan 0.1000 -0.0005
## 300 0.1045 nan 0.1000 -0.0002
## 320 0.0945 nan 0.1000 -0.0001
## 340 0.0852 nan 0.1000 -0.0007
## 360 0.0768 nan 0.1000 -0.0003
## 380 0.0702 nan 0.1000 -0.0004
## 400 0.0634 nan 0.1000 -0.0001
## 420 0.0576 nan 0.1000 -0.0003
## 440 0.0513 nan 0.1000 -0.0003
## 460 0.0460 nan 0.1000 -0.0002
## 480 0.0415 nan 0.1000 -0.0002
## 500 0.0384 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2284 nan 0.1000 0.0422
## 2 1.1509 nan 0.1000 0.0326
## 3 1.0852 nan 0.1000 0.0289
## 4 1.0293 nan 0.1000 0.0255
## 5 0.9766 nan 0.1000 0.0185
## 6 0.9329 nan 0.1000 0.0168
## 7 0.8957 nan 0.1000 0.0168
## 8 0.8626 nan 0.1000 0.0112
## 9 0.8298 nan 0.1000 0.0137
## 10 0.8031 nan 0.1000 0.0104
## 20 0.6397 nan 0.1000 0.0035
## 40 0.4810 nan 0.1000 0.0008
## 60 0.3841 nan 0.1000 0.0007
## 80 0.3177 nan 0.1000 -0.0008
## 100 0.2658 nan 0.1000 -0.0008
## 120 0.2263 nan 0.1000 0.0001
## 140 0.1894 nan 0.1000 -0.0003
## 160 0.1627 nan 0.1000 -0.0004
## 180 0.1388 nan 0.1000 0.0002
## 200 0.1208 nan 0.1000 -0.0004
## 220 0.1042 nan 0.1000 0.0000
## 240 0.0900 nan 0.1000 -0.0002
## 260 0.0788 nan 0.1000 -0.0004
## 280 0.0693 nan 0.1000 -0.0001
## 300 0.0605 nan 0.1000 -0.0002
## 320 0.0537 nan 0.1000 -0.0000
## 340 0.0471 nan 0.1000 -0.0002
## 360 0.0411 nan 0.1000 -0.0001
## 380 0.0362 nan 0.1000 0.0000
## 400 0.0324 nan 0.1000 -0.0001
## 420 0.0285 nan 0.1000 -0.0001
## 440 0.0253 nan 0.1000 -0.0001
## 460 0.0223 nan 0.1000 -0.0001
## 480 0.0199 nan 0.1000 -0.0000
## 500 0.0177 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2286 nan 0.1000 0.0466
## 2 1.1516 nan 0.1000 0.0371
## 3 1.0864 nan 0.1000 0.0264
## 4 1.0274 nan 0.1000 0.0259
## 5 0.9793 nan 0.1000 0.0215
## 6 0.9409 nan 0.1000 0.0128
## 7 0.9052 nan 0.1000 0.0140
## 8 0.8734 nan 0.1000 0.0127
## 9 0.8448 nan 0.1000 0.0109
## 10 0.8176 nan 0.1000 0.0082
## 20 0.6502 nan 0.1000 0.0031
## 40 0.4996 nan 0.1000 -0.0003
## 60 0.4036 nan 0.1000 0.0003
## 80 0.3271 nan 0.1000 -0.0004
## 100 0.2768 nan 0.1000 -0.0008
## 120 0.2322 nan 0.1000 -0.0008
## 140 0.1966 nan 0.1000 -0.0004
## 160 0.1694 nan 0.1000 -0.0003
## 180 0.1465 nan 0.1000 -0.0002
## 200 0.1264 nan 0.1000 -0.0000
## 220 0.1082 nan 0.1000 0.0002
## 240 0.0947 nan 0.1000 -0.0003
## 260 0.0831 nan 0.1000 -0.0004
## 280 0.0738 nan 0.1000 -0.0002
## 300 0.0651 nan 0.1000 -0.0003
## 320 0.0572 nan 0.1000 -0.0004
## 340 0.0504 nan 0.1000 -0.0002
## 360 0.0450 nan 0.1000 -0.0002
## 380 0.0400 nan 0.1000 -0.0003
## 400 0.0350 nan 0.1000 -0.0001
## 420 0.0310 nan 0.1000 -0.0000
## 440 0.0275 nan 0.1000 -0.0001
## 460 0.0245 nan 0.1000 -0.0001
## 480 0.0213 nan 0.1000 -0.0000
## 500 0.0190 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2315 nan 0.1000 0.0422
## 2 1.1601 nan 0.1000 0.0308
## 3 1.0905 nan 0.1000 0.0299
## 4 1.0383 nan 0.1000 0.0242
## 5 0.9904 nan 0.1000 0.0215
## 6 0.9467 nan 0.1000 0.0169
## 7 0.9093 nan 0.1000 0.0144
## 8 0.8721 nan 0.1000 0.0157
## 9 0.8446 nan 0.1000 0.0100
## 10 0.8199 nan 0.1000 0.0082
## 20 0.6573 nan 0.1000 0.0022
## 40 0.5216 nan 0.1000 -0.0001
## 60 0.4237 nan 0.1000 -0.0005
## 80 0.3539 nan 0.1000 -0.0015
## 100 0.2961 nan 0.1000 -0.0008
## 120 0.2528 nan 0.1000 -0.0010
## 140 0.2183 nan 0.1000 -0.0010
## 160 0.1867 nan 0.1000 -0.0004
## 180 0.1595 nan 0.1000 -0.0005
## 200 0.1397 nan 0.1000 -0.0005
## 220 0.1210 nan 0.1000 -0.0003
## 240 0.1063 nan 0.1000 -0.0007
## 260 0.0935 nan 0.1000 -0.0002
## 280 0.0823 nan 0.1000 -0.0005
## 300 0.0718 nan 0.1000 -0.0002
## 320 0.0637 nan 0.1000 -0.0003
## 340 0.0566 nan 0.1000 -0.0002
## 360 0.0503 nan 0.1000 -0.0001
## 380 0.0442 nan 0.1000 -0.0001
## 400 0.0394 nan 0.1000 -0.0002
## 420 0.0351 nan 0.1000 -0.0001
## 440 0.0315 nan 0.1000 -0.0001
## 460 0.0279 nan 0.1000 -0.0001
## 480 0.0248 nan 0.1000 -0.0001
## 500 0.0221 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3199 nan 0.0010 0.0003
## 2 1.3190 nan 0.0010 0.0004
## 3 1.3183 nan 0.0010 0.0003
## 4 1.3175 nan 0.0010 0.0004
## 5 1.3166 nan 0.0010 0.0004
## 6 1.3158 nan 0.0010 0.0004
## 7 1.3149 nan 0.0010 0.0004
## 8 1.3141 nan 0.0010 0.0004
## 9 1.3133 nan 0.0010 0.0003
## 10 1.3125 nan 0.0010 0.0004
## 20 1.3046 nan 0.0010 0.0003
## 40 1.2891 nan 0.0010 0.0004
## 60 1.2743 nan 0.0010 0.0003
## 80 1.2596 nan 0.0010 0.0003
## 100 1.2455 nan 0.0010 0.0003
## 120 1.2317 nan 0.0010 0.0003
## 140 1.2182 nan 0.0010 0.0003
## 160 1.2055 nan 0.0010 0.0003
## 180 1.1928 nan 0.0010 0.0003
## 200 1.1806 nan 0.0010 0.0002
## 220 1.1684 nan 0.0010 0.0003
## 240 1.1570 nan 0.0010 0.0003
## 260 1.1458 nan 0.0010 0.0002
## 280 1.1353 nan 0.0010 0.0002
## 300 1.1247 nan 0.0010 0.0002
## 320 1.1146 nan 0.0010 0.0002
## 340 1.1045 nan 0.0010 0.0002
## 360 1.0949 nan 0.0010 0.0002
## 380 1.0852 nan 0.0010 0.0002
## 400 1.0760 nan 0.0010 0.0002
## 420 1.0671 nan 0.0010 0.0001
## 440 1.0582 nan 0.0010 0.0002
## 460 1.0495 nan 0.0010 0.0002
## 480 1.0412 nan 0.0010 0.0002
## 500 1.0332 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3199 nan 0.0010 0.0004
## 2 1.3191 nan 0.0010 0.0003
## 3 1.3184 nan 0.0010 0.0003
## 4 1.3175 nan 0.0010 0.0004
## 5 1.3168 nan 0.0010 0.0003
## 6 1.3160 nan 0.0010 0.0004
## 7 1.3151 nan 0.0010 0.0004
## 8 1.3142 nan 0.0010 0.0004
## 9 1.3135 nan 0.0010 0.0003
## 10 1.3127 nan 0.0010 0.0004
## 20 1.3047 nan 0.0010 0.0004
## 40 1.2892 nan 0.0010 0.0003
## 60 1.2741 nan 0.0010 0.0003
## 80 1.2594 nan 0.0010 0.0004
## 100 1.2453 nan 0.0010 0.0003
## 120 1.2314 nan 0.0010 0.0003
## 140 1.2178 nan 0.0010 0.0003
## 160 1.2051 nan 0.0010 0.0003
## 180 1.1926 nan 0.0010 0.0003
## 200 1.1808 nan 0.0010 0.0002
## 220 1.1689 nan 0.0010 0.0002
## 240 1.1578 nan 0.0010 0.0002
## 260 1.1466 nan 0.0010 0.0002
## 280 1.1360 nan 0.0010 0.0002
## 300 1.1253 nan 0.0010 0.0002
## 320 1.1151 nan 0.0010 0.0002
## 340 1.1050 nan 0.0010 0.0002
## 360 1.0953 nan 0.0010 0.0002
## 380 1.0857 nan 0.0010 0.0002
## 400 1.0764 nan 0.0010 0.0002
## 420 1.0676 nan 0.0010 0.0002
## 440 1.0589 nan 0.0010 0.0002
## 460 1.0504 nan 0.0010 0.0002
## 480 1.0424 nan 0.0010 0.0001
## 500 1.0343 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3197 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3181 nan 0.0010 0.0004
## 4 1.3173 nan 0.0010 0.0003
## 5 1.3166 nan 0.0010 0.0004
## 6 1.3158 nan 0.0010 0.0004
## 7 1.3151 nan 0.0010 0.0003
## 8 1.3143 nan 0.0010 0.0004
## 9 1.3135 nan 0.0010 0.0004
## 10 1.3127 nan 0.0010 0.0004
## 20 1.3050 nan 0.0010 0.0003
## 40 1.2895 nan 0.0010 0.0004
## 60 1.2741 nan 0.0010 0.0004
## 80 1.2600 nan 0.0010 0.0003
## 100 1.2462 nan 0.0010 0.0002
## 120 1.2326 nan 0.0010 0.0003
## 140 1.2192 nan 0.0010 0.0003
## 160 1.2065 nan 0.0010 0.0003
## 180 1.1941 nan 0.0010 0.0003
## 200 1.1818 nan 0.0010 0.0002
## 220 1.1703 nan 0.0010 0.0002
## 240 1.1589 nan 0.0010 0.0002
## 260 1.1479 nan 0.0010 0.0002
## 280 1.1372 nan 0.0010 0.0002
## 300 1.1267 nan 0.0010 0.0002
## 320 1.1163 nan 0.0010 0.0002
## 340 1.1063 nan 0.0010 0.0002
## 360 1.0965 nan 0.0010 0.0002
## 380 1.0874 nan 0.0010 0.0002
## 400 1.0780 nan 0.0010 0.0002
## 420 1.0690 nan 0.0010 0.0002
## 440 1.0602 nan 0.0010 0.0002
## 460 1.0518 nan 0.0010 0.0002
## 480 1.0435 nan 0.0010 0.0002
## 500 1.0355 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3180 nan 0.0010 0.0004
## 4 1.3172 nan 0.0010 0.0004
## 5 1.3163 nan 0.0010 0.0004
## 6 1.3154 nan 0.0010 0.0004
## 7 1.3146 nan 0.0010 0.0004
## 8 1.3137 nan 0.0010 0.0004
## 9 1.3128 nan 0.0010 0.0004
## 10 1.3121 nan 0.0010 0.0003
## 20 1.3034 nan 0.0010 0.0003
## 40 1.2867 nan 0.0010 0.0003
## 60 1.2707 nan 0.0010 0.0003
## 80 1.2552 nan 0.0010 0.0004
## 100 1.2402 nan 0.0010 0.0004
## 120 1.2254 nan 0.0010 0.0003
## 140 1.2115 nan 0.0010 0.0003
## 160 1.1977 nan 0.0010 0.0003
## 180 1.1845 nan 0.0010 0.0003
## 200 1.1716 nan 0.0010 0.0002
## 220 1.1592 nan 0.0010 0.0003
## 240 1.1473 nan 0.0010 0.0003
## 260 1.1355 nan 0.0010 0.0002
## 280 1.1243 nan 0.0010 0.0002
## 300 1.1130 nan 0.0010 0.0002
## 320 1.1019 nan 0.0010 0.0002
## 340 1.0914 nan 0.0010 0.0002
## 360 1.0811 nan 0.0010 0.0002
## 380 1.0711 nan 0.0010 0.0002
## 400 1.0614 nan 0.0010 0.0002
## 420 1.0519 nan 0.0010 0.0002
## 440 1.0426 nan 0.0010 0.0002
## 460 1.0334 nan 0.0010 0.0001
## 480 1.0248 nan 0.0010 0.0002
## 500 1.0161 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3180 nan 0.0010 0.0004
## 4 1.3171 nan 0.0010 0.0004
## 5 1.3162 nan 0.0010 0.0003
## 6 1.3154 nan 0.0010 0.0004
## 7 1.3144 nan 0.0010 0.0004
## 8 1.3136 nan 0.0010 0.0003
## 9 1.3128 nan 0.0010 0.0004
## 10 1.3120 nan 0.0010 0.0004
## 20 1.3037 nan 0.0010 0.0003
## 40 1.2872 nan 0.0010 0.0003
## 60 1.2711 nan 0.0010 0.0003
## 80 1.2555 nan 0.0010 0.0003
## 100 1.2405 nan 0.0010 0.0003
## 120 1.2259 nan 0.0010 0.0003
## 140 1.2117 nan 0.0010 0.0003
## 160 1.1979 nan 0.0010 0.0003
## 180 1.1849 nan 0.0010 0.0003
## 200 1.1720 nan 0.0010 0.0003
## 220 1.1596 nan 0.0010 0.0002
## 240 1.1478 nan 0.0010 0.0003
## 260 1.1361 nan 0.0010 0.0003
## 280 1.1247 nan 0.0010 0.0002
## 300 1.1134 nan 0.0010 0.0002
## 320 1.1028 nan 0.0010 0.0002
## 340 1.0923 nan 0.0010 0.0002
## 360 1.0821 nan 0.0010 0.0002
## 380 1.0720 nan 0.0010 0.0002
## 400 1.0623 nan 0.0010 0.0002
## 420 1.0527 nan 0.0010 0.0002
## 440 1.0438 nan 0.0010 0.0002
## 460 1.0348 nan 0.0010 0.0002
## 480 1.0262 nan 0.0010 0.0002
## 500 1.0177 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3180 nan 0.0010 0.0004
## 4 1.3172 nan 0.0010 0.0004
## 5 1.3163 nan 0.0010 0.0004
## 6 1.3155 nan 0.0010 0.0004
## 7 1.3146 nan 0.0010 0.0004
## 8 1.3137 nan 0.0010 0.0004
## 9 1.3129 nan 0.0010 0.0004
## 10 1.3120 nan 0.0010 0.0004
## 20 1.3037 nan 0.0010 0.0004
## 40 1.2874 nan 0.0010 0.0003
## 60 1.2716 nan 0.0010 0.0004
## 80 1.2561 nan 0.0010 0.0003
## 100 1.2413 nan 0.0010 0.0003
## 120 1.2269 nan 0.0010 0.0003
## 140 1.2128 nan 0.0010 0.0003
## 160 1.1992 nan 0.0010 0.0003
## 180 1.1862 nan 0.0010 0.0003
## 200 1.1733 nan 0.0010 0.0003
## 220 1.1611 nan 0.0010 0.0003
## 240 1.1490 nan 0.0010 0.0002
## 260 1.1374 nan 0.0010 0.0003
## 280 1.1264 nan 0.0010 0.0002
## 300 1.1152 nan 0.0010 0.0002
## 320 1.1047 nan 0.0010 0.0002
## 340 1.0941 nan 0.0010 0.0002
## 360 1.0840 nan 0.0010 0.0002
## 380 1.0742 nan 0.0010 0.0002
## 400 1.0644 nan 0.0010 0.0002
## 420 1.0551 nan 0.0010 0.0002
## 440 1.0463 nan 0.0010 0.0001
## 460 1.0373 nan 0.0010 0.0002
## 480 1.0287 nan 0.0010 0.0002
## 500 1.0203 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0003
## 3 1.3180 nan 0.0010 0.0004
## 4 1.3171 nan 0.0010 0.0004
## 5 1.3162 nan 0.0010 0.0004
## 6 1.3153 nan 0.0010 0.0004
## 7 1.3144 nan 0.0010 0.0004
## 8 1.3135 nan 0.0010 0.0004
## 9 1.3126 nan 0.0010 0.0004
## 10 1.3117 nan 0.0010 0.0004
## 20 1.3027 nan 0.0010 0.0004
## 40 1.2855 nan 0.0010 0.0004
## 60 1.2686 nan 0.0010 0.0004
## 80 1.2522 nan 0.0010 0.0004
## 100 1.2366 nan 0.0010 0.0003
## 120 1.2214 nan 0.0010 0.0003
## 140 1.2064 nan 0.0010 0.0003
## 160 1.1922 nan 0.0010 0.0003
## 180 1.1784 nan 0.0010 0.0003
## 200 1.1650 nan 0.0010 0.0003
## 220 1.1523 nan 0.0010 0.0002
## 240 1.1398 nan 0.0010 0.0003
## 260 1.1276 nan 0.0010 0.0003
## 280 1.1156 nan 0.0010 0.0003
## 300 1.1042 nan 0.0010 0.0002
## 320 1.0927 nan 0.0010 0.0002
## 340 1.0820 nan 0.0010 0.0002
## 360 1.0714 nan 0.0010 0.0002
## 380 1.0610 nan 0.0010 0.0002
## 400 1.0508 nan 0.0010 0.0002
## 420 1.0410 nan 0.0010 0.0002
## 440 1.0314 nan 0.0010 0.0001
## 460 1.0221 nan 0.0010 0.0002
## 480 1.0129 nan 0.0010 0.0002
## 500 1.0040 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3197 nan 0.0010 0.0004
## 2 1.3188 nan 0.0010 0.0004
## 3 1.3179 nan 0.0010 0.0004
## 4 1.3169 nan 0.0010 0.0004
## 5 1.3160 nan 0.0010 0.0004
## 6 1.3152 nan 0.0010 0.0004
## 7 1.3143 nan 0.0010 0.0003
## 8 1.3133 nan 0.0010 0.0005
## 9 1.3124 nan 0.0010 0.0004
## 10 1.3115 nan 0.0010 0.0004
## 20 1.3027 nan 0.0010 0.0004
## 40 1.2855 nan 0.0010 0.0004
## 60 1.2687 nan 0.0010 0.0004
## 80 1.2523 nan 0.0010 0.0004
## 100 1.2369 nan 0.0010 0.0003
## 120 1.2216 nan 0.0010 0.0004
## 140 1.2069 nan 0.0010 0.0003
## 160 1.1926 nan 0.0010 0.0003
## 180 1.1786 nan 0.0010 0.0003
## 200 1.1651 nan 0.0010 0.0003
## 220 1.1525 nan 0.0010 0.0003
## 240 1.1397 nan 0.0010 0.0003
## 260 1.1277 nan 0.0010 0.0003
## 280 1.1158 nan 0.0010 0.0003
## 300 1.1041 nan 0.0010 0.0003
## 320 1.0929 nan 0.0010 0.0003
## 340 1.0820 nan 0.0010 0.0002
## 360 1.0715 nan 0.0010 0.0002
## 380 1.0611 nan 0.0010 0.0002
## 400 1.0509 nan 0.0010 0.0002
## 420 1.0409 nan 0.0010 0.0002
## 440 1.0311 nan 0.0010 0.0002
## 460 1.0217 nan 0.0010 0.0002
## 480 1.0127 nan 0.0010 0.0002
## 500 1.0038 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3190 nan 0.0010 0.0004
## 3 1.3181 nan 0.0010 0.0003
## 4 1.3173 nan 0.0010 0.0004
## 5 1.3164 nan 0.0010 0.0004
## 6 1.3155 nan 0.0010 0.0004
## 7 1.3146 nan 0.0010 0.0003
## 8 1.3138 nan 0.0010 0.0004
## 9 1.3128 nan 0.0010 0.0004
## 10 1.3119 nan 0.0010 0.0004
## 20 1.3031 nan 0.0010 0.0003
## 40 1.2863 nan 0.0010 0.0004
## 60 1.2697 nan 0.0010 0.0003
## 80 1.2536 nan 0.0010 0.0003
## 100 1.2381 nan 0.0010 0.0003
## 120 1.2233 nan 0.0010 0.0003
## 140 1.2088 nan 0.0010 0.0003
## 160 1.1947 nan 0.0010 0.0003
## 180 1.1809 nan 0.0010 0.0003
## 200 1.1676 nan 0.0010 0.0002
## 220 1.1551 nan 0.0010 0.0003
## 240 1.1427 nan 0.0010 0.0003
## 260 1.1304 nan 0.0010 0.0003
## 280 1.1186 nan 0.0010 0.0003
## 300 1.1074 nan 0.0010 0.0002
## 320 1.0964 nan 0.0010 0.0002
## 340 1.0856 nan 0.0010 0.0002
## 360 1.0751 nan 0.0010 0.0002
## 380 1.0652 nan 0.0010 0.0002
## 400 1.0553 nan 0.0010 0.0002
## 420 1.0457 nan 0.0010 0.0003
## 440 1.0365 nan 0.0010 0.0002
## 460 1.0273 nan 0.0010 0.0002
## 480 1.0182 nan 0.0010 0.0002
## 500 1.0094 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3123 nan 0.0100 0.0040
## 2 1.3041 nan 0.0100 0.0031
## 3 1.2962 nan 0.0100 0.0038
## 4 1.2881 nan 0.0100 0.0034
## 5 1.2808 nan 0.0100 0.0033
## 6 1.2731 nan 0.0100 0.0033
## 7 1.2661 nan 0.0100 0.0031
## 8 1.2586 nan 0.0100 0.0033
## 9 1.2511 nan 0.0100 0.0033
## 10 1.2441 nan 0.0100 0.0028
## 20 1.1780 nan 0.0100 0.0028
## 40 1.0724 nan 0.0100 0.0021
## 60 0.9930 nan 0.0100 0.0018
## 80 0.9314 nan 0.0100 0.0010
## 100 0.8818 nan 0.0100 0.0008
## 120 0.8418 nan 0.0100 0.0005
## 140 0.8087 nan 0.0100 0.0007
## 160 0.7800 nan 0.0100 0.0004
## 180 0.7550 nan 0.0100 0.0003
## 200 0.7325 nan 0.0100 0.0001
## 220 0.7139 nan 0.0100 0.0002
## 240 0.6967 nan 0.0100 0.0001
## 260 0.6815 nan 0.0100 0.0002
## 280 0.6679 nan 0.0100 0.0001
## 300 0.6559 nan 0.0100 0.0001
## 320 0.6435 nan 0.0100 0.0001
## 340 0.6326 nan 0.0100 0.0001
## 360 0.6221 nan 0.0100 0.0002
## 380 0.6111 nan 0.0100 0.0001
## 400 0.6013 nan 0.0100 -0.0001
## 420 0.5915 nan 0.0100 -0.0000
## 440 0.5815 nan 0.0100 0.0000
## 460 0.5733 nan 0.0100 -0.0001
## 480 0.5654 nan 0.0100 0.0001
## 500 0.5573 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3123 nan 0.0100 0.0037
## 2 1.3047 nan 0.0100 0.0034
## 3 1.2963 nan 0.0100 0.0035
## 4 1.2884 nan 0.0100 0.0035
## 5 1.2808 nan 0.0100 0.0034
## 6 1.2733 nan 0.0100 0.0032
## 7 1.2662 nan 0.0100 0.0033
## 8 1.2585 nan 0.0100 0.0035
## 9 1.2509 nan 0.0100 0.0033
## 10 1.2442 nan 0.0100 0.0032
## 20 1.1800 nan 0.0100 0.0023
## 40 1.0777 nan 0.0100 0.0020
## 60 0.9976 nan 0.0100 0.0013
## 80 0.9326 nan 0.0100 0.0012
## 100 0.8816 nan 0.0100 0.0009
## 120 0.8410 nan 0.0100 0.0005
## 140 0.8079 nan 0.0100 0.0002
## 160 0.7795 nan 0.0100 0.0005
## 180 0.7555 nan 0.0100 0.0004
## 200 0.7343 nan 0.0100 0.0003
## 220 0.7146 nan 0.0100 0.0002
## 240 0.6971 nan 0.0100 0.0002
## 260 0.6827 nan 0.0100 -0.0002
## 280 0.6683 nan 0.0100 0.0001
## 300 0.6553 nan 0.0100 0.0001
## 320 0.6432 nan 0.0100 -0.0000
## 340 0.6317 nan 0.0100 -0.0001
## 360 0.6213 nan 0.0100 -0.0000
## 380 0.6111 nan 0.0100 0.0001
## 400 0.6015 nan 0.0100 -0.0000
## 420 0.5915 nan 0.0100 0.0001
## 440 0.5830 nan 0.0100 0.0001
## 460 0.5742 nan 0.0100 0.0000
## 480 0.5659 nan 0.0100 0.0000
## 500 0.5578 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3122 nan 0.0100 0.0041
## 2 1.3050 nan 0.0100 0.0033
## 3 1.2976 nan 0.0100 0.0035
## 4 1.2899 nan 0.0100 0.0035
## 5 1.2827 nan 0.0100 0.0035
## 6 1.2759 nan 0.0100 0.0031
## 7 1.2675 nan 0.0100 0.0040
## 8 1.2602 nan 0.0100 0.0034
## 9 1.2534 nan 0.0100 0.0030
## 10 1.2473 nan 0.0100 0.0031
## 20 1.1799 nan 0.0100 0.0030
## 40 1.0755 nan 0.0100 0.0019
## 60 0.9958 nan 0.0100 0.0014
## 80 0.9341 nan 0.0100 0.0009
## 100 0.8845 nan 0.0100 0.0009
## 120 0.8414 nan 0.0100 0.0007
## 140 0.8077 nan 0.0100 0.0005
## 160 0.7792 nan 0.0100 0.0002
## 180 0.7561 nan 0.0100 0.0004
## 200 0.7352 nan 0.0100 0.0003
## 220 0.7168 nan 0.0100 0.0003
## 240 0.7006 nan 0.0100 0.0000
## 260 0.6863 nan 0.0100 0.0000
## 280 0.6730 nan 0.0100 0.0001
## 300 0.6604 nan 0.0100 0.0001
## 320 0.6482 nan 0.0100 0.0002
## 340 0.6369 nan 0.0100 0.0001
## 360 0.6262 nan 0.0100 0.0000
## 380 0.6157 nan 0.0100 0.0000
## 400 0.6058 nan 0.0100 0.0000
## 420 0.5977 nan 0.0100 -0.0002
## 440 0.5898 nan 0.0100 -0.0001
## 460 0.5811 nan 0.0100 -0.0001
## 480 0.5727 nan 0.0100 -0.0001
## 500 0.5648 nan 0.0100 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3120 nan 0.0100 0.0036
## 2 1.3031 nan 0.0100 0.0042
## 3 1.2950 nan 0.0100 0.0031
## 4 1.2863 nan 0.0100 0.0037
## 5 1.2784 nan 0.0100 0.0035
## 6 1.2706 nan 0.0100 0.0033
## 7 1.2621 nan 0.0100 0.0038
## 8 1.2551 nan 0.0100 0.0028
## 9 1.2472 nan 0.0100 0.0033
## 10 1.2394 nan 0.0100 0.0030
## 20 1.1693 nan 0.0100 0.0031
## 40 1.0596 nan 0.0100 0.0020
## 60 0.9754 nan 0.0100 0.0015
## 80 0.9080 nan 0.0100 0.0011
## 100 0.8548 nan 0.0100 0.0008
## 120 0.8102 nan 0.0100 0.0008
## 140 0.7729 nan 0.0100 0.0003
## 160 0.7413 nan 0.0100 0.0004
## 180 0.7145 nan 0.0100 0.0001
## 200 0.6917 nan 0.0100 0.0002
## 220 0.6710 nan 0.0100 -0.0001
## 240 0.6521 nan 0.0100 -0.0000
## 260 0.6351 nan 0.0100 0.0001
## 280 0.6202 nan 0.0100 -0.0001
## 300 0.6063 nan 0.0100 -0.0001
## 320 0.5927 nan 0.0100 -0.0001
## 340 0.5789 nan 0.0100 0.0000
## 360 0.5674 nan 0.0100 0.0002
## 380 0.5567 nan 0.0100 -0.0001
## 400 0.5457 nan 0.0100 -0.0001
## 420 0.5351 nan 0.0100 -0.0000
## 440 0.5256 nan 0.0100 -0.0001
## 460 0.5159 nan 0.0100 -0.0001
## 480 0.5069 nan 0.0100 0.0001
## 500 0.4981 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3122 nan 0.0100 0.0035
## 2 1.3033 nan 0.0100 0.0039
## 3 1.2952 nan 0.0100 0.0038
## 4 1.2867 nan 0.0100 0.0041
## 5 1.2789 nan 0.0100 0.0030
## 6 1.2713 nan 0.0100 0.0034
## 7 1.2629 nan 0.0100 0.0036
## 8 1.2552 nan 0.0100 0.0036
## 9 1.2473 nan 0.0100 0.0033
## 10 1.2394 nan 0.0100 0.0033
## 20 1.1713 nan 0.0100 0.0029
## 40 1.0586 nan 0.0100 0.0016
## 60 0.9771 nan 0.0100 0.0013
## 80 0.9107 nan 0.0100 0.0011
## 100 0.8582 nan 0.0100 0.0010
## 120 0.8159 nan 0.0100 0.0006
## 140 0.7802 nan 0.0100 0.0005
## 160 0.7497 nan 0.0100 0.0005
## 180 0.7241 nan 0.0100 0.0002
## 200 0.7002 nan 0.0100 0.0004
## 220 0.6802 nan 0.0100 0.0002
## 240 0.6623 nan 0.0100 0.0001
## 260 0.6453 nan 0.0100 0.0000
## 280 0.6298 nan 0.0100 0.0000
## 300 0.6163 nan 0.0100 -0.0000
## 320 0.6034 nan 0.0100 -0.0000
## 340 0.5898 nan 0.0100 -0.0001
## 360 0.5785 nan 0.0100 -0.0001
## 380 0.5677 nan 0.0100 -0.0000
## 400 0.5570 nan 0.0100 -0.0001
## 420 0.5472 nan 0.0100 -0.0001
## 440 0.5374 nan 0.0100 0.0001
## 460 0.5277 nan 0.0100 -0.0000
## 480 0.5188 nan 0.0100 -0.0000
## 500 0.5090 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3122 nan 0.0100 0.0041
## 2 1.3038 nan 0.0100 0.0039
## 3 1.2954 nan 0.0100 0.0036
## 4 1.2866 nan 0.0100 0.0037
## 5 1.2787 nan 0.0100 0.0039
## 6 1.2701 nan 0.0100 0.0036
## 7 1.2631 nan 0.0100 0.0029
## 8 1.2550 nan 0.0100 0.0036
## 9 1.2476 nan 0.0100 0.0033
## 10 1.2405 nan 0.0100 0.0028
## 20 1.1732 nan 0.0100 0.0028
## 40 1.0661 nan 0.0100 0.0022
## 60 0.9811 nan 0.0100 0.0013
## 80 0.9147 nan 0.0100 0.0013
## 100 0.8642 nan 0.0100 0.0008
## 120 0.8209 nan 0.0100 0.0007
## 140 0.7854 nan 0.0100 0.0005
## 160 0.7556 nan 0.0100 0.0002
## 180 0.7310 nan 0.0100 0.0004
## 200 0.7078 nan 0.0100 0.0002
## 220 0.6893 nan 0.0100 0.0001
## 240 0.6720 nan 0.0100 0.0002
## 260 0.6575 nan 0.0100 0.0002
## 280 0.6422 nan 0.0100 0.0000
## 300 0.6274 nan 0.0100 -0.0001
## 320 0.6145 nan 0.0100 0.0001
## 340 0.6034 nan 0.0100 -0.0002
## 360 0.5923 nan 0.0100 0.0001
## 380 0.5801 nan 0.0100 0.0001
## 400 0.5694 nan 0.0100 -0.0001
## 420 0.5594 nan 0.0100 -0.0000
## 440 0.5506 nan 0.0100 -0.0001
## 460 0.5404 nan 0.0100 -0.0001
## 480 0.5304 nan 0.0100 -0.0001
## 500 0.5215 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3117 nan 0.0100 0.0044
## 2 1.3020 nan 0.0100 0.0040
## 3 1.2930 nan 0.0100 0.0040
## 4 1.2851 nan 0.0100 0.0037
## 5 1.2766 nan 0.0100 0.0038
## 6 1.2682 nan 0.0100 0.0039
## 7 1.2604 nan 0.0100 0.0034
## 8 1.2528 nan 0.0100 0.0032
## 9 1.2442 nan 0.0100 0.0038
## 10 1.2361 nan 0.0100 0.0036
## 20 1.1648 nan 0.0100 0.0031
## 40 1.0497 nan 0.0100 0.0024
## 60 0.9620 nan 0.0100 0.0019
## 80 0.8916 nan 0.0100 0.0014
## 100 0.8348 nan 0.0100 0.0010
## 120 0.7905 nan 0.0100 0.0007
## 140 0.7518 nan 0.0100 0.0006
## 160 0.7184 nan 0.0100 0.0005
## 180 0.6894 nan 0.0100 0.0003
## 200 0.6654 nan 0.0100 0.0002
## 220 0.6445 nan 0.0100 0.0003
## 240 0.6234 nan 0.0100 0.0001
## 260 0.6055 nan 0.0100 -0.0001
## 280 0.5873 nan 0.0100 0.0001
## 300 0.5720 nan 0.0100 0.0001
## 320 0.5574 nan 0.0100 0.0001
## 340 0.5427 nan 0.0100 0.0001
## 360 0.5295 nan 0.0100 0.0001
## 380 0.5171 nan 0.0100 0.0002
## 400 0.5057 nan 0.0100 -0.0002
## 420 0.4954 nan 0.0100 0.0000
## 440 0.4851 nan 0.0100 0.0000
## 460 0.4748 nan 0.0100 -0.0000
## 480 0.4645 nan 0.0100 0.0001
## 500 0.4546 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3124 nan 0.0100 0.0037
## 2 1.3027 nan 0.0100 0.0042
## 3 1.2940 nan 0.0100 0.0037
## 4 1.2855 nan 0.0100 0.0040
## 5 1.2770 nan 0.0100 0.0035
## 6 1.2684 nan 0.0100 0.0040
## 7 1.2601 nan 0.0100 0.0038
## 8 1.2518 nan 0.0100 0.0034
## 9 1.2444 nan 0.0100 0.0037
## 10 1.2364 nan 0.0100 0.0037
## 20 1.1652 nan 0.0100 0.0029
## 40 1.0507 nan 0.0100 0.0019
## 60 0.9644 nan 0.0100 0.0016
## 80 0.8942 nan 0.0100 0.0012
## 100 0.8391 nan 0.0100 0.0008
## 120 0.7949 nan 0.0100 0.0004
## 140 0.7563 nan 0.0100 0.0004
## 160 0.7243 nan 0.0100 0.0004
## 180 0.6977 nan 0.0100 0.0004
## 200 0.6741 nan 0.0100 0.0003
## 220 0.6532 nan 0.0100 0.0000
## 240 0.6330 nan 0.0100 0.0003
## 260 0.6155 nan 0.0100 0.0001
## 280 0.5989 nan 0.0100 -0.0000
## 300 0.5837 nan 0.0100 -0.0000
## 320 0.5704 nan 0.0100 -0.0000
## 340 0.5564 nan 0.0100 -0.0000
## 360 0.5445 nan 0.0100 -0.0002
## 380 0.5321 nan 0.0100 -0.0000
## 400 0.5207 nan 0.0100 0.0000
## 420 0.5085 nan 0.0100 -0.0000
## 440 0.4973 nan 0.0100 -0.0002
## 460 0.4875 nan 0.0100 -0.0001
## 480 0.4767 nan 0.0100 0.0000
## 500 0.4675 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3119 nan 0.0100 0.0036
## 2 1.3034 nan 0.0100 0.0035
## 3 1.2953 nan 0.0100 0.0036
## 4 1.2866 nan 0.0100 0.0040
## 5 1.2780 nan 0.0100 0.0039
## 6 1.2706 nan 0.0100 0.0032
## 7 1.2621 nan 0.0100 0.0036
## 8 1.2547 nan 0.0100 0.0030
## 9 1.2467 nan 0.0100 0.0034
## 10 1.2382 nan 0.0100 0.0036
## 20 1.1689 nan 0.0100 0.0030
## 40 1.0543 nan 0.0100 0.0019
## 60 0.9676 nan 0.0100 0.0017
## 80 0.9012 nan 0.0100 0.0009
## 100 0.8470 nan 0.0100 0.0007
## 120 0.8009 nan 0.0100 0.0007
## 140 0.7642 nan 0.0100 0.0004
## 160 0.7334 nan 0.0100 0.0005
## 180 0.7063 nan 0.0100 0.0002
## 200 0.6829 nan 0.0100 0.0001
## 220 0.6623 nan 0.0100 0.0002
## 240 0.6416 nan 0.0100 0.0001
## 260 0.6242 nan 0.0100 -0.0001
## 280 0.6089 nan 0.0100 -0.0001
## 300 0.5935 nan 0.0100 -0.0000
## 320 0.5791 nan 0.0100 0.0000
## 340 0.5655 nan 0.0100 0.0001
## 360 0.5539 nan 0.0100 -0.0002
## 380 0.5420 nan 0.0100 0.0000
## 400 0.5311 nan 0.0100 0.0000
## 420 0.5219 nan 0.0100 -0.0001
## 440 0.5111 nan 0.0100 -0.0002
## 460 0.4996 nan 0.0100 0.0001
## 480 0.4902 nan 0.0100 0.0000
## 500 0.4801 nan 0.0100 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2456 nan 0.1000 0.0316
## 2 1.1810 nan 0.1000 0.0310
## 3 1.1266 nan 0.1000 0.0263
## 4 1.0780 nan 0.1000 0.0202
## 5 1.0322 nan 0.1000 0.0180
## 6 0.9898 nan 0.1000 0.0167
## 7 0.9560 nan 0.1000 0.0129
## 8 0.9260 nan 0.1000 0.0107
## 9 0.8982 nan 0.1000 0.0115
## 10 0.8767 nan 0.1000 0.0081
## 20 0.7281 nan 0.1000 -0.0005
## 40 0.6015 nan 0.1000 -0.0006
## 60 0.5156 nan 0.1000 0.0005
## 80 0.4527 nan 0.1000 -0.0006
## 100 0.4009 nan 0.1000 0.0005
## 120 0.3569 nan 0.1000 -0.0005
## 140 0.3182 nan 0.1000 -0.0010
## 160 0.2871 nan 0.1000 -0.0008
## 180 0.2599 nan 0.1000 -0.0005
## 200 0.2371 nan 0.1000 -0.0003
## 220 0.2151 nan 0.1000 -0.0009
## 240 0.1941 nan 0.1000 -0.0004
## 260 0.1747 nan 0.1000 -0.0004
## 280 0.1587 nan 0.1000 -0.0006
## 300 0.1448 nan 0.1000 -0.0000
## 320 0.1316 nan 0.1000 -0.0001
## 340 0.1214 nan 0.1000 -0.0002
## 360 0.1119 nan 0.1000 -0.0004
## 380 0.1027 nan 0.1000 -0.0003
## 400 0.0949 nan 0.1000 -0.0001
## 420 0.0869 nan 0.1000 -0.0000
## 440 0.0805 nan 0.1000 -0.0000
## 460 0.0743 nan 0.1000 -0.0001
## 480 0.0695 nan 0.1000 -0.0002
## 500 0.0644 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2418 nan 0.1000 0.0391
## 2 1.1779 nan 0.1000 0.0298
## 3 1.1207 nan 0.1000 0.0245
## 4 1.0717 nan 0.1000 0.0198
## 5 1.0295 nan 0.1000 0.0164
## 6 0.9918 nan 0.1000 0.0160
## 7 0.9560 nan 0.1000 0.0130
## 8 0.9259 nan 0.1000 0.0130
## 9 0.8995 nan 0.1000 0.0112
## 10 0.8787 nan 0.1000 0.0070
## 20 0.7310 nan 0.1000 0.0014
## 40 0.6090 nan 0.1000 -0.0001
## 60 0.5306 nan 0.1000 -0.0005
## 80 0.4639 nan 0.1000 0.0001
## 100 0.4089 nan 0.1000 -0.0009
## 120 0.3681 nan 0.1000 -0.0000
## 140 0.3270 nan 0.1000 -0.0017
## 160 0.2972 nan 0.1000 0.0000
## 180 0.2686 nan 0.1000 -0.0006
## 200 0.2474 nan 0.1000 -0.0008
## 220 0.2241 nan 0.1000 -0.0006
## 240 0.2056 nan 0.1000 -0.0008
## 260 0.1898 nan 0.1000 -0.0004
## 280 0.1756 nan 0.1000 -0.0002
## 300 0.1613 nan 0.1000 -0.0004
## 320 0.1491 nan 0.1000 -0.0004
## 340 0.1373 nan 0.1000 -0.0005
## 360 0.1274 nan 0.1000 -0.0005
## 380 0.1181 nan 0.1000 -0.0002
## 400 0.1104 nan 0.1000 -0.0002
## 420 0.1015 nan 0.1000 -0.0008
## 440 0.0940 nan 0.1000 -0.0002
## 460 0.0871 nan 0.1000 -0.0005
## 480 0.0807 nan 0.1000 -0.0002
## 500 0.0745 nan 0.1000 -0.0003
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2434 nan 0.1000 0.0371
## 2 1.1731 nan 0.1000 0.0338
## 3 1.1214 nan 0.1000 0.0259
## 4 1.0758 nan 0.1000 0.0182
## 5 1.0328 nan 0.1000 0.0177
## 6 0.9950 nan 0.1000 0.0176
## 7 0.9632 nan 0.1000 0.0114
## 8 0.9366 nan 0.1000 0.0099
## 9 0.9084 nan 0.1000 0.0098
## 10 0.8871 nan 0.1000 0.0082
## 20 0.7435 nan 0.1000 0.0022
## 40 0.6234 nan 0.1000 -0.0003
## 60 0.5416 nan 0.1000 -0.0007
## 80 0.4831 nan 0.1000 0.0006
## 100 0.4295 nan 0.1000 -0.0004
## 120 0.3875 nan 0.1000 -0.0002
## 140 0.3506 nan 0.1000 -0.0014
## 160 0.3186 nan 0.1000 -0.0020
## 180 0.2925 nan 0.1000 -0.0012
## 200 0.2688 nan 0.1000 -0.0005
## 220 0.2449 nan 0.1000 -0.0005
## 240 0.2238 nan 0.1000 -0.0015
## 260 0.2067 nan 0.1000 -0.0010
## 280 0.1906 nan 0.1000 -0.0002
## 300 0.1754 nan 0.1000 -0.0004
## 320 0.1610 nan 0.1000 -0.0003
## 340 0.1487 nan 0.1000 -0.0004
## 360 0.1371 nan 0.1000 -0.0002
## 380 0.1275 nan 0.1000 -0.0004
## 400 0.1196 nan 0.1000 -0.0005
## 420 0.1100 nan 0.1000 -0.0004
## 440 0.1020 nan 0.1000 -0.0003
## 460 0.0945 nan 0.1000 -0.0002
## 480 0.0877 nan 0.1000 -0.0004
## 500 0.0821 nan 0.1000 -0.0003
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2345 nan 0.1000 0.0360
## 2 1.1595 nan 0.1000 0.0307
## 3 1.1004 nan 0.1000 0.0239
## 4 1.0533 nan 0.1000 0.0206
## 5 1.0109 nan 0.1000 0.0166
## 6 0.9692 nan 0.1000 0.0165
## 7 0.9340 nan 0.1000 0.0145
## 8 0.9020 nan 0.1000 0.0127
## 9 0.8771 nan 0.1000 0.0100
## 10 0.8545 nan 0.1000 0.0068
## 20 0.6986 nan 0.1000 0.0014
## 40 0.5347 nan 0.1000 -0.0017
## 60 0.4510 nan 0.1000 -0.0013
## 80 0.3799 nan 0.1000 -0.0006
## 100 0.3290 nan 0.1000 -0.0001
## 120 0.2911 nan 0.1000 -0.0007
## 140 0.2570 nan 0.1000 -0.0008
## 160 0.2259 nan 0.1000 -0.0006
## 180 0.1992 nan 0.1000 -0.0001
## 200 0.1756 nan 0.1000 -0.0002
## 220 0.1561 nan 0.1000 -0.0000
## 240 0.1396 nan 0.1000 -0.0003
## 260 0.1256 nan 0.1000 -0.0004
## 280 0.1128 nan 0.1000 -0.0005
## 300 0.1019 nan 0.1000 -0.0001
## 320 0.0913 nan 0.1000 0.0000
## 340 0.0823 nan 0.1000 -0.0002
## 360 0.0749 nan 0.1000 -0.0003
## 380 0.0674 nan 0.1000 -0.0001
## 400 0.0612 nan 0.1000 -0.0003
## 420 0.0554 nan 0.1000 -0.0002
## 440 0.0503 nan 0.1000 -0.0001
## 460 0.0461 nan 0.1000 -0.0002
## 480 0.0416 nan 0.1000 -0.0001
## 500 0.0378 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2346 nan 0.1000 0.0395
## 2 1.1680 nan 0.1000 0.0289
## 3 1.1009 nan 0.1000 0.0282
## 4 1.0494 nan 0.1000 0.0220
## 5 1.0032 nan 0.1000 0.0209
## 6 0.9688 nan 0.1000 0.0114
## 7 0.9327 nan 0.1000 0.0153
## 8 0.8994 nan 0.1000 0.0131
## 9 0.8727 nan 0.1000 0.0097
## 10 0.8474 nan 0.1000 0.0095
## 20 0.7014 nan 0.1000 0.0005
## 40 0.5626 nan 0.1000 -0.0016
## 60 0.4769 nan 0.1000 0.0011
## 80 0.4091 nan 0.1000 0.0001
## 100 0.3533 nan 0.1000 -0.0007
## 120 0.3019 nan 0.1000 -0.0011
## 140 0.2676 nan 0.1000 -0.0013
## 160 0.2363 nan 0.1000 -0.0007
## 180 0.2063 nan 0.1000 -0.0003
## 200 0.1845 nan 0.1000 -0.0003
## 220 0.1633 nan 0.1000 -0.0002
## 240 0.1450 nan 0.1000 -0.0001
## 260 0.1302 nan 0.1000 -0.0006
## 280 0.1172 nan 0.1000 -0.0008
## 300 0.1046 nan 0.1000 -0.0004
## 320 0.0939 nan 0.1000 0.0000
## 340 0.0852 nan 0.1000 -0.0002
## 360 0.0767 nan 0.1000 -0.0001
## 380 0.0702 nan 0.1000 -0.0002
## 400 0.0640 nan 0.1000 -0.0002
## 420 0.0584 nan 0.1000 -0.0004
## 440 0.0528 nan 0.1000 -0.0002
## 460 0.0478 nan 0.1000 -0.0002
## 480 0.0436 nan 0.1000 -0.0000
## 500 0.0398 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2428 nan 0.1000 0.0401
## 2 1.1729 nan 0.1000 0.0333
## 3 1.1079 nan 0.1000 0.0252
## 4 1.0589 nan 0.1000 0.0186
## 5 1.0165 nan 0.1000 0.0183
## 6 0.9751 nan 0.1000 0.0168
## 7 0.9420 nan 0.1000 0.0114
## 8 0.9083 nan 0.1000 0.0132
## 9 0.8812 nan 0.1000 0.0113
## 10 0.8562 nan 0.1000 0.0106
## 20 0.7081 nan 0.1000 0.0031
## 40 0.5608 nan 0.1000 -0.0009
## 60 0.4785 nan 0.1000 -0.0021
## 80 0.4112 nan 0.1000 -0.0006
## 100 0.3574 nan 0.1000 0.0002
## 120 0.3149 nan 0.1000 -0.0001
## 140 0.2733 nan 0.1000 -0.0003
## 160 0.2402 nan 0.1000 -0.0007
## 180 0.2127 nan 0.1000 -0.0008
## 200 0.1908 nan 0.1000 -0.0004
## 220 0.1724 nan 0.1000 -0.0009
## 240 0.1568 nan 0.1000 -0.0003
## 260 0.1416 nan 0.1000 -0.0005
## 280 0.1294 nan 0.1000 -0.0002
## 300 0.1165 nan 0.1000 -0.0004
## 320 0.1061 nan 0.1000 -0.0004
## 340 0.0953 nan 0.1000 -0.0004
## 360 0.0861 nan 0.1000 -0.0002
## 380 0.0783 nan 0.1000 -0.0002
## 400 0.0711 nan 0.1000 -0.0002
## 420 0.0652 nan 0.1000 -0.0003
## 440 0.0596 nan 0.1000 -0.0003
## 460 0.0543 nan 0.1000 -0.0004
## 480 0.0497 nan 0.1000 -0.0001
## 500 0.0454 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2364 nan 0.1000 0.0389
## 2 1.1560 nan 0.1000 0.0370
## 3 1.0984 nan 0.1000 0.0245
## 4 1.0479 nan 0.1000 0.0195
## 5 1.0030 nan 0.1000 0.0190
## 6 0.9632 nan 0.1000 0.0144
## 7 0.9257 nan 0.1000 0.0133
## 8 0.8906 nan 0.1000 0.0141
## 9 0.8603 nan 0.1000 0.0094
## 10 0.8354 nan 0.1000 0.0104
## 20 0.6746 nan 0.1000 0.0004
## 40 0.5164 nan 0.1000 -0.0006
## 60 0.4215 nan 0.1000 -0.0005
## 80 0.3463 nan 0.1000 -0.0009
## 100 0.2864 nan 0.1000 0.0003
## 120 0.2421 nan 0.1000 -0.0001
## 140 0.2126 nan 0.1000 -0.0005
## 160 0.1817 nan 0.1000 -0.0004
## 180 0.1563 nan 0.1000 -0.0004
## 200 0.1370 nan 0.1000 -0.0007
## 220 0.1192 nan 0.1000 -0.0002
## 240 0.1050 nan 0.1000 0.0000
## 260 0.0915 nan 0.1000 -0.0002
## 280 0.0808 nan 0.1000 -0.0003
## 300 0.0706 nan 0.1000 -0.0001
## 320 0.0626 nan 0.1000 -0.0001
## 340 0.0560 nan 0.1000 -0.0001
## 360 0.0500 nan 0.1000 -0.0000
## 380 0.0442 nan 0.1000 -0.0001
## 400 0.0393 nan 0.1000 -0.0001
## 420 0.0355 nan 0.1000 -0.0001
## 440 0.0315 nan 0.1000 -0.0001
## 460 0.0278 nan 0.1000 0.0000
## 480 0.0251 nan 0.1000 -0.0000
## 500 0.0227 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2403 nan 0.1000 0.0325
## 2 1.1646 nan 0.1000 0.0346
## 3 1.1061 nan 0.1000 0.0261
## 4 1.0580 nan 0.1000 0.0196
## 5 1.0115 nan 0.1000 0.0191
## 6 0.9752 nan 0.1000 0.0151
## 7 0.9355 nan 0.1000 0.0143
## 8 0.9056 nan 0.1000 0.0117
## 9 0.8756 nan 0.1000 0.0118
## 10 0.8509 nan 0.1000 0.0092
## 20 0.6878 nan 0.1000 0.0039
## 40 0.5271 nan 0.1000 0.0007
## 60 0.4276 nan 0.1000 -0.0017
## 80 0.3572 nan 0.1000 -0.0010
## 100 0.3020 nan 0.1000 -0.0007
## 120 0.2516 nan 0.1000 0.0009
## 140 0.2124 nan 0.1000 0.0000
## 160 0.1820 nan 0.1000 -0.0007
## 180 0.1583 nan 0.1000 -0.0001
## 200 0.1398 nan 0.1000 -0.0005
## 220 0.1210 nan 0.1000 -0.0003
## 240 0.1061 nan 0.1000 -0.0003
## 260 0.0925 nan 0.1000 -0.0006
## 280 0.0811 nan 0.1000 -0.0003
## 300 0.0715 nan 0.1000 -0.0003
## 320 0.0634 nan 0.1000 -0.0002
## 340 0.0560 nan 0.1000 -0.0003
## 360 0.0501 nan 0.1000 -0.0001
## 380 0.0444 nan 0.1000 -0.0002
## 400 0.0393 nan 0.1000 -0.0002
## 420 0.0347 nan 0.1000 -0.0000
## 440 0.0310 nan 0.1000 -0.0001
## 460 0.0276 nan 0.1000 -0.0001
## 480 0.0248 nan 0.1000 -0.0001
## 500 0.0219 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2318 nan 0.1000 0.0401
## 2 1.1584 nan 0.1000 0.0338
## 3 1.1019 nan 0.1000 0.0237
## 4 1.0516 nan 0.1000 0.0230
## 5 1.0042 nan 0.1000 0.0188
## 6 0.9672 nan 0.1000 0.0166
## 7 0.9353 nan 0.1000 0.0120
## 8 0.9007 nan 0.1000 0.0134
## 9 0.8710 nan 0.1000 0.0098
## 10 0.8457 nan 0.1000 0.0094
## 20 0.6841 nan 0.1000 0.0046
## 40 0.5418 nan 0.1000 -0.0010
## 60 0.4428 nan 0.1000 -0.0006
## 80 0.3716 nan 0.1000 -0.0004
## 100 0.3168 nan 0.1000 -0.0014
## 120 0.2746 nan 0.1000 -0.0008
## 140 0.2364 nan 0.1000 -0.0010
## 160 0.2067 nan 0.1000 -0.0004
## 180 0.1816 nan 0.1000 -0.0011
## 200 0.1590 nan 0.1000 -0.0007
## 220 0.1413 nan 0.1000 -0.0006
## 240 0.1245 nan 0.1000 -0.0005
## 260 0.1102 nan 0.1000 -0.0004
## 280 0.0971 nan 0.1000 -0.0005
## 300 0.0857 nan 0.1000 -0.0004
## 320 0.0754 nan 0.1000 -0.0001
## 340 0.0669 nan 0.1000 -0.0002
## 360 0.0598 nan 0.1000 -0.0002
## 380 0.0536 nan 0.1000 -0.0001
## 400 0.0477 nan 0.1000 -0.0001
## 420 0.0433 nan 0.1000 -0.0002
## 440 0.0385 nan 0.1000 -0.0002
## 460 0.0349 nan 0.1000 -0.0003
## 480 0.0313 nan 0.1000 -0.0001
## 500 0.0282 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3204 nan 0.0010 0.0004
## 2 1.3195 nan 0.0010 0.0004
## 3 1.3187 nan 0.0010 0.0003
## 4 1.3179 nan 0.0010 0.0004
## 5 1.3171 nan 0.0010 0.0004
## 6 1.3162 nan 0.0010 0.0004
## 7 1.3154 nan 0.0010 0.0003
## 8 1.3147 nan 0.0010 0.0003
## 9 1.3140 nan 0.0010 0.0003
## 10 1.3131 nan 0.0010 0.0004
## 20 1.3052 nan 0.0010 0.0004
## 40 1.2896 nan 0.0010 0.0004
## 60 1.2745 nan 0.0010 0.0003
## 80 1.2598 nan 0.0010 0.0003
## 100 1.2458 nan 0.0010 0.0003
## 120 1.2321 nan 0.0010 0.0003
## 140 1.2187 nan 0.0010 0.0003
## 160 1.2057 nan 0.0010 0.0003
## 180 1.1933 nan 0.0010 0.0002
## 200 1.1814 nan 0.0010 0.0003
## 220 1.1698 nan 0.0010 0.0002
## 240 1.1586 nan 0.0010 0.0003
## 260 1.1474 nan 0.0010 0.0002
## 280 1.1367 nan 0.0010 0.0002
## 300 1.1262 nan 0.0010 0.0002
## 320 1.1161 nan 0.0010 0.0002
## 340 1.1060 nan 0.0010 0.0002
## 360 1.0964 nan 0.0010 0.0002
## 380 1.0868 nan 0.0010 0.0002
## 400 1.0777 nan 0.0010 0.0002
## 420 1.0690 nan 0.0010 0.0002
## 440 1.0604 nan 0.0010 0.0002
## 460 1.0519 nan 0.0010 0.0002
## 480 1.0436 nan 0.0010 0.0002
## 500 1.0355 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3204 nan 0.0010 0.0004
## 2 1.3195 nan 0.0010 0.0004
## 3 1.3187 nan 0.0010 0.0004
## 4 1.3179 nan 0.0010 0.0004
## 5 1.3170 nan 0.0010 0.0004
## 6 1.3163 nan 0.0010 0.0003
## 7 1.3155 nan 0.0010 0.0004
## 8 1.3148 nan 0.0010 0.0003
## 9 1.3140 nan 0.0010 0.0004
## 10 1.3132 nan 0.0010 0.0004
## 20 1.3051 nan 0.0010 0.0004
## 40 1.2898 nan 0.0010 0.0004
## 60 1.2747 nan 0.0010 0.0003
## 80 1.2601 nan 0.0010 0.0003
## 100 1.2462 nan 0.0010 0.0003
## 120 1.2325 nan 0.0010 0.0003
## 140 1.2192 nan 0.0010 0.0003
## 160 1.2062 nan 0.0010 0.0003
## 180 1.1937 nan 0.0010 0.0003
## 200 1.1815 nan 0.0010 0.0003
## 220 1.1698 nan 0.0010 0.0003
## 240 1.1587 nan 0.0010 0.0003
## 260 1.1477 nan 0.0010 0.0003
## 280 1.1373 nan 0.0010 0.0002
## 300 1.1269 nan 0.0010 0.0002
## 320 1.1169 nan 0.0010 0.0002
## 340 1.1072 nan 0.0010 0.0002
## 360 1.0976 nan 0.0010 0.0002
## 380 1.0885 nan 0.0010 0.0002
## 400 1.0794 nan 0.0010 0.0002
## 420 1.0706 nan 0.0010 0.0002
## 440 1.0622 nan 0.0010 0.0002
## 460 1.0537 nan 0.0010 0.0001
## 480 1.0455 nan 0.0010 0.0002
## 500 1.0376 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3204 nan 0.0010 0.0004
## 2 1.3195 nan 0.0010 0.0004
## 3 1.3187 nan 0.0010 0.0004
## 4 1.3180 nan 0.0010 0.0003
## 5 1.3172 nan 0.0010 0.0004
## 6 1.3163 nan 0.0010 0.0004
## 7 1.3155 nan 0.0010 0.0003
## 8 1.3146 nan 0.0010 0.0004
## 9 1.3138 nan 0.0010 0.0004
## 10 1.3130 nan 0.0010 0.0004
## 20 1.3052 nan 0.0010 0.0003
## 40 1.2896 nan 0.0010 0.0003
## 60 1.2746 nan 0.0010 0.0003
## 80 1.2603 nan 0.0010 0.0003
## 100 1.2465 nan 0.0010 0.0003
## 120 1.2327 nan 0.0010 0.0003
## 140 1.2199 nan 0.0010 0.0003
## 160 1.2073 nan 0.0010 0.0003
## 180 1.1950 nan 0.0010 0.0003
## 200 1.1832 nan 0.0010 0.0002
## 220 1.1716 nan 0.0010 0.0002
## 240 1.1602 nan 0.0010 0.0002
## 260 1.1493 nan 0.0010 0.0002
## 280 1.1388 nan 0.0010 0.0002
## 300 1.1285 nan 0.0010 0.0002
## 320 1.1186 nan 0.0010 0.0002
## 340 1.1091 nan 0.0010 0.0002
## 360 1.0997 nan 0.0010 0.0002
## 380 1.0904 nan 0.0010 0.0002
## 400 1.0815 nan 0.0010 0.0002
## 420 1.0728 nan 0.0010 0.0002
## 440 1.0642 nan 0.0010 0.0002
## 460 1.0559 nan 0.0010 0.0002
## 480 1.0477 nan 0.0010 0.0002
## 500 1.0396 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3194 nan 0.0010 0.0004
## 3 1.3186 nan 0.0010 0.0004
## 4 1.3177 nan 0.0010 0.0004
## 5 1.3168 nan 0.0010 0.0004
## 6 1.3159 nan 0.0010 0.0004
## 7 1.3150 nan 0.0010 0.0004
## 8 1.3141 nan 0.0010 0.0004
## 9 1.3133 nan 0.0010 0.0004
## 10 1.3125 nan 0.0010 0.0004
## 20 1.3039 nan 0.0010 0.0004
## 40 1.2873 nan 0.0010 0.0004
## 60 1.2717 nan 0.0010 0.0004
## 80 1.2561 nan 0.0010 0.0003
## 100 1.2411 nan 0.0010 0.0003
## 120 1.2268 nan 0.0010 0.0003
## 140 1.2133 nan 0.0010 0.0003
## 160 1.1998 nan 0.0010 0.0003
## 180 1.1868 nan 0.0010 0.0003
## 200 1.1738 nan 0.0010 0.0003
## 220 1.1613 nan 0.0010 0.0002
## 240 1.1495 nan 0.0010 0.0003
## 260 1.1379 nan 0.0010 0.0002
## 280 1.1263 nan 0.0010 0.0002
## 300 1.1153 nan 0.0010 0.0002
## 320 1.1044 nan 0.0010 0.0002
## 340 1.0938 nan 0.0010 0.0002
## 360 1.0838 nan 0.0010 0.0002
## 380 1.0739 nan 0.0010 0.0002
## 400 1.0641 nan 0.0010 0.0002
## 420 1.0548 nan 0.0010 0.0002
## 440 1.0456 nan 0.0010 0.0002
## 460 1.0364 nan 0.0010 0.0002
## 480 1.0277 nan 0.0010 0.0002
## 500 1.0194 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3195 nan 0.0010 0.0004
## 3 1.3186 nan 0.0010 0.0004
## 4 1.3178 nan 0.0010 0.0004
## 5 1.3170 nan 0.0010 0.0003
## 6 1.3160 nan 0.0010 0.0004
## 7 1.3152 nan 0.0010 0.0004
## 8 1.3144 nan 0.0010 0.0003
## 9 1.3135 nan 0.0010 0.0004
## 10 1.3126 nan 0.0010 0.0004
## 20 1.3040 nan 0.0010 0.0004
## 40 1.2877 nan 0.0010 0.0004
## 60 1.2717 nan 0.0010 0.0004
## 80 1.2565 nan 0.0010 0.0004
## 100 1.2418 nan 0.0010 0.0003
## 120 1.2276 nan 0.0010 0.0003
## 140 1.2135 nan 0.0010 0.0003
## 160 1.1998 nan 0.0010 0.0003
## 180 1.1867 nan 0.0010 0.0002
## 200 1.1739 nan 0.0010 0.0003
## 220 1.1613 nan 0.0010 0.0003
## 240 1.1492 nan 0.0010 0.0002
## 260 1.1376 nan 0.0010 0.0002
## 280 1.1265 nan 0.0010 0.0002
## 300 1.1156 nan 0.0010 0.0002
## 320 1.1048 nan 0.0010 0.0002
## 340 1.0943 nan 0.0010 0.0002
## 360 1.0840 nan 0.0010 0.0002
## 380 1.0741 nan 0.0010 0.0001
## 400 1.0645 nan 0.0010 0.0002
## 420 1.0551 nan 0.0010 0.0002
## 440 1.0461 nan 0.0010 0.0002
## 460 1.0371 nan 0.0010 0.0002
## 480 1.0286 nan 0.0010 0.0002
## 500 1.0201 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3194 nan 0.0010 0.0004
## 3 1.3186 nan 0.0010 0.0003
## 4 1.3177 nan 0.0010 0.0004
## 5 1.3170 nan 0.0010 0.0004
## 6 1.3161 nan 0.0010 0.0004
## 7 1.3152 nan 0.0010 0.0004
## 8 1.3144 nan 0.0010 0.0004
## 9 1.3137 nan 0.0010 0.0003
## 10 1.3128 nan 0.0010 0.0004
## 20 1.3045 nan 0.0010 0.0004
## 40 1.2885 nan 0.0010 0.0003
## 60 1.2725 nan 0.0010 0.0004
## 80 1.2575 nan 0.0010 0.0003
## 100 1.2427 nan 0.0010 0.0003
## 120 1.2286 nan 0.0010 0.0003
## 140 1.2149 nan 0.0010 0.0003
## 160 1.2014 nan 0.0010 0.0003
## 180 1.1883 nan 0.0010 0.0003
## 200 1.1756 nan 0.0010 0.0003
## 220 1.1635 nan 0.0010 0.0003
## 240 1.1516 nan 0.0010 0.0003
## 260 1.1401 nan 0.0010 0.0002
## 280 1.1289 nan 0.0010 0.0002
## 300 1.1182 nan 0.0010 0.0002
## 320 1.1077 nan 0.0010 0.0002
## 340 1.0973 nan 0.0010 0.0002
## 360 1.0873 nan 0.0010 0.0002
## 380 1.0775 nan 0.0010 0.0002
## 400 1.0679 nan 0.0010 0.0002
## 420 1.0589 nan 0.0010 0.0002
## 440 1.0500 nan 0.0010 0.0002
## 460 1.0412 nan 0.0010 0.0002
## 480 1.0325 nan 0.0010 0.0002
## 500 1.0244 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3202 nan 0.0010 0.0004
## 2 1.3193 nan 0.0010 0.0004
## 3 1.3184 nan 0.0010 0.0004
## 4 1.3176 nan 0.0010 0.0004
## 5 1.3166 nan 0.0010 0.0004
## 6 1.3156 nan 0.0010 0.0004
## 7 1.3148 nan 0.0010 0.0004
## 8 1.3138 nan 0.0010 0.0004
## 9 1.3129 nan 0.0010 0.0004
## 10 1.3120 nan 0.0010 0.0004
## 20 1.3031 nan 0.0010 0.0004
## 40 1.2854 nan 0.0010 0.0004
## 60 1.2685 nan 0.0010 0.0003
## 80 1.2524 nan 0.0010 0.0004
## 100 1.2369 nan 0.0010 0.0002
## 120 1.2219 nan 0.0010 0.0003
## 140 1.2071 nan 0.0010 0.0004
## 160 1.1929 nan 0.0010 0.0003
## 180 1.1791 nan 0.0010 0.0003
## 200 1.1654 nan 0.0010 0.0003
## 220 1.1524 nan 0.0010 0.0003
## 240 1.1399 nan 0.0010 0.0002
## 260 1.1277 nan 0.0010 0.0003
## 280 1.1158 nan 0.0010 0.0003
## 300 1.1043 nan 0.0010 0.0002
## 320 1.0932 nan 0.0010 0.0002
## 340 1.0823 nan 0.0010 0.0002
## 360 1.0716 nan 0.0010 0.0002
## 380 1.0614 nan 0.0010 0.0002
## 400 1.0516 nan 0.0010 0.0002
## 420 1.0420 nan 0.0010 0.0002
## 440 1.0325 nan 0.0010 0.0002
## 460 1.0233 nan 0.0010 0.0002
## 480 1.0144 nan 0.0010 0.0002
## 500 1.0056 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3193 nan 0.0010 0.0004
## 3 1.3184 nan 0.0010 0.0004
## 4 1.3175 nan 0.0010 0.0004
## 5 1.3167 nan 0.0010 0.0004
## 6 1.3158 nan 0.0010 0.0004
## 7 1.3149 nan 0.0010 0.0004
## 8 1.3140 nan 0.0010 0.0004
## 9 1.3131 nan 0.0010 0.0004
## 10 1.3123 nan 0.0010 0.0004
## 20 1.3037 nan 0.0010 0.0004
## 40 1.2866 nan 0.0010 0.0004
## 60 1.2701 nan 0.0010 0.0003
## 80 1.2543 nan 0.0010 0.0004
## 100 1.2387 nan 0.0010 0.0004
## 120 1.2236 nan 0.0010 0.0003
## 140 1.2093 nan 0.0010 0.0003
## 160 1.1953 nan 0.0010 0.0003
## 180 1.1819 nan 0.0010 0.0003
## 200 1.1685 nan 0.0010 0.0003
## 220 1.1558 nan 0.0010 0.0003
## 240 1.1435 nan 0.0010 0.0002
## 260 1.1313 nan 0.0010 0.0003
## 280 1.1193 nan 0.0010 0.0002
## 300 1.1079 nan 0.0010 0.0003
## 320 1.0966 nan 0.0010 0.0002
## 340 1.0857 nan 0.0010 0.0002
## 360 1.0751 nan 0.0010 0.0002
## 380 1.0646 nan 0.0010 0.0002
## 400 1.0545 nan 0.0010 0.0002
## 420 1.0449 nan 0.0010 0.0002
## 440 1.0356 nan 0.0010 0.0002
## 460 1.0265 nan 0.0010 0.0002
## 480 1.0174 nan 0.0010 0.0002
## 500 1.0089 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3194 nan 0.0010 0.0004
## 3 1.3185 nan 0.0010 0.0004
## 4 1.3176 nan 0.0010 0.0004
## 5 1.3168 nan 0.0010 0.0003
## 6 1.3158 nan 0.0010 0.0004
## 7 1.3150 nan 0.0010 0.0004
## 8 1.3141 nan 0.0010 0.0004
## 9 1.3132 nan 0.0010 0.0004
## 10 1.3123 nan 0.0010 0.0004
## 20 1.3036 nan 0.0010 0.0004
## 40 1.2866 nan 0.0010 0.0003
## 60 1.2705 nan 0.0010 0.0003
## 80 1.2547 nan 0.0010 0.0004
## 100 1.2391 nan 0.0010 0.0003
## 120 1.2245 nan 0.0010 0.0003
## 140 1.2102 nan 0.0010 0.0003
## 160 1.1963 nan 0.0010 0.0003
## 180 1.1826 nan 0.0010 0.0003
## 200 1.1697 nan 0.0010 0.0003
## 220 1.1569 nan 0.0010 0.0003
## 240 1.1447 nan 0.0010 0.0003
## 260 1.1327 nan 0.0010 0.0002
## 280 1.1210 nan 0.0010 0.0002
## 300 1.1097 nan 0.0010 0.0002
## 320 1.0983 nan 0.0010 0.0002
## 340 1.0879 nan 0.0010 0.0002
## 360 1.0774 nan 0.0010 0.0002
## 380 1.0674 nan 0.0010 0.0002
## 400 1.0577 nan 0.0010 0.0002
## 420 1.0482 nan 0.0010 0.0002
## 440 1.0388 nan 0.0010 0.0002
## 460 1.0298 nan 0.0010 0.0002
## 480 1.0209 nan 0.0010 0.0002
## 500 1.0122 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3131 nan 0.0100 0.0040
## 2 1.3058 nan 0.0100 0.0035
## 3 1.2981 nan 0.0100 0.0036
## 4 1.2909 nan 0.0100 0.0035
## 5 1.2828 nan 0.0100 0.0038
## 6 1.2751 nan 0.0100 0.0036
## 7 1.2676 nan 0.0100 0.0032
## 8 1.2602 nan 0.0100 0.0031
## 9 1.2529 nan 0.0100 0.0032
## 10 1.2461 nan 0.0100 0.0034
## 20 1.1825 nan 0.0100 0.0027
## 40 1.0784 nan 0.0100 0.0020
## 60 0.9985 nan 0.0100 0.0009
## 80 0.9356 nan 0.0100 0.0013
## 100 0.8828 nan 0.0100 0.0009
## 120 0.8422 nan 0.0100 0.0008
## 140 0.8077 nan 0.0100 0.0004
## 160 0.7789 nan 0.0100 0.0005
## 180 0.7543 nan 0.0100 0.0004
## 200 0.7315 nan 0.0100 0.0005
## 220 0.7119 nan 0.0100 0.0002
## 240 0.6945 nan 0.0100 0.0001
## 260 0.6798 nan 0.0100 0.0001
## 280 0.6655 nan 0.0100 0.0001
## 300 0.6531 nan 0.0100 0.0001
## 320 0.6415 nan 0.0100 0.0001
## 340 0.6306 nan 0.0100 0.0000
## 360 0.6201 nan 0.0100 -0.0001
## 380 0.6095 nan 0.0100 0.0000
## 400 0.5996 nan 0.0100 -0.0001
## 420 0.5908 nan 0.0100 -0.0000
## 440 0.5823 nan 0.0100 -0.0001
## 460 0.5723 nan 0.0100 0.0002
## 480 0.5633 nan 0.0100 0.0000
## 500 0.5550 nan 0.0100 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3128 nan 0.0100 0.0041
## 2 1.3045 nan 0.0100 0.0035
## 3 1.2963 nan 0.0100 0.0038
## 4 1.2883 nan 0.0100 0.0032
## 5 1.2803 nan 0.0100 0.0035
## 6 1.2726 nan 0.0100 0.0031
## 7 1.2654 nan 0.0100 0.0031
## 8 1.2576 nan 0.0100 0.0036
## 9 1.2504 nan 0.0100 0.0033
## 10 1.2431 nan 0.0100 0.0033
## 20 1.1802 nan 0.0100 0.0025
## 40 1.0787 nan 0.0100 0.0020
## 60 1.0009 nan 0.0100 0.0017
## 80 0.9394 nan 0.0100 0.0012
## 100 0.8887 nan 0.0100 0.0006
## 120 0.8474 nan 0.0100 0.0007
## 140 0.8131 nan 0.0100 0.0004
## 160 0.7849 nan 0.0100 0.0005
## 180 0.7592 nan 0.0100 0.0003
## 200 0.7361 nan 0.0100 0.0003
## 220 0.7167 nan 0.0100 0.0000
## 240 0.6999 nan 0.0100 0.0001
## 260 0.6843 nan 0.0100 0.0001
## 280 0.6705 nan 0.0100 -0.0000
## 300 0.6588 nan 0.0100 0.0000
## 320 0.6465 nan 0.0100 0.0000
## 340 0.6344 nan 0.0100 0.0001
## 360 0.6239 nan 0.0100 -0.0002
## 380 0.6133 nan 0.0100 -0.0002
## 400 0.6040 nan 0.0100 0.0001
## 420 0.5946 nan 0.0100 -0.0001
## 440 0.5854 nan 0.0100 0.0000
## 460 0.5768 nan 0.0100 0.0001
## 480 0.5687 nan 0.0100 0.0000
## 500 0.5606 nan 0.0100 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3131 nan 0.0100 0.0035
## 2 1.3058 nan 0.0100 0.0033
## 3 1.2978 nan 0.0100 0.0035
## 4 1.2899 nan 0.0100 0.0036
## 5 1.2822 nan 0.0100 0.0034
## 6 1.2747 nan 0.0100 0.0032
## 7 1.2675 nan 0.0100 0.0033
## 8 1.2601 nan 0.0100 0.0034
## 9 1.2529 nan 0.0100 0.0034
## 10 1.2458 nan 0.0100 0.0032
## 20 1.1835 nan 0.0100 0.0027
## 40 1.0820 nan 0.0100 0.0019
## 60 1.0030 nan 0.0100 0.0014
## 80 0.9397 nan 0.0100 0.0010
## 100 0.8890 nan 0.0100 0.0009
## 120 0.8489 nan 0.0100 0.0007
## 140 0.8151 nan 0.0100 0.0003
## 160 0.7861 nan 0.0100 0.0003
## 180 0.7612 nan 0.0100 0.0002
## 200 0.7406 nan 0.0100 0.0003
## 220 0.7234 nan 0.0100 -0.0000
## 240 0.7079 nan 0.0100 0.0001
## 260 0.6935 nan 0.0100 -0.0000
## 280 0.6804 nan 0.0100 0.0000
## 300 0.6676 nan 0.0100 0.0000
## 320 0.6562 nan 0.0100 -0.0001
## 340 0.6449 nan 0.0100 0.0001
## 360 0.6347 nan 0.0100 -0.0001
## 380 0.6235 nan 0.0100 -0.0000
## 400 0.6146 nan 0.0100 -0.0000
## 420 0.6067 nan 0.0100 -0.0002
## 440 0.5974 nan 0.0100 0.0001
## 460 0.5893 nan 0.0100 0.0000
## 480 0.5812 nan 0.0100 -0.0001
## 500 0.5725 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3120 nan 0.0100 0.0042
## 2 1.3033 nan 0.0100 0.0037
## 3 1.2941 nan 0.0100 0.0039
## 4 1.2850 nan 0.0100 0.0038
## 5 1.2770 nan 0.0100 0.0033
## 6 1.2689 nan 0.0100 0.0036
## 7 1.2610 nan 0.0100 0.0034
## 8 1.2536 nan 0.0100 0.0037
## 9 1.2460 nan 0.0100 0.0033
## 10 1.2390 nan 0.0100 0.0030
## 20 1.1709 nan 0.0100 0.0031
## 40 1.0632 nan 0.0100 0.0015
## 60 0.9798 nan 0.0100 0.0014
## 80 0.9140 nan 0.0100 0.0011
## 100 0.8617 nan 0.0100 0.0011
## 120 0.8181 nan 0.0100 0.0005
## 140 0.7816 nan 0.0100 0.0005
## 160 0.7507 nan 0.0100 0.0003
## 180 0.7229 nan 0.0100 0.0002
## 200 0.7003 nan 0.0100 0.0002
## 220 0.6793 nan 0.0100 -0.0000
## 240 0.6605 nan 0.0100 0.0002
## 260 0.6436 nan 0.0100 -0.0001
## 280 0.6284 nan 0.0100 0.0002
## 300 0.6128 nan 0.0100 0.0001
## 320 0.5981 nan 0.0100 0.0000
## 340 0.5839 nan 0.0100 0.0001
## 360 0.5717 nan 0.0100 0.0001
## 380 0.5592 nan 0.0100 0.0001
## 400 0.5480 nan 0.0100 0.0001
## 420 0.5363 nan 0.0100 0.0001
## 440 0.5249 nan 0.0100 0.0001
## 460 0.5157 nan 0.0100 -0.0001
## 480 0.5059 nan 0.0100 0.0001
## 500 0.4965 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3119 nan 0.0100 0.0042
## 2 1.3030 nan 0.0100 0.0040
## 3 1.2949 nan 0.0100 0.0037
## 4 1.2873 nan 0.0100 0.0036
## 5 1.2792 nan 0.0100 0.0035
## 6 1.2708 nan 0.0100 0.0036
## 7 1.2634 nan 0.0100 0.0035
## 8 1.2550 nan 0.0100 0.0035
## 9 1.2471 nan 0.0100 0.0032
## 10 1.2397 nan 0.0100 0.0030
## 20 1.1713 nan 0.0100 0.0020
## 40 1.0618 nan 0.0100 0.0020
## 60 0.9801 nan 0.0100 0.0015
## 80 0.9142 nan 0.0100 0.0012
## 100 0.8624 nan 0.0100 0.0008
## 120 0.8204 nan 0.0100 0.0006
## 140 0.7852 nan 0.0100 0.0006
## 160 0.7553 nan 0.0100 0.0004
## 180 0.7309 nan 0.0100 0.0003
## 200 0.7076 nan 0.0100 0.0004
## 220 0.6884 nan 0.0100 0.0002
## 240 0.6692 nan 0.0100 0.0002
## 260 0.6520 nan 0.0100 0.0002
## 280 0.6371 nan 0.0100 0.0000
## 300 0.6227 nan 0.0100 -0.0001
## 320 0.6081 nan 0.0100 -0.0002
## 340 0.5960 nan 0.0100 0.0001
## 360 0.5845 nan 0.0100 0.0000
## 380 0.5729 nan 0.0100 0.0002
## 400 0.5618 nan 0.0100 0.0001
## 420 0.5509 nan 0.0100 0.0001
## 440 0.5405 nan 0.0100 0.0001
## 460 0.5311 nan 0.0100 0.0000
## 480 0.5220 nan 0.0100 -0.0001
## 500 0.5127 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3132 nan 0.0100 0.0035
## 2 1.3047 nan 0.0100 0.0036
## 3 1.2964 nan 0.0100 0.0038
## 4 1.2882 nan 0.0100 0.0036
## 5 1.2800 nan 0.0100 0.0036
## 6 1.2723 nan 0.0100 0.0034
## 7 1.2646 nan 0.0100 0.0035
## 8 1.2568 nan 0.0100 0.0035
## 9 1.2499 nan 0.0100 0.0028
## 10 1.2424 nan 0.0100 0.0032
## 20 1.1744 nan 0.0100 0.0030
## 40 1.0676 nan 0.0100 0.0022
## 60 0.9836 nan 0.0100 0.0014
## 80 0.9198 nan 0.0100 0.0013
## 100 0.8679 nan 0.0100 0.0010
## 120 0.8255 nan 0.0100 0.0004
## 140 0.7913 nan 0.0100 0.0003
## 160 0.7611 nan 0.0100 0.0004
## 180 0.7357 nan 0.0100 0.0001
## 200 0.7131 nan 0.0100 0.0002
## 220 0.6925 nan 0.0100 0.0003
## 240 0.6745 nan 0.0100 0.0002
## 260 0.6594 nan 0.0100 -0.0001
## 280 0.6441 nan 0.0100 0.0001
## 300 0.6302 nan 0.0100 -0.0001
## 320 0.6166 nan 0.0100 0.0002
## 340 0.6037 nan 0.0100 -0.0001
## 360 0.5904 nan 0.0100 0.0000
## 380 0.5788 nan 0.0100 -0.0003
## 400 0.5686 nan 0.0100 0.0000
## 420 0.5590 nan 0.0100 0.0000
## 440 0.5483 nan 0.0100 -0.0001
## 460 0.5387 nan 0.0100 -0.0001
## 480 0.5288 nan 0.0100 -0.0001
## 500 0.5199 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3124 nan 0.0100 0.0041
## 2 1.3036 nan 0.0100 0.0041
## 3 1.2943 nan 0.0100 0.0043
## 4 1.2855 nan 0.0100 0.0044
## 5 1.2761 nan 0.0100 0.0038
## 6 1.2674 nan 0.0100 0.0037
## 7 1.2595 nan 0.0100 0.0033
## 8 1.2515 nan 0.0100 0.0033
## 9 1.2437 nan 0.0100 0.0033
## 10 1.2358 nan 0.0100 0.0036
## 20 1.1634 nan 0.0100 0.0030
## 40 1.0494 nan 0.0100 0.0023
## 60 0.9638 nan 0.0100 0.0017
## 80 0.8956 nan 0.0100 0.0009
## 100 0.8408 nan 0.0100 0.0008
## 120 0.7944 nan 0.0100 0.0009
## 140 0.7577 nan 0.0100 0.0006
## 160 0.7251 nan 0.0100 0.0008
## 180 0.6966 nan 0.0100 0.0003
## 200 0.6715 nan 0.0100 0.0003
## 220 0.6488 nan 0.0100 0.0003
## 240 0.6307 nan 0.0100 0.0001
## 260 0.6124 nan 0.0100 0.0001
## 280 0.5952 nan 0.0100 -0.0001
## 300 0.5800 nan 0.0100 0.0002
## 320 0.5646 nan 0.0100 -0.0001
## 340 0.5496 nan 0.0100 0.0001
## 360 0.5361 nan 0.0100 -0.0000
## 380 0.5224 nan 0.0100 0.0001
## 400 0.5110 nan 0.0100 0.0001
## 420 0.4992 nan 0.0100 -0.0002
## 440 0.4881 nan 0.0100 0.0000
## 460 0.4776 nan 0.0100 -0.0001
## 480 0.4678 nan 0.0100 0.0001
## 500 0.4569 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3128 nan 0.0100 0.0040
## 2 1.3035 nan 0.0100 0.0040
## 3 1.2947 nan 0.0100 0.0040
## 4 1.2862 nan 0.0100 0.0040
## 5 1.2782 nan 0.0100 0.0035
## 6 1.2701 nan 0.0100 0.0036
## 7 1.2616 nan 0.0100 0.0034
## 8 1.2542 nan 0.0100 0.0032
## 9 1.2469 nan 0.0100 0.0030
## 10 1.2394 nan 0.0100 0.0031
## 20 1.1689 nan 0.0100 0.0027
## 40 1.0568 nan 0.0100 0.0022
## 60 0.9702 nan 0.0100 0.0017
## 80 0.9017 nan 0.0100 0.0013
## 100 0.8465 nan 0.0100 0.0010
## 120 0.8015 nan 0.0100 0.0006
## 140 0.7646 nan 0.0100 0.0004
## 160 0.7331 nan 0.0100 0.0005
## 180 0.7055 nan 0.0100 0.0002
## 200 0.6823 nan 0.0100 0.0000
## 220 0.6604 nan 0.0100 0.0001
## 240 0.6413 nan 0.0100 0.0001
## 260 0.6225 nan 0.0100 0.0001
## 280 0.6053 nan 0.0100 0.0002
## 300 0.5880 nan 0.0100 0.0001
## 320 0.5734 nan 0.0100 0.0001
## 340 0.5601 nan 0.0100 0.0001
## 360 0.5478 nan 0.0100 0.0000
## 380 0.5342 nan 0.0100 -0.0001
## 400 0.5215 nan 0.0100 0.0000
## 420 0.5094 nan 0.0100 -0.0000
## 440 0.4975 nan 0.0100 -0.0001
## 460 0.4873 nan 0.0100 0.0000
## 480 0.4769 nan 0.0100 -0.0000
## 500 0.4668 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3126 nan 0.0100 0.0039
## 2 1.3035 nan 0.0100 0.0043
## 3 1.2945 nan 0.0100 0.0037
## 4 1.2863 nan 0.0100 0.0038
## 5 1.2782 nan 0.0100 0.0033
## 6 1.2707 nan 0.0100 0.0038
## 7 1.2628 nan 0.0100 0.0036
## 8 1.2552 nan 0.0100 0.0035
## 9 1.2472 nan 0.0100 0.0036
## 10 1.2396 nan 0.0100 0.0032
## 20 1.1694 nan 0.0100 0.0029
## 40 1.0572 nan 0.0100 0.0019
## 60 0.9715 nan 0.0100 0.0015
## 80 0.9052 nan 0.0100 0.0011
## 100 0.8506 nan 0.0100 0.0010
## 120 0.8042 nan 0.0100 0.0006
## 140 0.7674 nan 0.0100 0.0004
## 160 0.7362 nan 0.0100 0.0004
## 180 0.7091 nan 0.0100 0.0004
## 200 0.6844 nan 0.0100 0.0003
## 220 0.6650 nan 0.0100 0.0001
## 240 0.6458 nan 0.0100 0.0002
## 260 0.6283 nan 0.0100 0.0001
## 280 0.6113 nan 0.0100 0.0001
## 300 0.5962 nan 0.0100 0.0001
## 320 0.5818 nan 0.0100 -0.0001
## 340 0.5685 nan 0.0100 -0.0001
## 360 0.5560 nan 0.0100 -0.0000
## 380 0.5429 nan 0.0100 0.0001
## 400 0.5310 nan 0.0100 0.0001
## 420 0.5194 nan 0.0100 -0.0001
## 440 0.5087 nan 0.0100 -0.0000
## 460 0.4983 nan 0.0100 0.0000
## 480 0.4879 nan 0.0100 0.0000
## 500 0.4791 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2430 nan 0.1000 0.0322
## 2 1.1783 nan 0.1000 0.0294
## 3 1.1196 nan 0.1000 0.0234
## 4 1.0714 nan 0.1000 0.0181
## 5 1.0270 nan 0.1000 0.0167
## 6 0.9869 nan 0.1000 0.0154
## 7 0.9533 nan 0.1000 0.0133
## 8 0.9255 nan 0.1000 0.0124
## 9 0.8998 nan 0.1000 0.0093
## 10 0.8721 nan 0.1000 0.0087
## 20 0.7289 nan 0.1000 0.0011
## 40 0.5906 nan 0.1000 -0.0008
## 60 0.5087 nan 0.1000 -0.0030
## 80 0.4387 nan 0.1000 -0.0005
## 100 0.3835 nan 0.1000 -0.0002
## 120 0.3392 nan 0.1000 -0.0002
## 140 0.3014 nan 0.1000 0.0004
## 160 0.2698 nan 0.1000 -0.0003
## 180 0.2442 nan 0.1000 -0.0000
## 200 0.2232 nan 0.1000 -0.0005
## 220 0.2035 nan 0.1000 -0.0011
## 240 0.1853 nan 0.1000 -0.0008
## 260 0.1693 nan 0.1000 0.0002
## 280 0.1526 nan 0.1000 -0.0002
## 300 0.1387 nan 0.1000 -0.0002
## 320 0.1267 nan 0.1000 -0.0002
## 340 0.1172 nan 0.1000 -0.0003
## 360 0.1074 nan 0.1000 -0.0003
## 380 0.0987 nan 0.1000 -0.0003
## 400 0.0903 nan 0.1000 -0.0002
## 420 0.0834 nan 0.1000 -0.0002
## 440 0.0774 nan 0.1000 -0.0003
## 460 0.0716 nan 0.1000 -0.0000
## 480 0.0662 nan 0.1000 -0.0002
## 500 0.0608 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2474 nan 0.1000 0.0311
## 2 1.1756 nan 0.1000 0.0311
## 3 1.1216 nan 0.1000 0.0232
## 4 1.0760 nan 0.1000 0.0181
## 5 1.0338 nan 0.1000 0.0176
## 6 0.9951 nan 0.1000 0.0178
## 7 0.9656 nan 0.1000 0.0124
## 8 0.9384 nan 0.1000 0.0118
## 9 0.9130 nan 0.1000 0.0099
## 10 0.8900 nan 0.1000 0.0091
## 20 0.7386 nan 0.1000 0.0001
## 40 0.6007 nan 0.1000 0.0000
## 60 0.5187 nan 0.1000 0.0020
## 80 0.4509 nan 0.1000 -0.0021
## 100 0.3968 nan 0.1000 -0.0011
## 120 0.3519 nan 0.1000 -0.0013
## 140 0.3131 nan 0.1000 -0.0012
## 160 0.2819 nan 0.1000 -0.0003
## 180 0.2556 nan 0.1000 -0.0005
## 200 0.2329 nan 0.1000 -0.0010
## 220 0.2119 nan 0.1000 -0.0008
## 240 0.1937 nan 0.1000 -0.0006
## 260 0.1787 nan 0.1000 -0.0004
## 280 0.1632 nan 0.1000 -0.0004
## 300 0.1502 nan 0.1000 0.0000
## 320 0.1368 nan 0.1000 -0.0004
## 340 0.1269 nan 0.1000 -0.0005
## 360 0.1171 nan 0.1000 -0.0001
## 380 0.1076 nan 0.1000 -0.0003
## 400 0.0993 nan 0.1000 -0.0003
## 420 0.0916 nan 0.1000 -0.0004
## 440 0.0846 nan 0.1000 -0.0002
## 460 0.0785 nan 0.1000 -0.0003
## 480 0.0729 nan 0.1000 -0.0002
## 500 0.0667 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2404 nan 0.1000 0.0375
## 2 1.1835 nan 0.1000 0.0263
## 3 1.1284 nan 0.1000 0.0250
## 4 1.0808 nan 0.1000 0.0212
## 5 1.0359 nan 0.1000 0.0188
## 6 1.0014 nan 0.1000 0.0146
## 7 0.9659 nan 0.1000 0.0130
## 8 0.9344 nan 0.1000 0.0115
## 9 0.9069 nan 0.1000 0.0099
## 10 0.8842 nan 0.1000 0.0098
## 20 0.7494 nan 0.1000 0.0009
## 40 0.6168 nan 0.1000 -0.0008
## 60 0.5394 nan 0.1000 -0.0010
## 80 0.4764 nan 0.1000 0.0006
## 100 0.4186 nan 0.1000 0.0002
## 120 0.3735 nan 0.1000 -0.0018
## 140 0.3385 nan 0.1000 -0.0011
## 160 0.3087 nan 0.1000 -0.0014
## 180 0.2773 nan 0.1000 -0.0007
## 200 0.2525 nan 0.1000 -0.0007
## 220 0.2312 nan 0.1000 -0.0012
## 240 0.2100 nan 0.1000 -0.0005
## 260 0.1912 nan 0.1000 -0.0003
## 280 0.1783 nan 0.1000 -0.0001
## 300 0.1640 nan 0.1000 -0.0005
## 320 0.1515 nan 0.1000 -0.0005
## 340 0.1401 nan 0.1000 -0.0003
## 360 0.1293 nan 0.1000 -0.0004
## 380 0.1199 nan 0.1000 -0.0003
## 400 0.1119 nan 0.1000 -0.0006
## 420 0.1042 nan 0.1000 -0.0003
## 440 0.0964 nan 0.1000 -0.0004
## 460 0.0904 nan 0.1000 -0.0003
## 480 0.0839 nan 0.1000 -0.0004
## 500 0.0779 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2329 nan 0.1000 0.0402
## 2 1.1694 nan 0.1000 0.0277
## 3 1.1071 nan 0.1000 0.0289
## 4 1.0575 nan 0.1000 0.0192
## 5 1.0135 nan 0.1000 0.0185
## 6 0.9778 nan 0.1000 0.0151
## 7 0.9469 nan 0.1000 0.0133
## 8 0.9123 nan 0.1000 0.0142
## 9 0.8875 nan 0.1000 0.0106
## 10 0.8642 nan 0.1000 0.0070
## 20 0.7020 nan 0.1000 0.0035
## 40 0.5641 nan 0.1000 -0.0018
## 60 0.4714 nan 0.1000 -0.0004
## 80 0.3974 nan 0.1000 0.0009
## 100 0.3357 nan 0.1000 -0.0011
## 120 0.2888 nan 0.1000 -0.0008
## 140 0.2523 nan 0.1000 0.0002
## 160 0.2221 nan 0.1000 0.0002
## 180 0.1926 nan 0.1000 -0.0002
## 200 0.1729 nan 0.1000 -0.0004
## 220 0.1534 nan 0.1000 -0.0002
## 240 0.1351 nan 0.1000 -0.0003
## 260 0.1203 nan 0.1000 -0.0002
## 280 0.1072 nan 0.1000 -0.0001
## 300 0.0968 nan 0.1000 -0.0004
## 320 0.0873 nan 0.1000 -0.0002
## 340 0.0787 nan 0.1000 -0.0003
## 360 0.0711 nan 0.1000 -0.0001
## 380 0.0638 nan 0.1000 -0.0002
## 400 0.0567 nan 0.1000 -0.0000
## 420 0.0513 nan 0.1000 -0.0002
## 440 0.0466 nan 0.1000 -0.0000
## 460 0.0422 nan 0.1000 -0.0001
## 480 0.0381 nan 0.1000 0.0000
## 500 0.0348 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2448 nan 0.1000 0.0325
## 2 1.1704 nan 0.1000 0.0300
## 3 1.1201 nan 0.1000 0.0194
## 4 1.0692 nan 0.1000 0.0216
## 5 1.0304 nan 0.1000 0.0162
## 6 0.9903 nan 0.1000 0.0131
## 7 0.9538 nan 0.1000 0.0167
## 8 0.9197 nan 0.1000 0.0160
## 9 0.8927 nan 0.1000 0.0103
## 10 0.8669 nan 0.1000 0.0094
## 20 0.7106 nan 0.1000 0.0008
## 40 0.5655 nan 0.1000 -0.0001
## 60 0.4657 nan 0.1000 -0.0007
## 80 0.3965 nan 0.1000 -0.0002
## 100 0.3370 nan 0.1000 -0.0006
## 120 0.2972 nan 0.1000 -0.0006
## 140 0.2626 nan 0.1000 -0.0005
## 160 0.2303 nan 0.1000 -0.0006
## 180 0.2024 nan 0.1000 -0.0004
## 200 0.1777 nan 0.1000 -0.0005
## 220 0.1572 nan 0.1000 -0.0005
## 240 0.1403 nan 0.1000 -0.0003
## 260 0.1252 nan 0.1000 -0.0005
## 280 0.1129 nan 0.1000 0.0002
## 300 0.1008 nan 0.1000 -0.0002
## 320 0.0901 nan 0.1000 -0.0002
## 340 0.0803 nan 0.1000 -0.0001
## 360 0.0729 nan 0.1000 -0.0002
## 380 0.0659 nan 0.1000 -0.0001
## 400 0.0597 nan 0.1000 -0.0003
## 420 0.0535 nan 0.1000 -0.0002
## 440 0.0483 nan 0.1000 -0.0001
## 460 0.0437 nan 0.1000 -0.0000
## 480 0.0401 nan 0.1000 -0.0002
## 500 0.0365 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2386 nan 0.1000 0.0349
## 2 1.1708 nan 0.1000 0.0269
## 3 1.1139 nan 0.1000 0.0246
## 4 1.0664 nan 0.1000 0.0206
## 5 1.0261 nan 0.1000 0.0157
## 6 0.9880 nan 0.1000 0.0166
## 7 0.9530 nan 0.1000 0.0113
## 8 0.9240 nan 0.1000 0.0113
## 9 0.8994 nan 0.1000 0.0082
## 10 0.8723 nan 0.1000 0.0098
## 20 0.7191 nan 0.1000 0.0027
## 40 0.5788 nan 0.1000 -0.0023
## 60 0.4854 nan 0.1000 0.0000
## 80 0.4140 nan 0.1000 -0.0003
## 100 0.3602 nan 0.1000 -0.0008
## 120 0.3159 nan 0.1000 -0.0010
## 140 0.2772 nan 0.1000 -0.0002
## 160 0.2470 nan 0.1000 -0.0006
## 180 0.2216 nan 0.1000 -0.0006
## 200 0.1973 nan 0.1000 -0.0011
## 220 0.1744 nan 0.1000 -0.0005
## 240 0.1562 nan 0.1000 -0.0006
## 260 0.1403 nan 0.1000 -0.0005
## 280 0.1262 nan 0.1000 -0.0002
## 300 0.1133 nan 0.1000 -0.0003
## 320 0.1013 nan 0.1000 -0.0006
## 340 0.0909 nan 0.1000 -0.0004
## 360 0.0821 nan 0.1000 -0.0002
## 380 0.0749 nan 0.1000 -0.0002
## 400 0.0690 nan 0.1000 -0.0000
## 420 0.0623 nan 0.1000 -0.0001
## 440 0.0569 nan 0.1000 -0.0002
## 460 0.0521 nan 0.1000 -0.0002
## 480 0.0479 nan 0.1000 -0.0002
## 500 0.0428 nan 0.1000 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2319 nan 0.1000 0.0381
## 2 1.1542 nan 0.1000 0.0324
## 3 1.0940 nan 0.1000 0.0264
## 4 1.0400 nan 0.1000 0.0220
## 5 0.9941 nan 0.1000 0.0208
## 6 0.9589 nan 0.1000 0.0134
## 7 0.9242 nan 0.1000 0.0104
## 8 0.8892 nan 0.1000 0.0120
## 9 0.8623 nan 0.1000 0.0116
## 10 0.8344 nan 0.1000 0.0104
## 20 0.6655 nan 0.1000 0.0045
## 40 0.5015 nan 0.1000 -0.0001
## 60 0.4072 nan 0.1000 -0.0001
## 80 0.3404 nan 0.1000 0.0002
## 100 0.2878 nan 0.1000 -0.0016
## 120 0.2409 nan 0.1000 -0.0009
## 140 0.2027 nan 0.1000 -0.0008
## 160 0.1721 nan 0.1000 -0.0003
## 180 0.1495 nan 0.1000 -0.0002
## 200 0.1290 nan 0.1000 -0.0002
## 220 0.1117 nan 0.1000 -0.0002
## 240 0.0982 nan 0.1000 -0.0002
## 260 0.0849 nan 0.1000 -0.0002
## 280 0.0742 nan 0.1000 -0.0002
## 300 0.0653 nan 0.1000 0.0000
## 320 0.0567 nan 0.1000 -0.0001
## 340 0.0502 nan 0.1000 -0.0002
## 360 0.0441 nan 0.1000 0.0000
## 380 0.0389 nan 0.1000 -0.0001
## 400 0.0337 nan 0.1000 -0.0001
## 420 0.0297 nan 0.1000 -0.0000
## 440 0.0260 nan 0.1000 -0.0001
## 460 0.0230 nan 0.1000 -0.0001
## 480 0.0202 nan 0.1000 -0.0000
## 500 0.0177 nan 0.1000 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2287 nan 0.1000 0.0386
## 2 1.1565 nan 0.1000 0.0321
## 3 1.0975 nan 0.1000 0.0233
## 4 1.0476 nan 0.1000 0.0205
## 5 0.9977 nan 0.1000 0.0218
## 6 0.9607 nan 0.1000 0.0158
## 7 0.9241 nan 0.1000 0.0161
## 8 0.8938 nan 0.1000 0.0098
## 9 0.8648 nan 0.1000 0.0105
## 10 0.8407 nan 0.1000 0.0087
## 20 0.6769 nan 0.1000 0.0010
## 40 0.5284 nan 0.1000 -0.0007
## 60 0.4306 nan 0.1000 -0.0011
## 80 0.3524 nan 0.1000 -0.0004
## 100 0.3006 nan 0.1000 -0.0011
## 120 0.2496 nan 0.1000 -0.0007
## 140 0.2133 nan 0.1000 -0.0005
## 160 0.1831 nan 0.1000 -0.0011
## 180 0.1603 nan 0.1000 -0.0005
## 200 0.1386 nan 0.1000 -0.0003
## 220 0.1219 nan 0.1000 -0.0005
## 240 0.1051 nan 0.1000 -0.0003
## 260 0.0910 nan 0.1000 -0.0004
## 280 0.0791 nan 0.1000 -0.0003
## 300 0.0691 nan 0.1000 -0.0004
## 320 0.0613 nan 0.1000 -0.0002
## 340 0.0546 nan 0.1000 -0.0003
## 360 0.0478 nan 0.1000 -0.0002
## 380 0.0423 nan 0.1000 -0.0001
## 400 0.0380 nan 0.1000 -0.0001
## 420 0.0335 nan 0.1000 -0.0000
## 440 0.0299 nan 0.1000 -0.0002
## 460 0.0266 nan 0.1000 -0.0000
## 480 0.0238 nan 0.1000 -0.0000
## 500 0.0211 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2481 nan 0.1000 0.0321
## 2 1.1782 nan 0.1000 0.0301
## 3 1.1144 nan 0.1000 0.0300
## 4 1.0608 nan 0.1000 0.0248
## 5 1.0125 nan 0.1000 0.0202
## 6 0.9754 nan 0.1000 0.0142
## 7 0.9393 nan 0.1000 0.0144
## 8 0.9074 nan 0.1000 0.0114
## 9 0.8798 nan 0.1000 0.0108
## 10 0.8564 nan 0.1000 0.0083
## 20 0.6899 nan 0.1000 0.0027
## 40 0.5384 nan 0.1000 -0.0006
## 60 0.4447 nan 0.1000 0.0003
## 80 0.3695 nan 0.1000 -0.0011
## 100 0.3129 nan 0.1000 -0.0006
## 120 0.2667 nan 0.1000 -0.0003
## 140 0.2294 nan 0.1000 -0.0010
## 160 0.1971 nan 0.1000 -0.0004
## 180 0.1712 nan 0.1000 -0.0005
## 200 0.1486 nan 0.1000 -0.0003
## 220 0.1308 nan 0.1000 -0.0003
## 240 0.1156 nan 0.1000 -0.0003
## 260 0.1016 nan 0.1000 -0.0002
## 280 0.0896 nan 0.1000 -0.0004
## 300 0.0802 nan 0.1000 -0.0003
## 320 0.0717 nan 0.1000 -0.0002
## 340 0.0641 nan 0.1000 -0.0002
## 360 0.0566 nan 0.1000 -0.0002
## 380 0.0498 nan 0.1000 -0.0002
## 400 0.0445 nan 0.1000 -0.0001
## 420 0.0393 nan 0.1000 -0.0002
## 440 0.0350 nan 0.1000 -0.0002
## 460 0.0309 nan 0.1000 -0.0001
## 480 0.0278 nan 0.1000 -0.0001
## 500 0.0250 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3190 nan 0.0010 0.0003
## 3 1.3183 nan 0.0010 0.0003
## 4 1.3175 nan 0.0010 0.0004
## 5 1.3167 nan 0.0010 0.0003
## 6 1.3159 nan 0.0010 0.0003
## 7 1.3151 nan 0.0010 0.0004
## 8 1.3143 nan 0.0010 0.0004
## 9 1.3136 nan 0.0010 0.0003
## 10 1.3128 nan 0.0010 0.0003
## 20 1.3052 nan 0.0010 0.0003
## 40 1.2903 nan 0.0010 0.0003
## 60 1.2756 nan 0.0010 0.0003
## 80 1.2616 nan 0.0010 0.0003
## 100 1.2479 nan 0.0010 0.0003
## 120 1.2347 nan 0.0010 0.0003
## 140 1.2215 nan 0.0010 0.0003
## 160 1.2091 nan 0.0010 0.0003
## 180 1.1968 nan 0.0010 0.0003
## 200 1.1850 nan 0.0010 0.0003
## 220 1.1734 nan 0.0010 0.0002
## 240 1.1621 nan 0.0010 0.0002
## 260 1.1514 nan 0.0010 0.0002
## 280 1.1408 nan 0.0010 0.0002
## 300 1.1306 nan 0.0010 0.0002
## 320 1.1207 nan 0.0010 0.0002
## 340 1.1110 nan 0.0010 0.0002
## 360 1.1011 nan 0.0010 0.0002
## 380 1.0919 nan 0.0010 0.0002
## 400 1.0829 nan 0.0010 0.0002
## 420 1.0740 nan 0.0010 0.0001
## 440 1.0653 nan 0.0010 0.0002
## 460 1.0569 nan 0.0010 0.0002
## 480 1.0490 nan 0.0010 0.0002
## 500 1.0409 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3191 nan 0.0010 0.0003
## 3 1.3183 nan 0.0010 0.0003
## 4 1.3175 nan 0.0010 0.0003
## 5 1.3167 nan 0.0010 0.0003
## 6 1.3160 nan 0.0010 0.0004
## 7 1.3151 nan 0.0010 0.0004
## 8 1.3143 nan 0.0010 0.0004
## 9 1.3135 nan 0.0010 0.0003
## 10 1.3128 nan 0.0010 0.0003
## 20 1.3049 nan 0.0010 0.0003
## 40 1.2899 nan 0.0010 0.0003
## 60 1.2753 nan 0.0010 0.0003
## 80 1.2612 nan 0.0010 0.0003
## 100 1.2474 nan 0.0010 0.0003
## 120 1.2342 nan 0.0010 0.0002
## 140 1.2215 nan 0.0010 0.0003
## 160 1.2089 nan 0.0010 0.0003
## 180 1.1969 nan 0.0010 0.0002
## 200 1.1850 nan 0.0010 0.0003
## 220 1.1736 nan 0.0010 0.0002
## 240 1.1627 nan 0.0010 0.0002
## 260 1.1519 nan 0.0010 0.0002
## 280 1.1413 nan 0.0010 0.0003
## 300 1.1310 nan 0.0010 0.0002
## 320 1.1209 nan 0.0010 0.0002
## 340 1.1113 nan 0.0010 0.0002
## 360 1.1016 nan 0.0010 0.0002
## 380 1.0922 nan 0.0010 0.0002
## 400 1.0833 nan 0.0010 0.0002
## 420 1.0746 nan 0.0010 0.0002
## 440 1.0659 nan 0.0010 0.0002
## 460 1.0575 nan 0.0010 0.0002
## 480 1.0494 nan 0.0010 0.0002
## 500 1.0413 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3190 nan 0.0010 0.0003
## 3 1.3183 nan 0.0010 0.0003
## 4 1.3175 nan 0.0010 0.0004
## 5 1.3167 nan 0.0010 0.0004
## 6 1.3159 nan 0.0010 0.0004
## 7 1.3151 nan 0.0010 0.0003
## 8 1.3143 nan 0.0010 0.0003
## 9 1.3136 nan 0.0010 0.0003
## 10 1.3128 nan 0.0010 0.0004
## 20 1.3051 nan 0.0010 0.0003
## 40 1.2904 nan 0.0010 0.0003
## 60 1.2758 nan 0.0010 0.0003
## 80 1.2619 nan 0.0010 0.0003
## 100 1.2487 nan 0.0010 0.0003
## 120 1.2354 nan 0.0010 0.0002
## 140 1.2226 nan 0.0010 0.0003
## 160 1.2098 nan 0.0010 0.0003
## 180 1.1977 nan 0.0010 0.0003
## 200 1.1859 nan 0.0010 0.0003
## 220 1.1742 nan 0.0010 0.0002
## 240 1.1630 nan 0.0010 0.0003
## 260 1.1523 nan 0.0010 0.0002
## 280 1.1414 nan 0.0010 0.0002
## 300 1.1314 nan 0.0010 0.0002
## 320 1.1213 nan 0.0010 0.0002
## 340 1.1115 nan 0.0010 0.0002
## 360 1.1020 nan 0.0010 0.0002
## 380 1.0928 nan 0.0010 0.0002
## 400 1.0839 nan 0.0010 0.0002
## 420 1.0753 nan 0.0010 0.0002
## 440 1.0667 nan 0.0010 0.0002
## 460 1.0583 nan 0.0010 0.0002
## 480 1.0500 nan 0.0010 0.0002
## 500 1.0421 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0003
## 3 1.3181 nan 0.0010 0.0004
## 4 1.3173 nan 0.0010 0.0004
## 5 1.3164 nan 0.0010 0.0004
## 6 1.3156 nan 0.0010 0.0003
## 7 1.3148 nan 0.0010 0.0004
## 8 1.3139 nan 0.0010 0.0004
## 9 1.3131 nan 0.0010 0.0004
## 10 1.3123 nan 0.0010 0.0004
## 20 1.3041 nan 0.0010 0.0004
## 40 1.2875 nan 0.0010 0.0003
## 60 1.2719 nan 0.0010 0.0004
## 80 1.2566 nan 0.0010 0.0004
## 100 1.2421 nan 0.0010 0.0003
## 120 1.2278 nan 0.0010 0.0003
## 140 1.2140 nan 0.0010 0.0003
## 160 1.2006 nan 0.0010 0.0003
## 180 1.1875 nan 0.0010 0.0003
## 200 1.1747 nan 0.0010 0.0003
## 220 1.1624 nan 0.0010 0.0003
## 240 1.1502 nan 0.0010 0.0003
## 260 1.1386 nan 0.0010 0.0002
## 280 1.1276 nan 0.0010 0.0002
## 300 1.1168 nan 0.0010 0.0002
## 320 1.1063 nan 0.0010 0.0002
## 340 1.0959 nan 0.0010 0.0002
## 360 1.0858 nan 0.0010 0.0002
## 380 1.0758 nan 0.0010 0.0002
## 400 1.0664 nan 0.0010 0.0002
## 420 1.0571 nan 0.0010 0.0002
## 440 1.0480 nan 0.0010 0.0002
## 460 1.0391 nan 0.0010 0.0002
## 480 1.0301 nan 0.0010 0.0002
## 500 1.0216 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3190 nan 0.0010 0.0003
## 3 1.3182 nan 0.0010 0.0003
## 4 1.3174 nan 0.0010 0.0004
## 5 1.3165 nan 0.0010 0.0004
## 6 1.3156 nan 0.0010 0.0004
## 7 1.3148 nan 0.0010 0.0004
## 8 1.3140 nan 0.0010 0.0003
## 9 1.3132 nan 0.0010 0.0004
## 10 1.3123 nan 0.0010 0.0005
## 20 1.3038 nan 0.0010 0.0004
## 40 1.2875 nan 0.0010 0.0004
## 60 1.2718 nan 0.0010 0.0004
## 80 1.2566 nan 0.0010 0.0003
## 100 1.2421 nan 0.0010 0.0003
## 120 1.2279 nan 0.0010 0.0003
## 140 1.2141 nan 0.0010 0.0003
## 160 1.2006 nan 0.0010 0.0003
## 180 1.1873 nan 0.0010 0.0003
## 200 1.1745 nan 0.0010 0.0003
## 220 1.1621 nan 0.0010 0.0002
## 240 1.1501 nan 0.0010 0.0003
## 260 1.1385 nan 0.0010 0.0003
## 280 1.1271 nan 0.0010 0.0002
## 300 1.1161 nan 0.0010 0.0002
## 320 1.1053 nan 0.0010 0.0002
## 340 1.0949 nan 0.0010 0.0002
## 360 1.0849 nan 0.0010 0.0002
## 380 1.0750 nan 0.0010 0.0002
## 400 1.0654 nan 0.0010 0.0002
## 420 1.0560 nan 0.0010 0.0002
## 440 1.0470 nan 0.0010 0.0002
## 460 1.0380 nan 0.0010 0.0002
## 480 1.0294 nan 0.0010 0.0002
## 500 1.0208 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3179 nan 0.0010 0.0004
## 4 1.3171 nan 0.0010 0.0004
## 5 1.3162 nan 0.0010 0.0004
## 6 1.3153 nan 0.0010 0.0004
## 7 1.3145 nan 0.0010 0.0004
## 8 1.3136 nan 0.0010 0.0004
## 9 1.3128 nan 0.0010 0.0004
## 10 1.3119 nan 0.0010 0.0004
## 20 1.3036 nan 0.0010 0.0004
## 40 1.2874 nan 0.0010 0.0003
## 60 1.2719 nan 0.0010 0.0003
## 80 1.2566 nan 0.0010 0.0003
## 100 1.2419 nan 0.0010 0.0003
## 120 1.2278 nan 0.0010 0.0003
## 140 1.2138 nan 0.0010 0.0003
## 160 1.2004 nan 0.0010 0.0003
## 180 1.1877 nan 0.0010 0.0003
## 200 1.1749 nan 0.0010 0.0002
## 220 1.1631 nan 0.0010 0.0002
## 240 1.1514 nan 0.0010 0.0003
## 260 1.1400 nan 0.0010 0.0002
## 280 1.1285 nan 0.0010 0.0002
## 300 1.1177 nan 0.0010 0.0002
## 320 1.1072 nan 0.0010 0.0002
## 340 1.0971 nan 0.0010 0.0002
## 360 1.0872 nan 0.0010 0.0002
## 380 1.0774 nan 0.0010 0.0002
## 400 1.0679 nan 0.0010 0.0002
## 420 1.0587 nan 0.0010 0.0002
## 440 1.0496 nan 0.0010 0.0002
## 460 1.0406 nan 0.0010 0.0002
## 480 1.0321 nan 0.0010 0.0002
## 500 1.0238 nan 0.0010 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3179 nan 0.0010 0.0004
## 4 1.3170 nan 0.0010 0.0004
## 5 1.3161 nan 0.0010 0.0004
## 6 1.3152 nan 0.0010 0.0004
## 7 1.3143 nan 0.0010 0.0004
## 8 1.3133 nan 0.0010 0.0004
## 9 1.3124 nan 0.0010 0.0004
## 10 1.3115 nan 0.0010 0.0004
## 20 1.3028 nan 0.0010 0.0004
## 40 1.2856 nan 0.0010 0.0003
## 60 1.2689 nan 0.0010 0.0004
## 80 1.2529 nan 0.0010 0.0003
## 100 1.2377 nan 0.0010 0.0003
## 120 1.2229 nan 0.0010 0.0003
## 140 1.2082 nan 0.0010 0.0003
## 160 1.1942 nan 0.0010 0.0003
## 180 1.1803 nan 0.0010 0.0003
## 200 1.1668 nan 0.0010 0.0003
## 220 1.1538 nan 0.0010 0.0002
## 240 1.1415 nan 0.0010 0.0002
## 260 1.1293 nan 0.0010 0.0002
## 280 1.1175 nan 0.0010 0.0002
## 300 1.1060 nan 0.0010 0.0003
## 320 1.0949 nan 0.0010 0.0002
## 340 1.0842 nan 0.0010 0.0002
## 360 1.0736 nan 0.0010 0.0002
## 380 1.0631 nan 0.0010 0.0002
## 400 1.0531 nan 0.0010 0.0002
## 420 1.0436 nan 0.0010 0.0002
## 440 1.0339 nan 0.0010 0.0002
## 460 1.0245 nan 0.0010 0.0002
## 480 1.0154 nan 0.0010 0.0002
## 500 1.0066 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3179 nan 0.0010 0.0004
## 4 1.3170 nan 0.0010 0.0004
## 5 1.3161 nan 0.0010 0.0004
## 6 1.3153 nan 0.0010 0.0004
## 7 1.3143 nan 0.0010 0.0004
## 8 1.3135 nan 0.0010 0.0003
## 9 1.3126 nan 0.0010 0.0004
## 10 1.3117 nan 0.0010 0.0004
## 20 1.3031 nan 0.0010 0.0004
## 40 1.2860 nan 0.0010 0.0004
## 60 1.2693 nan 0.0010 0.0004
## 80 1.2534 nan 0.0010 0.0004
## 100 1.2376 nan 0.0010 0.0003
## 120 1.2228 nan 0.0010 0.0003
## 140 1.2080 nan 0.0010 0.0003
## 160 1.1939 nan 0.0010 0.0003
## 180 1.1805 nan 0.0010 0.0003
## 200 1.1670 nan 0.0010 0.0002
## 220 1.1545 nan 0.0010 0.0003
## 240 1.1421 nan 0.0010 0.0003
## 260 1.1302 nan 0.0010 0.0003
## 280 1.1182 nan 0.0010 0.0003
## 300 1.1070 nan 0.0010 0.0002
## 320 1.0960 nan 0.0010 0.0002
## 340 1.0850 nan 0.0010 0.0002
## 360 1.0746 nan 0.0010 0.0002
## 380 1.0643 nan 0.0010 0.0002
## 400 1.0543 nan 0.0010 0.0002
## 420 1.0448 nan 0.0010 0.0002
## 440 1.0353 nan 0.0010 0.0002
## 460 1.0261 nan 0.0010 0.0002
## 480 1.0170 nan 0.0010 0.0002
## 500 1.0085 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3198 nan 0.0010 0.0004
## 2 1.3189 nan 0.0010 0.0004
## 3 1.3180 nan 0.0010 0.0004
## 4 1.3172 nan 0.0010 0.0004
## 5 1.3163 nan 0.0010 0.0004
## 6 1.3154 nan 0.0010 0.0004
## 7 1.3145 nan 0.0010 0.0004
## 8 1.3137 nan 0.0010 0.0004
## 9 1.3128 nan 0.0010 0.0004
## 10 1.3119 nan 0.0010 0.0004
## 20 1.3032 nan 0.0010 0.0004
## 40 1.2865 nan 0.0010 0.0003
## 60 1.2699 nan 0.0010 0.0004
## 80 1.2540 nan 0.0010 0.0004
## 100 1.2387 nan 0.0010 0.0003
## 120 1.2243 nan 0.0010 0.0003
## 140 1.2098 nan 0.0010 0.0003
## 160 1.1960 nan 0.0010 0.0002
## 180 1.1825 nan 0.0010 0.0003
## 200 1.1693 nan 0.0010 0.0003
## 220 1.1566 nan 0.0010 0.0003
## 240 1.1443 nan 0.0010 0.0003
## 260 1.1321 nan 0.0010 0.0002
## 280 1.1205 nan 0.0010 0.0002
## 300 1.1090 nan 0.0010 0.0003
## 320 1.0980 nan 0.0010 0.0002
## 340 1.0871 nan 0.0010 0.0002
## 360 1.0767 nan 0.0010 0.0002
## 380 1.0664 nan 0.0010 0.0002
## 400 1.0567 nan 0.0010 0.0002
## 420 1.0472 nan 0.0010 0.0002
## 440 1.0378 nan 0.0010 0.0002
## 460 1.0286 nan 0.0010 0.0002
## 480 1.0196 nan 0.0010 0.0002
## 500 1.0111 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3129 nan 0.0100 0.0037
## 2 1.3053 nan 0.0100 0.0038
## 3 1.2982 nan 0.0100 0.0032
## 4 1.2912 nan 0.0100 0.0026
## 5 1.2842 nan 0.0100 0.0031
## 6 1.2769 nan 0.0100 0.0033
## 7 1.2698 nan 0.0100 0.0031
## 8 1.2629 nan 0.0100 0.0031
## 9 1.2560 nan 0.0100 0.0028
## 10 1.2492 nan 0.0100 0.0031
## 20 1.1872 nan 0.0100 0.0022
## 40 1.0851 nan 0.0100 0.0019
## 60 1.0041 nan 0.0100 0.0013
## 80 0.9406 nan 0.0100 0.0010
## 100 0.8868 nan 0.0100 0.0008
## 120 0.8434 nan 0.0100 0.0008
## 140 0.8083 nan 0.0100 0.0004
## 160 0.7784 nan 0.0100 0.0004
## 180 0.7545 nan 0.0100 0.0004
## 200 0.7322 nan 0.0100 0.0003
## 220 0.7119 nan 0.0100 0.0002
## 240 0.6941 nan 0.0100 0.0002
## 260 0.6788 nan 0.0100 0.0000
## 280 0.6644 nan 0.0100 0.0000
## 300 0.6527 nan 0.0100 -0.0000
## 320 0.6404 nan 0.0100 0.0001
## 340 0.6295 nan 0.0100 0.0001
## 360 0.6184 nan 0.0100 0.0000
## 380 0.6077 nan 0.0100 -0.0001
## 400 0.5971 nan 0.0100 0.0000
## 420 0.5872 nan 0.0100 -0.0000
## 440 0.5786 nan 0.0100 -0.0000
## 460 0.5705 nan 0.0100 -0.0001
## 480 0.5625 nan 0.0100 -0.0001
## 500 0.5549 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3123 nan 0.0100 0.0037
## 2 1.3049 nan 0.0100 0.0033
## 3 1.2977 nan 0.0100 0.0030
## 4 1.2904 nan 0.0100 0.0032
## 5 1.2835 nan 0.0100 0.0030
## 6 1.2763 nan 0.0100 0.0033
## 7 1.2697 nan 0.0100 0.0028
## 8 1.2630 nan 0.0100 0.0029
## 9 1.2560 nan 0.0100 0.0031
## 10 1.2491 nan 0.0100 0.0031
## 20 1.1871 nan 0.0100 0.0026
## 40 1.0848 nan 0.0100 0.0020
## 60 1.0054 nan 0.0100 0.0014
## 80 0.9418 nan 0.0100 0.0010
## 100 0.8905 nan 0.0100 0.0011
## 120 0.8474 nan 0.0100 0.0005
## 140 0.8133 nan 0.0100 0.0006
## 160 0.7839 nan 0.0100 0.0004
## 180 0.7594 nan 0.0100 0.0003
## 200 0.7369 nan 0.0100 0.0004
## 220 0.7186 nan 0.0100 0.0002
## 240 0.7023 nan 0.0100 0.0002
## 260 0.6860 nan 0.0100 0.0001
## 280 0.6718 nan 0.0100 0.0001
## 300 0.6584 nan 0.0100 0.0001
## 320 0.6460 nan 0.0100 0.0001
## 340 0.6345 nan 0.0100 -0.0001
## 360 0.6241 nan 0.0100 -0.0000
## 380 0.6127 nan 0.0100 -0.0002
## 400 0.6030 nan 0.0100 -0.0001
## 420 0.5935 nan 0.0100 0.0001
## 440 0.5846 nan 0.0100 0.0000
## 460 0.5752 nan 0.0100 0.0001
## 480 0.5665 nan 0.0100 -0.0001
## 500 0.5588 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3131 nan 0.0100 0.0033
## 2 1.3050 nan 0.0100 0.0034
## 3 1.2982 nan 0.0100 0.0028
## 4 1.2907 nan 0.0100 0.0031
## 5 1.2830 nan 0.0100 0.0033
## 6 1.2763 nan 0.0100 0.0033
## 7 1.2691 nan 0.0100 0.0031
## 8 1.2617 nan 0.0100 0.0030
## 9 1.2547 nan 0.0100 0.0034
## 10 1.2474 nan 0.0100 0.0032
## 20 1.1845 nan 0.0100 0.0023
## 40 1.0842 nan 0.0100 0.0019
## 60 1.0057 nan 0.0100 0.0014
## 80 0.9409 nan 0.0100 0.0011
## 100 0.8900 nan 0.0100 0.0010
## 120 0.8488 nan 0.0100 0.0008
## 140 0.8138 nan 0.0100 0.0006
## 160 0.7846 nan 0.0100 0.0006
## 180 0.7593 nan 0.0100 0.0004
## 200 0.7384 nan 0.0100 0.0003
## 220 0.7197 nan 0.0100 0.0001
## 240 0.7035 nan 0.0100 0.0004
## 260 0.6892 nan 0.0100 -0.0000
## 280 0.6754 nan 0.0100 0.0000
## 300 0.6628 nan 0.0100 0.0000
## 320 0.6511 nan 0.0100 -0.0000
## 340 0.6406 nan 0.0100 -0.0001
## 360 0.6310 nan 0.0100 -0.0002
## 380 0.6215 nan 0.0100 0.0001
## 400 0.6118 nan 0.0100 -0.0000
## 420 0.6030 nan 0.0100 -0.0000
## 440 0.5943 nan 0.0100 0.0001
## 460 0.5871 nan 0.0100 -0.0000
## 480 0.5789 nan 0.0100 -0.0001
## 500 0.5702 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3127 nan 0.0100 0.0032
## 2 1.3046 nan 0.0100 0.0033
## 3 1.2960 nan 0.0100 0.0040
## 4 1.2881 nan 0.0100 0.0037
## 5 1.2798 nan 0.0100 0.0038
## 6 1.2720 nan 0.0100 0.0031
## 7 1.2644 nan 0.0100 0.0033
## 8 1.2570 nan 0.0100 0.0033
## 9 1.2503 nan 0.0100 0.0030
## 10 1.2429 nan 0.0100 0.0028
## 20 1.1748 nan 0.0100 0.0027
## 40 1.0676 nan 0.0100 0.0022
## 60 0.9835 nan 0.0100 0.0014
## 80 0.9155 nan 0.0100 0.0012
## 100 0.8621 nan 0.0100 0.0011
## 120 0.8177 nan 0.0100 0.0003
## 140 0.7814 nan 0.0100 0.0006
## 160 0.7502 nan 0.0100 0.0002
## 180 0.7249 nan 0.0100 0.0001
## 200 0.7006 nan 0.0100 0.0002
## 220 0.6790 nan 0.0100 0.0001
## 240 0.6608 nan 0.0100 -0.0000
## 260 0.6428 nan 0.0100 0.0002
## 280 0.6272 nan 0.0100 0.0001
## 300 0.6130 nan 0.0100 -0.0001
## 320 0.6000 nan 0.0100 -0.0001
## 340 0.5871 nan 0.0100 -0.0001
## 360 0.5743 nan 0.0100 -0.0000
## 380 0.5619 nan 0.0100 0.0001
## 400 0.5503 nan 0.0100 -0.0000
## 420 0.5401 nan 0.0100 -0.0000
## 440 0.5301 nan 0.0100 -0.0001
## 460 0.5206 nan 0.0100 0.0000
## 480 0.5106 nan 0.0100 0.0000
## 500 0.5007 nan 0.0100 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3121 nan 0.0100 0.0040
## 2 1.3035 nan 0.0100 0.0040
## 3 1.2961 nan 0.0100 0.0030
## 4 1.2877 nan 0.0100 0.0036
## 5 1.2796 nan 0.0100 0.0040
## 6 1.2719 nan 0.0100 0.0034
## 7 1.2645 nan 0.0100 0.0031
## 8 1.2570 nan 0.0100 0.0031
## 9 1.2495 nan 0.0100 0.0033
## 10 1.2419 nan 0.0100 0.0033
## 20 1.1750 nan 0.0100 0.0025
## 40 1.0668 nan 0.0100 0.0022
## 60 0.9818 nan 0.0100 0.0015
## 80 0.9161 nan 0.0100 0.0011
## 100 0.8629 nan 0.0100 0.0008
## 120 0.8188 nan 0.0100 0.0007
## 140 0.7832 nan 0.0100 0.0005
## 160 0.7533 nan 0.0100 0.0004
## 180 0.7269 nan 0.0100 0.0003
## 200 0.7042 nan 0.0100 0.0004
## 220 0.6846 nan 0.0100 0.0001
## 240 0.6669 nan 0.0100 0.0001
## 260 0.6507 nan 0.0100 0.0000
## 280 0.6363 nan 0.0100 0.0001
## 300 0.6217 nan 0.0100 -0.0001
## 320 0.6081 nan 0.0100 0.0001
## 340 0.5960 nan 0.0100 0.0001
## 360 0.5835 nan 0.0100 0.0001
## 380 0.5710 nan 0.0100 0.0002
## 400 0.5598 nan 0.0100 -0.0001
## 420 0.5501 nan 0.0100 0.0000
## 440 0.5399 nan 0.0100 0.0001
## 460 0.5295 nan 0.0100 0.0001
## 480 0.5203 nan 0.0100 -0.0000
## 500 0.5113 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3122 nan 0.0100 0.0036
## 2 1.3039 nan 0.0100 0.0036
## 3 1.2965 nan 0.0100 0.0029
## 4 1.2886 nan 0.0100 0.0034
## 5 1.2811 nan 0.0100 0.0034
## 6 1.2731 nan 0.0100 0.0035
## 7 1.2661 nan 0.0100 0.0033
## 8 1.2587 nan 0.0100 0.0033
## 9 1.2508 nan 0.0100 0.0032
## 10 1.2428 nan 0.0100 0.0034
## 20 1.1774 nan 0.0100 0.0025
## 40 1.0707 nan 0.0100 0.0016
## 60 0.9871 nan 0.0100 0.0015
## 80 0.9197 nan 0.0100 0.0011
## 100 0.8663 nan 0.0100 0.0010
## 120 0.8235 nan 0.0100 0.0007
## 140 0.7884 nan 0.0100 0.0006
## 160 0.7591 nan 0.0100 0.0005
## 180 0.7324 nan 0.0100 0.0005
## 200 0.7099 nan 0.0100 0.0002
## 220 0.6901 nan 0.0100 0.0001
## 240 0.6716 nan 0.0100 0.0001
## 260 0.6559 nan 0.0100 -0.0001
## 280 0.6408 nan 0.0100 -0.0001
## 300 0.6263 nan 0.0100 -0.0001
## 320 0.6135 nan 0.0100 -0.0001
## 340 0.6011 nan 0.0100 -0.0000
## 360 0.5896 nan 0.0100 -0.0000
## 380 0.5781 nan 0.0100 0.0001
## 400 0.5681 nan 0.0100 0.0001
## 420 0.5564 nan 0.0100 -0.0001
## 440 0.5464 nan 0.0100 0.0000
## 460 0.5374 nan 0.0100 -0.0002
## 480 0.5282 nan 0.0100 -0.0001
## 500 0.5194 nan 0.0100 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3123 nan 0.0100 0.0037
## 2 1.3033 nan 0.0100 0.0042
## 3 1.2944 nan 0.0100 0.0039
## 4 1.2865 nan 0.0100 0.0035
## 5 1.2782 nan 0.0100 0.0037
## 6 1.2698 nan 0.0100 0.0035
## 7 1.2617 nan 0.0100 0.0035
## 8 1.2541 nan 0.0100 0.0036
## 9 1.2464 nan 0.0100 0.0038
## 10 1.2391 nan 0.0100 0.0031
## 20 1.1684 nan 0.0100 0.0030
## 40 1.0552 nan 0.0100 0.0024
## 60 0.9690 nan 0.0100 0.0015
## 80 0.8983 nan 0.0100 0.0012
## 100 0.8425 nan 0.0100 0.0009
## 120 0.7970 nan 0.0100 0.0005
## 140 0.7580 nan 0.0100 0.0003
## 160 0.7254 nan 0.0100 0.0005
## 180 0.6962 nan 0.0100 0.0002
## 200 0.6707 nan 0.0100 0.0005
## 220 0.6478 nan 0.0100 0.0002
## 240 0.6275 nan 0.0100 0.0001
## 260 0.6095 nan 0.0100 -0.0001
## 280 0.5925 nan 0.0100 0.0002
## 300 0.5766 nan 0.0100 0.0001
## 320 0.5617 nan 0.0100 0.0001
## 340 0.5458 nan 0.0100 0.0000
## 360 0.5333 nan 0.0100 0.0001
## 380 0.5212 nan 0.0100 -0.0000
## 400 0.5104 nan 0.0100 0.0000
## 420 0.4990 nan 0.0100 -0.0001
## 440 0.4877 nan 0.0100 0.0000
## 460 0.4775 nan 0.0100 -0.0000
## 480 0.4673 nan 0.0100 -0.0000
## 500 0.4572 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3120 nan 0.0100 0.0038
## 2 1.3028 nan 0.0100 0.0041
## 3 1.2937 nan 0.0100 0.0042
## 4 1.2855 nan 0.0100 0.0034
## 5 1.2771 nan 0.0100 0.0039
## 6 1.2689 nan 0.0100 0.0037
## 7 1.2606 nan 0.0100 0.0036
## 8 1.2535 nan 0.0100 0.0031
## 9 1.2456 nan 0.0100 0.0036
## 10 1.2382 nan 0.0100 0.0035
## 20 1.1663 nan 0.0100 0.0029
## 40 1.0557 nan 0.0100 0.0014
## 60 0.9686 nan 0.0100 0.0015
## 80 0.9006 nan 0.0100 0.0014
## 100 0.8449 nan 0.0100 0.0010
## 120 0.7993 nan 0.0100 0.0005
## 140 0.7619 nan 0.0100 0.0006
## 160 0.7305 nan 0.0100 0.0003
## 180 0.7020 nan 0.0100 0.0004
## 200 0.6758 nan 0.0100 0.0002
## 220 0.6537 nan 0.0100 0.0002
## 240 0.6331 nan 0.0100 0.0001
## 260 0.6146 nan 0.0100 0.0001
## 280 0.5989 nan 0.0100 -0.0001
## 300 0.5829 nan 0.0100 0.0000
## 320 0.5680 nan 0.0100 0.0001
## 340 0.5541 nan 0.0100 0.0000
## 360 0.5408 nan 0.0100 0.0000
## 380 0.5290 nan 0.0100 0.0001
## 400 0.5169 nan 0.0100 0.0002
## 420 0.5059 nan 0.0100 -0.0000
## 440 0.4956 nan 0.0100 0.0000
## 460 0.4854 nan 0.0100 -0.0001
## 480 0.4752 nan 0.0100 0.0001
## 500 0.4656 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3122 nan 0.0100 0.0041
## 2 1.3030 nan 0.0100 0.0039
## 3 1.2944 nan 0.0100 0.0038
## 4 1.2860 nan 0.0100 0.0039
## 5 1.2776 nan 0.0100 0.0036
## 6 1.2689 nan 0.0100 0.0037
## 7 1.2605 nan 0.0100 0.0033
## 8 1.2530 nan 0.0100 0.0034
## 9 1.2451 nan 0.0100 0.0033
## 10 1.2373 nan 0.0100 0.0034
## 20 1.1676 nan 0.0100 0.0031
## 40 1.0567 nan 0.0100 0.0020
## 60 0.9705 nan 0.0100 0.0016
## 80 0.9034 nan 0.0100 0.0011
## 100 0.8482 nan 0.0100 0.0007
## 120 0.8034 nan 0.0100 0.0009
## 140 0.7671 nan 0.0100 0.0003
## 160 0.7354 nan 0.0100 0.0003
## 180 0.7082 nan 0.0100 0.0005
## 200 0.6845 nan 0.0100 0.0002
## 220 0.6642 nan 0.0100 -0.0000
## 240 0.6454 nan 0.0100 -0.0001
## 260 0.6260 nan 0.0100 0.0001
## 280 0.6097 nan 0.0100 0.0001
## 300 0.5952 nan 0.0100 -0.0000
## 320 0.5815 nan 0.0100 -0.0000
## 340 0.5681 nan 0.0100 -0.0001
## 360 0.5554 nan 0.0100 0.0001
## 380 0.5430 nan 0.0100 0.0001
## 400 0.5317 nan 0.0100 -0.0001
## 420 0.5211 nan 0.0100 -0.0001
## 440 0.5103 nan 0.0100 0.0001
## 460 0.4997 nan 0.0100 -0.0001
## 480 0.4901 nan 0.0100 0.0001
## 500 0.4802 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2466 nan 0.1000 0.0354
## 2 1.1854 nan 0.1000 0.0283
## 3 1.1298 nan 0.1000 0.0227
## 4 1.0807 nan 0.1000 0.0188
## 5 1.0386 nan 0.1000 0.0189
## 6 1.0004 nan 0.1000 0.0167
## 7 0.9692 nan 0.1000 0.0128
## 8 0.9418 nan 0.1000 0.0094
## 9 0.9145 nan 0.1000 0.0120
## 10 0.8903 nan 0.1000 0.0087
## 20 0.7412 nan 0.1000 0.0024
## 40 0.5997 nan 0.1000 0.0006
## 60 0.5150 nan 0.1000 -0.0003
## 80 0.4588 nan 0.1000 -0.0011
## 100 0.4001 nan 0.1000 -0.0000
## 120 0.3543 nan 0.1000 -0.0002
## 140 0.3152 nan 0.1000 -0.0004
## 160 0.2859 nan 0.1000 -0.0002
## 180 0.2607 nan 0.1000 -0.0008
## 200 0.2362 nan 0.1000 -0.0004
## 220 0.2141 nan 0.1000 -0.0009
## 240 0.1956 nan 0.1000 -0.0003
## 260 0.1780 nan 0.1000 -0.0003
## 280 0.1638 nan 0.1000 -0.0000
## 300 0.1496 nan 0.1000 -0.0005
## 320 0.1382 nan 0.1000 -0.0002
## 340 0.1269 nan 0.1000 -0.0001
## 360 0.1157 nan 0.1000 -0.0000
## 380 0.1071 nan 0.1000 -0.0002
## 400 0.0984 nan 0.1000 -0.0002
## 420 0.0917 nan 0.1000 -0.0003
## 440 0.0840 nan 0.1000 -0.0001
## 460 0.0773 nan 0.1000 -0.0002
## 480 0.0705 nan 0.1000 -0.0001
## 500 0.0652 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2556 nan 0.1000 0.0277
## 2 1.1898 nan 0.1000 0.0282
## 3 1.1409 nan 0.1000 0.0228
## 4 1.0985 nan 0.1000 0.0157
## 5 1.0546 nan 0.1000 0.0185
## 6 1.0180 nan 0.1000 0.0154
## 7 0.9787 nan 0.1000 0.0171
## 8 0.9477 nan 0.1000 0.0086
## 9 0.9202 nan 0.1000 0.0080
## 10 0.8959 nan 0.1000 0.0098
## 20 0.7462 nan 0.1000 0.0042
## 40 0.6104 nan 0.1000 -0.0005
## 60 0.5308 nan 0.1000 -0.0004
## 80 0.4773 nan 0.1000 0.0002
## 100 0.4216 nan 0.1000 -0.0012
## 120 0.3800 nan 0.1000 -0.0008
## 140 0.3397 nan 0.1000 -0.0013
## 160 0.3038 nan 0.1000 -0.0012
## 180 0.2720 nan 0.1000 -0.0000
## 200 0.2502 nan 0.1000 -0.0002
## 220 0.2297 nan 0.1000 -0.0011
## 240 0.2094 nan 0.1000 -0.0004
## 260 0.1903 nan 0.1000 -0.0002
## 280 0.1720 nan 0.1000 -0.0003
## 300 0.1563 nan 0.1000 -0.0004
## 320 0.1444 nan 0.1000 -0.0001
## 340 0.1326 nan 0.1000 -0.0004
## 360 0.1218 nan 0.1000 -0.0006
## 380 0.1125 nan 0.1000 -0.0003
## 400 0.1035 nan 0.1000 -0.0002
## 420 0.0956 nan 0.1000 -0.0004
## 440 0.0878 nan 0.1000 -0.0002
## 460 0.0814 nan 0.1000 -0.0002
## 480 0.0750 nan 0.1000 -0.0001
## 500 0.0697 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2424 nan 0.1000 0.0343
## 2 1.1825 nan 0.1000 0.0304
## 3 1.1262 nan 0.1000 0.0263
## 4 1.0788 nan 0.1000 0.0204
## 5 1.0351 nan 0.1000 0.0196
## 6 0.9972 nan 0.1000 0.0147
## 7 0.9648 nan 0.1000 0.0136
## 8 0.9364 nan 0.1000 0.0105
## 9 0.9098 nan 0.1000 0.0096
## 10 0.8807 nan 0.1000 0.0104
## 20 0.7405 nan 0.1000 0.0048
## 40 0.6121 nan 0.1000 0.0004
## 60 0.5384 nan 0.1000 -0.0012
## 80 0.4721 nan 0.1000 -0.0003
## 100 0.4261 nan 0.1000 -0.0013
## 120 0.3812 nan 0.1000 -0.0013
## 140 0.3509 nan 0.1000 -0.0011
## 160 0.3155 nan 0.1000 -0.0008
## 180 0.2883 nan 0.1000 -0.0007
## 200 0.2603 nan 0.1000 -0.0005
## 220 0.2358 nan 0.1000 -0.0007
## 240 0.2173 nan 0.1000 -0.0008
## 260 0.1980 nan 0.1000 -0.0001
## 280 0.1831 nan 0.1000 -0.0007
## 300 0.1684 nan 0.1000 -0.0007
## 320 0.1543 nan 0.1000 -0.0002
## 340 0.1431 nan 0.1000 -0.0005
## 360 0.1320 nan 0.1000 -0.0003
## 380 0.1229 nan 0.1000 -0.0006
## 400 0.1128 nan 0.1000 -0.0004
## 420 0.1043 nan 0.1000 -0.0003
## 440 0.0961 nan 0.1000 -0.0005
## 460 0.0893 nan 0.1000 -0.0002
## 480 0.0826 nan 0.1000 -0.0004
## 500 0.0771 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2355 nan 0.1000 0.0382
## 2 1.1671 nan 0.1000 0.0307
## 3 1.1082 nan 0.1000 0.0237
## 4 1.0607 nan 0.1000 0.0198
## 5 1.0169 nan 0.1000 0.0172
## 6 0.9773 nan 0.1000 0.0151
## 7 0.9423 nan 0.1000 0.0157
## 8 0.9121 nan 0.1000 0.0103
## 9 0.8836 nan 0.1000 0.0103
## 10 0.8611 nan 0.1000 0.0098
## 20 0.7021 nan 0.1000 0.0009
## 40 0.5599 nan 0.1000 -0.0002
## 60 0.4664 nan 0.1000 -0.0023
## 80 0.3991 nan 0.1000 -0.0007
## 100 0.3429 nan 0.1000 -0.0004
## 120 0.2982 nan 0.1000 -0.0000
## 140 0.2574 nan 0.1000 -0.0008
## 160 0.2247 nan 0.1000 -0.0004
## 180 0.2000 nan 0.1000 -0.0010
## 200 0.1790 nan 0.1000 -0.0004
## 220 0.1599 nan 0.1000 0.0001
## 240 0.1416 nan 0.1000 -0.0003
## 260 0.1261 nan 0.1000 -0.0003
## 280 0.1117 nan 0.1000 -0.0003
## 300 0.1011 nan 0.1000 -0.0005
## 320 0.0919 nan 0.1000 -0.0002
## 340 0.0834 nan 0.1000 -0.0002
## 360 0.0746 nan 0.1000 -0.0003
## 380 0.0676 nan 0.1000 -0.0003
## 400 0.0610 nan 0.1000 0.0000
## 420 0.0550 nan 0.1000 -0.0001
## 440 0.0502 nan 0.1000 -0.0001
## 460 0.0455 nan 0.1000 -0.0001
## 480 0.0414 nan 0.1000 -0.0002
## 500 0.0376 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2318 nan 0.1000 0.0375
## 2 1.1608 nan 0.1000 0.0297
## 3 1.1008 nan 0.1000 0.0243
## 4 1.0490 nan 0.1000 0.0226
## 5 1.0059 nan 0.1000 0.0179
## 6 0.9702 nan 0.1000 0.0154
## 7 0.9412 nan 0.1000 0.0123
## 8 0.9125 nan 0.1000 0.0142
## 9 0.8845 nan 0.1000 0.0112
## 10 0.8614 nan 0.1000 0.0072
## 20 0.7047 nan 0.1000 0.0025
## 40 0.5586 nan 0.1000 0.0002
## 60 0.4706 nan 0.1000 -0.0004
## 80 0.4039 nan 0.1000 -0.0010
## 100 0.3519 nan 0.1000 -0.0015
## 120 0.3087 nan 0.1000 -0.0003
## 140 0.2719 nan 0.1000 -0.0004
## 160 0.2410 nan 0.1000 -0.0013
## 180 0.2130 nan 0.1000 -0.0002
## 200 0.1914 nan 0.1000 -0.0009
## 220 0.1696 nan 0.1000 -0.0001
## 240 0.1518 nan 0.1000 -0.0007
## 260 0.1357 nan 0.1000 -0.0008
## 280 0.1224 nan 0.1000 -0.0002
## 300 0.1095 nan 0.1000 -0.0003
## 320 0.0993 nan 0.1000 -0.0004
## 340 0.0903 nan 0.1000 -0.0005
## 360 0.0806 nan 0.1000 -0.0004
## 380 0.0727 nan 0.1000 -0.0002
## 400 0.0659 nan 0.1000 -0.0002
## 420 0.0593 nan 0.1000 -0.0001
## 440 0.0537 nan 0.1000 -0.0002
## 460 0.0485 nan 0.1000 -0.0001
## 480 0.0441 nan 0.1000 -0.0001
## 500 0.0401 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2324 nan 0.1000 0.0383
## 2 1.1712 nan 0.1000 0.0279
## 3 1.1169 nan 0.1000 0.0223
## 4 1.0650 nan 0.1000 0.0211
## 5 1.0209 nan 0.1000 0.0171
## 6 0.9858 nan 0.1000 0.0141
## 7 0.9496 nan 0.1000 0.0136
## 8 0.9202 nan 0.1000 0.0116
## 9 0.8960 nan 0.1000 0.0089
## 10 0.8674 nan 0.1000 0.0111
## 20 0.7167 nan 0.1000 -0.0005
## 40 0.5779 nan 0.1000 0.0004
## 60 0.4841 nan 0.1000 0.0011
## 80 0.4197 nan 0.1000 -0.0008
## 100 0.3628 nan 0.1000 -0.0007
## 120 0.3223 nan 0.1000 -0.0006
## 140 0.2823 nan 0.1000 -0.0005
## 160 0.2516 nan 0.1000 -0.0002
## 180 0.2248 nan 0.1000 -0.0005
## 200 0.2020 nan 0.1000 -0.0003
## 220 0.1812 nan 0.1000 -0.0014
## 240 0.1638 nan 0.1000 -0.0007
## 260 0.1471 nan 0.1000 -0.0010
## 280 0.1334 nan 0.1000 -0.0003
## 300 0.1198 nan 0.1000 0.0000
## 320 0.1079 nan 0.1000 -0.0002
## 340 0.0976 nan 0.1000 -0.0004
## 360 0.0877 nan 0.1000 -0.0003
## 380 0.0789 nan 0.1000 -0.0002
## 400 0.0720 nan 0.1000 -0.0003
## 420 0.0654 nan 0.1000 -0.0001
## 440 0.0605 nan 0.1000 -0.0002
## 460 0.0545 nan 0.1000 -0.0004
## 480 0.0496 nan 0.1000 -0.0003
## 500 0.0444 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2363 nan 0.1000 0.0409
## 2 1.1652 nan 0.1000 0.0327
## 3 1.0972 nan 0.1000 0.0271
## 4 1.0427 nan 0.1000 0.0212
## 5 1.0008 nan 0.1000 0.0158
## 6 0.9568 nan 0.1000 0.0188
## 7 0.9223 nan 0.1000 0.0129
## 8 0.8917 nan 0.1000 0.0136
## 9 0.8598 nan 0.1000 0.0124
## 10 0.8317 nan 0.1000 0.0112
## 20 0.6667 nan 0.1000 -0.0006
## 40 0.5094 nan 0.1000 0.0014
## 60 0.4112 nan 0.1000 -0.0011
## 80 0.3439 nan 0.1000 -0.0025
## 100 0.2856 nan 0.1000 0.0004
## 120 0.2406 nan 0.1000 -0.0008
## 140 0.2030 nan 0.1000 -0.0002
## 160 0.1754 nan 0.1000 -0.0003
## 180 0.1502 nan 0.1000 -0.0002
## 200 0.1330 nan 0.1000 -0.0003
## 220 0.1155 nan 0.1000 -0.0001
## 240 0.1019 nan 0.1000 -0.0004
## 260 0.0896 nan 0.1000 -0.0002
## 280 0.0793 nan 0.1000 -0.0001
## 300 0.0695 nan 0.1000 -0.0001
## 320 0.0610 nan 0.1000 -0.0002
## 340 0.0553 nan 0.1000 -0.0003
## 360 0.0493 nan 0.1000 -0.0001
## 380 0.0437 nan 0.1000 -0.0000
## 400 0.0384 nan 0.1000 0.0000
## 420 0.0340 nan 0.1000 -0.0001
## 440 0.0301 nan 0.1000 -0.0001
## 460 0.0269 nan 0.1000 -0.0000
## 480 0.0238 nan 0.1000 -0.0001
## 500 0.0216 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2364 nan 0.1000 0.0374
## 2 1.1637 nan 0.1000 0.0330
## 3 1.0991 nan 0.1000 0.0264
## 4 1.0469 nan 0.1000 0.0227
## 5 1.0071 nan 0.1000 0.0168
## 6 0.9695 nan 0.1000 0.0161
## 7 0.9288 nan 0.1000 0.0179
## 8 0.8990 nan 0.1000 0.0133
## 9 0.8727 nan 0.1000 0.0077
## 10 0.8459 nan 0.1000 0.0094
## 20 0.6770 nan 0.1000 0.0000
## 40 0.5317 nan 0.1000 0.0004
## 60 0.4317 nan 0.1000 -0.0002
## 80 0.3538 nan 0.1000 -0.0002
## 100 0.2961 nan 0.1000 0.0003
## 120 0.2544 nan 0.1000 -0.0012
## 140 0.2168 nan 0.1000 -0.0009
## 160 0.1869 nan 0.1000 -0.0005
## 180 0.1601 nan 0.1000 -0.0009
## 200 0.1385 nan 0.1000 -0.0005
## 220 0.1231 nan 0.1000 -0.0003
## 240 0.1085 nan 0.1000 -0.0003
## 260 0.0950 nan 0.1000 -0.0002
## 280 0.0818 nan 0.1000 -0.0001
## 300 0.0728 nan 0.1000 -0.0002
## 320 0.0638 nan 0.1000 0.0000
## 340 0.0571 nan 0.1000 -0.0000
## 360 0.0501 nan 0.1000 -0.0002
## 380 0.0446 nan 0.1000 -0.0002
## 400 0.0399 nan 0.1000 -0.0002
## 420 0.0353 nan 0.1000 -0.0000
## 440 0.0311 nan 0.1000 -0.0000
## 460 0.0280 nan 0.1000 0.0000
## 480 0.0249 nan 0.1000 -0.0000
## 500 0.0220 nan 0.1000 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2326 nan 0.1000 0.0356
## 2 1.1624 nan 0.1000 0.0300
## 3 1.1075 nan 0.1000 0.0259
## 4 1.0542 nan 0.1000 0.0231
## 5 1.0083 nan 0.1000 0.0189
## 6 0.9701 nan 0.1000 0.0160
## 7 0.9377 nan 0.1000 0.0113
## 8 0.9090 nan 0.1000 0.0097
## 9 0.8790 nan 0.1000 0.0110
## 10 0.8520 nan 0.1000 0.0081
## 20 0.6776 nan 0.1000 0.0007
## 40 0.5346 nan 0.1000 -0.0004
## 60 0.4455 nan 0.1000 -0.0002
## 80 0.3646 nan 0.1000 -0.0006
## 100 0.3100 nan 0.1000 -0.0007
## 120 0.2655 nan 0.1000 -0.0006
## 140 0.2278 nan 0.1000 -0.0016
## 160 0.1966 nan 0.1000 -0.0003
## 180 0.1710 nan 0.1000 -0.0004
## 200 0.1521 nan 0.1000 0.0002
## 220 0.1343 nan 0.1000 -0.0002
## 240 0.1197 nan 0.1000 -0.0001
## 260 0.1056 nan 0.1000 -0.0002
## 280 0.0931 nan 0.1000 -0.0003
## 300 0.0823 nan 0.1000 -0.0002
## 320 0.0721 nan 0.1000 -0.0001
## 340 0.0642 nan 0.1000 -0.0002
## 360 0.0568 nan 0.1000 -0.0002
## 380 0.0505 nan 0.1000 -0.0001
## 400 0.0454 nan 0.1000 -0.0002
## 420 0.0407 nan 0.1000 -0.0002
## 440 0.0366 nan 0.1000 -0.0001
## 460 0.0327 nan 0.1000 -0.0002
## 480 0.0291 nan 0.1000 -0.0002
## 500 0.0259 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3193 nan 0.0010 0.0004
## 3 1.3185 nan 0.0010 0.0004
## 4 1.3177 nan 0.0010 0.0004
## 5 1.3170 nan 0.0010 0.0004
## 6 1.3162 nan 0.0010 0.0003
## 7 1.3155 nan 0.0010 0.0003
## 8 1.3146 nan 0.0010 0.0004
## 9 1.3137 nan 0.0010 0.0004
## 10 1.3129 nan 0.0010 0.0004
## 20 1.3049 nan 0.0010 0.0004
## 40 1.2891 nan 0.0010 0.0004
## 60 1.2738 nan 0.0010 0.0003
## 80 1.2589 nan 0.0010 0.0004
## 100 1.2442 nan 0.0010 0.0003
## 120 1.2301 nan 0.0010 0.0003
## 140 1.2165 nan 0.0010 0.0003
## 160 1.2033 nan 0.0010 0.0003
## 180 1.1904 nan 0.0010 0.0003
## 200 1.1779 nan 0.0010 0.0003
## 220 1.1660 nan 0.0010 0.0003
## 240 1.1544 nan 0.0010 0.0002
## 260 1.1429 nan 0.0010 0.0002
## 280 1.1318 nan 0.0010 0.0002
## 300 1.1208 nan 0.0010 0.0002
## 320 1.1102 nan 0.0010 0.0002
## 340 1.1004 nan 0.0010 0.0003
## 360 1.0903 nan 0.0010 0.0002
## 380 1.0806 nan 0.0010 0.0002
## 400 1.0710 nan 0.0010 0.0002
## 420 1.0620 nan 0.0010 0.0002
## 440 1.0531 nan 0.0010 0.0002
## 460 1.0445 nan 0.0010 0.0002
## 480 1.0360 nan 0.0010 0.0001
## 500 1.0275 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3195 nan 0.0010 0.0003
## 3 1.3187 nan 0.0010 0.0004
## 4 1.3178 nan 0.0010 0.0003
## 5 1.3170 nan 0.0010 0.0004
## 6 1.3162 nan 0.0010 0.0004
## 7 1.3154 nan 0.0010 0.0003
## 8 1.3146 nan 0.0010 0.0004
## 9 1.3137 nan 0.0010 0.0004
## 10 1.3129 nan 0.0010 0.0003
## 20 1.3049 nan 0.0010 0.0004
## 40 1.2889 nan 0.0010 0.0004
## 60 1.2732 nan 0.0010 0.0003
## 80 1.2584 nan 0.0010 0.0003
## 100 1.2441 nan 0.0010 0.0003
## 120 1.2301 nan 0.0010 0.0003
## 140 1.2167 nan 0.0010 0.0003
## 160 1.2035 nan 0.0010 0.0003
## 180 1.1906 nan 0.0010 0.0003
## 200 1.1785 nan 0.0010 0.0003
## 220 1.1666 nan 0.0010 0.0003
## 240 1.1546 nan 0.0010 0.0002
## 260 1.1433 nan 0.0010 0.0002
## 280 1.1322 nan 0.0010 0.0002
## 300 1.1214 nan 0.0010 0.0002
## 320 1.1109 nan 0.0010 0.0002
## 340 1.1011 nan 0.0010 0.0002
## 360 1.0911 nan 0.0010 0.0003
## 380 1.0813 nan 0.0010 0.0002
## 400 1.0717 nan 0.0010 0.0002
## 420 1.0627 nan 0.0010 0.0002
## 440 1.0537 nan 0.0010 0.0002
## 460 1.0449 nan 0.0010 0.0002
## 480 1.0366 nan 0.0010 0.0002
## 500 1.0285 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0003
## 2 1.3194 nan 0.0010 0.0004
## 3 1.3186 nan 0.0010 0.0003
## 4 1.3178 nan 0.0010 0.0004
## 5 1.3169 nan 0.0010 0.0004
## 6 1.3161 nan 0.0010 0.0004
## 7 1.3153 nan 0.0010 0.0004
## 8 1.3145 nan 0.0010 0.0004
## 9 1.3137 nan 0.0010 0.0004
## 10 1.3129 nan 0.0010 0.0004
## 20 1.3049 nan 0.0010 0.0003
## 40 1.2895 nan 0.0010 0.0004
## 60 1.2746 nan 0.0010 0.0003
## 80 1.2598 nan 0.0010 0.0003
## 100 1.2459 nan 0.0010 0.0003
## 120 1.2320 nan 0.0010 0.0003
## 140 1.2183 nan 0.0010 0.0003
## 160 1.2053 nan 0.0010 0.0003
## 180 1.1925 nan 0.0010 0.0003
## 200 1.1801 nan 0.0010 0.0003
## 220 1.1683 nan 0.0010 0.0002
## 240 1.1567 nan 0.0010 0.0003
## 260 1.1452 nan 0.0010 0.0003
## 280 1.1343 nan 0.0010 0.0003
## 300 1.1233 nan 0.0010 0.0002
## 320 1.1129 nan 0.0010 0.0002
## 340 1.1028 nan 0.0010 0.0002
## 360 1.0927 nan 0.0010 0.0002
## 380 1.0833 nan 0.0010 0.0002
## 400 1.0736 nan 0.0010 0.0002
## 420 1.0645 nan 0.0010 0.0002
## 440 1.0555 nan 0.0010 0.0002
## 460 1.0472 nan 0.0010 0.0002
## 480 1.0388 nan 0.0010 0.0002
## 500 1.0305 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3195 nan 0.0010 0.0004
## 3 1.3186 nan 0.0010 0.0004
## 4 1.3178 nan 0.0010 0.0004
## 5 1.3169 nan 0.0010 0.0004
## 6 1.3160 nan 0.0010 0.0004
## 7 1.3152 nan 0.0010 0.0004
## 8 1.3144 nan 0.0010 0.0003
## 9 1.3134 nan 0.0010 0.0004
## 10 1.3126 nan 0.0010 0.0004
## 20 1.3038 nan 0.0010 0.0003
## 40 1.2870 nan 0.0010 0.0004
## 60 1.2702 nan 0.0010 0.0004
## 80 1.2541 nan 0.0010 0.0003
## 100 1.2386 nan 0.0010 0.0004
## 120 1.2233 nan 0.0010 0.0003
## 140 1.2091 nan 0.0010 0.0003
## 160 1.1954 nan 0.0010 0.0003
## 180 1.1819 nan 0.0010 0.0003
## 200 1.1688 nan 0.0010 0.0003
## 220 1.1561 nan 0.0010 0.0002
## 240 1.1437 nan 0.0010 0.0002
## 260 1.1317 nan 0.0010 0.0003
## 280 1.1201 nan 0.0010 0.0002
## 300 1.1087 nan 0.0010 0.0002
## 320 1.0977 nan 0.0010 0.0002
## 340 1.0867 nan 0.0010 0.0002
## 360 1.0760 nan 0.0010 0.0002
## 380 1.0659 nan 0.0010 0.0002
## 400 1.0561 nan 0.0010 0.0002
## 420 1.0464 nan 0.0010 0.0002
## 440 1.0370 nan 0.0010 0.0002
## 460 1.0276 nan 0.0010 0.0002
## 480 1.0186 nan 0.0010 0.0002
## 500 1.0100 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3204 nan 0.0010 0.0004
## 2 1.3195 nan 0.0010 0.0004
## 3 1.3187 nan 0.0010 0.0004
## 4 1.3177 nan 0.0010 0.0004
## 5 1.3169 nan 0.0010 0.0004
## 6 1.3161 nan 0.0010 0.0004
## 7 1.3152 nan 0.0010 0.0004
## 8 1.3143 nan 0.0010 0.0004
## 9 1.3133 nan 0.0010 0.0004
## 10 1.3124 nan 0.0010 0.0004
## 20 1.3037 nan 0.0010 0.0004
## 40 1.2872 nan 0.0010 0.0004
## 60 1.2711 nan 0.0010 0.0004
## 80 1.2553 nan 0.0010 0.0003
## 100 1.2396 nan 0.0010 0.0003
## 120 1.2250 nan 0.0010 0.0003
## 140 1.2108 nan 0.0010 0.0003
## 160 1.1970 nan 0.0010 0.0003
## 180 1.1836 nan 0.0010 0.0003
## 200 1.1704 nan 0.0010 0.0003
## 220 1.1577 nan 0.0010 0.0003
## 240 1.1454 nan 0.0010 0.0003
## 260 1.1336 nan 0.0010 0.0003
## 280 1.1221 nan 0.0010 0.0002
## 300 1.1107 nan 0.0010 0.0002
## 320 1.0995 nan 0.0010 0.0003
## 340 1.0886 nan 0.0010 0.0002
## 360 1.0780 nan 0.0010 0.0002
## 380 1.0679 nan 0.0010 0.0002
## 400 1.0582 nan 0.0010 0.0002
## 420 1.0486 nan 0.0010 0.0002
## 440 1.0390 nan 0.0010 0.0002
## 460 1.0296 nan 0.0010 0.0002
## 480 1.0208 nan 0.0010 0.0002
## 500 1.0122 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0004
## 2 1.3195 nan 0.0010 0.0004
## 3 1.3186 nan 0.0010 0.0004
## 4 1.3178 nan 0.0010 0.0004
## 5 1.3169 nan 0.0010 0.0004
## 6 1.3161 nan 0.0010 0.0004
## 7 1.3151 nan 0.0010 0.0004
## 8 1.3142 nan 0.0010 0.0004
## 9 1.3133 nan 0.0010 0.0004
## 10 1.3124 nan 0.0010 0.0004
## 20 1.3040 nan 0.0010 0.0004
## 40 1.2873 nan 0.0010 0.0004
## 60 1.2716 nan 0.0010 0.0004
## 80 1.2558 nan 0.0010 0.0004
## 100 1.2409 nan 0.0010 0.0003
## 120 1.2265 nan 0.0010 0.0003
## 140 1.2122 nan 0.0010 0.0003
## 160 1.1984 nan 0.0010 0.0003
## 180 1.1849 nan 0.0010 0.0003
## 200 1.1719 nan 0.0010 0.0003
## 220 1.1593 nan 0.0010 0.0002
## 240 1.1470 nan 0.0010 0.0002
## 260 1.1354 nan 0.0010 0.0003
## 280 1.1237 nan 0.0010 0.0003
## 300 1.1127 nan 0.0010 0.0003
## 320 1.1018 nan 0.0010 0.0002
## 340 1.0911 nan 0.0010 0.0002
## 360 1.0807 nan 0.0010 0.0002
## 380 1.0707 nan 0.0010 0.0002
## 400 1.0611 nan 0.0010 0.0002
## 420 1.0516 nan 0.0010 0.0002
## 440 1.0424 nan 0.0010 0.0002
## 460 1.0331 nan 0.0010 0.0002
## 480 1.0244 nan 0.0010 0.0002
## 500 1.0157 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3202 nan 0.0010 0.0005
## 2 1.3194 nan 0.0010 0.0004
## 3 1.3184 nan 0.0010 0.0004
## 4 1.3175 nan 0.0010 0.0004
## 5 1.3166 nan 0.0010 0.0004
## 6 1.3156 nan 0.0010 0.0005
## 7 1.3148 nan 0.0010 0.0004
## 8 1.3138 nan 0.0010 0.0004
## 9 1.3128 nan 0.0010 0.0004
## 10 1.3119 nan 0.0010 0.0004
## 20 1.3028 nan 0.0010 0.0004
## 40 1.2852 nan 0.0010 0.0004
## 60 1.2682 nan 0.0010 0.0003
## 80 1.2514 nan 0.0010 0.0004
## 100 1.2350 nan 0.0010 0.0003
## 120 1.2196 nan 0.0010 0.0003
## 140 1.2045 nan 0.0010 0.0003
## 160 1.1900 nan 0.0010 0.0003
## 180 1.1757 nan 0.0010 0.0003
## 200 1.1617 nan 0.0010 0.0003
## 220 1.1485 nan 0.0010 0.0003
## 240 1.1356 nan 0.0010 0.0002
## 260 1.1232 nan 0.0010 0.0003
## 280 1.1112 nan 0.0010 0.0003
## 300 1.0995 nan 0.0010 0.0003
## 320 1.0881 nan 0.0010 0.0002
## 340 1.0766 nan 0.0010 0.0002
## 360 1.0657 nan 0.0010 0.0002
## 380 1.0549 nan 0.0010 0.0002
## 400 1.0446 nan 0.0010 0.0002
## 420 1.0343 nan 0.0010 0.0002
## 440 1.0245 nan 0.0010 0.0002
## 460 1.0148 nan 0.0010 0.0002
## 480 1.0054 nan 0.0010 0.0002
## 500 0.9965 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3203 nan 0.0010 0.0005
## 2 1.3194 nan 0.0010 0.0004
## 3 1.3185 nan 0.0010 0.0004
## 4 1.3175 nan 0.0010 0.0004
## 5 1.3166 nan 0.0010 0.0004
## 6 1.3157 nan 0.0010 0.0004
## 7 1.3147 nan 0.0010 0.0004
## 8 1.3138 nan 0.0010 0.0004
## 9 1.3129 nan 0.0010 0.0004
## 10 1.3120 nan 0.0010 0.0005
## 20 1.3030 nan 0.0010 0.0004
## 40 1.2856 nan 0.0010 0.0004
## 60 1.2685 nan 0.0010 0.0004
## 80 1.2524 nan 0.0010 0.0003
## 100 1.2362 nan 0.0010 0.0003
## 120 1.2205 nan 0.0010 0.0003
## 140 1.2054 nan 0.0010 0.0003
## 160 1.1911 nan 0.0010 0.0003
## 180 1.1771 nan 0.0010 0.0003
## 200 1.1633 nan 0.0010 0.0002
## 220 1.1500 nan 0.0010 0.0003
## 240 1.1372 nan 0.0010 0.0002
## 260 1.1248 nan 0.0010 0.0003
## 280 1.1125 nan 0.0010 0.0003
## 300 1.1007 nan 0.0010 0.0002
## 320 1.0894 nan 0.0010 0.0003
## 340 1.0785 nan 0.0010 0.0002
## 360 1.0679 nan 0.0010 0.0002
## 380 1.0574 nan 0.0010 0.0002
## 400 1.0470 nan 0.0010 0.0002
## 420 1.0372 nan 0.0010 0.0002
## 440 1.0273 nan 0.0010 0.0002
## 460 1.0179 nan 0.0010 0.0002
## 480 1.0087 nan 0.0010 0.0002
## 500 0.9999 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3202 nan 0.0010 0.0004
## 2 1.3193 nan 0.0010 0.0004
## 3 1.3184 nan 0.0010 0.0004
## 4 1.3175 nan 0.0010 0.0004
## 5 1.3166 nan 0.0010 0.0004
## 6 1.3157 nan 0.0010 0.0004
## 7 1.3147 nan 0.0010 0.0004
## 8 1.3138 nan 0.0010 0.0004
## 9 1.3129 nan 0.0010 0.0004
## 10 1.3120 nan 0.0010 0.0004
## 20 1.3031 nan 0.0010 0.0004
## 40 1.2856 nan 0.0010 0.0004
## 60 1.2691 nan 0.0010 0.0004
## 80 1.2530 nan 0.0010 0.0004
## 100 1.2373 nan 0.0010 0.0003
## 120 1.2222 nan 0.0010 0.0003
## 140 1.2073 nan 0.0010 0.0003
## 160 1.1931 nan 0.0010 0.0003
## 180 1.1792 nan 0.0010 0.0003
## 200 1.1659 nan 0.0010 0.0003
## 220 1.1527 nan 0.0010 0.0003
## 240 1.1399 nan 0.0010 0.0003
## 260 1.1278 nan 0.0010 0.0003
## 280 1.1157 nan 0.0010 0.0003
## 300 1.1040 nan 0.0010 0.0003
## 320 1.0927 nan 0.0010 0.0003
## 340 1.0818 nan 0.0010 0.0003
## 360 1.0710 nan 0.0010 0.0002
## 380 1.0605 nan 0.0010 0.0002
## 400 1.0502 nan 0.0010 0.0002
## 420 1.0407 nan 0.0010 0.0002
## 440 1.0313 nan 0.0010 0.0002
## 460 1.0219 nan 0.0010 0.0002
## 480 1.0128 nan 0.0010 0.0002
## 500 1.0040 nan 0.0010 0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3116 nan 0.0100 0.0043
## 2 1.3038 nan 0.0100 0.0032
## 3 1.2960 nan 0.0100 0.0037
## 4 1.2882 nan 0.0100 0.0037
## 5 1.2800 nan 0.0100 0.0038
## 6 1.2712 nan 0.0100 0.0037
## 7 1.2642 nan 0.0100 0.0030
## 8 1.2566 nan 0.0100 0.0034
## 9 1.2489 nan 0.0100 0.0035
## 10 1.2414 nan 0.0100 0.0031
## 20 1.1747 nan 0.0100 0.0030
## 40 1.0680 nan 0.0100 0.0021
## 60 0.9886 nan 0.0100 0.0013
## 80 0.9256 nan 0.0100 0.0012
## 100 0.8735 nan 0.0100 0.0006
## 120 0.8312 nan 0.0100 0.0007
## 140 0.7958 nan 0.0100 0.0006
## 160 0.7659 nan 0.0100 0.0005
## 180 0.7409 nan 0.0100 0.0002
## 200 0.7194 nan 0.0100 0.0002
## 220 0.6994 nan 0.0100 0.0001
## 240 0.6818 nan 0.0100 0.0002
## 260 0.6650 nan 0.0100 0.0001
## 280 0.6507 nan 0.0100 0.0000
## 300 0.6362 nan 0.0100 0.0000
## 320 0.6234 nan 0.0100 -0.0001
## 340 0.6116 nan 0.0100 -0.0001
## 360 0.6006 nan 0.0100 0.0000
## 380 0.5902 nan 0.0100 -0.0001
## 400 0.5809 nan 0.0100 -0.0000
## 420 0.5711 nan 0.0100 -0.0000
## 440 0.5619 nan 0.0100 -0.0000
## 460 0.5532 nan 0.0100 0.0000
## 480 0.5444 nan 0.0100 -0.0001
## 500 0.5359 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3142 nan 0.0100 0.0032
## 2 1.3058 nan 0.0100 0.0042
## 3 1.2976 nan 0.0100 0.0039
## 4 1.2897 nan 0.0100 0.0034
## 5 1.2819 nan 0.0100 0.0034
## 6 1.2739 nan 0.0100 0.0038
## 7 1.2668 nan 0.0100 0.0032
## 8 1.2599 nan 0.0100 0.0030
## 9 1.2528 nan 0.0100 0.0032
## 10 1.2459 nan 0.0100 0.0031
## 20 1.1825 nan 0.0100 0.0027
## 40 1.0741 nan 0.0100 0.0023
## 60 0.9923 nan 0.0100 0.0013
## 80 0.9289 nan 0.0100 0.0009
## 100 0.8753 nan 0.0100 0.0011
## 120 0.8324 nan 0.0100 0.0006
## 140 0.7982 nan 0.0100 0.0004
## 160 0.7689 nan 0.0100 0.0003
## 180 0.7443 nan 0.0100 0.0002
## 200 0.7225 nan 0.0100 0.0002
## 220 0.7038 nan 0.0100 0.0002
## 240 0.6865 nan 0.0100 0.0002
## 260 0.6703 nan 0.0100 -0.0001
## 280 0.6570 nan 0.0100 -0.0001
## 300 0.6439 nan 0.0100 0.0001
## 320 0.6324 nan 0.0100 -0.0001
## 340 0.6216 nan 0.0100 -0.0002
## 360 0.6104 nan 0.0100 0.0000
## 380 0.6000 nan 0.0100 -0.0001
## 400 0.5910 nan 0.0100 -0.0001
## 420 0.5825 nan 0.0100 -0.0000
## 440 0.5737 nan 0.0100 -0.0000
## 460 0.5642 nan 0.0100 -0.0000
## 480 0.5568 nan 0.0100 0.0001
## 500 0.5494 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3138 nan 0.0100 0.0035
## 2 1.3058 nan 0.0100 0.0034
## 3 1.2981 nan 0.0100 0.0037
## 4 1.2911 nan 0.0100 0.0031
## 5 1.2837 nan 0.0100 0.0036
## 6 1.2762 nan 0.0100 0.0036
## 7 1.2688 nan 0.0100 0.0033
## 8 1.2620 nan 0.0100 0.0032
## 9 1.2549 nan 0.0100 0.0033
## 10 1.2475 nan 0.0100 0.0034
## 20 1.1830 nan 0.0100 0.0026
## 40 1.0737 nan 0.0100 0.0023
## 60 0.9912 nan 0.0100 0.0016
## 80 0.9276 nan 0.0100 0.0010
## 100 0.8743 nan 0.0100 0.0010
## 120 0.8325 nan 0.0100 0.0006
## 140 0.7985 nan 0.0100 0.0005
## 160 0.7699 nan 0.0100 0.0003
## 180 0.7451 nan 0.0100 0.0003
## 200 0.7242 nan 0.0100 0.0001
## 220 0.7056 nan 0.0100 0.0001
## 240 0.6879 nan 0.0100 0.0000
## 260 0.6730 nan 0.0100 0.0001
## 280 0.6592 nan 0.0100 -0.0001
## 300 0.6462 nan 0.0100 0.0001
## 320 0.6341 nan 0.0100 0.0001
## 340 0.6232 nan 0.0100 -0.0001
## 360 0.6134 nan 0.0100 -0.0001
## 380 0.6031 nan 0.0100 0.0000
## 400 0.5951 nan 0.0100 -0.0001
## 420 0.5861 nan 0.0100 -0.0000
## 440 0.5777 nan 0.0100 0.0000
## 460 0.5699 nan 0.0100 -0.0000
## 480 0.5630 nan 0.0100 -0.0000
## 500 0.5551 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3121 nan 0.0100 0.0042
## 2 1.3032 nan 0.0100 0.0040
## 3 1.2948 nan 0.0100 0.0034
## 4 1.2861 nan 0.0100 0.0041
## 5 1.2780 nan 0.0100 0.0036
## 6 1.2700 nan 0.0100 0.0032
## 7 1.2617 nan 0.0100 0.0038
## 8 1.2539 nan 0.0100 0.0035
## 9 1.2463 nan 0.0100 0.0031
## 10 1.2390 nan 0.0100 0.0029
## 20 1.1687 nan 0.0100 0.0027
## 40 1.0539 nan 0.0100 0.0021
## 60 0.9694 nan 0.0100 0.0016
## 80 0.9014 nan 0.0100 0.0013
## 100 0.8477 nan 0.0100 0.0010
## 120 0.8017 nan 0.0100 0.0007
## 140 0.7655 nan 0.0100 0.0004
## 160 0.7349 nan 0.0100 0.0003
## 180 0.7088 nan 0.0100 0.0004
## 200 0.6848 nan 0.0100 0.0003
## 220 0.6638 nan 0.0100 0.0001
## 240 0.6451 nan 0.0100 0.0000
## 260 0.6270 nan 0.0100 0.0003
## 280 0.6114 nan 0.0100 0.0001
## 300 0.5973 nan 0.0100 -0.0000
## 320 0.5822 nan 0.0100 0.0000
## 340 0.5687 nan 0.0100 -0.0000
## 360 0.5561 nan 0.0100 -0.0000
## 380 0.5443 nan 0.0100 -0.0002
## 400 0.5336 nan 0.0100 -0.0000
## 420 0.5233 nan 0.0100 0.0001
## 440 0.5130 nan 0.0100 0.0000
## 460 0.5030 nan 0.0100 0.0000
## 480 0.4935 nan 0.0100 -0.0001
## 500 0.4838 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3122 nan 0.0100 0.0040
## 2 1.3033 nan 0.0100 0.0040
## 3 1.2951 nan 0.0100 0.0036
## 4 1.2876 nan 0.0100 0.0034
## 5 1.2791 nan 0.0100 0.0038
## 6 1.2714 nan 0.0100 0.0035
## 7 1.2636 nan 0.0100 0.0033
## 8 1.2556 nan 0.0100 0.0035
## 9 1.2476 nan 0.0100 0.0033
## 10 1.2397 nan 0.0100 0.0036
## 20 1.1702 nan 0.0100 0.0027
## 40 1.0581 nan 0.0100 0.0018
## 60 0.9735 nan 0.0100 0.0015
## 80 0.9068 nan 0.0100 0.0010
## 100 0.8526 nan 0.0100 0.0008
## 120 0.8078 nan 0.0100 0.0008
## 140 0.7710 nan 0.0100 0.0006
## 160 0.7404 nan 0.0100 0.0004
## 180 0.7143 nan 0.0100 0.0004
## 200 0.6920 nan 0.0100 0.0002
## 220 0.6720 nan 0.0100 0.0003
## 240 0.6539 nan 0.0100 0.0001
## 260 0.6367 nan 0.0100 0.0000
## 280 0.6207 nan 0.0100 0.0001
## 300 0.6071 nan 0.0100 0.0002
## 320 0.5941 nan 0.0100 0.0001
## 340 0.5813 nan 0.0100 0.0000
## 360 0.5705 nan 0.0100 0.0000
## 380 0.5586 nan 0.0100 0.0001
## 400 0.5474 nan 0.0100 -0.0000
## 420 0.5372 nan 0.0100 -0.0000
## 440 0.5270 nan 0.0100 0.0000
## 460 0.5174 nan 0.0100 -0.0000
## 480 0.5076 nan 0.0100 -0.0001
## 500 0.4980 nan 0.0100 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3120 nan 0.0100 0.0040
## 2 1.3035 nan 0.0100 0.0037
## 3 1.2951 nan 0.0100 0.0034
## 4 1.2873 nan 0.0100 0.0037
## 5 1.2795 nan 0.0100 0.0035
## 6 1.2724 nan 0.0100 0.0031
## 7 1.2645 nan 0.0100 0.0036
## 8 1.2566 nan 0.0100 0.0035
## 9 1.2485 nan 0.0100 0.0038
## 10 1.2407 nan 0.0100 0.0035
## 20 1.1713 nan 0.0100 0.0030
## 40 1.0605 nan 0.0100 0.0021
## 60 0.9725 nan 0.0100 0.0017
## 80 0.9070 nan 0.0100 0.0011
## 100 0.8535 nan 0.0100 0.0008
## 120 0.8103 nan 0.0100 0.0008
## 140 0.7723 nan 0.0100 0.0005
## 160 0.7422 nan 0.0100 0.0004
## 180 0.7172 nan 0.0100 0.0002
## 200 0.6944 nan 0.0100 0.0002
## 220 0.6758 nan 0.0100 0.0001
## 240 0.6588 nan 0.0100 -0.0000
## 260 0.6425 nan 0.0100 0.0001
## 280 0.6271 nan 0.0100 0.0002
## 300 0.6136 nan 0.0100 -0.0001
## 320 0.6012 nan 0.0100 0.0000
## 340 0.5897 nan 0.0100 0.0001
## 360 0.5779 nan 0.0100 -0.0000
## 380 0.5672 nan 0.0100 -0.0000
## 400 0.5569 nan 0.0100 -0.0001
## 420 0.5466 nan 0.0100 -0.0000
## 440 0.5373 nan 0.0100 0.0000
## 460 0.5286 nan 0.0100 -0.0001
## 480 0.5204 nan 0.0100 -0.0001
## 500 0.5114 nan 0.0100 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3114 nan 0.0100 0.0044
## 2 1.3016 nan 0.0100 0.0045
## 3 1.2932 nan 0.0100 0.0038
## 4 1.2849 nan 0.0100 0.0037
## 5 1.2757 nan 0.0100 0.0036
## 6 1.2675 nan 0.0100 0.0038
## 7 1.2588 nan 0.0100 0.0038
## 8 1.2503 nan 0.0100 0.0037
## 9 1.2423 nan 0.0100 0.0035
## 10 1.2344 nan 0.0100 0.0035
## 20 1.1620 nan 0.0100 0.0034
## 40 1.0457 nan 0.0100 0.0018
## 60 0.9544 nan 0.0100 0.0018
## 80 0.8837 nan 0.0100 0.0013
## 100 0.8264 nan 0.0100 0.0006
## 120 0.7797 nan 0.0100 0.0004
## 140 0.7412 nan 0.0100 0.0004
## 160 0.7080 nan 0.0100 0.0005
## 180 0.6801 nan 0.0100 0.0001
## 200 0.6550 nan 0.0100 0.0002
## 220 0.6320 nan 0.0100 0.0003
## 240 0.6109 nan 0.0100 0.0002
## 260 0.5920 nan 0.0100 0.0001
## 280 0.5743 nan 0.0100 0.0000
## 300 0.5579 nan 0.0100 -0.0001
## 320 0.5427 nan 0.0100 -0.0001
## 340 0.5273 nan 0.0100 0.0001
## 360 0.5143 nan 0.0100 0.0000
## 380 0.5018 nan 0.0100 0.0000
## 400 0.4899 nan 0.0100 0.0001
## 420 0.4787 nan 0.0100 -0.0000
## 440 0.4672 nan 0.0100 -0.0000
## 460 0.4570 nan 0.0100 0.0000
## 480 0.4465 nan 0.0100 -0.0000
## 500 0.4369 nan 0.0100 0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3121 nan 0.0100 0.0037
## 2 1.3027 nan 0.0100 0.0043
## 3 1.2931 nan 0.0100 0.0042
## 4 1.2842 nan 0.0100 0.0037
## 5 1.2756 nan 0.0100 0.0041
## 6 1.2672 nan 0.0100 0.0034
## 7 1.2584 nan 0.0100 0.0037
## 8 1.2498 nan 0.0100 0.0041
## 9 1.2423 nan 0.0100 0.0033
## 10 1.2341 nan 0.0100 0.0033
## 20 1.1607 nan 0.0100 0.0031
## 40 1.0444 nan 0.0100 0.0021
## 60 0.9572 nan 0.0100 0.0018
## 80 0.8890 nan 0.0100 0.0014
## 100 0.8336 nan 0.0100 0.0006
## 120 0.7884 nan 0.0100 0.0006
## 140 0.7514 nan 0.0100 0.0004
## 160 0.7196 nan 0.0100 0.0006
## 180 0.6921 nan 0.0100 0.0003
## 200 0.6661 nan 0.0100 0.0002
## 220 0.6446 nan 0.0100 0.0001
## 240 0.6240 nan 0.0100 0.0002
## 260 0.6053 nan 0.0100 0.0001
## 280 0.5890 nan 0.0100 -0.0001
## 300 0.5732 nan 0.0100 0.0001
## 320 0.5583 nan 0.0100 0.0001
## 340 0.5433 nan 0.0100 -0.0001
## 360 0.5305 nan 0.0100 -0.0001
## 380 0.5181 nan 0.0100 -0.0001
## 400 0.5066 nan 0.0100 -0.0000
## 420 0.4958 nan 0.0100 -0.0001
## 440 0.4843 nan 0.0100 -0.0001
## 460 0.4746 nan 0.0100 -0.0000
## 480 0.4641 nan 0.0100 0.0001
## 500 0.4535 nan 0.0100 -0.0000
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3120 nan 0.0100 0.0041
## 2 1.3031 nan 0.0100 0.0038
## 3 1.2938 nan 0.0100 0.0044
## 4 1.2851 nan 0.0100 0.0038
## 5 1.2770 nan 0.0100 0.0036
## 6 1.2685 nan 0.0100 0.0039
## 7 1.2603 nan 0.0100 0.0034
## 8 1.2515 nan 0.0100 0.0041
## 9 1.2433 nan 0.0100 0.0037
## 10 1.2349 nan 0.0100 0.0037
## 20 1.1622 nan 0.0100 0.0028
## 40 1.0461 nan 0.0100 0.0021
## 60 0.9589 nan 0.0100 0.0013
## 80 0.8892 nan 0.0100 0.0012
## 100 0.8348 nan 0.0100 0.0011
## 120 0.7904 nan 0.0100 0.0008
## 140 0.7533 nan 0.0100 0.0005
## 160 0.7209 nan 0.0100 0.0006
## 180 0.6939 nan 0.0100 0.0003
## 200 0.6704 nan 0.0100 0.0002
## 220 0.6499 nan 0.0100 0.0002
## 240 0.6299 nan 0.0100 0.0002
## 260 0.6132 nan 0.0100 0.0001
## 280 0.5967 nan 0.0100 -0.0000
## 300 0.5803 nan 0.0100 0.0001
## 320 0.5667 nan 0.0100 -0.0000
## 340 0.5538 nan 0.0100 0.0000
## 360 0.5412 nan 0.0100 0.0000
## 380 0.5289 nan 0.0100 -0.0000
## 400 0.5187 nan 0.0100 -0.0000
## 420 0.5079 nan 0.0100 -0.0001
## 440 0.4971 nan 0.0100 0.0000
## 460 0.4867 nan 0.0100 0.0001
## 480 0.4778 nan 0.0100 -0.0002
## 500 0.4683 nan 0.0100 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2432 nan 0.1000 0.0336
## 2 1.1832 nan 0.1000 0.0260
## 3 1.1225 nan 0.1000 0.0251
## 4 1.0738 nan 0.1000 0.0222
## 5 1.0251 nan 0.1000 0.0237
## 6 0.9873 nan 0.1000 0.0149
## 7 0.9565 nan 0.1000 0.0127
## 8 0.9301 nan 0.1000 0.0082
## 9 0.9022 nan 0.1000 0.0113
## 10 0.8770 nan 0.1000 0.0093
## 20 0.7236 nan 0.1000 0.0047
## 40 0.5820 nan 0.1000 0.0002
## 60 0.5024 nan 0.1000 -0.0009
## 80 0.4433 nan 0.1000 -0.0008
## 100 0.3918 nan 0.1000 -0.0006
## 120 0.3489 nan 0.1000 -0.0005
## 140 0.3145 nan 0.1000 -0.0002
## 160 0.2823 nan 0.1000 -0.0003
## 180 0.2515 nan 0.1000 -0.0001
## 200 0.2259 nan 0.1000 -0.0002
## 220 0.2030 nan 0.1000 -0.0001
## 240 0.1849 nan 0.1000 -0.0002
## 260 0.1668 nan 0.1000 -0.0002
## 280 0.1520 nan 0.1000 -0.0006
## 300 0.1375 nan 0.1000 -0.0003
## 320 0.1265 nan 0.1000 -0.0002
## 340 0.1171 nan 0.1000 -0.0005
## 360 0.1084 nan 0.1000 -0.0004
## 380 0.0995 nan 0.1000 -0.0004
## 400 0.0920 nan 0.1000 -0.0002
## 420 0.0852 nan 0.1000 -0.0005
## 440 0.0783 nan 0.1000 -0.0002
## 460 0.0714 nan 0.1000 -0.0001
## 480 0.0653 nan 0.1000 0.0001
## 500 0.0604 nan 0.1000 -0.0003
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2448 nan 0.1000 0.0335
## 2 1.1728 nan 0.1000 0.0304
## 3 1.1139 nan 0.1000 0.0289
## 4 1.0616 nan 0.1000 0.0232
## 5 1.0202 nan 0.1000 0.0155
## 6 0.9832 nan 0.1000 0.0165
## 7 0.9481 nan 0.1000 0.0152
## 8 0.9187 nan 0.1000 0.0128
## 9 0.8900 nan 0.1000 0.0126
## 10 0.8715 nan 0.1000 0.0064
## 20 0.7191 nan 0.1000 0.0032
## 40 0.6004 nan 0.1000 0.0004
## 60 0.5260 nan 0.1000 0.0006
## 80 0.4579 nan 0.1000 -0.0017
## 100 0.4086 nan 0.1000 -0.0001
## 120 0.3613 nan 0.1000 -0.0010
## 140 0.3241 nan 0.1000 -0.0018
## 160 0.2930 nan 0.1000 -0.0007
## 180 0.2645 nan 0.1000 -0.0008
## 200 0.2371 nan 0.1000 -0.0002
## 220 0.2178 nan 0.1000 -0.0009
## 240 0.1964 nan 0.1000 -0.0008
## 260 0.1775 nan 0.1000 -0.0002
## 280 0.1616 nan 0.1000 0.0001
## 300 0.1482 nan 0.1000 -0.0003
## 320 0.1357 nan 0.1000 -0.0003
## 340 0.1250 nan 0.1000 -0.0002
## 360 0.1155 nan 0.1000 -0.0004
## 380 0.1050 nan 0.1000 -0.0003
## 400 0.0967 nan 0.1000 -0.0003
## 420 0.0886 nan 0.1000 -0.0003
## 440 0.0810 nan 0.1000 -0.0003
## 460 0.0745 nan 0.1000 -0.0002
## 480 0.0688 nan 0.1000 -0.0002
## 500 0.0639 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2391 nan 0.1000 0.0330
## 2 1.1805 nan 0.1000 0.0259
## 3 1.1283 nan 0.1000 0.0234
## 4 1.0777 nan 0.1000 0.0195
## 5 1.0300 nan 0.1000 0.0220
## 6 0.9886 nan 0.1000 0.0173
## 7 0.9548 nan 0.1000 0.0136
## 8 0.9266 nan 0.1000 0.0101
## 9 0.9003 nan 0.1000 0.0116
## 10 0.8749 nan 0.1000 0.0113
## 20 0.7291 nan 0.1000 0.0008
## 40 0.6100 nan 0.1000 -0.0009
## 60 0.5254 nan 0.1000 -0.0003
## 80 0.4722 nan 0.1000 -0.0013
## 100 0.4222 nan 0.1000 -0.0004
## 120 0.3757 nan 0.1000 -0.0015
## 140 0.3434 nan 0.1000 -0.0009
## 160 0.3103 nan 0.1000 -0.0003
## 180 0.2817 nan 0.1000 -0.0007
## 200 0.2546 nan 0.1000 0.0001
## 220 0.2308 nan 0.1000 -0.0004
## 240 0.2109 nan 0.1000 -0.0005
## 260 0.1930 nan 0.1000 -0.0005
## 280 0.1780 nan 0.1000 -0.0006
## 300 0.1630 nan 0.1000 -0.0002
## 320 0.1502 nan 0.1000 -0.0002
## 340 0.1388 nan 0.1000 -0.0006
## 360 0.1278 nan 0.1000 -0.0005
## 380 0.1192 nan 0.1000 -0.0005
## 400 0.1099 nan 0.1000 -0.0002
## 420 0.1009 nan 0.1000 -0.0002
## 440 0.0934 nan 0.1000 -0.0001
## 460 0.0861 nan 0.1000 -0.0004
## 480 0.0796 nan 0.1000 -0.0003
## 500 0.0741 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2333 nan 0.1000 0.0386
## 2 1.1692 nan 0.1000 0.0276
## 3 1.1075 nan 0.1000 0.0260
## 4 1.0580 nan 0.1000 0.0213
## 5 1.0118 nan 0.1000 0.0209
## 6 0.9712 nan 0.1000 0.0167
## 7 0.9383 nan 0.1000 0.0113
## 8 0.9082 nan 0.1000 0.0100
## 9 0.8795 nan 0.1000 0.0111
## 10 0.8524 nan 0.1000 0.0116
## 20 0.6876 nan 0.1000 0.0010
## 40 0.5497 nan 0.1000 -0.0014
## 60 0.4523 nan 0.1000 -0.0001
## 80 0.3826 nan 0.1000 0.0003
## 100 0.3275 nan 0.1000 -0.0008
## 120 0.2811 nan 0.1000 -0.0003
## 140 0.2452 nan 0.1000 0.0000
## 160 0.2144 nan 0.1000 0.0000
## 180 0.1864 nan 0.1000 -0.0000
## 200 0.1649 nan 0.1000 -0.0007
## 220 0.1475 nan 0.1000 -0.0002
## 240 0.1314 nan 0.1000 -0.0003
## 260 0.1165 nan 0.1000 -0.0003
## 280 0.1040 nan 0.1000 -0.0001
## 300 0.0935 nan 0.1000 -0.0003
## 320 0.0837 nan 0.1000 0.0000
## 340 0.0752 nan 0.1000 -0.0001
## 360 0.0672 nan 0.1000 -0.0001
## 380 0.0603 nan 0.1000 -0.0002
## 400 0.0553 nan 0.1000 -0.0001
## 420 0.0498 nan 0.1000 -0.0001
## 440 0.0444 nan 0.1000 -0.0001
## 460 0.0404 nan 0.1000 -0.0002
## 480 0.0364 nan 0.1000 -0.0001
## 500 0.0330 nan 0.1000 0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2345 nan 0.1000 0.0346
## 2 1.1623 nan 0.1000 0.0303
## 3 1.1022 nan 0.1000 0.0260
## 4 1.0435 nan 0.1000 0.0229
## 5 0.9938 nan 0.1000 0.0222
## 6 0.9524 nan 0.1000 0.0166
## 7 0.9193 nan 0.1000 0.0130
## 8 0.8871 nan 0.1000 0.0134
## 9 0.8600 nan 0.1000 0.0077
## 10 0.8371 nan 0.1000 0.0077
## 20 0.6800 nan 0.1000 0.0042
## 40 0.5428 nan 0.1000 0.0000
## 60 0.4538 nan 0.1000 -0.0006
## 80 0.3785 nan 0.1000 -0.0008
## 100 0.3270 nan 0.1000 -0.0004
## 120 0.2917 nan 0.1000 -0.0012
## 140 0.2557 nan 0.1000 -0.0011
## 160 0.2242 nan 0.1000 -0.0003
## 180 0.1996 nan 0.1000 -0.0014
## 200 0.1765 nan 0.1000 -0.0006
## 220 0.1589 nan 0.1000 -0.0004
## 240 0.1411 nan 0.1000 -0.0004
## 260 0.1262 nan 0.1000 -0.0003
## 280 0.1122 nan 0.1000 -0.0006
## 300 0.0997 nan 0.1000 -0.0002
## 320 0.0889 nan 0.1000 -0.0005
## 340 0.0807 nan 0.1000 0.0000
## 360 0.0723 nan 0.1000 -0.0003
## 380 0.0657 nan 0.1000 -0.0001
## 400 0.0588 nan 0.1000 -0.0001
## 420 0.0533 nan 0.1000 -0.0002
## 440 0.0482 nan 0.1000 -0.0002
## 460 0.0439 nan 0.1000 -0.0000
## 480 0.0401 nan 0.1000 -0.0001
## 500 0.0365 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2475 nan 0.1000 0.0341
## 2 1.1685 nan 0.1000 0.0345
## 3 1.1105 nan 0.1000 0.0268
## 4 1.0597 nan 0.1000 0.0208
## 5 1.0166 nan 0.1000 0.0209
## 6 0.9775 nan 0.1000 0.0155
## 7 0.9476 nan 0.1000 0.0102
## 8 0.9175 nan 0.1000 0.0107
## 9 0.8897 nan 0.1000 0.0100
## 10 0.8653 nan 0.1000 0.0100
## 20 0.7041 nan 0.1000 0.0028
## 40 0.5632 nan 0.1000 0.0006
## 60 0.4672 nan 0.1000 -0.0010
## 80 0.4073 nan 0.1000 0.0002
## 100 0.3586 nan 0.1000 -0.0007
## 120 0.3133 nan 0.1000 -0.0007
## 140 0.2735 nan 0.1000 -0.0008
## 160 0.2416 nan 0.1000 -0.0003
## 180 0.2166 nan 0.1000 -0.0007
## 200 0.1915 nan 0.1000 -0.0004
## 220 0.1689 nan 0.1000 -0.0006
## 240 0.1514 nan 0.1000 -0.0002
## 260 0.1352 nan 0.1000 -0.0005
## 280 0.1228 nan 0.1000 -0.0004
## 300 0.1097 nan 0.1000 -0.0006
## 320 0.0990 nan 0.1000 -0.0005
## 340 0.0891 nan 0.1000 -0.0002
## 360 0.0805 nan 0.1000 -0.0002
## 380 0.0725 nan 0.1000 -0.0003
## 400 0.0660 nan 0.1000 -0.0003
## 420 0.0599 nan 0.1000 -0.0002
## 440 0.0536 nan 0.1000 -0.0002
## 460 0.0486 nan 0.1000 -0.0001
## 480 0.0443 nan 0.1000 -0.0001
## 500 0.0403 nan 0.1000 -0.0002
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2381 nan 0.1000 0.0410
## 2 1.1672 nan 0.1000 0.0317
## 3 1.1027 nan 0.1000 0.0291
## 4 1.0442 nan 0.1000 0.0248
## 5 0.9980 nan 0.1000 0.0229
## 6 0.9570 nan 0.1000 0.0136
## 7 0.9175 nan 0.1000 0.0149
## 8 0.8893 nan 0.1000 0.0105
## 9 0.8597 nan 0.1000 0.0115
## 10 0.8329 nan 0.1000 0.0074
## 20 0.6609 nan 0.1000 0.0033
## 40 0.4933 nan 0.1000 -0.0002
## 60 0.3999 nan 0.1000 -0.0004
## 80 0.3292 nan 0.1000 0.0007
## 100 0.2711 nan 0.1000 -0.0006
## 120 0.2291 nan 0.1000 -0.0009
## 140 0.1957 nan 0.1000 -0.0005
## 160 0.1684 nan 0.1000 -0.0002
## 180 0.1439 nan 0.1000 0.0001
## 200 0.1264 nan 0.1000 -0.0003
## 220 0.1092 nan 0.1000 -0.0002
## 240 0.0954 nan 0.1000 -0.0003
## 260 0.0838 nan 0.1000 -0.0002
## 280 0.0748 nan 0.1000 0.0001
## 300 0.0662 nan 0.1000 -0.0002
## 320 0.0571 nan 0.1000 -0.0002
## 340 0.0508 nan 0.1000 0.0001
## 360 0.0452 nan 0.1000 -0.0000
## 380 0.0396 nan 0.1000 -0.0000
## 400 0.0351 nan 0.1000 0.0000
## 420 0.0314 nan 0.1000 -0.0001
## 440 0.0279 nan 0.1000 -0.0000
## 460 0.0246 nan 0.1000 -0.0001
## 480 0.0221 nan 0.1000 0.0000
## 500 0.0197 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2324 nan 0.1000 0.0405
## 2 1.1554 nan 0.1000 0.0331
## 3 1.0897 nan 0.1000 0.0298
## 4 1.0326 nan 0.1000 0.0245
## 5 0.9879 nan 0.1000 0.0189
## 6 0.9449 nan 0.1000 0.0177
## 7 0.9059 nan 0.1000 0.0158
## 8 0.8751 nan 0.1000 0.0148
## 9 0.8435 nan 0.1000 0.0112
## 10 0.8181 nan 0.1000 0.0097
## 20 0.6571 nan 0.1000 0.0044
## 40 0.5023 nan 0.1000 -0.0012
## 60 0.4126 nan 0.1000 -0.0007
## 80 0.3326 nan 0.1000 -0.0009
## 100 0.2784 nan 0.1000 -0.0000
## 120 0.2354 nan 0.1000 -0.0011
## 140 0.1981 nan 0.1000 -0.0004
## 160 0.1679 nan 0.1000 -0.0006
## 180 0.1450 nan 0.1000 -0.0004
## 200 0.1242 nan 0.1000 -0.0003
## 220 0.1086 nan 0.1000 -0.0004
## 240 0.0952 nan 0.1000 -0.0001
## 260 0.0826 nan 0.1000 -0.0003
## 280 0.0728 nan 0.1000 -0.0002
## 300 0.0639 nan 0.1000 -0.0002
## 320 0.0564 nan 0.1000 -0.0002
## 340 0.0495 nan 0.1000 -0.0001
## 360 0.0437 nan 0.1000 -0.0002
## 380 0.0390 nan 0.1000 -0.0001
## 400 0.0348 nan 0.1000 -0.0002
## 420 0.0308 nan 0.1000 -0.0001
## 440 0.0270 nan 0.1000 -0.0001
## 460 0.0241 nan 0.1000 -0.0001
## 480 0.0213 nan 0.1000 -0.0001
## 500 0.0189 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2340 nan 0.1000 0.0379
## 2 1.1660 nan 0.1000 0.0339
## 3 1.1029 nan 0.1000 0.0279
## 4 1.0487 nan 0.1000 0.0230
## 5 0.9986 nan 0.1000 0.0190
## 6 0.9566 nan 0.1000 0.0193
## 7 0.9207 nan 0.1000 0.0134
## 8 0.8892 nan 0.1000 0.0104
## 9 0.8597 nan 0.1000 0.0114
## 10 0.8305 nan 0.1000 0.0110
## 20 0.6697 nan 0.1000 0.0026
## 40 0.5193 nan 0.1000 0.0002
## 60 0.4286 nan 0.1000 -0.0018
## 80 0.3600 nan 0.1000 -0.0019
## 100 0.3020 nan 0.1000 -0.0013
## 120 0.2533 nan 0.1000 -0.0007
## 140 0.2152 nan 0.1000 -0.0001
## 160 0.1885 nan 0.1000 -0.0006
## 180 0.1631 nan 0.1000 -0.0004
## 200 0.1424 nan 0.1000 -0.0009
## 220 0.1222 nan 0.1000 -0.0004
## 240 0.1071 nan 0.1000 -0.0003
## 260 0.0943 nan 0.1000 -0.0008
## 280 0.0831 nan 0.1000 -0.0003
## 300 0.0732 nan 0.1000 -0.0003
## 320 0.0640 nan 0.1000 -0.0002
## 340 0.0569 nan 0.1000 -0.0002
## 360 0.0509 nan 0.1000 -0.0002
## 380 0.0449 nan 0.1000 -0.0001
## 400 0.0398 nan 0.1000 -0.0002
## 420 0.0350 nan 0.1000 -0.0001
## 440 0.0312 nan 0.1000 -0.0001
## 460 0.0278 nan 0.1000 -0.0000
## 480 0.0249 nan 0.1000 -0.0001
## 500 0.0225 nan 0.1000 -0.0001
##
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.2390 nan 0.1000 0.0358
## 2 1.1744 nan 0.1000 0.0271
## 3 1.1110 nan 0.1000 0.0288
## 4 1.0590 nan 0.1000 0.0239
## 5 1.0149 nan 0.1000 0.0193
## 6 0.9817 nan 0.1000 0.0133
## 7 0.9503 nan 0.1000 0.0135
## 8 0.9206 nan 0.1000 0.0134
## 9 0.8915 nan 0.1000 0.0109
## 10 0.8643 nan 0.1000 0.0113
## 20 0.7128 nan 0.1000 0.0021
## 40 0.5738 nan 0.1000 0.0002
## 60 0.4796 nan 0.1000 -0.0001
## 80 0.4069 nan 0.1000 0.0007
## 100 0.3495 nan 0.1000 -0.0002
## 120 0.3062 nan 0.1000 -0.0005
## 140 0.2678 nan 0.1000 -0.0003
## 160 0.2363 nan 0.1000 -0.0004
## 180 0.2097 nan 0.1000 -0.0001
## 200 0.1871 nan 0.1000 -0.0004
## 220 0.1651 nan 0.1000 -0.0000
## 240 0.1501 nan 0.1000 -0.0003
## 260 0.1321 nan 0.1000 -0.0002
## 280 0.1185 nan 0.1000 -0.0001
## 300 0.1062 nan 0.1000 -0.0003
## 320 0.0968 nan 0.1000 0.0000
## 340 0.0878 nan 0.1000 -0.0001
## 360 0.0795 nan 0.1000 -0.0001
## 380 0.0722 nan 0.1000 0.0000
## 400 0.0656 nan 0.1000 -0.0001
## 420 0.0600 nan 0.1000 -0.0001
## 440 0.0545 nan 0.1000 -0.0001
## 460 0.0495 nan 0.1000 0.0000
## 480 0.0456 nan 0.1000 -0.0002
## 500 0.0410 nan 0.1000 0.0000
##################################
# Reporting the cross-validation results
# for the train set
##################################
# Printing the caret train object renders the resampling summary shown below
MBS_GBM_Tune
## Stochastic Gradient Boosting
##
## 912 samples
## 6 predictor
## 2 classes: 'B', 'M'
##
## No pre-processing
## Resampling: Cross-Validated (5 fold, repeated 5 times)
## Summary of sample sizes: 729, 729, 730, 730, 730, 730, ...
## Resampling results across tuning parameters:
##
## shrinkage interaction.depth n.minobsinnode ROC Sens Spec
## 0.001 4 5 0.8855789 0.9562868 0.4994118
## 0.001 4 10 0.8856134 0.9583799 0.4976471
## 0.001 4 15 0.8847856 0.9576751 0.5035294
## 0.001 5 5 0.8905689 0.9555850 0.5411765
## 0.001 5 10 0.8896630 0.9583768 0.5341176
## 0.001 5 15 0.8897455 0.9583799 0.5347059
## 0.001 6 5 0.8949840 0.9580320 0.5570588
## 0.001 6 10 0.8937728 0.9587216 0.5547059
## 0.001 6 15 0.8924258 0.9587307 0.5470588
## 0.010 4 5 0.9123163 0.9006865 0.7588235
## 0.010 4 10 0.9103749 0.8978978 0.7541176
## 0.010 4 15 0.9096421 0.8964882 0.7547059
## 0.010 5 5 0.9191473 0.8985843 0.7676471
## 0.010 5 10 0.9173787 0.9027887 0.7670588
## 0.010 5 15 0.9148426 0.9010313 0.7694118
## 0.010 6 5 0.9256190 0.9066270 0.7835294
## 0.010 6 10 0.9232580 0.9062853 0.7811765
## 0.010 6 15 0.9205954 0.9101236 0.7752941
## 0.100 4 5 0.9611713 0.9457818 0.9011765
## 0.100 4 10 0.9614875 0.9485889 0.9011765
## 0.100 4 15 0.9585759 0.9506758 0.9011765
## 0.100 5 5 0.9647306 0.9492784 0.8964706
## 0.100 5 10 0.9623021 0.9503219 0.9000000
## 0.100 5 15 0.9599509 0.9471915 0.9011765
## 0.100 6 5 0.9644697 0.9468284 0.9041176
## 0.100 6 10 0.9638927 0.9489214 0.9000000
## 0.100 6 15 0.9622481 0.9499771 0.8964706
##
## Tuning parameter 'n.trees' was held constant at a value of 500
## ROC was used to select the optimal model using the largest value.
## The final values used for the model were n.trees = 500, interaction.depth =
## 5, shrinkage = 0.1 and n.minobsinnode = 5.
# Inspect the final fitted gbm model selected by the tuning process
MBS_GBM_Tune$finalModel
## A gradient boosted model with bernoulli loss function.
## 500 iterations were performed.
## There were 6 predictors of which 6 had non-zero influence.
# Inspect the full resampling results across all tuning parameter combinations
MBS_GBM_Tune$results
## shrinkage interaction.depth n.minobsinnode n.trees ROC Sens
## 1 0.001 4 5 500 0.8855789 0.9562868
## 2 0.001 4 10 500 0.8856134 0.9583799
## 3 0.001 4 15 500 0.8847856 0.9576751
## 10 0.010 4 5 500 0.9123163 0.9006865
## 11 0.010 4 10 500 0.9103749 0.8978978
## 12 0.010 4 15 500 0.9096421 0.8964882
## 19 0.100 4 5 500 0.9611713 0.9457818
## 20 0.100 4 10 500 0.9614875 0.9485889
## 21 0.100 4 15 500 0.9585759 0.9506758
## 4 0.001 5 5 500 0.8905689 0.9555850
## 5 0.001 5 10 500 0.8896630 0.9583768
## 6 0.001 5 15 500 0.8897455 0.9583799
## 13 0.010 5 5 500 0.9191473 0.8985843
## 14 0.010 5 10 500 0.9173787 0.9027887
## 15 0.010 5 15 500 0.9148426 0.9010313
## 22 0.100 5 5 500 0.9647306 0.9492784
## 23 0.100 5 10 500 0.9623021 0.9503219
## 24 0.100 5 15 500 0.9599509 0.9471915
## 7 0.001 6 5 500 0.8949840 0.9580320
## 8 0.001 6 10 500 0.8937728 0.9587216
## 9 0.001 6 15 500 0.8924258 0.9587307
## 16 0.010 6 5 500 0.9256190 0.9066270
## 17 0.010 6 10 500 0.9232580 0.9062853
## 18 0.010 6 15 500 0.9205954 0.9101236
## 25 0.100 6 5 500 0.9644697 0.9468284
## 26 0.100 6 10 500 0.9638927 0.9489214
## 27 0.100 6 15 500 0.9622481 0.9499771
## Spec ROCSD SensSD SpecSD
## 1 0.4994118 0.02405694 0.01921378 0.07346566
## 2 0.4976471 0.02439127 0.01843250 0.06880404
## 3 0.5035294 0.02463188 0.01817604 0.07429503
## 10 0.7588235 0.02111331 0.02157003 0.05111925
## 11 0.7541176 0.02115259 0.01996058 0.05378548
## 12 0.7547059 0.02078841 0.02241857 0.04853684
## 19 0.9011765 0.02144100 0.02674577 0.04038990
## 20 0.9011765 0.02063435 0.02529413 0.04016618
## 21 0.9011765 0.02002030 0.02834439 0.03689194
## 4 0.5411765 0.02375077 0.01854122 0.07039905
## 5 0.5341176 0.02388615 0.01809084 0.06278338
## 6 0.5347059 0.02371382 0.01807513 0.05895818
## 13 0.7676471 0.02037398 0.01977190 0.05233862
## 14 0.7670588 0.02138825 0.02131783 0.05211086
## 15 0.7694118 0.02096681 0.01922948 0.04426446
## 22 0.8964706 0.01848443 0.02802019 0.03760798
## 23 0.9000000 0.01999341 0.02767224 0.04093944
## 24 0.9011765 0.02014616 0.02444462 0.04061238
## 7 0.5570588 0.02361444 0.01674995 0.07183338
## 8 0.5547059 0.02400943 0.01668693 0.06514999
## 9 0.5470588 0.02430751 0.01664892 0.06576671
## 16 0.7835294 0.02103619 0.01924743 0.04905391
## 17 0.7811765 0.02057186 0.01993281 0.05099924
## 18 0.7752941 0.02101536 0.01799566 0.05173601
## 25 0.9041176 0.02010398 0.03064104 0.04393754
## 26 0.9000000 0.02091672 0.02910325 0.03867582
## 27 0.8964706 0.02050969 0.02508434 0.04169837
# Extract the cross-validated AUROC for the best GBM tuning parameter
# combination (outer parentheses print the assigned value)
(MBS_GBM_Train_AUROC <- MBS_GBM_Tune$results[
  MBS_GBM_Tune$results$n.trees == MBS_GBM_Tune$bestTune$n.trees &
    MBS_GBM_Tune$results$shrinkage == MBS_GBM_Tune$bestTune$shrinkage &
    MBS_GBM_Tune$results$n.minobsinnode == MBS_GBM_Tune$bestTune$n.minobsinnode &
    MBS_GBM_Tune$results$interaction.depth == MBS_GBM_Tune$bestTune$interaction.depth,
  c("ROC")])
## [1] 0.9647306
##################################
# Identifying and plotting the
# best model predictors
##################################
# Rank predictors by scaled variable importance from the tuned GBM model
MBS_GBM_VarImp <- varImp(MBS_GBM_Tune, scale = TRUE)
plot(MBS_GBM_VarImp,
     top = 6,
     scales = list(y = list(cex = .95)),
     main = "Ranked Variable Importance : Stochastic Gradient Boosting",
     xlab = "Scaled Variable Importance Metrics",
     ylab = "Predictors",
     cex = 2,
     origin = 0,
     alpha = 0.45)
##################################
# Independently evaluating the model
# on the test set
##################################
# Pair observed test-set diagnoses with class probability predictions;
# data.frame() expands the prob matrix into ...Predicted.B / ...Predicted.M columns
MBS_GBM_Test <- data.frame(MBS_GBM_Test_Observed = MA_Test$diagnosis,
                           MBS_GBM_Test_Predicted = predict(MBS_GBM_Tune,
                                                            MA_Test[, !names(MA_Test) %in% c("diagnosis")],
                                                            type = "prob"))
##################################
# Reporting the independent evaluation results
# for the test set
##################################
# Build the ROC curve from the predicted probability of the 'M' (malignant) class
MBS_GBM_Test_ROC <- roc(response = MBS_GBM_Test$MBS_GBM_Test_Observed,
                        predictor = MBS_GBM_Test$MBS_GBM_Test_Predicted.M,
                        levels = rev(levels(MBS_GBM_Test$MBS_GBM_Test_Observed)))
(MBS_GBM_Test_AUROC <- auc(MBS_GBM_Test_ROC)[1])
## [1] 0.9830651
##################################
# Setting the conditions
# for hyperparameter tuning
##################################
# 3 x 3 x 3 grid over max_depth, eta and gamma; other parameters held constant
XGB_Grid <- expand.grid(nrounds = 500,
                        max_depth = c(4, 5, 6),
                        eta = c(0.2, 0.3, 0.4),
                        gamma = c(0.1, 0.01, 0.001),
                        colsample_bytree = 1,
                        min_child_weight = 1,
                        subsample = 1)
##################################
# Running the extreme gradient boosting model
# by setting the caret method to 'xgbTree'
##################################
set.seed(12345678)
MBS_XGB_Tune <- train(x = MA_Train[, !names(MA_Train) %in% c("diagnosis")],
                      y = MA_Train$diagnosis,
                      method = "xgbTree",
                      tuneGrid = XGB_Grid,
                      metric = "ROC",
                      trControl = RKFold_Control)
##################################
# Reporting the cross-validation results
# for the train set
##################################
MBS_XGB_Tune
## eXtreme Gradient Boosting
##
## 912 samples
## 6 predictor
## 2 classes: 'B', 'M'
##
## No pre-processing
## Resampling: Cross-Validated (5 fold, repeated 5 times)
## Summary of sample sizes: 729, 729, 730, 730, 730, 730, ...
## Resampling results across tuning parameters:
##
## eta max_depth gamma ROC Sens Spec
## 0.2 4 0.001 0.9626835 0.9527841 0.9005882
## 0.2 4 0.010 0.9633867 0.9527841 0.9023529
## 0.2 4 0.100 0.9596240 0.9503402 0.9088235
## 0.2 5 0.001 0.9626556 0.9520732 0.8982353
## 0.2 5 0.010 0.9629735 0.9527750 0.9029412
## 0.2 5 0.100 0.9611275 0.9534859 0.9011765
## 0.2 6 0.001 0.9634468 0.9503432 0.9000000
## 0.2 6 0.010 0.9633502 0.9489336 0.9000000
## 0.2 6 0.100 0.9605459 0.9499924 0.8988235
## 0.3 4 0.001 0.9629143 0.9545294 0.8958824
## 0.3 4 0.010 0.9615315 0.9520793 0.8994118
## 0.3 4 0.100 0.9582784 0.9520763 0.9011765
## 0.3 5 0.001 0.9619608 0.9534798 0.9005882
## 0.3 5 0.010 0.9613663 0.9531350 0.9029412
## 0.3 5 0.100 0.9610368 0.9513959 0.9029412
## 0.3 6 0.001 0.9640349 0.9562777 0.9000000
## 0.3 6 0.010 0.9630643 0.9520915 0.9023529
## 0.3 6 0.100 0.9599690 0.9496506 0.8994118
## 0.4 4 0.001 0.9623312 0.9545385 0.8929412
## 0.4 4 0.010 0.9614739 0.9548772 0.8982353
## 0.4 4 0.100 0.9605573 0.9531228 0.8958824
## 0.4 5 0.001 0.9627857 0.9534706 0.8976471
## 0.4 5 0.010 0.9616459 0.9510359 0.8976471
## 0.4 5 0.100 0.9599004 0.9496415 0.9011765
## 0.4 6 0.001 0.9634008 0.9559298 0.8923529
## 0.4 6 0.010 0.9635828 0.9545294 0.8982353
## 0.4 6 0.100 0.9604719 0.9538337 0.8958824
##
## Tuning parameter 'nrounds' was held constant at a value of 500
## Tuning
##
## Tuning parameter 'min_child_weight' was held constant at a value of 1
##
## Tuning parameter 'subsample' was held constant at a value of 1
## ROC was used to select the optimal model using the largest value.
## The final values used for the model were nrounds = 500, max_depth = 6, eta
## = 0.3, gamma = 0.001, colsample_bytree = 1, min_child_weight = 1 and
## subsample = 1.
# Inspect the final fitted xgb.Booster selected by the tuning process
MBS_XGB_Tune$finalModel
## ##### xgb.Booster
## raw: 538.2 Kb
## call:
## xgboost::xgb.train(params = list(eta = param$eta, max_depth = param$max_depth,
## gamma = param$gamma, colsample_bytree = param$colsample_bytree,
## min_child_weight = param$min_child_weight, subsample = param$subsample),
## data = x, nrounds = param$nrounds, objective = "binary:logistic")
## params (as set within xgb.train):
## eta = "0.3", max_depth = "6", gamma = "0.001", colsample_bytree = "1", min_child_weight = "1", subsample = "1", objective = "binary:logistic", validate_parameters = "TRUE"
## xgb.attributes:
## niter
## callbacks:
## cb.print.evaluation(period = print_every_n)
## # of features: 6
## niter: 500
## nfeatures : 6
## xNames : texture_mean smoothness_mean compactness_se texture_worst smoothness_worst symmetry_worst
## problemType : Classification
## tuneValue :
## nrounds max_depth eta gamma colsample_bytree min_child_weight subsample
## 16 500 6 0.3 0.001 1 1 1
## obsLevels : B M
## param :
## list()
# Inspect the full resampling results across all tuning parameter combinations
MBS_XGB_Tune$results
## eta max_depth gamma colsample_bytree min_child_weight subsample nrounds
## 1 0.2 4 0.001 1 1 1 500
## 2 0.2 4 0.010 1 1 1 500
## 3 0.2 4 0.100 1 1 1 500
## 10 0.3 4 0.001 1 1 1 500
## 11 0.3 4 0.010 1 1 1 500
## 12 0.3 4 0.100 1 1 1 500
## 19 0.4 4 0.001 1 1 1 500
## 20 0.4 4 0.010 1 1 1 500
## 21 0.4 4 0.100 1 1 1 500
## 4 0.2 5 0.001 1 1 1 500
## 5 0.2 5 0.010 1 1 1 500
## 6 0.2 5 0.100 1 1 1 500
## 13 0.3 5 0.001 1 1 1 500
## 14 0.3 5 0.010 1 1 1 500
## 15 0.3 5 0.100 1 1 1 500
## 22 0.4 5 0.001 1 1 1 500
## 23 0.4 5 0.010 1 1 1 500
## 24 0.4 5 0.100 1 1 1 500
## 7 0.2 6 0.001 1 1 1 500
## 8 0.2 6 0.010 1 1 1 500
## 9 0.2 6 0.100 1 1 1 500
## 16 0.3 6 0.001 1 1 1 500
## 17 0.3 6 0.010 1 1 1 500
## 18 0.3 6 0.100 1 1 1 500
## 25 0.4 6 0.001 1 1 1 500
## 26 0.4 6 0.010 1 1 1 500
## 27 0.4 6 0.100 1 1 1 500
## ROC Sens Spec ROCSD SensSD SpecSD
## 1 0.9626835 0.9527841 0.9005882 0.01990016 0.02550560 0.03943261
## 2 0.9633867 0.9527841 0.9023529 0.02079769 0.02622359 0.04388829
## 3 0.9596240 0.9503402 0.9088235 0.02060821 0.02884956 0.04004935
## 10 0.9629143 0.9545294 0.8958824 0.01934490 0.02694649 0.04264706
## 11 0.9615315 0.9520793 0.8994118 0.01941356 0.02743655 0.04020206
## 12 0.9582784 0.9520763 0.9011765 0.02142355 0.02636969 0.04038990
## 19 0.9623312 0.9545385 0.8929412 0.02124940 0.02497991 0.03832945
## 20 0.9614739 0.9548772 0.8982353 0.02214990 0.02546892 0.03498640
## 21 0.9605573 0.9531228 0.8958824 0.02067730 0.02897600 0.04047904
## 4 0.9626556 0.9520732 0.8982353 0.02161392 0.03009695 0.03957859
## 5 0.9629735 0.9527750 0.9029412 0.02098700 0.02881183 0.03844213
## 6 0.9611275 0.9534859 0.9011765 0.02202231 0.02675918 0.03925856
## 13 0.9619608 0.9534798 0.9005882 0.02232639 0.02702379 0.03874100
## 14 0.9613663 0.9531350 0.9029412 0.02225453 0.02750147 0.03890811
## 15 0.9610368 0.9513959 0.9029412 0.02131813 0.02534249 0.04308430
## 22 0.9627857 0.9534706 0.8976471 0.02058136 0.02729771 0.04148171
## 23 0.9616459 0.9510359 0.8976471 0.02039345 0.03068864 0.03993218
## 24 0.9599004 0.9496415 0.9011765 0.02195891 0.03038183 0.04277365
## 7 0.9634468 0.9503432 0.9000000 0.02089150 0.02937984 0.03749279
## 8 0.9633502 0.9489336 0.9000000 0.01942530 0.02897697 0.03773236
## 9 0.9605459 0.9499924 0.8988235 0.02224698 0.02723485 0.04100982
## 16 0.9640349 0.9562777 0.9000000 0.02173075 0.02449281 0.03474864
## 17 0.9630643 0.9520915 0.9023529 0.02155671 0.02787412 0.03392993
## 18 0.9599690 0.9496506 0.8994118 0.02134556 0.02661137 0.04064787
## 25 0.9634008 0.9559298 0.8923529 0.02148545 0.02504785 0.03388741
## 26 0.9635828 0.9545294 0.8982353 0.02175795 0.02789006 0.03723232
## 27 0.9604719 0.9538337 0.8958824 0.02372122 0.02640844 0.04092183
# Extract the cross-validated AUROC for the best XGB tuning parameter
# combination (outer parentheses print the assigned value)
(MBS_XGB_Train_AUROC <- MBS_XGB_Tune$results[
  MBS_XGB_Tune$results$nrounds == MBS_XGB_Tune$bestTune$nrounds &
    MBS_XGB_Tune$results$max_depth == MBS_XGB_Tune$bestTune$max_depth &
    MBS_XGB_Tune$results$eta == MBS_XGB_Tune$bestTune$eta &
    MBS_XGB_Tune$results$gamma == MBS_XGB_Tune$bestTune$gamma &
    MBS_XGB_Tune$results$colsample_bytree == MBS_XGB_Tune$bestTune$colsample_bytree &
    MBS_XGB_Tune$results$min_child_weight == MBS_XGB_Tune$bestTune$min_child_weight &
    MBS_XGB_Tune$results$subsample == MBS_XGB_Tune$bestTune$subsample,
  c("ROC")])
## [1] 0.9640349
##################################
# Identifying and plotting the
# best model predictors
##################################
# Rank predictors by scaled variable importance from the tuned XGB model
MBS_XGB_VarImp <- varImp(MBS_XGB_Tune, scale = TRUE)
plot(MBS_XGB_VarImp,
     top = 6,
     scales = list(y = list(cex = .95)),
     main = "Ranked Variable Importance : Extreme Gradient Boosting",
     xlab = "Scaled Variable Importance Metrics",
     ylab = "Predictors",
     cex = 2,
     origin = 0,
     alpha = 0.45)
##################################
# Independently evaluating the model
# on the test set
##################################
# Pair observed test-set diagnoses with class probability predictions;
# data.frame() expands the prob matrix into ...Predicted.B / ...Predicted.M columns
MBS_XGB_Test <- data.frame(MBS_XGB_Test_Observed = MA_Test$diagnosis,
                           MBS_XGB_Test_Predicted = predict(MBS_XGB_Tune,
                                                            MA_Test[, !names(MA_Test) %in% c("diagnosis")],
                                                            type = "prob"))
##################################
# Reporting the independent evaluation results
# for the test set
##################################
# Build the ROC curve from the predicted probability of the 'M' (malignant) class
MBS_XGB_Test_ROC <- roc(response = MBS_XGB_Test$MBS_XGB_Test_Observed,
                        predictor = MBS_XGB_Test$MBS_XGB_Test_Predicted.M,
                        levels = rev(levels(MBS_XGB_Test$MBS_XGB_Test_Observed)))
(MBS_XGB_Test_AUROC <- auc(MBS_XGB_Test_ROC)[1])
## [1] 0.989772
##################################
# Setting the conditions
# for hyperparameter tuning
##################################
# Candidate values for mtry (number of predictors sampled at each split)
RF_Grid <- data.frame(mtry = c(2, 3, 4, 5))
##################################
# Running the random forest model
# by setting the caret method to 'rf'
##################################
set.seed(12345678)
MBG_RF_Tune <- train(x = MA_Train[, !names(MA_Train) %in% c("diagnosis")],
                     y = MA_Train$diagnosis,
                     method = "rf",
                     tuneGrid = RF_Grid,
                     metric = "ROC",
                     trControl = RKFold_Control)
##################################
# Reporting the cross-validation results
# for the train set
##################################
MBG_RF_Tune
## Random Forest
##
## 912 samples
## 6 predictor
## 2 classes: 'B', 'M'
##
## No pre-processing
## Resampling: Cross-Validated (5 fold, repeated 5 times)
## Summary of sample sizes: 729, 729, 730, 730, 730, 730, ...
## Resampling results across tuning parameters:
##
## mtry ROC Sens Spec
## 2 0.9700081 0.9580351 0.8970588
## 3 0.9686273 0.9562929 0.9000000
## 4 0.9677953 0.9548955 0.8952941
## 5 0.9663847 0.9538459 0.8988235
##
## ROC was used to select the optimal model using the largest value.
## The final value used for the model was mtry = 2.
# Inspect the final fitted randomForest model selected by the tuning process
MBG_RF_Tune$finalModel
##
## Call:
## randomForest(x = x, y = y, mtry = param$mtry)
## Type of random forest: classification
## Number of trees: 500
## No. of variables tried at each split: 2
##
## OOB estimate of error rate: 3.18%
## Confusion matrix:
## B M class.error
## B 561 11 0.01923077
## M 18 322 0.05294118
# Inspect the full resampling results across all mtry values
MBG_RF_Tune$results
## mtry ROC Sens Spec ROCSD SensSD SpecSD
## 1 2 0.9700081 0.9580351 0.8970588 0.01778857 0.02017469 0.04746303
## 2 3 0.9686273 0.9562929 0.9000000 0.01882807 0.02196796 0.04572245
## 3 4 0.9677953 0.9548955 0.8952941 0.01958256 0.02104960 0.04637213
## 4 5 0.9663847 0.9538459 0.8988235 0.02050067 0.02117526 0.04273149
# Extract the cross-validated AUROC for the best mtry value
# (outer parentheses print the assigned value)
(MBG_RF_Train_AUROC <- MBG_RF_Tune$results[MBG_RF_Tune$results$mtry == MBG_RF_Tune$bestTune$mtry,
                                           c("ROC")])
## [1] 0.9700081
##################################
# Identifying and plotting the
# best model predictors
##################################
# Rank predictors by scaled variable importance from the tuned RF model
MBG_RF_VarImp <- varImp(MBG_RF_Tune, scale = TRUE)
plot(MBG_RF_VarImp,
     top = 6,
     scales = list(y = list(cex = .95)),
     main = "Ranked Variable Importance : Random Forest",
     xlab = "Scaled Variable Importance Metrics",
     ylab = "Predictors",
     cex = 2,
     origin = 0,
     alpha = 0.45)
##################################
# Independently evaluating the model
# on the test set
##################################
# Pair observed test-set diagnoses with class probability predictions;
# data.frame() expands the prob matrix into ...Predicted.B / ...Predicted.M columns
MBG_RF_Test <- data.frame(MBG_RF_Test_Observed = MA_Test$diagnosis,
                          MBG_RF_Test_Predicted = predict(MBG_RF_Tune,
                                                          MA_Test[, !names(MA_Test) %in% c("diagnosis")],
                                                          type = "prob"))
##################################
# Reporting the independent evaluation results
# for the test set
##################################
# Build the ROC curve from the predicted probability of the 'M' (malignant) class
MBG_RF_Test_ROC <- roc(response = MBG_RF_Test$MBG_RF_Test_Observed,
                       predictor = MBG_RF_Test$MBG_RF_Test_Predicted.M,
                       levels = rev(levels(MBG_RF_Test$MBG_RF_Test_Observed)))
(MBG_RF_Test_AUROC <- auc(MBG_RF_Test_ROC)[1])
## [1] 0.9919517
##################################
# Setting the conditions
# for hyperparameter tuning
##################################
# No hyperparameter tuning process required
##################################
# Running the bagged CART model
# by setting the caret method to 'treebag'
##################################
set.seed(12345678)
MBG_BCART_Tune <- train(x = MA_Train[, !names(MA_Train) %in% c("diagnosis")],
                        y = MA_Train$diagnosis,
                        method = "treebag",
                        nbagg = 50,
                        metric = "ROC",
                        trControl = RKFold_Control)
##################################
# Reporting the cross-validation results
# for the train set
##################################
MBG_BCART_Tune
## Bagged CART
##
## 912 samples
## 6 predictor
## 2 classes: 'B', 'M'
##
## No pre-processing
## Resampling: Cross-Validated (5 fold, repeated 5 times)
## Summary of sample sizes: 729, 729, 730, 730, 730, 730, ...
## Resampling results:
##
## ROC Sens Spec
## 0.9644432 0.9541998 0.9
# Inspect the final fitted bagged-trees model
MBG_BCART_Tune$finalModel
##
## Bagging classification trees with 50 bootstrap replications
# Inspect the resampling results (single row; treebag has no tuning parameters)
MBG_BCART_Tune$results
## parameter ROC Sens Spec ROCSD SensSD SpecSD
## 1 none 0.9644432 0.9541998 0.9 0.02096563 0.02170402 0.04452427
# Cross-validated AUROC (outer parentheses print the assigned value)
(MBG_BCART_Train_AUROC <- MBG_BCART_Tune$results$ROC)
## [1] 0.9644432
##################################
# Identifying and plotting the
# best model predictors
##################################
# Rank predictors by scaled variable importance from the bagged CART model
MBG_BCART_VarImp <- varImp(MBG_BCART_Tune, scale = TRUE)
plot(MBG_BCART_VarImp,
     top = 6,
     scales = list(y = list(cex = .95)),
     main = "Ranked Variable Importance : Bagged Classification and Regression Trees",
     xlab = "Scaled Variable Importance Metrics",
     ylab = "Predictors",
     cex = 2,
     origin = 0,
     alpha = 0.45)
##################################
# Independently evaluating the model
# on the test set
##################################
# Pair observed test-set diagnoses with class probability predictions;
# data.frame() expands the prob matrix into ...Predicted.B / ...Predicted.M columns
MBG_BCART_Test <- data.frame(MBG_BCART_Test_Observed = MA_Test$diagnosis,
                             MBG_BCART_Test_Predicted = predict(MBG_BCART_Tune,
                                                                MA_Test[, !names(MA_Test) %in% c("diagnosis")],
                                                                type = "prob"))
##################################
# Reporting the independent evaluation results
# for the test set
##################################
<- roc(response = MBG_BCART_Test$MBG_BCART_Test_Observed,
MBG_BCART_Test_ROC predictor = MBG_BCART_Test$MBG_BCART_Test_Predicted.M,
levels = rev(levels(MBG_BCART_Test$MBG_BCART_Test_Observed)))
<- auc(MBG_BCART_Test_ROC)[1]) (MBG_BCART_Test_AUROC
## [1] 0.9858317
##################################
# Setting the conditions
# for hyperparameter tuning
##################################
# No hyperparameter tuning process required
##################################
# Running the linear discriminant analysis model
# by setting the caret method to 'lda'
##################################
# Fit a linear discriminant analysis classifier (caret method 'lda') on
# centered and scaled predictors, selecting on ROC under the repeated
# k-fold control defined earlier. Seed fixed for reproducible resampling.
set.seed(12345678)
BAL_LDA_Tune <- train(x = MA_Train[, !names(MA_Train) %in% c("diagnosis")],
                      y = MA_Train$diagnosis,
                      method = "lda",
                      preProc = c("center", "scale"),
                      metric = "ROC",
                      trControl = RKFold_Control)
##################################
# Reporting the cross-validation results
# for the train set
##################################
# Print the fitted caret object: resampling setup and cross-validated ROC/Sens/Spec
BAL_LDA_Tune
## Linear Discriminant Analysis
##
## 912 samples
## 6 predictor
## 2 classes: 'B', 'M'
##
## Pre-processing: centered (6), scaled (6)
## Resampling: Cross-Validated (5 fold, repeated 5 times)
## Summary of sample sizes: 729, 729, 730, 730, 730, 730, ...
## Resampling results:
##
## ROC Sens Spec
## 0.8762815 0.8720214 0.7105882
# Inspect the final LDA fit: priors, group means, and discriminant coefficients
BAL_LDA_Tune$finalModel
## Call:
## lda(x, y)
##
## Prior probabilities of groups:
## B M
## 0.627193 0.372807
##
## Group means:
## texture_mean smoothness_mean compactness_se texture_worst smoothness_worst
## B -0.3194390 -0.2990245 -0.2777306 -0.3459548 -0.3280838
## M 0.5374091 0.5030648 0.4672410 0.5820181 0.5519528
## symmetry_worst
## B -0.2957632
## M 0.4975782
##
## Coefficients of linear discriminants:
## LD1
## texture_mean 0.4986787
## smoothness_mean 0.3218366
## compactness_se 0.2495221
## texture_worst 0.2741160
## smoothness_worst 0.2408554
## symmetry_worst 0.3784915
# Resampling results table (single row: 'lda' has no tuning parameter)
BAL_LDA_Tune$results
## parameter ROC Sens Spec ROCSD SensSD SpecSD
## 1 none 0.8762815 0.8720214 0.7105882 0.02587517 0.02739545 0.05331432
# Cross-validated train-set AUROC; outer parentheses print on assignment
(BAL_LDA_Train_AUROC <- BAL_LDA_Tune$results$ROC)
## [1] 0.8762815
##################################
# Identifying and plotting the
# best model predictors
##################################
# model does not support variable importance measurement
##################################
# Independently evaluating the model
# on the test set
##################################
# Pair observed test-set labels with LDA class probabilities, then print
BAL_LDA_Test <- data.frame(BAL_LDA_Test_Observed = MA_Test$diagnosis,
                           BAL_LDA_Test_Predicted = predict(BAL_LDA_Tune,
                                                            MA_Test[, !names(MA_Test) %in% c("diagnosis")],
                                                            type = "prob"))
BAL_LDA_Test
## BAL_LDA_Test_Observed BAL_LDA_Test_Predicted.B BAL_LDA_Test_Predicted.M
## 4 M 0.006126311 0.993873689
## 5 M 0.942711747 0.057288253
## 14 M 0.579064544 0.420935456
## 19 M 0.385557673 0.614442327
## 24 M 0.407580856 0.592419144
## 29 M 0.038467533 0.961532467
## 31 M 0.084012467 0.915987533
## 33 M 0.059833498 0.940166502
## 34 M 0.104825602 0.895174398
## 36 M 0.123134528 0.876865472
## 37 M 0.213046186 0.786953814
## 41 M 0.762194634 0.237805366
## 44 M 0.243102433 0.756897567
## 50 B 0.633616180 0.366383820
## 51 B 0.883649952 0.116350048
## 60 B 0.941211680 0.058788320
## 63 M 0.152736238 0.847263762
## 66 M 0.081701843 0.918298157
## 70 B 0.956197471 0.043802529
## 72 B 0.926760828 0.073239172
## 76 M 0.675879310 0.324120690
## 77 B 0.938615062 0.061384938
## 98 B 0.859180598 0.140819402
## 101 M 0.412386407 0.587613593
## 107 B 0.348515684 0.651484316
## 122 M 0.615196888 0.384803112
## 124 B 0.968617292 0.031382708
## 136 M 0.496541534 0.503458466
## 143 B 0.686963614 0.313036386
## 145 B 0.981924747 0.018075253
## 151 B 0.492150246 0.507849754
## 153 B 0.610903722 0.389096278
## 157 M 0.413945858 0.586054142
## 167 B 0.997185897 0.002814103
## 170 B 0.904311554 0.095688446
## 173 M 0.812030361 0.187969639
## 174 B 0.978730977 0.021269023
## 181 M 0.241659587 0.758340413
## 185 M 0.524089965 0.475910035
## 186 B 0.878625062 0.121374938
## 195 M 0.231580068 0.768419932
## 199 M 0.348140696 0.651859304
## 213 M 0.947602810 0.052397190
## 215 M 0.056860324 0.943139676
## 219 M 0.393506696 0.606493304
## 221 B 0.973705501 0.026294499
## 227 B 0.921367896 0.078632104
## 231 M 0.330572265 0.669427735
## 237 M 0.134627362 0.865372638
## 238 M 0.805720619 0.194279381
## 247 B 0.929093207 0.070906793
## 252 B 0.876231913 0.123768087
## 260 M 0.008999501 0.991000499
## 261 M 0.114163242 0.885836758
## 262 M 0.728155795 0.271844205
## 265 M 0.364679966 0.635320034
## 271 B 0.995874752 0.004125248
## 280 B 0.900993219 0.099006781
## 305 B 0.924318983 0.075681017
## 311 B 0.736500069 0.263499931
## 315 B 0.633755327 0.366244673
## 317 B 0.996050405 0.003949595
## 327 B 0.987789680 0.012210320
## 328 B 0.984770455 0.015229545
## 330 M 0.290667258 0.709332742
## 334 B 0.968849268 0.031150732
## 338 M 0.174995596 0.825004404
## 356 B 0.903045873 0.096954127
## 357 B 0.430524826 0.569475174
## 364 B 0.811698009 0.188301991
## 368 B 0.727507423 0.272492577
## 373 M 0.872668128 0.127331872
## 388 B 0.988414548 0.011585452
## 389 B 0.957745027 0.042254973
## 391 B 0.962310108 0.037689892
## 393 M 0.177431936 0.822568064
## 396 B 0.948444928 0.051555072
## 397 B 0.583968627 0.416031373
## 404 B 0.824463807 0.175536193
## 420 B 0.563842969 0.436157031
## 422 B 0.842474215 0.157525785
## 427 B 0.779936684 0.220063316
## 429 B 0.983858264 0.016141736
## 430 B 0.978101034 0.021898966
## 435 B 0.925778429 0.074221571
## 442 M 0.325620184 0.674379816
## 454 B 0.936216623 0.063783377
## 458 B 0.633531078 0.366468922
## 463 B 0.810336471 0.189663529
## 470 B 0.356359884 0.643640116
## 474 B 0.706685284 0.293314716
## 478 B 0.981238254 0.018761746
## 479 B 0.821103847 0.178896153
## 484 B 0.902055587 0.097944413
## 486 B 0.677747460 0.322252540
## 489 B 0.744881334 0.255118666
## 490 M 0.681658672 0.318341328
## 491 B 0.606617961 0.393382039
## 492 B 0.998066340 0.001933660
## 499 M 0.735157269 0.264842731
## 501 B 0.943572720 0.056427280
## 503 B 0.598182906 0.401817094
## 507 B 0.534944505 0.465055495
## 509 B 0.958952980 0.041047020
## 514 B 0.956897407 0.043102593
## 518 M 0.588851055 0.411148945
## 523 B 0.920707025 0.079292975
## 534 M 0.563458379 0.436541621
## 535 B 0.756220900 0.243779100
## 536 M 0.446628256 0.553371744
## 542 B 0.214561414 0.785438586
## 545 B 0.789312389 0.210687611
## 549 B 0.885565165 0.114434835
## 550 B 0.510647176 0.489352824
## 561 B 0.382288558 0.617711442
## 563 M 0.016016932 0.983983068
## 566 M 0.247083120 0.752916880
## 569 B 0.965394152 0.034605848
## 574 M 0.942711747 0.057288253
## 581 M 0.356014840 0.643985160
## 582 M 0.215119777 0.784880223
## 594 M 0.113214377 0.886785623
## 596 M 0.091384833 0.908615167
## 597 M 0.702993860 0.297006140
## 598 M 0.038467533 0.961532467
## 599 M 0.878500112 0.121499888
## 600 M 0.084012467 0.915987533
## 612 M 0.057472228 0.942527772
## 616 B 0.855211064 0.144788936
## 617 M 0.148575243 0.851424757
## 626 M 0.290264105 0.709735895
## 634 M 0.064339370 0.935660630
## 639 B 0.956197471 0.043802529
## 641 B 0.926760828 0.073239172
## 650 B 0.320740216 0.679259784
## 665 M 0.259719596 0.740280404
## 666 B 0.925463665 0.074536335
## 673 B 0.557022744 0.442977256
## 676 B 0.348515684 0.651484316
## 684 B 0.620718568 0.379281432
## 688 M 0.068214661 0.931785339
## 691 M 0.615196888 0.384803112
## 696 M 0.222808227 0.777191773
## 709 B 0.943771833 0.056228167
## 710 B 0.987245490 0.012754510
## 713 B 0.830292869 0.169707131
## 727 B 0.921633095 0.078366905
## 730 B 0.382668083 0.617331917
## 732 M 0.333612109 0.666387891
## 740 B 0.961858377 0.038141623
## 741 M 0.681062354 0.318937646
## 763 M 0.040915331 0.959084669
## 769 M 0.177021698 0.822978302
## 788 M 0.393506696 0.606493304
## 790 B 0.973705501 0.026294499
## 791 B 0.870821564 0.129178436
## 792 B 0.682622616 0.317377384
## 793 M 0.182399687 0.817600313
## 801 B 0.726087033 0.273912967
## 803 M 0.354643905 0.645356095
## 806 M 0.134627362 0.865372638
## 810 B 0.944706021 0.055293979
## 812 B 0.393474697 0.606525303
## 813 B 0.779393668 0.220606332
## 821 B 0.876231913 0.123768087
## 836 B 0.658099084 0.341900916
## 842 M 0.448926362 0.551073638
## 843 B 0.901788723 0.098211277
## 848 B 0.973498488 0.026501512
## 853 M 0.528763024 0.471236976
## 862 B 0.711409516 0.288590484
## 865 B 0.979123666 0.020876334
## 879 B 0.996373712 0.003626288
## 890 B 0.741480170 0.258519830
## 891 M 0.878337490 0.121662510
## 897 B 0.984770455 0.015229545
## 902 B 0.499343912 0.500656088
## 904 B 0.892320617 0.107679383
## 905 M 0.361244898 0.638755102
## 906 B 0.966801278 0.033198722
## 916 B 0.823396789 0.176603211
## 919 B 0.783263908 0.216736092
## 920 B 0.979880096 0.020119904
## 930 B 0.990855172 0.009144828
## 940 M 0.079097793 0.920902207
## 945 B 0.846430270 0.153569730
## 947 B 0.594187576 0.405812424
## 948 B 0.872995937 0.127004063
## 951 B 0.941410746 0.058589254
## 961 B 0.801844856 0.198155144
## 966 B 0.583968627 0.416031373
## 969 B 0.832591332 0.167408668
## 979 B 0.638449388 0.361550612
## 985 B 0.374275674 0.625724326
## 991 B 0.842474215 0.157525785
## 996 B 0.779936684 0.220063316
## 998 B 0.983858264 0.016141736
## 999 B 0.978101034 0.021898966
## 1005 M 0.305786852 0.694213148
## 1006 B 0.708477812 0.291522188
## 1007 B 0.942459340 0.057540660
## 1009 B 0.990361755 0.009638245
## 1015 B 0.367056365 0.632943635
## 1021 M 0.374776492 0.625223508
## 1027 B 0.633531078 0.366468922
## 1033 B 0.755897965 0.244102035
## 1042 B 0.973607166 0.026392834
## 1044 B 0.862842688 0.137157312
## 1049 M 0.359535242 0.640464758
## 1059 M 0.681658672 0.318341328
## 1060 B 0.606617961 0.393382039
## 1063 B 0.997138711 0.002861289
## 1075 B 0.556668408 0.443331592
## 1087 M 0.588851055 0.411148945
## 1089 B 0.652990898 0.347009102
## 1095 B 0.868301964 0.131698036
## 1098 B 0.929532127 0.070467873
## 1099 B 0.925197595 0.074802405
## 1102 B 0.934244281 0.065755719
## 1106 M 0.302156084 0.697843916
## 1108 B 0.561058807 0.438941193
## 1109 B 0.180044308 0.819955692
## 1113 B 0.517216264 0.482783736
## 1114 B 0.789312389 0.210687611
## 1119 B 0.510647176 0.489352824
## 1133 M 0.147249898 0.852750102
##################################
# Reporting the independent evaluation results
# for the test set
##################################
# ROC with the positive class 'M'; levels reversed so 'M' is treated as the case level
BAL_LDA_Test_ROC <- roc(response = BAL_LDA_Test$BAL_LDA_Test_Observed,
                        predictor = BAL_LDA_Test$BAL_LDA_Test_Predicted.M,
                        levels = rev(levels(BAL_LDA_Test$BAL_LDA_Test_Observed)))
# Independent test-set AUROC; outer parentheses print on assignment
(BAL_LDA_Test_AUROC <- auc(BAL_LDA_Test_ROC)[1])
## [1] 0.88833
##################################
# Setting the conditions
# for hyperparameter tuning
##################################
# Candidate complexity-parameter (cp) values for rpart pruning
CART_Grid <- data.frame(cp = c(0.001, 0.005, 0.010, 0.015, 0.020))
##################################
# Running the classification and regression tree model
# by setting the caret method to 'rpart'
##################################
# Fit a CART classifier (caret method 'rpart') tuned over the cp grid above,
# selecting on ROC under the repeated k-fold control defined earlier.
# Seed fixed for reproducible resampling.
set.seed(12345678)
BAL_CART_Tune <- train(x = MA_Train[, !names(MA_Train) %in% c("diagnosis")],
                       y = MA_Train$diagnosis,
                       method = "rpart",
                       tuneGrid = CART_Grid,
                       metric = "ROC",
                       trControl = RKFold_Control)
##################################
# Reporting the cross-validation results
# for the train set
##################################
# Print the fitted caret object: resampling setup and cross-validated ROC per cp value
BAL_CART_Tune
## CART
##
## 912 samples
## 6 predictor
## 2 classes: 'B', 'M'
##
## No pre-processing
## Resampling: Cross-Validated (5 fold, repeated 5 times)
## Summary of sample sizes: 729, 729, 730, 730, 730, 730, ...
## Resampling results across tuning parameters:
##
## cp ROC Sens Spec
## 0.001 0.8614523 0.8503158 0.7482353
## 0.005 0.8512807 0.8640061 0.7452941
## 0.010 0.8297650 0.8702944 0.7211765
## 0.015 0.8139619 0.8608299 0.7170588
## 0.020 0.8094478 0.8552677 0.7164706
##
## ROC was used to select the optimal model using the largest value.
## The final value used for the model was cp = 0.001.
# Inspect the final pruned tree (splits, node counts, class probabilities)
BAL_CART_Tune$finalModel
## n= 912
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 912 340 B (0.62719298 0.37280702)
## 2) texture_mean< 2.927988 437 67 B (0.84668192 0.15331808)
## 4) symmetry_worst< -1.34686 398 38 B (0.90452261 0.09547739)
## 8) smoothness_mean< -2.074653 390 33 B (0.91538462 0.08461538)
## 16) texture_mean< 2.711046 134 0 B (1.00000000 0.00000000) *
## 17) texture_mean>=2.711046 256 33 B (0.87109375 0.12890625)
## 34) symmetry_worst< -1.427209 247 28 B (0.88663968 0.11336032)
## 68) smoothness_mean< -2.468758 62 0 B (1.00000000 0.00000000) *
## 69) smoothness_mean>=-2.468758 185 28 B (0.84864865 0.15135135)
## 138) smoothness_mean>=-2.28574 59 3 B (0.94915254 0.05084746) *
## 139) smoothness_mean< -2.28574 126 25 B (0.80158730 0.19841270)
## 278) compactness_se< -4.691273 24 0 B (1.00000000 0.00000000) *
## 279) compactness_se>=-4.691273 102 25 B (0.75490196 0.24509804)
## 558) smoothness_mean< -2.296604 94 20 B (0.78723404 0.21276596)
## 1116) compactness_se>=-4.479607 81 14 B (0.82716049 0.17283951)
## 2232) smoothness_worst< -1.472892 73 10 B (0.86301370 0.13698630)
## 4464) symmetry_worst>=-1.749307 28 0 B (1.00000000 0.00000000) *
## 4465) symmetry_worst< -1.749307 45 10 B (0.77777778 0.22222222)
## 8930) symmetry_worst< -1.841614 34 2 B (0.94117647 0.05882353) *
## 8931) symmetry_worst>=-1.841614 11 3 M (0.27272727 0.72727273) *
## 2233) smoothness_worst>=-1.472892 8 4 B (0.50000000 0.50000000) *
## 1117) compactness_se< -4.479607 13 6 B (0.53846154 0.46153846) *
## 559) smoothness_mean>=-2.296604 8 3 M (0.37500000 0.62500000) *
## 35) symmetry_worst>=-1.427209 9 4 M (0.44444444 0.55555556) *
## 9) smoothness_mean>=-2.074653 8 3 M (0.37500000 0.62500000) *
## 5) symmetry_worst>=-1.34686 39 10 M (0.25641026 0.74358974)
## 10) smoothness_mean< -2.32364 7 1 B (0.85714286 0.14285714) *
## 11) smoothness_mean>=-2.32364 32 4 M (0.12500000 0.87500000) *
## 3) texture_mean>=2.927988 475 202 M (0.42526316 0.57473684)
## 6) smoothness_mean< -2.425205 140 29 B (0.79285714 0.20714286)
## 12) symmetry_worst< -1.496954 133 23 B (0.82706767 0.17293233)
## 24) smoothness_worst< -1.60101 85 8 B (0.90588235 0.09411765)
## 48) texture_mean>=2.980363 64 3 B (0.95312500 0.04687500) *
## 49) texture_mean< 2.980363 21 5 B (0.76190476 0.23809524)
## 98) symmetry_worst< -1.919875 12 0 B (1.00000000 0.00000000) *
## 99) symmetry_worst>=-1.919875 9 4 M (0.44444444 0.55555556) *
## 25) smoothness_worst>=-1.60101 48 15 B (0.68750000 0.31250000)
## 50) texture_mean< 3.108829 21 1 B (0.95238095 0.04761905) *
## 51) texture_mean>=3.108829 27 13 M (0.48148148 0.51851852)
## 102) texture_mean>=3.176386 20 7 B (0.65000000 0.35000000)
## 204) compactness_se< -3.643388 13 2 B (0.84615385 0.15384615) *
## 205) compactness_se>=-3.643388 7 2 M (0.28571429 0.71428571) *
## 103) texture_mean< 3.176386 7 0 M (0.00000000 1.00000000) *
## 13) symmetry_worst>=-1.496954 7 1 M (0.14285714 0.85714286) *
## 7) smoothness_mean>=-2.425205 335 91 M (0.27164179 0.72835821)
## 14) texture_worst< 4.411908 18 1 B (0.94444444 0.05555556) *
## 15) texture_worst>=4.411908 317 74 M (0.23343849 0.76656151)
## 30) symmetry_worst< -1.776275 102 44 M (0.43137255 0.56862745)
## 60) compactness_se< -3.02233 89 44 M (0.49438202 0.50561798)
## 120) texture_worst< 4.897936 54 20 B (0.62962963 0.37037037)
## 240) texture_worst>=4.751011 13 0 B (1.00000000 0.00000000) *
## 241) texture_worst< 4.751011 41 20 B (0.51219512 0.48780488)
## 482) texture_mean< 3.07522 26 9 B (0.65384615 0.34615385)
## 964) smoothness_mean>=-2.347868 12 1 B (0.91666667 0.08333333) *
## 965) smoothness_mean< -2.347868 14 6 M (0.42857143 0.57142857) *
## 483) texture_mean>=3.07522 15 4 M (0.26666667 0.73333333) *
## 121) texture_worst>=4.897936 35 10 M (0.28571429 0.71428571)
## 242) symmetry_worst< -2.207988 9 2 B (0.77777778 0.22222222) *
## 243) symmetry_worst>=-2.207988 26 3 M (0.11538462 0.88461538) *
## 61) compactness_se>=-3.02233 13 0 M (0.00000000 1.00000000) *
## 31) symmetry_worst>=-1.776275 215 30 M (0.13953488 0.86046512)
## 62) compactness_se< -4.040144 38 16 M (0.42105263 0.57894737)
## 124) smoothness_mean>=-2.294648 15 2 B (0.86666667 0.13333333) *
## 125) smoothness_mean< -2.294648 23 3 M (0.13043478 0.86956522) *
## 63) compactness_se>=-4.040144 177 14 M (0.07909605 0.92090395)
## 126) smoothness_mean< -2.32432 37 9 M (0.24324324 0.75675676) *
## 127) smoothness_mean>=-2.32432 140 5 M (0.03571429 0.96428571)
## 254) texture_worst< 4.824228 54 5 M (0.09259259 0.90740741)
## 508) compactness_se< -3.447524 24 5 M (0.20833333 0.79166667)
## 1016) texture_worst>=4.608306 8 3 B (0.62500000 0.37500000) *
## 1017) texture_worst< 4.608306 16 0 M (0.00000000 1.00000000) *
## 509) compactness_se>=-3.447524 30 0 M (0.00000000 1.00000000) *
## 255) texture_worst>=4.824228 86 0 M (0.00000000 1.00000000) *
# Resampling results per cp value
BAL_CART_Tune$results
## cp ROC Sens Spec ROCSD SensSD SpecSD
## 1 0.001 0.8614523 0.8503158 0.7482353 0.03685507 0.04170440 0.06635597
## 2 0.005 0.8512807 0.8640061 0.7452941 0.03662151 0.02767361 0.05816420
## 3 0.010 0.8297650 0.8702944 0.7211765 0.04245561 0.03675966 0.07705751
## 4 0.015 0.8139619 0.8608299 0.7170588 0.04009017 0.04021240 0.07056270
## 5 0.020 0.8094478 0.8552677 0.7164706 0.04150844 0.03315895 0.06891397
# Cross-validated AUROC at the best cp; outer parentheses print on assignment
(BAL_CART_Train_AUROC <- BAL_CART_Tune$results[BAL_CART_Tune$results$cp == BAL_CART_Tune$bestTune$cp,
                                               c("ROC")])
## [1] 0.8614523
##################################
# Identifying and plotting the
# best model predictors
##################################
# Scaled variable importance for the CART model, plotted for the top 6 predictors
BAL_CART_VarImp <- varImp(BAL_CART_Tune, scale = TRUE)
plot(BAL_CART_VarImp,
     top = 6,
     scales = list(y = list(cex = 0.95)),
     main = "Ranked Variable Importance : Classification and Regression Trees",
     xlab = "Scaled Variable Importance Metrics",
     ylab = "Predictors",
     cex = 2,
     origin = 0,
     alpha = 0.45)
##################################
# Independently evaluating the model
# on the test set
##################################
# Pair observed test-set labels with CART class probabilities, then print
BAL_CART_Test <- data.frame(BAL_CART_Test_Observed = MA_Test$diagnosis,
                            BAL_CART_Test_Predicted = predict(BAL_CART_Tune,
                                                              MA_Test[, !names(MA_Test) %in% c("diagnosis")],
                                                              type = "prob"))
BAL_CART_Test
## BAL_CART_Test_Observed BAL_CART_Test_Predicted.B BAL_CART_Test_Predicted.M
## 4 M 0.0000000 1.00000000
## 5 M 1.0000000 0.00000000
## 14 M 0.0000000 1.00000000
## 19 M 0.1153846 0.88461538
## 24 M 0.1304348 0.86956522
## 29 M 0.0000000 1.00000000
## 31 M 0.0000000 1.00000000
## 33 M 0.0000000 1.00000000
## 34 M 0.2432432 0.75675676
## 36 M 0.2432432 0.75675676
## 37 M 0.0000000 1.00000000
## 41 M 0.9531250 0.04687500
## 44 M 0.6250000 0.37500000
## 50 B 0.9523810 0.04761905
## 51 B 0.9531250 0.04687500
## 60 B 1.0000000 0.00000000
## 63 M 0.0000000 1.00000000
## 66 M 0.0000000 1.00000000
## 70 B 1.0000000 0.00000000
## 72 B 1.0000000 0.00000000
## 76 M 0.4285714 0.57142857
## 77 B 0.3750000 0.62500000
## 98 B 0.9166667 0.08333333
## 101 M 0.1153846 0.88461538
## 107 B 0.9491525 0.05084746
## 122 M 0.9491525 0.05084746
## 124 B 1.0000000 0.00000000
## 136 M 0.1304348 0.86956522
## 143 B 0.9491525 0.05084746
## 145 B 1.0000000 0.00000000
## 151 B 0.8666667 0.13333333
## 153 B 0.9491525 0.05084746
## 157 M 0.0000000 1.00000000
## 167 B 1.0000000 0.00000000
## 170 B 0.9411765 0.05882353
## 173 M 0.3750000 0.62500000
## 174 B 1.0000000 0.00000000
## 181 M 0.0000000 1.00000000
## 185 M 0.1304348 0.86956522
## 186 B 0.5000000 0.50000000
## 195 M 0.0000000 1.00000000
## 199 M 0.0000000 1.00000000
## 213 M 0.9491525 0.05084746
## 215 M 0.2432432 0.75675676
## 219 M 0.2432432 0.75675676
## 221 B 1.0000000 0.00000000
## 227 B 0.9491525 0.05084746
## 231 M 0.0000000 1.00000000
## 237 M 0.2432432 0.75675676
## 238 M 0.9523810 0.04761905
## 247 B 1.0000000 0.00000000
## 252 B 0.5384615 0.46153846
## 260 M 0.0000000 1.00000000
## 261 M 0.1304348 0.86956522
## 262 M 0.0000000 1.00000000
## 265 M 0.1304348 0.86956522
## 271 B 1.0000000 0.00000000
## 280 B 1.0000000 0.00000000
## 305 B 0.9411765 0.05882353
## 311 B 0.1428571 0.85714286
## 315 B 0.9491525 0.05084746
## 317 B 1.0000000 0.00000000
## 327 B 1.0000000 0.00000000
## 328 B 1.0000000 0.00000000
## 330 M 0.2666667 0.73333333
## 334 B 1.0000000 0.00000000
## 338 M 0.2432432 0.75675676
## 356 B 1.0000000 0.00000000
## 357 B 0.9491525 0.05084746
## 364 B 0.9411765 0.05882353
## 368 B 1.0000000 0.00000000
## 373 M 0.2727273 0.72727273
## 388 B 1.0000000 0.00000000
## 389 B 1.0000000 0.00000000
## 391 B 1.0000000 0.00000000
## 393 M 0.0000000 1.00000000
## 396 B 1.0000000 0.00000000
## 397 B 0.9166667 0.08333333
## 404 B 1.0000000 0.00000000
## 420 B 0.8666667 0.13333333
## 422 B 1.0000000 0.00000000
## 427 B 1.0000000 0.00000000
## 429 B 1.0000000 0.00000000
## 430 B 1.0000000 0.00000000
## 435 B 0.9411765 0.05882353
## 442 M 0.2857143 0.71428571
## 454 B 1.0000000 0.00000000
## 458 B 0.8461538 0.15384615
## 463 B 0.9531250 0.04687500
## 470 B 0.9491525 0.05084746
## 474 B 0.9531250 0.04687500
## 478 B 1.0000000 0.00000000
## 479 B 1.0000000 0.00000000
## 484 B 1.0000000 0.00000000
## 486 B 1.0000000 0.00000000
## 489 B 0.9491525 0.05084746
## 490 M 0.1428571 0.85714286
## 491 B 0.0000000 1.00000000
## 492 B 1.0000000 0.00000000
## 499 M 0.3750000 0.62500000
## 501 B 0.9411765 0.05882353
## 503 B 0.9491525 0.05084746
## 507 B 0.9166667 0.08333333
## 509 B 0.9411765 0.05882353
## 514 B 1.0000000 0.00000000
## 518 M 0.9166667 0.08333333
## 523 B 0.9531250 0.04687500
## 534 M 0.2432432 0.75675676
## 535 B 0.9411765 0.05882353
## 536 M 0.0000000 1.00000000
## 542 B 0.2857143 0.71428571
## 545 B 0.9166667 0.08333333
## 549 B 0.9523810 0.04761905
## 550 B 0.8461538 0.15384615
## 561 B 0.7777778 0.22222222
## 563 M 0.0000000 1.00000000
## 566 M 0.1153846 0.88461538
## 569 B 0.9531250 0.04687500
## 574 M 1.0000000 0.00000000
## 581 M 0.8571429 0.14285714
## 582 M 0.2432432 0.75675676
## 594 M 0.0000000 1.00000000
## 596 M 0.0000000 1.00000000
## 597 M 0.4285714 0.57142857
## 598 M 0.0000000 1.00000000
## 599 M 0.2727273 0.72727273
## 600 M 0.0000000 1.00000000
## 612 M 0.2432432 0.75675676
## 616 B 1.0000000 0.00000000
## 617 M 0.1250000 0.87500000
## 626 M 0.4444444 0.55555556
## 634 M 0.0000000 1.00000000
## 639 B 1.0000000 0.00000000
## 641 B 1.0000000 0.00000000
## 650 B 0.1153846 0.88461538
## 665 M 0.2432432 0.75675676
## 666 B 0.9491525 0.05084746
## 673 B 0.9166667 0.08333333
## 676 B 0.9491525 0.05084746
## 684 B 0.9491525 0.05084746
## 688 M 0.0000000 1.00000000
## 691 M 0.9491525 0.05084746
## 696 M 0.1304348 0.86956522
## 709 B 1.0000000 0.00000000
## 710 B 1.0000000 0.00000000
## 713 B 0.4444444 0.55555556
## 727 B 1.0000000 0.00000000
## 730 B 0.6250000 0.37500000
## 732 M 0.1250000 0.87500000
## 740 B 1.0000000 0.00000000
## 741 M 0.1304348 0.86956522
## 763 M 0.0000000 1.00000000
## 769 M 0.1304348 0.86956522
## 788 M 0.2432432 0.75675676
## 790 B 1.0000000 0.00000000
## 791 B 1.0000000 0.00000000
## 792 B 0.9491525 0.05084746
## 793 M 0.0000000 1.00000000
## 801 B 0.9531250 0.04687500
## 803 M 0.1153846 0.88461538
## 806 M 0.2432432 0.75675676
## 810 B 0.9411765 0.05882353
## 812 B 1.0000000 0.00000000
## 813 B 0.9531250 0.04687500
## 821 B 0.5384615 0.46153846
## 836 B 0.9444444 0.05555556
## 842 M 0.2432432 0.75675676
## 843 B 0.5384615 0.46153846
## 848 B 1.0000000 0.00000000
## 853 M 0.9166667 0.08333333
## 862 B 0.5000000 0.50000000
## 865 B 1.0000000 0.00000000
## 879 B 1.0000000 0.00000000
## 890 B 0.9491525 0.05084746
## 891 M 0.4444444 0.55555556
## 897 B 1.0000000 0.00000000
## 902 B 0.8666667 0.13333333
## 904 B 0.9523810 0.04761905
## 905 M 0.1153846 0.88461538
## 906 B 1.0000000 0.00000000
## 916 B 0.9523810 0.04761905
## 919 B 1.0000000 0.00000000
## 920 B 1.0000000 0.00000000
## 930 B 1.0000000 0.00000000
## 940 M 0.2432432 0.75675676
## 945 B 1.0000000 0.00000000
## 947 B 0.9531250 0.04687500
## 948 B 1.0000000 0.00000000
## 951 B 1.0000000 0.00000000
## 961 B 0.9491525 0.05084746
## 966 B 0.9166667 0.08333333
## 969 B 0.2727273 0.72727273
## 979 B 0.4444444 0.55555556
## 985 B 0.2432432 0.75675676
## 991 B 1.0000000 0.00000000
## 996 B 1.0000000 0.00000000
## 998 B 1.0000000 0.00000000
## 999 B 1.0000000 0.00000000
## 1005 M 0.0000000 1.00000000
## 1006 B 0.1304348 0.86956522
## 1007 B 1.0000000 0.00000000
## 1009 B 1.0000000 0.00000000
## 1015 B 1.0000000 0.00000000
## 1021 M 0.1153846 0.88461538
## 1027 B 0.8461538 0.15384615
## 1033 B 1.0000000 0.00000000
## 1042 B 1.0000000 0.00000000
## 1044 B 0.3750000 0.62500000
## 1049 M 0.9444444 0.05555556
## 1059 M 0.1428571 0.85714286
## 1060 B 0.0000000 1.00000000
## 1063 B 1.0000000 0.00000000
## 1075 B 1.0000000 0.00000000
## 1087 M 0.9166667 0.08333333
## 1089 B 0.9491525 0.05084746
## 1095 B 1.0000000 0.00000000
## 1098 B 1.0000000 0.00000000
## 1099 B 1.0000000 0.00000000
## 1102 B 1.0000000 0.00000000
## 1106 M 0.1153846 0.88461538
## 1108 B 0.8461538 0.15384615
## 1109 B 0.2857143 0.71428571
## 1113 B 0.9531250 0.04687500
## 1114 B 0.9166667 0.08333333
## 1119 B 0.8461538 0.15384615
## 1133 M 0.0000000 1.00000000
##################################
# Reporting the independent evaluation results
# for the test set
##################################
# ROC with the positive class 'M'; levels reversed so 'M' is treated as the case level
BAL_CART_Test_ROC <- roc(response = BAL_CART_Test$BAL_CART_Test_Observed,
                         predictor = BAL_CART_Test$BAL_CART_Test_Predicted.M,
                         levels = rev(levels(BAL_CART_Test$BAL_CART_Test_Observed)))
# Independent test-set AUROC; outer parentheses print on assignment
(BAL_CART_Test_AUROC <- auc(BAL_CART_Test_ROC)[1])
## [1] 0.9210681
##################################
# Setting the conditions
# for hyperparameter tuning
##################################
# used a range of default values
##################################
# Running the support vector machine model
# by setting the caret method to 'svmRadial'
##################################
# Fit a radial-kernel SVM (caret method 'svmRadial') on centered and scaled
# predictors, tuning cost C over 14 default values while sigma is held constant,
# selecting on ROC under the repeated k-fold control defined earlier.
# Seed fixed for reproducible resampling.
set.seed(12345678)
BAL_SVM_R_Tune <- train(x = MA_Train[, !names(MA_Train) %in% c("diagnosis")],
                        y = MA_Train$diagnosis,
                        method = "svmRadial",
                        preProc = c("center", "scale"),
                        tuneLength = 14,
                        metric = "ROC",
                        trControl = RKFold_Control)
##################################
# Reporting the cross-validation results
# for the train set
##################################
# Print the fitted caret object: resampling setup and cross-validated ROC per cost value
BAL_SVM_R_Tune
## Support Vector Machines with Radial Basis Function Kernel
##
## 912 samples
## 6 predictor
## 2 classes: 'B', 'M'
##
## Pre-processing: centered (6), scaled (6)
## Resampling: Cross-Validated (5 fold, repeated 5 times)
## Summary of sample sizes: 729, 729, 730, 730, 730, 730, ...
## Resampling results across tuning parameters:
##
## C ROC Sens Spec
## 0.25 0.8792747 0.8964943 0.7005882
## 0.50 0.8828520 0.8964851 0.7064706
## 1.00 0.8853296 0.8961373 0.7064706
## 2.00 0.8879393 0.8958017 0.7164706
## 4.00 0.8909259 0.8937056 0.7241176
## 8.00 0.8960321 0.9003417 0.7300000
## 16.00 0.8967388 0.9052235 0.7264706
## 32.00 0.8982511 0.9059283 0.7347059
## 64.00 0.9005909 0.9135957 0.7482353
## 128.00 0.9039633 0.9233837 0.7552941
## 256.00 0.9090738 0.9374005 0.7652941
## 512.00 0.9090150 0.9412387 0.7782353
## 1024.00 0.9077637 0.9387643 0.7900000
## 2048.00 0.9097712 0.9436857 0.7976471
##
## Tuning parameter 'sigma' was held constant at a value of 0.1790538
## ROC was used to select the optimal model using the largest value.
## The final values used for the model were sigma = 0.1790538 and C = 2048.
# Inspect the final ksvm fit (cost, sigma, support-vector count, training error)
BAL_SVM_R_Tune$finalModel
## Support Vector Machine object of class "ksvm"
##
## SV type: C-svc (classification)
## parameter : cost C = 2048
##
## Gaussian Radial Basis kernel function.
## Hyperparameter : sigma = 0.179053781320727
##
## Number of Support Vectors : 302
##
## Objective Function Value : -123343.3
## Training error : 0.013158
## Probability model included.
# Resampling results per cost value (sigma held constant)
BAL_SVM_R_Tune$results
## sigma C ROC Sens Spec ROCSD SensSD
## 1 0.1790538 0.25 0.8792747 0.8964943 0.7005882 0.02548830 0.02322464
## 2 0.1790538 0.50 0.8828520 0.8964851 0.7064706 0.02410960 0.02143187
## 3 0.1790538 1.00 0.8853296 0.8961373 0.7064706 0.02490232 0.02237023
## 4 0.1790538 2.00 0.8879393 0.8958017 0.7164706 0.02417564 0.02120909
## 5 0.1790538 4.00 0.8909259 0.8937056 0.7241176 0.02144916 0.02499684
## 6 0.1790538 8.00 0.8960321 0.9003417 0.7300000 0.02035456 0.02715491
## 7 0.1790538 16.00 0.8967388 0.9052235 0.7264706 0.01882154 0.02634028
## 8 0.1790538 32.00 0.8982511 0.9059283 0.7347059 0.01957743 0.02468638
## 9 0.1790538 64.00 0.9005909 0.9135957 0.7482353 0.02224693 0.02746024
## 10 0.1790538 128.00 0.9039633 0.9233837 0.7552941 0.02309492 0.02966385
## 11 0.1790538 256.00 0.9090738 0.9374005 0.7652941 0.02327350 0.03080847
## 12 0.1790538 512.00 0.9090150 0.9412387 0.7782353 0.02271246 0.02797556
## 13 0.1790538 1024.00 0.9077637 0.9387643 0.7900000 0.02374909 0.03058368
## 14 0.1790538 2048.00 0.9097712 0.9436857 0.7976471 0.02420238 0.02639300
## SpecSD
## 1 0.04800669
## 2 0.05031619
## 3 0.05085061
## 4 0.04319292
## 5 0.04864069
## 6 0.04838064
## 7 0.04727280
## 8 0.05766006
## 9 0.05474856
## 10 0.04762981
## 11 0.04793155
## 12 0.04706648
## 13 0.04083366
## 14 0.04272306
# Cross-validated AUROC at the best cost C; outer parentheses print on assignment
(BAL_SVM_R_Train_AUROC <- BAL_SVM_R_Tune$results[BAL_SVM_R_Tune$results$C == BAL_SVM_R_Tune$bestTune$C,
                                                 c("ROC")])
## [1] 0.9097712
##################################
# Identifying and plotting the
# best model predictors
##################################
# model does not support variable importance measurement
##################################
# Independently evaluating the model
# on the test set
##################################
# Pair observed test-set labels with SVM class probabilities, then print
BAL_SVM_R_Test <- data.frame(BAL_SVM_R_Test_Observed = MA_Test$diagnosis,
                             BAL_SVM_R_Test_Predicted = predict(BAL_SVM_R_Tune,
                                                                MA_Test[, !names(MA_Test) %in% c("diagnosis")],
                                                                type = "prob"))
BAL_SVM_R_Test
## BAL_SVM_R_Test_Observed BAL_SVM_R_Test_Predicted.B
## 1 M 0.44662437
## 2 M 0.96681608
## 3 M 0.44658845
## 4 M 0.49596629
## 5 M 0.11150143
## 6 M 0.01569566
## 7 M 0.31379620
## 8 M 0.34778723
## 9 M 0.44660688
## 10 M 0.02901136
## 11 M 0.40709619
## 12 M 0.44656374
## 13 M 0.29240511
## 14 B 0.63760184
## 15 B 0.83664724
## 16 B 0.99997498
## 17 M 0.18805776
## 18 M 0.44661459
## 19 B 0.98590876
## 20 B 0.88515682
## 21 M 0.44663097
## 22 B 0.83142716
## 23 B 0.76187257
## 24 M 0.31943128
## 25 B 0.39732776
## 26 M 0.48486454
## 27 B 0.99999887
## 28 M 0.16435496
## 29 B 0.64793620
## 30 B 0.99903564
## 31 B 0.96526165
## 32 B 0.97945396
## 33 M 0.44659843
## 34 B 0.99989690
## 35 B 0.76853900
## 36 M 0.44661322
## 37 B 0.99278361
## 38 M 0.28446231
## 39 M 0.25385324
## 40 B 0.74652191
## 41 M 0.18837573
## 42 M 0.44657322
## 43 M 0.44660513
## 44 M 0.01181226
## 45 M 0.43513162
## 46 B 0.99791285
## 47 B 0.99736932
## 48 M 0.40247234
## 49 M 0.75056262
## 50 M 0.44663490
## 51 B 0.94428161
## 52 B 0.69498110
## 53 M 0.25963825
## 54 M 0.44662583
## 55 M 0.44658866
## 56 M 0.23194048
## 57 B 0.99979818
## 58 B 0.87311077
## 59 B 0.81813510
## 60 B 0.63012226
## 61 B 0.95423496
## 62 B 0.99996983
## 63 B 0.99936958
## 64 B 0.99994043
## 65 M 0.22183777
## 66 B 0.67864195
## 67 M 0.05280415
## 68 B 0.78464884
## 69 B 0.63010395
## 70 B 0.63010153
## 71 B 0.63010836
## 72 M 0.44657233
## 73 B 0.99782870
## 74 B 0.91971262
## 75 B 0.99999643
## 76 M 0.31889360
## 77 B 0.89735021
## 78 B 0.68136752
## 79 B 0.98731832
## 80 B 0.88269311
## 81 B 0.44297446
## 82 B 0.58223340
## 83 B 0.99975744
## 84 B 0.99912945
## 85 B 0.63008456
## 86 M 0.44662362
## 87 B 0.99996040
## 88 B 0.31981095
## 89 B 0.96210734
## 90 B 0.63009806
## 91 B 0.91531929
## 92 B 0.95207073
## 93 B 0.63007930
## 94 B 0.94340184
## 95 B 0.97479915
## 96 B 0.81672158
## 97 M 0.85442999
## 98 B 0.68987415
## 99 B 0.99952286
## 100 M 0.69757647
## 101 B 0.84281820
## 102 B 0.86154082
## 103 B 0.63009434
## 104 B 0.98927084
## 105 B 0.99013721
## 106 M 0.62321388
## 107 B 0.97686343
## 108 M 0.39672259
## 109 B 0.63005392
## 110 M 0.44666425
## 111 B 0.63008359
## 112 B 0.50601663
## 113 B 0.63010998
## 114 B 0.80018102
## 115 B 0.70965320
## 116 M 0.05996713
## 117 M 0.25816221
## 118 B 0.91915162
## 119 M 0.96681608
## 120 M 0.44659653
## 121 M 0.37879946
## 122 M 0.14365041
## 123 M 0.01429934
## 124 M 0.45018460
## 125 M 0.01569566
## 126 M 0.44660385
## 127 M 0.31379620
## 128 M 0.44665740
## 129 B 0.63002533
## 130 M 0.04854398
## 131 M 0.36356965
## 132 M 0.07119308
## 133 B 0.98590876
## 134 B 0.88515682
## 135 B 0.63009259
## 136 M 0.39144637
## 137 B 0.97776648
## 138 B 0.68430267
## 139 B 0.39732776
## 140 B 0.63008428
## 141 M 0.13525971
## 142 M 0.48486454
## 143 M 0.39703097
## 144 B 0.99876888
## 145 B 0.99999993
## 146 B 0.98148681
## 147 B 0.75366862
## 148 B 0.52900974
## 149 M 0.18003087
## 150 B 0.99999816
## 151 M 0.36986063
## 152 M 0.01615978
## 153 M 0.01179846
## 154 M 0.43513162
## 155 B 0.99791285
## 156 B 0.98708004
## 157 B 0.87622783
## 158 M 0.06918595
## 159 B 0.85767588
## 160 M 0.44656950
## 161 M 0.75056262
## 162 B 0.92288555
## 163 B 0.63009247
## 164 B 0.63004151
## 165 B 0.69498110
## 166 B 0.63006726
## 167 M 0.37933654
## 168 B 0.63007755
## 169 B 0.99838210
## 170 M 0.58769029
## 171 B 0.94781870
## 172 B 0.99996600
## 173 B 0.99990480
## 174 B 0.71567239
## 175 M 0.11516391
## 176 B 0.99994043
## 177 B 0.63004753
## 178 B 0.77813933
## 179 M 0.44656839
## 180 B 0.75846635
## 181 B 0.63003696
## 182 B 0.99014081
## 183 B 0.99624884
## 184 B 0.99999605
## 185 M 0.44661287
## 186 B 0.99400435
## 187 B 0.83290603
## 188 B 0.91218616
## 189 B 0.96651099
## 190 B 0.63007931
## 191 B 0.68136752
## 192 B 0.63006207
## 193 B 0.78872709
## 194 B 0.45225906
## 195 B 0.44297446
## 196 B 0.58223340
## 197 B 0.99975744
## 198 B 0.99912945
## 199 M 0.48573932
## 200 B 0.63006243
## 201 B 0.63009567
## 202 B 0.99988961
## 203 B 0.63002311
## 204 M 0.44656566
## 205 B 0.31981095
## 206 B 0.63875182
## 207 B 0.88256443
## 208 B 0.63011291
## 209 M 0.44659950
## 210 M 0.85442999
## 211 B 0.68987415
## 212 B 0.99992615
## 213 B 0.63005153
## 214 M 0.62321388
## 215 B 0.89393457
## 216 B 0.78611843
## 217 B 0.99888634
## 218 B 0.99998287
## 219 B 0.94387725
## 220 M 0.09677441
## 221 B 0.63002120
## 222 B 0.84509788
## 223 B 0.63007770
## 224 B 0.50601663
## 225 B 0.80018102
## 226 M 0.16459484
## BAL_SVM_R_Test_Predicted.M
## 1 5.533756e-01
## 2 3.318392e-02
## 3 5.534116e-01
## 4 5.040337e-01
## 5 8.884986e-01
## 6 9.843043e-01
## 7 6.862038e-01
## 8 6.522128e-01
## 9 5.533931e-01
## 10 9.709886e-01
## 11 5.929038e-01
## 12 5.534363e-01
## 13 7.075949e-01
## 14 3.623982e-01
## 15 1.633528e-01
## 16 2.501836e-05
## 17 8.119422e-01
## 18 5.533854e-01
## 19 1.409124e-02
## 20 1.148432e-01
## 21 5.533690e-01
## 22 1.685728e-01
## 23 2.381274e-01
## 24 6.805687e-01
## 25 6.026722e-01
## 26 5.151355e-01
## 27 1.134718e-06
## 28 8.356450e-01
## 29 3.520638e-01
## 30 9.643595e-04
## 31 3.473835e-02
## 32 2.054604e-02
## 33 5.534016e-01
## 34 1.030988e-04
## 35 2.314610e-01
## 36 5.533868e-01
## 37 7.216388e-03
## 38 7.155377e-01
## 39 7.461468e-01
## 40 2.534781e-01
## 41 8.116243e-01
## 42 5.534268e-01
## 43 5.533949e-01
## 44 9.881877e-01
## 45 5.648684e-01
## 46 2.087152e-03
## 47 2.630682e-03
## 48 5.975277e-01
## 49 2.494374e-01
## 50 5.533651e-01
## 51 5.571839e-02
## 52 3.050189e-01
## 53 7.403618e-01
## 54 5.533742e-01
## 55 5.534113e-01
## 56 7.680595e-01
## 57 2.018189e-04
## 58 1.268892e-01
## 59 1.818649e-01
## 60 3.698777e-01
## 61 4.576504e-02
## 62 3.016689e-05
## 63 6.304212e-04
## 64 5.956627e-05
## 65 7.781622e-01
## 66 3.213580e-01
## 67 9.471958e-01
## 68 2.153512e-01
## 69 3.698960e-01
## 70 3.698985e-01
## 71 3.698916e-01
## 72 5.534277e-01
## 73 2.171299e-03
## 74 8.028738e-02
## 75 3.566212e-06
## 76 6.811064e-01
## 77 1.026498e-01
## 78 3.186325e-01
## 79 1.268168e-02
## 80 1.173069e-01
## 81 5.570255e-01
## 82 4.177666e-01
## 83 2.425552e-04
## 84 8.705529e-04
## 85 3.699154e-01
## 86 5.533764e-01
## 87 3.960191e-05
## 88 6.801890e-01
## 89 3.789266e-02
## 90 3.699019e-01
## 91 8.468071e-02
## 92 4.792927e-02
## 93 3.699207e-01
## 94 5.659816e-02
## 95 2.520085e-02
## 96 1.832784e-01
## 97 1.455700e-01
## 98 3.101259e-01
## 99 4.771412e-04
## 100 3.024235e-01
## 101 1.571818e-01
## 102 1.384592e-01
## 103 3.699057e-01
## 104 1.072916e-02
## 105 9.862787e-03
## 106 3.767861e-01
## 107 2.313657e-02
## 108 6.032774e-01
## 109 3.699461e-01
## 110 5.533357e-01
## 111 3.699164e-01
## 112 4.939834e-01
## 113 3.698900e-01
## 114 1.998190e-01
## 115 2.903468e-01
## 116 9.400329e-01
## 117 7.418378e-01
## 118 8.084838e-02
## 119 3.318392e-02
## 120 5.534035e-01
## 121 6.212005e-01
## 122 8.563496e-01
## 123 9.857007e-01
## 124 5.498154e-01
## 125 9.843043e-01
## 126 5.533962e-01
## 127 6.862038e-01
## 128 5.533426e-01
## 129 3.699747e-01
## 130 9.514560e-01
## 131 6.364303e-01
## 132 9.288069e-01
## 133 1.409124e-02
## 134 1.148432e-01
## 135 3.699074e-01
## 136 6.085536e-01
## 137 2.223352e-02
## 138 3.156973e-01
## 139 6.026722e-01
## 140 3.699157e-01
## 141 8.647403e-01
## 142 5.151355e-01
## 143 6.029690e-01
## 144 1.231116e-03
## 145 7.017823e-08
## 146 1.851319e-02
## 147 2.463314e-01
## 148 4.709903e-01
## 149 8.199691e-01
## 150 1.843389e-06
## 151 6.301394e-01
## 152 9.838402e-01
## 153 9.882015e-01
## 154 5.648684e-01
## 155 2.087152e-03
## 156 1.291996e-02
## 157 1.237722e-01
## 158 9.308140e-01
## 159 1.423241e-01
## 160 5.534305e-01
## 161 2.494374e-01
## 162 7.711445e-02
## 163 3.699075e-01
## 164 3.699585e-01
## 165 3.050189e-01
## 166 3.699327e-01
## 167 6.206635e-01
## 168 3.699224e-01
## 169 1.617901e-03
## 170 4.123097e-01
## 171 5.218130e-02
## 172 3.400486e-05
## 173 9.519997e-05
## 174 2.843276e-01
## 175 8.848361e-01
## 176 5.956627e-05
## 177 3.699525e-01
## 178 2.218607e-01
## 179 5.534316e-01
## 180 2.415337e-01
## 181 3.699630e-01
## 182 9.859192e-03
## 183 3.751161e-03
## 184 3.949477e-06
## 185 5.533871e-01
## 186 5.995654e-03
## 187 1.670940e-01
## 188 8.781384e-02
## 189 3.348901e-02
## 190 3.699207e-01
## 191 3.186325e-01
## 192 3.699379e-01
## 193 2.112729e-01
## 194 5.477409e-01
## 195 5.570255e-01
## 196 4.177666e-01
## 197 2.425552e-04
## 198 8.705529e-04
## 199 5.142607e-01
## 200 3.699376e-01
## 201 3.699043e-01
## 202 1.103856e-04
## 203 3.699769e-01
## 204 5.534343e-01
## 205 6.801890e-01
## 206 3.612482e-01
## 207 1.174356e-01
## 208 3.698871e-01
## 209 5.534005e-01
## 210 1.455700e-01
## 211 3.101259e-01
## 212 7.384536e-05
## 213 3.699485e-01
## 214 3.767861e-01
## 215 1.060654e-01
## 216 2.138816e-01
## 217 1.113662e-03
## 218 1.712896e-05
## 219 5.612275e-02
## 220 9.032256e-01
## 221 3.699788e-01
## 222 1.549021e-01
## 223 3.699223e-01
## 224 4.939834e-01
## 225 1.998190e-01
## 226 8.354052e-01
##################################
# Reporting the independent evaluation results
# for the test set
##################################
# Build the ROC curve for the SVM test-set predictions; the factor levels are
# reversed before being passed to pROC's `levels` argument (controls, cases) --
# NOTE(review): this makes 'M' the case class, confirm intended positive class
BAL_SVM_R_Test_ROC <- roc(response = BAL_SVM_R_Test$BAL_SVM_R_Test_Observed,
                          predictor = BAL_SVM_R_Test$BAL_SVM_R_Test_Predicted.M,
                          levels = rev(levels(BAL_SVM_R_Test$BAL_SVM_R_Test_Observed)))
# Extract the scalar AUROC; the outer parentheses assign AND print the value
(BAL_SVM_R_Test_AUROC <- auc(BAL_SVM_R_Test_ROC)[1])
## [1] 0.9376258
##################################
# Setting the conditions
# for hyperparameter tuning
##################################
# Candidate neighbor counts (k = 1..15) evaluated during KNN tuning
KNN_Grid <- data.frame(k = 1:15)
##################################
# Running the k-nearest neighbors model
# by setting the caret method to 'knn'
##################################
# Fix the RNG state so the resampling folds are reproducible
set.seed(12345678)
# Tune KNN over KNN_Grid with caret; predictors are every column except the
# 'diagnosis' outcome, and inputs are centered/scaled (KNN is distance-based)
BAL_KNN_Tune <- train(x = MA_Train[,!names(MA_Train) %in% c("diagnosis")],
                      y = MA_Train$diagnosis,
                      method = "knn",
                      preProc = c("center", "scale"),
                      tuneGrid = KNN_Grid,
                      metric = "ROC",
                      trControl = RKFold_Control)
##################################
# Reporting the cross-validation results
# for the train set
##################################
# Print the caret train object: resampling setup, per-k ROC/Sens/Spec,
# and the k selected by the largest ROC
BAL_KNN_Tune
## k-Nearest Neighbors
##
## 912 samples
## 6 predictor
## 2 classes: 'B', 'M'
##
## Pre-processing: centered (6), scaled (6)
## Resampling: Cross-Validated (5 fold, repeated 5 times)
## Summary of sample sizes: 729, 729, 730, 730, 730, 730, ...
## Resampling results across tuning parameters:
##
## k ROC Sens Spec
## 1 0.9076428 0.9205797 0.8947059
## 2 0.8730636 0.8202471 0.7252941
## 3 0.8824422 0.8248574 0.6982353
## 4 0.8818437 0.8643417 0.7288235
## 5 0.8815800 0.8887689 0.7488235
## 6 0.8827032 0.8716644 0.7294118
## 7 0.8848091 0.8730648 0.7211765
## 8 0.8832599 0.8825080 0.7247059
## 9 0.8826004 0.8905416 0.7305882
## 10 0.8821762 0.8839146 0.7129412
## 11 0.8817764 0.8867185 0.7164706
## 12 0.8794329 0.8870664 0.7135294
## 13 0.8785997 0.8860046 0.7123529
## 14 0.8818346 0.8821571 0.7117647
## 15 0.8846281 0.8825263 0.7135294
##
## ROC was used to select the optimal model using the largest value.
## The final value used for the model was k = 1.
$finalModel BAL_KNN_Tune
## 1-nearest neighbor model
## Training set outcome distribution:
##
## B M
## 572 340
$results BAL_KNN_Tune
## k ROC Sens Spec ROCSD SensSD SpecSD
## 1 1 0.9076428 0.9205797 0.8947059 0.02583023 0.02881542 0.04064787
## 2 2 0.8730636 0.8202471 0.7252941 0.02876280 0.04286672 0.05229039
## 3 3 0.8824422 0.8248574 0.6982353 0.02631797 0.03848173 0.05807117
## 4 4 0.8818437 0.8643417 0.7288235 0.02394691 0.03156119 0.04651958
## 5 5 0.8815800 0.8887689 0.7488235 0.01965491 0.03178715 0.03649904
## 6 6 0.8827032 0.8716644 0.7294118 0.02080533 0.03287310 0.04411765
## 7 7 0.8848091 0.8730648 0.7211765 0.01884352 0.02994811 0.04482280
## 8 8 0.8832599 0.8825080 0.7247059 0.02014577 0.03294398 0.04127265
## 9 9 0.8826004 0.8905416 0.7305882 0.01945929 0.03067128 0.03620157
## 10 10 0.8821762 0.8839146 0.7129412 0.02010782 0.02722610 0.04767519
## 11 11 0.8817764 0.8867185 0.7164706 0.02007380 0.02682363 0.04462939
## 12 12 0.8794329 0.8870664 0.7135294 0.02001825 0.03088131 0.04331791
## 13 13 0.8785997 0.8860046 0.7123529 0.02061021 0.03027290 0.04246920
## 14 14 0.8818346 0.8821571 0.7117647 0.02038839 0.02947233 0.04432142
## 15 15 0.8846281 0.8825263 0.7135294 0.02000838 0.02791806 0.05114040
# Cross-validated AUROC for the best-tuned k; outer parentheses print the value
(BAL_KNN_Train_AUROC <- BAL_KNN_Tune$results[BAL_KNN_Tune$results$k==BAL_KNN_Tune$bestTune$k,
                                             c("ROC")])
## [1] 0.9076428
##################################
# Identifying and plotting the
# best model predictors
##################################
# model does not support variable importance measurement
##################################
# Independently evaluating the model
# on the test set
##################################
# Score the held-out test set: keep the observed diagnosis alongside the
# predicted class probabilities (columns .B and .M from type = "prob")
BAL_KNN_Test <- data.frame(BAL_KNN_Test_Observed = MA_Test$diagnosis,
                           BAL_KNN_Test_Predicted = predict(BAL_KNN_Tune,
                                                            MA_Test[,!names(MA_Test) %in% c("diagnosis")],
                                                            type = "prob"))
# Display the observed-versus-predicted table
BAL_KNN_Test
## BAL_KNN_Test_Observed BAL_KNN_Test_Predicted.B BAL_KNN_Test_Predicted.M
## 1 M 0 1
## 2 M 1 0
## 3 M 0 1
## 4 M 0 1
## 5 M 0 1
## 6 M 0 1
## 7 M 0 1
## 8 M 0 1
## 9 M 0 1
## 10 M 0 1
## 11 M 0 1
## 12 M 0 1
## 13 M 0 1
## 14 B 1 0
## 15 B 1 0
## 16 B 1 0
## 17 M 0 1
## 18 M 0 1
## 19 B 1 0
## 20 B 1 0
## 21 M 0 1
## 22 B 1 0
## 23 B 1 0
## 24 M 0 1
## 25 B 0 1
## 26 M 1 0
## 27 B 1 0
## 28 M 0 1
## 29 B 1 0
## 30 B 1 0
## 31 B 1 0
## 32 B 1 0
## 33 M 0 1
## 34 B 1 0
## 35 B 1 0
## 36 M 0 1
## 37 B 1 0
## 38 M 0 1
## 39 M 0 1
## 40 B 1 0
## 41 M 0 1
## 42 M 0 1
## 43 M 0 1
## 44 M 0 1
## 45 M 0 1
## 46 B 1 0
## 47 B 1 0
## 48 M 0 1
## 49 M 0 1
## 50 M 0 1
## 51 B 1 0
## 52 B 0 1
## 53 M 0 1
## 54 M 0 1
## 55 M 0 1
## 56 M 0 1
## 57 B 1 0
## 58 B 1 0
## 59 B 1 0
## 60 B 1 0
## 61 B 1 0
## 62 B 1 0
## 63 B 1 0
## 64 B 1 0
## 65 M 0 1
## 66 B 1 0
## 67 M 0 1
## 68 B 1 0
## 69 B 1 0
## 70 B 1 0
## 71 B 1 0
## 72 M 0 1
## 73 B 1 0
## 74 B 1 0
## 75 B 1 0
## 76 M 0 1
## 77 B 1 0
## 78 B 1 0
## 79 B 1 0
## 80 B 1 0
## 81 B 1 0
## 82 B 1 0
## 83 B 1 0
## 84 B 1 0
## 85 B 1 0
## 86 M 0 1
## 87 B 1 0
## 88 B 0 1
## 89 B 1 0
## 90 B 1 0
## 91 B 1 0
## 92 B 1 0
## 93 B 1 0
## 94 B 1 0
## 95 B 1 0
## 96 B 1 0
## 97 M 0 1
## 98 B 0 1
## 99 B 1 0
## 100 M 0 1
## 101 B 1 0
## 102 B 1 0
## 103 B 1 0
## 104 B 1 0
## 105 B 1 0
## 106 M 1 0
## 107 B 1 0
## 108 M 0 1
## 109 B 1 0
## 110 M 0 1
## 111 B 1 0
## 112 B 1 0
## 113 B 1 0
## 114 B 1 0
## 115 B 1 0
## 116 M 0 1
## 117 M 0 1
## 118 B 1 0
## 119 M 1 0
## 120 M 0 1
## 121 M 0 1
## 122 M 0 1
## 123 M 0 1
## 124 M 0 1
## 125 M 0 1
## 126 M 0 1
## 127 M 0 1
## 128 M 0 1
## 129 B 1 0
## 130 M 0 1
## 131 M 0 1
## 132 M 0 1
## 133 B 1 0
## 134 B 1 0
## 135 B 1 0
## 136 M 0 1
## 137 B 1 0
## 138 B 1 0
## 139 B 0 1
## 140 B 1 0
## 141 M 0 1
## 142 M 1 0
## 143 M 0 1
## 144 B 1 0
## 145 B 1 0
## 146 B 1 0
## 147 B 1 0
## 148 B 1 0
## 149 M 0 1
## 150 B 1 0
## 151 M 0 1
## 152 M 0 1
## 153 M 0 1
## 154 M 0 1
## 155 B 1 0
## 156 B 1 0
## 157 B 1 0
## 158 M 0 1
## 159 B 1 0
## 160 M 0 1
## 161 M 0 1
## 162 B 1 0
## 163 B 1 0
## 164 B 1 0
## 165 B 0 1
## 166 B 1 0
## 167 M 0 1
## 168 B 1 0
## 169 B 1 0
## 170 M 0 1
## 171 B 1 0
## 172 B 1 0
## 173 B 1 0
## 174 B 1 0
## 175 M 0 1
## 176 B 1 0
## 177 B 1 0
## 178 B 1 0
## 179 M 0 1
## 180 B 1 0
## 181 B 1 0
## 182 B 1 0
## 183 B 1 0
## 184 B 1 0
## 185 M 0 1
## 186 B 1 0
## 187 B 1 0
## 188 B 1 0
## 189 B 1 0
## 190 B 1 0
## 191 B 1 0
## 192 B 1 0
## 193 B 1 0
## 194 B 1 0
## 195 B 1 0
## 196 B 1 0
## 197 B 1 0
## 198 B 1 0
## 199 M 0 1
## 200 B 1 0
## 201 B 1 0
## 202 B 1 0
## 203 B 1 0
## 204 M 0 1
## 205 B 0 1
## 206 B 1 0
## 207 B 1 0
## 208 B 1 0
## 209 M 0 1
## 210 M 0 1
## 211 B 0 1
## 212 B 1 0
## 213 B 1 0
## 214 M 1 0
## 215 B 1 0
## 216 B 1 0
## 217 B 1 0
## 218 B 1 0
## 219 B 1 0
## 220 M 0 1
## 221 B 1 0
## 222 B 1 0
## 223 B 1 0
## 224 B 1 0
## 225 B 1 0
## 226 M 0 1
##################################
# Reporting the independent evaluation results
# for the test set
##################################
# Build the ROC curve for the KNN test-set predictions; the factor levels are
# reversed before being passed to pROC's `levels` argument (controls, cases) --
# NOTE(review): this makes 'M' the case class, confirm intended positive class
BAL_KNN_Test_ROC <- roc(response = BAL_KNN_Test$BAL_KNN_Test_Observed,
                        predictor = BAL_KNN_Test$BAL_KNN_Test_Predicted.M,
                        levels = rev(levels(BAL_KNN_Test$BAL_KNN_Test_Observed)))
# Extract the scalar AUROC; the outer parentheses assign AND print the value
(BAL_KNN_Test_AUROC <- auc(BAL_KNN_Test_ROC)[1])
## [1] 0.9361167
##################################
# Setting the conditions
# for hyperparameter tuning
##################################
# Naive Bayes tuning grid: try kernel density estimation on and off,
# holding the Laplace correction at fL = 2 and adjust = FALSE
# (adjust is recycled to both rows by data.frame)
NB_Grid <- data.frame(usekernel = c(TRUE, FALSE),
                      fL = 2,
                      adjust = FALSE)
##################################
# Running the naive bayes model
# by setting the caret method to 'nb'
##################################
# Fix the RNG state so the resampling folds are reproducible
set.seed(12345678)
# Tune Naive Bayes over NB_Grid with caret; predictors are every column
# except the 'diagnosis' outcome (no pre-processing, unlike the KNN fit)
BAL_NB_Tune <- train(x = MA_Train[,!names(MA_Train) %in% c("diagnosis")],
                     y = MA_Train$diagnosis,
                     method = "nb",
                     tuneGrid = NB_Grid,
                     metric = "ROC",
                     trControl = RKFold_Control)
##################################
# Reporting the cross-validation results
# for the train set
##################################
# Print the caret train object: resampling setup, per-parameter ROC/Sens/Spec,
# and the usekernel/fL/adjust combination selected by the largest ROC
BAL_NB_Tune
## Naive Bayes
##
## 912 samples
## 6 predictor
## 2 classes: 'B', 'M'
##
## No pre-processing
## Resampling: Cross-Validated (5 fold, repeated 5 times)
## Summary of sample sizes: 729, 729, 730, 730, 730, 730, ...
## Resampling results across tuning parameters:
##
## usekernel ROC Sens Spec
## FALSE 0.8873525 0.8552525 0.7605882
## TRUE NaN NaN NaN
##
## Tuning parameter 'fL' was held constant at a value of 2
## Tuning
## parameter 'adjust' was held constant at a value of FALSE
## ROC was used to select the optimal model using the largest value.
## The final values used for the model were fL = 2, usekernel = FALSE and adjust
## = FALSE.
$finalModel BAL_NB_Tune
## $apriori
## grouping
## B M
## 0.627193 0.372807
##
## $tables
## $tables$texture_mean
## [,1] [,2]
## B 2.866111 0.2131833
## M 3.054338 0.1756238
##
## $tables$smoothness_mean
## [,1] [,2]
## B -2.394224 0.1422449
## M -2.276397 0.1231980
##
## $tables$compactness_se
## [,1] [,2]
## B -4.044941 0.6453164
## M -3.559438 0.5400556
##
## $tables$texture_worst
## [,1] [,2]
## B 4.374373 0.4270218
## M 4.792405 0.3581525
##
## $tables$smoothness_worst
## [,1] [,2]
## B -1.553385 0.08631008
## M -1.469949 0.08502371
##
## $tables$symmetry_worst
## [,1] [,2]
## B -1.882256 0.3136605
## M -1.584759 0.3956037
##
##
## $levels
## [1] "B" "M"
##
## $call
## NaiveBayes.default(x = x, grouping = y, usekernel = FALSE, fL = param$fL)
##
## $x
## texture_mean smoothness_mean compactness_se texture_worst
## X1 2.339881 -2.133687 -3.015119 3.845649
## X2 2.877512 -2.468168 -4.336671 4.393994
## X3 3.056357 -2.210918 -3.217377 4.558289
## X6 2.753661 -2.057289 -3.397703 4.421124
## X7 2.994732 -2.357781 -4.281638 4.712710
## X8 3.036394 -2.129472 -3.496938 4.746189
## X9 3.082827 -2.061209 -3.351836 4.919334
## X10 3.179719 -2.131999 -2.628731 5.491708
## X11 3.145875 -2.500305 -4.681080 5.114832
## X12 2.884242 -2.332014 -3.203741 4.685875
## X13 3.210844 -2.328929 -2.489276 4.867801
## X15 3.118392 -2.179483 -2.824135 5.000625
## X16 3.315639 -2.172434 -3.160607 5.301845
## X17 3.002211 -2.315974 -4.455028 4.928999
## X18 3.029167 -2.145581 -3.688480 4.967287
## X20 2.664447 -2.324933 -4.226734 4.034440
## X21 2.754297 -2.230264 -3.964369 4.146994
## X22 2.520917 -2.278869 -4.246098 3.668189
## X23 2.657458 -2.232127 -2.932194 4.017490
## X25 3.062456 -2.188364 -3.972835 4.972347
## X26 2.797281 -2.131999 -3.270432 4.226835
## X27 3.069447 -2.249993 -3.488391 5.074506
## X28 3.008155 -2.360214 -3.603803 4.684455
## X30 2.711378 -2.318003 -3.495618 4.058702
## X32 2.928524 -2.199126 -3.377286 4.744803
## X35 2.883683 -2.263364 -3.551555 4.684455
## X38 2.913437 -2.409836 -5.318724 4.345339
## X39 3.226844 -2.365844 -4.515329 4.533450
## X40 3.035914 -2.286712 -3.799141 4.594701
## X42 3.061052 -2.098013 -4.037586 5.200544
## X43 3.211247 -2.398986 -2.296603 5.072078
## X45 3.082369 -2.331602 -4.280192 4.864503
## X46 2.867899 -2.208184 -3.220377 4.219926
## X47 2.823757 -2.453408 -4.106822 4.274627
## X48 2.926382 -2.155891 -3.756730 4.732992
## X49 2.683074 -2.272056 -4.249596 4.165667
## X52 2.793616 -2.565900 -4.442201 4.376271
## X53 2.903617 -2.493625 -4.781907 4.220791
## X54 2.928524 -2.164564 -3.519643 4.451081
## X55 3.091951 -2.401743 -4.575611 4.980549
## X56 2.931194 -2.351355 -4.741907 4.317312
## X57 2.921547 -2.250942 -3.769656 4.746189
## X58 3.072230 -2.174192 -3.556098 4.917397
## X59 2.960623 -2.518257 -4.756807 4.298995
## X61 2.700018 -2.176834 -4.510770 3.857866
## X62 3.043570 -2.085057 -3.453965 4.668773
## X64 2.629007 -2.561226 -3.234497 4.031624
## X65 3.171365 -2.187472 -3.631366 5.090232
## X67 3.044999 -2.259526 -4.042701 4.972347
## X68 2.946542 -2.508503 -4.686814 4.428254
## X69 2.852439 -2.238672 -2.452711 4.332192
## X71 3.059176 -2.406946 -4.103184 4.635650
## X73 3.199489 -2.233992 -2.879551 5.111247
## X74 2.759377 -2.295609 -3.880040 4.179793
## X75 2.804572 -2.389015 -4.006883 4.377888
## X78 2.781920 -2.239610 -2.840611 4.001364
## X79 3.176803 -2.051048 -2.683114 4.982438
## X80 2.890372 -2.309207 -4.097750 4.504524
## X81 3.043093 -2.205458 -4.071019 5.009980
## X82 2.763800 -2.227478 -3.331205 4.376271
## X83 3.215269 -2.241490 -2.865933 5.099260
## X84 3.269189 -2.107841 -2.805112 5.044600
## X85 2.750471 -2.330676 -4.010739 4.510643
## X86 2.918851 -2.315265 -4.105001 4.714115
## X87 3.066191 -2.359791 -3.512241 4.821893
## X88 3.202340 -2.404729 -3.994318 4.898589
## X89 3.081910 -2.433605 -3.691683 4.904441
## X90 2.723924 -2.178599 -3.120842 3.936655
## X91 3.178887 -2.410839 -4.007433 4.812472
## X92 3.125005 -2.385967 -3.711534 4.581390
## X93 2.691921 -2.609790 -4.567874 4.307339
## X94 2.906901 -2.280824 -4.205723 4.588794
## X95 2.987196 -2.264326 -3.292792 4.458901
## X96 3.136798 -2.399316 -3.357563 4.974243
## X97 2.881443 -2.258568 -4.440504 4.185067
## X99 2.552565 -2.409836 -4.319991 3.828226
## X100 2.984166 -2.327698 -3.542185 4.927712
## X102 2.597491 -2.145581 -4.524512 4.060557
## X103 3.021400 -2.524105 -5.099794 5.051957
## X104 2.965273 -2.297598 -3.818533 4.653708
## X105 2.959587 -2.303686 -3.808114 4.385955
## X106 2.744704 -1.967542 -3.536330 4.311499
## X108 2.919931 -2.467814 -4.559241 4.700742
## X109 2.979095 -2.020418 -2.445532 4.737167
## X110 3.056827 -2.435088 -4.162409 4.815168
## X111 2.832625 -2.266253 -3.529485 4.232863
## X112 3.033028 -2.309308 -3.208431 4.553792
## X113 2.978077 -2.546314 -2.597493 4.419537
## X114 3.005187 -2.187472 -3.284215 4.340417
## X115 2.761907 -2.162823 -3.810821 4.067964
## X116 3.069447 -2.326058 -3.686083 4.604270
## X117 2.757475 -2.357886 -2.694147 3.818947
## X118 2.813611 -2.152442 -3.663992 4.692258
## X119 3.131573 -2.158485 -3.224894 4.904441
## X120 2.996232 -2.476700 -4.776908 4.724620
## X121 2.381396 -2.367337 -4.180556 3.702239
## X123 3.005683 -1.933093 -2.322176 4.440088
## X125 2.796671 -2.642965 -3.420380 4.340417
## X126 2.845491 -2.432124 -4.691927 4.407598
## X127 3.206398 -2.379682 -4.444753 5.217803
## X128 2.939691 -2.498965 -3.600502 4.573218
## X129 2.796671 -2.162823 -3.173663 3.945456
## X130 3.223664 -2.287696 -3.448604 5.096856
## X131 2.587012 -2.238672 -3.706636 3.894116
## X132 2.969388 -2.214574 -4.210429 4.593226
## X133 3.069912 -2.294617 -3.936316 4.979920
## X134 2.634045 -2.357886 -4.189755 4.033502
## X135 3.086943 -2.361274 -4.253106 4.961581
## X137 2.813611 -2.252843 -4.283087 4.554542
## X138 2.733718 -2.339353 -4.185802 4.279690
## X139 2.866193 -2.148149 -3.358138 4.229421
## X140 2.594508 -2.150723 -3.352979 3.680332
## X141 2.482404 -2.380547 -5.175038 3.488165
## X142 2.893146 -2.330882 -3.943514 4.538741
## X144 2.767576 -2.444494 -4.158563 4.262772
## X146 2.684440 -2.161086 -3.048922 3.760309
## X147 2.808197 -2.215490 -3.315111 4.621105
## X148 2.932260 -2.508626 -3.020640 4.553792
## X149 2.719979 -2.305590 -3.773566 4.089126
## X150 2.885359 -2.532753 -4.140179 4.316482
## X152 3.030134 -2.363929 -2.915813 4.853256
## X154 2.571084 -2.327493 -4.712199 3.737909
## X155 2.730464 -2.366164 -3.903559 4.147887
## X156 2.887033 -2.447149 -4.159203 4.534963
## X158 2.968361 -2.597628 -3.626468 4.741335
## X159 2.544747 -2.373974 -4.626496 3.953251
## X160 2.561868 -2.588269 -5.093908 3.932732
## X161 3.004692 -2.217325 -3.727620 4.608673
## X162 2.768832 -2.442537 -3.488391 3.894116
## X163 2.898671 -2.189256 -3.782311 4.621834
## X164 3.100993 -2.290657 -3.449863 4.783310
## X165 3.092859 -2.472306 -3.671433 4.751724
## X166 2.983660 -2.474442 -4.830441 4.579906
## X168 2.933857 -2.423059 -3.700952 4.615263
## X169 3.205993 -2.254748 -3.314836 5.020540
## X171 2.516890 -2.274970 -4.439656 3.665973
## X172 2.977059 -2.402626 -4.544075 4.863182
## X175 2.718001 -2.431328 -4.588313 4.028805
## X176 2.670694 -2.392729 -4.827439 3.815845
## X177 2.893700 -2.333147 -2.429510 4.471360
## X178 3.001217 -2.319630 -3.195648 4.767568
## X179 3.100993 -2.772429 -6.095937 4.806397
## X180 2.569554 -2.437374 -5.057098 3.721768
## X182 3.279783 -2.170680 -3.045133 5.090835
## X183 3.011113 -2.343720 -3.788479 5.050733
## X184 2.702703 -2.401411 -3.289298 3.883102
## X187 2.922086 -2.454804 -4.690619 4.619646
## X188 2.844328 -2.325444 -4.743973 4.225973
## X189 2.855895 -2.295609 -4.735735 4.628388
## X190 2.766319 -2.515778 -3.811273 4.065190
## X191 3.140698 -2.230264 -1.999522 5.304618
## X192 3.063858 -2.436231 -4.022955 4.401206
## X193 2.902520 -2.666429 -5.000289 4.177151
## X194 3.290638 -2.269150 -3.258397 5.421659
## X196 2.793004 -2.533131 -4.143325 4.278004
## X197 3.104138 -2.120264 -3.396807 5.122583
## X198 3.083743 -2.607617 -2.938218 4.495315
## X200 3.006672 -2.315468 -4.137043 4.879637
## X201 2.973487 -2.344866 -4.029119 4.761381
## X202 2.961141 -2.411508 -3.661653 4.581390
## X203 3.283539 -2.170680 -2.971820 5.042143
## X204 3.167583 -2.022683 -3.471191 5.551376
## X205 2.923162 -2.306091 -3.957544 4.490698
## X206 2.814210 -2.421819 -3.953366 4.124564
## X207 2.848971 -2.217325 -4.382827 4.378696
## X208 3.008648 -2.433605 -4.197707 4.522074
## X209 3.115292 -2.300587 -3.492984 4.815841
## X210 2.558002 -2.503234 -4.393290 3.696783
## X211 3.097386 -2.397995 -3.321185 4.725319
## X212 2.941276 -2.422383 -4.058784 4.517508
## X214 3.241029 -2.296603 -2.458654 4.741335
## X216 2.829087 -2.276917 -3.370280 4.660893
## X217 2.909630 -2.368404 -3.171992 4.673060
## X218 2.861057 -2.519001 -3.467337 4.477566
## X220 3.480317 -2.474560 -3.632877 5.725074
## X222 2.631889 -2.252843 -3.766193 3.825137
## X223 2.863914 -2.243373 -4.268698 4.347796
## X224 3.008155 -2.277892 -3.740594 4.890764
## X225 2.834389 -2.471596 -4.506230 4.409194
## X226 2.600465 -2.312030 -4.248895 3.801311
## X228 2.741485 -2.480397 -3.439834 4.039126
## X229 3.176803 -2.537928 -3.774873 4.956498
## X230 3.105931 -2.218244 -3.255021 4.881604
## X232 3.298795 -2.676116 -4.106215 5.107058
## X233 3.520757 -2.553614 -4.988923 5.547844
## X234 3.325396 -2.390433 -3.881494 5.315680
## X235 2.766948 -2.469348 -4.565949 4.025039
## X236 3.056357 -2.400198 -4.280915 4.890111
## X239 3.326833 -2.498235 -3.559607 5.484477
## X240 3.670715 -2.321564 -3.580922 5.699444
## X241 2.747271 -2.362017 -4.383628 4.014653
## X242 2.710713 -2.535022 -5.312416 4.136255
## X243 2.900872 -2.344241 -2.827848 4.733688
## X244 3.168424 -2.520368 -3.624216 4.618186
## X245 3.157000 -2.275943 -3.425900 4.906389
## X246 2.988708 -2.234926 -4.278748 4.835955
## X248 2.646884 -2.434974 -2.883833 3.883102
## X249 3.227637 -2.337487 -4.570769 5.191870
## X250 2.703373 -2.289669 -4.399783 4.208655
## X251 3.159550 -2.295609 -3.242144 4.665910
## X253 2.986692 -2.242431 -2.984397 4.562778
## X254 2.837908 -2.294617 -4.197707 4.525113
## X255 2.961658 -2.268184 -4.017384 4.485299
## X256 2.836150 -2.210918 -3.619727 4.283900
## X257 3.359333 -2.379466 -3.043873 5.253674
## X258 2.848971 -2.013654 -3.081726 4.333015
## X259 3.144152 -2.199126 -2.814244 4.977398
## X263 3.096934 -2.408057 -2.816582 4.683033
## X264 2.964242 -2.545931 -4.698932 4.979289
## X266 3.437851 -2.357147 -4.214480 5.806493
## X267 2.941804 -2.334282 -3.329528 4.355967
## X268 3.083743 -2.531244 -3.727205 4.874383
## X269 2.785628 -2.361804 -3.826763 4.412381
## X270 3.015045 -2.223774 -3.039684 4.534207
## X272 2.568022 -2.319324 -4.490057 3.725005
## X273 3.044046 -2.364354 -3.003764 4.748958
## X274 2.751748 -2.403843 -4.540319 4.181552
## X275 3.197856 -2.424188 -4.449022 5.162741
## X276 2.854169 -2.099644 -4.207065 4.008967
## X277 2.650421 -2.366697 -4.710753 4.008967
## X278 2.994732 -2.416538 -4.497213 4.464360
## X279 2.881443 -2.532250 -5.244966 4.600594
## X281 3.280911 -2.282782 -3.709490 5.232668
## X282 2.640485 -2.549381 -4.239139 3.938613
## X283 2.900322 -2.266253 -3.817167 4.781263
## X284 2.932260 -2.238672 -3.357851 4.525113
## X285 2.753661 -2.548741 -3.228674 4.074426
## X286 2.912351 -2.477772 -4.827314 4.367359
## X287 3.033028 -2.452827 -2.965009 4.686585
## X288 2.574138 -2.665709 -4.308776 3.654863
## X289 2.993730 -2.523232 -2.493503 4.305672
## X290 2.938633 -2.440354 -4.272276 4.603535
## X291 2.982140 -2.435317 -2.240550 4.288943
## X292 2.949688 -2.408835 -3.856115 4.607206
## X293 2.773838 -2.297598 -3.910524 4.096440
## X294 2.859913 -2.480277 -4.199705 4.574706
## X295 2.623218 -2.336452 -4.439656 3.860909
## X296 2.585506 -2.386184 -4.809369 3.804433
## X297 2.513656 -2.462989 -4.405500 3.573135
## X298 2.898119 -2.305790 -4.600183 4.392389
## X299 2.899772 -2.721744 -4.285263 4.537986
## X300 3.139400 -2.287696 -4.238446 4.458120
## X301 2.939162 -2.162823 -3.441082 4.610871
## X302 2.990217 -2.470885 -3.390554 4.366547
## X303 3.172203 -2.225624 -3.050822 4.833951
## X304 2.923699 -2.236797 -4.671096 4.482982
## X306 3.198265 -2.593740 -3.756302 4.976136
## X307 2.761275 -2.463811 -4.845841 4.143420
## X308 2.667228 -2.658546 -5.321995 4.109184
## X309 2.542389 -2.606939 -5.587067 3.805473
## X310 2.627563 -2.482669 -5.357855 3.852784
## X312 2.753024 -2.574656 -5.112502 4.256821
## X313 2.593013 -2.431101 -3.587045 3.748604
## X314 2.372111 -2.453757 -4.312501 3.334618
## X316 2.824351 -2.463811 -5.805151 4.076268
## X318 2.937573 -2.328313 -4.098955 4.518270
## X319 2.939162 -2.305790 -2.719617 4.393191
## X320 2.833213 -2.582696 -4.529135 4.121857
## X321 2.783776 -2.243373 -3.154728 4.157683
## X322 2.978586 -2.523232 -4.285989 4.363297
## X323 2.589267 -2.176834 -3.904055 4.199074
## X324 3.068518 -2.145581 -3.716867 4.991235
## X325 2.721953 -2.444955 -4.255923 4.225110
## X326 2.850707 -2.274970 -4.654991 4.200819
## X329 3.030617 -2.146436 -3.871361 4.896635
## X331 2.741485 -2.354826 -3.428055 4.276316
## X332 2.962692 -2.345597 -3.424978 4.273783
## X333 2.988708 -2.249993 -4.506230 4.576936
## X335 2.945491 -2.487350 -4.775721 4.768255
## X336 3.044522 -2.190150 -4.019052 5.070864
## X337 2.655352 -2.357886 -3.770090 3.802352
## X339 2.863914 -2.295609 -4.234297 4.654427
## X340 3.189241 -2.235861 -3.926629 4.919334
## X341 2.805782 -2.327800 -3.489045 4.236301
## X342 2.823757 -2.467342 -3.360727 4.366547
## X343 2.705380 -2.270118 -3.975495 4.093700
## X344 3.076390 -2.323094 -3.390851 5.160983
## X345 2.737609 -2.162823 -4.512591 3.928802
## X346 2.688528 -2.314455 -3.063797 4.054986
## X347 2.939162 -2.478607 -4.513503 4.670202
## X348 2.690565 -2.421932 -4.195713 3.906069
## X349 2.774462 -2.399537 -4.762058 4.173623
## X350 2.705380 -2.155891 -3.722643 3.885109
## X351 2.837323 -2.582167 -4.963132 4.079030
## X352 2.955951 -2.085057 -2.724332 4.454212
## X353 2.859913 -2.163693 -3.159900 4.407598
## X354 3.248046 -2.278869 -3.716867 5.075113
## X355 2.644045 -2.620864 -3.385226 3.685830
## X358 2.785628 -2.436917 -4.835968 4.562030
## X359 2.740195 -2.489758 -3.540804 3.883102
## X360 2.907993 -2.293625 -4.712533 4.519792
## X361 2.894253 -2.598837 -5.361683 4.190330
## X362 3.071303 -2.455503 -3.889772 4.818532
## X363 2.935982 -2.335522 -4.106822 4.592488
## X365 2.830268 -2.533635 -4.382027 4.252561
## X366 3.080992 -2.391416 -3.960163 4.620376
## X367 3.289521 -2.312131 -3.063155 5.110649
## X369 2.847812 -2.366164 -4.499010 4.625478
## X370 3.086487 -2.241490 -3.568079 4.578422
## X371 3.148024 -2.328724 -3.239844 4.938626
## X372 2.580974 -2.530364 -4.209755 3.675924
## X374 2.853593 -2.359579 -3.965951 4.374653
## X375 2.776954 -2.488674 -4.143325 4.121857
## X376 2.776954 -2.314658 -4.010739 4.023154
## X377 3.006672 -2.399867 -2.571380 4.346158
## X378 3.339677 -2.588003 -4.420352 5.217230
## X379 2.718001 -2.492778 -3.511906 4.069812
## X380 2.935451 -2.107018 -3.090263 5.050733
## X381 2.561868 -2.089896 -4.030244 4.150563
## X382 2.703373 -2.527355 -3.961739 4.177151
## X383 3.123246 -2.668589 -3.087848 4.785356
## X384 2.861057 -2.261443 -3.295487 4.371414
## X385 2.618855 -2.481353 -3.876173 3.849729
## X386 3.148024 -2.443918 -4.050136 4.981809
## X387 2.645465 -2.512319 -3.497929 4.037253
## X390 3.144583 -2.292635 -3.194915 4.900541
## X392 2.823757 -2.264326 -3.929169 4.344519
## X394 3.103689 -2.148149 -3.289835 4.787400
## X395 2.874694 -2.273998 -4.115977 4.578422
## X398 2.859913 -2.520244 -3.359000 4.197328
## X399 2.696652 -2.558639 -4.220588 4.134460
## X400 2.848392 -2.398325 -3.930187 4.479114
## X401 3.045474 -2.095571 -3.291984 4.721123
## X402 2.389680 -2.422270 -4.419521 4.115529
## X403 2.906354 -2.610334 -3.293330 4.488386
## X405 2.704711 -2.443918 -4.768748 3.796097
## X406 2.922624 -2.298593 -3.847172 4.562030
## X407 2.698673 -2.354405 -4.385232 4.064264
## X408 3.061988 -2.583490 -3.105547 4.666626
## X409 3.028199 -2.267218 -3.585601 4.549287
## X410 2.885917 -2.443573 -3.767923 4.796917
## X411 2.866193 -2.423849 -4.610484 5.256500
## X412 2.823163 -2.228406 -4.671844 4.625478
## X413 3.076390 -2.529611 -3.621595 4.735776
## X414 3.096030 -2.463341 -3.572698 4.971715
## X415 3.394844 -2.486508 -4.249596 5.289608
## X416 3.052585 -2.325547 -3.489045 4.680900
## X417 3.077312 -2.259526 -4.089954 4.952042
## X418 3.048325 -2.189256 -3.247018 4.712008
## X419 2.498974 -2.432124 -4.371680 3.803393
## X421 2.946542 -2.459707 -3.888795 4.664478
## X423 2.773838 -2.218244 -3.909526 4.072581
## X424 2.951258 -2.401632 -3.572342 4.556042
## X425 2.950735 -2.230264 -3.971242 4.374653
## X426 3.057768 -2.511210 -5.167816 4.800985
## X428 3.090133 -2.430305 -3.840633 5.002499
## X431 3.114848 -2.307899 -2.778526 4.706382
## X432 2.872434 -2.249993 -3.412764 4.353519
## X433 2.972464 -2.177716 -3.606378 4.523594
## X434 3.089678 -2.284745 -3.197114 4.932212
## X436 2.976549 -2.244316 -4.022396 4.923849
## X437 2.972464 -2.392948 -4.296216 4.470584
## X438 2.771338 -2.470057 -4.346659 4.242305
## X439 2.975530 -2.443688 -4.514416 4.737167
## X440 2.751110 -2.529988 -4.684430 4.039126
## X441 2.844909 -2.417435 -3.070887 4.656584
## X443 2.759377 -2.428489 -4.460204 3.862936
## X444 2.907993 -2.508134 -4.319240 4.385955
## X445 2.824351 -2.413852 -4.160484 4.279690
## X446 3.214466 -2.273026 -3.899600 4.895332
## X447 3.333275 -2.302885 -3.904551 5.378924
## X448 2.871302 -2.388252 -4.447312 4.339596
## X449 2.962175 -2.478368 -3.888306 4.763445
## X450 3.021400 -2.334695 -3.875209 5.004371
## X451 3.069912 -2.716133 -2.802965 4.748958
## X452 3.218876 -2.271086 -3.948168 4.934138
## X453 3.340385 -2.472543 -3.653898 5.343130
## X455 2.841998 -2.455387 -4.763111 4.290621
## X456 3.424914 -2.381087 -4.385232 5.539246
## X457 3.377246 -2.369045 -3.722229 5.393426
## X459 3.224062 -2.480636 -4.713424 4.992489
## X460 3.339322 -2.527731 -4.366153 5.290165
## X461 3.301377 -2.312837 -3.808114 5.150996
## X462 3.268428 -2.221927 -2.923598 4.960311
## X464 2.910174 -2.464163 -4.073954 4.442448
## X465 2.902520 -2.594811 -4.204383 4.432205
## X466 3.002211 -2.490844 -2.709501 4.556042
## X467 3.032064 -2.444725 -3.448604 4.553042
## X468 2.895912 -2.487590 -4.376442 4.489157
## X469 3.149740 -2.376339 -2.655695 4.796239
## X471 2.917230 -2.413964 -3.917538 4.565019
## X472 3.337192 -2.435888 -4.163695 5.081777
## X473 2.703373 -2.513553 -4.034191 3.934694
## X475 2.748552 -2.295609 -3.476029 4.042868
## X476 2.755570 -2.403511 -4.019608 4.042868
## X477 3.021887 -2.415642 -3.414891 4.684455
## X480 2.970927 -2.276917 -2.905892 4.364923
## X481 2.892037 -2.398325 -4.094745 4.727414
## X482 2.956991 -2.526854 -4.526359 4.624021
## X483 2.643334 -2.233992 -4.232228 3.944480
## X485 2.423031 -2.260484 -4.006334 3.500171
## X487 2.824351 -2.448652 -4.273710 4.551541
## X488 2.934920 -2.217325 -3.563834 4.897287
## X493 3.023347 -2.301586 -3.611918 4.597650
## X494 2.551786 -2.607481 -4.602175 3.744333
## X495 3.022374 -2.612513 -3.886355 4.768255
## X496 3.006178 -2.344762 -4.289630 4.769627
## X497 2.899772 -2.229335 -3.688080 4.290621
## X498 2.851284 -2.415978 -4.289630 4.467474
## X500 3.055886 -2.221005 -3.578770 4.921270
## X502 3.198265 -2.152442 -3.255540 5.058072
## X504 2.987196 -2.370650 -3.394420 4.430625
## X505 2.554899 -1.811554 -3.091803 3.746469
## X506 2.575661 -2.075450 -3.018387 3.916970
## X508 2.840247 -2.125276 -3.751606 4.169207
## X510 3.175968 -2.134532 -3.007805 5.257064
## X511 2.687167 -2.513430 -3.240099 3.873042
## X512 2.687847 -2.468404 -4.425352 3.871024
## X513 3.021400 -2.201835 -3.787595 4.849274
## X515 2.948116 -2.384338 -4.116590 4.740641
## X516 2.923699 -2.254748 -4.109864 4.363297
## X517 3.024320 -2.236797 -3.975495 4.607940
## X519 2.902520 -2.105375 -3.759731 4.469807
## X520 2.815409 -2.184802 -4.159844 4.255969
## X521 2.631889 -1.987045 -3.673006 3.897110
## X522 3.072693 -2.273026 -3.438276 4.660893
## X524 2.927453 -2.311021 -3.793796 4.565765
## X525 2.752386 -2.354721 -3.891240 4.360042
## X526 2.572612 -2.267218 -4.038721 3.957138
## X527 2.931194 -2.230264 -4.238446 4.530422
## X528 2.507157 -2.407612 -4.691927 4.035378
## X529 2.577942 -2.081043 -3.427439 3.636967
## X530 2.598235 -2.207275 -4.462803 3.680332
## X531 2.865624 -2.232127 -4.009085 4.735080
## X532 2.996732 -2.286712 -4.058784 4.792163
## X533 2.793004 -2.377632 -4.929793 4.120954
## X537 3.115735 -2.265289 -3.656219 5.138013
## X538 3.196221 -2.090705 -3.353837 5.011847
## X539 3.238286 -2.513553 -4.636454 4.931570
## X540 3.236323 -2.445532 -2.740005 4.993116
## X541 2.670002 -2.304186 -3.191261 4.073504
## X543 3.235536 -2.491931 -4.446458 5.018060
## X544 3.334345 -2.445186 -4.288901 5.304063
## X546 3.145445 -2.380979 -3.863709 4.811124
## X547 2.794228 -2.360850 -4.927168 4.258523
## X548 2.808197 -2.421707 -3.478943 4.281375
## X551 3.067122 -2.599510 -4.506230 4.500691
## X552 3.110845 -2.346955 -3.489701 4.754487
## X553 3.382015 -2.491810 -4.395720 5.238363
## X554 3.088311 -2.381628 -3.998671 4.522074
## X555 3.364533 -2.510471 -3.838308 5.223531
## X556 3.318178 -2.404618 -3.598673 5.175599
## X557 2.975019 -2.299590 -3.806762 4.351068
## X558 3.327910 -2.510471 -4.488276 5.136237
## X559 3.121483 -2.468286 -3.070671 4.685165
## X560 3.175133 -2.379358 -3.512576 5.303509
## X562 3.379974 -2.597090 -4.724179 5.365966
## X564 3.222469 -2.208184 -3.144232 4.832614
## X565 3.108614 -2.198225 -3.543568 4.622564
## X567 3.335058 -2.470412 -3.288494 5.129122
## X568 3.378611 -2.138767 -2.787418 5.425895
## X570 2.339881 -2.133687 -3.015119 3.845649
## X571 2.877512 -2.468168 -4.336671 4.393994
## X572 3.056357 -2.210918 -3.217377 4.558289
## X573 3.014554 -1.948413 -2.595883 4.629842
## X575 2.753661 -2.057289 -3.397703 4.421124
## X576 2.994732 -2.357781 -4.281638 4.712710
## X577 3.036394 -2.129472 -3.496938 4.746189
## X578 3.082827 -2.061209 -3.351836 4.919334
## X579 3.179719 -2.131999 -2.628731 5.491708
## X580 3.145875 -2.500305 -4.681080 5.114832
## X583 3.175968 -2.476819 -3.465416 4.712710
## X584 3.118392 -2.179483 -2.824135 5.000625
## X585 3.315639 -2.172434 -3.160607 5.301845
## X586 3.002211 -2.315974 -4.455028 4.928999
## X587 3.029167 -2.145581 -3.688480 4.967287
## X588 3.097837 -2.319630 -3.967007 4.928999
## X589 2.664447 -2.324933 -4.226734 4.034440
## X590 2.754297 -2.230264 -3.964369 4.146994
## X591 2.520917 -2.278869 -4.246098 3.668189
## X592 2.657458 -2.232127 -2.932194 4.017490
## X593 3.137232 -2.361486 -4.374852 5.214935
## X595 2.797281 -2.131999 -3.270432 4.226835
## X601 2.928524 -2.199126 -3.377286 4.744803
## X602 3.177220 -2.122767 -3.479591 5.005619
## X603 3.276012 -2.364354 -3.405808 4.930285
## X604 2.883683 -2.263364 -3.551555 4.684455
## X605 3.072230 -2.342366 -3.689280 4.806397
## X606 3.078233 -2.320444 -3.508226 4.895332
## X607 2.913437 -2.409836 -5.318724 4.345339
## X608 3.226844 -2.365844 -4.515329 4.533450
## X609 3.035914 -2.286712 -3.799141 4.594701
## X610 3.071767 -2.505681 -4.508043 4.888151
## X611 3.061052 -2.098013 -4.037586 5.200544
## X613 3.009635 -2.262403 -3.841099 4.736472
## X614 3.082369 -2.331602 -4.280192 4.864503
## X615 2.867899 -2.208184 -3.220377 4.219926
## X618 2.683074 -2.272056 -4.249596 4.165667
## X619 3.104587 -2.435888 -4.281638 4.988725
## X620 3.072693 -2.449115 -4.629668 4.572474
## X621 2.793616 -2.565900 -4.442201 4.376271
## X622 2.903617 -2.493625 -4.781907 4.220791
## X623 2.928524 -2.164564 -3.519643 4.451081
## X624 3.091951 -2.401743 -4.575611 4.980549
## X625 2.931194 -2.351355 -4.741907 4.317312
## X627 3.072230 -2.174192 -3.556098 4.917397
## X628 2.960623 -2.518257 -4.756807 4.298995
## X629 2.467252 -2.327698 -4.551629 3.639212
## X630 2.700018 -2.176834 -4.510770 3.857866
## X631 3.043570 -2.085057 -3.453965 4.668773
## X632 3.097837 -2.254748 -2.651292 4.839292
## X633 2.629007 -2.561226 -3.234497 4.031624
## X635 3.175551 -2.143873 -3.767923 5.085404
## X636 3.044999 -2.259526 -4.042701 4.972347
## X637 2.946542 -2.508503 -4.686814 4.428254
## X638 2.852439 -2.238672 -2.452711 4.332192
## X640 3.059176 -2.406946 -4.103184 4.635650
## X642 3.199489 -2.233992 -2.879551 5.111247
## X643 2.759377 -2.295609 -3.880040 4.179793
## X644 2.804572 -2.389015 -4.006883 4.377888
## X645 2.978077 -2.389451 -3.815350 4.484527
## X646 2.392426 -2.047168 -3.561718 3.284809
## X647 2.781920 -2.239610 -2.840611 4.001364
## X648 3.176803 -2.051048 -2.683114 4.982438
## X649 2.890372 -2.309207 -4.097750 4.504524
## X651 2.763800 -2.227478 -3.331205 4.376271
## X652 3.215269 -2.241490 -2.865933 5.099260
## X653 3.269189 -2.107841 -2.805112 5.044600
## X654 2.750471 -2.330676 -4.010739 4.510643
## X655 2.918851 -2.315265 -4.105001 4.714115
## X656 3.066191 -2.359791 -3.512241 4.821893
## X657 3.202340 -2.404729 -3.994318 4.898589
## X658 3.081910 -2.433605 -3.691683 4.904441
## X659 2.723924 -2.178599 -3.120842 3.936655
## X660 3.178887 -2.410839 -4.007433 4.812472
## X661 3.125005 -2.385967 -3.711534 4.581390
## X662 2.691921 -2.609790 -4.567874 4.307339
## X663 2.906901 -2.280824 -4.205723 4.588794
## X664 2.987196 -2.264326 -3.292792 4.458901
## X667 2.992728 -2.278869 -4.224681 4.614531
## X668 2.552565 -2.409836 -4.319991 3.828226
## X669 2.984166 -2.327698 -3.542185 4.927712
## X670 3.218076 -2.355142 -4.207737 5.196499
## X671 2.597491 -2.145581 -4.524512 4.060557
## X672 3.021400 -2.524105 -5.099794 5.051957
## X674 2.959587 -2.303686 -3.808114 4.385955
## X675 2.744704 -1.967542 -3.536330 4.311499
## X677 2.919931 -2.467814 -4.559241 4.700742
## X678 2.979095 -2.020418 -2.445532 4.737167
## X679 3.056827 -2.435088 -4.162409 4.815168
## X680 2.832625 -2.266253 -3.529485 4.232863
## X681 3.033028 -2.309308 -3.208431 4.553792
## X682 2.978077 -2.546314 -2.597493 4.419537
## X683 3.005187 -2.187472 -3.284215 4.340417
## X685 3.069447 -2.326058 -3.686083 4.604270
## X686 2.757475 -2.357886 -2.694147 3.818947
## X687 2.813611 -2.152442 -3.663992 4.692258
## X689 2.996232 -2.476700 -4.776908 4.724620
## X690 2.381396 -2.367337 -4.180556 3.702239
## X692 3.005683 -1.933093 -2.322176 4.440088
## X693 2.387845 -2.206366 -4.361440 3.703328
## X694 2.796671 -2.642965 -3.420380 4.340417
## X695 2.845491 -2.432124 -4.691927 4.407598
## X697 2.939691 -2.498965 -3.600502 4.573218
## X698 2.796671 -2.162823 -3.173663 3.945456
## X699 3.223664 -2.287696 -3.448604 5.096856
## X700 2.587012 -2.238672 -3.706636 3.894116
## X701 2.969388 -2.214574 -4.210429 4.593226
## X702 3.069912 -2.294617 -3.936316 4.979920
## X703 2.634045 -2.357886 -4.189755 4.033502
## X704 3.086943 -2.361274 -4.253106 4.961581
## X705 3.112181 -2.401853 -4.421183 5.084195
## X706 2.813611 -2.252843 -4.283087 4.554542
## X707 2.733718 -2.339353 -4.185802 4.279690
## X708 2.866193 -2.148149 -3.358138 4.229421
## X711 2.893146 -2.330882 -3.943514 4.538741
## X712 2.851284 -2.214574 -4.054163 4.648665
## X714 2.706048 -2.551944 -4.027995 4.167437
## X715 2.684440 -2.161086 -3.048922 3.760309
## X716 2.808197 -2.215490 -3.315111 4.621105
## X717 2.932260 -2.508626 -3.020640 4.553792
## X718 2.719979 -2.305590 -3.773566 4.089126
## X719 2.885359 -2.532753 -4.140179 4.316482
## X720 3.033991 -2.175952 -4.472389 4.449513
## X721 3.030134 -2.363929 -2.915813 4.853256
## X722 2.730464 -2.233059 -2.344866 4.055916
## X723 2.571084 -2.327493 -4.712199 3.737909
## X724 2.730464 -2.366164 -3.903559 4.147887
## X725 2.887033 -2.447149 -4.159203 4.534963
## X726 3.032064 -2.193731 -3.004975 4.526631
## X728 2.544747 -2.373974 -4.626496 3.953251
## X729 2.561868 -2.588269 -5.093908 3.932732
## X731 2.768832 -2.442537 -3.488391 3.894116
## X733 3.100993 -2.290657 -3.449863 4.783310
## X734 3.092859 -2.472306 -3.671433 4.751724
## X735 2.983660 -2.474442 -4.830441 4.579906
## X736 2.273156 -2.344032 -4.623742 3.221497
## X737 2.933857 -2.423059 -3.700952 4.615263
## X738 3.205993 -2.254748 -3.314836 5.020540
## X739 2.830268 -2.317191 -4.290359 4.360856
## X742 2.475698 -2.073857 -3.763172 3.815845
## X743 2.688528 -2.296603 -3.853283 3.792962
## X744 2.718001 -2.431328 -4.588313 4.028805
## X745 2.670694 -2.392729 -4.827439 3.815845
## X746 2.893700 -2.333147 -2.429510 4.471360
## X747 3.001217 -2.319630 -3.195648 4.767568
## X748 3.100993 -2.772429 -6.095937 4.806397
## X749 2.569554 -2.437374 -5.057098 3.721768
## X750 3.085116 -2.212744 -3.674188 5.052569
## X751 3.279783 -2.170680 -3.045133 5.090835
## X752 3.011113 -2.343720 -3.788479 5.050733
## X753 2.702703 -2.401411 -3.289298 3.883102
## X754 3.109507 -2.401632 -4.272276 4.738557
## X755 2.715357 -2.378710 -4.422849 4.207786
## X756 2.922086 -2.454804 -4.690619 4.619646
## X757 2.844328 -2.325444 -4.743973 4.225973
## X758 2.855895 -2.295609 -4.735735 4.628388
## X759 2.766319 -2.515778 -3.811273 4.065190
## X760 3.140698 -2.230264 -1.999522 5.304618
## X761 3.063858 -2.436231 -4.022955 4.401206
## X762 2.902520 -2.666429 -5.000289 4.177151
## X764 3.144583 -2.259526 -2.971625 4.721123
## X765 2.793004 -2.533131 -4.143325 4.278004
## X766 3.104138 -2.120264 -3.396807 5.122583
## X767 3.083743 -2.607617 -2.938218 4.495315
## X768 3.113071 -2.462402 -3.297378 5.003747
## X770 2.973487 -2.344866 -4.029119 4.761381
## X771 2.961141 -2.411508 -3.661653 4.581390
## X772 3.283539 -2.170680 -2.971820 5.042143
## X773 3.167583 -2.022683 -3.471191 5.551376
## X774 2.923162 -2.306091 -3.957544 4.490698
## X775 2.814210 -2.421819 -3.953366 4.124564
## X776 2.848971 -2.217325 -4.382827 4.378696
## X777 3.008648 -2.433605 -4.197707 4.522074
## X778 3.115292 -2.300587 -3.492984 4.815841
## X779 2.558002 -2.503234 -4.393290 3.696783
## X780 3.097386 -2.397995 -3.321185 4.725319
## X781 2.941276 -2.422383 -4.058784 4.517508
## X782 2.916148 -2.169804 -3.585601 3.959079
## X783 3.241029 -2.296603 -2.458654 4.741335
## X784 3.170106 -2.357781 -3.294138 5.172099
## X785 2.829087 -2.276917 -3.370280 4.660893
## X786 2.909630 -2.368404 -3.171992 4.673060
## X787 2.861057 -2.519001 -3.467337 4.477566
## X789 3.480317 -2.474560 -3.632877 5.725074
## X794 2.834389 -2.471596 -4.506230 4.409194
## X795 2.600465 -2.312030 -4.248895 3.801311
## X796 2.738256 -2.250942 -4.820718 4.084542
## X797 2.741485 -2.480397 -3.439834 4.039126
## X798 3.176803 -2.537928 -3.774873 4.956498
## X799 3.105931 -2.218244 -3.255021 4.881604
## X800 2.948641 -2.170680 -3.908031 4.509879
## X802 3.520757 -2.553614 -4.988923 5.547844
## X804 2.766948 -2.469348 -4.565949 4.025039
## X805 3.056357 -2.400198 -4.280915 4.890111
## X807 3.066191 -2.482310 -3.463179 4.605738
## X808 3.326833 -2.498235 -3.559607 5.484477
## X809 3.670715 -2.321564 -3.580922 5.699444
## X811 2.710713 -2.535022 -5.312416 4.136255
## X814 3.157000 -2.275943 -3.425900 4.906389
## X815 2.988708 -2.234926 -4.278748 4.835955
## X816 2.858193 -2.629008 -4.154732 4.723921
## X817 2.646884 -2.434974 -2.883833 3.883102
## X818 3.227637 -2.337487 -4.570769 5.191870
## X819 2.703373 -2.289669 -4.399783 4.208655
## X820 3.159550 -2.295609 -3.242144 4.665910
## X822 2.986692 -2.242431 -2.984397 4.562778
## X823 2.837908 -2.294617 -4.197707 4.525113
## X824 2.961658 -2.268184 -4.017384 4.485299
## X825 2.836150 -2.210918 -3.619727 4.283900
## X826 3.359333 -2.379466 -3.043873 5.253674
## X827 2.848971 -2.013654 -3.081726 4.333015
## X828 3.144152 -2.199126 -2.814244 4.977398
## X829 3.513335 -2.241490 -3.666727 5.913428
## X830 3.298057 -2.302585 -4.149012 5.412105
## X831 3.138100 -2.446225 -4.504420 4.966653
## X832 3.096934 -2.408057 -2.816582 4.683033
## X833 2.964242 -2.545931 -4.698932 4.979289
## X834 3.094219 -2.330367 -4.417861 4.827259
## X835 3.437851 -2.357147 -4.214480 5.806493
## X837 3.083743 -2.531244 -3.727205 4.874383
## X838 2.785628 -2.361804 -3.826763 4.412381
## X839 3.015045 -2.223774 -3.039684 4.534207
## X840 2.822569 -2.744351 -5.596723 4.161235
## X841 2.568022 -2.319324 -4.490057 3.725005
## X844 3.197856 -2.424188 -4.449022 5.162741
## X845 2.854169 -2.099644 -4.207065 4.008967
## X846 2.650421 -2.366697 -4.710753 4.008967
## X847 2.994732 -2.416538 -4.497213 4.464360
## X849 2.719979 -2.352196 -4.172739 4.255969
## X850 3.280911 -2.282782 -3.709490 5.232668
## X851 2.640485 -2.549381 -4.239139 3.938613
## X852 2.900322 -2.266253 -3.817167 4.781263
## X854 2.753661 -2.548741 -3.228674 4.074426
## X855 2.912351 -2.477772 -4.827314 4.367359
## X856 3.033028 -2.452827 -2.965009 4.686585
## X857 2.574138 -2.665709 -4.308776 3.654863
## X858 2.993730 -2.523232 -2.493503 4.305672
## X859 2.938633 -2.440354 -4.272276 4.603535
## X860 2.982140 -2.435317 -2.240550 4.288943
## X861 2.949688 -2.408835 -3.856115 4.607206
## X863 2.859913 -2.480277 -4.199705 4.574706
## X864 2.623218 -2.336452 -4.439656 3.860909
## X866 2.513656 -2.462989 -4.405500 3.573135
## X867 2.898119 -2.305790 -4.600183 4.392389
## X868 2.899772 -2.721744 -4.285263 4.537986
## X869 3.139400 -2.287696 -4.238446 4.458120
## X870 2.939162 -2.162823 -3.441082 4.610871
## X871 2.990217 -2.470885 -3.390554 4.366547
## X872 3.172203 -2.225624 -3.050822 4.833951
## X873 2.923699 -2.236797 -4.671096 4.482982
## X874 2.899221 -2.424414 -3.629856 4.244873
## X875 3.198265 -2.593740 -3.756302 4.976136
## X876 2.761275 -2.463811 -4.845841 4.143420
## X877 2.667228 -2.658546 -5.321995 4.109184
## X878 2.542389 -2.606939 -5.587067 3.805473
## X880 2.950212 -2.428829 -4.698383 4.633474
## X881 2.753024 -2.574656 -5.112502 4.256821
## X882 2.593013 -2.431101 -3.587045 3.748604
## X883 2.372111 -2.453757 -4.312501 3.334618
## X884 2.923162 -2.231195 -4.266557 4.314822
## X885 2.824351 -2.463811 -5.805151 4.076268
## X886 2.644755 -2.559544 -5.155603 3.756060
## X887 2.937573 -2.328313 -4.098955 4.518270
## X888 2.939162 -2.305790 -2.719617 4.393191
## X889 2.833213 -2.582696 -4.529135 4.121857
## X892 2.589267 -2.176834 -3.904055 4.199074
## X893 3.068518 -2.145581 -3.716867 4.991235
## X894 2.721953 -2.444955 -4.255923 4.225110
## X895 2.850707 -2.274970 -4.654991 4.200819
## X896 2.555676 -2.374189 -4.481184 3.913012
## X898 3.030617 -2.146436 -3.871361 4.896635
## X899 3.085573 -2.149864 -3.281816 4.534207
## X900 2.741485 -2.354826 -3.428055 4.276316
## X901 2.962692 -2.345597 -3.424978 4.273783
## X903 2.693275 -2.488192 -4.944286 4.283059
## X907 3.064792 -2.395139 -3.244963 5.143922
## X908 2.863914 -2.295609 -4.234297 4.654427
## X909 3.189241 -2.235861 -3.926629 4.919334
## X910 2.805782 -2.327800 -3.489045 4.236301
## X911 2.823757 -2.467342 -3.360727 4.366547
## X912 2.705380 -2.270118 -3.975495 4.093700
## X913 3.076390 -2.323094 -3.390851 5.160983
## X914 2.737609 -2.162823 -4.512591 3.928802
## X915 2.688528 -2.314455 -3.063797 4.054986
## X917 2.690565 -2.421932 -4.195713 3.906069
## X918 2.774462 -2.399537 -4.762058 4.173623
## X921 2.955951 -2.085057 -2.724332 4.454212
## X922 2.859913 -2.163693 -3.159900 4.407598
## X923 3.248046 -2.278869 -3.716867 5.075113
## X924 2.644045 -2.620864 -3.385226 3.685830
## X925 2.948116 -2.434974 -3.161787 4.313992
## X926 2.922624 -2.223774 -3.236022 4.506820
## X927 2.785628 -2.436917 -4.835968 4.562030
## X928 2.740195 -2.489758 -3.540804 3.883102
## X929 2.907993 -2.293625 -4.712533 4.519792
## X931 3.071303 -2.455503 -3.889772 4.818532
## X932 2.935982 -2.335522 -4.106822 4.592488
## X933 2.906354 -2.334489 -4.014610 4.552291
## X934 2.830268 -2.533635 -4.382027 4.252561
## X935 3.080992 -2.391416 -3.960163 4.620376
## X936 3.289521 -2.312131 -3.063155 5.110649
## X937 2.891482 -2.382603 -4.277306 4.444020
## X938 2.847812 -2.366164 -4.499010 4.625478
## X939 3.086487 -2.241490 -3.568079 4.578422
## X941 2.580974 -2.530364 -4.209755 3.675924
## X942 2.714695 -2.301586 -3.621221 4.264469
## X943 2.853593 -2.359579 -3.965951 4.374653
## X944 2.776954 -2.488674 -4.143325 4.121857
## X946 3.006672 -2.399867 -2.571380 4.346158
## X949 2.935451 -2.107018 -3.090263 5.050733
## X950 2.561868 -2.089896 -4.030244 4.150563
## X952 3.123246 -2.668589 -3.087848 4.785356
## X953 2.861057 -2.261443 -3.295487 4.371414
## X954 2.618855 -2.481353 -3.876173 3.849729
## X955 3.148024 -2.443918 -4.050136 4.981809
## X956 2.645465 -2.512319 -3.497929 4.037253
## X957 2.782539 -2.655553 -4.258041 4.100089
## X958 2.740840 -2.481114 -2.707700 4.003267
## X959 3.144583 -2.292635 -3.194915 4.900541
## X960 2.503074 -2.302985 -4.294016 3.667081
## X962 2.994231 -2.154165 -3.530851 4.832614
## X963 3.103689 -2.148149 -3.289835 4.787400
## X964 2.874694 -2.273998 -4.115977 4.578422
## X965 2.843746 -2.520119 -4.363794 4.544020
## X967 2.859913 -2.520244 -3.359000 4.197328
## X968 2.696652 -2.558639 -4.220588 4.134460
## X970 3.045474 -2.095571 -3.291984 4.721123
## X971 2.389680 -2.422270 -4.419521 4.115529
## X972 2.906354 -2.610334 -3.293330 4.488386
## X973 2.783158 -2.314759 -4.354411 4.362484
## X974 2.704711 -2.443918 -4.768748 3.796097
## X975 2.922624 -2.298593 -3.847172 4.562030
## X976 2.698673 -2.354405 -4.385232 4.064264
## X977 3.061988 -2.583490 -3.105547 4.666626
## X978 3.028199 -2.267218 -3.585601 4.549287
## X980 2.866193 -2.423849 -4.610484 5.256500
## X981 2.823163 -2.228406 -4.671844 4.625478
## X982 3.076390 -2.529611 -3.621595 4.735776
## X983 3.096030 -2.463341 -3.572698 4.971715
## X984 3.394844 -2.486508 -4.249596 5.289608
## X986 3.077312 -2.259526 -4.089954 4.952042
## X987 3.048325 -2.189256 -3.247018 4.712008
## X988 2.498974 -2.432124 -4.371680 3.803393
## X989 3.063858 -2.284745 -4.662587 4.799630
## X990 2.946542 -2.459707 -3.888795 4.664478
## X992 2.773838 -2.218244 -3.909526 4.072581
## X993 2.951258 -2.401632 -3.572342 4.556042
## X994 2.950735 -2.230264 -3.971242 4.374653
## X995 3.057768 -2.511210 -5.167816 4.800985
## X997 3.090133 -2.430305 -3.840633 5.002499
## X1000 3.114848 -2.307899 -2.778526 4.706382
## X1001 2.872434 -2.249993 -3.412764 4.353519
## X1002 2.972464 -2.177716 -3.606378 4.523594
## X1003 3.089678 -2.284745 -3.197114 4.932212
## X1004 2.829678 -2.416426 -4.095345 4.151454
## X1008 2.975530 -2.443688 -4.514416 4.737167
## X1010 2.844909 -2.417435 -3.070887 4.656584
## X1011 3.235536 -2.485187 -3.144696 5.207462
## X1012 2.759377 -2.428489 -4.460204 3.862936
## X1013 2.907993 -2.508134 -4.319240 4.385955
## X1014 2.824351 -2.413852 -4.160484 4.279690
## X1016 3.333275 -2.302885 -3.904551 5.378924
## X1017 2.871302 -2.388252 -4.447312 4.339596
## X1018 2.962175 -2.478368 -3.888306 4.763445
## X1019 3.021400 -2.334695 -3.875209 5.004371
## X1020 3.069912 -2.716133 -2.802965 4.748958
## X1022 3.340385 -2.472543 -3.653898 5.343130
## X1023 2.637628 -2.208184 -4.381227 3.804433
## X1024 2.841998 -2.455387 -4.763111 4.290621
## X1025 3.424914 -2.381087 -4.385232 5.539246
## X1026 3.377246 -2.369045 -3.722229 5.393426
## X1028 3.224062 -2.480636 -4.713424 4.992489
## X1029 3.339322 -2.527731 -4.366153 5.290165
## X1030 3.301377 -2.312837 -3.808114 5.150996
## X1031 3.268428 -2.221927 -2.923598 4.960311
## X1032 3.295466 -2.659975 -4.086972 4.998750
## X1034 2.902520 -2.594811 -4.204383 4.432205
## X1035 3.002211 -2.490844 -2.709501 4.556042
## X1036 3.032064 -2.444725 -3.448604 4.553042
## X1037 2.895912 -2.487590 -4.376442 4.489157
## X1038 3.149740 -2.376339 -2.655695 4.796239
## X1039 2.900322 -2.141317 -3.440146 4.548535
## X1040 2.917230 -2.413964 -3.917538 4.565019
## X1041 3.337192 -2.435888 -4.163695 5.081777
## X1043 3.400197 -2.564080 -4.798391 5.352397
## X1045 2.755570 -2.403511 -4.019608 4.042868
## X1046 3.021887 -2.415642 -3.414891 4.684455
## X1047 2.810607 -2.684138 -4.331334 4.261073
## X1048 2.680336 -2.257612 -3.856588 4.269554
## X1050 2.892037 -2.398325 -4.094745 4.727414
## X1051 2.956991 -2.526854 -4.526359 4.624021
## X1052 2.643334 -2.233992 -4.232228 3.944480
## X1053 2.870169 -2.307598 -4.805330 4.403605
## X1054 2.423031 -2.260484 -4.006334 3.500171
## X1055 2.797891 -2.352406 -2.594141 4.194706
## X1056 2.824351 -2.448652 -4.273710 4.551541
## X1057 2.934920 -2.217325 -3.563834 4.897287
## X1058 2.783158 -2.182139 -4.348979 4.243161
## X1061 2.582487 -2.546186 -4.794637 3.954223
## X1062 3.023347 -2.301586 -3.611918 4.597650
## X1064 3.022374 -2.612513 -3.886355 4.768255
## X1065 3.006178 -2.344762 -4.289630 4.769627
## X1066 2.899772 -2.229335 -3.688080 4.290621
## X1067 2.851284 -2.415978 -4.289630 4.467474
## X1068 2.863343 -2.290657 -3.437654 4.351068
## X1069 3.055886 -2.221005 -3.578770 4.921270
## X1070 2.817801 -2.314354 -4.006883 4.141631
## X1071 3.198265 -2.152442 -3.255540 5.058072
## X1072 2.792391 -2.155891 -4.156007 4.226835
## X1073 2.987196 -2.370650 -3.394420 4.430625
## X1074 2.554899 -1.811554 -3.091803 3.746469
## X1076 2.997730 -2.210918 -3.817622 4.454212
## X1077 2.840247 -2.125276 -3.751606 4.169207
## X1078 2.753661 -2.361592 -4.420352 3.889116
## X1079 3.175968 -2.134532 -3.007805 5.257064
## X1080 2.687167 -2.513430 -3.240099 3.873042
## X1081 2.687847 -2.468404 -4.425352 3.871024
## X1082 3.021400 -2.201835 -3.787595 4.849274
## X1083 2.614472 -2.319528 -4.010739 3.836443
## X1084 2.948116 -2.384338 -4.116590 4.740641
## X1085 2.923699 -2.254748 -4.109864 4.363297
## X1086 3.024320 -2.236797 -3.975495 4.607940
## X1088 2.902520 -2.105375 -3.759731 4.469807
## X1090 2.631889 -1.987045 -3.673006 3.897110
## X1091 3.072693 -2.273026 -3.438276 4.660893
## X1092 2.987196 -2.463811 -5.132803 4.624750
## X1093 2.927453 -2.311021 -3.793796 4.565765
## X1094 2.752386 -2.354721 -3.891240 4.360042
## X1096 2.931194 -2.230264 -4.238446 4.530422
## X1097 2.507157 -2.407612 -4.691927 4.035378
## X1100 2.865624 -2.232127 -4.009085 4.735080
## X1101 2.996732 -2.286712 -4.058784 4.792163
## X1103 3.028683 -2.390761 -3.477323 4.676626
## X1104 2.869035 -2.334385 -3.387886 4.630569
## X1105 3.037833 -2.257612 -3.643524 4.554542
## X1107 3.196221 -2.090705 -3.353837 5.011847
## X1110 2.670002 -2.304186 -3.191261 4.073504
## X1111 3.218476 -2.426223 -3.067658 4.983068
## X1112 3.235536 -2.491931 -4.446458 5.018060
## X1115 3.145445 -2.380979 -3.863709 4.811124
## X1116 2.794228 -2.360850 -4.927168 4.258523
## X1117 2.808197 -2.421707 -3.478943 4.281375
## X1118 2.962175 -2.466163 -4.489167 4.562778
## X1120 3.067122 -2.599510 -4.506230 4.500691
## X1121 3.110845 -2.346955 -3.489701 4.754487
## X1122 3.382015 -2.491810 -4.395720 5.238363
## X1123 3.088311 -2.381628 -3.998671 4.522074
## X1124 3.364533 -2.510471 -3.838308 5.223531
## X1125 3.318178 -2.404618 -3.598673 5.175599
## X1126 2.975019 -2.299590 -3.806762 4.351068
## X1127 3.327910 -2.510471 -4.488276 5.136237
## X1128 3.121483 -2.468286 -3.070671 4.685165
## X1129 3.175133 -2.379358 -3.512576 5.303509
## X1130 3.301377 -2.309710 -3.620100 5.072078
## X1131 3.379974 -2.597090 -4.724179 5.365966
## X1132 3.421653 -2.255702 -3.027429 5.598355
## X1134 3.108614 -2.198225 -3.543568 4.622564
## X1135 3.341093 -2.324831 -3.720164 5.363258
## X1136 3.335058 -2.470412 -3.288494 5.129122
## X1137 3.378611 -2.138767 -2.787418 5.425895
## X1138 3.200304 -2.944469 -5.368740 4.895984
## smoothness_worst symmetry_worst
## X1 -1.401837 -0.9485186
## X2 -1.552206 -1.8138504
## X3 -1.468032 -1.3273311
## X6 -1.343543 -1.1682237
## X7 -1.468808 -1.6137366
## X8 -1.390483 -1.5377457
## X9 -1.373392 -1.0226796
## X10 -1.323124 -1.0268307
## X11 -1.577215 -1.6835473
## X12 -1.486854 -1.2478490
## X13 -1.644401 -1.5488672
## X15 -1.391541 -1.3351867
## X16 -1.382068 -1.0794752
## X17 -1.460319 -1.6339618
## X18 -1.344209 -1.2853171
## X20 -1.469584 -1.6655621
## X21 -1.520913 -1.5444060
## X22 -1.515956 -2.0406102
## X23 -1.489239 -0.9275957
## X25 -1.338889 -1.3273311
## X26 -1.429814 -1.1365073
## X27 -1.437240 -1.0628195
## X28 -1.510212 -2.1336080
## X30 -1.544904 -1.8096966
## X32 -1.396495 -0.8985507
## X35 -1.467258 -1.0606668
## X38 -1.677854 -2.4867416
## X39 -1.694115 -3.0556014
## X40 -1.406135 -1.7749290
## X42 -1.305088 -1.6735918
## X43 -1.548331 -0.9266552
## X45 -1.445488 -1.2910944
## X46 -1.381719 -1.2448554
## X47 -1.527155 -1.5892127
## X48 -1.345211 -1.2025631
## X49 -1.448886 -1.8159323
## X52 -1.619427 -2.1292007
## X53 -1.593905 -1.7898096
## X54 -1.534290 -1.6387702
## X55 -1.489637 -1.8669460
## X56 -1.547473 -1.4783924
## X57 -1.401122 -1.3628885
## X58 -1.498044 -1.2888687
## X59 -1.652261 -2.0497116
## X61 -1.536401 -1.3534209
## X62 -1.395786 -1.6686442
## X64 -1.670976 -1.4910873
## X65 -1.323775 -1.4385789
## X67 -1.428706 -1.7280747
## X68 -1.530085 -2.0824829
## X69 -1.453440 -1.0758313
## X71 -1.571881 -1.9598138
## X73 -1.415161 -1.4747157
## X74 -1.480924 -1.9306463
## X75 -1.579449 -1.9088155
## X78 -1.454964 -1.2655509
## X79 -1.395786 -0.7116307
## X80 -1.530504 -1.7938986
## X81 -1.425391 -1.8055564
## X82 -1.433147 -1.3676525
## X83 -1.419530 -2.1213029
## X84 -1.488443 -2.1603515
## X85 -1.494430 -1.4406136
## X86 -1.486061 -1.2902036
## X87 -1.523404 -1.6393726
## X88 -1.547473 -1.1798150
## X89 -1.524236 -1.6686442
## X90 -1.535556 -1.5629177
## X91 -1.607252 -1.9825153
## X92 -1.544049 -1.9559389
## X93 -1.659708 -2.4422513
## X94 -1.509803 -1.8647794
## X95 -1.427599 -1.7569038
## X96 -1.573211 -1.2928782
## X97 -1.595732 -2.2380872
## X99 -1.473086 -1.7986859
## X100 -1.473086 -1.8362356
## X102 -1.415525 -1.6935843
## X103 -1.603546 -1.8532856
## X104 -1.424656 -1.9058328
## X105 -1.560451 -1.7622177
## X106 -1.320199 -1.5651813
## X108 -1.575878 -1.6618738
## X109 -1.374083 -1.1407587
## X110 -1.374774 -1.7602223
## X111 -1.459169 -1.9738585
## X112 -1.531344 -2.2390390
## X113 -1.717446 -2.0841850
## X114 -1.525902 -2.0970190
## X115 -1.366172 -1.6973693
## X116 -1.446619 -2.0505420
## X117 -1.578108 -2.9206783
## X118 -1.315025 -1.3402996
## X119 -1.322473 -1.4953499
## X120 -1.627498 -0.8624052
## X121 -1.428706 -1.6417852
## X123 -1.375812 -1.5234428
## X125 -1.650288 -2.4194174
## X126 -1.587999 -2.1134503
## X127 -1.457637 -1.3951992
## X128 -1.652261 -1.7522726
## X129 -1.490832 -1.9298875
## X130 -1.536401 -1.4789186
## X131 -1.474648 -1.3956885
## X132 -1.429444 -1.7549169
## X133 -1.487251 -1.3903175
## X134 -1.558708 -1.8327119
## X135 -1.459935 -1.5869030
## X137 -1.538094 -2.8336824
## X138 -1.573211 -1.8662234
## X139 -1.480924 -1.4229317
## X140 -1.498446 -2.3622810
## X141 -1.553935 -1.5892127
## X142 -1.520085 -1.7850558
## X144 -1.520913 -1.3571983
## X146 -1.475821 -1.8298999
## X147 -1.491231 -0.6320347
## X148 -1.664214 -1.7450294
## X149 -1.519257 -1.8554329
## X150 -1.677342 -2.1256850
## X152 -1.398983 -1.4700056
## X154 -1.508987 -1.7404419
## X155 -1.454964 -1.2237105
## X156 -1.561324 -1.5845978
## X158 -1.726991 -1.9785734
## X159 -1.536401 -1.9888468
## X160 -1.581689 -1.8221988
## X161 -1.502079 -1.5533453
## X162 -1.603084 -2.0463949
## X163 -1.470361 -1.3136025
## X164 -1.464938 -2.1996053
## X165 -1.556535 -1.3384376
## X166 -1.640502 -1.8880790
## X168 -1.583037 -1.7729134
## X169 -1.494831 -2.3033148
## X171 -1.491231 -1.7615522
## X172 -1.484873 -1.7241946
## X175 -1.625591 -1.8418938
## X176 -1.585739 -1.9283710
## X177 -1.525485 -1.9118051
## X178 -1.479351 -1.6190575
## X179 -1.763600 -2.1748286
## X180 -1.585739 -2.7364649
## X182 -1.450022 -1.1242373
## X183 -1.479744 -1.4114596
## X184 -1.604471 -2.6997069
## X187 -1.553935 -1.5322240
## X188 -1.516368 -1.9436150
## X189 -1.502888 -1.5355339
## X190 -1.616129 -2.0144782
## X191 -1.434262 -0.7826129
## X192 -1.694063 -2.2845122
## X193 -1.824755 -2.5774861
## X194 -1.345545 -1.5272765
## X196 -1.615659 -1.6369648
## X197 -1.361734 -1.6037499
## X198 -1.724360 -2.1091071
## X200 -1.427231 -0.9009890
## X201 -1.473867 -1.8720155
## X202 -1.492829 -1.6961064
## X203 -1.433147 -1.5366393
## X204 -1.209422 -1.0042088
## X205 -1.475039 -1.6429933
## X206 -1.450022 -1.4224305
## X207 -1.479351 -1.6581966
## X208 -1.609112 -1.4948162
## X209 -1.505728 -1.1128640
## X210 -1.559143 -2.1522740
## X211 -1.578555 -1.7081572
## X212 -1.534290 -1.9722906
## X214 -1.550051 -2.9953191
## X216 -1.461856 -1.3195307
## X217 -1.483291 -1.4314859
## X218 -1.686819 -1.7345684
## X220 -1.482107 -1.8397690
## X222 -1.494831 -1.6125574
## X223 -1.482896 -1.6184651
## X224 -1.427231 -1.1650483
## X225 -1.535978 -1.9952086
## X226 -1.527155 -1.6143267
## X228 -1.597563 -1.6798045
## X229 -1.556969 -1.7622177
## X230 -1.348222 -1.4264463
## X232 -1.703821 -1.7470007
## X233 -1.663010 -1.7068831
## X234 -1.558708 -2.0513730
## X235 -1.445111 -1.8090056
## X236 -1.531344 -2.2390390
## X239 -1.623214 -2.6004371
## X240 -1.499252 -1.7443730
## X241 -1.535133 -1.9762139
## X242 -1.644401 -1.7132666
## X243 -1.506542 -1.4773407
## X244 -1.695111 -1.8756488
## X245 -1.460703 -1.7011661
## X246 -1.440979 -1.7248404
## X248 -1.545331 -1.8932321
## X249 -1.446997 -1.4254410
## X250 -1.489637 -1.8749213
## X251 -1.563950 -1.5771365
## X253 -1.370978 -1.8145440
## X254 -1.478958 -1.5702903
## X255 -1.447752 -1.4406136
## X256 -1.442104 -1.6107907
## X257 -1.533868 -1.7675541
## X258 -1.352253 -1.5039222
## X259 -1.445111 -1.4937496
## X263 -1.598480 -1.6113793
## X264 -1.621791 -1.8611765
## X266 -1.484873 -1.7345684
## X267 -1.563074 -1.6885556
## X268 -1.660208 -2.0439127
## X269 -1.544476 -1.3314830
## X270 -1.511439 -1.9185567
## X272 -1.502079 -1.8256936
## X273 -1.537670 -1.7575668
## X274 -1.459169 -1.7522726
## X275 -1.519671 -1.9968038
## X276 -1.501675 -2.2447636
## X277 -1.543196 -1.8083150
## X278 -1.550051 -1.9474538
## X279 -1.638076 -2.1389154
## X281 -1.346547 -1.5039222
## X282 -1.644889 -1.5915268
## X283 -1.439855 -1.3379726
## X284 -1.499252 -1.8000570
## X285 -1.666428 -2.4732544
## X286 -1.643912 -1.9960060
## X287 -1.606788 -2.0282975
## X288 -1.682219 -2.1621529
## X289 -1.648811 -1.6791818
## X290 -1.605860 -1.4990925
## X291 -1.663562 -2.1959068
## X292 -1.520499 -1.6748318
## X293 -1.453060 -1.4401046
## X294 -1.497641 -1.5915268
## X295 -1.526737 -2.1091071
## X296 -1.582138 -1.7642162
## X297 -1.698055 -2.3203498
## X298 -1.597105 -2.4969375
## X299 -1.691083 -1.8954469
## X300 -1.594361 -2.2380872
## X301 -1.448508 -1.6711155
## X302 -1.638076 -1.8597382
## X303 -1.506542 -1.4847225
## X304 -1.480137 -2.2514717
## X306 -1.685886 -1.5114749
## X307 -1.601239 -1.8844106
## X308 -1.669710 -1.6569733
## X309 -1.713449 -2.2005314
## X310 -1.654735 -2.3571020
## X312 -1.657217 -1.9762139
## X313 -1.571438 -1.9497625
## X314 -1.618012 -1.4350269
## X316 -1.612379 -2.5679247
## X318 -1.471917 -1.7715714
## X319 -1.559579 -1.5719981
## X320 -1.747337 -2.5871077
## X321 -1.484477 -1.9163022
## X322 -1.636142 -1.6184651
## X323 -1.429075 -2.0978789
## X324 -1.412624 -0.6826914
## X325 -1.544476 -1.8771050
## X326 -1.491630 -1.8575837
## X329 -1.413348 -1.5903692
## X331 -1.471528 -1.6399753
## X332 -1.530924 -1.3351867
## X333 -1.475821 -1.4857809
## X335 -1.559143 -1.9574875
## X336 -1.466097 -1.9050882
## X337 -1.594818 -2.0555355
## X339 -1.484477 -1.7177547
## X340 -1.440979 -1.9276134
## X341 -1.528409 -1.6196501
## X342 -1.554367 -1.6624877
## X343 -1.478172 -1.4810257
## X344 -1.560888 -1.1446385
## X345 -1.461856 -1.8034913
## X346 -1.501270 -2.0538690
## X347 -1.530504 -1.7267800
## X348 -1.560015 -1.5869030
## X349 -1.435005 -1.7456862
## X350 -1.528827 -1.5782813
## X351 -1.664817 -1.8270941
## X352 -1.427968 -1.0696662
## X353 -1.435377 -1.2924320
## X354 -1.388371 -1.8822146
## X355 -1.722066 -1.9405520
## X358 -1.589811 -2.1151915
## X359 -1.655231 -2.0538690
## X360 -1.512259 -2.0373158
## X361 -1.699057 -2.2323896
## X362 -1.595732 -1.8947083
## X363 -1.526737 -1.8180177
## X365 -1.563074 -1.8201066
## X366 -1.514721 -1.9155516
## X367 -1.535133 -1.4969524
## X369 -1.500059 -1.9920239
## X370 -1.528409 -1.8201066
## X371 -1.479351 -0.8795614
## X372 -1.602161 -2.0104407
## X374 -1.423555 -1.8568664
## X375 -1.611911 -1.4694835
## X376 -1.553502 -1.5617875
## X377 -1.594361 -1.9245875
## X378 -1.610510 -1.8532856
## X379 -1.536401 -1.4365479
## X380 -1.221525 -1.1031070
## X381 -1.406135 -1.4590896
## X382 -1.616599 -1.5344296
## X383 -1.725620 -2.2727630
## X384 -1.474648 -1.7668858
## X385 -1.583937 -1.8235956
## X386 -1.520913 -2.0185279
## X387 -1.649795 -1.8655012
## X390 -1.546616 -1.9405520
## X392 -1.461856 -2.0447396
## X394 -1.445865 -1.2325409
## X395 -1.472696 -1.6220237
## X398 -1.686456 -2.4856130
## X399 -1.604934 -1.9298875
## X400 -1.549621 -1.7938986
## X401 -1.316639 -1.5109338
## X402 -1.495633 -2.0323892
## X403 -1.697160 -1.5316732
## X405 -1.639046 -2.1721026
## X406 -1.500059 -2.2154336
## X407 -1.566145 -1.7945814
## X408 -1.693330 -2.0096347
## X409 -1.453440 -1.6155076
## X410 -1.551343 -1.4025614
## X411 -1.464552 -1.6680271
## X412 -1.497641 -1.6527015
## X413 -1.620845 -2.1030497
## X414 -1.625115 -1.5561527
## X415 -1.592083 -1.5174436
## X416 -1.475039 -1.6066785
## X417 -1.436867 -1.7319669
## X418 -1.440230 -1.6496593
## X419 -1.520085 -1.7966320
## X421 -1.547473 -1.6303680
## X423 -1.479351 -1.7884496
## X424 -1.592538 -1.8180177
## X425 -1.486061 -1.5377457
## X426 -1.602161 -2.1265631
## X428 -1.524652 -1.6729722
## X431 -1.477780 -1.7358713
## X432 -1.465711 -1.9559389
## X433 -1.386615 -1.6321636
## X434 -1.489239 -1.6472311
## X436 -1.405058 -1.5471923
## X437 -1.562636 -1.4747157
## X438 -1.570996 -1.8313051
## X439 -1.605860 -1.9896404
## X440 -1.645868 -2.3274232
## X441 -1.488841 -1.9683790
## X443 -1.627498 -2.6386361
## X444 -1.673109 -1.8497148
## X445 -1.541491 -1.7516124
## X446 -1.521328 -1.9230772
## X447 -1.484873 -1.6686442
## X448 -1.557403 -1.3333333
## X449 -1.619427 -2.0234038
## X450 -1.498044 -2.1996053
## X451 -1.690457 -2.1657627
## X452 -1.436122 -2.1766489
## X453 -1.565266 -2.0430863
## X455 -1.557838 -1.4974871
## X456 -1.581241 -2.2678960
## X457 -1.482896 -1.7241946
## X459 -1.560888 -2.1648594
## X460 -1.609578 -2.1513794
## X461 -1.409733 -1.6454131
## X462 -1.502484 -1.8917577
## X464 -1.508579 -1.5300225
## X465 -1.533447 -2.2304954
## X466 -1.568346 -1.7496338
## X467 -1.504916 -1.9505330
## X468 -1.490035 -1.6172812
## X469 -1.565705 -2.1693820
## X471 -1.553935 -1.5499851
## X472 -1.642449 -2.0790851
## X473 -1.630847 -1.8575837
## X475 -1.512668 -1.9367333
## X476 -1.515133 -1.6478377
## X477 -1.589811 -1.9730743
## X480 -1.494430 -1.4720966
## X481 -1.565266 -2.0773893
## X482 -1.631327 -2.1204282
## X483 -1.488046 -1.5207121
## X485 -1.431294 -1.9551652
## X487 -1.594818 -2.0364934
## X488 -1.441353 -1.4996282
## X493 -1.522157 -1.5076925
## X494 -1.691396 -2.1885391
## X495 -1.605860 -1.8583015
## X496 -1.561761 -2.1091071
## X497 -1.434262 -1.5190767
## X498 -1.535978 -1.6303680
## X500 -1.460319 -2.0160966
## X502 -1.342543 -1.3099702
## X504 -1.569228 -1.7087947
## X505 -1.307322 -1.6285751
## X506 -1.274705 -1.7476584
## X508 -1.387668 -1.7932162
## X510 -1.363438 -1.6435978
## X511 -1.627020 -1.9193090
## X512 -1.596189 -2.1398020
## X513 -1.419165 -1.3402996
## X515 -1.548761 -2.1867032
## X516 -1.453060 -1.6155076
## X517 -1.449644 -1.6072651
## X519 -1.463396 -1.9359709
## X520 -1.456108 -1.6090266
## X521 -1.324101 -1.2964545
## X522 -1.508579 -1.5595304
## X524 -1.475430 -1.7470007
## X525 -1.477780 -1.9984010
## X526 -1.395077 -1.6618738
## X527 -1.401122 -1.3719574
## X528 -1.529246 -1.5863263
## X529 -1.487648 -2.3033148
## X530 -1.438733 -1.7925342
## X531 -1.505728 -2.0177170
## X532 -1.427968 -1.5322240
## X533 -1.541066 -1.7756016
## X537 -1.493229 -1.8504281
## X538 -1.351243 -1.7776215
## X539 -1.544476 -1.6166897
## X540 -1.411177 -1.7864122
## X541 -1.507356 -2.1442434
## X543 -1.633249 -1.8334159
## X544 -1.627498 -2.0217765
## X546 -1.561761 -1.8910211
## X547 -1.532184 -1.8626165
## X548 -1.461471 -1.8554329
## X551 -1.662208 -2.0340294
## X552 -1.620372 -1.5527846
## X553 -1.553935 -2.0765423
## X554 -1.612846 -2.0530365
## X555 -1.556969 -2.1065078
## X556 -1.491630 -2.2390390
## X557 -1.540640 -2.2051713
## X558 -1.627020 -2.0201513
## X559 -1.649795 -2.2088944
## X560 -1.526737 -2.3519414
## X562 -1.700430 -3.0539870
## X564 -1.482501 -1.6954754
## X565 -1.481318 -2.4065265
## X567 -1.596189 -2.2466769
## X568 -1.391894 -1.1284389
## X570 -1.401837 -0.9485186
## X571 -1.552206 -1.8138504
## X572 -1.468032 -1.3273311
## X573 -1.246824 -0.4547732
## X575 -1.343543 -1.1682237
## X576 -1.468808 -1.6137366
## X577 -1.390483 -1.5377457
## X578 -1.373392 -1.0226796
## X579 -1.323124 -1.0268307
## X580 -1.577215 -1.6835473
## X583 -1.599859 -1.7735849
## X584 -1.391541 -1.3351867
## X585 -1.382068 -1.0794752
## X586 -1.460319 -1.6339618
## X587 -1.344209 -1.2853171
## X588 -1.442104 -1.8014296
## X589 -1.469584 -1.6655621
## X590 -1.520913 -1.5444060
## X591 -1.515956 -2.0406102
## X592 -1.489239 -0.9275957
## X593 -1.484873 -1.7648831
## X595 -1.429814 -1.1365073
## X601 -1.396495 -0.8985507
## X602 -1.397561 -1.3662211
## X603 -1.443230 -1.3004918
## X604 -1.467258 -1.0606668
## X605 -1.423188 -0.8679915
## X606 -1.467258 -1.3375078
## X607 -1.677854 -2.4867416
## X608 -1.694115 -3.0556014
## X609 -1.406135 -1.7749290
## X610 -1.617070 -1.6551407
## X611 -1.305088 -1.6735918
## X613 -1.435377 -1.2707870
## X614 -1.445488 -1.2910944
## X615 -1.381719 -1.2448554
## X618 -1.448886 -1.8159323
## X619 -1.585739 -1.7326167
## X620 -1.621318 -2.0547020
## X621 -1.619427 -2.1292007
## X622 -1.593905 -1.7898096
## X623 -1.534290 -1.6387702
## X624 -1.489637 -1.8669460
## X625 -1.547473 -1.4783924
## X627 -1.498044 -1.2888687
## X628 -1.652261 -2.0497116
## X629 -1.363097 -1.5245369
## X630 -1.536401 -1.3534209
## X631 -1.395786 -1.6686442
## X632 -1.395431 -1.7502930
## X633 -1.670976 -1.4910873
## X635 -1.392600 -1.4705280
## X636 -1.428706 -1.7280747
## X637 -1.530085 -2.0824829
## X638 -1.453440 -1.0758313
## X640 -1.571881 -1.9598138
## X642 -1.415161 -1.4747157
## X643 -1.480924 -1.9306463
## X644 -1.579449 -1.9088155
## X645 -1.446619 -1.8851434
## X646 -1.465324 -1.8418938
## X647 -1.454964 -1.2655509
## X648 -1.395786 -0.7116307
## X649 -1.530504 -1.7938986
## X651 -1.433147 -1.3676525
## X652 -1.419530 -2.1213029
## X653 -1.488443 -2.1603515
## X654 -1.494430 -1.4406136
## X655 -1.486061 -1.2902036
## X656 -1.523404 -1.6393726
## X657 -1.547473 -1.1798150
## X658 -1.524236 -1.6686442
## X659 -1.535556 -1.5629177
## X660 -1.607252 -1.9825153
## X661 -1.544049 -1.9559389
## X662 -1.659708 -2.4422513
## X663 -1.509803 -1.8647794
## X664 -1.427599 -1.7569038
## X667 -1.519257 -2.5478042
## X668 -1.473086 -1.7986859
## X669 -1.473086 -1.8362356
## X670 -1.540640 -1.8844106
## X671 -1.415525 -1.6935843
## X672 -1.603546 -1.8532856
## X674 -1.560451 -1.7622177
## X675 -1.320199 -1.5651813
## X677 -1.575878 -1.6618738
## X678 -1.374083 -1.1407587
## X679 -1.374774 -1.7602223
## X680 -1.459169 -1.9738585
## X681 -1.531344 -2.2390390
## X682 -1.717446 -2.0841850
## X683 -1.525902 -2.0970190
## X685 -1.446619 -2.0505420
## X686 -1.578108 -2.9206783
## X687 -1.315025 -1.3402996
## X689 -1.627498 -0.8624052
## X690 -1.428706 -1.6417852
## X692 -1.375812 -1.5234428
## X693 -1.520499 -1.7209705
## X694 -1.650288 -2.4194174
## X695 -1.587999 -2.1134503
## X697 -1.652261 -1.7522726
## X698 -1.490832 -1.9298875
## X699 -1.536401 -1.4789186
## X700 -1.474648 -1.3956885
## X701 -1.429444 -1.7549169
## X702 -1.487251 -1.3903175
## X703 -1.558708 -1.8327119
## X704 -1.459935 -1.5869030
## X705 -1.477780 -1.7602223
## X706 -1.538094 -2.8336824
## X707 -1.573211 -1.8662234
## X708 -1.480924 -1.4229317
## X711 -1.520085 -1.7850558
## X712 -1.480137 -1.9344474
## X714 -1.625591 -2.1702883
## X715 -1.475821 -1.8298999
## X716 -1.491231 -0.6320347
## X717 -1.664214 -1.7450294
## X718 -1.519257 -1.8554329
## X719 -1.677342 -2.1256850
## X720 -1.527155 -1.5377457
## X721 -1.398983 -1.4700056
## X722 -1.529246 -1.5874800
## X723 -1.508987 -1.7404419
## X724 -1.454964 -1.2237105
## X725 -1.561324 -1.5845978
## X726 -1.478172 -2.0299327
## X728 -1.536401 -1.9888468
## X729 -1.581689 -1.8221988
## X731 -1.603084 -2.0463949
## X733 -1.464938 -2.1996053
## X734 -1.556535 -1.3384376
## X735 -1.640502 -1.8880790
## X736 -1.471139 -2.3747864
## X737 -1.583037 -1.7729134
## X738 -1.494831 -2.3033148
## X739 -1.561761 -2.0790851
## X742 -1.435005 -1.5267281
## X743 -1.561761 -2.5859017
## X744 -1.625591 -1.8418938
## X745 -1.585739 -1.9283710
## X746 -1.525485 -1.9118051
## X747 -1.479351 -1.6190575
## X748 -1.763600 -2.1748286
## X749 -1.585739 -2.7364649
## X750 -1.457254 -1.7424059
## X751 -1.450022 -1.1242373
## X752 -1.479744 -1.4114596
## X753 -1.604471 -2.6997069
## X754 -1.525485 -1.5494260
## X755 -1.438733 -1.6929546
## X756 -1.553935 -1.5322240
## X757 -1.516368 -1.9436150
## X758 -1.502888 -1.5355339
## X759 -1.616129 -2.0144782
## X760 -1.434262 -0.7826129
## X761 -1.694063 -2.2845122
## X762 -1.824755 -2.5774861
## X764 -1.519257 -1.6514837
## X765 -1.615659 -1.6369648
## X766 -1.361734 -1.6037499
## X767 -1.724360 -2.1091071
## X768 -1.516780 -1.5394073
## X770 -1.473867 -1.8720155
## X771 -1.492829 -1.6961064
## X772 -1.433147 -1.5366393
## X773 -1.209422 -1.0042088
## X774 -1.475039 -1.6429933
## X775 -1.450022 -1.4224305
## X776 -1.479351 -1.6581966
## X777 -1.609112 -1.4948162
## X778 -1.505728 -1.1128640
## X779 -1.559143 -2.1522740
## X780 -1.578555 -1.7081572
## X781 -1.534290 -1.9722906
## X782 -1.594818 -2.9266464
## X783 -1.550051 -2.9953191
## X784 -1.424656 -0.9098798
## X785 -1.461856 -1.3195307
## X786 -1.483291 -1.4314859
## X787 -1.686819 -1.7345684
## X789 -1.482107 -1.8397690
## X794 -1.535978 -1.9952086
## X795 -1.527155 -1.6143267
## X796 -1.508987 -1.9110570
## X797 -1.597563 -1.6798045
## X798 -1.556969 -1.7622177
## X799 -1.348222 -1.4264463
## X800 -1.373392 -1.5869030
## X802 -1.663010 -1.7068831
## X804 -1.445111 -1.8090056
## X805 -1.531344 -2.2390390
## X807 -1.556535 -2.2276590
## X808 -1.623214 -2.6004371
## X809 -1.499252 -1.7443730
## X811 -1.644401 -1.7132666
## X814 -1.460703 -1.7011661
## X815 -1.440979 -1.7248404
## X816 -1.613783 -1.8021165
## X817 -1.545331 -1.8932321
## X818 -1.446997 -1.4254410
## X819 -1.489637 -1.8749213
## X820 -1.563950 -1.5771365
## X822 -1.370978 -1.8145440
## X823 -1.478958 -1.5702903
## X824 -1.447752 -1.4406136
## X825 -1.442104 -1.6107907
## X826 -1.533868 -1.7675541
## X827 -1.352253 -1.5039222
## X828 -1.445111 -1.4937496
## X829 -1.313415 -1.3748365
## X830 -1.438360 -1.5629177
## X831 -1.551343 -2.0389620
## X832 -1.598480 -1.6113793
## X833 -1.621791 -1.8611765
## X834 -1.425023 -1.5267281
## X835 -1.484873 -1.7345684
## X837 -1.660208 -2.0439127
## X838 -1.544476 -1.3314830
## X839 -1.511439 -1.9185567
## X840 -1.738456 -2.0340294
## X841 -1.502079 -1.8256936
## X844 -1.519671 -1.9968038
## X845 -1.501675 -2.2447636
## X846 -1.543196 -1.8083150
## X847 -1.550051 -1.9474538
## X849 -1.575433 -1.6791818
## X850 -1.346547 -1.5039222
## X851 -1.644889 -1.5915268
## X852 -1.439855 -1.3379726
## X854 -1.666428 -2.4732544
## X855 -1.643912 -1.9960060
## X856 -1.606788 -2.0282975
## X857 -1.682219 -2.1621529
## X858 -1.648811 -1.6791818
## X859 -1.605860 -1.4990925
## X860 -1.663562 -2.1959068
## X861 -1.520499 -1.6748318
## X863 -1.497641 -1.5915268
## X864 -1.526737 -2.1091071
## X866 -1.698055 -2.3203498
## X867 -1.597105 -2.4969375
## X868 -1.691083 -1.8954469
## X869 -1.594361 -2.2380872
## X870 -1.448508 -1.6711155
## X871 -1.638076 -1.8597382
## X872 -1.506542 -1.4847225
## X873 -1.480137 -2.2514717
## X874 -1.593905 -2.2562827
## X875 -1.685886 -1.5114749
## X876 -1.601239 -1.8844106
## X877 -1.669710 -1.6569733
## X878 -1.713449 -2.2005314
## X880 -1.558708 -1.3869129
## X881 -1.657217 -1.9762139
## X882 -1.571438 -1.9497625
## X883 -1.618012 -1.4350269
## X884 -1.506542 -1.5680169
## X885 -1.612379 -2.5679247
## X886 -1.662208 -2.1766489
## X887 -1.471917 -1.7715714
## X888 -1.559579 -1.5719981
## X889 -1.747337 -2.5871077
## X892 -1.429075 -2.0978789
## X893 -1.412624 -0.6826914
## X894 -1.544476 -1.8771050
## X895 -1.491630 -1.8575837
## X896 -1.533868 -2.3643578
## X898 -1.413348 -1.5903692
## X899 -1.475039 -1.8235956
## X900 -1.471528 -1.6399753
## X901 -1.530924 -1.3351867
## X903 -1.583937 -1.7695612
## X907 -1.447374 -1.2973504
## X908 -1.484477 -1.7177547
## X909 -1.440979 -1.9276134
## X910 -1.528409 -1.6196501
## X911 -1.554367 -1.6624877
## X912 -1.478172 -1.4810257
## X913 -1.560888 -1.1446385
## X914 -1.461856 -1.8034913
## X915 -1.501270 -2.0538690
## X917 -1.560015 -1.5869030
## X918 -1.435005 -1.7456862
## X921 -1.427968 -1.0696662
## X922 -1.435377 -1.2924320
## X923 -1.388371 -1.8822146
## X924 -1.722066 -1.9405520
## X925 -1.616129 -2.3426983
## X926 -1.508171 -1.5845978
## X927 -1.589811 -2.1151915
## X928 -1.655231 -2.0538690
## X929 -1.512259 -2.0373158
## X931 -1.595732 -1.8947083
## X932 -1.526737 -1.8180177
## X933 -1.510212 -2.0875956
## X934 -1.563074 -1.8201066
## X935 -1.514721 -1.9155516
## X936 -1.535133 -1.4969524
## X937 -1.498044 -1.5256320
## X938 -1.500059 -1.9920239
## X939 -1.528409 -1.8201066
## X941 -1.602161 -2.0104407
## X942 -1.572324 -1.8277950
## X943 -1.423555 -1.8568664
## X944 -1.611911 -1.4694835
## X946 -1.594361 -1.9245875
## X949 -1.221525 -1.1031070
## X950 -1.406135 -1.4590896
## X952 -1.725620 -2.2727630
## X953 -1.474648 -1.7668858
## X954 -1.583937 -1.8235956
## X955 -1.520913 -2.0185279
## X956 -1.649795 -1.8655012
## X957 -1.743107 -1.9668175
## X958 -1.613314 -2.3063064
## X959 -1.546616 -1.9405520
## X960 -1.508171 -1.6904389
## X962 -1.381022 -1.5427374
## X963 -1.445865 -1.2325409
## X964 -1.472696 -1.6220237
## X965 -1.630368 -1.9817260
## X967 -1.686456 -2.4856130
## X968 -1.604934 -1.9298875
## X970 -1.316639 -1.5109338
## X971 -1.495633 -2.0323892
## X972 -1.697160 -1.5316732
## X973 -1.581241 -1.4831367
## X974 -1.639046 -2.1721026
## X975 -1.500059 -2.2154336
## X976 -1.566145 -1.7945814
## X977 -1.693330 -2.0096347
## X978 -1.453440 -1.6155076
## X980 -1.464552 -1.6680271
## X981 -1.497641 -1.6527015
## X982 -1.620845 -2.1030497
## X983 -1.625115 -1.5561527
## X984 -1.592083 -1.5174436
## X986 -1.436867 -1.7319669
## X987 -1.440230 -1.6496593
## X988 -1.520085 -1.7966320
## X989 -1.533447 -1.6661779
## X990 -1.547473 -1.6303680
## X992 -1.479351 -1.7884496
## X993 -1.592538 -1.8180177
## X994 -1.486061 -1.5377457
## X995 -1.602161 -2.1265631
## X997 -1.524652 -1.6729722
## X1000 -1.477780 -1.7358713
## X1001 -1.465711 -1.9559389
## X1002 -1.386615 -1.6321636
## X1003 -1.489239 -1.6472311
## X1004 -1.560888 -1.9801488
## X1008 -1.605860 -1.9896404
## X1010 -1.488841 -1.9683790
## X1011 -1.471139 -2.0000000
## X1012 -1.627498 -2.6386361
## X1013 -1.673109 -1.8497148
## X1014 -1.541491 -1.7516124
## X1016 -1.484873 -1.6686442
## X1017 -1.557403 -1.3333333
## X1018 -1.619427 -2.0234038
## X1019 -1.498044 -2.1996053
## X1020 -1.690457 -2.1657627
## X1022 -1.565266 -2.0430863
## X1023 -1.506542 -1.9178048
## X1024 -1.557838 -1.4974871
## X1025 -1.581241 -2.2678960
## X1026 -1.482896 -1.7241946
## X1028 -1.560888 -2.1648594
## X1029 -1.609578 -2.1513794
## X1030 -1.409733 -1.6454131
## X1031 -1.502484 -1.8917577
## X1032 -1.654239 -2.1300810
## X1034 -1.533447 -2.2304954
## X1035 -1.568346 -1.7496338
## X1036 -1.504916 -1.9505330
## X1037 -1.490035 -1.6172812
## X1038 -1.565705 -2.1693820
## X1039 -1.347217 -1.8778337
## X1040 -1.553935 -1.5499851
## X1041 -1.642449 -2.0790851
## X1043 -1.692284 -2.0748497
## X1045 -1.515133 -1.6478377
## X1046 -1.589811 -1.9730743
## X1047 -1.694272 -1.8640580
## X1048 -1.504510 -1.6879284
## X1050 -1.565266 -2.0773893
## X1051 -1.631327 -2.1204282
## X1052 -1.488046 -1.5207121
## X1053 -1.569228 -1.9856773
## X1054 -1.431294 -1.9551652
## X1055 -1.579897 -1.5185321
## X1056 -1.594818 -2.0364934
## X1057 -1.441353 -1.4996282
## X1058 -1.436867 -1.7769479
## X1061 -1.669659 -2.7364649
## X1062 -1.522157 -1.5076925
## X1064 -1.605860 -1.8583015
## X1065 -1.561761 -2.1091071
## X1066 -1.434262 -1.5190767
## X1067 -1.535978 -1.6303680
## X1068 -1.480531 -1.9920239
## X1069 -1.460319 -2.0160966
## X1070 -1.598022 -2.2864798
## X1071 -1.342543 -1.3099702
## X1072 -1.416979 -1.5606584
## X1073 -1.569228 -1.7087947
## X1074 -1.307322 -1.6285751
## X1076 -1.484477 -1.8426028
## X1077 -1.387668 -1.7932162
## X1078 -1.503699 -2.1702883
## X1079 -1.363438 -1.6435978
## X1080 -1.627020 -1.9193090
## X1081 -1.596189 -2.1398020
## X1082 -1.419165 -1.3402996
## X1083 -1.558708 -1.9028570
## X1084 -1.548761 -2.1867032
## X1085 -1.453060 -1.6155076
## X1086 -1.449644 -1.6072651
## X1088 -1.463396 -1.9359709
## X1090 -1.324101 -1.2964545
## X1091 -1.508579 -1.5595304
## X1092 -1.610510 -1.9551652
## X1093 -1.475430 -1.7470007
## X1094 -1.477780 -1.9984010
## X1096 -1.401122 -1.3719574
## X1097 -1.529246 -1.5863263
## X1100 -1.505728 -2.0177170
## X1101 -1.427968 -1.5322240
## X1103 -1.615659 -1.5245369
## X1104 -1.474257 -2.1802966
## X1105 -1.539366 -1.6055062
## X1107 -1.351243 -1.7776215
## X1110 -1.507356 -2.1442434
## X1111 -1.509395 -1.5427374
## X1112 -1.633249 -1.8334159
## X1115 -1.561761 -1.8910211
## X1116 -1.532184 -1.8626165
## X1117 -1.461471 -1.8554329
## X1118 -1.569228 -1.9590379
## X1120 -1.662208 -2.0340294
## X1121 -1.620372 -1.5527846
## X1122 -1.553935 -2.0765423
## X1123 -1.612846 -2.0530365
## X1124 -1.556969 -2.1065078
## X1125 -1.491630 -2.2390390
## X1126 -1.540640 -2.2051713
## X1127 -1.627020 -2.0201513
## X1128 -1.649795 -2.2088944
## X1129 -1.526737 -2.3519414
## X1130 -1.550912 -2.2163702
## X1131 -1.700430 -3.0539870
## X1132 -1.478565 -1.1276737
## X1134 -1.481318 -2.4065265
## X1135 -1.583937 -1.9436150
## X1136 -1.596189 -2.2466769
## X1137 -1.391894 -1.1284389
## X1138 -1.714905 -1.7326167
##
## $usekernel
## [1] FALSE
##
## $varnames
## [1] "texture_mean" "smoothness_mean" "compactness_se" "texture_worst"
## [5] "smoothness_worst" "symmetry_worst"
##
## $xNames
## [1] "texture_mean" "smoothness_mean" "compactness_se" "texture_worst"
## [5] "smoothness_worst" "symmetry_worst"
##
## $problemType
## [1] "Classification"
##
## $tuneValue
## fL usekernel adjust
## 1 2 FALSE FALSE
##
## $obsLevels
## [1] "B" "M"
## attr(,"ordered")
## [1] FALSE
##
## $param
## list()
##
## attr(,"class")
## [1] "NaiveBayes"
# Display the full resampling results table for the tuned Naive Bayes model
# (extraction had garbled the token order: target name was displaced after `$results`)
BAL_NB_Tune$results
## usekernel fL adjust ROC Sens Spec ROCSD SensSD
## 1 FALSE 2 FALSE 0.8873525 0.8552525 0.7605882 0.02125535 0.03336686
## 2 TRUE 2 FALSE NaN NaN NaN NA NA
## SpecSD
## 1 0.04679768
## 2 NA
##################################
# Extracting the cross-validated AUROC for the
# best Naive Bayes hyperparameter combination
##################################
# Subset the resampling results to the row matching bestTune
# (usekernel, adjust and fL all equal their optimal values),
# keeping only the ROC column; outer parentheses auto-print the value.
(BAL_NB_Train_AUROC <- BAL_NB_Tune$results[BAL_NB_Tune$results$usekernel == BAL_NB_Tune$bestTune$usekernel &
                                             BAL_NB_Tune$results$adjust == BAL_NB_Tune$bestTune$adjust &
                                             BAL_NB_Tune$results$fL == BAL_NB_Tune$bestTune$fL,
                                           c("ROC")])
## [1] 0.8873525
##################################
# Identifying and plotting the
# best model predictors
##################################
# model does not support variable importance measurement
##################################
# Independently evaluating the model
# on the test set
##################################
# Build a frame pairing the observed test-set diagnosis with the
# Naive Bayes class-probability predictions (columns .B and .M).
# predict(..., type = "prob") is applied to all predictors except the response.
BAL_NB_Test <- data.frame(BAL_NB_Test_Observed = MA_Test$diagnosis,
                          BAL_NB_Test_Predicted = predict(BAL_NB_Tune,
                                                          MA_Test[, !names(MA_Test) %in% c("diagnosis")],
                                                          type = "prob"))
BAL_NB_Test
## BAL_NB_Test_Observed BAL_NB_Test_Predicted.B BAL_NB_Test_Predicted.M
## 4 M 8.418867e-06 9.999916e-01
## 5 M 9.947802e-01 5.219820e-03
## 14 M 6.863230e-01 3.136770e-01
## 19 M 1.487708e-01 8.512292e-01
## 24 M 2.813452e-01 7.186548e-01
## 29 M 1.320029e-03 9.986800e-01
## 31 M 8.611625e-03 9.913884e-01
## 33 M 3.780788e-03 9.962192e-01
## 34 M 1.443492e-02 9.855651e-01
## 36 M 3.884397e-03 9.961156e-01
## 37 M 3.340474e-02 9.665953e-01
## 41 M 9.299440e-01 7.005599e-02
## 44 M 3.661805e-02 9.633819e-01
## 50 B 7.332968e-01 2.667032e-01
## 51 B 9.811725e-01 1.882750e-02
## 60 B 9.987229e-01 1.277059e-03
## 63 M 2.245885e-02 9.775412e-01
## 66 M 7.032281e-03 9.929677e-01
## 70 B 9.984081e-01 1.591913e-03
## 72 B 9.969872e-01 3.012839e-03
## 76 M 5.911188e-01 4.088812e-01
## 77 B 9.997844e-01 2.155780e-04
## 98 B 7.482378e-01 2.517622e-01
## 101 M 3.492114e-01 6.507886e-01
## 107 B 7.730772e-02 9.226923e-01
## 122 M 4.615057e-01 5.384943e-01
## 124 B 9.997801e-01 2.199472e-04
## 136 M 4.127956e-01 5.872044e-01
## 143 B 5.884881e-01 4.115119e-01
## 145 B 9.993874e-01 6.125853e-04
## 151 B 5.333960e-01 4.666040e-01
## 153 B 8.349582e-01 1.650418e-01
## 157 M 1.931211e-01 8.068789e-01
## 167 B 9.999992e-01 8.482450e-07
## 170 B 9.702524e-01 2.974760e-02
## 173 M 9.813259e-01 1.867412e-02
## 174 B 9.974796e-01 2.520361e-03
## 181 M 5.137427e-02 9.486257e-01
## 185 M 5.043588e-01 4.956412e-01
## 186 B 9.677181e-01 3.228189e-02
## 195 M 8.459365e-02 9.154064e-01
## 199 M 1.767134e-01 8.232866e-01
## 213 M 9.316257e-01 6.837434e-02
## 215 M 1.321134e-03 9.986789e-01
## 219 M 1.859137e-01 8.140863e-01
## 221 B 9.992881e-01 7.119289e-04
## 227 B 9.935420e-01 6.458020e-03
## 231 M 9.611758e-02 9.038824e-01
## 237 M 3.190678e-02 9.680932e-01
## 238 M 8.379804e-01 1.620196e-01
## 247 B 9.931943e-01 6.805651e-03
## 252 B 9.770018e-01 2.299823e-02
## 260 M 1.145796e-03 9.988542e-01
## 261 M 3.113683e-02 9.688632e-01
## 262 M 8.395667e-01 1.604333e-01
## 265 M 1.729267e-01 8.270733e-01
## 271 B 9.999991e-01 8.758747e-07
## 280 B 9.825885e-01 1.741150e-02
## 305 B 9.765564e-01 2.344359e-02
## 311 B 8.728224e-01 1.271776e-01
## 315 B 6.945252e-01 3.054748e-01
## 317 B 9.999968e-01 3.184820e-06
## 327 B 9.996640e-01 3.360282e-04
## 328 B 9.997532e-01 2.468207e-04
## 330 M 1.151531e-01 8.848469e-01
## 334 B 9.993082e-01 6.917658e-04
## 338 M 2.069080e-02 9.793092e-01
## 356 B 9.581310e-01 4.186896e-02
## 357 B 2.390588e-01 7.609412e-01
## 364 B 8.143429e-01 1.856571e-01
## 368 B 7.906987e-01 2.093013e-01
## 373 M 9.604987e-01 3.950130e-02
## 388 B 9.999464e-01 5.360802e-05
## 389 B 9.967851e-01 3.214911e-03
## 391 B 9.994831e-01 5.168794e-04
## 393 M 1.943944e-02 9.805606e-01
## 396 B 9.950741e-01 4.925881e-03
## 397 B 3.901841e-01 6.098159e-01
## 404 B 9.410957e-01 5.890427e-02
## 420 B 6.205150e-01 3.794850e-01
## 422 B 9.717101e-01 2.828993e-02
## 427 B 8.754185e-01 1.245815e-01
## 429 B 9.998516e-01 1.484434e-04
## 430 B 9.990260e-01 9.740379e-04
## 435 B 9.885298e-01 1.147020e-02
## 442 M 1.714923e-01 8.285077e-01
## 454 B 9.970224e-01 2.977611e-03
## 458 B 8.144607e-01 1.855393e-01
## 463 B 9.808887e-01 1.911127e-02
## 470 B 8.588458e-02 9.141154e-01
## 474 B 9.838966e-01 1.610339e-02
## 478 B 9.998429e-01 1.570680e-04
## 479 B 9.162635e-01 8.373653e-02
## 484 B 9.840122e-01 1.598781e-02
## 486 B 8.471689e-01 1.528311e-01
## 489 B 8.396732e-01 1.603268e-01
## 490 M 7.804773e-01 2.195227e-01
## 491 B 7.257897e-01 2.742103e-01
## 492 B 9.999859e-01 1.405010e-05
## 499 M 6.982981e-01 3.017019e-01
## 501 B 9.879214e-01 1.207857e-02
## 503 B 5.931235e-01 4.068765e-01
## 507 B 4.010532e-01 5.989468e-01
## 509 B 9.970054e-01 2.994587e-03
## 514 B 9.983239e-01 1.676132e-03
## 518 M 4.200733e-01 5.799267e-01
## 523 B 9.951034e-01 4.896611e-03
## 534 M 5.478869e-01 4.521131e-01
## 535 B 5.845559e-01 4.154441e-01
## 536 M 3.012400e-01 6.987600e-01
## 542 B 8.485578e-02 9.151442e-01
## 545 B 7.804988e-01 2.195012e-01
## 549 B 9.727728e-01 2.722715e-02
## 550 B 5.809883e-01 4.190117e-01
## 561 B 2.193662e-01 7.806338e-01
## 563 M 1.423350e-03 9.985767e-01
## 566 M 1.961924e-01 8.038076e-01
## 569 B 9.999903e-01 9.708791e-06
## 574 M 9.947802e-01 5.219820e-03
## 581 M 9.135214e-02 9.086479e-01
## 582 M 1.856151e-01 8.143849e-01
## 594 M 6.759268e-03 9.932407e-01
## 596 M 3.445315e-03 9.965547e-01
## 597 M 5.599734e-01 4.400266e-01
## 598 M 1.320029e-03 9.986800e-01
## 599 M 9.731682e-01 2.683176e-02
## 600 M 8.611625e-03 9.913884e-01
## 612 M 5.645765e-03 9.943542e-01
## 616 B 9.570635e-01 4.293653e-02
## 617 M 8.665746e-03 9.913343e-01
## 626 M 4.669960e-02 9.533004e-01
## 634 M 3.012508e-03 9.969875e-01
## 639 B 9.984081e-01 1.591913e-03
## 641 B 9.969872e-01 3.012839e-03
## 650 B 8.920921e-02 9.107908e-01
## 665 M 9.192660e-02 9.080734e-01
## 666 B 9.860749e-01 1.392512e-02
## 673 B 2.933858e-01 7.066142e-01
## 676 B 7.730772e-02 9.226923e-01
## 684 B 6.234754e-01 3.765246e-01
## 688 M 3.351730e-03 9.966483e-01
## 691 M 4.615057e-01 5.384943e-01
## 696 M 9.143037e-02 9.085696e-01
## 709 B 9.951737e-01 4.826330e-03
## 710 B 9.999904e-01 9.624905e-06
## 713 B 9.354000e-01 6.460004e-02
## 727 B 9.923178e-01 7.682178e-03
## 730 B 1.784058e-01 8.215942e-01
## 732 M 9.242316e-02 9.075768e-01
## 740 B 9.994710e-01 5.289834e-04
## 741 M 7.095309e-01 2.904691e-01
## 763 M 2.659527e-03 9.973405e-01
## 769 M 9.119714e-03 9.908803e-01
## 788 M 1.859137e-01 8.140863e-01
## 790 B 9.992881e-01 7.119289e-04
## 791 B 9.837746e-01 1.622540e-02
## 792 B 7.322536e-01 2.677464e-01
## 793 M 1.451720e-02 9.854828e-01
## 801 B 9.807689e-01 1.923105e-02
## 803 M 3.144980e-01 6.855020e-01
## 806 M 3.190678e-02 9.680932e-01
## 810 B 9.954171e-01 4.582943e-03
## 812 B 1.753027e-01 8.246973e-01
## 813 B 9.524541e-01 4.754592e-02
## 821 B 9.770018e-01 2.299823e-02
## 836 B 7.036007e-01 2.963993e-01
## 842 M 2.889183e-01 7.110817e-01
## 843 B 9.827448e-01 1.725516e-02
## 848 B 9.994349e-01 5.651353e-04
## 853 M 3.506119e-01 6.493881e-01
## 862 B 7.849815e-01 2.150185e-01
## 865 B 9.998603e-01 1.396787e-04
## 879 B 9.999949e-01 5.115656e-06
## 890 B 8.090039e-01 1.909961e-01
## 891 M 9.859185e-01 1.408150e-02
## 897 B 9.997532e-01 2.468207e-04
## 902 B 4.109252e-01 5.890748e-01
## 904 B 9.792663e-01 2.073368e-02
## 905 M 1.257893e-01 8.742107e-01
## 906 B 9.988321e-01 1.167869e-03
## 916 B 9.339058e-01 6.609417e-02
## 919 B 9.531747e-01 4.682528e-02
## 920 B 9.998864e-01 1.136243e-04
## 930 B 9.999744e-01 2.560407e-05
## 940 M 2.528686e-03 9.974713e-01
## 945 B 9.669483e-01 3.305167e-02
## 947 B 9.167996e-01 8.320035e-02
## 948 B 9.765901e-01 2.340987e-02
## 951 B 9.964543e-01 3.545711e-03
## 961 B 8.111540e-01 1.888460e-01
## 966 B 3.901841e-01 6.098159e-01
## 969 B 9.113059e-01 8.869412e-02
## 979 B 5.619843e-01 4.380157e-01
## 985 B 1.493080e-01 8.506920e-01
## 991 B 9.717101e-01 2.828993e-02
## 996 B 8.754185e-01 1.245815e-01
## 998 B 9.998516e-01 1.484434e-04
## 999 B 9.990260e-01 9.740379e-04
## 1005 M 6.556220e-02 9.344378e-01
## 1006 B 8.166196e-01 1.833804e-01
## 1007 B 9.949829e-01 5.017136e-03
## 1009 B 9.998712e-01 1.287548e-04
## 1015 B 2.149525e-01 7.850475e-01
## 1021 M 1.238714e-01 8.761286e-01
## 1027 B 8.144607e-01 1.855393e-01
## 1033 B 8.411580e-01 1.588420e-01
## 1042 B 9.994990e-01 5.009985e-04
## 1044 B 9.564314e-01 4.356862e-02
## 1049 M 1.943295e-01 8.056705e-01
## 1059 M 7.804773e-01 2.195227e-01
## 1060 B 7.257897e-01 2.742103e-01
## 1063 B 9.999978e-01 2.202499e-06
## 1075 B 6.721752e-01 3.278248e-01
## 1087 M 4.200733e-01 5.799267e-01
## 1089 B 6.945497e-01 3.054503e-01
## 1095 B 9.728210e-01 2.717904e-02
## 1098 B 9.955472e-01 4.452756e-03
## 1099 B 9.972506e-01 2.749390e-03
## 1102 B 9.966100e-01 3.389963e-03
## 1106 M 9.370223e-02 9.062978e-01
## 1108 B 8.002142e-01 1.997858e-01
## 1109 B 5.369302e-02 9.463070e-01
## 1113 B 7.557723e-01 2.442277e-01
## 1114 B 7.804988e-01 2.195012e-01
## 1119 B 5.809883e-01 4.190117e-01
## 1133 M 3.531121e-02 9.646888e-01
##################################
# Reporting the independent evaluation results
# for the test set
##################################
# Compute the test-set ROC curve for the Naive Bayes model using the
# predicted probability of the "M" (malignant) class; levels are reversed
# so pROC treats "M" as the positive class.
BAL_NB_Test_ROC <- roc(response = BAL_NB_Test$BAL_NB_Test_Observed,
                       predictor = BAL_NB_Test$BAL_NB_Test_Predicted.M,
                       levels = rev(levels(BAL_NB_Test$BAL_NB_Test_Observed)))
# Extract the scalar AUROC; outer parentheses auto-print the value.
(BAL_NB_Test_AUROC <- auc(BAL_NB_Test_ROC)[1])
## [1] 0.8969651
##################################
# Consolidating the base learners
# with optimal hyperparameters
##################################
# Fit all base learners with their previously identified optimal
# hyperparameters under a common resampling scheme (RKFold_Control),
# so their resamples are paired and directly comparable.
set.seed(12345678)
BAL_LIST <- caretList(x = MA_Train[, !names(MA_Train) %in% c("diagnosis")],
                      y = MA_Train$diagnosis,
                      trControl = RKFold_Control,
                      metric = "ROC",
                      tuneList = list(
                        # Linear Discriminant Analysis (standardized predictors)
                        BAL_LDA = caretModelSpec(method = "lda",
                                                 preProcess = c("center", "scale")),
                        # CART with complexity parameter fixed at its optimum
                        BAL_CART = caretModelSpec(method = "rpart",
                                                  tuneGrid = data.frame(cp = 0.001)),
                        # Radial-kernel SVM (standardized predictors)
                        BAL_SVM_R = caretModelSpec(method = "svmRadial",
                                                   preProcess = c("center", "scale"),
                                                   tuneGrid = data.frame(C = 2048, sigma = 0.1790538)),
                        # 1-Nearest Neighbor (standardized predictors)
                        BAL_KNN = caretModelSpec(method = "knn",
                                                 preProcess = c("center", "scale"),
                                                 tuneGrid = data.frame(k = 1)),
                        # Naive Bayes with Laplace correction fL = 2
                        BAL_NB = caretModelSpec(method = "nb",
                                                tuneGrid = data.frame(usekernel = FALSE, fL = 2, adjust = FALSE)))
)
BAL_LIST
## $BAL_LDA
## Linear Discriminant Analysis
##
## Pre-processing: centered (6), scaled (6)
## Resampling results:
##
## ROC Sens Spec
## 0.8762815 0.8720214 0.7105882
##
##
## $BAL_CART
## CART
##
## No pre-processing
## Resampling results:
##
## ROC Sens Spec
## 0.8522563 0.8531716 0.7235294
##
## Tuning parameter 'cp' was held constant at a value of 0.001
##
## $BAL_SVM_R
## Support Vector Machines with Radial Basis Function Kernel
##
## Pre-processing: centered (6), scaled (6)
## Resampling results:
##
## ROC Sens Spec
## 0.9106172 0.9437193 0.7976471
##
## Tuning parameter 'sigma' was held constant at a value of 0.1790538
##
## Tuning parameter 'C' was held constant at a value of 2048
##
## $BAL_KNN
## k-Nearest Neighbors
##
## Pre-processing: centered (6), scaled (6)
## Resampling results:
##
## ROC Sens Spec
## 0.902676 0.9171167 0.8882353
##
## Tuning parameter 'k' was held constant at a value of 1
##
## $BAL_NB
## Naive Bayes
##
## No pre-processing
## Resampling results:
##
## ROC Sens Spec
## 0.8873915 0.852418 0.7629412
##
## Tuning parameter 'fL' was held constant at a value of 2
## Tuning
## parameter 'usekernel' was held constant at a value of FALSE
## Tuning
## parameter 'adjust' was held constant at a value of FALSE
##
## attr(,"class")
## [1] "caretList" "list"
##################################
# Comparing the base learners
# with optimal hyperparameters
##################################
# Collect the 25 resample results (5-fold x 5 repeats) from every
# base learner into one resamples object for side-by-side comparison.
BAL_LIST_RESAMPLES <- resamples(BAL_LIST)
summary(BAL_LIST_RESAMPLES)
##
## Call:
## summary.resamples(object = BAL_LIST_RESAMPLES)
##
## Models: BAL_LDA, BAL_CART, BAL_SVM_R, BAL_KNN, BAL_NB
## Number of resamples: 25
##
## ROC
## Min. 1st Qu. Median Mean 3rd Qu. Max. NA's
## BAL_LDA 0.8102302 0.8641641 0.8828689 0.8762815 0.8913829 0.9296675 0
## BAL_CART 0.7877967 0.8317136 0.8553708 0.8522563 0.8726343 0.9186061 0
## BAL_SVM_R 0.8675181 0.8980818 0.9066047 0.9106172 0.9307276 0.9522704 0
## BAL_KNN 0.8533282 0.8855779 0.9037084 0.9026760 0.9237616 0.9608696 0
## BAL_NB 0.8363003 0.8818369 0.8868286 0.8873915 0.8980908 0.9285166 0
##
## Sens
## Min. 1st Qu. Median Mean 3rd Qu. Max. NA's
## BAL_LDA 0.8260870 0.8508772 0.8684211 0.8720214 0.8947368 0.9304348 0
## BAL_CART 0.7368421 0.8333333 0.8596491 0.8531716 0.8956522 0.9122807 0
## BAL_SVM_R 0.8947368 0.9217391 0.9478261 0.9437193 0.9565217 0.9824561 0
## BAL_KNN 0.8684211 0.9035088 0.9210526 0.9171167 0.9385965 0.9652174 0
## BAL_NB 0.7807018 0.8333333 0.8508772 0.8524180 0.8771930 0.9391304 0
##
## Spec
## Min. 1st Qu. Median Mean 3rd Qu. Max. NA's
## BAL_LDA 0.6323529 0.6764706 0.7058824 0.7105882 0.7352941 0.8235294 0
## BAL_CART 0.6029412 0.6764706 0.7205882 0.7235294 0.7647059 0.8382353 0
## BAL_SVM_R 0.6911765 0.7647059 0.8088235 0.7976471 0.8382353 0.9117647 0
## BAL_KNN 0.7941176 0.8529412 0.8823529 0.8882353 0.9264706 1.0000000 0
## BAL_NB 0.6323529 0.7352941 0.7647059 0.7629412 0.7941176 0.8823529 0
# Dot plot of mean resampled ROC/Sens/Spec per learner, and a
# scatter-plot matrix of the resample metrics across learners.
dotplot(BAL_LIST_RESAMPLES)
splom(BAL_LIST_RESAMPLES)
##################################
# Measuring the correlation among
# base learners
##################################
# Pairwise correlation of the learners' resample performance; low
# correlations suggest the learners make different errors, which is
# favorable for stacking them into an ensemble.
(BAL_LIST_COR <- modelCor(resamples(BAL_LIST)))
## BAL_LDA BAL_CART BAL_SVM_R BAL_KNN BAL_NB
## BAL_LDA 1.00000000 -0.06342888 0.122350102 0.08479269 -0.131117037
## BAL_CART -0.06342888 1.00000000 0.078299147 -0.24343738 -0.081248867
## BAL_SVM_R 0.12235010 0.07829915 1.000000000 0.21687863 -0.005995804
## BAL_KNN 0.08479269 -0.24343738 0.216878631 1.00000000 -0.189499723
## BAL_NB -0.13111704 -0.08124887 -0.005995804 -0.18949972 1.000000000
##################################
# Formulating a stacked model
# using the base learners
# and a linear regression meta-model
##################################
set.seed(12345678)
# Stack the base learners with a GLM (logistic) meta-learner trained
# on the base learners' out-of-fold predictions.
MEL_LR <- caretStack(BAL_LIST,
                     metric = "ROC",
                     trControl = RKFold_Control,
                     method = "glm")
print(MEL_LR)
## The following models were ensembled: BAL_LDA, BAL_CART, BAL_SVM_R, BAL_KNN, BAL_NB
##
## caret::train model:
## Generalized Linear Model
##
## 912 samples
## 5 predictor
## 2 classes: 'B', 'M'
##
## No pre-processing
## Resampling: Cross-Validated (5 fold, repeated 5 times)
## Summary of sample sizes: 729, 729, 730, 730, 730, 730, ...
## Resampling results:
##
## ROC Sens Spec
## 0.9699332 0.9520763 0.9147059
##
##
## Final model:
##
## Call: NULL
##
## Coefficients:
## (Intercept) BAL_LDA BAL_CART BAL_SVM_R BAL_KNN BAL_NB
## -5.0427 -0.1463 -0.4083 4.4379 4.9060 1.4449
##
## Degrees of Freedom: 911 Total (i.e. Null); 906 Residual
## Null Deviance: 1205
## Residual Deviance: 351.6 AIC: 363.6
# Cross-validated AUROC of the GLM meta-model (single row of results,
# since glm has no tuning grid); printed via the outer parentheses.
(MEL_LR_Train_AUROC <- MEL_LR$ens_model$results$ROC)
## [1] 0.9699332
##################################
# Independently evaluating the model
# on the test set
##################################
# Pair observed diagnoses with the stacked GLM's predicted class
# probabilities on the held-out test set.
MEL_LR_Test <- data.frame(MEL_LR_Test_Observed = MA_Test$diagnosis,
                          MEL_LR_Test_Predicted = predict(MEL_LR,
                                                          MA_Test[, !names(MA_Test) %in% c("diagnosis")]))
MEL_LR_Test
## MEL_LR_Test_Observed MEL_LR_Test_Predicted.B MEL_LR_Test_Predicted.M
## 1 M 0.039723064 0.960276936
## 2 M 0.992813330 0.007186670
## 3 M 0.093030883 0.906969117
## 4 M 0.055240146 0.944759854
## 5 M 0.011613039 0.988386961
## 6 M 0.005824800 0.994175200
## 7 M 0.021610536 0.978389464
## 8 M 0.025207859 0.974792141
## 9 M 0.036324604 0.963675396
## 10 M 0.005506428 0.994493572
## 11 M 0.033738105 0.966261895
## 12 M 0.087769722 0.912230278
## 13 M 0.015512894 0.984487106
## 14 B 0.960717222 0.039282778
## 15 B 0.987974397 0.012025603
## 16 B 0.993627901 0.006372099
## 17 M 0.012285296 0.987714704
## 18 M 0.039694581 0.960305419
## 19 B 0.993312443 0.006687557
## 20 B 0.990153313 0.009846687
## 21 M 0.068869028 0.931130972
## 22 B 0.990441703 0.009558297
## 23 B 0.977328360 0.022671640
## 24 M 0.032664172 0.967335828
## 25 B 0.023097789 0.976902211
## 26 M 0.890173319 0.109826681
## 27 B 0.993612505 0.006387495
## 28 M 0.017430390 0.982569610
## 29 B 0.953826350 0.046173650
## 30 B 0.993579684 0.006420316
## 31 B 0.987581569 0.012418431
## 32 B 0.991942308 0.008057692
## 33 M 0.048984333 0.951015667
## 34 B 0.993586401 0.006413599
## 35 B 0.983657463 0.016342537
## 36 M 0.115240160 0.884759840
## 37 B 0.993436612 0.006563388
## 38 M 0.019644163 0.980355837
## 39 M 0.029504261 0.970495739
## 40 B 0.984934307 0.015065693
## 41 M 0.013315236 0.986684764
## 42 M 0.048339823 0.951660177
## 43 M 0.085933965 0.914066035
## 44 M 0.005187671 0.994812329
## 45 M 0.042009276 0.957990724
## 46 B 0.993564944 0.006435056
## 47 B 0.993681727 0.006318273
## 48 M 0.035496426 0.964503574
## 49 M 0.136258886 0.863741114
## 50 M 0.077280505 0.922719495
## 51 B 0.992173894 0.007826106
## 52 B 0.276818129 0.723181871
## 53 M 0.016843944 0.983156056
## 54 M 0.038812249 0.961187751
## 55 M 0.111282956 0.888717044
## 56 M 0.017119890 0.982880110
## 57 B 0.993585994 0.006414006
## 58 B 0.989476557 0.010523443
## 59 B 0.986887769 0.013112231
## 60 B 0.975359027 0.024640973
## 61 B 0.989153126 0.010846874
## 62 B 0.993588575 0.006411425
## 63 B 0.993582913 0.006417087
## 64 B 0.993596387 0.006403613
## 65 M 0.014414467 0.985585533
## 66 B 0.975880185 0.024119815
## 67 M 0.006160309 0.993839691
## 68 B 0.984121889 0.015878111
## 69 B 0.922598056 0.077401944
## 70 B 0.962926075 0.037073925
## 71 B 0.961265926 0.038734074
## 72 M 0.115531624 0.884468376
## 73 B 0.993555676 0.006444324
## 74 B 0.991393186 0.008606814
## 75 B 0.993615606 0.006384394
## 76 M 0.022182563 0.977817437
## 77 B 0.990570316 0.009429684
## 78 B 0.949004486 0.050995514
## 79 B 0.992911402 0.007088598
## 80 B 0.984687898 0.015312102
## 81 B 0.929374115 0.070625885
## 82 B 0.957013816 0.042986184
## 83 B 0.993595135 0.006404865
## 84 B 0.993581652 0.006418348
## 85 B 0.970466553 0.029533447
## 86 M 0.043074904 0.956925096
## 87 B 0.993616743 0.006383257
## 88 B 0.045437260 0.954562740
## 89 B 0.992822679 0.007177321
## 90 B 0.906184184 0.093815816
## 91 B 0.991562837 0.008437163
## 92 B 0.992418479 0.007581521
## 93 B 0.967041347 0.032958653
## 94 B 0.992072925 0.007927075
## 95 B 0.991702005 0.008297995
## 96 B 0.984330688 0.015669312
## 97 M 0.413153339 0.586846661
## 98 B 0.252157172 0.747842828
## 99 B 0.993579045 0.006420955
## 100 M 0.220300330 0.779699670
## 101 B 0.988352487 0.011647513
## 102 B 0.981963831 0.018036169
## 103 B 0.937622039 0.062377961
## 104 B 0.993529244 0.006470756
## 105 B 0.993407340 0.006592660
## 106 M 0.936920039 0.063079961
## 107 B 0.993227309 0.006772691
## 108 M 0.056762017 0.943237983
## 109 B 0.949467953 0.050532047
## 110 M 0.056549992 0.943450008
## 111 B 0.928109326 0.071890674
## 112 B 0.933426348 0.066573652
## 113 B 0.969844773 0.030155227
## 114 B 0.977505336 0.022494664
## 115 B 0.947481014 0.052518986
## 116 M 0.006970534 0.993029466
## 117 M 0.020414638 0.979585362
## 118 B 0.991566610 0.008433390
## 119 M 0.992813330 0.007186670
## 120 M 0.030643981 0.969356019
## 121 M 0.033350529 0.966649471
## 122 M 0.009924604 0.990075396
## 123 M 0.005767727 0.994232273
## 124 M 0.066841205 0.933158795
## 125 M 0.005824800 0.994175200
## 126 M 0.117332343 0.882667657
## 127 M 0.021610536 0.978389464
## 128 M 0.036127391 0.963872609
## 129 B 0.968712476 0.031287524
## 130 M 0.006267137 0.993732863
## 131 M 0.023341681 0.976658319
## 132 M 0.007272537 0.992727463
## 133 B 0.993312443 0.006687557
## 134 B 0.990153313 0.009846687
## 135 B 0.932029832 0.067970168
## 136 M 0.030779260 0.969220740
## 137 B 0.993168100 0.006831900
## 138 B 0.942764322 0.057235678
## 139 B 0.023097789 0.976902211
## 140 B 0.952843940 0.047156060
## 141 M 0.009582801 0.990417199
## 142 M 0.890173319 0.109826681
## 143 M 0.033125162 0.966874838
## 144 B 0.993571447 0.006428553
## 145 B 0.993597135 0.006402865
## 146 B 0.994173240 0.005826760
## 147 B 0.982658594 0.017341406
## 148 B 0.886682831 0.113317169
## 149 M 0.012155682 0.987844318
## 150 B 0.993615936 0.006384064
## 151 M 0.064487566 0.935512434
## 152 M 0.005844224 0.994155776
## 153 M 0.005396298 0.994603702
## 154 M 0.042009276 0.957990724
## 155 B 0.993564944 0.006435056
## 156 B 0.993281946 0.006718054
## 157 B 0.985898056 0.014101944
## 158 M 0.007205215 0.992794785
## 159 B 0.989245052 0.010754948
## 160 M 0.055782402 0.944217598
## 161 M 0.136258886 0.863741114
## 162 B 0.991698882 0.008301118
## 163 B 0.914571647 0.085428353
## 164 B 0.969423444 0.030576556
## 165 B 0.276818129 0.723181871
## 166 B 0.957635359 0.042364641
## 167 M 0.037296645 0.962703355
## 168 B 0.974710495 0.025289505
## 169 B 0.993575528 0.006424472
## 170 M 0.078366328 0.921633672
## 171 B 0.991766240 0.008233760
## 172 B 0.993603018 0.006396982
## 173 B 0.993587245 0.006412755
## 174 B 0.974630559 0.025369441
## 175 M 0.026427134 0.973572866
## 176 B 0.993596387 0.006403613
## 177 B 0.939939729 0.060060271
## 178 B 0.984487845 0.015512155
## 179 M 0.043000906 0.956999094
## 180 B 0.983070682 0.016929318
## 181 B 0.968436712 0.031563288
## 182 B 0.993141110 0.006858890
## 183 B 0.993531242 0.006468758
## 184 B 0.993593580 0.006406420
## 185 M 0.035858081 0.964141919
## 186 B 0.993300177 0.006699823
## 187 B 0.987167610 0.012832390
## 188 B 0.990979341 0.009020659
## 189 B 0.992825697 0.007174303
## 190 B 0.962704544 0.037295456
## 191 B 0.949004486 0.050995514
## 192 B 0.975083251 0.024916749
## 193 B 0.978926877 0.021073123
## 194 B 0.859333533 0.140666467
## 195 B 0.929374115 0.070625885
## 196 B 0.957013816 0.042986184
## 197 B 0.993595135 0.006404865
## 198 B 0.993581652 0.006418348
## 199 M 0.049761316 0.950238684
## 200 B 0.973575429 0.026424571
## 201 B 0.969969214 0.030030786
## 202 B 0.993591460 0.006408540
## 203 B 0.919248489 0.080751511
## 204 M 0.042805339 0.957194661
## 205 B 0.045437260 0.954562740
## 206 B 0.965093751 0.034906249
## 207 B 0.990018236 0.009981764
## 208 B 0.975544476 0.024455524
## 209 M 0.034162318 0.965837682
## 210 M 0.413153339 0.586846661
## 211 B 0.252157172 0.747842828
## 212 B 0.993586898 0.006413102
## 213 B 0.955422702 0.044577298
## 214 M 0.936920039 0.063079961
## 215 B 0.986216621 0.013783379
## 216 B 0.984627120 0.015372880
## 217 B 0.993590377 0.006409623
## 218 B 0.993629375 0.006370625
## 219 B 0.992194182 0.007805818
## 220 M 0.008514817 0.991485183
## 221 B 0.964848237 0.035151763
## 222 B 0.970131269 0.029868731
## 223 B 0.961235491 0.038764509
## 224 B 0.933426348 0.066573652
## 225 B 0.977505336 0.022494664
## 226 M 0.011297304 0.988702696
#################################
# Reporting the independent evaluation results
# for the test set
#################################
# ROC curve for the stacked GLM on the test set; levels are reversed
# so that "M" (malignant) is treated as the positive class.
MEL_LR_Test_ROC <- roc(response = MEL_LR_Test$MEL_LR_Test_Observed,
                       predictor = MEL_LR_Test$MEL_LR_Test_Predicted.M,
                       levels = rev(levels(MEL_LR_Test$MEL_LR_Test_Observed)))
(MEL_LR_Test_AUROC <- auc(MEL_LR_Test_ROC)[1])
## [1] 0.9661301
##################################
# Formulating a stacked model
# using the base learners
# and a random forest meta-model
##################################
set.seed(12345678)
# Stack the base learners with a random forest meta-learner, tuning
# mtry over the externally defined RF_Grid.
MEL_RF <- caretStack(BAL_LIST,
                     metric = "ROC",
                     trControl = RKFold_Control,
                     method = "rf",
                     tuneGrid = RF_Grid)
print(MEL_RF)
## The following models were ensembled: BAL_LDA, BAL_CART, BAL_SVM_R, BAL_KNN, BAL_NB
##
## caret::train model:
## Random Forest
##
## 912 samples
## 5 predictor
## 2 classes: 'B', 'M'
##
## No pre-processing
## Resampling: Cross-Validated (5 fold, repeated 5 times)
## Summary of sample sizes: 729, 729, 730, 730, 730, 730, ...
## Resampling results across tuning parameters:
##
## mtry ROC Sens Spec
## 2 0.9761136 0.9559359 0.9317647
## 3 0.9738558 0.9573364 0.9352941
## 4 0.9733648 0.9597834 0.9382353
## 5 0.9730741 0.9594294 0.9382353
##
## ROC was used to select the optimal model using the largest value.
## The final value used for the model was mtry = 2.
##
## Final model:
##
## Call:
## randomForest(x = x, y = y, mtry = param$mtry)
## Type of random forest: classification
## Number of trees: 500
## No. of variables tried at each split: 2
##
## OOB estimate of error rate: 4.5%
## Confusion matrix:
## B M class.error
## B 551 21 0.03671329
## M 20 320 0.05882353
# Best cross-validated AUROC across the mtry grid for the RF
# meta-model; printed via the outer parentheses.
(MEL_RF_Train_AUROC <- max(MEL_RF$ens_model$results$ROC))
## [1] 0.9761136
##################################
# Independently evaluating the model
# on the test set
##################################
# Pair observed diagnoses with the stacked RF's predicted class
# probabilities on the held-out test set.
MEL_RF_Test <- data.frame(MEL_RF_Test_Observed = MA_Test$diagnosis,
                          MEL_RF_Test_Predicted = predict(MEL_RF,
                                                          MA_Test[, !names(MA_Test) %in% c("diagnosis")]))
MEL_RF_Test
## MEL_RF_Test_Observed MEL_RF_Test_Predicted.B MEL_RF_Test_Predicted.M
## 1 M 0.006 0.994
## 2 M 0.998 0.002
## 3 M 0.104 0.896
## 4 M 0.064 0.936
## 5 M 0.096 0.904
## 6 M 0.000 1.000
## 7 M 0.000 1.000
## 8 M 0.000 1.000
## 9 M 0.026 0.974
## 10 M 0.002 0.998
## 11 M 0.022 0.978
## 12 M 0.088 0.912
## 13 M 0.062 0.938
## 14 B 1.000 0.000
## 15 B 0.994 0.006
## 16 B 1.000 0.000
## 17 M 0.000 1.000
## 18 M 0.006 0.994
## 19 B 1.000 0.000
## 20 B 1.000 0.000
## 21 M 0.012 0.988
## 22 B 0.944 0.056
## 23 B 0.992 0.008
## 24 M 0.004 0.996
## 25 B 0.292 0.708
## 26 M 0.904 0.096
## 27 B 1.000 0.000
## 28 M 0.002 0.998
## 29 B 0.998 0.002
## 30 B 1.000 0.000
## 31 B 0.982 0.018
## 32 B 0.982 0.018
## 33 M 0.148 0.852
## 34 B 1.000 0.000
## 35 B 0.998 0.002
## 36 M 0.146 0.854
## 37 B 1.000 0.000
## 38 M 0.000 1.000
## 39 M 0.112 0.888
## 40 B 0.934 0.066
## 41 M 0.016 0.984
## 42 M 0.126 0.874
## 43 M 0.100 0.900
## 44 M 0.002 0.998
## 45 M 0.032 0.968
## 46 B 1.000 0.000
## 47 B 1.000 0.000
## 48 M 0.012 0.988
## 49 M 0.050 0.950
## 50 M 0.146 0.854
## 51 B 1.000 0.000
## 52 B 0.312 0.688
## 53 M 0.000 1.000
## 54 M 0.014 0.986
## 55 M 0.206 0.794
## 56 M 0.114 0.886
## 57 B 1.000 0.000
## 58 B 0.984 0.016
## 59 B 0.994 0.006
## 60 B 0.902 0.098
## 61 B 0.978 0.022
## 62 B 1.000 0.000
## 63 B 1.000 0.000
## 64 B 1.000 0.000
## 65 M 0.002 0.998
## 66 B 1.000 0.000
## 67 M 0.006 0.994
## 68 B 0.996 0.004
## 69 B 0.910 0.090
## 70 B 0.998 0.002
## 71 B 0.994 0.006
## 72 M 0.054 0.946
## 73 B 1.000 0.000
## 74 B 1.000 0.000
## 75 B 1.000 0.000
## 76 M 0.002 0.998
## 77 B 0.992 0.008
## 78 B 0.948 0.052
## 79 B 0.968 0.032
## 80 B 1.000 0.000
## 81 B 0.858 0.142
## 82 B 0.940 0.060
## 83 B 1.000 0.000
## 84 B 1.000 0.000
## 85 B 1.000 0.000
## 86 M 0.052 0.948
## 87 B 1.000 0.000
## 88 B 0.094 0.906
## 89 B 0.968 0.032
## 90 B 0.706 0.294
## 91 B 0.876 0.124
## 92 B 1.000 0.000
## 93 B 0.950 0.050
## 94 B 0.926 0.074
## 95 B 0.966 0.034
## 96 B 0.942 0.058
## 97 M 0.616 0.384
## 98 B 0.486 0.514
## 99 B 1.000 0.000
## 100 M 0.290 0.710
## 101 B 0.994 0.006
## 102 B 1.000 0.000
## 103 B 0.942 0.058
## 104 B 1.000 0.000
## 105 B 1.000 0.000
## 106 M 0.972 0.028
## 107 B 1.000 0.000
## 108 M 0.042 0.958
## 109 B 0.906 0.094
## 110 M 0.082 0.918
## 111 B 0.924 0.076
## 112 B 0.978 0.022
## 113 B 0.996 0.004
## 114 B 0.982 0.018
## 115 B 0.648 0.352
## 116 M 0.000 1.000
## 117 M 0.004 0.996
## 118 B 1.000 0.000
## 119 M 0.998 0.002
## 120 M 0.278 0.722
## 121 M 0.164 0.836
## 122 M 0.000 1.000
## 123 M 0.000 1.000
## 124 M 0.016 0.984
## 125 M 0.000 1.000
## 126 M 0.134 0.866
## 127 M 0.000 1.000
## 128 M 0.026 0.974
## 129 B 0.998 0.002
## 130 M 0.000 1.000
## 131 M 0.006 0.994
## 132 M 0.002 0.998
## 133 B 1.000 0.000
## 134 B 1.000 0.000
## 135 B 0.862 0.138
## 136 M 0.086 0.914
## 137 B 0.968 0.032
## 138 B 0.840 0.160
## 139 B 0.292 0.708
## 140 B 0.998 0.002
## 141 M 0.000 1.000
## 142 M 0.904 0.096
## 143 M 0.094 0.906
## 144 B 1.000 0.000
## 145 B 1.000 0.000
## 146 B 0.920 0.080
## 147 B 1.000 0.000
## 148 B 0.780 0.220
## 149 M 0.000 1.000
## 150 B 1.000 0.000
## 151 M 0.058 0.942
## 152 M 0.000 1.000
## 153 M 0.008 0.992
## 154 M 0.032 0.968
## 155 B 1.000 0.000
## 156 B 0.892 0.108
## 157 B 1.000 0.000
## 158 M 0.006 0.994
## 159 B 0.988 0.012
## 160 M 0.086 0.914
## 161 M 0.050 0.950
## 162 B 1.000 0.000
## 163 B 0.806 0.194
## 164 B 0.994 0.006
## 165 B 0.312 0.688
## 166 B 0.984 0.016
## 167 M 0.106 0.894
## 168 B 0.952 0.048
## 169 B 1.000 0.000
## 170 M 0.442 0.558
## 171 B 0.950 0.050
## 172 B 1.000 0.000
## 173 B 1.000 0.000
## 174 B 0.992 0.008
## 175 M 0.268 0.732
## 176 B 1.000 0.000
## 177 B 0.956 0.044
## 178 B 0.990 0.010
## 179 M 0.010 0.990
## 180 B 1.000 0.000
## 181 B 0.996 0.004
## 182 B 0.968 0.032
## 183 B 1.000 0.000
## 184 B 1.000 0.000
## 185 M 0.026 0.974
## 186 B 0.998 0.002
## 187 B 1.000 0.000
## 188 B 0.980 0.020
## 189 B 0.998 0.002
## 190 B 0.996 0.004
## 191 B 0.948 0.052
## 192 B 0.886 0.114
## 193 B 0.874 0.126
## 194 B 0.618 0.382
## 195 B 0.858 0.142
## 196 B 0.940 0.060
## 197 B 1.000 0.000
## 198 B 1.000 0.000
## 199 M 0.010 0.990
## 200 B 0.904 0.096
## 201 B 1.000 0.000
## 202 B 1.000 0.000
## 203 B 0.824 0.176
## 204 M 0.030 0.970
## 205 B 0.094 0.906
## 206 B 0.982 0.018
## 207 B 1.000 0.000
## 208 B 0.930 0.070
## 209 M 0.316 0.684
## 210 M 0.616 0.384
## 211 B 0.486 0.514
## 212 B 1.000 0.000
## 213 B 0.992 0.008
## 214 M 0.972 0.028
## 215 B 0.978 0.022
## 216 B 0.998 0.002
## 217 B 1.000 0.000
## 218 B 1.000 0.000
## 219 B 1.000 0.000
## 220 M 0.014 0.986
## 221 B 0.996 0.004
## 222 B 0.700 0.300
## 223 B 0.956 0.044
## 224 B 0.978 0.022
## 225 B 0.982 0.018
## 226 M 0.000 1.000
#################################
# Reporting the independent evaluation results
# for the test set
#################################
# ROC curve for the stacked RF on the test set; levels are reversed
# so that "M" (malignant) is treated as the positive class.
MEL_RF_Test_ROC <- roc(response = MEL_RF_Test$MEL_RF_Test_Observed,
                       predictor = MEL_RF_Test$MEL_RF_Test_Predicted.M,
                       levels = rev(levels(MEL_RF_Test$MEL_RF_Test_Observed)))
(MEL_RF_Test_AUROC <- auc(MEL_RF_Test_ROC)[1])
## [1] 0.9689805
##################################
# Consolidating the resampling results
# for the formulated individual models
##################################
# Gather the resample results of the nine individually tuned models
# (boosting, bagging, and base learners) into one resamples object;
# printed via the outer parentheses.
(Consolidated_Resampling <- resamples(list(MBS_AB = MBS_AB_Tune,
                                           MBS_GBM = MBS_GBM_Tune,
                                           MBS_XGB = MBS_XGB_Tune,
                                           MBG_RF = MBG_RF_Tune,
                                           MBG_BCART = MBG_BCART_Tune,
                                           BAL_LDA = BAL_LDA_Tune,
                                           BAL_CART = BAL_CART_Tune,
                                           BAL_KNN = BAL_KNN_Tune,
                                           BAL_NB = BAL_NB_Tune)))
##
## Call:
## resamples.default(x = list(MBS_AB = MBS_AB_Tune, MBS_GBM =
## = MBG_BCART_Tune, BAL_LDA = BAL_LDA_Tune, BAL_CART = BAL_CART_Tune, BAL_KNN
## = BAL_KNN_Tune, BAL_NB = BAL_NB_Tune))
##
## Models: MBS_AB, MBS_GBM, MBS_XGB, MBG_RF, MBG_BCART, BAL_LDA, BAL_CART, BAL_KNN, BAL_NB
## Number of resamples: 25
## Performance metrics: ROC, Sens, Spec
## Time estimates for: everything, final model fit
# Tabulate min/quartiles/mean/max of ROC, Sens and Spec per model
summary(Consolidated_Resampling)
##
## Call:
## summary.resamples(object = Consolidated_Resampling)
##
## Models: MBS_AB, MBS_GBM, MBS_XGB, MBG_RF, MBG_BCART, BAL_LDA, BAL_CART, BAL_KNN, BAL_NB
## Number of resamples: 25
##
## ROC
## Min. 1st Qu. Median Mean 3rd Qu. Max. NA's
## MBS_AB 0.9336945 0.9595908 0.9748452 0.9710985 0.9860614 0.9941176 0
## MBS_GBM 0.9262126 0.9476982 0.9656863 0.9647306 0.9821981 0.9877238 0
## MBS_XGB 0.9220846 0.9460358 0.9700722 0.9640349 0.9820691 0.9900256 0
## MBG_RF 0.9324690 0.9563939 0.9729747 0.9700081 0.9831841 0.9930946 0
## MBG_BCART 0.9224071 0.9521100 0.9696852 0.9644432 0.9808437 0.9901535 0
## BAL_LDA 0.8102302 0.8641641 0.8828689 0.8762815 0.8913829 0.9296675 0
## BAL_CART 0.7863777 0.8392673 0.8597187 0.8614523 0.8853844 0.9405371 0
## BAL_KNN 0.8342363 0.8946931 0.9120227 0.9076428 0.9237616 0.9562020 0
## BAL_NB 0.8354220 0.8777090 0.8886189 0.8873525 0.8989938 0.9317136 0
##
## Sens
## Min. 1st Qu. Median Mean 3rd Qu. Max. NA's
## MBS_AB 0.9043478 0.9298246 0.9565217 0.9527750 0.9736842 0.9913043 0
## MBS_GBM 0.8956522 0.9298246 0.9478261 0.9492784 0.9652174 1.0000000 0
## MBS_XGB 0.9122807 0.9385965 0.9565217 0.9562777 0.9739130 0.9913043 0
## MBG_RF 0.9130435 0.9473684 0.9565217 0.9580351 0.9652174 0.9913043 0
## MBG_BCART 0.8956522 0.9473684 0.9561404 0.9541998 0.9736842 0.9913043 0
## BAL_LDA 0.8260870 0.8508772 0.8684211 0.8720214 0.8947368 0.9304348 0
## BAL_CART 0.7631579 0.8086957 0.8596491 0.8503158 0.8859649 0.9043478 0
## BAL_KNN 0.8596491 0.9035088 0.9210526 0.9205797 0.9391304 0.9736842 0
## BAL_NB 0.7913043 0.8333333 0.8596491 0.8552525 0.8771930 0.9130435 0
##
## Spec
## Min. 1st Qu. Median Mean 3rd Qu. Max. NA's
## MBS_AB 0.7941176 0.8823529 0.8970588 0.8964706 0.9264706 0.9558824 0
## MBS_GBM 0.8235294 0.8676471 0.8970588 0.8964706 0.9264706 0.9705882 0
## MBS_XGB 0.8235294 0.8823529 0.8970588 0.9000000 0.9264706 0.9558824 0
## MBG_RF 0.8088235 0.8676471 0.8823529 0.8970588 0.9411765 0.9852941 0
## MBG_BCART 0.8235294 0.8676471 0.9117647 0.9000000 0.9264706 0.9852941 0
## BAL_LDA 0.6323529 0.6764706 0.7058824 0.7105882 0.7352941 0.8235294 0
## BAL_CART 0.5588235 0.7058824 0.7500000 0.7482353 0.7794118 0.8823529 0
## BAL_KNN 0.8088235 0.8676471 0.8970588 0.8947059 0.9264706 0.9558824 0
## BAL_NB 0.6911765 0.7205882 0.7500000 0.7605882 0.7794118 0.8529412 0
##################################
# Exploring the resampling results
# for the formulated individual models
##################################
# Box-and-whisker comparison of the resampled metrics across the nine
# models; layout=c(3,1) places ROC, Sens and Spec side by side.
bwplot(Consolidated_Resampling,
main = "Model Resampling Performance Comparison (Range)",
ylab = "Model",
pch=16,
cex=2,
layout=c(3,1))
##################################
# Consolidating the train and test AUROC
# for the formulated individual models
# together with the ensemble and stacked models
##################################
# The twelve model identifiers in display order; each appears once for
# the cross-validation AUROC and once for the test-set AUROC.
model_names <- c('MBS_AB', 'MBS_GBM', 'MBS_XGB',
                 'MBG_RF', 'MBG_BCART',
                 'BAL_LDA', 'BAL_CART', 'BAL_SVM_R', 'BAL_KNN', 'BAL_NB',
                 'MEL_LR', 'MEL_RF')
Model <- rep(model_names, 2)
Set <- c(rep('Cross-Validation', 12), rep('Test', 12))
# AUROC values in the same order as Model/Set above.
AUROC <- c(MBS_AB_Train_AUROC, MBS_GBM_Train_AUROC, MBS_XGB_Train_AUROC,
           MBG_RF_Train_AUROC, MBG_BCART_Train_AUROC,
           BAL_LDA_Train_AUROC, BAL_CART_Train_AUROC, BAL_SVM_R_Train_AUROC,
           BAL_KNN_Train_AUROC, BAL_NB_Train_AUROC,
           MEL_LR_Train_AUROC, MEL_RF_Train_AUROC,
           MBS_AB_Test_AUROC, MBS_GBM_Test_AUROC, MBS_XGB_Test_AUROC,
           MBG_RF_Test_AUROC, MBG_BCART_Test_AUROC,
           BAL_LDA_Test_AUROC, BAL_CART_Test_AUROC, BAL_SVM_R_Test_AUROC,
           BAL_KNN_Test_AUROC, BAL_NB_Test_AUROC,
           MEL_LR_Test_AUROC, MEL_RF_Test_AUROC)
# Build the summary with data.frame() so AUROC stays numeric;
# as.data.frame(cbind(...)) would coerce every column to character
# first and force a round-trip through as.numeric(as.character(...)).
AUROC_Summary <- data.frame(Model = Model, Set = Set, AUROC = AUROC)
# Fix factor level order so plots group the sets and list the models
# in the intended sequence rather than alphabetically.
AUROC_Summary$Set <- factor(AUROC_Summary$Set,
                            levels = c("Cross-Validation", "Test"))
AUROC_Summary$Model <- factor(AUROC_Summary$Model,
                              levels = model_names)
print(AUROC_Summary, row.names = FALSE)
## Model Set AUROC
## MBS_AB Cross-Validation 0.9710985
## MBS_GBM Cross-Validation 0.9647306
## MBS_XGB Cross-Validation 0.9640349
## MBG_RF Cross-Validation 0.9700081
## MBG_BCART Cross-Validation 0.9644432
## BAL_LDA Cross-Validation 0.8762815
## BAL_CART Cross-Validation 0.8614523
## BAL_SVM_R Cross-Validation 0.9097712
## BAL_KNN Cross-Validation 0.9076428
## BAL_NB Cross-Validation 0.8873525
## MEL_LR Cross-Validation 0.9699332
## MEL_RF Cross-Validation 0.9761136
## MBS_AB Test 0.9956405
## MBS_GBM Test 0.9830651
## MBS_XGB Test 0.9897720
## MBG_RF Test 0.9919517
## MBG_BCART Test 0.9858317
## BAL_LDA Test 0.8883300
## BAL_CART Test 0.9210681
## BAL_SVM_R Test 0.9376258
## BAL_KNN Test 0.9361167
## BAL_NB Test 0.8969651
## MEL_LR Test 0.9661301
## MEL_RF Test 0.9689805
# Dot plot comparing cross-validation vs. test AUROC per model;
# type=c("p","h") draws a point plus a stem from origin = 0.
# Printed via the outer parentheses.
(AUROC_Plot <- dotplot(Model ~ AUROC,
                       data = AUROC_Summary,
                       groups = Set,
                       main = "Classification Model Performance Comparison",
                       ylab = "Model",
                       xlab = "AUROC",
                       auto.key = list(adj = 1, space = "top", columns = 2),
                       type = c("p", "h"),
                       origin = 0,
                       alpha = 0.45,
                       pch = 16,
                       cex = 2))