[R] address some lintr warnings (#8609)
parent 53e6e32718
commit 17ce1f26c8
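For orientation, here is a minimal sketch (not part of this commit) of how warnings of the kind fixed below might be reproduced with the lintr package, assuming lintr >= 3.0. The linter selection and file paths are assumptions inferred from the diff (trailing semicolons, unbraced `if`, missing spaces around infix operators, `F` instead of `FALSE`); the project's actual `.lintr` configuration may differ.

```r
# Hedged sketch only: the linter choice and paths below are assumptions,
# not the project's lint configuration.
library(lintr)

# Checks that plausibly correspond to the fixes in this diff:
linters <- list(
  semicolon   = semicolon_linter(),       # trailing ';', e.g. `expect_equal(...);`
  braces      = brace_linter(),           # unbraced `if (...) install.packages(...)`
  infix_space = infix_spaces_linter(),    # `digits=2,scientific=F` -> `digits = 2, ...`
  t_and_f     = T_and_F_symbol_linter()   # `row.names=F` -> `row.names = FALSE`
)

# Lint a single file (placeholder path) ...
lint("path/to/file.R", linters = linters)

# ... or a whole package; the linters are passed through to lint().
lint_package("path/to/R-package", linters = linters)
```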
@@ -629,7 +629,7 @@ xgb.attributes <- function(object) {
 #' @export
 xgb.config <- function(object) {
   handle <- xgb.get.handle(object)
-  .Call(XGBoosterSaveJsonConfig_R, handle);
+  .Call(XGBoosterSaveJsonConfig_R, handle)
 }

 #' @rdname xgb.config
@@ -404,7 +404,7 @@ test_that("Configuration works", {
   config <- xgb.config(bst)
   xgb.config(bst) <- config
   reloaded_config <- xgb.config(bst)
-  expect_equal(config, reloaded_config);
+  expect_equal(config, reloaded_config)
 })

 test_that("strict_shape works", {
@@ -28,7 +28,9 @@ Package loading:
 require(xgboost)
 require(Matrix)
 require(data.table)
-if (!require('vcd')) install.packages('vcd')
+if (!require('vcd')) {
+  install.packages('vcd')
+}
 ```

 > **VCD** package is used for one of its embedded dataset only.
@@ -163,9 +163,10 @@ We were able to get the log-odds to agree, so now let's manually calculate the s
 bst_preds <- predict(bst, as.matrix(data$dates))

 # calculate the predictions casting doubles to floats
-bst_from_json_preds <- ifelse(fl(data$dates)<fl(node$split_condition),
-                              as.numeric(1/(1+exp(-1*fl(node$children[[1]]$leaf)))),
-                              as.numeric(1/(1+exp(-1*fl(node$children[[2]]$leaf))))
+bst_from_json_preds <- ifelse(
+  fl(data$dates) < fl(node$split_condition)
+  , as.numeric(1 / (1 + exp(-1 * fl(node$children[[1]]$leaf))))
+  , as.numeric(1 / (1 + exp(-1 * fl(node$children[[2]]$leaf))))
 )

 # test that values are equal
@@ -177,9 +178,10 @@ None are exactly equal again. What is going on here? Well, since we are using
 How do we fix this? We have to ensure we use the correct data types everywhere and the correct operators. If we use only floats, the float library that we have loaded will ensure the 32-bit float exponentiation operator is applied.
 ```{r}
 # calculate the predictions casting doubles to floats
-bst_from_json_preds <- ifelse(fl(data$dates)<fl(node$split_condition),
-                              as.numeric(fl(1)/(fl(1)+exp(fl(-1)*fl(node$children[[1]]$leaf)))),
-                              as.numeric(fl(1)/(fl(1)+exp(fl(-1)*fl(node$children[[2]]$leaf))))
+bst_from_json_preds <- ifelse(
+  fl(data$dates) < fl(node$split_condition)
+  , as.numeric(fl(1) / (fl(1) + exp(fl(-1) * fl(node$children[[1]]$leaf))))
+  , as.numeric(fl(1) / (fl(1) + exp(fl(-1) * fl(node$children[[2]]$leaf))))
 )

 # test that values are equal
@@ -1,8 +1,10 @@
 site <- 'http://cran.r-project.org'
-if (!require('dummies'))
+if (!require('dummies')) {
   install.packages('dummies', repos = site)
-if (!require('insuranceData'))
+}
+if (!require('insuranceData')) {
   install.packages('insuranceData', repos = site)
+}

 library(dummies)
 library(insuranceData)
@@ -14,5 +16,16 @@ data$STATE <- as.factor(data$STATE)
 data$CLASS <- as.factor(data$CLASS)
 data$GENDER <- as.factor(data$GENDER)

-data.dummy <- dummy.data.frame(data, dummy.class='factor', omit.constants=TRUE);
-write.table(data.dummy, 'autoclaims.csv', sep=',', row.names=F, col.names=F, quote=F)
+data.dummy <- dummy.data.frame(
+  data
+  , dummy.class = 'factor'
+  , omit.constants = TRUE
+)
+write.table(
+  data.dummy
+  , 'autoclaims.csv'
+  , sep = ','
+  , row.names = FALSE
+  , col.names = FALSE
+  , quote = FALSE
+)
@@ -25,7 +25,7 @@ param <- list("objective" = "binary:logitraw",
 watchlist <- list("train" = xgmat)
 nrounds <- 120
 print ("loading data end, start to boost trees")
-bst <- xgb.train(param, xgmat, nrounds, watchlist );
+bst <- xgb.train(param, xgmat, nrounds, watchlist)
 # save out model
 xgb.save(bst, "higgs.model")
 print ('finish training')
@@ -40,7 +40,7 @@ for (i in 1:length(threads)){
 watchlist <- list("train" = xgmat)
 nrounds <- 120
 print ("loading data end, start to boost trees")
-bst <- xgb.train(param, xgmat, nrounds, watchlist );
+bst <- xgb.train(param, xgmat, nrounds, watchlist)
 # save out model
 xgb.save(bst, "higgs.model")
 print ('finish training')
@@ -67,4 +67,3 @@ xgboost.time
 # [[5]]
 #    user  system elapsed
 # 157.390   5.988  40.949
-
@@ -24,8 +24,13 @@ param <- list("objective" = "multi:softprob",

 # Run Cross Validation
 cv.nrounds <- 50
-bst.cv <- xgb.cv(param=param, data = x[trind,], label = y,
-                 nfold = 3, nrounds=cv.nrounds)
+bst.cv <- xgb.cv(
+  param = param
+  , data = x[trind, ]
+  , label = y
+  , nfold = 3
+  , nrounds = cv.nrounds
+)

 # Train the model
 nrounds <- 50
@@ -37,7 +42,7 @@ pred <- matrix(pred,9,length(pred)/9)
 pred <- t(pred)

 # Output submission
-pred <- format(pred, digits=2,scientific=F) # shrink the size of submission
+pred <- format(pred, digits = 2, scientific = FALSE) # shrink the size of submission
 pred <- data.frame(1:nrow(pred), pred)
 names(pred) <- c('id', paste0('Class_', 1:9))
 write.csv(pred, file = 'submission.csv', quote = FALSE, row.names = FALSE)