Skip to content

Commit

Permalink
Merge pull request #1234 from topepo/6-0-87-RC
Browse files Browse the repository at this point in the history
6.0-87 release
  • Loading branch information
topepo authored May 15, 2021
2 parents 77e3aad + c62ff88 commit 77aab31
Show file tree
Hide file tree
Showing 34 changed files with 280 additions and 269 deletions.
36 changes: 18 additions & 18 deletions models/deprecated/avMxnet.R
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,8 @@ modelInfo <- list(label = "Model Averaged Neural Network",
grid = function(x, y, len = NULL, search = "grid") {
if(search == "grid") {
out <- expand.grid(layer1 = ((1:len) * 2) - 1, layer2 = 0, layer3 = 0,
learning.rate = 2e-6,
momentum = 0.9,
learning.rate = 2e-6,
momentum = 0.9,
dropout = seq(0, .7, length = len),
repeats = 5)
} else {
Expand All @@ -19,7 +19,7 @@ modelInfo <- list(label = "Model Averaged Neural Network",
layer3 = 0,
learning.rate = runif(len),
momentum = runif(len, min = .9),
dropout = runif(len, max = .7),
dropout = runif(len, max = .7),
repeats = sample(1:15, replace = TRUE, size = len))
}
out
Expand All @@ -31,29 +31,29 @@ modelInfo <- list(label = "Model Averaged Neural Network",
if(!is.matrix(x)) x <- as.matrix(x)
if(is.numeric(y)) {
for(i in 1:param$repeats) {
out$models[[i]] <- mxnet::mx.mlp(data = x,
out$models[[i]] <- mxnet::mx.mlp(data = x,
label = y,
hidden_node = num_units,
out_node = 1,
out_activation = "rmse",
learning.rate = param$learning.rate,
momentum = param$momentum,
eval.metric = mxnet::mx.metric.rmse,
out_node = 1,
out_activation = "rmse",
learning.rate = param$learning.rate,
momentum = param$momentum,
eval.metric = mxnet::mx.metric.rmse,
array.layout = "rowmajor",
...)
}
} else {
y <- as.numeric(y) - 1
for(i in 1:param$repeats) {
out$models[[i]] <- mxnet::mx.mlp(data = x,
out$models[[i]] <- mxnet::mx.mlp(data = x,
label = y,
hidden_node = num_units,
out_node = length(unique(y)),
out_node = length(unique(y)),
out_activation = "softmax",
learning.rate = param$learning.rate,
momentum = param$momentum,
eval.metric = mxnet::mx.metric.accuracy,
array.layout = "rowmajor",
learning.rate = param$learning.rate,
momentum = param$momentum,
eval.metric = mxnet::mx.metric.accuracy,
array.layout = "rowmajor",
...)
}
}
Expand All @@ -66,7 +66,7 @@ modelInfo <- list(label = "Model Averaged Neural Network",
pred <- if(i == 1) tmp else pred + tmp
}
pred <- pred/length(modelFit$models)

if(modelFit$problemType == "Regression") {
pred <- pred[1,]
} else {
Expand All @@ -83,14 +83,14 @@ modelInfo <- list(label = "Model Averaged Neural Network",
}
pred <- pred/length(modelFit$models)
pred <- t(apply(pred, 2, function(x) x/sum(x)))

colnames(pred) <- modelFit$obsLevels
pred
},
predictors = function(x, ...) {
if(any(names(x) == "xNames")) x$xNames else NA
},
notes = paste("The `mxnet` package is not yet on CRAN.",
"See [http://mxnet.io](http://mxnet.io) for installation instructions."),
"See [https://mxnet.apache.org/](https://mxnet.apache.org/) for installation instructions."),
tags = c("Neural Network", "Ensemble Model"),
sort = function(x) x[order(x$layer1, x$layer2, x$layer3),])
42 changes: 21 additions & 21 deletions models/files/mxnet.R
Original file line number Diff line number Diff line change
Expand Up @@ -2,21 +2,21 @@ modelInfo <- list(label = "Neural Network",
library = "mxnet",
loop = NULL,
type = c('Classification', 'Regression'),
parameters = data.frame(parameter = c('layer1', 'layer2', 'layer3',
"learning.rate", "momentum", "dropout",
parameters = data.frame(parameter = c('layer1', 'layer2', 'layer3',
"learning.rate", "momentum", "dropout",
"activation"),
class = c(rep('numeric', 6), "character"),
label = c('#Hidden Units in Layer 1', '#Hidden Units in Layer 2',
label = c('#Hidden Units in Layer 1', '#Hidden Units in Layer 2',
'#Hidden Units in Layer 3',
"Learning Rate", "Momentum",
"Dropout Rate",
"Learning Rate", "Momentum",
"Dropout Rate",
"Activation Function")),
grid = function(x, y, len = NULL, search = "grid") {
if(search == "grid") {
out <- expand.grid(layer1 = ((1:len) * 2) - 1, layer2 = 0, layer3 = 0,
learning.rate = 2e-6,
momentum = 0.9,
dropout = seq(0, .7, length = len),
learning.rate = 2e-6,
momentum = 0.9,
dropout = seq(0, .7, length = len),
activation = 'relu')
} else {
out <- data.frame(layer1 = sample(2:20, replace = TRUE, size = len),
Expand All @@ -30,37 +30,37 @@ modelInfo <- list(label = "Neural Network",
out
},
fit = function(x, y, wts, param, lev, last, classProbs, ...) {
mxnet::mx.set.seed(21)
mxnet::mx.set.seed(21)
num_units <- param[grepl("layer[1-9]", names(param))]
num_units <- num_units[num_units > 0]
if(!is.matrix(x)) x <- as.matrix(x)
if(is.numeric(y)) {
out <- mxnet::mx.mlp(data = x,
label = y,
hidden_node = num_units,
out_node = 1,
out_activation = "rmse",
learning.rate = param$learning.rate,
momentum = param$momentum,
eval.metric = mxnet::mx.metric.rmse,
out_node = 1,
out_activation = "rmse",
learning.rate = param$learning.rate,
momentum = param$momentum,
eval.metric = mxnet::mx.metric.rmse,
array.layout = "rowmajor",
activation = rep( as.character(param$activation), length(num_units)),
# Use He/MSRA when available in R
# Use He/MSRA when available in R
initializer = mxnet::mx.init.Xavier(factor_type = "avg", magnitude = 3, rnd_type = 'uniform'),
...)
} else {
y <- as.numeric(y) - 1
out <- mxnet::mx.mlp(data = x,
label = y,
hidden_node = num_units,
out_node = length(unique(y)),
out_node = length(unique(y)),
out_activation = "softmax",
learning.rate = param$learning.rate,
momentum = param$momentum,
eval.metric = mxnet::mx.metric.accuracy,
learning.rate = param$learning.rate,
momentum = param$momentum,
eval.metric = mxnet::mx.metric.accuracy,
array.layout = "rowmajor",
activation = rep( as.character(param$activation), length(num_units)),
initializer = mxnet::mx.init.Xavier(factor_type = "avg", magnitude = 3, rnd_type = 'uniform'),
initializer = mxnet::mx.init.Xavier(factor_type = "avg", magnitude = 3, rnd_type = 'uniform'),
...)
}
if(last)
Expand All @@ -85,6 +85,6 @@ modelInfo <- list(label = "Neural Network",
pred
},
notes = paste("The `mxnet` package is not yet on CRAN.",
"See [http://mxnet.io](http://mxnet.io) for installation instructions."),
"See [https://mxnet.apache.org/](https://mxnet.apache.org/) for installation instructions."),
tags = c("Neural Network"),
sort = function(x) x[order(x$layer1, x$layer2, x$layer3),])
42 changes: 21 additions & 21 deletions models/files/mxnetAdam.R
Original file line number Diff line number Diff line change
@@ -1,18 +1,18 @@
modelInfo <- list(label = "Neural Network",
library = "mxnet",
modelInfo <- list(label = "Neural Network",
library = "mxnet",
type = c('Classification','Regression'),
parameters = data.frame(parameter = c("layer1", "layer2", "layer3", "dropout",
"beta1", "beta2", "learningrate", "activation"),
class = c(rep('numeric', 7), "character"),
label = c('#Hidden Units in Layer 1', '#Hidden Units in Layer 2', '#Hidden Units in Layer 3',
label = c('#Hidden Units in Layer 1', '#Hidden Units in Layer 2', '#Hidden Units in Layer 3',
"Dropout Rate", "beta1", "beta2", "Learning Rate", "Activation Function")),
grid = function(x, y, len = NULL, search = "grid") {
if(search == "grid") {
out <- expand.grid(layer1 = ((1:len) * 4) - 1, layer2 = 0, layer3 = 0,
learningrate = 2e-6,
beta1 = 0.9,
learningrate = 2e-6,
beta1 = 0.9,
beta2 = 0.9999,
dropout = seq(0, .7, length = len),
dropout = seq(0, .7, length = len),
activation = 'relu')
} else {
out <- data.frame(layer1 = sample(2:20, replace = TRUE, size = len),
Expand All @@ -31,27 +31,27 @@ modelInfo <- list(label = "Neural Network",
num_units <- num_units[num_units > 0]
if(!is.matrix(x)) x <- as.matrix(x)
if(is.numeric(y)) {
mxnet::mx.set.seed(21)
out <- mxnet::mx.mlp(data = x, label = y, out_node = 1, out_activation = "rmse",
optimizer = 'adam', eval.metric = mxnet::mx.metric.rmse, array.layout = "rowmajor",
learning.rate = param$learningrate,
beta1 = param$beta1,
beta2 = param$beta2,
mxnet::mx.set.seed(21)
out <- mxnet::mx.mlp(data = x, label = y, out_node = 1, out_activation = "rmse",
optimizer = 'adam', eval.metric = mxnet::mx.metric.rmse, array.layout = "rowmajor",
learning.rate = param$learningrate,
beta1 = param$beta1,
beta2 = param$beta2,
dropout = param$dropout,
hidden_node = num_units,
activation = rep( as.character(param$activation), length(num_units)),
# Consider using He/MSRA paper when available in R
initializer = mxnet::mx.init.Xavier(factor_type = "avg", magnitude = 3, rnd_type = 'uniform'),
...)
} else {
y <- as.numeric(y) - 1
y <- as.numeric(y) - 1
mxnet::mx.set.seed(21)
out <- mxnet::mx.mlp(data = x, label = y, out_node = length(unique(y)), out_activation = "softmax",
optimizer = 'adam', eval.metric = mxnet::mx.metric.accuracy, array.layout = "rowmajor",
learning.rate = param$learningrate,
beta1 = param$beta1,
beta2 = param$beta2,
dropout = param$dropout,
out <- mxnet::mx.mlp(data = x, label = y, out_node = length(unique(y)), out_activation = "softmax",
optimizer = 'adam', eval.metric = mxnet::mx.metric.accuracy, array.layout = "rowmajor",
learning.rate = param$learningrate,
beta1 = param$beta1,
beta2 = param$beta2,
dropout = param$dropout,
hidden_node = num_units,
activation = rep( as.character(param$activation), length(num_units)),
initializer = mxnet::mx.init.Xavier(factor_type = "avg", magnitude = 3, rnd_type = 'uniform'),
Expand All @@ -61,7 +61,7 @@ modelInfo <- list(label = "Neural Network",
out <- mxnet::mx.serialize(out)
out
},
predict = function(modelFit, newdata, submodels = NULL) {
predict = function(modelFit, newdata, submodels = NULL) {
if(!is.matrix(newdata)) newdata <- as.matrix(newdata)
pred <- predict(modelFit, newdata, array.layout = 'rowmajor')
if(modelFit$problemType == "Regression") {
Expand All @@ -81,7 +81,7 @@ modelInfo <- list(label = "Neural Network",
pred
},
notes = paste("The `mxnet` package is not yet on CRAN.",
"See [http://mxnet.io](http://mxnet.io) for installation instructions.",
"See [https://mxnet.apache.org/](https://mxnet.apache.org/) for installation instructions.",
"Users are strongly advised to define `num.round` themselves."),
tags = c("Neural Network"),
sort = function(x) x[order(x$layer1, x$layer2,x$layer3,
Expand Down
Loading

0 comments on commit 77aab31

Please sign in to comment.