feat(mlp): add n_layers parameter
sebffischer committed Nov 26, 2024
1 parent de256df commit 43956ac
Showing 4 changed files with 32 additions and 2 deletions.
1 change: 1 addition & 0 deletions NEWS.md
@@ -3,6 +3,7 @@
* perf: Use a faster image loader
* feat: Add parameter `num_interop_threads` to `LearnerTorch`
* feat: Add adaptive average pooling
* feat: Add `n_layers` parameter to the MLP learner

# mlr3torch 0.1.2

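With this change, network depth becomes a single tunable value instead of a vector shape. A minimal usage sketch (the learner construction mirrors the test further below; producing identical architectures under both spellings is the intended behavior, not something verified here):

```r
library(mlr3)
library(mlr3torch)

# Two hidden layers of 10 neurons each, spelled out explicitly ...
l_explicit = lrn("classif.mlp", batch_size = 32, epochs = 10, neurons = c(10, 10))

# ... or as a scalar width repeated via the new `n_layers` parameter
l_repeated = lrn("classif.mlp", batch_size = 32, epochs = 10, neurons = 10, n_layers = 2)
```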
13 changes: 12 additions & 1 deletion R/LearnerTorchMLP.R
@@ -22,6 +22,8 @@
#' * `neurons` :: `integer()`\cr
#' The number of neurons per hidden layer. By default there is no hidden layer.
#' Setting this to `c(10, 20)` creates a first hidden layer with 10 neurons and a second with 20.
#' * `n_layers` :: `integer()`\cr
#' The number of hidden layers. This may only be set when `neurons` has length 1; the value of `neurons` is then repeated `n_layers` times.
#' * `p` :: `numeric(1)`\cr
#' The dropout probability. Is initialized to `0.5`.
#' * `shape` :: `integer()` or `NULL`\cr
@@ -48,6 +50,7 @@ LearnerTorchMLP = R6Class("LearnerTorchMLP",
      param_set = ps(
        neurons = p_uty(tags = c("train", "predict"), custom_check = check_neurons),
        p = p_dbl(lower = 0, upper = 1, tags = "train"),
        n_layers = p_int(lower = 1L, tags = "train"),
        activation = p_uty(tags = c("required", "train"), custom_check = check_nn_module),
        activation_args = p_uty(tags = c("required", "train"), custom_check = check_activation_args),
        shape = p_uty(tags = "train", custom_check = check_shape)
@@ -127,8 +130,16 @@ single_lazy_tensor = function(task) {
}

# shape is (NA, x) if present
make_mlp = function(task, d_in, d_out, activation, neurons = integer(0), p, activation_args, ...) {
make_mlp = function(task, d_in, d_out, activation, neurons = integer(0), p, activation_args, n_layers = NULL, ...) {
  if (!is.null(n_layers)) {
    if (length(neurons) != 1L) {
      stopf("Can only supply `n_layers` when `neurons` has length 1.")
    }
    neurons = rep(neurons, n_layers)
  }

  # This way, dropout_args will have length 0 if p is `NULL`
  dropout_args = list()
  dropout_args$p = p
  prev_dim = d_in
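For reference, the whole expansion reduces to one `rep()` call; a standalone sketch with hypothetical values (the real check lives inside the internal `make_mlp()` helper above):

```r
neurons = 20L
n_layers = 3L
stopifnot(length(neurons) == 1L)  # `n_layers` is only valid for a scalar `neurons`
rep(neurons, n_layers)
#> [1] 20 20 20  -- three hidden layers with 20 neurons each
```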
2 changes: 2 additions & 0 deletions man/mlr_learners.mlp.Rd

Some generated files are not rendered by default.

18 changes: 17 additions & 1 deletion tests/testthat/test_LearnerTorchMLP.R
@@ -52,4 +52,20 @@ test_that("works for lazy tensor", {
  expect_class(pred, "Prediction")
})

# TODO: More tests
test_that("neurons and n_layers", {
l1 = lrn("classif.mlp", batch_size = 32, epochs = 0L)
l2 = l1$clone(deep = TRUE)
task = tsk("iris")
l1$param_set$set_values(neurons = c(10, 10))
l2$param_set$set_values(neurons = 10, n_layers = 2)
l1$train(task)
l2$train(task)
expect_equal(l1$network$parameters[[1]]$shape, l2$network$parameters[[1]]$shape)
expect_equal(l1$network$parameters[[3]]$shape, l2$network$parameters[[3]]$shape)
expect_equal(l1$network$parameters[[1]]$shape, c(10, 4))
expect_equal(l1$network$parameters[[3]]$shape, c(3, 10))

l1$param_set$set_values(n_layers = 2)
expect_error(l2$train(task), "Can only supply")
})
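A quick interactive check of the resulting architecture (a sketch along the lines of the test above; assumes the trained learner exposes its torch network via `$network`, as the test does):

```r
l = lrn("classif.mlp", batch_size = 32, epochs = 0L, neurons = 16, n_layers = 2)
l$train(tsk("iris"))
l$network  # should print two hidden linear layers of width 16 plus the output head
```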
