Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 15 additions & 1 deletion R/TuningInstanceSingleCrit.R
Original file line number Diff line number Diff line change
Expand Up @@ -115,10 +115,24 @@ TuningInstanceSingleCrit = R6Class("TuningInstanceSingleCrit",
#' This defines the resampled performance of a learner on a task, a
#' feasibility region for the parameters the tuner is supposed to optimize,
#' and a termination criterion.
initialize = function(task, learner, resampling, measure, search_space,
initialize = function(task, learner, resampling, measure, search_space = NULL,
terminator, store_benchmark_result = TRUE, store_models = FALSE,
check_values = FALSE) {
measure = as_measure(measure)
# We might want to have the following in a function bc its used in MultiCrit as well
if (is.null(search_space)) {
# TODO: check if we can construct search space from learner$param_set using tune_tokens
tmp = learner$param_set$get_tune_pair() # return fixed_values (all but tune tokens) and search_space (from tune tokens),
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I don't think we should have a function that returns two things that easily could be obtained by two calls, since here the two things that would happen are independent of each other.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Just to emphasize that you always want to do both? If you want to have the search_space you also always should take the param_set with the remaining fixed param values.

# Question: If we have tune tokens in the learner$param_set the learner is practically "broken". So we have to clean it up in order to use it. Why not
# a) Don't use tune tokens at all (we can pass the info in get_tune_pair(tune_tokens = xxx))
# b) Use a second slot next to $param_vals, eg. $param_vals_to_tune. (++)
# c) Only use param_set$get_values() which can filter out TuneTokens, maybe param_set$values discards TuneTokens, on the other hand if I put a lot of effort here to always treat TuneTokens and real values I could just go with solution b)
learner$param_set$values = tmp$fixed_values #ohne tune token / das könnte inplace passieren
search_space = tmp$search_space
# we don't allow a mix of search_space and tune tokens
} else {
# TODO: check that no tune tokens exist in learner$param_set$values
}
obj = ObjectiveTuning$new(task = task, learner = learner,
resampling = resampling, measures = list(measure),
store_benchmark_result = store_benchmark_result,
Expand Down
32 changes: 32 additions & 0 deletions attic/test_tune_token.R
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
# Scratch script (attic/): explores two candidate API designs for turning
# TuneTokens stored in a learner's param_set into a tuning search space.
# NOTE(review): this is exploratory code — solution 2 below is intentionally
# incomplete and the file does not run end to end as written.
library(mlr3learners)

# Standard mlr3 tuning setup: task, learner, resampling, measure, terminator.
task = tsk("iris")
learner = lrn("classif.svm", type = "C-classification")
resampling = rsmp("holdout")
measure = msr("classif.ce")
terminator = trm("none")

# Mark two hyperparameters for tuning by assigning TuneTokens directly into
# the learner's param_set values (kernel: categorical, degree: integer range).
learner$param_set$values$kernel = to_tune(c("polynomial", "radial"))
learner$param_set$values$degree = to_tune(1, 3)

# solution 1: a single call returns both the cleaned param_set and the
# search space derived from the tune tokens.
foo = learner$param_set$get_tune_pair()
learner$param_set = foo$param_set # without tune tokens / this could happen in place
search_space = foo$search_space # what param_set$tune_ps() would give me

# solution 2: a learner method that mutates the learner in place.
learner$convert_for_tuning() # delete the tune tokens from param_vals and convert them into the param_set
search_space = learner$param_set$
# NOTE(review): line above is syntactically incomplete (expression ends at `$`).
# (unfinished note) is this awkward here, if I param$vals

# Build the single-criterion tuning instance from the pieces assembled above.
instance = TuningInstanceSingleCrit$new(
  task = task,
  learner = learner,
  resampling = resampling,
  measure = measure,
  search_space = search_space,
  terminator = terminator
)

# Run a minimal grid search (resolution 1) over the search space.
tuner = tnr("grid_search", resolution = 1)
tuner$optimize(instance)