Skip to contents

Random search randomly draws feature sets.

Feature sets are evaluated in batches of size batch_size. Larger batches allow more parallelization, while smaller batches allow a more fine-grained checking of the termination criteria.

Source

Bergstra J, Bengio Y (2012). “Random Search for Hyper-Parameter Optimization.” Journal of Machine Learning Research, 13(10), 281–305. https://jmlr.csail.mit.edu/papers/v13/bergstra12a.html.

Dictionary

This FSelector can be instantiated via the dictionary mlr_fselectors or with the associated sugar function fs():

mlr_fselectors$get("random_search")
fs("random_search")

Parameters

max_features

integer(1)
Maximum number of features. Defaults to the number of features in the mlr3::Task.

batch_size

integer(1)
Maximum number of feature sets to try in a batch.

Super class

mlr3fselect::FSelector -> FSelectorRandomSearch

Methods

Inherited methods


Method new()

Creates a new instance of this R6 class.

Usage

FSelectorRandomSearch$new()

Method clone()

The objects of this class are cloneable with this method.

Usage

FSelectorRandomSearch$clone(deep = FALSE)

Arguments

deep

Whether to make a deep clone.

Examples

# retrieve task
task = tsk("pima")

# load learner
learner = lrn("classif.rpart")

# \donttest{
# feature selection on the pima indians diabetes data set
instance = fselect(
  method = "random_search",
  task = task,
  learner = learner,
  resampling = rsmp("holdout"),
  measure = msr("classif.ce"),
  term_evals = 100
)

# best performing feature subset
instance$result
#>     age glucose insulin mass pedigree pregnant pressure triceps
#> 1: TRUE    TRUE    TRUE TRUE     TRUE     TRUE     TRUE    TRUE
#>                                          features classif.ce
#> 1: age,glucose,insulin,mass,pedigree,pregnant,...   0.234375

# all evaluated feature subsets
as.data.table(instance$archive)
#>        age glucose insulin  mass pedigree pregnant pressure triceps classif.ce
#>   1:  TRUE   FALSE    TRUE  TRUE    FALSE     TRUE     TRUE    TRUE  0.3242188
#>   2:  TRUE    TRUE    TRUE FALSE     TRUE     TRUE     TRUE    TRUE  0.2734375
#>   3: FALSE    TRUE   FALSE FALSE     TRUE     TRUE     TRUE   FALSE  0.2656250
#>   4:  TRUE   FALSE   FALSE FALSE    FALSE    FALSE    FALSE   FALSE  0.3515625
#>   5:  TRUE    TRUE    TRUE  TRUE     TRUE     TRUE     TRUE    TRUE  0.2343750
#>   6: FALSE    TRUE    TRUE  TRUE     TRUE    FALSE     TRUE    TRUE  0.2539062
#>   7: FALSE    TRUE    TRUE FALSE     TRUE     TRUE     TRUE   FALSE  0.2695312
#>   8: FALSE   FALSE    TRUE  TRUE    FALSE    FALSE     TRUE   FALSE  0.3632812
#>   9:  TRUE    TRUE   FALSE  TRUE     TRUE     TRUE    FALSE    TRUE  0.2382812
#>  10:  TRUE   FALSE   FALSE FALSE    FALSE    FALSE    FALSE    TRUE  0.3554688
#>  11: FALSE   FALSE    TRUE FALSE    FALSE    FALSE    FALSE    TRUE  0.3671875
#>  12:  TRUE    TRUE    TRUE  TRUE    FALSE    FALSE     TRUE   FALSE  0.2734375
#>  13: FALSE   FALSE   FALSE  TRUE     TRUE    FALSE     TRUE   FALSE  0.3437500
#>  14:  TRUE   FALSE    TRUE FALSE     TRUE     TRUE     TRUE    TRUE  0.3437500
#>  15:  TRUE    TRUE   FALSE FALSE    FALSE     TRUE    FALSE   FALSE  0.2656250
#>  16: FALSE   FALSE   FALSE FALSE    FALSE     TRUE    FALSE    TRUE  0.3398438
#>  17: FALSE    TRUE    TRUE FALSE     TRUE     TRUE     TRUE    TRUE  0.2617188
#>  18: FALSE   FALSE    TRUE FALSE     TRUE     TRUE    FALSE   FALSE  0.3710938
#>  19: FALSE   FALSE   FALSE  TRUE    FALSE    FALSE    FALSE   FALSE  0.3632812
#>  20:  TRUE    TRUE   FALSE FALSE     TRUE     TRUE    FALSE   FALSE  0.2734375
#>  21: FALSE   FALSE    TRUE FALSE     TRUE    FALSE     TRUE   FALSE  0.3671875
#>  22:  TRUE    TRUE    TRUE  TRUE     TRUE     TRUE     TRUE    TRUE  0.2343750
#>  23:  TRUE    TRUE    TRUE  TRUE     TRUE     TRUE     TRUE    TRUE  0.2343750
#>  24: FALSE    TRUE    TRUE  TRUE     TRUE     TRUE     TRUE    TRUE  0.2578125
#>  25:  TRUE    TRUE   FALSE FALSE     TRUE    FALSE    FALSE   FALSE  0.2734375
#>  26:  TRUE   FALSE    TRUE FALSE     TRUE     TRUE     TRUE    TRUE  0.3437500
#>  27: FALSE    TRUE   FALSE FALSE     TRUE     TRUE     TRUE   FALSE  0.2656250
#>  28: FALSE    TRUE   FALSE  TRUE    FALSE    FALSE     TRUE    TRUE  0.2539062
#>  29:  TRUE    TRUE    TRUE  TRUE     TRUE     TRUE     TRUE    TRUE  0.2343750
#>  30:  TRUE   FALSE    TRUE FALSE     TRUE     TRUE     TRUE    TRUE  0.3437500
#>  31:  TRUE   FALSE    TRUE  TRUE     TRUE    FALSE    FALSE    TRUE  0.3789062
#>  32: FALSE   FALSE   FALSE FALSE    FALSE    FALSE     TRUE    TRUE  0.4023438
#>  33: FALSE   FALSE    TRUE FALSE    FALSE     TRUE    FALSE   FALSE  0.2968750
#>  34:  TRUE    TRUE    TRUE  TRUE     TRUE     TRUE     TRUE    TRUE  0.2343750
#>  35: FALSE    TRUE   FALSE FALSE    FALSE    FALSE    FALSE   FALSE  0.2695312
#>  36:  TRUE    TRUE    TRUE  TRUE     TRUE     TRUE     TRUE    TRUE  0.2343750
#>  37: FALSE   FALSE    TRUE FALSE    FALSE     TRUE    FALSE   FALSE  0.2968750
#>  38: FALSE   FALSE   FALSE  TRUE    FALSE    FALSE    FALSE   FALSE  0.3632812
#>  39:  TRUE    TRUE   FALSE FALSE     TRUE    FALSE    FALSE   FALSE  0.2734375
#>  40:  TRUE    TRUE    TRUE  TRUE     TRUE    FALSE     TRUE    TRUE  0.2343750
#>  41: FALSE    TRUE    TRUE FALSE     TRUE    FALSE     TRUE   FALSE  0.2890625
#>  42:  TRUE    TRUE    TRUE  TRUE     TRUE     TRUE     TRUE    TRUE  0.2343750
#>  43:  TRUE    TRUE    TRUE  TRUE     TRUE     TRUE    FALSE    TRUE  0.2343750
#>  44:  TRUE    TRUE    TRUE  TRUE     TRUE     TRUE     TRUE    TRUE  0.2343750
#>  45:  TRUE   FALSE   FALSE FALSE     TRUE    FALSE    FALSE   FALSE  0.3398438
#>  46: FALSE    TRUE    TRUE FALSE    FALSE    FALSE    FALSE   FALSE  0.2773438
#>  47:  TRUE   FALSE   FALSE  TRUE     TRUE    FALSE    FALSE   FALSE  0.3515625
#>  48: FALSE   FALSE   FALSE FALSE    FALSE    FALSE    FALSE    TRUE  0.3828125
#>  49:  TRUE    TRUE   FALSE  TRUE     TRUE     TRUE     TRUE    TRUE  0.2382812
#>  50:  TRUE   FALSE   FALSE FALSE     TRUE     TRUE     TRUE   FALSE  0.3437500
#>  51: FALSE    TRUE    TRUE FALSE    FALSE     TRUE     TRUE    TRUE  0.2773438
#>  52:  TRUE   FALSE    TRUE  TRUE     TRUE     TRUE     TRUE    TRUE  0.3710938
#>  53:  TRUE   FALSE    TRUE FALSE    FALSE    FALSE     TRUE    TRUE  0.3320312
#>  54: FALSE   FALSE    TRUE FALSE    FALSE    FALSE    FALSE    TRUE  0.3671875
#>  55: FALSE   FALSE   FALSE FALSE     TRUE    FALSE    FALSE   FALSE  0.3632812
#>  56:  TRUE   FALSE   FALSE  TRUE    FALSE    FALSE    FALSE   FALSE  0.3476562
#>  57:  TRUE    TRUE   FALSE  TRUE     TRUE    FALSE     TRUE   FALSE  0.2343750
#>  58:  TRUE    TRUE   FALSE  TRUE    FALSE    FALSE    FALSE   FALSE  0.2539062
#>  59:  TRUE    TRUE   FALSE  TRUE    FALSE     TRUE     TRUE    TRUE  0.2539062
#>  60: FALSE    TRUE   FALSE FALSE    FALSE    FALSE     TRUE   FALSE  0.2695312
#>  61:  TRUE   FALSE   FALSE FALSE    FALSE    FALSE    FALSE   FALSE  0.3515625
#>  62:  TRUE    TRUE    TRUE  TRUE     TRUE     TRUE     TRUE    TRUE  0.2343750
#>  63:  TRUE   FALSE    TRUE FALSE    FALSE    FALSE    FALSE    TRUE  0.3789062
#>  64: FALSE   FALSE    TRUE  TRUE    FALSE     TRUE     TRUE    TRUE  0.3242188
#>  65:  TRUE    TRUE   FALSE FALSE     TRUE    FALSE     TRUE    TRUE  0.2734375
#>  66: FALSE   FALSE    TRUE FALSE    FALSE    FALSE    FALSE   FALSE  0.3750000
#>  67: FALSE    TRUE    TRUE  TRUE     TRUE     TRUE     TRUE    TRUE  0.2578125
#>  68:  TRUE   FALSE    TRUE  TRUE    FALSE     TRUE    FALSE    TRUE  0.3398438
#>  69:  TRUE   FALSE    TRUE  TRUE     TRUE     TRUE     TRUE    TRUE  0.3710938
#>  70: FALSE   FALSE    TRUE  TRUE    FALSE    FALSE     TRUE   FALSE  0.3632812
#>  71:  TRUE    TRUE    TRUE  TRUE    FALSE     TRUE    FALSE    TRUE  0.2343750
#>  72: FALSE   FALSE   FALSE FALSE    FALSE    FALSE     TRUE   FALSE  0.3750000
#>  73:  TRUE    TRUE    TRUE  TRUE     TRUE     TRUE     TRUE    TRUE  0.2343750
#>  74:  TRUE    TRUE    TRUE  TRUE     TRUE     TRUE    FALSE    TRUE  0.2343750
#>  75:  TRUE   FALSE    TRUE  TRUE     TRUE     TRUE     TRUE    TRUE  0.3710938
#>  76: FALSE    TRUE   FALSE  TRUE     TRUE     TRUE     TRUE   FALSE  0.2539062
#>  77:  TRUE    TRUE    TRUE  TRUE     TRUE     TRUE     TRUE    TRUE  0.2343750
#>  78:  TRUE   FALSE   FALSE  TRUE     TRUE    FALSE    FALSE   FALSE  0.3515625
#>  79: FALSE   FALSE    TRUE FALSE    FALSE    FALSE     TRUE    TRUE  0.3554688
#>  80: FALSE    TRUE    TRUE  TRUE    FALSE     TRUE     TRUE    TRUE  0.2500000
#>  81:  TRUE    TRUE    TRUE  TRUE     TRUE    FALSE     TRUE   FALSE  0.2343750
#>  82: FALSE   FALSE    TRUE  TRUE    FALSE    FALSE    FALSE   FALSE  0.3632812
#>  83:  TRUE    TRUE    TRUE  TRUE     TRUE     TRUE     TRUE    TRUE  0.2343750
#>  84:  TRUE    TRUE    TRUE FALSE    FALSE     TRUE    FALSE   FALSE  0.2656250
#>  85: FALSE   FALSE   FALSE  TRUE    FALSE    FALSE    FALSE    TRUE  0.3632812
#>  86: FALSE   FALSE   FALSE FALSE    FALSE     TRUE    FALSE    TRUE  0.3398438
#>  87: FALSE    TRUE    TRUE  TRUE    FALSE     TRUE     TRUE    TRUE  0.2500000
#>  88: FALSE    TRUE   FALSE FALSE    FALSE    FALSE     TRUE   FALSE  0.2695312
#>  89:  TRUE    TRUE    TRUE  TRUE     TRUE     TRUE     TRUE    TRUE  0.2343750
#>  90: FALSE   FALSE    TRUE FALSE     TRUE     TRUE    FALSE    TRUE  0.3242188
#>  91:  TRUE    TRUE    TRUE  TRUE     TRUE     TRUE     TRUE    TRUE  0.2343750
#>  92:  TRUE   FALSE   FALSE  TRUE     TRUE     TRUE    FALSE    TRUE  0.3515625
#>  93:  TRUE   FALSE   FALSE  TRUE     TRUE     TRUE     TRUE    TRUE  0.3515625
#>  94: FALSE   FALSE   FALSE  TRUE     TRUE     TRUE     TRUE   FALSE  0.3007812
#>  95:  TRUE    TRUE    TRUE  TRUE     TRUE     TRUE     TRUE    TRUE  0.2343750
#>  96:  TRUE   FALSE    TRUE  TRUE     TRUE    FALSE     TRUE    TRUE  0.3632812
#>  97:  TRUE    TRUE    TRUE  TRUE     TRUE     TRUE     TRUE    TRUE  0.2343750
#>  98: FALSE   FALSE    TRUE FALSE    FALSE    FALSE    FALSE   FALSE  0.3750000
#>  99: FALSE   FALSE   FALSE FALSE    FALSE    FALSE     TRUE   FALSE  0.3750000
#> 100: FALSE   FALSE   FALSE  TRUE    FALSE    FALSE    FALSE    TRUE  0.3632812
#>        age glucose insulin  mass pedigree pregnant pressure triceps classif.ce
#>      runtime_learners           timestamp batch_nr      resample_result
#>   1:            0.072 2022-08-25 10:40:37        1 <ResampleResult[21]>
#>   2:            0.083 2022-08-25 10:40:37        2 <ResampleResult[21]>
#>   3:            0.087 2022-08-25 10:40:38        3 <ResampleResult[21]>
#>   4:            0.091 2022-08-25 10:40:38        4 <ResampleResult[21]>
#>   5:            0.095 2022-08-25 10:40:38        5 <ResampleResult[21]>
#>   6:            0.101 2022-08-25 10:40:38        6 <ResampleResult[21]>
#>   7:            0.096 2022-08-25 10:40:39        7 <ResampleResult[21]>
#>   8:            0.088 2022-08-25 10:40:39        8 <ResampleResult[21]>
#>   9:            0.080 2022-08-25 10:40:39        9 <ResampleResult[21]>
#>  10:            0.069 2022-08-25 10:40:39       10 <ResampleResult[21]>
#>  11:            0.069 2022-08-25 10:40:40       11 <ResampleResult[21]>
#>  12:            0.072 2022-08-25 10:40:40       12 <ResampleResult[21]>
#>  13:            0.087 2022-08-25 10:40:40       13 <ResampleResult[21]>
#>  14:            0.098 2022-08-25 10:40:40       14 <ResampleResult[21]>
#>  15:            0.089 2022-08-25 10:40:41       15 <ResampleResult[21]>
#>  16:            0.078 2022-08-25 10:40:41       16 <ResampleResult[21]>
#>  17:            0.070 2022-08-25 10:40:41       17 <ResampleResult[21]>
#>  18:            0.081 2022-08-25 10:40:41       18 <ResampleResult[21]>
#>  19:            0.275 2022-08-25 10:40:42       19 <ResampleResult[21]>
#>  20:            0.076 2022-08-25 10:40:42       20 <ResampleResult[21]>
#>  21:            0.066 2022-08-25 10:40:42       21 <ResampleResult[21]>
#>  22:            0.067 2022-08-25 10:40:42       22 <ResampleResult[21]>
#>  23:            0.068 2022-08-25 10:40:43       23 <ResampleResult[21]>
#>  24:            0.065 2022-08-25 10:40:43       24 <ResampleResult[21]>
#>  25:            0.066 2022-08-25 10:40:43       25 <ResampleResult[21]>
#>  26:            0.069 2022-08-25 10:40:43       26 <ResampleResult[21]>
#>  27:            0.072 2022-08-25 10:40:43       27 <ResampleResult[21]>
#>  28:            0.073 2022-08-25 10:40:44       28 <ResampleResult[21]>
#>  29:            0.073 2022-08-25 10:40:44       29 <ResampleResult[21]>
#>  30:            0.073 2022-08-25 10:40:44       30 <ResampleResult[21]>
#>  31:            0.066 2022-08-25 10:40:44       31 <ResampleResult[21]>
#>  32:            0.066 2022-08-25 10:40:45       32 <ResampleResult[21]>
#>  33:            0.066 2022-08-25 10:40:45       33 <ResampleResult[21]>
#>  34:            0.069 2022-08-25 10:40:45       34 <ResampleResult[21]>
#>  35:            0.066 2022-08-25 10:40:45       35 <ResampleResult[21]>
#>  36:            0.066 2022-08-25 10:40:45       36 <ResampleResult[21]>
#>  37:            0.066 2022-08-25 10:40:46       37 <ResampleResult[21]>
#>  38:            0.065 2022-08-25 10:40:46       38 <ResampleResult[21]>
#>  39:            0.065 2022-08-25 10:40:46       39 <ResampleResult[21]>
#>  40:            0.068 2022-08-25 10:40:46       40 <ResampleResult[21]>
#>  41:            0.068 2022-08-25 10:40:46       41 <ResampleResult[21]>
#>  42:            0.065 2022-08-25 10:40:47       42 <ResampleResult[21]>
#>  43:            0.067 2022-08-25 10:40:47       43 <ResampleResult[21]>
#>  44:            0.092 2022-08-25 10:40:47       44 <ResampleResult[21]>
#>  45:            0.100 2022-08-25 10:40:47       45 <ResampleResult[21]>
#>  46:            0.097 2022-08-25 10:40:48       46 <ResampleResult[21]>
#>  47:            0.079 2022-08-25 10:40:48       47 <ResampleResult[21]>
#>  48:            0.079 2022-08-25 10:40:48       48 <ResampleResult[21]>
#>  49:            0.081 2022-08-25 10:40:48       49 <ResampleResult[21]>
#>  50:            0.074 2022-08-25 10:40:49       50 <ResampleResult[21]>
#>  51:            0.079 2022-08-25 10:40:49       51 <ResampleResult[21]>
#>  52:            0.079 2022-08-25 10:40:49       52 <ResampleResult[21]>
#>  53:            0.078 2022-08-25 10:40:49       53 <ResampleResult[21]>
#>  54:            0.079 2022-08-25 10:40:50       54 <ResampleResult[21]>
#>  55:            0.083 2022-08-25 10:40:50       55 <ResampleResult[21]>
#>  56:            0.089 2022-08-25 10:40:50       56 <ResampleResult[21]>
#>  57:            0.103 2022-08-25 10:40:50       57 <ResampleResult[21]>
#>  58:            0.086 2022-08-25 10:40:51       58 <ResampleResult[21]>
#>  59:            0.086 2022-08-25 10:40:51       59 <ResampleResult[21]>
#>  60:            0.082 2022-08-25 10:40:51       60 <ResampleResult[21]>
#>  61:            0.081 2022-08-25 10:40:51       61 <ResampleResult[21]>
#>  62:            0.085 2022-08-25 10:40:51       62 <ResampleResult[21]>
#>  63:            0.087 2022-08-25 10:40:52       63 <ResampleResult[21]>
#>  64:            0.120 2022-08-25 10:40:52       64 <ResampleResult[21]>
#>  65:            0.101 2022-08-25 10:40:52       65 <ResampleResult[21]>
#>  66:            0.078 2022-08-25 10:40:52       66 <ResampleResult[21]>
#>  67:            0.082 2022-08-25 10:40:53       67 <ResampleResult[21]>
#>  68:            0.081 2022-08-25 10:40:53       68 <ResampleResult[21]>
#>  69:            0.080 2022-08-25 10:40:53       69 <ResampleResult[21]>
#>  70:            0.079 2022-08-25 10:40:53       70 <ResampleResult[21]>
#>  71:            0.076 2022-08-25 10:40:54       71 <ResampleResult[21]>
#>  72:            0.076 2022-08-25 10:40:54       72 <ResampleResult[21]>
#>  73:            0.081 2022-08-25 10:40:54       73 <ResampleResult[21]>
#>  74:            0.087 2022-08-25 10:40:54       74 <ResampleResult[21]>
#>  75:            0.088 2022-08-25 10:40:55       75 <ResampleResult[21]>
#>  76:            0.082 2022-08-25 10:40:55       76 <ResampleResult[21]>
#>  77:            0.083 2022-08-25 10:40:55       77 <ResampleResult[21]>
#>  78:            0.082 2022-08-25 10:40:55       78 <ResampleResult[21]>
#>  79:            0.079 2022-08-25 10:40:55       79 <ResampleResult[21]>
#>  80:            0.070 2022-08-25 10:40:56       80 <ResampleResult[21]>
#>  81:            0.070 2022-08-25 10:40:56       81 <ResampleResult[21]>
#>  82:            0.068 2022-08-25 10:40:56       82 <ResampleResult[21]>
#>  83:            0.072 2022-08-25 10:40:56       83 <ResampleResult[21]>
#>  84:            0.068 2022-08-25 10:40:57       84 <ResampleResult[21]>
#>  85:            0.069 2022-08-25 10:40:57       85 <ResampleResult[21]>
#>  86:            0.072 2022-08-25 10:40:57       86 <ResampleResult[21]>
#>  87:            0.070 2022-08-25 10:40:57       87 <ResampleResult[21]>
#>  88:            0.070 2022-08-25 10:40:58       88 <ResampleResult[21]>
#>  89:            0.069 2022-08-25 10:40:58       89 <ResampleResult[21]>
#>  90:            0.068 2022-08-25 10:40:58       90 <ResampleResult[21]>
#>  91:            0.078 2022-08-25 10:40:58       91 <ResampleResult[21]>
#>  92:            0.083 2022-08-25 10:40:58       92 <ResampleResult[21]>
#>  93:            0.101 2022-08-25 10:40:59       93 <ResampleResult[21]>
#>  94:            0.091 2022-08-25 10:40:59       94 <ResampleResult[21]>
#>  95:            0.086 2022-08-25 10:40:59       95 <ResampleResult[21]>
#>  96:            0.083 2022-08-25 10:40:59       96 <ResampleResult[21]>
#>  97:            0.079 2022-08-25 10:41:00       97 <ResampleResult[21]>
#>  98:            0.081 2022-08-25 10:41:00       98 <ResampleResult[21]>
#>  99:            0.081 2022-08-25 10:41:00       99 <ResampleResult[21]>
#> 100:            0.081 2022-08-25 10:41:00      100 <ResampleResult[21]>
#>      runtime_learners           timestamp batch_nr      resample_result

# subset the task and fit the final model
task$select(instance$result_feature_set)
learner$train(task)
# }