veas_pilot_forecast_cv_xgboost.yaml
# @package _global_
# example of hyperparameter optimization of an experiment with Optuna:
# python train.py -m hparams_search=mnist_optuna experiment=example
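# to launch this particular sweep (assuming this file lives in the hparams_search
# config group, like the template example above):
# python train.py -m hparams_search=veas_pilot_forecast_cv_xgboost
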
defaults:
  - veas_pilot_cv_base
  - override /experiment: veas_pilot_forecast
  - override /model: veas_pilot_xgboost_forecast
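
# fixed model options that are not part of the sweep:
# lags_future_covariates is produced by Hydra-instantiating builtins.tuple with the
# single argument [${model_lags}, 0], which yields the tuple (${model_lags}, 0),
# i.e. ${model_lags} past lags and no future lags of the future covariates
# (the tuple form accepted by Darts-style regression models; this is an assumption,
# the exact semantics depend on the model class set in veas_pilot_xgboost_forecast).
# early_stopping_rounds is assumed to be forwarded to XGBoost's early stopping.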
model:
  lags_future_covariates:
    _target_: builtins.tuple
    _args_:
      - - ${model_lags}
        - 0
  early_stopping_rounds: 10

# Optuna hyperparameter search is defined below;
# it optimizes the value returned by the function decorated with @hydra.main
# docs: https://hydra.cc/docs/next/plugins/optuna_sweeper
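# search-space notation (Hydra's extended override grammar used by the Optuna sweeper):
#   choice(a, b, ...)          -> categorical choice
#   range(start, stop[, step]) -> discrete values between start and stop
#   interval(low, high)        -> continuous range
# keys prefixed with ++ use Hydra's force-add syntax: the option is added to the
# model config if missing and overridden if already present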
hydra:
  sweeper:
    study_name: veas_pilot-forecast-cv-xgboost
    params:
      model_lags: range(1, ${max_lags})
      ++model.output_chunk_length: choice(1, 2, 3, ${eval.kwargs.forecast_horizon})
      ++model.max_depth: range(3, 10)
      ++model.learning_rate: interval(1e-3, 1e-1)
      ++model.reg_lambda: range(0.0, 4.5, 0.5)
      ++model.reg_alpha: range(0.0, 4.5, 0.5)
      ++model.min_child_weight: range(1, 6, 2)
      ++model.subsample: range(0.4, 1.0, 0.1)
      ++model.colsample_bytree: range(0.4, 1.0, 0.1)
      use_inputs.oxygen: choice(True, False)
      use_inputs.ammonium: choice(True, False)
      use_inputs.filterpressure_1: choice(True, False)
      use_inputs.filterpressure_8: choice(True, False)
      use_inputs.turb: choice(True, False)
      use_inputs.temp: choice(True, False)
      use_inputs.methanol: choice(True, False)
      use_inputs.orto-p: choice(True, False)
      use_inputs.tunnelwater: choice(True, False)
      # TODO: add gamma?
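      # (gamma is XGBoost's min_split_loss regularization term; if added, it could
      #  be swept e.g. as ++model.gamma: interval(0.0, 5.0), where the bounds are
      #  only an illustrative suggestion)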