kryox64 committed on
Commit
0a3df69
1 Parent(s): 3f6827e

Update range

Browse files

Signed-off-by: Aadhitya A <[email protected]>

Files changed (1) hide show
  1. app.py +14 -14
app.py CHANGED
@@ -555,13 +555,13 @@ def modelTFT(csv_file, prax):
555
  model_path="optuna_test",
556
  n_trials=5,
557
  max_epochs=MAX_EPOCHS,
558
- gradient_clip_val_range=(0.01, 0.5),
559
- hidden_size_range=(8, 64),
560
- hidden_continuous_size_range=(8, 24),
561
- attention_head_size_range=(1, 4),
562
- learning_rate_range=(0.01, 0.1),
563
- dropout_range=(0.1, 0.3),
564
- trainer_kwargs=dict(limit_train_batches=30),
565
  reduce_on_plateau_patience=4,
566
  pruner=optuna.pruners.MedianPruner(n_min_trials=3, n_startup_trials=3),
567
  use_learning_rate_finder=False, # use Optuna to find ideal learning rate or use in-built learning rate finder
@@ -787,13 +787,13 @@ def modelTFT_OpenGap(csv_file, prax):
787
  model_path="optuna_test",
788
  n_trials=5,
789
  max_epochs=MAX_EPOCHS,
790
- gradient_clip_val_range=(0.01, 0.5),
791
- hidden_size_range=(8, 64),
792
- hidden_continuous_size_range=(8, 24),
793
- attention_head_size_range=(1, 4),
794
- learning_rate_range=(0.01, 0.1),
795
- dropout_range=(0.1, 0.3),
796
- trainer_kwargs=dict(limit_train_batches=30),
797
  reduce_on_plateau_patience=4,
798
  pruner=optuna.pruners.MedianPruner(n_min_trials=3, n_warmup_steps=3),
799
  use_learning_rate_finder=False, # use Optuna to find ideal learning rate or use in-built learning rate finder
 
555
  model_path="optuna_test",
556
  n_trials=5,
557
  max_epochs=MAX_EPOCHS,
558
+ gradient_clip_val_range=(0.01, 0.3),
559
+ hidden_size_range=(8, 24),
560
+ hidden_continuous_size_range=(8, 12),
561
+ attention_head_size_range=(2, 4),
562
+ learning_rate_range=(0.01, 0.05),
563
+ dropout_range=(0.1, 0.25),
564
+ trainer_kwargs=dict(limit_train_batches=20),
565
  reduce_on_plateau_patience=4,
566
  pruner=optuna.pruners.MedianPruner(n_min_trials=3, n_startup_trials=3),
567
  use_learning_rate_finder=False, # use Optuna to find ideal learning rate or use in-built learning rate finder
 
787
  model_path="optuna_test",
788
  n_trials=5,
789
  max_epochs=MAX_EPOCHS,
790
+ gradient_clip_val_range=(0.01, 0.3),
791
+ hidden_size_range=(8, 24),
792
+ hidden_continuous_size_range=(8, 12),
793
+ attention_head_size_range=(2, 4),
794
+ learning_rate_range=(0.01, 0.05),
795
+ dropout_range=(0.1, 0.25),
796
+ trainer_kwargs=dict(limit_train_batches=20),
797
  reduce_on_plateau_patience=4,
798
  pruner=optuna.pruners.MedianPruner(n_min_trials=3, n_warmup_steps=3),
799
  use_learning_rate_finder=False, # use Optuna to find ideal learning rate or use in-built learning rate finder