Skip to content

Commit ed13e81

Browse files
Merge pull request #852 from Niccolo-Ajroldi/fix_self_tuning_hparams
Fix self-tuning submissions' hyperparameters definition
2 parents 5cc62ea + 2db00eb commit ed13e81

File tree

2 files changed

+4
-0
lines changed

2 files changed

+4
-0
lines changed

prize_qualification_baselines/self_tuning/pytorch_nadamw_full_budget.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
"""Submission file for an NAdamW optimizer with warmup+cosine LR in PyTorch."""
22

3+
import collections
34
import math
45
from typing import Any, Dict, Iterator, List, Optional, Tuple
56

@@ -24,6 +25,7 @@
2425
"weight_decay": 0.08121616522670176,
2526
"warmup_factor": 0.02
2627
}
28+
HPARAMS = collections.namedtuple('Hyperparameters', HPARAMS.keys())(**HPARAMS)
2729

2830

2931
# Modified from github.com/pytorch/pytorch/blob/v1.12.1/torch/optim/adamw.py.

prize_qualification_baselines/self_tuning/pytorch_nadamw_target_setting.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
"""Submission file for an NAdamW optimizer with warmup+cosine LR in PyTorch."""
22

3+
import collections
34
import math
45
from typing import Any, Dict, Iterator, List, Optional, Tuple
56

@@ -24,6 +25,7 @@
2425
"weight_decay": 0.08121616522670176,
2526
"warmup_factor": 0.02
2627
}
28+
HPARAMS = collections.namedtuple('Hyperparameters', HPARAMS.keys())(**HPARAMS)
2729

2830

2931
# Modified from github.com/pytorch/pytorch/blob/v1.12.1/torch/optim/adamw.py.

0 commit comments

Comments (0)