Skip to content

Commit 0b515ab

Browse files
fix hparams type
1 parent 4f89f59 commit 0b515ab

File tree

4 files changed

+16
-0
lines changed

4 files changed

+16
-0
lines changed

prize_qualification_baselines/self_tuning/jax_nadamw_full_budget.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
"""Submission file for an NAdamW optimizer with warmup+cosine LR in Jax."""
22

3+
import collections
34
import functools
45

56
# isort: off
@@ -34,6 +35,9 @@
3435
"weight_decay": 0.08121616522670176,
3536
"warmup_factor": 0.02
3637
}
38+
HPARAMS = collections.namedtuple(
39+
'Hyperparameters',
40+
HPARAMS.keys())(**HPARAMS)
3741

3842

3943
# Forked from

prize_qualification_baselines/self_tuning/jax_nadamw_target_setting.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
"""Submission file for an NAdamW optimizer with warmup+cosine LR in Jax."""
22

3+
import collections
34
import functools
45

56
# isort: off
@@ -34,6 +35,9 @@
3435
"weight_decay": 0.08121616522670176,
3536
"warmup_factor": 0.02
3637
}
38+
HPARAMS = collections.namedtuple(
39+
'Hyperparameters',
40+
HPARAMS.keys())(**HPARAMS)
3741

3842

3943
# Forked from

prize_qualification_baselines/self_tuning/pytorch_nadamw_full_budget.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
"""Submission file for an NAdamW optimizer with warmup+cosine LR in PyTorch."""
22

3+
import collections
34
import math
45
from typing import Any, Dict, Iterator, List, Optional, Tuple
56

@@ -24,6 +25,9 @@
2425
"weight_decay": 0.08121616522670176,
2526
"warmup_factor": 0.02
2627
}
28+
HPARAMS = collections.namedtuple(
29+
'Hyperparameters',
30+
HPARAMS.keys())(**HPARAMS)
2731

2832

2933
# Modified from github.com/pytorch/pytorch/blob/v1.12.1/torch/optim/adamw.py.

prize_qualification_baselines/self_tuning/pytorch_nadamw_target_setting.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
"""Submission file for an NAdamW optimizer with warmup+cosine LR in PyTorch."""
22

3+
import collections
34
import math
45
from typing import Any, Dict, Iterator, List, Optional, Tuple
56

@@ -24,6 +25,9 @@
2425
"weight_decay": 0.08121616522670176,
2526
"warmup_factor": 0.02
2627
}
28+
HPARAMS = collections.namedtuple(
29+
'Hyperparameters',
30+
HPARAMS.keys())(**HPARAMS)
2731

2832

2933
# Modified from github.com/pytorch/pytorch/blob/v1.12.1/torch/optim/adamw.py.

0 commit comments

Comments (0)