Skip to content

Commit 4d8eb22

Browse files
authored
Remove double transpose flag in MHA fusion (#2483)
The double_transpose option that controlled the MHA fusion is no longer used after the recent simplifications, so remove this flag. (This was overlooked in the recent PR.) Signed-off-by: Ganesan Ramalingam <[email protected]>
1 parent c219dce commit 4d8eb22

File tree

1 file changed

+0
-5
lines changed
  • onnxscript/rewriter/ort_fusions

1 file changed

+0
-5
lines changed

onnxscript/rewriter/ort_fusions/mha.py

Lines changed: 0 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -37,13 +37,11 @@ def __init__(
3737
self,
3838
name,
3939
*,
40-
double_transpose: bool,
4140
is_rotary: bool,
4241
has_past_present: bool,
4342
is_cross_attention: bool,
4443
):
4544
super().__init__(name)
46-
self._double_transpose = double_transpose
4745
self._is_rotary = is_rotary
4846
self._has_past_present = has_past_present
4947
self._is_cross_attention = is_cross_attention
@@ -345,12 +343,10 @@ def rewrite(
345343
def _make_rule_set(has_past_present: bool):
346344
parameter_combinations = [
347345
{
348-
"double_transpose": double_transpose,
349346
"is_rotary": is_rotary,
350347
"has_past_present": has_past_present,
351348
"is_cross_attention": is_cross_attention,
352349
}
353-
for double_transpose in [False, True]
354350
for is_rotary in [False, True]
355351
for is_cross_attention in ([False] if has_past_present else [False, True])
356352
]
@@ -360,7 +356,6 @@ def _make_rule_set(has_past_present: bool):
360356
[
361357
MultiHeadAttention.rule(
362358
f"MHA"
363-
f"{'_Twice' if params['double_transpose'] else ''}"
364359
f"{'_Rotary' if params['is_rotary'] else ''}"
365360
f"{'_Past' if params['has_past_present'] else ''}"
366361
f"{'_CrossAttention' if params['is_cross_attention'] else ''}",

0 commit comments

Comments (0)