
Commit 8e73b58 (parent: 9e7ae56)

Use SDP on BF16 in GPU/HPU migration

Signed-off-by: Daniel Socek <[email protected]>

1 file changed: src/diffusers/pipelines/pipeline_utils.py (+5, -0)
```diff
@@ -504,6 +504,11 @@ def module_is_offloaded(module):
             os.environ["PT_HPU_MAX_COMPOUND_OP_SIZE"] = "1"
             logger.debug("Environment variable set: PT_HPU_MAX_COMPOUND_OP_SIZE=1")
 
+            if dtype in (torch.bfloat16, None) and kwargs.pop("sdp_on_bf16", True):
+                if hasattr(torch._C, "_set_math_sdp_allow_fp16_bf16_reduction"):
+                    torch._C._set_math_sdp_allow_fp16_bf16_reduction(True)
+                    logger.warning(f"Enabled SDP with BF16 precision on HPU. To disable, please use `.to('hpu', sdp_on_bf16=False)`")
+
         module_names, _ = self._get_signature_keys(self)
         modules = [getattr(self, n, None) for n in module_names]
         modules = [m for m in modules if isinstance(m, torch.nn.Module)]
```
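For context, a minimal usage sketch of the new behavior, assuming the standard diffusers `DiffusionPipeline.to()` API; the checkpoint name is illustrative. When a pipeline is moved to HPU with a BF16 (or unspecified) dtype, math SDP with BF16 reduction is now enabled by default, and the `sdp_on_bf16` keyword named in the warning opts out:

```python
import torch
from diffusers import DiffusionPipeline

# Illustrative checkpoint; any diffusers pipeline takes the same path.
pipe = DiffusionPipeline.from_pretrained(
    "stabilityai/stable-diffusion-2-1", torch_dtype=torch.bfloat16
)

# dtype is BF16, so .to("hpu") enables SDP with BF16 reduction by default
# (and emits the warning added in this commit).
pipe.to("hpu")

# Opt out of the new default with the kwarg named in the warning:
pipe.to("hpu", sdp_on_bf16=False)
```

Note that the `hasattr` guard in the diff makes the change a no-op on PyTorch builds that do not expose `torch._C._set_math_sdp_allow_fp16_bf16_reduction`.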
