Description
Traceback (most recent call last):
File "/mnt/d/HunyuanWorld-1.0/demo_panogen.py", line 262, in
panorama_image = demo_T2P.run(
File "/mnt/d/HunyuanWorld-1.0/demo_panogen.py", line 89, in run
image = self.pipe(
File "/home/hamad/anaconda3/envs/hworld/lib/python3.10/site-packages/torch/utils/_contextlib.py", line 116, in decorate_context
return func(*args, **kwargs)
File "/mnt/d/HunyuanWorld-1.0/hy3dworld/models/pano_generator.py", line 37, in call
return self._call_shared(prompt=prompt, is_inpainting=False, early_steps=3, **kwargs)
File "/home/hamad/anaconda3/envs/hworld/lib/python3.10/site-packages/torch/utils/_contextlib.py", line 116, in decorate_context
return func(*args, **kwargs)
File "/mnt/d/HunyuanWorld-1.0/hy3dworld/models/pipelines.py", line 1015, in _call_shared
prompt_embeds, pooled_prompt_embeds, text_ids = self.encode_prompt(
File "/mnt/d/HunyuanWorld-1.0/hy3dworld/models/pipelines.py", line 384, in encode_prompt
pooled_prompt_embeds = self._get_clip_prompt_embeds(
File "/mnt/d/HunyuanWorld-1.0/hy3dworld/models/pipelines.py", line 345, in _get_clip_prompt_embeds
prompt_embeds = self.text_encoder(
File "/home/hamad/anaconda3/envs/hworld/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1736, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "/home/hamad/anaconda3/envs/hworld/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1747, in _call_impl
return forward_call(*args, **kwargs)
File "/home/hamad/anaconda3/envs/hworld/lib/python3.10/site-packages/accelerate/hooks.py", line 176, in new_forward
output = module._old_forward(*args, **kwargs)
File "/home/hamad/anaconda3/envs/hworld/lib/python3.10/site-packages/transformers/utils/generic.py", line 965, in wrapper
output = func(self, *args, **kwargs)
File "/home/hamad/anaconda3/envs/hworld/lib/python3.10/site-packages/transformers/models/clip/modeling_clip.py", line 1049, in forward
return self.text_model(
File "/home/hamad/anaconda3/envs/hworld/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1736, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "/home/hamad/anaconda3/envs/hworld/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1747, in _call_impl
return forward_call(*args, **kwargs)
File "/home/hamad/anaconda3/envs/hworld/lib/python3.10/site-packages/transformers/utils/generic.py", line 965, in wrapper
output = func(self, *args, **kwargs)
File "/home/hamad/anaconda3/envs/hworld/lib/python3.10/site-packages/transformers/models/clip/modeling_clip.py", line 958, in forward
encoder_outputs: BaseModelOutput = self.encoder(
File "/home/hamad/anaconda3/envs/hworld/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1736, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "/home/hamad/anaconda3/envs/hworld/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1747, in _call_impl
return forward_call(*args, **kwargs)
File "/home/hamad/anaconda3/envs/hworld/lib/python3.10/site-packages/transformers/utils/generic.py", line 965, in wrapper
output = func(self, *args, **kwargs)
File "/home/hamad/anaconda3/envs/hworld/lib/python3.10/site-packages/transformers/models/clip/modeling_clip.py", line 882, in forward
layer_outputs = encoder_layer(
File "/home/hamad/anaconda3/envs/hworld/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1736, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "/home/hamad/anaconda3/envs/hworld/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1747, in _call_impl
return forward_call(*args, **kwargs)
File "/home/hamad/anaconda3/envs/hworld/lib/python3.10/site-packages/transformers/models/clip/modeling_clip.py", line 614, in forward
hidden_states, attn_weights = self.self_attn(
File "/home/hamad/anaconda3/envs/hworld/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1736, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "/home/hamad/anaconda3/envs/hworld/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1747, in _call_impl
return forward_call(*args, **kwargs)
File "/home/hamad/anaconda3/envs/hworld/lib/python3.10/site-packages/transformers/models/clip/modeling_clip.py", line 546, in forward
attn_output = torch.nn.functional.scaled_dot_product_attention(
RuntimeError: cuDNN Frontend error: [cudnn_frontend] Error: No execution plans support the graph.
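For reference, a possible workaround sketch (not verified against this repo): the error is raised when torch.nn.functional.scaled_dot_product_attention selects the cuDNN backend, so excluding the cuDNN SDPA kernel before the pipeline runs may let PyTorch fall back to the flash / memory-efficient / math kernels. This assumes the cuDNN SDPA backend is indeed the failing path; the helper name below is hypothetical and not part of HunyuanWorld.

```python
# Sketch of a workaround, assuming the cuDNN SDPA backend is what fails.
# Run before constructing the pipeline (e.g. near the top of demo_panogen.py).
import torch

# Option 1: globally disable the cuDNN SDPA backend so
# scaled_dot_product_attention uses the other kernels instead.
torch.backends.cuda.enable_cudnn_sdp(False)

# Option 2: restrict backends only around the offending call
# using the sdpa_kernel context manager (PyTorch >= 2.3).
from torch.nn.attention import SDPBackend, sdpa_kernel

def run_without_cudnn_sdpa(fn, *args, **kwargs):
    """Hypothetical helper: call `fn` with the cuDNN SDPA backend excluded."""
    with sdpa_kernel([SDPBackend.FLASH_ATTENTION,
                      SDPBackend.EFFICIENT_ATTENTION,
                      SDPBackend.MATH]):
        return fn(*args, **kwargs)
```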