Skip to content

Commit f16be1e

Browse files
authored
Fix Qwen2.5 Omni SinusoidsPositionEmbedding precision
fixes QwenLM/Qwen2.5-Omni#271
1 parent b11b28c commit f16be1e

File tree

1 file changed

+1
-1
lines changed

1 file changed

+1
-1
lines changed

src/transformers/models/qwen2_5_omni/modeling_qwen2_5_omni.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -794,7 +794,7 @@ def __init__(self, length, channels, max_timescale=10000):
794794
if channels % 2 != 0:
795795
raise ValueError("SinusoidsPositionEmbedding needs even channels input")
796796
log_timescale_increment = np.log(max_timescale) / (channels // 2 - 1)
797-
inv_timescales = torch.exp(-log_timescale_increment * torch.arange(channels // 2)).float()
797+
inv_timescales = torch.exp(-log_timescale_increment * torch.arange(channels // 2).float())
798798
scaled_time = torch.arange(length)[:, np.newaxis] * inv_timescales[np.newaxis, :]
799799
self.register_buffer(
800800
"positional_embedding",

0 commit comments

Comments (0)