We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 30c3e94 commit d900910Copy full SHA for d900910
vllm_ascend/ops/rotary_embedding.py
@@ -287,8 +287,8 @@ def mrope_forward(
287
) -> Tuple[torch.Tensor, torch.Tensor]:
288
import torch_npu
289
mrope_section = [0, 0, 0] if positions.ndim == 1 else self.mrope_section
290
-
291
- query, key = torch_npu.npu_mrope(positions.clone(),
+ positions = positions.clone().detach().contiguous()
+ query, key = torch_npu.npu_mrope(positions,
292
query.contiguous(),
293
key.contiguous(),
294
self.cos_sin_cache.contiguous(),
0 commit comments