From e40cd398acdc720aa5a8c26de04700857c652411 Mon Sep 17 00:00:00 2001
From: kongdeshuo <1670690897@qq.com>
Date: Tue, 29 Jul 2025 16:48:06 +0800
Subject: [PATCH] fix _process_partial_rope import error

---
 .../core/models/common/embeddings/rotary_pos_embedding.py      | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mindspeed_llm/mindspore/core/models/common/embeddings/rotary_pos_embedding.py b/mindspeed_llm/mindspore/core/models/common/embeddings/rotary_pos_embedding.py
index 757b6264b..3fde25c15 100644
--- a/mindspeed_llm/mindspore/core/models/common/embeddings/rotary_pos_embedding.py
+++ b/mindspeed_llm/mindspore/core/models/common/embeddings/rotary_pos_embedding.py
@@ -1,4 +1,3 @@
-
 import torch
 from torch import Tensor
 import torch_npu
@@ -7,6 +6,7 @@ from megatron.core import parallel_state
 from megatron.core.models.common.embeddings.rotary_pos_embedding import _rotate_half, get_pos_emb_on_this_cp_rank
 from mindspeed.ops.npu_rotary_position_embedding import npu_rotary_position_embedding
 from mindspeed_llm.tasks.common.yarn_rope import YarnRotaryPositionEmbedding
+from mindspeed_llm.core.models.common.embeddings.rotary_pos_embedding import _process_partial_rope
 
 
 def apply_yarn_scaling(freqs: torch.Tensor):
--
Gitee