From 60d5b3c8ab9bf66a3bac269226dd4ce95b37c448 Mon Sep 17 00:00:00 2001
From: 郑特驹
Date: Mon, 7 Jul 2025 19:38:16 +0800
Subject: [PATCH] [built-in][PyTorch] Pin the dependency versions of
 Bert_Chinese_ID3433 and allow the MAPPO model to use the private (internal)
 format in specific scenarios
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../nlp/Bert_Chinese_ID3433_for_PyTorch/requirements.txt     | 4 ++--
 .../rl/MAPPO_for_PyTorch/onpolicy/scripts/train/train_mpe.py | 3 +++
 PyTorch/contrib/nlp/roberta_for_PyTorch/README.md            | 4 ++--
 3 files changed, 7 insertions(+), 4 deletions(-)

diff --git a/PyTorch/built-in/nlp/Bert_Chinese_ID3433_for_PyTorch/requirements.txt b/PyTorch/built-in/nlp/Bert_Chinese_ID3433_for_PyTorch/requirements.txt
index e07523b532..28b43304e7 100644
--- a/PyTorch/built-in/nlp/Bert_Chinese_ID3433_for_PyTorch/requirements.txt
+++ b/PyTorch/built-in/nlp/Bert_Chinese_ID3433_for_PyTorch/requirements.txt
@@ -1,6 +1,6 @@
 accelerate
-datasets >= 1.8.0
-tokenizers
+datasets == 1.18.3
+tokenizers == 0.11.1
 sentencepiece == 0.1.96
 protobuf == 3.14
 wikiextractor
diff --git a/PyTorch/built-in/rl/MAPPO_for_PyTorch/onpolicy/scripts/train/train_mpe.py b/PyTorch/built-in/rl/MAPPO_for_PyTorch/onpolicy/scripts/train/train_mpe.py
index a030eaeeb3..9ef5cb82fd 100644
--- a/PyTorch/built-in/rl/MAPPO_for_PyTorch/onpolicy/scripts/train/train_mpe.py
+++ b/PyTorch/built-in/rl/MAPPO_for_PyTorch/onpolicy/scripts/train/train_mpe.py
@@ -21,6 +21,9 @@ import numpy as np
 from pathlib import Path
 import torch
 import torch_npu
+if torch_npu.npu.utils.get_soc_version() == 251:
+    torch_npu.npu.config.allow_internal_format = True
+
 from torch_npu.contrib import transfer_to_npu
 from onpolicy.config import get_config
 from onpolicy.envs.mpe.MPE_env import MPEEnv
diff --git a/PyTorch/contrib/nlp/roberta_for_PyTorch/README.md b/PyTorch/contrib/nlp/roberta_for_PyTorch/README.md
index a33954a66e..c8bb74298c 100644
--- a/PyTorch/contrib/nlp/roberta_for_PyTorch/README.md
+++ b/PyTorch/contrib/nlp/roberta_for_PyTorch/README.md
@@ -86,7 +86,7 @@ RoBERTa improves on BERT in model scale, compute, and training data.
 
 1. Obtain the dataset.
 
-   To download the `SST-2` dataset, refer to `examples/roberta/preprocess_GLUE_tasks.sh`.
+   Users download the `SST-2` dataset on their own; refer to `examples/roberta/preprocess_GLUE_tasks.sh`.
 
    The `SST-2` dataset directory structure is shown below for reference.
 
@@ -114,7 +114,7 @@
 
 2. Obtain the pre-trained model
 
-   Download the pre-trained model `RoBERTa.base` and extract it under the source package path: "./pre_train_model/RoBERTa.base/model.pt".
+   Users download the pre-trained model `RoBERTa.base` on their own and extract it under the source package path: "./pre_train_model/RoBERTa.base/model.pt".
 
 # Start training
-- 
Gitee
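
The MAPPO hunk above gates an NPU configuration flag on the detected SoC version at import time. Below is a minimal standalone sketch of that pattern, assuming the torch_npu APIs used in the patch (torch_npu.npu.utils.get_soc_version and torch_npu.npu.config.allow_internal_format); the helper name maybe_allow_internal_format and the default SoC code are illustrative only, not part of the repository.

    import torch
    import torch_npu

    def maybe_allow_internal_format(target_soc: int = 251) -> bool:
        """Enable the NPU private (internal) memory format on a matching SoC.

        Returns True if the flag was enabled, False otherwise.
        """
        if torch_npu.npu.utils.get_soc_version() == target_soc:
            # Private formats can speed up some ops on this SoC but are not
            # portable across devices, so enable them only on an exact match.
            torch_npu.npu.config.allow_internal_format = True
            return True
        return False

    if __name__ == "__main__":
        print("internal format enabled:", maybe_allow_internal_format())

Gating the flag behind an explicit SoC check, rather than setting it unconditionally, keeps the script's default behaviour unchanged on every other device.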