diff --git a/PyTorch/built-in/nlp/Bert_Chinese_ID3433_for_PyTorch/requirements.txt b/PyTorch/built-in/nlp/Bert_Chinese_ID3433_for_PyTorch/requirements.txt
index e07523b532f34a513e3cc5336db4ae3607d3f236..28b43304e76f939f47fad944f07af80c5756cd23 100644
--- a/PyTorch/built-in/nlp/Bert_Chinese_ID3433_for_PyTorch/requirements.txt
+++ b/PyTorch/built-in/nlp/Bert_Chinese_ID3433_for_PyTorch/requirements.txt
@@ -1,6 +1,6 @@
 accelerate
-datasets >= 1.8.0
-tokenizers
+datasets == 1.18.3
+tokenizers == 0.11.1
 sentencepiece == 0.1.96
 protobuf == 3.14
 wikiextractor
diff --git a/PyTorch/built-in/rl/MAPPO_for_PyTorch/onpolicy/scripts/train/train_mpe.py b/PyTorch/built-in/rl/MAPPO_for_PyTorch/onpolicy/scripts/train/train_mpe.py
index a030eaeeb3ff5877dfafac4b12c1eeb132a87d87..9ef5cb82fd6c12a3956227c8c529e270d4f9a156 100644
--- a/PyTorch/built-in/rl/MAPPO_for_PyTorch/onpolicy/scripts/train/train_mpe.py
+++ b/PyTorch/built-in/rl/MAPPO_for_PyTorch/onpolicy/scripts/train/train_mpe.py
@@ -21,6 +21,9 @@ import numpy as np
 from pathlib import Path
 import torch
 import torch_npu
+if torch_npu.npu.utils.get_soc_version() == 251:
+    torch_npu.npu.config.allow_internal_format = True
+
 from torch_npu.contrib import transfer_to_npu
 from onpolicy.config import get_config
 from onpolicy.envs.mpe.MPE_env import MPEEnv
diff --git a/PyTorch/contrib/nlp/roberta_for_PyTorch/README.md b/PyTorch/contrib/nlp/roberta_for_PyTorch/README.md
index a33954a66ecb7a3fbc6db71044eafae5a6d96805..c8bb74298cb515a2dc213ec3469ee43a779c27b0 100644
--- a/PyTorch/contrib/nlp/roberta_for_PyTorch/README.md
+++ b/PyTorch/contrib/nlp/roberta_for_PyTorch/README.md
@@ -86,7 +86,7 @@ RoBERTa improves on BERT to some extent in model size, compute, and training data.
 
 1. Obtain the dataset.
 
-   Download the `SST-2` dataset; refer to `examples/roberta/preprocess_GLUE_tasks.sh`.
+   Users must download the `SST-2` dataset themselves; refer to `examples/roberta/preprocess_GLUE_tasks.sh`.
 
    The `SST-2` dataset directory structure is shown below for reference.
 
@@ -114,7 +114,7 @@ RoBERTa improves on BERT to some extent in model size, compute, and training data.
 
 2. Obtain the pre-trained model.
 
-   Download the pre-trained model `RoBERTa.base` and extract it under the source package path: "./pre_train_model/RoBERTa.base/model.pt".
+   Users must download the pre-trained model `RoBERTa.base` themselves and extract it under the source package path: "./pre_train_model/RoBERTa.base/model.pt".
 
 # Start Training
 
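
The `train_mpe.py` hunk above configures the Ascend NPU backend before `transfer_to_npu` patches PyTorch calls. As a standalone illustration, here is a minimal sketch of that guard, using only the `torch_npu` attributes that appear in the diff itself; the meaning of SoC version `251` (presumably one specific Ascend chip generation) and the `ImportError` fallback are assumptions for this sketch, not part of the patch.

```python
# Minimal sketch of the guard added to train_mpe.py.
# Assumption: SoC version 251 identifies the Ascend generation on which
# device-private ("internal") tensor formats are beneficial; the diff
# does not document the mapping.
try:
    import torch_npu  # Ascend NPU adapter for PyTorch

    if torch_npu.npu.utils.get_soc_version() == 251:
        # Let the NPU keep tensors in private layouts, which can
        # improve operator performance on this SoC.
        torch_npu.npu.config.allow_internal_format = True
except ImportError:
    # Host without the Ascend stack (assumed fallback); nothing to set.
    pass
```

Note that the patch places this block before `from torch_npu.contrib import transfer_to_npu`, so the format setting is already in effect when the torch-to-NPU redirection is installed.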