From bda3460a92284d17cef1a91ac089c92370556c26 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E5=A7=9C=E5=A5=94?= Date: Fri, 8 Apr 2022 18:49:38 +0800 Subject: [PATCH] Fix KeyError when NODE_RANK is unset: read it via os.getenv with default 0 --- .../DistributedResnet50/main_apex_d76_npu.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/PyTorch/built-in/cv/classification/ResNet50_for_PyTorch/DistributedResnet50/main_apex_d76_npu.py b/PyTorch/built-in/cv/classification/ResNet50_for_PyTorch/DistributedResnet50/main_apex_d76_npu.py index 3176314e25..63cff61578 100644 --- a/PyTorch/built-in/cv/classification/ResNet50_for_PyTorch/DistributedResnet50/main_apex_d76_npu.py +++ b/PyTorch/built-in/cv/classification/ResNet50_for_PyTorch/DistributedResnet50/main_apex_d76_npu.py @@ -416,7 +416,7 @@ def main_worker(gpu, ngpus_per_node, args): args.rank = args.rank * ngpus_per_node + gpu if args.device == 'npu': - args.rank = int(os.environ["NODE_RANK"]) * 8 + args.rank + args.rank = int(os.getenv("NODE_RANK", 0)) * 8 + args.rank print("the global_rank is :", args.rank) dist.init_process_group(backend=args.dist_backend, world_size=args.world_size, rank=args.rank) -- Gitee