From c846dddffa32fc04ccf7281e2b42dce111979d30 Mon Sep 17 00:00:00 2001 From: yuhan Date: Wed, 6 Aug 2025 10:55:43 +0800 Subject: [PATCH] modify urls --- .../api/api_cn/API_sample_and_requirements.md | 6 +- .../source_en/api_python/bfloat16_support.md | 72 +- .../api_python/dynamic_shape_func.md | 482 ++++---- .../source_en/api_python/dynamic_shape_nn.md | 114 +- .../api_python/dynamic_shape_primitive.md | 428 +++---- .../source_en/api_python/env_var_list.rst | 26 +- .../api_python/operator_list_parallel.md | 338 +++--- .../source_en/faq/data_processing.md | 24 +- .../source_en/faq/distributed_parallel.md | 4 +- .../mindspore/source_en/faq/feature_advice.md | 4 +- .../source_en/faq/implement_problem.md | 16 +- docs/mindspore/source_en/faq/inference.md | 2 +- docs/mindspore/source_en/faq/installation.md | 4 +- .../source_en/faq/network_compilation.md | 14 +- .../source_en/faq/operators_compile.md | 22 +- .../source_en/faq/performance_tuning.md | 4 +- .../source_en/faq/precision_tuning.md | 2 +- .../features/compile/graph_construction.md | 12 +- .../features/compile/graph_optimization.md | 2 +- .../compile/multi_level_compilation.md | 10 +- .../source_en/features/data_engine.md | 20 +- docs/mindspore/source_en/features/overview.md | 8 +- .../features/parallel/auto_parallel.rst | 8 +- .../features/parallel/data_parallel.md | 14 +- .../features/parallel/operator_parallel.md | 18 +- .../features/parallel/optimizer_parallel.md | 12 +- .../features/parallel/pipeline_parallel.md | 24 +- .../features/runtime/memory_manager.md | 14 +- .../features/runtime/multilevel_pipeline.md | 6 +- .../runtime/multistream_concurrency.md | 6 +- .../note/api_mapping/pytorch_api_mapping.md | 1034 ++++++++--------- .../note/api_mapping/pytorch_diff/AGNEWS.md | 4 +- .../pytorch_diff/AmazonReviewFull.md | 4 +- .../pytorch_diff/AmazonReviewPolarity.md | 4 +- .../api_mapping/pytorch_diff/AmplitudeToDB.md | 4 +- .../note/api_mapping/pytorch_diff/CIFAR10.md | 4 +- .../note/api_mapping/pytorch_diff/CIFAR100.md | 4 +- .../api_mapping/pytorch_diff/CMUARCTIC.md | 4 +- .../note/api_mapping/pytorch_diff/CelebA.md | 4 +- .../api_mapping/pytorch_diff/Cityscapes.md | 4 +- .../pytorch_diff/CoNLL2000Chunking.md | 4 +- .../api_mapping/pytorch_diff/CocoDataset.md | 4 +- .../note/api_mapping/pytorch_diff/DBpedia.md | 4 +- .../api_mapping/pytorch_diff/DataLoader.md | 4 +- .../pytorch_diff/DistributedSampler.md | 4 +- .../pytorch_diff/FrequencyMasking.md | 4 +- .../note/api_mapping/pytorch_diff/GTZAN.md | 4 +- .../api_mapping/pytorch_diff/GriffinLim.md | 4 +- .../note/api_mapping/pytorch_diff/IMDB.md | 4 +- .../api_mapping/pytorch_diff/IWSLT2016.md | 4 +- .../api_mapping/pytorch_diff/IWSLT2017.md | 4 +- .../api_mapping/pytorch_diff/ImageFolder.md | 4 +- .../pytorch_diff/InverseMelScale.md | 4 +- .../note/api_mapping/pytorch_diff/LIBRITTS.md | 4 +- .../note/api_mapping/pytorch_diff/LJSPEECH.md | 4 +- .../note/api_mapping/pytorch_diff/Lookup.md | 4 +- .../note/api_mapping/pytorch_diff/MNIST.md | 4 +- .../note/api_mapping/pytorch_diff/MelScale.md | 4 +- .../pytorch_diff/MelSpectrogram.md | 4 +- .../note/api_mapping/pytorch_diff/Ngram.md | 4 +- .../api_mapping/pytorch_diff/Normalize.md | 4 +- .../api_mapping/pytorch_diff/PennTreebank.md | 4 +- .../api_mapping/pytorch_diff/RandomAffine.md | 4 +- .../pytorch_diff/RandomPerspective.md | 4 +- .../pytorch_diff/RandomResizedCrop.md | 4 +- .../pytorch_diff/RandomRotation.md | 4 +- .../api_mapping/pytorch_diff/RandomSampler.md | 4 +- .../api_mapping/pytorch_diff/RegexReplace.md | 4 +- 
.../note/api_mapping/pytorch_diff/Resample.md | 4 +- .../pytorch_diff/SPEECHCOMMANDS.md | 4 +- .../note/api_mapping/pytorch_diff/SQuAD1.md | 4 +- .../note/api_mapping/pytorch_diff/SQuAD2.md | 4 +- .../SentencePieceTokenizer_Out_INT.md | 4 +- .../SentencePieceTokenizer_Out_STRING.md | 4 +- .../pytorch_diff/SequentialSampler.md | 4 +- .../api_mapping/pytorch_diff/SogouNews.md | 4 +- .../pytorch_diff/SpectralCentroid.md | 4 +- .../api_mapping/pytorch_diff/Spectrogram.md | 4 +- .../pytorch_diff/SubsetRandomSampler.md | 4 +- .../note/api_mapping/pytorch_diff/TEDLIUM.md | 4 +- .../api_mapping/pytorch_diff/TimeMasking.md | 4 +- .../note/api_mapping/pytorch_diff/ToPIL.md | 4 +- .../note/api_mapping/pytorch_diff/ToTensor.md | 4 +- .../note/api_mapping/pytorch_diff/TypeCast.md | 4 +- .../note/api_mapping/pytorch_diff/UDPOS.md | 4 +- .../api_mapping/pytorch_diff/VOCDetection.md | 4 +- .../pytorch_diff/VOCSegmentation.md | 4 +- .../pytorch_diff/WeightedRandomSampler.md | 4 +- .../pytorch_diff/WhitespaceTokenizer.md | 4 +- .../api_mapping/pytorch_diff/WikiText103.md | 4 +- .../api_mapping/pytorch_diff/WikiText2.md | 4 +- .../note/api_mapping/pytorch_diff/YESNO.md | 4 +- .../api_mapping/pytorch_diff/YahooAnswers.md | 4 +- .../pytorch_diff/YelpReviewFull.md | 4 +- .../pytorch_diff/YelpReviewPolarity.md | 4 +- .../api_mapping/pytorch_diff/checkpoint.md | 4 +- .../api_mapping/pytorch_diff/deform_conv2d.md | 4 +- .../api_mapping/pytorch_diff/load_sp_model.md | 4 +- .../note/api_mapping/pytorch_diff/nms.md | 4 +- .../api_mapping/pytorch_diff/roi_align.md | 4 +- .../api_python/bfloat16_support.md | 72 +- .../api_python/dynamic_shape_func.md | 482 ++++---- .../api_python/dynamic_shape_nn.md | 114 +- .../api_python/dynamic_shape_primitive.md | 428 +++---- .../source_zh_cn/api_python/env_var_list.rst | 28 +- .../api_python/operator_list_parallel.md | 338 +++--- .../source_zh_cn/faq/data_processing.md | 24 +- .../source_zh_cn/faq/distributed_parallel.md | 4 +- .../source_zh_cn/faq/feature_advice.md | 4 +- .../source_zh_cn/faq/implement_problem.md | 16 +- docs/mindspore/source_zh_cn/faq/inference.md | 2 +- .../source_zh_cn/faq/installation.md | 2 +- .../source_zh_cn/faq/network_compilation.md | 14 +- .../source_zh_cn/faq/operators_compile.md | 22 +- .../source_zh_cn/faq/performance_tuning.md | 4 +- .../source_zh_cn/faq/precision_tuning.md | 2 +- docs/mindspore/source_zh_cn/features/amp.md | 4 +- .../features/compile/graph_construction.ipynb | 12 +- .../features/compile/graph_optimization.md | 2 +- .../compile/multi_level_compilation.md | 8 +- .../source_zh_cn/features/data_engine.md | 16 +- docs/mindspore/source_zh_cn/features/mint.md | 6 +- .../source_zh_cn/features/overview.md | 8 +- .../features/parallel/auto_parallel.rst | 8 +- .../features/parallel/data_parallel.md | 14 +- .../features/parallel/operator_parallel.md | 18 +- .../features/parallel/optimizer_parallel.md | 8 +- .../features/parallel/pipeline_parallel.md | 12 +- .../features/runtime/memory_manager.md | 10 +- .../features/runtime/multilevel_pipeline.md | 2 +- .../runtime/multistream_concurrency.md | 2 +- docs/mindspore/source_zh_cn/features/view.md | 2 +- .../note/api_mapping/pytorch_api_mapping.md | 1034 ++++++++--------- .../note/api_mapping/pytorch_diff/AGNEWS.md | 4 +- .../pytorch_diff/AmazonReviewFull.md | 4 +- .../pytorch_diff/AmazonReviewPolarity.md | 4 +- .../api_mapping/pytorch_diff/AmplitudeToDB.md | 4 +- .../note/api_mapping/pytorch_diff/CIFAR10.md | 4 +- .../note/api_mapping/pytorch_diff/CIFAR100.md | 4 +- 
.../api_mapping/pytorch_diff/CMUARCTIC.md | 4 +- .../note/api_mapping/pytorch_diff/CelebA.md | 4 +- .../api_mapping/pytorch_diff/Cityscapes.md | 4 +- .../pytorch_diff/CoNLL2000Chunking.md | 4 +- .../api_mapping/pytorch_diff/CocoDataset.md | 4 +- .../note/api_mapping/pytorch_diff/DBpedia.md | 4 +- .../api_mapping/pytorch_diff/DataLoader.md | 4 +- .../pytorch_diff/DistributedSampler.md | 4 +- .../pytorch_diff/FrequencyMasking.md | 4 +- .../note/api_mapping/pytorch_diff/GTZAN.md | 4 +- .../api_mapping/pytorch_diff/GriffinLim.md | 4 +- .../note/api_mapping/pytorch_diff/IMDB.md | 4 +- .../api_mapping/pytorch_diff/IWSLT2016.md | 4 +- .../api_mapping/pytorch_diff/IWSLT2017.md | 4 +- .../api_mapping/pytorch_diff/ImageFolder.md | 4 +- .../pytorch_diff/InverseMelScale.md | 4 +- .../note/api_mapping/pytorch_diff/LIBRITTS.md | 4 +- .../note/api_mapping/pytorch_diff/LJSPEECH.md | 4 +- .../note/api_mapping/pytorch_diff/Lookup.md | 4 +- .../note/api_mapping/pytorch_diff/MNIST.md | 4 +- .../note/api_mapping/pytorch_diff/MelScale.md | 4 +- .../pytorch_diff/MelSpectrogram.md | 4 +- .../note/api_mapping/pytorch_diff/Ngram.md | 4 +- .../api_mapping/pytorch_diff/Normalize.md | 4 +- .../api_mapping/pytorch_diff/PennTreebank.md | 4 +- .../api_mapping/pytorch_diff/RandomAffine.md | 4 +- .../pytorch_diff/RandomPerspective.md | 4 +- .../pytorch_diff/RandomResizedCrop.md | 4 +- .../pytorch_diff/RandomRotation.md | 4 +- .../api_mapping/pytorch_diff/RandomSampler.md | 4 +- .../api_mapping/pytorch_diff/RegexReplace.md | 4 +- .../note/api_mapping/pytorch_diff/Resample.md | 4 +- .../pytorch_diff/SPEECHCOMMANDS.md | 4 +- .../note/api_mapping/pytorch_diff/SQuAD1.md | 4 +- .../note/api_mapping/pytorch_diff/SQuAD2.md | 4 +- .../SentencePieceTokenizer_Out_INT.md | 4 +- .../SentencePieceTokenizer_Out_STRING.md | 4 +- .../pytorch_diff/SequentialSampler.md | 4 +- .../api_mapping/pytorch_diff/SogouNews.md | 4 +- .../pytorch_diff/SpectralCentroid.md | 4 +- .../api_mapping/pytorch_diff/Spectrogram.md | 4 +- .../pytorch_diff/SubsetRandomSampler.md | 4 +- .../note/api_mapping/pytorch_diff/TEDLIUM.md | 4 +- .../api_mapping/pytorch_diff/TimeMasking.md | 4 +- .../note/api_mapping/pytorch_diff/ToPIL.md | 4 +- .../note/api_mapping/pytorch_diff/ToTensor.md | 4 +- .../note/api_mapping/pytorch_diff/TypeCast.md | 4 +- .../note/api_mapping/pytorch_diff/UDPOS.md | 4 +- .../api_mapping/pytorch_diff/VOCDetection.md | 4 +- .../pytorch_diff/VOCSegmentation.md | 4 +- .../pytorch_diff/WeightedRandomSampler.md | 4 +- .../pytorch_diff/WhitespaceTokenizer.md | 4 +- .../api_mapping/pytorch_diff/WikiText103.md | 4 +- .../api_mapping/pytorch_diff/WikiText2.md | 4 +- .../note/api_mapping/pytorch_diff/YESNO.md | 4 +- .../api_mapping/pytorch_diff/YahooAnswers.md | 4 +- .../pytorch_diff/YelpReviewFull.md | 4 +- .../pytorch_diff/YelpReviewPolarity.md | 4 +- .../api_mapping/pytorch_diff/checkpoint.md | 4 +- .../api_mapping/pytorch_diff/deform_conv2d.md | 4 +- .../api_mapping/pytorch_diff/load_sp_model.md | 4 +- .../note/api_mapping/pytorch_diff/nms.md | 4 +- .../api_mapping/pytorch_diff/roi_align.md | 4 +- install/mindspore_ascend_install_conda.md | 4 +- install/mindspore_ascend_install_conda_en.md | 4 +- install/mindspore_ascend_install_docker.md | 2 +- install/mindspore_ascend_install_docker_en.md | 2 +- install/mindspore_ascend_install_pip.md | 4 +- install/mindspore_ascend_install_pip_en.md | 4 +- install/mindspore_ascend_install_source.md | 6 +- install/mindspore_ascend_install_source_en.md | 6 +- install/mindspore_cpu_install_conda.md | 4 +- 
install/mindspore_cpu_install_conda_en.md | 4 +- install/mindspore_cpu_install_docker.md | 4 +- install/mindspore_cpu_install_docker_en.md | 4 +- install/mindspore_cpu_install_nightly.md | 4 +- install/mindspore_cpu_install_nightly_en.md | 4 +- install/mindspore_cpu_install_pip.md | 4 +- install/mindspore_cpu_install_pip_en.md | 4 +- install/mindspore_cpu_install_source.md | 6 +- install/mindspore_cpu_install_source_en.md | 6 +- install/mindspore_cpu_mac_install_conda.md | 4 +- install/mindspore_cpu_mac_install_conda_en.md | 4 +- install/mindspore_cpu_mac_install_nightly.md | 4 +- .../mindspore_cpu_mac_install_nightly_en.md | 4 +- install/mindspore_cpu_mac_install_pip.md | 4 +- install/mindspore_cpu_mac_install_pip_en.md | 4 +- install/mindspore_cpu_mac_install_source.md | 4 +- .../mindspore_cpu_mac_install_source_en.md | 4 +- install/mindspore_cpu_win_install_conda.md | 4 +- install/mindspore_cpu_win_install_conda_en.md | 4 +- install/mindspore_cpu_win_install_nightly.md | 4 +- .../mindspore_cpu_win_install_nightly_en.md | 4 +- install/mindspore_cpu_win_install_pip.md | 4 +- install/mindspore_cpu_win_install_pip_en.md | 4 +- install/mindspore_cpu_win_install_source.md | 8 +- .../mindspore_cpu_win_install_source_en.md | 8 +- install/mindspore_gpu_install_conda.md | 4 +- install/mindspore_gpu_install_conda_en.md | 4 +- install/mindspore_gpu_install_nightly.md | 4 +- install/mindspore_gpu_install_nightly_en.md | 4 +- install/mindspore_gpu_install_pip.md | 4 +- install/mindspore_gpu_install_pip_en.md | 4 +- install/mindspore_gpu_install_source.md | 6 +- install/mindspore_gpu_install_source_en.md | 6 +- install/third_party/msys_software_install.md | 2 +- .../third_party/msys_software_install_en.md | 2 +- .../third_party/third_party_cpu_install.md | 4 +- tools/generate_html/run.py | 2 +- .../beginner/accelerate_with_static_graph.md | 12 +- tutorials/source_en/beginner/autograd.md | 12 +- tutorials/source_en/beginner/dataset.md | 26 +- tutorials/source_en/beginner/introduction.md | 8 +- tutorials/source_en/beginner/model.md | 20 +- tutorials/source_en/beginner/quick_start.md | 22 +- tutorials/source_en/beginner/save_load.md | 16 +- tutorials/source_en/beginner/tensor.md | 16 +- tutorials/source_en/beginner/train.md | 12 +- tutorials/source_en/compile/operators.md | 2 +- .../compile/python_builtin_functions.md | 14 +- tutorials/source_en/compile/statements.md | 4 +- tutorials/source_en/compile/static_graph.md | 42 +- .../static_graph_expert_programming.md | 12 +- .../source_en/custom_program/fusion_pass.md | 2 +- .../source_en/custom_program/hook_program.md | 14 +- .../source_en/custom_program/op_custom.rst | 12 +- .../operation/cpp_api_for_custom_ops.md | 26 +- .../custom_program/operation/op_custom_adv.md | 6 +- .../custom_program/operation/op_custom_aot.md | 24 +- .../operation/op_custom_ascendc.md | 14 +- .../operation/op_custom_prim.rst | 18 +- .../operation/op_customopbuilder.md | 12 +- .../operation/op_customopbuilder_asdsip.md | 8 +- .../operation/op_customopbuilder_atb.md | 8 +- tutorials/source_en/cv/fcn8s.md | 14 +- tutorials/source_en/cv/resnet50.md | 16 +- tutorials/source_en/cv/ssd.md | 34 +- tutorials/source_en/cv/transfer_learning.md | 4 +- tutorials/source_en/cv/vit.md | 26 +- tutorials/source_en/dataset/augment.md | 4 +- tutorials/source_en/dataset/cache.md | 8 +- .../source_en/dataset/dataset_autotune.md | 10 +- .../source_en/dataset/dataset_offload.md | 2 +- tutorials/source_en/dataset/eager.md | 18 +- tutorials/source_en/dataset/optimize.ipynb | 38 +- 
tutorials/source_en/dataset/overview.md | 52 +- tutorials/source_en/dataset/python_objects.md | 2 +- tutorials/source_en/dataset/record.ipynb | 12 +- tutorials/source_en/dataset/sampler.md | 12 +- tutorials/source_en/debug/dryrun.md | 6 +- tutorials/source_en/debug/dump.md | 24 +- tutorials/source_en/debug/error_analysis.rst | 26 +- .../debug/error_analysis/cann_error_cases.md | 2 +- .../error_analysis/error_scenario_analysis.md | 44 +- .../debug/error_analysis/minddata_debug.md | 10 +- .../source_en/debug/error_analysis/mindir.md | 2 +- .../debug/error_analysis/mindrt_debug.md | 6 +- tutorials/source_en/debug/profiler.md | 28 +- tutorials/source_en/debug/pynative.md | 10 +- tutorials/source_en/debug/sdc.md | 4 +- tutorials/source_en/generative/cyclegan.md | 10 +- tutorials/source_en/generative/dcgan.md | 12 +- tutorials/source_en/generative/diffusion.md | 12 +- tutorials/source_en/generative/gan.md | 12 +- tutorials/source_en/generative/pix2pix.md | 10 +- .../model_infer/lite_infer/overview.md | 2 +- .../model_migration/model_migration.md | 22 +- tutorials/source_en/nlp/sentiment_analysis.md | 22 +- tutorials/source_en/nlp/sequence_labeling.md | 2 +- tutorials/source_en/orange_pi/dev_start.md | 16 +- .../source_en/orange_pi/environment_setup.md | 34 +- tutorials/source_en/orange_pi/model_infer.md | 12 +- tutorials/source_en/orange_pi/overview.md | 8 +- tutorials/source_en/parallel/comm_fusion.md | 12 +- tutorials/source_en/parallel/data_parallel.md | 12 +- tutorials/source_en/parallel/dataset_slice.md | 10 +- .../source_en/parallel/distributed_case.rst | 4 +- .../distributed_gradient_accumulation.md | 12 +- .../source_en/parallel/dynamic_cluster.md | 14 +- .../high_dimension_tensor_parallel.md | 16 +- .../parallel/host_device_training.md | 18 +- tutorials/source_en/parallel/mpirun.md | 4 +- .../source_en/parallel/msrun_launcher.md | 20 +- tutorials/source_en/parallel/multiple_copy.md | 14 +- .../source_en/parallel/multiple_mixed.md | 6 +- .../source_en/parallel/operator_parallel.md | 22 +- .../source_en/parallel/optimize_technique.rst | 22 +- .../source_en/parallel/optimizer_parallel.md | 8 +- tutorials/source_en/parallel/overview.md | 32 +- .../source_en/parallel/pipeline_parallel.md | 14 +- tutorials/source_en/parallel/rank_table.md | 4 +- tutorials/source_en/parallel/recompute.md | 16 +- .../source_en/parallel/split_technique.md | 12 +- .../source_en/parallel/startup_method.rst | 12 +- .../source_en/parallel/strategy_select.md | 10 +- .../train_availability/fault_recover.md | 6 +- .../train_availability/graceful_exit.md | 6 +- .../accelerate_with_static_graph.ipynb | 12 +- .../source_zh_cn/beginner/autograd.ipynb | 12 +- tutorials/source_zh_cn/beginner/dataset.ipynb | 28 +- .../source_zh_cn/beginner/introduction.ipynb | 8 +- tutorials/source_zh_cn/beginner/model.ipynb | 20 +- .../source_zh_cn/beginner/quick_start.ipynb | 22 +- .../source_zh_cn/beginner/save_load.ipynb | 20 +- tutorials/source_zh_cn/beginner/tensor.ipynb | 18 +- tutorials/source_zh_cn/beginner/train.ipynb | 12 +- tutorials/source_zh_cn/compile/operators.md | 2 +- .../compile/python_builtin_functions.ipynb | 14 +- .../source_zh_cn/compile/statements.ipynb | 4 +- .../source_zh_cn/compile/static_graph.md | 42 +- .../static_graph_expert_programming.ipynb | 16 +- .../custom_program/fusion_pass.md | 2 +- .../custom_program/hook_program.ipynb | 14 +- .../source_zh_cn/custom_program/op_custom.rst | 10 +- .../operation/cpp_api_for_custom_ops.md | 26 +- .../operation/op_custom_adv.ipynb | 6 +- 
.../custom_program/operation/op_custom_aot.md | 24 +- .../operation/op_custom_ascendc.md | 18 +- .../operation/op_custom_prim.ipynb | 16 +- .../operation/op_customopbuilder.md | 12 +- .../operation/op_customopbuilder_asdsip.md | 8 +- .../operation/op_customopbuilder_atb.md | 8 +- tutorials/source_zh_cn/cv/fcn8s.ipynb | 14 +- tutorials/source_zh_cn/cv/resnet50.ipynb | 16 +- tutorials/source_zh_cn/cv/ssd.ipynb | 34 +- .../source_zh_cn/cv/transfer_learning.ipynb | 6 +- tutorials/source_zh_cn/cv/vit.ipynb | 26 +- tutorials/source_zh_cn/dataset/augment.ipynb | 4 +- tutorials/source_zh_cn/dataset/cache.ipynb | 12 +- .../source_zh_cn/dataset/dataset_autotune.md | 10 +- .../source_zh_cn/dataset/dataset_offload.md | 2 +- tutorials/source_zh_cn/dataset/eager.ipynb | 20 +- tutorials/source_zh_cn/dataset/optimize.ipynb | 38 +- tutorials/source_zh_cn/dataset/overview.ipynb | 52 +- .../source_zh_cn/dataset/python_objects.ipynb | 2 +- tutorials/source_zh_cn/dataset/record.ipynb | 16 +- tutorials/source_zh_cn/dataset/sampler.ipynb | 16 +- tutorials/source_zh_cn/debug/dryrun.md | 6 +- tutorials/source_zh_cn/debug/dump.md | 24 +- .../source_zh_cn/debug/error_analysis.rst | 24 +- .../debug/error_analysis/cann_error_cases.md | 2 +- .../error_analysis/error_scenario_analysis.md | 38 +- .../debug/error_analysis/minddata_debug.md | 10 +- .../debug/error_analysis/mindir.md | 2 +- .../debug/error_analysis/mindrt_debug.md | 6 +- tutorials/source_zh_cn/debug/profiler.md | 28 +- tutorials/source_zh_cn/debug/pynative.md | 10 +- tutorials/source_zh_cn/debug/sdc.md | 4 +- .../source_zh_cn/generative/cyclegan.ipynb | 10 +- tutorials/source_zh_cn/generative/dcgan.ipynb | 12 +- .../source_zh_cn/generative/diffusion.ipynb | 12 +- tutorials/source_zh_cn/generative/gan.ipynb | 12 +- .../source_zh_cn/generative/pix2pix.ipynb | 10 +- .../source_zh_cn/model_infer/introduction.md | 2 +- .../model_infer/lite_infer/overview.md | 2 +- .../ms_infer/ms_infer_model_infer.rst | 6 +- .../ms_infer/ms_infer_model_serving_infer.md | 2 +- .../ms_infer/ms_infer_network_develop.md | 4 +- .../ms_infer/ms_infer_parallel_infer.md | 4 +- .../ms_infer/ms_infer_quantization.md | 4 +- .../model_migration/model_migration.md | 20 +- .../source_zh_cn/nlp/sentiment_analysis.ipynb | 22 +- .../source_zh_cn/nlp/sequence_labeling.ipynb | 2 +- .../source_zh_cn/orange_pi/dev_start.ipynb | 16 +- .../orange_pi/environment_setup.md | 4 +- .../source_zh_cn/orange_pi/model_infer.md | 6 +- tutorials/source_zh_cn/orange_pi/overview.md | 8 +- .../source_zh_cn/parallel/comm_fusion.md | 12 +- .../source_zh_cn/parallel/data_parallel.md | 12 +- .../source_zh_cn/parallel/dataset_slice.md | 10 +- .../parallel/distributed_case.rst | 4 +- .../distributed_gradient_accumulation.md | 12 +- .../source_zh_cn/parallel/dynamic_cluster.md | 14 +- .../high_dimension_tensor_parallel.md | 10 +- .../parallel/host_device_training.md | 14 +- tutorials/source_zh_cn/parallel/mpirun.md | 4 +- .../source_zh_cn/parallel/msrun_launcher.md | 20 +- .../source_zh_cn/parallel/multiple_copy.md | 12 +- .../source_zh_cn/parallel/multiple_mixed.md | 6 +- .../parallel/operator_parallel.md | 22 +- .../parallel/optimize_technique.rst | 22 +- .../parallel/optimizer_parallel.md | 8 +- tutorials/source_zh_cn/parallel/overview.md | 32 +- .../parallel/pipeline_parallel.md | 14 +- tutorials/source_zh_cn/parallel/rank_table.md | 4 +- tutorials/source_zh_cn/parallel/recompute.md | 12 +- .../source_zh_cn/parallel/split_technique.md | 6 +- .../source_zh_cn/parallel/startup_method.rst | 12 +- 
.../source_zh_cn/parallel/strategy_select.md | 10 +- .../train_availability/fault_recover.md | 6 +- .../train_availability/graceful_exit.md | 6 +- 430 files changed, 4356 insertions(+), 4356 deletions(-) diff --git a/docs/mindspore/api/api_cn/API_sample_and_requirements.md b/docs/mindspore/api/api_cn/API_sample_and_requirements.md index dd0995371d..732757bbd5 100644 --- a/docs/mindspore/api/api_cn/API_sample_and_requirements.md +++ b/docs/mindspore/api/api_cn/API_sample_and_requirements.md @@ -270,7 +270,7 @@ 教程样例: - `Graph Mode加速 - `_ + `_ ``` @@ -284,7 +284,7 @@ 教程样例: - `Graph Mode加速 - `_ + `_ ``` @@ -390,7 +390,7 @@ 例: - 请参考 `tensor `_ 。 + 请参考 `tensor `_ 。 ``` 请注意,链接文本和 URL 的开头 < 之间必须有一个空格,且整体的前后需要有空格。 diff --git a/docs/mindspore/source_en/api_python/bfloat16_support.md b/docs/mindspore/source_en/api_python/bfloat16_support.md index ce91e56990..636260bcb5 100644 --- a/docs/mindspore/source_en/api_python/bfloat16_support.md +++ b/docs/mindspore/source_en/api_python/bfloat16_support.md @@ -1,6 +1,6 @@ # bfloat16 Datatype Support Status -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/api_python/bfloat16_support.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/api_python/bfloat16_support.md) ## Overview @@ -15,38 +15,38 @@ FP16 format has 5 bits of exponent and 10 bits of mantissa, while BF16 has 8 bit |API Name|Ascend|Descriptions| |:----|:---------|:----| -|[mindspore.Tensor.asnumpy](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.asnumpy.html)|❌|Since numpy does not support bfloat16 data type, it is not possible to convert a tensor of bfloat16 type to numpy type.| -|[mindspore.amp.auto_mixed_precision](https://www.mindspore.cn/docs/en/master/api_python/amp/mindspore.amp.auto_mixed_precision.html)|✔️|When using the auto-mixed-precision interface, you can specify bfloat16 as the low-precision data type.| -|[mindspore.amp.custom_mixed_precision](https://www.mindspore.cn/docs/en/master/api_python/amp/mindspore.amp.custom_mixed_precision.html)|✔️|When using the custom-mixed-precision interface, you can specify bfloat16 as the low-precision data type.| -|[mindspore.load_checkpoint](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.load_checkpoint.html)|✔️|| -|[mindspore.save_checkpoint](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.save_checkpoint.html)|✔️|| -|[mindspore.ops.Add](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Add.html)|✔️|| -|[mindspore.ops.AddN](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.AddN.html)|✔️|| -|[mindspore.ops.AllGather](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.AllGather.html)|✔️|| -|[mindspore.ops.AllReduce](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.AllReduce.html)|✔️|| -|[mindspore.ops.AssignAdd](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.AssignAdd.html)|✔️|| -|[mindspore.ops.BatchMatMul](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.BatchMatMul.html)|✔️|| -|[mindspore.ops.Broadcast](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Broadcast.html)|✔️|| 
-|[mindspore.ops.Cast](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Cast.html)|✔️|| -|[mindspore.ops.Equal](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Equal.html)|✔️|| -|[mindspore.ops.Exp](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Exp.html)|✔️|| -|[mindspore.ops.FastGeLU](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.FastGeLU.html)|✔️|| -|[mindspore.ops.GreaterEqual](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.GreaterEqual.html)|✔️|| -|[mindspore.ops.LayerNorm](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.LayerNorm.html)|✔️|| -|[mindspore.ops.LessEqual](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.LessEqual.html)|✔️|| -|[mindspore.ops.MatMul](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.MatMul.html)|✔️|| -|[mindspore.ops.Maximum](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Maximum.html)|✔️|| -|[mindspore.ops.Minimum](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Minimum.html)|✔️|| -|[mindspore.ops.Mul](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Mul.html)|✔️|| -|[mindspore.ops.NotEqual](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.NotEqual.html)|✔️|| -|[mindspore.ops.RealDiv](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.RealDiv.html)|✔️|| -|[mindspore.ops.ReduceMean](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ReduceMean.html)|✔️|| -|[mindspore.ops.ReduceScatter](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ReduceScatter.html)|✔️|| -|[mindspore.ops.ReduceSum](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ReduceSum.html)|✔️|| -|[mindspore.ops.Select](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Select.html)|✔️|| -|[mindspore.ops.Softmax](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Softmax.html)|✔️|| -|[mindspore.ops.Sqrt](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Sqrt.html)|✔️|| -|[mindspore.ops.Square](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Square.html)|✔️|| -|[mindspore.ops.Sub](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Sub.html)|✔️|| -|[mindspore.ops.Tile](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Tile.html)|✔️|| -|[mindspore.ops.Transpose](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Transpose.html)|✔️|| +|[mindspore.Tensor.asnumpy](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.asnumpy.html)|❌|Since numpy does not support bfloat16 data type, it is not possible to convert a tensor of bfloat16 type to numpy type.| +|[mindspore.amp.auto_mixed_precision](https://www.mindspore.cn/docs/en/br_base/api_python/amp/mindspore.amp.auto_mixed_precision.html)|✔️|When using the auto-mixed-precision interface, you can specify bfloat16 as the low-precision data type.| +|[mindspore.amp.custom_mixed_precision](https://www.mindspore.cn/docs/en/br_base/api_python/amp/mindspore.amp.custom_mixed_precision.html)|✔️|When using the custom-mixed-precision interface, you can specify bfloat16 as the low-precision data type.| +|[mindspore.load_checkpoint](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.load_checkpoint.html)|✔️|| 
+|[mindspore.save_checkpoint](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.save_checkpoint.html)|✔️|| +|[mindspore.ops.Add](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Add.html)|✔️|| +|[mindspore.ops.AddN](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.AddN.html)|✔️|| +|[mindspore.ops.AllGather](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.AllGather.html)|✔️|| +|[mindspore.ops.AllReduce](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.AllReduce.html)|✔️|| +|[mindspore.ops.AssignAdd](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.AssignAdd.html)|✔️|| +|[mindspore.ops.BatchMatMul](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.BatchMatMul.html)|✔️|| +|[mindspore.ops.Broadcast](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Broadcast.html)|✔️|| +|[mindspore.ops.Cast](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Cast.html)|✔️|| +|[mindspore.ops.Equal](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Equal.html)|✔️|| +|[mindspore.ops.Exp](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Exp.html)|✔️|| +|[mindspore.ops.FastGeLU](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.FastGeLU.html)|✔️|| +|[mindspore.ops.GreaterEqual](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.GreaterEqual.html)|✔️|| +|[mindspore.ops.LayerNorm](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.LayerNorm.html)|✔️|| +|[mindspore.ops.LessEqual](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.LessEqual.html)|✔️|| +|[mindspore.ops.MatMul](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.MatMul.html)|✔️|| +|[mindspore.ops.Maximum](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Maximum.html)|✔️|| +|[mindspore.ops.Minimum](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Minimum.html)|✔️|| +|[mindspore.ops.Mul](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Mul.html)|✔️|| +|[mindspore.ops.NotEqual](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.NotEqual.html)|✔️|| +|[mindspore.ops.RealDiv](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.RealDiv.html)|✔️|| +|[mindspore.ops.ReduceMean](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ReduceMean.html)|✔️|| +|[mindspore.ops.ReduceScatter](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ReduceScatter.html)|✔️|| +|[mindspore.ops.ReduceSum](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ReduceSum.html)|✔️|| +|[mindspore.ops.Select](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Select.html)|✔️|| +|[mindspore.ops.Softmax](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Softmax.html)|✔️|| +|[mindspore.ops.Sqrt](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Sqrt.html)|✔️|| +|[mindspore.ops.Square](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Square.html)|✔️|| +|[mindspore.ops.Sub](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Sub.html)|✔️|| +|[mindspore.ops.Tile](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Tile.html)|✔️|| 
+|[mindspore.ops.Transpose](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Transpose.html)|✔️|| diff --git a/docs/mindspore/source_en/api_python/dynamic_shape_func.md b/docs/mindspore/source_en/api_python/dynamic_shape_func.md index b1cc0adf0d..b3532e6010 100644 --- a/docs/mindspore/source_en/api_python/dynamic_shape_func.md +++ b/docs/mindspore/source_en/api_python/dynamic_shape_func.md @@ -1,8 +1,8 @@ # Dynamic Shape Support Status of functional Interface -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/api_python/dynamic_shape_func.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/api_python/dynamic_shape_func.md) -> The following list provides functional interfaces that support dynamic shape functionality in PYNATIVE mode. However, some functional interfaces may have incomplete data type support. If you encounter such issues, you can resolve them by manually incorporating the [Cast](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Cast.html) operator. +> The following list provides functional interfaces that support dynamic shape functionality in PYNATIVE mode. However, some functional interfaces may have incomplete data type support. If you encounter such issues, you can resolve them by manually incorporating the [Cast](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Cast.html) operator. > > Functional interfaces outside of this list have limited support for dynamic shape functionality and may fail to execute. Additionally, in graph mode, dynamic shape functionality is also limited and may result in execution failures. 
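As a minimal sketch of the workaround these notes describe — assuming an Ascend backend, PYNATIVE mode, and a MindSpore build where `mindspore.bfloat16` is available — the `Cast` support listed above can be used both to produce a bfloat16 input and to convert a bfloat16 result back to float32 before `asnumpy()` (which, per the bfloat16 table, does not accept bfloat16). Calling the same functional interface with two different input shapes stands in for the dynamic-shape usage the list refers to:

```python
import numpy as np
import mindspore as ms
from mindspore import Tensor, ops

# PYNATIVE mode, as assumed by the dynamic shape support list above.
ms.set_context(mode=ms.PYNATIVE_MODE)

def abs_as_numpy(x: Tensor) -> np.ndarray:
    """Apply ops.abs and return a NumPy array, casting bfloat16 away first."""
    y = ops.abs(x)
    if y.dtype == ms.bfloat16:
        # Tensor.asnumpy() does not support bfloat16, so insert an explicit
        # cast to float32 before converting to NumPy.
        y = ops.cast(y, ms.float32)
    return y.asnumpy()

# Build bfloat16 inputs via ops.cast (NumPy itself has no bfloat16 dtype),
# then call the same functional interface with two different shapes.
x1 = ops.cast(Tensor(np.random.randn(2, 3).astype(np.float32)), ms.bfloat16)
x2 = ops.cast(Tensor(np.random.randn(4, 5, 6).astype(np.float32)), ms.bfloat16)
print(abs_as_numpy(x1).shape)  # (2, 3)
print(abs_as_numpy(x2).shape)  # (4, 5, 6)
```

The same cast-first pattern applies when a listed interface lacks support for a particular dtype; interfaces outside the list, or graph mode, may still fail as the note above states.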
> @@ -10,242 +10,242 @@ | API name | Ascend | GPU | CPU | | :--- |:-------- | :------- |:---------| -|[mindspore.ops.abs](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.abs.html)|✔️|✔️|✔️| -|[mindspore.ops.acos](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.acos.html)|✔️|✔️|✔️| -|[mindspore.ops.acosh](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.acosh.html)|✔️|✔️|✔️| -|[mindspore.ops.add](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.add.html)|✔️|✔️|✔️| -|[mindspore.ops.addcdiv](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.addcdiv.html)|✔️|✔️|✔️| -|[mindspore.ops.addcmul](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.addcmul.html)|✔️|✔️|✔️| -|[mindspore.ops.addmm](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.addmm.html)|✔️|✔️|✔️| -|[mindspore.ops.addn](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.addn.html)|✔️|✔️|✔️| -|[mindspore.ops.all](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.all.html)|✔️|✔️|✔️| -|[mindspore.ops.amax](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.amax.html)|✔️|✔️|✔️| -|[mindspore.ops.amin](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.amin.html)|✔️|✔️|✔️| -|[mindspore.ops.angle](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.angle.html)|✔️|✔️|✔️| -|[mindspore.ops.any](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.any.html)|✔️|✔️|✔️| -|[mindspore.ops.argmax](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.argmax.html)|✔️|✔️|✔️| -|[mindspore.ops.argmin](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.argmin.html)|✔️|✔️|✔️| -|[mindspore.ops.argsort](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.argsort.html)|✔️|✔️|✔️| -|[mindspore.ops.asin](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.asin.html)|✔️|✔️|✔️| -|[mindspore.ops.asinh](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.asinh.html)|✔️|✔️|✔️| -|[mindspore.ops.assign](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.assign.html)|✔️|✔️|✔️| -|[mindspore.ops.assign_add](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.assign_add.html)|✔️|✔️|✔️| -|[mindspore.ops.atan](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.atan.html)|✔️|✔️|✔️| -|[mindspore.ops.atan2](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.atan2.html)|✔️|✔️|✔️| -|[mindspore.ops.atanh](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.atanh.html)|✔️|✔️|✔️| -|[mindspore.ops.baddbmm](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.baddbmm.html)|✔️|✔️|✔️| -|[mindspore.ops.bernoulli](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.bernoulli.html)|❌|✔️|✔️| -|[mindspore.ops.bessel_i0](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.bessel_i0.html)|❌|✔️|✔️| -|[mindspore.ops.bessel_i0e](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.bessel_i0e.html)|✔️|✔️|✔️| -|[mindspore.ops.bessel_i1](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.bessel_i1.html)|❌|✔️|✔️| -|[mindspore.ops.bessel_i1e](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.bessel_i1e.html)|✔️|✔️|✔️| 
-|[mindspore.ops.bessel_j0](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.bessel_j0.html)|❌|✔️|✔️| -|[mindspore.ops.bessel_j1](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.bessel_j1.html)|❌|✔️|✔️| -|[mindspore.ops.bias_add](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.bias_add.html)|❌|✔️|✔️| -|[mindspore.ops.bincount](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.bincount.html)|✔️|✔️|✔️| -|[mindspore.ops.bitwise_and](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.bitwise_and.html)|✔️|✔️|✔️| -|[mindspore.ops.bitwise_left_shift](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.bitwise_left_shift.html)|✔️|✔️|✔️| -|[mindspore.ops.bitwise_or](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.bitwise_or.html)|✔️|✔️|✔️| -|[mindspore.ops.bitwise_right_shift](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.bitwise_right_shift.html)|✔️|✔️|✔️| -|[mindspore.ops.bitwise_xor](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.bitwise_xor.html)|✔️|✔️|✔️| -|[mindspore.ops.bmm](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.bmm.html)|✔️|✔️|✔️| -|[mindspore.ops.broadcast_to](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.broadcast_to.html)|✔️|✔️|✔️| -|[mindspore.ops.ceil](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ceil.html)|✔️|✔️|✔️| -|[mindspore.ops.celu](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.celu.html)|✔️|✔️|✔️| -|[mindspore.ops.chunk](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.chunk.html)|❌|✔️|✔️| -|[mindspore.ops.clamp](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.clamp.html)|✔️|✔️|✔️| -|[mindspore.ops.clip_by_global_norm](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.clip_by_global_norm.html)|✔️|✔️|✔️| -|[mindspore.ops.clip_by_value](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.clip_by_value.html)|✔️|✔️|✔️| -|[mindspore.ops.concat](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.concat.html)|✔️|✔️|✔️| -|[mindspore.ops.conj](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.conj.html)|❌|✔️|✔️| -|[mindspore.ops.cos](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.cos.html)|✔️|✔️|✔️| -|[mindspore.ops.cosh](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.cosh.html)|✔️|✔️|✔️| -|[mindspore.ops.cross](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.cross.html)|✔️|❌|✔️| -|[mindspore.ops.cross_entropy](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.cross_entropy.html)|✔️|✔️|✔️| -|[mindspore.ops.cummax](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.cummax.html)|❌|✔️|✔️| -|[mindspore.ops.cummin](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.cummin.html)|✔️|✔️|✔️| -|[mindspore.ops.cumprod](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.cumprod.html)|❌|✔️|✔️| -|[mindspore.ops.cumsum](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.cumsum.html)|❌|✔️|✔️| -|[mindspore.ops.diag](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.diag.html)|✔️|✔️|✔️| -|[mindspore.ops.diag_embed](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.diag_embed.html)|✔️|✔️|✔️| 
-|[mindspore.ops.diagonal](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.diagonal.html)|✔️|✔️|✔️| -|[mindspore.ops.dist](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.dist.html)|✔️|✔️|✔️| -|[mindspore.ops.div](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.div.html)|✔️|✔️|✔️| -|[mindspore.ops.dot](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.dot.html)|✔️|✔️|✔️| -|[mindspore.ops.dropout](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.dropout.html)|✔️|✔️|✔️| -|[mindspore.ops.dropout2d](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.dropout2d.html)|✔️|✔️|✔️| -|[mindspore.ops.dropout3d](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.dropout3d.html)|✔️|✔️|✔️| -|[mindspore.ops.einsum](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.einsum.html)|❌|✔️|❌| -|[mindspore.ops.elu](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.elu.html)|✔️|✔️|✔️| -|[mindspore.ops.equal](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.equal.html)|✔️|✔️|✔️| -|[mindspore.ops.erf](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.erf.html)|✔️|✔️|✔️| -|[mindspore.ops.erfc](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.erfc.html)|✔️|✔️|✔️| -|[mindspore.ops.erfinv](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.erfinv.html)|✔️|✔️|✔️| -|[mindspore.ops.exp](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.exp.html)|✔️|✔️|✔️| -|[mindspore.ops.expand_dims](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.expand_dims.html)|✔️|✔️|✔️| -|[mindspore.ops.expm1](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.expm1.html)|✔️|✔️|✔️| -|[mindspore.ops.eye](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.eye.html)|✔️|✔️|✔️| -|[mindspore.ops.fill](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.fill.html)|✔️|✔️|✔️| -|[mindspore.ops.flatten](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.flatten.html)|✔️|✔️|✔️| -|[mindspore.ops.flip](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.flip.html)|✔️|✔️|✔️| -|[mindspore.ops.floor](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.floor.html)|✔️|✔️|✔️| -|[mindspore.ops.floor_div](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.floor_div.html)|✔️|✔️|✔️| -|[mindspore.ops.floor_mod](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.floor_mod.html)|✔️|✔️|✔️| -|[mindspore.ops.fmod](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.fmod.html)|✔️|✔️|✔️| -|[mindspore.ops.fold](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.fold.html)|✔️|✔️|✔️| -|[mindspore.ops.full](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.full.html)|✔️|✔️|✔️| -|[mindspore.ops.full_like](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.full_like.html)|✔️|✔️|✔️| -|[mindspore.ops.gather](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.gather.html)|✔️|✔️|✔️| -|[mindspore.ops.gather_elements](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.gather_elements.html)|✔️|✔️|✔️| -|[mindspore.ops.gather_nd](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.gather_nd.html)|✔️|✔️|✔️| 
-|[mindspore.ops.gcd](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.gcd.html)|✔️|✔️|✔️| -|[mindspore.ops.ge](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ge.html)|✔️|✔️|✔️| -|[mindspore.ops.gelu](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.gelu.html)|✔️|✔️|✔️| -|[mindspore.ops.geqrf](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.geqrf.html)|✔️|✔️|✔️| -|[mindspore.ops.ger](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ger.html)|✔️|✔️|✔️| -|[mindspore.ops.glu](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.glu.html)|✔️|✔️|✔️| -|[mindspore.ops.greater](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.greater.html)|✔️|✔️|✔️| -|[mindspore.ops.greater_equal](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.greater_equal.html)|✔️|✔️|✔️| -|[mindspore.ops.grid_sample](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.grid_sample.html)|✔️|✔️|✔️| -|[mindspore.ops.gt](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.gt.html)|✔️|✔️|✔️| -|[mindspore.ops.gumbel_softmax](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.gumbel_softmax.html)|✔️|✔️|✔️| -|[mindspore.ops.hardshrink](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.hardshrink.html)|✔️|✔️|✔️| -|[mindspore.ops.hardsigmoid](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.hardsigmoid.html)|✔️|✔️|✔️| -|[mindspore.ops.hardswish](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.hardswish.html)|✔️|✔️|✔️| -|[mindspore.ops.hardtanh](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.hardtanh.html)|✔️|✔️|✔️| -|[mindspore.ops.heaviside](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.heaviside.html)|✔️|✔️|✔️| -|[mindspore.ops.hypot](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.hypot.html)|✔️|✔️|✔️| -|[mindspore.ops.igammac](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.igammac.html)|✔️|✔️|✔️| -|[mindspore.ops.imag](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.imag.html)|✔️|✔️|✔️| -|[mindspore.ops.index_select](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.index_select.html)|✔️|✔️|✔️| -|[mindspore.ops.interpolate](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.interpolate.html)|❌|✔️|✔️| -|[mindspore.ops.inverse](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.inverse.html)|❌|✔️|✔️| -|[mindspore.ops.invert](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.invert.html)|✔️|✔️|✔️| -|[mindspore.ops.isfinite](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.isfinite.html)|✔️|✔️|✔️| -|[mindspore.ops.isinf](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.isinf.html)|✔️|✔️|✔️| -|[mindspore.ops.isnan](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.isnan.html)|✔️|✔️|✔️| -|[mindspore.ops.l1_loss](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.l1_loss.html)|✔️|✔️|✔️| -|[mindspore.ops.lcm](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.lcm.html)|✔️|✔️|✔️| -|[mindspore.ops.le](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.le.html)|✔️|✔️|✔️| -|[mindspore.ops.lerp](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.lerp.html)|✔️|✔️|✔️| 
-|[mindspore.ops.less](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.less.html)|✔️|✔️|✔️| -|[mindspore.ops.less_equal](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.less_equal.html)|✔️|✔️|✔️| -|[mindspore.ops.linspace](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.linspace.html)|✔️|✔️|✔️| -|[mindspore.ops.log](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.log.html)|✔️|✔️|✔️| -|[mindspore.ops.log_softmax](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.log_softmax.html)|✔️|✔️|✔️| -|[mindspore.ops.log10](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.log10.html)|✔️|✔️|✔️| -|[mindspore.ops.log1p](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.log1p.html)|✔️|✔️|✔️| -|[mindspore.ops.log2](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.log2.html)|✔️|✔️|✔️| -|[mindspore.ops.logical_and](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.logical_and.html)|✔️|✔️|✔️| -|[mindspore.ops.logical_not](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.logical_not.html)|✔️|✔️|✔️| -|[mindspore.ops.logical_or](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.logical_or.html)|✔️|✔️|✔️| -|[mindspore.ops.logical_xor](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.logical_xor.html)|✔️|❌|✔️| -|[mindspore.ops.logit](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.logit.html)|✔️|✔️|✔️| -|[mindspore.ops.logsumexp](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.logsumexp.html)|✔️|✔️|✔️| -|[mindspore.ops.lt](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.lt.html)|✔️|✔️|✔️| -|[mindspore.ops.margin_ranking_loss](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.margin_ranking_loss.html)|✔️|✔️|✔️| -|[mindspore.ops.masked_fill](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.masked_fill.html)|✔️|✔️|✔️| -|[mindspore.ops.masked_select](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.masked_select.html)|✔️|✔️|✔️| -|[mindspore.ops.matmul](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.matmul.html)|✔️|✔️|✔️| -|[mindspore.ops.matrix_solve](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.matrix_solve.html)|✔️|❌|✔️| -|[mindspore.ops.max](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.max.html)|✔️|✔️|✔️| -|[mindspore.ops.maximum](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.maximum.html)|✔️|✔️|✔️| -|[mindspore.ops.mean](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.mean.html)|✔️|✔️|✔️| -|[mindspore.ops.median](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.median.html)|❌|✔️|✔️| -|[mindspore.ops.meshgrid](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.meshgrid.html)|✔️|✔️|✔️| -|[mindspore.ops.min](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.min.html)|✔️|✔️|✔️| -|[mindspore.ops.minimum](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.minimum.html)|✔️|✔️|✔️| -|[mindspore.ops.mish](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.mish.html)|❌|✔️|✔️| -|[mindspore.ops.mse_loss](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.mse_loss.html)|✔️|✔️|✔️| 
-|[mindspore.ops.mul](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.mul.html)|✔️|✔️|✔️| -|[mindspore.ops.multinomial](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.multinomial.html)|✔️|✔️|✔️| -|[mindspore.ops.mv](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.mv.html)|✔️|✔️|✔️| -|[mindspore.ops.mvlgamma](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.mvlgamma.html)|✔️|✔️|✔️| -|[mindspore.ops.nan_to_num](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.nan_to_num.html)|✔️|❌|✔️| -|[mindspore.ops.narrow](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.narrow.html)|✔️|✔️|✔️| -|[mindspore.ops.ne](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ne.html)|✔️|✔️|✔️| -|[mindspore.ops.neg](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.neg.html)|✔️|✔️|✔️| -|[mindspore.ops.nll_loss](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.nll_loss.html)|✔️|✔️|✔️| -|[mindspore.ops.nonzero](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.nonzero.html)|✔️|✔️|✔️| -|[mindspore.ops.norm](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.norm.html)|❌|✔️|✔️| -|[mindspore.ops.normal](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.normal.html)|✔️|✔️|✔️| -|[mindspore.ops.numel](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.numel.html)|✔️|✔️|✔️| -|[mindspore.ops.one_hot](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.one_hot.html)|✔️|✔️|✔️| -|[mindspore.ops.ones](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ones.html)|✔️|✔️|✔️| -|[mindspore.ops.ones_like](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ones_like.html)|✔️|✔️|✔️| -|[mindspore.ops.pad](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.pad.html)|✔️|✔️|✔️| -|[mindspore.ops.polar](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.polar.html)|❌|✔️|✔️| -|[mindspore.ops.polygamma](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.polygamma.html)|❌|✔️|✔️| -|[mindspore.ops.pow](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.pow.html)|✔️|✔️|✔️| -|[mindspore.ops.prelu](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.prelu.html)|✔️|✔️|✔️| -|[mindspore.ops.prod](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.prod.html)|✔️|✔️|✔️| -|[mindspore.ops.rand](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.rand.html)|✔️|✔️|✔️| -|[mindspore.ops.rand_like](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.rand_like.html)|✔️|✔️|✔️| -|[mindspore.ops.randint](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.randint.html)|✔️|✔️|✔️| -|[mindspore.ops.randn](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.randn.html)|✔️|✔️|✔️| -|[mindspore.ops.randn_like](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.randn_like.html)|✔️|✔️|✔️| -|[mindspore.ops.randperm](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.randperm.html)|❌|❌|✔️| -|[mindspore.ops.range](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.range.html)|❌|✔️|✔️| -|[mindspore.ops.ravel](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ravel.html)|✔️|✔️|✔️| 
-|[mindspore.ops.real](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.real.html)|❌|✔️|✔️| -|[mindspore.ops.reciprocal](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.reciprocal.html)|✔️|✔️|✔️| -|[mindspore.ops.relu](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.relu.html)|✔️|✔️|✔️| -|[mindspore.ops.relu6](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.relu6.html)|✔️|✔️|✔️| -|[mindspore.ops.remainder](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.remainder.html)|✔️|✔️|✔️| -|[mindspore.ops.repeat_interleave](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.repeat_interleave.html)|✔️|✔️|✔️| -|[mindspore.ops.reshape](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.reshape.html)|✔️|✔️|✔️| -|[mindspore.ops.reverse_sequence](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.reverse_sequence.html)|✔️|✔️|✔️| -|[mindspore.ops.roll](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.roll.html)|❌|✔️|❌| -|[mindspore.ops.round](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.round.html)|✔️|✔️|✔️| -|[mindspore.ops.rsqrt](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.rsqrt.html)|✔️|✔️|✔️| -|[mindspore.ops.scatter_nd](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.scatter_nd.html)|✔️|✔️|✔️| -|[mindspore.ops.scatter_nd_add](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.scatter_nd_add.html)|✔️|✔️|✔️| -|[mindspore.ops.scatter_nd_max](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.scatter_nd_max.html)|❌|✔️|✔️| -|[mindspore.ops.scatter_nd_min](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.scatter_nd_min.html)|✔️|✔️|✔️| -|[mindspore.ops.scatter_nd_mul](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.scatter_nd_mul.html)|❌|✔️|✔️| -|[mindspore.ops.scatter_nd_sub](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.scatter_nd_sub.html)|✔️|✔️|✔️| -|[mindspore.ops.scatter_update](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.scatter_update.html)|✔️|✔️|✔️| -|[mindspore.ops.select](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.select.html)|✔️|✔️|✔️| -|[mindspore.ops.selu](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.selu.html)|✔️|✔️|✔️| -|[mindspore.ops.sigmoid](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.sigmoid.html)|✔️|✔️|✔️| -|[mindspore.ops.sign](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.sign.html)|✔️|✔️|✔️| -|[mindspore.ops.silu](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.silu.html)|✔️|✔️|✔️| -|[mindspore.ops.sin](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.sin.html)|✔️|✔️|✔️| -|[mindspore.ops.sinc](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.sinc.html)|✔️|✔️|✔️| -|[mindspore.ops.sinh](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.sinh.html)|✔️|✔️|✔️| -|[mindspore.ops.slice](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.slice.html)|✔️|✔️|✔️| -|[mindspore.ops.softmax](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.softmax.html)|✔️|✔️|✔️| -|[mindspore.ops.softshrink](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.softshrink.html)|✔️|✔️|✔️| 
-|[mindspore.ops.sort](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.sort.html)|✔️|❌|✔️|| -|[mindspore.ops.split](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.split.html)|❌|✔️|✔️| -|[mindspore.ops.sqrt](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.sqrt.html)|✔️|✔️|✔️| -|[mindspore.ops.square](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.square.html)|✔️|✔️|✔️| -|[mindspore.ops.squeeze](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.squeeze.html)|✔️|✔️|✔️| -|[mindspore.ops.stack](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.stack.html)|✔️|✔️|✔️| -|[mindspore.ops.std](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.std.html)|✔️|❌|✔️|| -|[mindspore.ops.strided_slice](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.strided_slice.html)|✔️|✔️|✔️| -|[mindspore.ops.sub](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.sub.html)|✔️|✔️|✔️| -|[mindspore.ops.sum](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.sum.html)|✔️|✔️|✔️| -|[mindspore.ops.svd](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.svd.html)|❌|✔️|✔️| -|[mindspore.ops.tan](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.tan.html)|✔️|✔️|✔️| -|[mindspore.ops.tanh](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.tanh.html)|✔️|✔️|✔️| -|[mindspore.ops.tile](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.tile.html)|✔️|✔️|✔️| -|[mindspore.ops.topk](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.topk.html)|✔️|✔️|✔️| -|[mindspore.ops.trace](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.trace.html)|✔️|✔️|✔️| -|[mindspore.ops.transpose](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.transpose.html)|✔️|✔️|✔️| -|[mindspore.ops.tril](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.tril.html)|✔️|✔️|✔️| -|[mindspore.ops.triu](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.triu.html)|✔️|✔️|✔️| -|[mindspore.ops.trunc](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.trunc.html)|✔️|✔️|✔️| -|[mindspore.ops.unfold](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.unfold.html)|✔️|✔️|✔️| -|[mindspore.ops.uniform](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.uniform.html)|❌|✔️|✔️| -|[mindspore.ops.unique](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.unique.html)|✔️|✔️|✔️| -|[mindspore.ops.unsorted_segment_sum](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.unsorted_segment_sum.html)|✔️|✔️|✔️| -|[mindspore.ops.unsqueeze](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.unsqueeze.html)|✔️|✔️|✔️| -|[mindspore.ops.unstack](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.unstack.html)|✔️|✔️|✔️| -|[mindspore.ops.where](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.where.html)|✔️|✔️|✔️| -|[mindspore.ops.xlogy](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.xlogy.html)|✔️|✔️|✔️| -|[mindspore.ops.zeros](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.zeros.html)|✔️|✔️|✔️| -|[mindspore.ops.zeros_like](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.zeros_like.html)|✔️|✔️|✔️| 
-|[mindspore.ops.zeta](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.zeta.html)|✔️|✔️|✔️| -|[mindspore.mint](https://www.mindspore.cn/docs/en/master/api_python/mindspore.mint.html#mindspore-mint)|✔️|❌|❌| -|[mindspore.mint.nn.functional](https://www.mindspore.cn/docs/en/master/api_python/mindspore.mint.html#mindspore-mint-nn-functional)|✔️|❌|❌| +|[mindspore.ops.abs](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.abs.html)|✔️|✔️|✔️| +|[mindspore.ops.acos](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.acos.html)|✔️|✔️|✔️| +|[mindspore.ops.acosh](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.acosh.html)|✔️|✔️|✔️| +|[mindspore.ops.add](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.add.html)|✔️|✔️|✔️| +|[mindspore.ops.addcdiv](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.addcdiv.html)|✔️|✔️|✔️| +|[mindspore.ops.addcmul](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.addcmul.html)|✔️|✔️|✔️| +|[mindspore.ops.addmm](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.addmm.html)|✔️|✔️|✔️| +|[mindspore.ops.addn](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.addn.html)|✔️|✔️|✔️| +|[mindspore.ops.all](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.all.html)|✔️|✔️|✔️| +|[mindspore.ops.amax](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.amax.html)|✔️|✔️|✔️| +|[mindspore.ops.amin](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.amin.html)|✔️|✔️|✔️| +|[mindspore.ops.angle](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.angle.html)|✔️|✔️|✔️| +|[mindspore.ops.any](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.any.html)|✔️|✔️|✔️| +|[mindspore.ops.argmax](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.argmax.html)|✔️|✔️|✔️| +|[mindspore.ops.argmin](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.argmin.html)|✔️|✔️|✔️| +|[mindspore.ops.argsort](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.argsort.html)|✔️|✔️|✔️| +|[mindspore.ops.asin](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.asin.html)|✔️|✔️|✔️| +|[mindspore.ops.asinh](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.asinh.html)|✔️|✔️|✔️| +|[mindspore.ops.assign](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.assign.html)|✔️|✔️|✔️| +|[mindspore.ops.assign_add](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.assign_add.html)|✔️|✔️|✔️| +|[mindspore.ops.atan](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.atan.html)|✔️|✔️|✔️| +|[mindspore.ops.atan2](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.atan2.html)|✔️|✔️|✔️| +|[mindspore.ops.atanh](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.atanh.html)|✔️|✔️|✔️| +|[mindspore.ops.baddbmm](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.baddbmm.html)|✔️|✔️|✔️| +|[mindspore.ops.bernoulli](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.bernoulli.html)|❌|✔️|✔️| +|[mindspore.ops.bessel_i0](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.bessel_i0.html)|❌|✔️|✔️| +|[mindspore.ops.bessel_i0e](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.bessel_i0e.html)|✔️|✔️|✔️| 
+|[mindspore.ops.bessel_i1](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.bessel_i1.html)|❌|✔️|✔️| +|[mindspore.ops.bessel_i1e](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.bessel_i1e.html)|✔️|✔️|✔️| +|[mindspore.ops.bessel_j0](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.bessel_j0.html)|❌|✔️|✔️| +|[mindspore.ops.bessel_j1](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.bessel_j1.html)|❌|✔️|✔️| +|[mindspore.ops.bias_add](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.bias_add.html)|❌|✔️|✔️| +|[mindspore.ops.bincount](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.bincount.html)|✔️|✔️|✔️| +|[mindspore.ops.bitwise_and](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.bitwise_and.html)|✔️|✔️|✔️| +|[mindspore.ops.bitwise_left_shift](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.bitwise_left_shift.html)|✔️|✔️|✔️| +|[mindspore.ops.bitwise_or](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.bitwise_or.html)|✔️|✔️|✔️| +|[mindspore.ops.bitwise_right_shift](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.bitwise_right_shift.html)|✔️|✔️|✔️| +|[mindspore.ops.bitwise_xor](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.bitwise_xor.html)|✔️|✔️|✔️| +|[mindspore.ops.bmm](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.bmm.html)|✔️|✔️|✔️| +|[mindspore.ops.broadcast_to](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.broadcast_to.html)|✔️|✔️|✔️| +|[mindspore.ops.ceil](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ceil.html)|✔️|✔️|✔️| +|[mindspore.ops.celu](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.celu.html)|✔️|✔️|✔️| +|[mindspore.ops.chunk](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.chunk.html)|❌|✔️|✔️| +|[mindspore.ops.clamp](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.clamp.html)|✔️|✔️|✔️| +|[mindspore.ops.clip_by_global_norm](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.clip_by_global_norm.html)|✔️|✔️|✔️| +|[mindspore.ops.clip_by_value](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.clip_by_value.html)|✔️|✔️|✔️| +|[mindspore.ops.concat](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.concat.html)|✔️|✔️|✔️| +|[mindspore.ops.conj](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.conj.html)|❌|✔️|✔️| +|[mindspore.ops.cos](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.cos.html)|✔️|✔️|✔️| +|[mindspore.ops.cosh](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.cosh.html)|✔️|✔️|✔️| +|[mindspore.ops.cross](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.cross.html)|✔️|❌|✔️| +|[mindspore.ops.cross_entropy](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.cross_entropy.html)|✔️|✔️|✔️| +|[mindspore.ops.cummax](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.cummax.html)|❌|✔️|✔️| +|[mindspore.ops.cummin](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.cummin.html)|✔️|✔️|✔️| +|[mindspore.ops.cumprod](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.cumprod.html)|❌|✔️|✔️| 
+|[mindspore.ops.cumsum](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.cumsum.html)|❌|✔️|✔️| +|[mindspore.ops.diag](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.diag.html)|✔️|✔️|✔️| +|[mindspore.ops.diag_embed](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.diag_embed.html)|✔️|✔️|✔️| +|[mindspore.ops.diagonal](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.diagonal.html)|✔️|✔️|✔️| +|[mindspore.ops.dist](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.dist.html)|✔️|✔️|✔️| +|[mindspore.ops.div](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.div.html)|✔️|✔️|✔️| +|[mindspore.ops.dot](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.dot.html)|✔️|✔️|✔️| +|[mindspore.ops.dropout](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.dropout.html)|✔️|✔️|✔️| +|[mindspore.ops.dropout2d](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.dropout2d.html)|✔️|✔️|✔️| +|[mindspore.ops.dropout3d](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.dropout3d.html)|✔️|✔️|✔️| +|[mindspore.ops.einsum](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.einsum.html)|❌|✔️|❌| +|[mindspore.ops.elu](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.elu.html)|✔️|✔️|✔️| +|[mindspore.ops.equal](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.equal.html)|✔️|✔️|✔️| +|[mindspore.ops.erf](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.erf.html)|✔️|✔️|✔️| +|[mindspore.ops.erfc](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.erfc.html)|✔️|✔️|✔️| +|[mindspore.ops.erfinv](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.erfinv.html)|✔️|✔️|✔️| +|[mindspore.ops.exp](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.exp.html)|✔️|✔️|✔️| +|[mindspore.ops.expand_dims](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.expand_dims.html)|✔️|✔️|✔️| +|[mindspore.ops.expm1](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.expm1.html)|✔️|✔️|✔️| +|[mindspore.ops.eye](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.eye.html)|✔️|✔️|✔️| +|[mindspore.ops.fill](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.fill.html)|✔️|✔️|✔️| +|[mindspore.ops.flatten](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.flatten.html)|✔️|✔️|✔️| +|[mindspore.ops.flip](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.flip.html)|✔️|✔️|✔️| +|[mindspore.ops.floor](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.floor.html)|✔️|✔️|✔️| +|[mindspore.ops.floor_div](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.floor_div.html)|✔️|✔️|✔️| +|[mindspore.ops.floor_mod](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.floor_mod.html)|✔️|✔️|✔️| +|[mindspore.ops.fmod](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.fmod.html)|✔️|✔️|✔️| +|[mindspore.ops.fold](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.fold.html)|✔️|✔️|✔️| +|[mindspore.ops.full](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.full.html)|✔️|✔️|✔️| +|[mindspore.ops.full_like](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.full_like.html)|✔️|✔️|✔️| 
+|[mindspore.ops.gather](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.gather.html)|✔️|✔️|✔️| +|[mindspore.ops.gather_elements](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.gather_elements.html)|✔️|✔️|✔️| +|[mindspore.ops.gather_nd](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.gather_nd.html)|✔️|✔️|✔️| +|[mindspore.ops.gcd](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.gcd.html)|✔️|✔️|✔️| +|[mindspore.ops.ge](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ge.html)|✔️|✔️|✔️| +|[mindspore.ops.gelu](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.gelu.html)|✔️|✔️|✔️| +|[mindspore.ops.geqrf](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.geqrf.html)|✔️|✔️|✔️| +|[mindspore.ops.ger](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ger.html)|✔️|✔️|✔️| +|[mindspore.ops.glu](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.glu.html)|✔️|✔️|✔️| +|[mindspore.ops.greater](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.greater.html)|✔️|✔️|✔️| +|[mindspore.ops.greater_equal](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.greater_equal.html)|✔️|✔️|✔️| +|[mindspore.ops.grid_sample](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.grid_sample.html)|✔️|✔️|✔️| +|[mindspore.ops.gt](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.gt.html)|✔️|✔️|✔️| +|[mindspore.ops.gumbel_softmax](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.gumbel_softmax.html)|✔️|✔️|✔️| +|[mindspore.ops.hardshrink](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.hardshrink.html)|✔️|✔️|✔️| +|[mindspore.ops.hardsigmoid](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.hardsigmoid.html)|✔️|✔️|✔️| +|[mindspore.ops.hardswish](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.hardswish.html)|✔️|✔️|✔️| +|[mindspore.ops.hardtanh](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.hardtanh.html)|✔️|✔️|✔️| +|[mindspore.ops.heaviside](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.heaviside.html)|✔️|✔️|✔️| +|[mindspore.ops.hypot](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.hypot.html)|✔️|✔️|✔️| +|[mindspore.ops.igammac](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.igammac.html)|✔️|✔️|✔️| +|[mindspore.ops.imag](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.imag.html)|✔️|✔️|✔️| +|[mindspore.ops.index_select](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.index_select.html)|✔️|✔️|✔️| +|[mindspore.ops.interpolate](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.interpolate.html)|❌|✔️|✔️| +|[mindspore.ops.inverse](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.inverse.html)|❌|✔️|✔️| +|[mindspore.ops.invert](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.invert.html)|✔️|✔️|✔️| +|[mindspore.ops.isfinite](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.isfinite.html)|✔️|✔️|✔️| +|[mindspore.ops.isinf](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.isinf.html)|✔️|✔️|✔️| +|[mindspore.ops.isnan](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.isnan.html)|✔️|✔️|✔️| 
+|[mindspore.ops.l1_loss](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.l1_loss.html)|✔️|✔️|✔️| +|[mindspore.ops.lcm](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.lcm.html)|✔️|✔️|✔️| +|[mindspore.ops.le](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.le.html)|✔️|✔️|✔️| +|[mindspore.ops.lerp](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.lerp.html)|✔️|✔️|✔️| +|[mindspore.ops.less](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.less.html)|✔️|✔️|✔️| +|[mindspore.ops.less_equal](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.less_equal.html)|✔️|✔️|✔️| +|[mindspore.ops.linspace](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.linspace.html)|✔️|✔️|✔️| +|[mindspore.ops.log](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.log.html)|✔️|✔️|✔️| +|[mindspore.ops.log_softmax](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.log_softmax.html)|✔️|✔️|✔️| +|[mindspore.ops.log10](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.log10.html)|✔️|✔️|✔️| +|[mindspore.ops.log1p](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.log1p.html)|✔️|✔️|✔️| +|[mindspore.ops.log2](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.log2.html)|✔️|✔️|✔️| +|[mindspore.ops.logical_and](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.logical_and.html)|✔️|✔️|✔️| +|[mindspore.ops.logical_not](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.logical_not.html)|✔️|✔️|✔️| +|[mindspore.ops.logical_or](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.logical_or.html)|✔️|✔️|✔️| +|[mindspore.ops.logical_xor](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.logical_xor.html)|✔️|❌|✔️| +|[mindspore.ops.logit](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.logit.html)|✔️|✔️|✔️| +|[mindspore.ops.logsumexp](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.logsumexp.html)|✔️|✔️|✔️| +|[mindspore.ops.lt](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.lt.html)|✔️|✔️|✔️| +|[mindspore.ops.margin_ranking_loss](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.margin_ranking_loss.html)|✔️|✔️|✔️| +|[mindspore.ops.masked_fill](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.masked_fill.html)|✔️|✔️|✔️| +|[mindspore.ops.masked_select](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.masked_select.html)|✔️|✔️|✔️| +|[mindspore.ops.matmul](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.matmul.html)|✔️|✔️|✔️| +|[mindspore.ops.matrix_solve](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.matrix_solve.html)|✔️|❌|✔️| +|[mindspore.ops.max](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.max.html)|✔️|✔️|✔️| +|[mindspore.ops.maximum](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.maximum.html)|✔️|✔️|✔️| +|[mindspore.ops.mean](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.mean.html)|✔️|✔️|✔️| +|[mindspore.ops.median](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.median.html)|❌|✔️|✔️| +|[mindspore.ops.meshgrid](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.meshgrid.html)|✔️|✔️|✔️| 
+|[mindspore.ops.min](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.min.html)|✔️|✔️|✔️| +|[mindspore.ops.minimum](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.minimum.html)|✔️|✔️|✔️| +|[mindspore.ops.mish](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.mish.html)|❌|✔️|✔️| +|[mindspore.ops.mse_loss](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.mse_loss.html)|✔️|✔️|✔️| +|[mindspore.ops.mul](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.mul.html)|✔️|✔️|✔️| +|[mindspore.ops.multinomial](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.multinomial.html)|✔️|✔️|✔️| +|[mindspore.ops.mv](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.mv.html)|✔️|✔️|✔️| +|[mindspore.ops.mvlgamma](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.mvlgamma.html)|✔️|✔️|✔️| +|[mindspore.ops.nan_to_num](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.nan_to_num.html)|✔️|❌|✔️| +|[mindspore.ops.narrow](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.narrow.html)|✔️|✔️|✔️| +|[mindspore.ops.ne](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ne.html)|✔️|✔️|✔️| +|[mindspore.ops.neg](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.neg.html)|✔️|✔️|✔️| +|[mindspore.ops.nll_loss](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.nll_loss.html)|✔️|✔️|✔️| +|[mindspore.ops.nonzero](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.nonzero.html)|✔️|✔️|✔️| +|[mindspore.ops.norm](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.norm.html)|❌|✔️|✔️| +|[mindspore.ops.normal](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.normal.html)|✔️|✔️|✔️| +|[mindspore.ops.numel](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.numel.html)|✔️|✔️|✔️| +|[mindspore.ops.one_hot](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.one_hot.html)|✔️|✔️|✔️| +|[mindspore.ops.ones](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ones.html)|✔️|✔️|✔️| +|[mindspore.ops.ones_like](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ones_like.html)|✔️|✔️|✔️| +|[mindspore.ops.pad](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.pad.html)|✔️|✔️|✔️| +|[mindspore.ops.polar](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.polar.html)|❌|✔️|✔️| +|[mindspore.ops.polygamma](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.polygamma.html)|❌|✔️|✔️| +|[mindspore.ops.pow](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.pow.html)|✔️|✔️|✔️| +|[mindspore.ops.prelu](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.prelu.html)|✔️|✔️|✔️| +|[mindspore.ops.prod](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.prod.html)|✔️|✔️|✔️| +|[mindspore.ops.rand](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.rand.html)|✔️|✔️|✔️| +|[mindspore.ops.rand_like](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.rand_like.html)|✔️|✔️|✔️| +|[mindspore.ops.randint](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.randint.html)|✔️|✔️|✔️| +|[mindspore.ops.randn](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.randn.html)|✔️|✔️|✔️| 
+|[mindspore.ops.randn_like](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.randn_like.html)|✔️|✔️|✔️| +|[mindspore.ops.randperm](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.randperm.html)|❌|❌|✔️| +|[mindspore.ops.range](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.range.html)|❌|✔️|✔️| +|[mindspore.ops.ravel](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ravel.html)|✔️|✔️|✔️| +|[mindspore.ops.real](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.real.html)|❌|✔️|✔️| +|[mindspore.ops.reciprocal](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.reciprocal.html)|✔️|✔️|✔️| +|[mindspore.ops.relu](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.relu.html)|✔️|✔️|✔️| +|[mindspore.ops.relu6](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.relu6.html)|✔️|✔️|✔️| +|[mindspore.ops.remainder](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.remainder.html)|✔️|✔️|✔️| +|[mindspore.ops.repeat_interleave](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.repeat_interleave.html)|✔️|✔️|✔️| +|[mindspore.ops.reshape](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.reshape.html)|✔️|✔️|✔️| +|[mindspore.ops.reverse_sequence](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.reverse_sequence.html)|✔️|✔️|✔️| +|[mindspore.ops.roll](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.roll.html)|❌|✔️|❌| +|[mindspore.ops.round](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.round.html)|✔️|✔️|✔️| +|[mindspore.ops.rsqrt](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.rsqrt.html)|✔️|✔️|✔️| +|[mindspore.ops.scatter_nd](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.scatter_nd.html)|✔️|✔️|✔️| +|[mindspore.ops.scatter_nd_add](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.scatter_nd_add.html)|✔️|✔️|✔️| +|[mindspore.ops.scatter_nd_max](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.scatter_nd_max.html)|❌|✔️|✔️| +|[mindspore.ops.scatter_nd_min](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.scatter_nd_min.html)|✔️|✔️|✔️| +|[mindspore.ops.scatter_nd_mul](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.scatter_nd_mul.html)|❌|✔️|✔️| +|[mindspore.ops.scatter_nd_sub](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.scatter_nd_sub.html)|✔️|✔️|✔️| +|[mindspore.ops.scatter_update](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.scatter_update.html)|✔️|✔️|✔️| +|[mindspore.ops.select](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.select.html)|✔️|✔️|✔️| +|[mindspore.ops.selu](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.selu.html)|✔️|✔️|✔️| +|[mindspore.ops.sigmoid](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.sigmoid.html)|✔️|✔️|✔️| +|[mindspore.ops.sign](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.sign.html)|✔️|✔️|✔️| +|[mindspore.ops.silu](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.silu.html)|✔️|✔️|✔️| +|[mindspore.ops.sin](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.sin.html)|✔️|✔️|✔️| +|[mindspore.ops.sinc](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.sinc.html)|✔️|✔️|✔️| 
+|[mindspore.ops.sinh](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.sinh.html)|✔️|✔️|✔️|
+|[mindspore.ops.slice](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.slice.html)|✔️|✔️|✔️|
+|[mindspore.ops.softmax](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.softmax.html)|✔️|✔️|✔️|
+|[mindspore.ops.softshrink](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.softshrink.html)|✔️|✔️|✔️|
+|[mindspore.ops.sort](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.sort.html)|✔️|❌|✔️|
+|[mindspore.ops.split](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.split.html)|❌|✔️|✔️|
+|[mindspore.ops.sqrt](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.sqrt.html)|✔️|✔️|✔️|
+|[mindspore.ops.square](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.square.html)|✔️|✔️|✔️|
+|[mindspore.ops.squeeze](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.squeeze.html)|✔️|✔️|✔️|
+|[mindspore.ops.stack](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.stack.html)|✔️|✔️|✔️|
+|[mindspore.ops.std](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.std.html)|✔️|❌|✔️|
+|[mindspore.ops.strided_slice](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.strided_slice.html)|✔️|✔️|✔️|
+|[mindspore.ops.sub](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.sub.html)|✔️|✔️|✔️|
+|[mindspore.ops.sum](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.sum.html)|✔️|✔️|✔️|
+|[mindspore.ops.svd](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.svd.html)|❌|✔️|✔️|
+|[mindspore.ops.tan](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.tan.html)|✔️|✔️|✔️|
+|[mindspore.ops.tanh](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.tanh.html)|✔️|✔️|✔️|
+|[mindspore.ops.tile](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.tile.html)|✔️|✔️|✔️|
+|[mindspore.ops.topk](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.topk.html)|✔️|✔️|✔️|
+|[mindspore.ops.trace](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.trace.html)|✔️|✔️|✔️|
+|[mindspore.ops.transpose](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.transpose.html)|✔️|✔️|✔️|
+|[mindspore.ops.tril](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.tril.html)|✔️|✔️|✔️|
+|[mindspore.ops.triu](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.triu.html)|✔️|✔️|✔️|
+|[mindspore.ops.trunc](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.trunc.html)|✔️|✔️|✔️|
+|[mindspore.ops.unfold](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.unfold.html)|✔️|✔️|✔️|
+|[mindspore.ops.uniform](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.uniform.html)|❌|✔️|✔️|
+|[mindspore.ops.unique](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.unique.html)|✔️|✔️|✔️|
+|[mindspore.ops.unsorted_segment_sum](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.unsorted_segment_sum.html)|✔️|✔️|✔️|
+|[mindspore.ops.unsqueeze](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.unsqueeze.html)|✔️|✔️|✔️|
+|[mindspore.ops.unstack](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.unstack.html)|✔️|✔️|✔️|
+|[mindspore.ops.where](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.where.html)|✔️|✔️|✔️| +|[mindspore.ops.xlogy](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.xlogy.html)|✔️|✔️|✔️| +|[mindspore.ops.zeros](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.zeros.html)|✔️|✔️|✔️| +|[mindspore.ops.zeros_like](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.zeros_like.html)|✔️|✔️|✔️| +|[mindspore.ops.zeta](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.zeta.html)|✔️|✔️|✔️| +|[mindspore.mint](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.mint.html#mindspore-mint)|✔️|❌|❌| +|[mindspore.mint.nn.functional](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.mint.html#mindspore-mint-nn-functional)|✔️|❌|❌| diff --git a/docs/mindspore/source_en/api_python/dynamic_shape_nn.md b/docs/mindspore/source_en/api_python/dynamic_shape_nn.md index d73ebee010..39a78a2c96 100644 --- a/docs/mindspore/source_en/api_python/dynamic_shape_nn.md +++ b/docs/mindspore/source_en/api_python/dynamic_shape_nn.md @@ -1,8 +1,8 @@ # Dynamic Shape Support Status of nn Interface -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/api_python/dynamic_shape_nn.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/api_python/dynamic_shape_nn.md) -> The following list provides nn interfaces that support dynamic shape functionality in PYNATIVE mode. However, some nn interfaces may have incomplete data type support. If you encounter such issues, you can resolve them by manually incorporating the [Cast](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Cast.html) operator. +> The following list provides nn interfaces that support dynamic shape functionality in PYNATIVE mode. However, some nn interfaces may have incomplete data type support. If you encounter such issues, you can resolve them by manually incorporating the [Cast](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Cast.html) operator. > > nn interfaces outside of this list have limited support for dynamic shape functionality and may fail to execute. Additionally, in graph mode, dynamic shape functionality is also limited and may result in execution failures. 
> @@ -10,58 +10,58 @@ | API name | Ascend | GPU | CPU | | :--- |:-------- | :------- |:---------| -|[mindspore.nn.Adam](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.Adam.html)|✔️|✔️|✔️| -|[mindspore.nn.AdaptiveAvgPool1d](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.AdaptiveAvgPool1d.html)|✔️|✔️|✔️| -|[mindspore.nn.AdaptiveAvgPool2d](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.AdaptiveAvgPool2d.html)|✔️|✔️|✔️| -|[mindspore.nn.AdaptiveAvgPool3d](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.AdaptiveAvgPool3d.html)|✔️|✔️|✔️| -|[mindspore.nn.AdaptiveMaxPool1d](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.AdaptiveMaxPool1d.html)|✔️|✔️|✔️| -|[mindspore.nn.AvgPool1d](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.AvgPool1d.html)|✔️|✔️|✔️| -|[mindspore.nn.AvgPool2d](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.AvgPool2d.html)|✔️|✔️|✔️| -|[mindspore.nn.AvgPool3d](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.AvgPool3d.html)|✔️|✔️|✔️| -|[mindspore.nn.BatchNorm1d](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.BatchNorm1d.html)|✔️|✔️|✔️| -|[mindspore.nn.BatchNorm2d](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.BatchNorm2d.html)|✔️|✔️|✔️| -|[mindspore.nn.BatchNorm3d](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.BatchNorm3d.html)|✔️|✔️|✔️| -|[mindspore.nn.BCELoss](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.BCELoss.html)|✔️|✔️|✔️| -|[mindspore.nn.BCEWithLogitsLoss](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.BCEWithLogitsLoss.html)|✔️|✔️|✔️| -|[mindspore.nn.ConstantPad1d](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.ConstantPad1d.html)|✔️|✔️|✔️| -|[mindspore.nn.ConstantPad2d](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.ConstantPad2d.html)|✔️|✔️|✔️| -|[mindspore.nn.Conv1d](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.Conv1d.html)|✔️|✔️|✔️| -|[mindspore.nn.Conv1dTranspose](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.Conv1dTranspose.html)|✔️|✔️|✔️| -|[mindspore.nn.Conv2d](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.Conv2d.html)|✔️|✔️|✔️| -|[mindspore.nn.Conv2dTranspose](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.Conv2dTranspose.html)|✔️|✔️|✔️| -|[mindspore.nn.Conv3d](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.Conv3d.html)|✔️|✔️|✔️| -|[mindspore.nn.Conv3dTranspose](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.Conv3dTranspose.html)|✔️|✔️|✔️| -|[mindspore.nn.CosineEmbeddingLoss](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.CosineEmbeddingLoss.html)|✔️|✔️|✔️| -|[mindspore.nn.CrossEntropyLoss](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.CrossEntropyLoss.html)|✔️|✔️|✔️| -|[mindspore.nn.CTCLoss](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.CTCLoss.html)|✔️|✔️|✔️| -|[mindspore.nn.Dense](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.Dense.html)|✔️|✔️|✔️| -|[mindspore.nn.Embedding](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.Embedding.html)|✔️|✔️|✔️| -|[mindspore.nn.EmbeddingLookup](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.EmbeddingLookup.html)|✔️|✔️|✔️| 
-|[mindspore.nn.GLU](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.GLU.html)|✔️|✔️|✔️| -|[mindspore.nn.GroupNorm](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.GroupNorm.html)|✔️|✔️|✔️| -|[mindspore.nn.GRU](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.GRU.html)|❌|❌|✔️| -|[mindspore.nn.GRUCell](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.GRUCell.html)|✔️|✔️|✔️| -|[mindspore.nn.InstanceNorm1d](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.InstanceNorm1d.html)|❌|✔️|❌| -|[mindspore.nn.InstanceNorm2d](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.InstanceNorm2d.html)|❌|✔️|❌| -|[mindspore.nn.InstanceNorm3d](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.InstanceNorm3d.html)|❌|✔️|❌| -|[mindspore.nn.KLDivLoss](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.KLDivLoss.html)|✔️|✔️|✔️| -|[mindspore.nn.L1Loss](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.L1Loss.html)|✔️|✔️|✔️| -|[mindspore.nn.LeakyReLU](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.LeakyReLU.html)|✔️|✔️|✔️| -|[mindspore.nn.LRN](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.LRN.html)|✔️|✔️|✔️| -|[mindspore.nn.LSTM](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.LSTM.html)|✔️|✔️|✔️| -|[mindspore.nn.MarginRankingLoss](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.MarginRankingLoss.html)|✔️|✔️|✔️| -|[mindspore.nn.MaxPool1d](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.MaxPool1d.html)|✔️|✔️|✔️| -|[mindspore.nn.MaxPool2d](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.MaxPool2d.html)|✔️|✔️|✔️| -|[mindspore.nn.MaxPool3d](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.MaxPool3d.html)|✔️|✔️|✔️| -|[mindspore.nn.MaxUnpool2d](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.MaxUnpool2d.html)|❌|✔️|✔️| -|[mindspore.nn.MSELoss](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.MSELoss.html)|✔️|✔️|✔️| -|[mindspore.nn.MultiLabelSoftMarginLoss](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.MultiLabelSoftMarginLoss.html)|✔️|✔️|✔️| -|[mindspore.nn.PixelShuffle](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.PixelShuffle.html)|✔️|✔️|✔️| -|[mindspore.nn.ReflectionPad1d](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.ReflectionPad1d.html)|✔️|❌|✔️| -|[mindspore.nn.ReplicationPad2d](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.ReplicationPad2d.html)|❌|✔️|❌| -|[mindspore.nn.RReLU](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.RReLU.html)|✔️|✔️|✔️| -|[mindspore.nn.SmoothL1Loss](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.SmoothL1Loss.html)|✔️|✔️|✔️| -|[mindspore.nn.Softmax2d](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.Softmax2d.html)|✔️|✔️|✔️| -|[mindspore.nn.SoftmaxCrossEntropyWithLogits](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.SoftmaxCrossEntropyWithLogits.html)|✔️|✔️|✔️| -|[mindspore.nn.ZeroPad2d](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.ZeroPad2d.html)|✔️|✔️|✔️| -|[mindspore.mint.nn](https://www.mindspore.cn/docs/en/master/api_python/mindspore.mint.html#mindspore-mint-nn)|✔️|❌|❌| +|[mindspore.nn.Adam](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.Adam.html)|✔️|✔️|✔️| 
+|[mindspore.nn.AdaptiveAvgPool1d](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.AdaptiveAvgPool1d.html)|✔️|✔️|✔️| +|[mindspore.nn.AdaptiveAvgPool2d](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.AdaptiveAvgPool2d.html)|✔️|✔️|✔️| +|[mindspore.nn.AdaptiveAvgPool3d](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.AdaptiveAvgPool3d.html)|✔️|✔️|✔️| +|[mindspore.nn.AdaptiveMaxPool1d](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.AdaptiveMaxPool1d.html)|✔️|✔️|✔️| +|[mindspore.nn.AvgPool1d](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.AvgPool1d.html)|✔️|✔️|✔️| +|[mindspore.nn.AvgPool2d](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.AvgPool2d.html)|✔️|✔️|✔️| +|[mindspore.nn.AvgPool3d](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.AvgPool3d.html)|✔️|✔️|✔️| +|[mindspore.nn.BatchNorm1d](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.BatchNorm1d.html)|✔️|✔️|✔️| +|[mindspore.nn.BatchNorm2d](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.BatchNorm2d.html)|✔️|✔️|✔️| +|[mindspore.nn.BatchNorm3d](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.BatchNorm3d.html)|✔️|✔️|✔️| +|[mindspore.nn.BCELoss](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.BCELoss.html)|✔️|✔️|✔️| +|[mindspore.nn.BCEWithLogitsLoss](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.BCEWithLogitsLoss.html)|✔️|✔️|✔️| +|[mindspore.nn.ConstantPad1d](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.ConstantPad1d.html)|✔️|✔️|✔️| +|[mindspore.nn.ConstantPad2d](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.ConstantPad2d.html)|✔️|✔️|✔️| +|[mindspore.nn.Conv1d](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.Conv1d.html)|✔️|✔️|✔️| +|[mindspore.nn.Conv1dTranspose](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.Conv1dTranspose.html)|✔️|✔️|✔️| +|[mindspore.nn.Conv2d](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.Conv2d.html)|✔️|✔️|✔️| +|[mindspore.nn.Conv2dTranspose](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.Conv2dTranspose.html)|✔️|✔️|✔️| +|[mindspore.nn.Conv3d](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.Conv3d.html)|✔️|✔️|✔️| +|[mindspore.nn.Conv3dTranspose](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.Conv3dTranspose.html)|✔️|✔️|✔️| +|[mindspore.nn.CosineEmbeddingLoss](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.CosineEmbeddingLoss.html)|✔️|✔️|✔️| +|[mindspore.nn.CrossEntropyLoss](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.CrossEntropyLoss.html)|✔️|✔️|✔️| +|[mindspore.nn.CTCLoss](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.CTCLoss.html)|✔️|✔️|✔️| +|[mindspore.nn.Dense](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.Dense.html)|✔️|✔️|✔️| +|[mindspore.nn.Embedding](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.Embedding.html)|✔️|✔️|✔️| +|[mindspore.nn.EmbeddingLookup](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.EmbeddingLookup.html)|✔️|✔️|✔️| +|[mindspore.nn.GLU](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.GLU.html)|✔️|✔️|✔️| +|[mindspore.nn.GroupNorm](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.GroupNorm.html)|✔️|✔️|✔️| 
+|[mindspore.nn.GRU](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.GRU.html)|❌|❌|✔️| +|[mindspore.nn.GRUCell](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.GRUCell.html)|✔️|✔️|✔️| +|[mindspore.nn.InstanceNorm1d](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.InstanceNorm1d.html)|❌|✔️|❌| +|[mindspore.nn.InstanceNorm2d](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.InstanceNorm2d.html)|❌|✔️|❌| +|[mindspore.nn.InstanceNorm3d](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.InstanceNorm3d.html)|❌|✔️|❌| +|[mindspore.nn.KLDivLoss](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.KLDivLoss.html)|✔️|✔️|✔️| +|[mindspore.nn.L1Loss](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.L1Loss.html)|✔️|✔️|✔️| +|[mindspore.nn.LeakyReLU](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.LeakyReLU.html)|✔️|✔️|✔️| +|[mindspore.nn.LRN](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.LRN.html)|✔️|✔️|✔️| +|[mindspore.nn.LSTM](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.LSTM.html)|✔️|✔️|✔️| +|[mindspore.nn.MarginRankingLoss](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.MarginRankingLoss.html)|✔️|✔️|✔️| +|[mindspore.nn.MaxPool1d](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.MaxPool1d.html)|✔️|✔️|✔️| +|[mindspore.nn.MaxPool2d](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.MaxPool2d.html)|✔️|✔️|✔️| +|[mindspore.nn.MaxPool3d](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.MaxPool3d.html)|✔️|✔️|✔️| +|[mindspore.nn.MaxUnpool2d](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.MaxUnpool2d.html)|❌|✔️|✔️| +|[mindspore.nn.MSELoss](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.MSELoss.html)|✔️|✔️|✔️| +|[mindspore.nn.MultiLabelSoftMarginLoss](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.MultiLabelSoftMarginLoss.html)|✔️|✔️|✔️| +|[mindspore.nn.PixelShuffle](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.PixelShuffle.html)|✔️|✔️|✔️| +|[mindspore.nn.ReflectionPad1d](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.ReflectionPad1d.html)|✔️|❌|✔️| +|[mindspore.nn.ReplicationPad2d](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.ReplicationPad2d.html)|❌|✔️|❌| +|[mindspore.nn.RReLU](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.RReLU.html)|✔️|✔️|✔️| +|[mindspore.nn.SmoothL1Loss](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.SmoothL1Loss.html)|✔️|✔️|✔️| +|[mindspore.nn.Softmax2d](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.Softmax2d.html)|✔️|✔️|✔️| +|[mindspore.nn.SoftmaxCrossEntropyWithLogits](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.SoftmaxCrossEntropyWithLogits.html)|✔️|✔️|✔️| +|[mindspore.nn.ZeroPad2d](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.ZeroPad2d.html)|✔️|✔️|✔️| +|[mindspore.mint.nn](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.mint.html#mindspore-mint-nn)|✔️|❌|❌| diff --git a/docs/mindspore/source_en/api_python/dynamic_shape_primitive.md b/docs/mindspore/source_en/api_python/dynamic_shape_primitive.md index 09bfe8331a..a4a690dd1e 100644 --- a/docs/mindspore/source_en/api_python/dynamic_shape_primitive.md +++ b/docs/mindspore/source_en/api_python/dynamic_shape_primitive.md @@ 
-1,8 +1,8 @@ # Dynamic Shape Support Status of primitive Interface -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/api_python/dynamic_shape_primitive.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/api_python/dynamic_shape_primitive.md) -> The following list provides primitive interfaces that support dynamic shape functionality in PyNative mode. However, some primitive interfaces may have incomplete data type support. If you encounter such issues, you can resolve them by manually incorporating the [Cast](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Cast.html) operator. +> The following list provides primitive interfaces that support dynamic shape functionality in PyNative mode. However, some primitive interfaces may have incomplete data type support. If you encounter such issues, you can resolve them by manually incorporating the [Cast](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Cast.html) operator. > > Primitive interfaces outside of this list have limited support for dynamic shape functionality and may fail to execute. Additionally, in graph mode, dynamic shape functionality is also limited and may result in execution failures. > @@ -10,215 +10,215 @@ | Operator name | Ascend | GPU | CPU | | :--- |:-------- | :------- |:---------| -|[mindspore.Abs](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Abs.html)|✔️|✔️|✔️| -|[mindspore.Acosh](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Acosh.html)|✔️|✔️|✔️| -|[mindspore.Adam](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Adam.html)|✔️|✔️|✔️| -|[mindspore.AdaptiveAvgPool2D](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.AdaptiveAvgPool2D.html)|✔️|✔️|✔️| -|[mindspore.AdaptiveAvgPool3D](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.AdaptiveAvgPool3D.html)|✔️|✔️|✔️| -|[mindspore.Add](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Add.html)|✔️|✔️|✔️| -|[mindspore.Addcmul](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Addcmul.html)|✔️|✔️|✔️| -|[mindspore.AddN](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.AddN.html)|✔️|✔️|✔️| -|[mindspore.Angle](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Angle.html)|✔️|✔️|✔️| -|[mindspore.ArgMaxWithValue](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ArgMaxWithValue.html)|✔️|✔️|✔️| -|[mindspore.ArgMinWithValue](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ArgMinWithValue.html)|✔️|✔️|✔️| -|[mindspore.Asin](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Asin.html)|✔️|✔️|✔️| -|[mindspore.Asinh](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Asinh.html)|✔️|✔️|✔️| -|[mindspore.Assign](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Assign.html)|✔️|✔️|✔️| -|[mindspore.AssignAdd](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.AssignAdd.html)|✔️|✔️|✔️| -|[mindspore.Atan](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Atan.html)|✔️|✔️|✔️| 
-|[mindspore.Atan2](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Atan2.html)|✔️|✔️|✔️| -|[mindspore.Atanh](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Atanh.html)|✔️|✔️|✔️| -|[mindspore.AvgPool](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.AvgPool.html)|✔️|✔️|✔️| -|[mindspore.AvgPool3D](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.AvgPool3D.html)|✔️|✔️|✔️| -|[mindspore.BatchNorm](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.BatchNorm.html)|✔️|✔️|✔️| -|[mindspore.BCEWithLogitsLoss](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.BCEWithLogitsLoss.html)|✔️|✔️|✔️| -|[mindspore.Bernoulli](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Bernoulli.html)|❌|✔️|✔️| -|[mindspore.BesselI0](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.BesselI0.html)|❌|✔️|✔️| -|[mindspore.BesselI0e](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.BesselI0e.html)|✔️|✔️|✔️| -|[mindspore.BesselI1](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.BesselI1.html)|❌|✔️|✔️| -|[mindspore.BesselI1e](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.BesselI1e.html)|✔️|✔️|✔️| -|[mindspore.BesselJ0](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.BesselJ0.html)|❌|✔️|✔️| -|[mindspore.BesselJ1](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.BesselJ1.html)|❌|✔️|✔️| -|[mindspore.BiasAdd](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.BiasAdd.html)|✔️|✔️|️❌| -|[mindspore.BinaryCrossEntropy](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.BinaryCrossEntropy.html)|✔️|✔️|✔️| -|[mindspore.BitwiseAnd](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.BitwiseAnd.html)|✔️|✔️|✔️| -|[mindspore.BitwiseOr](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.BitwiseOr.html)|✔️|✔️|✔️| -|[mindspore.BitwiseXor](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.BitwiseXor.html)|✔️|✔️|✔️| -|[mindspore.BroadcastTo](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.BroadcastTo.html)|✔️|✔️|✔️| -|[mindspore.Cast](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Cast.html)|✔️|✔️|✔️| -|[mindspore.Ceil](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Ceil.html)|✔️|✔️|✔️| -|[mindspore.Col2Im](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Col2Im.html)|✔️|✔️|✔️| -|[mindspore.Complex](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Complex.html)|✔️|✔️|✔️| -|[mindspore.Concat](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Concat.html)|✔️|✔️|✔️| -|[mindspore.Conj](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Conj.html)|❌|✔️|✔️| -|[mindspore.Conv2D](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Conv2D.html)|✔️|✔️|✔️| -|[mindspore.Conv2DTranspose](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Conv2DTranspose.html)|✔️|✔️|✔️| -|[mindspore.Conv3D](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Conv3D.html)|✔️|✔️|✔️| -|[mindspore.Conv3DTranspose](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Conv3DTranspose.html)|✔️|✔️|✔️| -|[mindspore.Cos](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Cos.html)|✔️|✔️|✔️| 
-|[mindspore.Cosh](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Cosh.html)|✔️|✔️|✔️| -|[mindspore.Cross](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Cross.html)|✔️|❌|✔️| -|[mindspore.CTCLossV2](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.CTCLossV2.html)|✔️|✔️|✔️| -|[mindspore.Cummax](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Cummax.html)|❌|✔️|✔️| -|[mindspore.Cummin](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Cummin.html)|✔️|✔️|✔️| -|[mindspore.CumSum](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.CumSum.html)|✔️|✔️|️❌| -|[mindspore.Diag](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Diag.html)|✔️|✔️|✔️| -|[mindspore.Digamma](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Digamma.html)|❌|✔️|✔️| -|[mindspore.Div](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Div.html)|✔️|✔️|✔️| -|[mindspore.Dropout](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Dropout.html)|✔️|✔️|✔️| -|[mindspore.Dropout2D](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Dropout2D.html)|✔️|✔️|✔️| -|[mindspore.Dropout3D](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Dropout3D.html)|✔️|✔️|✔️| -|[mindspore.DynamicGRUV2](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.DynamicGRUV2.html)|✔️|❌|❌| -|[mindspore.Einsum](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Einsum.html)|❌|✔️|❌| -|[mindspore.Elu](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Elu.html)|✔️|✔️|✔️| -|[mindspore.Equal](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Equal.html)|✔️|✔️|✔️| -|[mindspore.Erf](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Erf.html)|✔️|✔️|✔️| -|[mindspore.Erfc](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Erfc.html)|✔️|✔️|✔️| -|[mindspore.Exp](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Exp.html)|✔️|✔️|✔️| -|[mindspore.ExpandDims](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ExpandDims.html)|✔️|✔️|✔️| -|[mindspore.Expm1](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Expm1.html)|✔️|✔️|✔️| -|[mindspore.Eye](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Eye.html)|✔️|✔️|✔️| -|[mindspore.FFTWithSize](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.FFTWithSize.html)|✔️|✔️|✔️| -|[mindspore.Fill](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Fill.html)|✔️|✔️|✔️| -|[mindspore.FillV2](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.FillV2.html)|✔️|✔️|✔️| -|[mindspore.Flatten](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Flatten.html)|✔️|✔️|✔️| -|[mindspore.Floor](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Floor.html)|✔️|✔️|✔️| -|[mindspore.FloorDiv](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.FloorDiv.html)|✔️|✔️|✔️| -|[mindspore.FloorMod](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.FloorMod.html)|✔️|✔️|✔️| -|[mindspore.Gather](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Gather.html)|✔️|✔️|✔️| -|[mindspore.GatherD](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.GatherD.html)|✔️|✔️|✔️| 
-|[mindspore.GatherNd](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.GatherNd.html)|✔️|✔️|✔️| -|[mindspore.Gcd](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Gcd.html)|✔️|✔️|✔️| -|[mindspore.Geqrf](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Geqrf.html)|✔️|✔️|✔️| -|[mindspore.Ger](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Ger.html)|✔️|✔️|✔️| -|[mindspore.Greater](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Greater.html)|✔️|✔️|✔️| -|[mindspore.GreaterEqual](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.GreaterEqual.html)|✔️|✔️|✔️| -|[mindspore.GridSampler2D](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.GridSampler2D.html)|✔️|✔️|✔️| -|[mindspore.GridSampler3D](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.GridSampler3D.html)|✔️|✔️|✔️| -|[mindspore.Heaviside](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Heaviside.html)|✔️|✔️|✔️| -|[mindspore.HSwish](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.HSwish.html)|✔️|✔️|✔️| -|[mindspore.Hypot](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Hypot.html)|✔️|✔️|✔️| -|[mindspore.Identity](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Identity.html)|✔️|✔️|✔️| -|[mindspore.Igammac](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Igammac.html)|✔️|✔️|✔️| -|[mindspore.Imag](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Imag.html)|✔️|✔️|✔️| -|[mindspore.Invert](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Invert.html)|✔️|✔️|✔️| -|[mindspore.IsFinite](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.IsFinite.html)|✔️|✔️|✔️| -|[mindspore.IsInf](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.IsInf.html)|✔️|✔️|✔️| -|[mindspore.IsNan](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.IsNan.html)|✔️|✔️|✔️| -|[mindspore.KLDivLoss](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.KLDivLoss.html)|✔️|✔️|✔️| -|[mindspore.LayerNorm](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.LayerNorm.html)|✔️|✔️|✔️| -|[mindspore.Lcm](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Lcm.html)|✔️|✔️|✔️| -|[mindspore.LeftShift](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.LeftShift.html)|✔️|✔️|✔️| -|[mindspore.Lerp](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Lerp.html)|✔️|✔️|✔️| -|[mindspore.Less](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Less.html)|✔️|✔️|✔️| -|[mindspore.LessEqual](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.LessEqual.html)|✔️|✔️|✔️| -|[mindspore.LinSpace](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.LinSpace.html)|✔️|✔️|✔️| -|[mindspore.Log](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Log.html)|✔️|✔️|✔️| -|[mindspore.Log1p](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Log1p.html)|✔️|✔️|✔️| -|[mindspore.LogicalAnd](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.LogicalAnd.html)|✔️|✔️|✔️| -|[mindspore.LogicalNot](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.LogicalNot.html)|✔️|✔️|✔️| 
-|[mindspore.LogicalOr](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.LogicalOr.html)|✔️|✔️|✔️| -|[mindspore.LogicalXor](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.LogicalXor.html)|✔️|❌|✔️| -|[mindspore.Logit](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Logit.html)|✔️|✔️|✔️| -|[mindspore.LogSoftmax](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.LogSoftmax.html)|✔️|✔️|✔️| -|[mindspore.LpNorm](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.LpNorm.html)|✔️|✔️|✔️| -|[mindspore.LRN](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.LRN.html)|✔️|✔️|✔️| -|[mindspore.MaskedFill](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.MaskedFill.html)|✔️|✔️|✔️| -|[mindspore.MaskedSelect](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.MaskedSelect.html)|✔️|✔️|✔️| -|[mindspore.MatrixInverse](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.MatrixInverse.html)|❌|✔️|✔️| -|[mindspore.MatrixSolve](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.MatrixSolve.html)|✔️|❌|✔️| -|[mindspore.Maximum](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Maximum.html)|✔️|✔️|✔️| -|[mindspore.MaxPool](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.MaxPool.html)|✔️|✔️|✔️| -|[mindspore.MaxPool3D](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.MaxPool3D.html)|✔️|✔️|✔️| -|[mindspore.MaxUnpool2D](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.MaxUnpool2D.html)|❌|✔️|✔️| -|[mindspore.Median](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Median.html)|❌|✔️|✔️| -|[mindspore.Meshgrid](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Meshgrid.html)|✔️|✔️|✔️| -|[mindspore.Minimum](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Minimum.html)|✔️|✔️|✔️| -|[mindspore.MirrorPad](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.MirrorPad.html)|✔️|❌|✔️| -|[mindspore.Mish](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Mish.html)|❌|✔️|✔️| -|[mindspore.Mod](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Mod.html)|✔️|✔️|✔️| -|[mindspore.Mul](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Mul.html)|✔️|✔️|✔️| -|[mindspore.Multinomial](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Multinomial.html)|✔️|✔️|✔️| -|[mindspore.Mvlgamma](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Mvlgamma.html)|✔️|✔️|✔️| -|[mindspore.NanToNum](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.NanToNum.html)|✔️|❌|✔️| -|[mindspore.Neg](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Neg.html)|✔️|✔️|✔️| -|[mindspore.NextAfter](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.NextAfter.html)|✔️|✔️|✔️| -|[mindspore.NLLLoss](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.NLLLoss.html)|✔️|✔️|✔️| -|[mindspore.nonzero](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.nonzero.html)|✔️|✔️|✔️| -|[mindspore.NotEqual](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.NotEqual.html)|✔️|✔️|✔️| -|[mindspore.OneHot](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.OneHot.html)|✔️|✔️|✔️| 
-|[mindspore.OnesLike](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.OnesLike.html)|✔️|✔️|✔️| -|[mindspore.Pad](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Pad.html)|✔️|✔️|✔️| -|[mindspore.Polar](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Polar.html)|❌|✔️|✔️| -|[mindspore.Polygamma](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Polygamma.html)|❌|✔️|✔️| -|[mindspore.Pow](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Pow.html)|✔️|✔️|✔️| -|[mindspore.PReLU](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.PReLU.html)|✔️|✔️|✔️| -|[mindspore.RandpermV2](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.RandpermV2.html)|❌|❌|✔️| -|[mindspore.Range](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Range.html)|❌|✔️|✔️| -|[mindspore.Real](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Real.html)|❌|✔️|✔️| -|[mindspore.RealDiv](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.RealDiv.html)|✔️|✔️|✔️| -|[mindspore.Reciprocal](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Reciprocal.html)|✔️|✔️|✔️| -|[mindspore.ReduceAll](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ReduceAll.html)|✔️|✔️|✔️| -|[mindspore.ReduceAny](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ReduceAny.html)|✔️|✔️|✔️| -|[mindspore.ReduceMax](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ReduceMax.html)|✔️|✔️|✔️| -|[mindspore.ReduceMean](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ReduceMean.html)|✔️|✔️|✔️| -|[mindspore.ReduceMin](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ReduceMin.html)|✔️|✔️|✔️| -|[mindspore.ReduceProd](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ReduceProd.html)|✔️|✔️|✔️| -|[mindspore.ReduceSum](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ReduceSum.html)|✔️|✔️|✔️| -|[mindspore.Reshape](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Reshape.html)|✔️|✔️|✔️| -|[mindspore.ResizeBicubic](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ResizeBicubic.html)|✔️|✔️|✔️| -|[mindspore.ResizeBilinearV2](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ResizeBilinearV2.html)|✔️|✔️|️❌| -|[mindspore.ReverseSequence](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ReverseSequence.html)|✔️|✔️|✔️| -|[mindspore.ReverseV2](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ReverseV2.html)|✔️|✔️|✔️| -|[mindspore.RightShift](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.RightShift.html)|✔️|✔️|✔️| -|[mindspore.Rint](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Rint.html)|✔️|✔️|✔️| -|[mindspore.Round](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Round.html)|✔️|✔️|✔️| -|[mindspore.Rsqrt](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Rsqrt.html)|✔️|✔️|✔️| -|[mindspore.ScatterNd](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ScatterNd.html)|✔️|✔️|✔️| -|[mindspore.ScatterNdAdd](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ScatterNdAdd.html)|✔️|✔️|✔️| -|[mindspore.ScatterNdMax](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ScatterNdMax.html)|❌|✔️|✔️| 
-|[mindspore.ScatterNdMin](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ScatterNdMin.html)|✔️|✔️|✔️| -|[mindspore.ScatterNdMul](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ScatterNdMul.html)|❌|✔️|✔️| -|[mindspore.ScatterNdSub](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ScatterNdSub.html)|✔️|✔️|✔️| -|[mindspore.ScatterNdUpdate](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ScatterNdUpdate.html)|✔️|✔️|✔️| -|[mindspore.ScatterSub](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ScatterSub.html)|✔️|✔️|✔️| -|[mindspore.ScatterUpdate](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ScatterUpdate.html)|✔️|✔️|✔️| -|[mindspore.Select](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Select.html)|✔️|✔️|✔️| -|[mindspore.Sigmoid](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Sigmoid.html)|✔️|✔️|✔️| -|[mindspore.Sign](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Sign.html)|✔️|✔️|✔️| -|[mindspore.Sin](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Sin.html)|✔️|✔️|✔️| -|[mindspore.Sinc](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Sinc.html)|✔️|✔️|✔️| -|[mindspore.Sinh](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Sinh.html)|✔️|✔️|✔️| -|[mindspore.Slice](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Slice.html)|✔️|✔️|✔️| -|[mindspore.SmoothL1Loss](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.SmoothL1Loss.html)|✔️|✔️|✔️| -|[mindspore.Softmax](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Softmax.html)|✔️|✔️|✔️| -|[mindspore.SoftmaxCrossEntropyWithLogits](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.SoftmaxCrossEntropyWithLogits.html)|✔️|✔️|✔️| -|[mindspore.Softplus](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Softplus.html)|✔️|✔️|✔️| -|[mindspore.SoftShrink](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.SoftShrink.html)|✔️|✔️|✔️| -|[mindspore.Sort](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Sort.html)|✔️|❌|✔️| -|[mindspore.Split](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Split.html)|✔️|✔️|️❌| -|[mindspore.Sqrt](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Sqrt.html)|✔️|✔️|✔️| -|[mindspore.Square](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Square.html)|✔️|✔️|✔️| -|[mindspore.Squeeze](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Squeeze.html)|✔️|✔️|✔️| -|[mindspore.Stack](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Stack.html)|✔️|✔️|✔️| -|[mindspore.StandardNormal](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.StandardNormal.html)|✔️|✔️|✔️| -|[mindspore.StridedSlice](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.StridedSlice.html)|✔️|✔️|✔️| -|[mindspore.Sub](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Sub.html)|✔️|✔️|✔️| -|[mindspore.Svd](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Svd.html)|❌|✔️|✔️| -|[mindspore.Tan](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Tan.html)|✔️|✔️|✔️| -|[mindspore.Tanh](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Tanh.html)|✔️|✔️|✔️| 
-|[mindspore.TensorScatterUpdate](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.TensorScatterUpdate.html)|✔️|✔️|✔️| -|[mindspore.Tile](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Tile.html)|✔️|✔️|✔️| -|[mindspore.TopK](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.TopK.html)|✔️|✔️|✔️| -|[mindspore.Trace](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Trace.html)|✔️|✔️|✔️| -|[mindspore.Transpose](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Transpose.html)|✔️|✔️|✔️| -|[mindspore.Tril](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Tril.html)|✔️|✔️|✔️| -|[mindspore.triu](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.triu.html)|✔️|✔️|✔️| -|[mindspore.Trunc](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Trunc.html)|✔️|✔️|✔️| -|[mindspore.TruncateDiv](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.TruncateDiv.html)|✔️|✔️|️❌| -|[mindspore.UniformInt](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.UniformInt.html)|❌|✔️|✔️| -|[mindspore.UniformReal](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.UniformReal.html)|❌|✔️|✔️| -|[mindspore.Unique](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Unique.html)|✔️|✔️|✔️| -|[mindspore.UnsortedSegmentSum](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.UnsortedSegmentSum.html)|✔️|✔️|✔️| -|[mindspore.Xlogy](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Xlogy.html)|✔️|✔️|✔️| -|[mindspore.ZerosLike](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ZerosLike.html)|✔️|✔️|✔️| +|[mindspore.Abs](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Abs.html)|✔️|✔️|✔️| +|[mindspore.Acosh](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Acosh.html)|✔️|✔️|✔️| +|[mindspore.Adam](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Adam.html)|✔️|✔️|✔️| +|[mindspore.AdaptiveAvgPool2D](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.AdaptiveAvgPool2D.html)|✔️|✔️|✔️| +|[mindspore.AdaptiveAvgPool3D](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.AdaptiveAvgPool3D.html)|✔️|✔️|✔️| +|[mindspore.Add](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Add.html)|✔️|✔️|✔️| +|[mindspore.Addcmul](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Addcmul.html)|✔️|✔️|✔️| +|[mindspore.AddN](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.AddN.html)|✔️|✔️|✔️| +|[mindspore.Angle](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Angle.html)|✔️|✔️|✔️| +|[mindspore.ArgMaxWithValue](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ArgMaxWithValue.html)|✔️|✔️|✔️| +|[mindspore.ArgMinWithValue](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ArgMinWithValue.html)|✔️|✔️|✔️| +|[mindspore.Asin](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Asin.html)|✔️|✔️|✔️| +|[mindspore.Asinh](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Asinh.html)|✔️|✔️|✔️| +|[mindspore.Assign](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Assign.html)|✔️|✔️|✔️| +|[mindspore.AssignAdd](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.AssignAdd.html)|✔️|✔️|✔️| 
+|[mindspore.Atan](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Atan.html)|✔️|✔️|✔️| +|[mindspore.Atan2](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Atan2.html)|✔️|✔️|✔️| +|[mindspore.Atanh](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Atanh.html)|✔️|✔️|✔️| +|[mindspore.AvgPool](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.AvgPool.html)|✔️|✔️|✔️| +|[mindspore.AvgPool3D](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.AvgPool3D.html)|✔️|✔️|✔️| +|[mindspore.BatchNorm](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.BatchNorm.html)|✔️|✔️|✔️| +|[mindspore.BCEWithLogitsLoss](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.BCEWithLogitsLoss.html)|✔️|✔️|✔️| +|[mindspore.Bernoulli](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Bernoulli.html)|❌|✔️|✔️| +|[mindspore.BesselI0](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.BesselI0.html)|❌|✔️|✔️| +|[mindspore.BesselI0e](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.BesselI0e.html)|✔️|✔️|✔️| +|[mindspore.BesselI1](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.BesselI1.html)|❌|✔️|✔️| +|[mindspore.BesselI1e](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.BesselI1e.html)|✔️|✔️|✔️| +|[mindspore.BesselJ0](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.BesselJ0.html)|❌|✔️|✔️| +|[mindspore.BesselJ1](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.BesselJ1.html)|❌|✔️|✔️| +|[mindspore.BiasAdd](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.BiasAdd.html)|✔️|✔️|️❌| +|[mindspore.BinaryCrossEntropy](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.BinaryCrossEntropy.html)|✔️|✔️|✔️| +|[mindspore.BitwiseAnd](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.BitwiseAnd.html)|✔️|✔️|✔️| +|[mindspore.BitwiseOr](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.BitwiseOr.html)|✔️|✔️|✔️| +|[mindspore.BitwiseXor](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.BitwiseXor.html)|✔️|✔️|✔️| +|[mindspore.BroadcastTo](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.BroadcastTo.html)|✔️|✔️|✔️| +|[mindspore.Cast](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Cast.html)|✔️|✔️|✔️| +|[mindspore.Ceil](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Ceil.html)|✔️|✔️|✔️| +|[mindspore.Col2Im](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Col2Im.html)|✔️|✔️|✔️| +|[mindspore.Complex](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Complex.html)|✔️|✔️|✔️| +|[mindspore.Concat](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Concat.html)|✔️|✔️|✔️| +|[mindspore.Conj](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Conj.html)|❌|✔️|✔️| +|[mindspore.Conv2D](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Conv2D.html)|✔️|✔️|✔️| +|[mindspore.Conv2DTranspose](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Conv2DTranspose.html)|✔️|✔️|✔️| +|[mindspore.Conv3D](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Conv3D.html)|✔️|✔️|✔️| 
+|[mindspore.Conv3DTranspose](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Conv3DTranspose.html)|✔️|✔️|✔️| +|[mindspore.Cos](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Cos.html)|✔️|✔️|✔️| +|[mindspore.Cosh](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Cosh.html)|✔️|✔️|✔️| +|[mindspore.Cross](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Cross.html)|✔️|❌|✔️| +|[mindspore.CTCLossV2](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.CTCLossV2.html)|✔️|✔️|✔️| +|[mindspore.Cummax](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Cummax.html)|❌|✔️|✔️| +|[mindspore.Cummin](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Cummin.html)|✔️|✔️|✔️| +|[mindspore.CumSum](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.CumSum.html)|✔️|✔️|️❌| +|[mindspore.Diag](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Diag.html)|✔️|✔️|✔️| +|[mindspore.Digamma](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Digamma.html)|❌|✔️|✔️| +|[mindspore.Div](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Div.html)|✔️|✔️|✔️| +|[mindspore.Dropout](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Dropout.html)|✔️|✔️|✔️| +|[mindspore.Dropout2D](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Dropout2D.html)|✔️|✔️|✔️| +|[mindspore.Dropout3D](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Dropout3D.html)|✔️|✔️|✔️| +|[mindspore.DynamicGRUV2](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.DynamicGRUV2.html)|✔️|❌|❌| +|[mindspore.Einsum](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Einsum.html)|❌|✔️|❌| +|[mindspore.Elu](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Elu.html)|✔️|✔️|✔️| +|[mindspore.Equal](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Equal.html)|✔️|✔️|✔️| +|[mindspore.Erf](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Erf.html)|✔️|✔️|✔️| +|[mindspore.Erfc](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Erfc.html)|✔️|✔️|✔️| +|[mindspore.Exp](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Exp.html)|✔️|✔️|✔️| +|[mindspore.ExpandDims](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ExpandDims.html)|✔️|✔️|✔️| +|[mindspore.Expm1](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Expm1.html)|✔️|✔️|✔️| +|[mindspore.Eye](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Eye.html)|✔️|✔️|✔️| +|[mindspore.FFTWithSize](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.FFTWithSize.html)|✔️|✔️|✔️| +|[mindspore.Fill](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Fill.html)|✔️|✔️|✔️| +|[mindspore.FillV2](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.FillV2.html)|✔️|✔️|✔️| +|[mindspore.Flatten](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Flatten.html)|✔️|✔️|✔️| +|[mindspore.Floor](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Floor.html)|✔️|✔️|✔️| +|[mindspore.FloorDiv](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.FloorDiv.html)|✔️|✔️|✔️| +|[mindspore.FloorMod](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.FloorMod.html)|✔️|✔️|✔️| 
+|[mindspore.Gather](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Gather.html)|✔️|✔️|✔️| +|[mindspore.GatherD](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.GatherD.html)|✔️|✔️|✔️| +|[mindspore.GatherNd](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.GatherNd.html)|✔️|✔️|✔️| +|[mindspore.Gcd](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Gcd.html)|✔️|✔️|✔️| +|[mindspore.Geqrf](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Geqrf.html)|✔️|✔️|✔️| +|[mindspore.Ger](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Ger.html)|✔️|✔️|✔️| +|[mindspore.Greater](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Greater.html)|✔️|✔️|✔️| +|[mindspore.GreaterEqual](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.GreaterEqual.html)|✔️|✔️|✔️| +|[mindspore.GridSampler2D](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.GridSampler2D.html)|✔️|✔️|✔️| +|[mindspore.GridSampler3D](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.GridSampler3D.html)|✔️|✔️|✔️| +|[mindspore.Heaviside](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Heaviside.html)|✔️|✔️|✔️| +|[mindspore.HSwish](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.HSwish.html)|✔️|✔️|✔️| +|[mindspore.Hypot](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Hypot.html)|✔️|✔️|✔️| +|[mindspore.Identity](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Identity.html)|✔️|✔️|✔️| +|[mindspore.Igammac](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Igammac.html)|✔️|✔️|✔️| +|[mindspore.Imag](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Imag.html)|✔️|✔️|✔️| +|[mindspore.Invert](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Invert.html)|✔️|✔️|✔️| +|[mindspore.IsFinite](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.IsFinite.html)|✔️|✔️|✔️| +|[mindspore.IsInf](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.IsInf.html)|✔️|✔️|✔️| +|[mindspore.IsNan](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.IsNan.html)|✔️|✔️|✔️| +|[mindspore.KLDivLoss](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.KLDivLoss.html)|✔️|✔️|✔️| +|[mindspore.LayerNorm](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.LayerNorm.html)|✔️|✔️|✔️| +|[mindspore.Lcm](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Lcm.html)|✔️|✔️|✔️| +|[mindspore.LeftShift](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.LeftShift.html)|✔️|✔️|✔️| +|[mindspore.Lerp](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Lerp.html)|✔️|✔️|✔️| +|[mindspore.Less](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Less.html)|✔️|✔️|✔️| +|[mindspore.LessEqual](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.LessEqual.html)|✔️|✔️|✔️| +|[mindspore.LinSpace](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.LinSpace.html)|✔️|✔️|✔️| +|[mindspore.Log](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Log.html)|✔️|✔️|✔️| +|[mindspore.Log1p](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Log1p.html)|✔️|✔️|✔️| 
+|[mindspore.LogicalAnd](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.LogicalAnd.html)|✔️|✔️|✔️| +|[mindspore.LogicalNot](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.LogicalNot.html)|✔️|✔️|✔️| +|[mindspore.LogicalOr](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.LogicalOr.html)|✔️|✔️|✔️| +|[mindspore.LogicalXor](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.LogicalXor.html)|✔️|❌|✔️| +|[mindspore.Logit](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Logit.html)|✔️|✔️|✔️| +|[mindspore.LogSoftmax](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.LogSoftmax.html)|✔️|✔️|✔️| +|[mindspore.LpNorm](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.LpNorm.html)|✔️|✔️|✔️| +|[mindspore.LRN](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.LRN.html)|✔️|✔️|✔️| +|[mindspore.MaskedFill](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.MaskedFill.html)|✔️|✔️|✔️| +|[mindspore.MaskedSelect](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.MaskedSelect.html)|✔️|✔️|✔️| +|[mindspore.MatrixInverse](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.MatrixInverse.html)|❌|✔️|✔️| +|[mindspore.MatrixSolve](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.MatrixSolve.html)|✔️|❌|✔️| +|[mindspore.Maximum](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Maximum.html)|✔️|✔️|✔️| +|[mindspore.MaxPool](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.MaxPool.html)|✔️|✔️|✔️| +|[mindspore.MaxPool3D](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.MaxPool3D.html)|✔️|✔️|✔️| +|[mindspore.MaxUnpool2D](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.MaxUnpool2D.html)|❌|✔️|✔️| +|[mindspore.Median](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Median.html)|❌|✔️|✔️| +|[mindspore.Meshgrid](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Meshgrid.html)|✔️|✔️|✔️| +|[mindspore.Minimum](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Minimum.html)|✔️|✔️|✔️| +|[mindspore.MirrorPad](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.MirrorPad.html)|✔️|❌|✔️| +|[mindspore.Mish](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Mish.html)|❌|✔️|✔️| +|[mindspore.Mod](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Mod.html)|✔️|✔️|✔️| +|[mindspore.Mul](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Mul.html)|✔️|✔️|✔️| +|[mindspore.Multinomial](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Multinomial.html)|✔️|✔️|✔️| +|[mindspore.Mvlgamma](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Mvlgamma.html)|✔️|✔️|✔️| +|[mindspore.NanToNum](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.NanToNum.html)|✔️|❌|✔️| +|[mindspore.Neg](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Neg.html)|✔️|✔️|✔️| +|[mindspore.NextAfter](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.NextAfter.html)|✔️|✔️|✔️| +|[mindspore.NLLLoss](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.NLLLoss.html)|✔️|✔️|✔️| +|[mindspore.nonzero](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.nonzero.html)|✔️|✔️|✔️| 
+|[mindspore.NotEqual](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.NotEqual.html)|✔️|✔️|✔️| +|[mindspore.OneHot](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.OneHot.html)|✔️|✔️|✔️| +|[mindspore.OnesLike](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.OnesLike.html)|✔️|✔️|✔️| +|[mindspore.Pad](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Pad.html)|✔️|✔️|✔️| +|[mindspore.Polar](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Polar.html)|❌|✔️|✔️| +|[mindspore.Polygamma](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Polygamma.html)|❌|✔️|✔️| +|[mindspore.Pow](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Pow.html)|✔️|✔️|✔️| +|[mindspore.PReLU](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.PReLU.html)|✔️|✔️|✔️| +|[mindspore.RandpermV2](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.RandpermV2.html)|❌|❌|✔️| +|[mindspore.Range](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Range.html)|❌|✔️|✔️| +|[mindspore.Real](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Real.html)|❌|✔️|✔️| +|[mindspore.RealDiv](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.RealDiv.html)|✔️|✔️|✔️| +|[mindspore.Reciprocal](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Reciprocal.html)|✔️|✔️|✔️| +|[mindspore.ReduceAll](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ReduceAll.html)|✔️|✔️|✔️| +|[mindspore.ReduceAny](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ReduceAny.html)|✔️|✔️|✔️| +|[mindspore.ReduceMax](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ReduceMax.html)|✔️|✔️|✔️| +|[mindspore.ReduceMean](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ReduceMean.html)|✔️|✔️|✔️| +|[mindspore.ReduceMin](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ReduceMin.html)|✔️|✔️|✔️| +|[mindspore.ReduceProd](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ReduceProd.html)|✔️|✔️|✔️| +|[mindspore.ReduceSum](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ReduceSum.html)|✔️|✔️|✔️| +|[mindspore.Reshape](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Reshape.html)|✔️|✔️|✔️| +|[mindspore.ResizeBicubic](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ResizeBicubic.html)|✔️|✔️|✔️| +|[mindspore.ResizeBilinearV2](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ResizeBilinearV2.html)|✔️|✔️|️❌| +|[mindspore.ReverseSequence](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ReverseSequence.html)|✔️|✔️|✔️| +|[mindspore.ReverseV2](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ReverseV2.html)|✔️|✔️|✔️| +|[mindspore.RightShift](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.RightShift.html)|✔️|✔️|✔️| +|[mindspore.Rint](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Rint.html)|✔️|✔️|✔️| +|[mindspore.Round](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Round.html)|✔️|✔️|✔️| +|[mindspore.Rsqrt](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Rsqrt.html)|✔️|✔️|✔️| +|[mindspore.ScatterNd](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ScatterNd.html)|✔️|✔️|✔️| 
+|[mindspore.ScatterNdAdd](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ScatterNdAdd.html)|✔️|✔️|✔️| +|[mindspore.ScatterNdMax](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ScatterNdMax.html)|❌|✔️|✔️| +|[mindspore.ScatterNdMin](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ScatterNdMin.html)|✔️|✔️|✔️| +|[mindspore.ScatterNdMul](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ScatterNdMul.html)|❌|✔️|✔️| +|[mindspore.ScatterNdSub](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ScatterNdSub.html)|✔️|✔️|✔️| +|[mindspore.ScatterNdUpdate](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ScatterNdUpdate.html)|✔️|✔️|✔️| +|[mindspore.ScatterSub](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ScatterSub.html)|✔️|✔️|✔️| +|[mindspore.ScatterUpdate](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ScatterUpdate.html)|✔️|✔️|✔️| +|[mindspore.Select](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Select.html)|✔️|✔️|✔️| +|[mindspore.Sigmoid](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Sigmoid.html)|✔️|✔️|✔️| +|[mindspore.Sign](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Sign.html)|✔️|✔️|✔️| +|[mindspore.Sin](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Sin.html)|✔️|✔️|✔️| +|[mindspore.Sinc](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Sinc.html)|✔️|✔️|✔️| +|[mindspore.Sinh](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Sinh.html)|✔️|✔️|✔️| +|[mindspore.Slice](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Slice.html)|✔️|✔️|✔️| +|[mindspore.SmoothL1Loss](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.SmoothL1Loss.html)|✔️|✔️|✔️| +|[mindspore.Softmax](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Softmax.html)|✔️|✔️|✔️| +|[mindspore.SoftmaxCrossEntropyWithLogits](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.SoftmaxCrossEntropyWithLogits.html)|✔️|✔️|✔️| +|[mindspore.Softplus](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Softplus.html)|✔️|✔️|✔️| +|[mindspore.SoftShrink](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.SoftShrink.html)|✔️|✔️|✔️| +|[mindspore.Sort](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Sort.html)|✔️|❌|✔️| +|[mindspore.Split](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Split.html)|✔️|✔️|️❌| +|[mindspore.Sqrt](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Sqrt.html)|✔️|✔️|✔️| +|[mindspore.Square](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Square.html)|✔️|✔️|✔️| +|[mindspore.Squeeze](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Squeeze.html)|✔️|✔️|✔️| +|[mindspore.Stack](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Stack.html)|✔️|✔️|✔️| +|[mindspore.StandardNormal](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.StandardNormal.html)|✔️|✔️|✔️| +|[mindspore.StridedSlice](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.StridedSlice.html)|✔️|✔️|✔️| +|[mindspore.Sub](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Sub.html)|✔️|✔️|✔️| 
+|[mindspore.Svd](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Svd.html)|❌|✔️|✔️| +|[mindspore.Tan](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Tan.html)|✔️|✔️|✔️| +|[mindspore.Tanh](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Tanh.html)|✔️|✔️|✔️| +|[mindspore.TensorScatterUpdate](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.TensorScatterUpdate.html)|✔️|✔️|✔️| +|[mindspore.Tile](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Tile.html)|✔️|✔️|✔️| +|[mindspore.TopK](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.TopK.html)|✔️|✔️|✔️| +|[mindspore.Trace](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Trace.html)|✔️|✔️|✔️| +|[mindspore.Transpose](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Transpose.html)|✔️|✔️|✔️| +|[mindspore.Tril](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Tril.html)|✔️|✔️|✔️| +|[mindspore.triu](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.triu.html)|✔️|✔️|✔️| +|[mindspore.Trunc](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Trunc.html)|✔️|✔️|✔️| +|[mindspore.TruncateDiv](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.TruncateDiv.html)|✔️|✔️|️❌| +|[mindspore.UniformInt](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.UniformInt.html)|❌|✔️|✔️| +|[mindspore.UniformReal](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.UniformReal.html)|❌|✔️|✔️| +|[mindspore.Unique](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Unique.html)|✔️|✔️|✔️| +|[mindspore.UnsortedSegmentSum](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.UnsortedSegmentSum.html)|✔️|✔️|✔️| +|[mindspore.Xlogy](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Xlogy.html)|✔️|✔️|✔️| +|[mindspore.ZerosLike](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ZerosLike.html)|✔️|✔️|✔️| diff --git a/docs/mindspore/source_en/api_python/env_var_list.rst b/docs/mindspore/source_en/api_python/env_var_list.rst index 46144ab9ec..ac5ae1fb05 100644 --- a/docs/mindspore/source_en/api_python/env_var_list.rst +++ b/docs/mindspore/source_en/api_python/env_var_list.rst @@ -1,8 +1,8 @@ Environment Variables ===================== -.. image:: https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg - :target: https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/api_python/env_var_list.rst +.. image:: https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg + :target: https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/api_python/env_var_list.rst :alt: View Source On Gitee MindSpore environment variables are as follows: @@ -73,7 +73,7 @@ Data Processing false: disables pipeline tree optimization. - -For more information, see `Single-Node Data Cache `_ and `Optimizing the Data Processing `_. +For more information, see `Single-Node Data Cache `_ and `Optimizing the Data Processing `_. Graph Compilation and Execution --------------------------------- @@ -251,7 +251,7 @@ Graph Compilation and Execution - 1: Enable graceful exit. No setting or other value: Disable graceful exit. - - Rely on the callback function to enable graceful exit. Refer to the `Example of Graceful Exit `_ . 
+ - Rely on the callback function to enable graceful exit. Refer to the `Example of Graceful Exit `_ . * - MS_DEV_BOOST_INFER - Compile optimization switch for graph compilation. This switch accelerates the type inference module to speed up network compilation. - Integer @@ -341,7 +341,7 @@ Graph Compilation and Execution enable_debug_mode: Insert synchronization points before and after the graph kernel mod launch, and print debugging information if the launch fails. This is supported only for the GPU backend. Default value: `False`. path: use specified json file. When this option is set, the above options are ignored. - - Refer to the `Custom Fusion `_ + - Refer to the `Custom Fusion `_ * - MS_DEV_DISABLE_AUTO_H2D - Control whether the input of the operator performs implicit copying in PyNative mode. When enabled, implicit copying of operator input will be disabled. @@ -410,13 +410,13 @@ Dump Debugging - Value Range - Description * - MINDSPORE_DUMP_CONFIG - - Specify the path of the configuration file that the `cloud-side Dump `_ + - Specify the path of the configuration file that the `cloud-side Dump `_ or the `device-side Dump `_ depends on. - String - File path, which can be a relative path or an absolute path. - * - MS_DIAGNOSTIC_DATA_PATH - - When the `cloud-side Dump `_ is enabled, + - When the `cloud-side Dump `_ is enabled, if the `path` field is not set or set to an empty string in the Dump configuration file, then `$MS_DIAGNOSTIC_DATA_PATH` `/debug_dump` is regarded as path. If the `path` field in configuration file is not empty, it is still used as the path to save Dump data. - String @@ -465,7 +465,7 @@ Dump Debugging - 0~600, unit: Seconds, default value is 0. The value 0 means using default wait time, i.e. the value of `mindspore.get_context("op_timeout")`. - This environment variable only takes effect when value of `MS_DUMP_SLICE_SIZE` is greater than 0. Now the wait time can not exceed value of `mindspore.get_context("op_timeout")`. -For more information, see `Using Dump in the Graph Mode `_. +For more information, see `Using Dump in the Graph Mode `_. Distributed Parallel --------------------- @@ -633,7 +633,7 @@ Distributed Parallel - -See `Dynamic Cluster `_ for more details about Dynamic Cluster. +See `Dynamic Cluster `_ for more details about Dynamic Cluster. Operators Compile ----------------- @@ -704,7 +704,7 @@ Operators Compile Default value: empty string. - -For more information, see `FAQ `_. +For more information, see `FAQ `_. Log --- @@ -812,7 +812,7 @@ Log - The assignment way is:`MS_SUBMODULE_LOG_v="{SubModule1:LogLevel1,SubModule2:LogLevel2,...}"` - The log level of the specified sub-module will override the setting of `GLOG_v` in this module, where the log level of the sub-module `LogLevel` has the same meaning as that of `GLOG_v`. For a detailed list of MindSpore sub-modules, see `sub-module_names `_. + The log level of the specified sub-module will override the setting of `GLOG_v` in this module, where the log level of the sub-module `LogLevel` has the same meaning as that of `GLOG_v`. For a detailed list of MindSpore sub-modules, see `sub-module_names `_. For example, you can set the log level of `PARSER` and `ANALYZER` modules to WARNING and the log level of other modules to INFO by `GLOG_v=1 MS_SUBMODULE_LOG_v="{PARSER:2,ANALYZER:2}"`. 
* - GLOG_logfile_mode @@ -906,7 +906,7 @@ Silent Data Corruption Detection 1: Enable CheckSum for silent data corruption Detection - Currently, this feature only supports Atlas A2 training series products, and only supports CheckSum for MatMul with bfloat16 data type in O0 or O1 mode -For more information on feature value detection, see `Feature Value Detection `_. +For more information on feature value detection, see `Feature Value Detection `_. Third-party Library @@ -1030,7 +1030,7 @@ Profiler profiler_level (str, optional) - Set the level of performance data collection. Default value: Level0. Possible values: Level0, Level1, Level2. - Refer to other parameters, see `Description of MindSpore profile parameters `_. + Refer to other parameters, see `Description of MindSpore profile parameters `_. - This environment variable enables one of two ways to enable performance data collection with the input parameter instantiation Profiler method. * - PROFILING_MODE diff --git a/docs/mindspore/source_en/api_python/operator_list_parallel.md b/docs/mindspore/source_en/api_python/operator_list_parallel.md index 030afe9512..b5fddcd598 100644 --- a/docs/mindspore/source_en/api_python/operator_list_parallel.md +++ b/docs/mindspore/source_en/api_python/operator_list_parallel.md @@ -1,176 +1,176 @@ # Usage Constraints During Operator Parallel -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/api_python/operator_list_parallel.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/api_python/operator_list_parallel.md) | API name | constraints | Config layout constraints | | :----------------------------------------------------------- | :----------------------------------------------------------- | :----------------------------------------------------------- | -| [mindspore.ops.Abs](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Abs.html) | None | Not support config layout | -| [mindspore.ops.ACos](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ACos.html) | None | Not support config layout | -| [mindspore.ops.Acosh](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Acosh.html) | None | Not support config layout | -| [mindspore.ops.Add](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Add.html) | None | Layout configuration is supported. The input layout should be the same or broadcastable. The output layout cannot be configured. | -| [mindspore.ops.AddN](https://mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.AddN.html) | None | Not support config layout | -| [mindspore.ops.ApproximateEqual](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ApproximateEqual.html) | None | Not support config layout | -| [mindspore.ops.ArgMaxWithValue](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ArgMaxWithValue.html) | When the input_x is split on the axis dimension, the distributed result may be inconsistent with that on the single machine. 
| Not support config layout | -| [mindspore.ops.ArgMinWithValue](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ArgMinWithValue.html) | When the input_x is split on the axis dimension, the distributed result may be inconsistent with that on the single machine. | Not support config layout | -| [mindspore.ops.Asin](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Asin.html) | None | Not support config layout | -| [mindspore.ops.Asinh](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Asinh.html) | None | Not support config layout | -| [mindspore.ops.Assign](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Assign.html) | None | Not support config layout | -| [mindspore.ops.AssignAdd](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.AssignAdd.html) | None | Not support config layout | -| [mindspore.ops.AssignSub](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.AssignSub.html) | None | Not support config layout | -| [mindspore.ops.Atan](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Atan.html) | None | Not support config layout | -| [mindspore.ops.Atan2](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Atan2.html) | None | Not support config layout | -| [mindspore.ops.Atanh](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Atanh.html) | None | Not support config layout | -| [mindspore.ops.AvgPool](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.AvgPool.html) | 1. The data format only supports 'NCHW';
2. The shapes of output H/W dimension must be divisible by the split strategies of input H/W dimension;
3. If H/W is split:
1) If the kernel_size <= stride, the input slice size must be divisible by stride;
2) It does not support kernel_size > stride;
4. In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | -| [mindspore.ops.AvgPool3D](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.AvgPool3D.html) | 1. The data format only supports 'NCDHW';
2. If data exchange between adjacent nodes is involved, only Ascend is supported;
3. The W dimensions can not be split;
4. The output shape of D/H dimension must be divisible by the strategy of input D/H dimensions;
5. In valid mode: If D/H dimension is split:
1) When the kernel_size <= stride, the input‘s slice shape of D/H dimension must be divisible by stride;
2) It does not support that kernel_size > stride;
6. In the same/pad mode: If D/H dimension is split:
1) If kernel_size >= stride, (Total input length including pad - kernel_size) must be divisible by stride. Otherwise, the pad must be 0 and the slice shape of D/H dimension must be divisible by stride;
2) (Output length* stride - input length) must be divisible by strategy:
3) The length of data sent and received between adjacent cards must be greater than or equal to 0 and less than or equal to the slice size;
7. In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | -| [mindspore.ops.BatchMatMul](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.BatchMatMul.html) | The splitting rule for the last two dimensions of each input and output is the same as that of the MatMul operator. |Support config layout. The splitting rule for the last two dimensions of each input and output is the same as that of the MatMul operator. -| [mindspore.ops.BatchNorm](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.BatchNorm.html) | It does not support GPU. | Not support config layout | -| [mindspore.ops.BesselI0e](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.BesselI0e.html) | None | Not support config layout | -| [mindspore.ops.BesselI1e](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.BesselI1e.html) | None | Not support config layout | -| [mindspore.ops.BiasAdd](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.BiasAdd.html) | None | Support config layout. The second input, bias, should have the same tensor layout as the last dimension of input_x. Output Layout is not open for configuration. | -| [mindspore.ops.BitwiseAnd](https://mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.BitwiseAnd.html) | None | Not support config layout | -| [mindspore.ops.BitwiseOr](https://mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.BitwiseOr.html) | None | Not support config layout | -| [mindspore.ops.BitwiseXor](https://mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.BitwiseXor.html) | None | Not support config layout | -| [mindspore.ops.BoundingBoxEncode](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.BoundingBoxEncode.html) | 1. The first dimension of input (anchor_box) and input (groundtruth_box) can be split;
2. The sharding strategies of input (anchor_box) and input (groundtruth_box) must be the same. | Not support config layout | -| [mindspore.ops.BroadcastTo](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.BroadcastTo.html) | None | Not support config layout | -| [mindspore.ops.Cast](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Cast.html) | The shard strategy is ignored in the Auto Parallel and Semi Auto Parallel mode. | Not support config layout | -| [mindspore.ops.Cdist](https://mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Cdist.html) | 1. The strategy for 'B' dimension must be the same;
2.`M` dimension can't be split. | Not support config layout | -| [mindspore.ops.Ceil](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Ceil.html) | None | Not support config layout | -| [mindspore.ops.Concat](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Concat.html) | The input_x can't be split into the dimension of axis, otherwise it's inconsistent with the single machine in the mathematical logic. | Not support config layout | -| [mindspore.ops.Conv2D](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Conv2D.html) | 1. The data format only supports 'NCHW';
2. If data exchange between adjacent nodes is involved, only Ascend is supported;
3. When the value of group is not 1, can not split C-in or C-out;
4. The last two dimensions of weight can not be split;
5. The output shape of H/W dimension must be divisible by the strategy of input H/W dimensions;
6. In valid mode: If H/W dimension is split:
1) When the kernel_size <= stride (kernel_size is dilation *(kernel_size - 1) + 1, the same below), the input‘s slice shape of H/W dimension must be divisible by stride;
2) It does not support that kernel_size > stride;
7. In the same/pad mode: If H/W dimension is split:
1) If kernel_size >= stride, (Total input length including pad - kernel_size) must be divisible by stride. Otherwise, the pad must be 0 and the slice shape of H/W dimension must be divisible by stride;
2) (Output length* stride - input length) must be divisible by strategy:
3) The length of data sent and received between adjacent cards must be greater than or equal to 0 and less than or equal to the slice size; | Not support config layout | -| [mindspore.ops.Conv3D](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Conv3D.html) | 1. The data format only supports 'NCDHW';
2. If data exchange between adjacent nodes is involved, only Ascend is supported;
3. When the value of group is not 1, can not split C-in or C-out;
4. The W dimension and the last three dimensions of weight can not be split;
5. The output shape of D/H dimension must be divisible by the strategy of input D/H dimensions;
6. In valid mode: If D/H dimension is split:
1) When the kernel_size <= stride (kernel_size is dilation *(kernel_size - 1) + 1, the same below), the input‘s slice shape of D/H dimension must be divisible by stride;
2) It does not support that kernel_size > stride;
7. In the same/pad mode: If D/H dimension is split:
1) If kernel_size >= stride, (Total input length including pad - kernel_size) must be divisible by stride. Otherwise, the pad must be 0 and the slice shape of D/H dimension must be divisible by stride;
2) (Output length* stride - input length) must be divisible by strategy:
3) The length of data sent and received between adjacent cards must be greater than or equal to 0 and less than or equal to the slice size;
8. In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | -| [mindspore.ops.Cos](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Cos.html) | None | Not support config layout | -| [mindspore.ops.Cosh](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Cosh.html) | None | Not support config layout | -| [mindspore.ops.CropAndResize](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.CropAndResize.html) | 1. Sharding of the H/W dimension of input (x) and the second dimension of input (boxes) is not supported.
2. The shard strategy for the first dimension of inputs (boxes) and (box_index) must be the same. | Not support config layout | -| [mindspore.ops.CumProd](https://mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.CumProd.html) | The `axis` dimension for `input` can't be split. | Not support config layout | -| [mindspore.ops.CumSum](https://mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.CumSum.html) | The same as CumProd. | Not support config layout | -| [mindspore.ops.Div](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Div.html) | None | Layout configuration is supported. The input layout should be the same or broadcastable. The output layout cannot be configured. | -| [mindspore.ops.DivNoNan](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.DivNoNan.html) | None | Not support config layout | -| [mindspore.ops.Dropout](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Dropout.html) | None | Not support config layout | -| [mindspore.ops.Elu](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Elu.html) | None | Not support config layout | -| [mindspore.ops.embedding](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.embedding.html) | 1. padding_idx, max_norm, norm_type, and scale_gradid_by_freq only support default values.
2. The first input does not support splitting.
3. The second input does not support scenarios where it cannot be cut off. | Layout configuration is supported. | -| [mindspore.ops.EmbeddingLookup](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.EmbeddingLookup.html) | The same as Gather. | Not support config layout | -| [mindspore.ops.Equal](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Equal.html) | None | Not support config layout | -| [mindspore.ops.Erf](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Erf.html) | None | Not support config layout | -| [mindspore.ops.Erfc](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Erfc.html) | None | Not support config layout | -| [mindspore.ops.Erfinv](https://mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Erfinv.html) | None | Not support config layout | -| [mindspore.ops.Exp](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Exp.html) | None | Not support config layout | -| [mindspore.ops.ExpandDims](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ExpandDims.html) | None | Not support config layout | -| [mindspore.ops.Expm1](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Expm1.html) | None | Not support config layout | -| [mindspore.ops.Floor](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Floor.html) | None | Not support config layout | -| [mindspore.ops.FloorDiv](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.FloorDiv.html) | None | Not support config layout | -| [mindspore.ops.FloorMod](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.FloorMod.html) | None | Not support config layout | -| [mindspore.ops.Gamma](https://mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Gamma.html) | 1. Set the strategy for `shape`. e.g shape=(8, 16), the corresponding policy can be (2, 4);
2. The strategy for `alpha` and `beta` must be all-1;
3. When the setting for `shard` is not all-1 strategy, the result is inconsistent with standalone. | Not support config layout | -| [mindspore.ops.Gather](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Gather.html) | 1. When batch_dims > 0:
1) The axis dimension of input_params can not be split;
2) Non-uniform split is not supported;
2. When batch_dims = 0:
1) If uniform split:
a) and if the axis dimensions of input_params is not split, other dimensions can be split;
b) If the axis dimensions of input_params is split: The input_params only supports 1 and 2 dimensions; The input_indices can not be scalar tensor; Does not support to split input_params and input_indices at the same time; When axis = 0 and the parameter is split in the dimension of axis, the output strategy can be configured. The legal output shard strategy is (indices_strategy, param_strategy[1:]) or ((indices_strategy[0]*param_strategy[0], indices_strategy[1:]), param_strategy[1:])
2) Non-uniform split:
a) Only support axis = 0;
b) The non-uniform split only represents the non-uniformity of the 0th dimension of input_params, and the last dimension of the params slice should be aligned by 32 bytes;
c) The number of slices in the 0th dimension of input_params should be equal to that of the last dimension of input_indices;
d) Each dimension of input_params can be split, but input_indices can only split the last dimension, and does not support repeated calculations;
e) Input_indices shall meet the following requirements: the Tensor value of the next slice shall be greater than that of the previous slice. | Not support config layout | -| [mindspore.ops.GatherD](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.GatherD.html) | The dimension corresponding to dim cannot be segmented; In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | -| [mindspore.ops.GatherNd](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.GatherNd.html) | The first input can't be split, and the last dimension of the second input can't be split; In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | -| [mindspore.ops.GeLU](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.GeLU.html) | None | Support config input layout. Output Layout is not open for configuration. | -| [mindspore.ops.Greater](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Greater.html) | None | Not support config layout | -| [mindspore.ops.GreaterEqual](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.GreaterEqual.html) | None | Not support config layout | -| [mindspore.ops.HShrink](https://mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.HShrink.html) | None | Not support config layout | -| [mindspore.ops.HSigmoid](https://mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.HSigmoid.html) | None | Not support config layout | -| [mindspore.ops.InplaceAdd](https://mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.InplaceAdd.html) | The first dimension of `x` and `input_v` can't be split. | Not support config layout | -| [mindspore.ops.InplaceSub](https://mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.InplaceSub.html) | The same as InplaceAdd. | Not support config layout | -| [mindspore.ops.InplaceUpdate](https://mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.InplaceUpdate.html) | The same as InplaceAdd. | Not support config layout | -| [mindspore.ops.Inv](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Inv.html) | None | Not support config layout | -| [mindspore.ops.IOU](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.IOU.html) | The first dimension of the `anchor_boxes` and `gt_boxes` can be spilt. | Not support config layout | -| [mindspore.ops.IsFinite](https://mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.IsFinite.html) | None | Not support config layout | -| [mindspore.ops.KLDivLoss](https://mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.KLDivLoss.html) | None | Not support config layout | -| [mindspore.ops.LayerNorm](https://mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.LayerNorm.html) | The strategy for the second input gamma and the third input beta needs to be equal to the input_x_strategy[begin_params.axis:], input_x_strategy is the strategy for the first input. | Support config layout. The layout configuration for the second input gamma and the third input beta needs to be equal to the input_x_layout_tuple[begin_params.axis:], input_x_layout_tuple is the layout configuration for the first input. 
| -| [mindspore.ops.L2Loss](https://mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.L2Loss.html) | None | Not support config layout | -| [mindspore.ops.L2Normalize](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.L2Normalize.html) | The input_x can't be split into the dimension of axis, otherwise it's inconsistent with the single machine in the mathematical logic. | Not support config layout | -| [mindspore.ops.Lerp](https://mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Lerp.html) | None | Not support config layout | -| [mindspore.ops.Less](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Less.html) | None | Not support config layout | -| [mindspore.ops.LessEqual](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.LessEqual.html) | None | Not support config layout | -| [mindspore.ops.LinSpace](https://mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.LinSpace.html) | You don't need to configure strategy for `start` and `end`. You just need to pass in a strategy of length 1 whose value divisible into `num`. | Not support config layout | -| [mindspore.ops.LogicalAnd](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.LogicalAnd.html) | None | Not support config layout | -| [mindspore.ops.LogicalNot](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.LogicalNot.html) | None | Not support config layout | -| [mindspore.ops.LogicalOr](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.LogicalOr.html) | None | Not support config layout | -| [mindspore.ops.Log](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Log.html) | None | Not support config layout | -| [mindspore.ops.Log1p](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Log1p.html) | None | Not support config layout | -| [mindspore.ops.LogSoftmax](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.LogSoftmax.html) | The logits can't be split into the dimension of axis, otherwise it's inconsistent with the single machine in the mathematical logic. | Not support config layout | -| [mindspore.ops.MaskedFill](https://mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.MaskedFill.html) | None | Not support config layout | -| [mindspore.ops.MatMul](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.MatMul.html) | 1. When `transpose_b=True` is set, the input's split strategy must be in the form of ((A, B), (C, B));
2. When `transpose_b=False` is set, the input's split strategy must be in the form of ((A, B), (B, C));
3. It is supported to set the output's split strategy, the legal output's split strategy is ((A, C),) or ((A * B, C),) | Support config layout.
1. When `transpose_b=True` is set, the input's layout configuration must be in the form of (layout(A, B), layout(C, B)), A/B/C is the alias name of device axis or the alias name tuple;
2. When `transpose_b=False` is set, the input's layout configuration must be in the form of (layout(A, B), layout(B, C)), A/B/C is the alias name of device axis or the alias name tuple;
3. It is supported to config the output's layout, the legal output's layout configuration is (layout(A, C),) or (layout((A, B), C),), A/B/C is the alias name of device axis; When A is tuple of alias name (A1, A2), the legal output's layout configuration is (layout((A1, A2), C),) or (layout((A1, A2, B), C),);
4. In the split strategy, if A/B/C is the alias name of device axis, A/B/C should be different from each other. If there exist alias name tuples in A/B/C, each element in the tuple should also be different from other elements or other device alias names. | -| [mindspore.ops.Maximum](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Maximum.html) | None | Not support config layout | -| [mindspore.ops.MaxPool](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.MaxPool.html) | 1. The data format only supports 'NCHW';
2. The shapes of output H/W dimension must be divisible by the split strategies of input H/W dimension;
3. If H/W is split:
1) If the kernel_size <= stride, the input slice size must be divisible by stride;
2) It does not support kernel_size > stride;
4. In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | -| [mindspore.ops.MaxPool3D](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.MaxPool3D.html) | The same as AvgPool3D. | Not support config layout | -| [mindspore.ops.Minimum](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Minimum.html) | None | Not support config layout | -| [mindspore.ops.Mish](https://mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Mish.html) | None | Not support config layout | -| [mindspore.ops.Mod](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Mod.html) | None | Not support config layout | -| [mindspore.ops.Mul](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Mul.html) | None | Layout configuration is supported. The input layout should be the same or broadcastable. The output layout cannot be configured. | -| [mindspore.ops.MulNoNan](https://mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.MulNoNan.html) | None | Not support config layout | -| [mindspore.ops.Neg](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Neg.html) | None | Not support config layout | -| [mindspore.ops.NotEqual](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.NotEqual.html) | None | Not support config layout | -| [mindspore.ops.OneHot](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.OneHot.html) | Only support 1-dim indices. Must configure strategy for the output and the first and second inputs. | Not support config layout | -| [mindspore.ops.OnesLike](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.OnesLike.html) | None | Not support config layout | -| [mindspore.ops.Pow](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Pow.html) | None | Not support config layout | -| [mindspore.ops.PReLU](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.PReLU.html) | When the shape of weight is not [1], the shard strategy in channel dimension of input_x should be consistent with weight. | Not support config layout | -| [mindspore.ops.RandomChoiceWithMask](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.RandomChoiceWithMask.html) | Only the all-1 strategy is supported. | Not support config layout | -| [mindspore.ops.RealDiv](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.RealDiv.html) | None | Not support config layout | -| [mindspore.ops.Reciprocal](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Reciprocal.html) | None | Not support config layout | -| [mindspore.ops.ReduceMax](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ReduceMax.html) | When the input_x is splited on the axis dimension, the distributed result may be inconsistent with that on the single machine. | Not support config layout | -| [mindspore.ops.ReduceMin](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ReduceMin.html) | When the input_x is splited on the axis dimension, the distributed result may be inconsistent with that on the single machine. 
| Not support config layout | -| [mindspore.ops.ReduceSum](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ReduceSum.html) | None | Not support config layout | -| [mindspore.ops.ReduceMean](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ReduceMean.html) | None | Not support config layout | -| [mindspore.ops.ReLU](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ReLU.html) | None | Support config input layout. Output layout is not open for configuration. | -| [mindspore.ops.ReLU6](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ReLU6.html) | None | Not support config layout | -| [mindspore.ops.Reshape](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Reshape.html) | Configuring sharding strategy is not supported. In auto parallel mode, if multiple operators are followed by the reshape operator, different shard strategys are not allowed to be configured for these operators. | Not support config layout | -| [mindspore.ops.Rint](https://mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Rint.html) | None | Not support config layout | -| [mindspore.ops.ResizeNearestNeighbor](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ResizeNearestNeighbor.html) | When `align_corners=True` is set, only the first dimension and the second dimension can be split. | Not support config layout | -| [mindspore.ops.ROIAlign](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ROIAlign.html) | Sharding the H/W dimension of the input(features) and the second dimension of input(rois) is not supported. | Not support config layout | -| [mindspore.ops.Round](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Round.html) | None | Not support config layout | -| [mindspore.ops.Rsqrt](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Rsqrt.html) | None | Not support config layout | -| [mindspore.ops.ScatterAdd](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ScatterAdd.html) | The second input cannot be split, and the top n dimensions of the third input (n is the dimension of the second input) cannot be split; In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | -| [mindspore.ops.ScatterDiv](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ScatterDiv.html) | The second input cannot be split, and the top n dimensions of the third input (n is the dimension of the second input) cannot be split; In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | -| [mindspore.ops.ScatterMax](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ScatterMax.html) | The first dimension of the first input cannot be split, the second input cannot be split, and the top n dimensions of the third input (n is the dimension of the second input) cannot be split; In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | -| [mindspore.ops.ScatterMin](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ScatterMin.html) | The first dimension of the first input cannot be split, the second input cannot be split, and the top n dimensions of the third input (n is the dimension of the second input) cannot be split; In auto_parallel mode, the dual recursive algorithm is not supported. 
| Not support config layout | -| [mindspore.ops.ScatterMul](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ScatterMul.html) | The second input cannot be split, and the top n dimensions of the third input (n is the dimension of the second input) cannot be split; In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | -| [mindspore.ops.ScatterNdAdd](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ScatterNdAdd.html) | The second input cannot be split, the top n-1 dimension of the third input (n is the dimension of the second input) cannot be split, and the remaining k dimensions (excluding the top n-1 dimension) of the third input are consistent with the last k partitions of the first input; In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | -| [mindspore.ops.ScatterNdSub](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ScatterNdSub.html) | The second input cannot be split, the top n-1 dimension of the third input (n is the dimension of the second input) cannot be split, and the remaining k dimensions (excluding the top n-1 dimension) of the third input are consistent with the last k partitions of the first input; In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | -| [mindspore.ops.ScatterNdUpdate](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ScatterNdUpdate.html) | The top m dimension of the first input cannot be cut (m is the value of the last dimension of the second input indexes [- 1]). The second input cannot be split. The top n-1 dimension of the third input (n is the dimension of the second input) cannot be split. The partitions of the remaining k dimensions (excluding the top n-1 dimension) of the third input is consistent with the last k partitions of the first input; In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | -| [mindspore.ops.ScatterSub](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ScatterSub.html) | The second input cannot be split, and the top n dimensions of the third input (n is the dimension of the second input) cannot be split; In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | -| [mindspore.ops.ScatterUpdate](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ScatterUpdate.html) | The first dimension of first input can not be split, the second input can not be split, and the first n dimensions (n is the dimension size of the second input) of the third input can not be split; In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | -| [mindspore.ops.TensorScatterAdd](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.TensorScatterAdd.html) | The second input cannot be split, the top n-1 dimension of the third input (n is the dimension of the second input) cannot be split, and the remaining k dimensions (excluding the top n-1 dimension) of the third input are consistent with the last k partitions of the first input; In auto_parallel mode, the dual recursive algorithm is not supported. 
| Not support config layout | -| [mindspore.ops.TensorScatterDiv](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.TensorScatterDiv.html) | The second input cannot be split, the top n-1 dimension of the third input (n is the dimension of the second input) cannot be split, and the remaining k dimensions (excluding the top n-1 dimension) of the third input are consistent with the last k partitions of the first input; In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | -| [mindspore.ops.TensorScatterMax](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.TensorScatterMax.html) | The top m dimension of the first input cannot be cut (m is the value of the last dimension of the second input indexes [- 1]). The second input cannot be split. The top n-1 dimension of the third input (n is the dimension of the second input) cannot be split. The partitions of the remaining k dimensions (excluding the top n-1 dimension) of the third input is consistent with the last k partitions of the first input; In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | -| [mindspore.ops.TensorScatterMax](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.TensorScatterMin.html) | The top m dimension of the first input cannot be cut (m is the value of the last dimension of the second input indexes [- 1]). The second input cannot be split. The top n-1 dimension of the third input (n is the dimension of the second input) cannot be split. The partitions of the remaining k dimensions (excluding the top n-1 dimension) of the third input is consistent with the last k partitions of the first input; In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | -| [mindspore.ops.TensorScatterMul](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.TensorScatterMul.html) | The second input cannot be split, the top n-1 dimension of the third input (n is the dimension of the second input) cannot be split, and the remaining k dimensions (excluding the top n-1 dimension) of the third input are consistent with the last k partitions of the first input; In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | -| [mindspore.ops.TensorScatterAdd](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.TensorScatterSub.html) | The second input cannot be split, the top n-1 dimension of the third input (n is the dimension of the second input) cannot be split, and the remaining k dimensions (excluding the top n-1 dimension) of the third input are consistent with the last k partitions of the first input; In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | -| [mindspore.ops.TensorScatterUpdate](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.TensorScatterUpdate.html) | The top m dimension of the first input cannot be cut (m is the value of the last dimension of the second input indexes [- 1]). The second input cannot be split. The top n-1 dimension of the third input (n is the dimension of the second input) cannot be split. The partitions of the remaining k dimensions (excluding the top n-1 dimension) of the third input is consistent with the last k partitions of the first input; In auto_parallel mode, the dual recursive algorithm is not supported. 
| Not support config layout | -| [mindspore.ops.Select](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Select.html) | In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | -| [mindspore.ops.SeLU](https://mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.SeLU.html) | None | Not support config layout | -| [mindspore.ops.Sigmoid](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Sigmoid.html) | None | Support config input layout. Output layout is not open for configuration. | -| [mindspore.ops.SigmoidCrossEntropyWithLogits](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.SigmoidCrossEntropyWithLogits.html) | None | Not support config layout | -| [mindspore.ops.Sign](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Sign.html) | None | Not support config layout | -| [mindspore.ops.Sin](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Sin.html) | None | Not support config layout | -| [mindspore.ops.Sinh](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Sinh.html) | None | Not support config layout | -| [mindspore.ops.Softmax](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Softmax.html) | The logits can't be split into the dimension of axis, otherwise it's inconsistent with the single machine in the mathematical logic. | Support config input layout. Output layout is not open for configuration, and can't config layout on the dimension of axis. | -| [mindspore.ops.SoftmaxCrossEntropyWithLogits](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.SoftmaxCrossEntropyWithLogits.html) | The last dimension of logits and labels can't be splited; Only supports using output[0]. | Not support config layout | -| [mindspore.ops.Softplus](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Softplus.html) | None | Not support config layout | -| [mindspore.ops.Softsign](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Softsign.html) | None | Not support config layout | -| [mindspore.ops.SoftShrink](https://mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.SoftShrink.html) | None | Not support config layout | -| [mindspore.ops.SparseGatherV2](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.SparseGatherV2.html) | The same as Gather. | Not support config layout | -| [mindspore.ops.Split](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Split.html) | The input_x can't be split into the dimension of axis, otherwise it's inconsistent with the single machine in the mathematical logic. | Support config layout, and can't config layout on the dimension of axis. 
| -| [mindspore.ops.Sqrt](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Sqrt.html) | None | Not support config layout | -| [mindspore.ops.Square](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Square.html) | None | Not support config layout | -| [mindspore.ops.SquaredDifference](https://mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.SquaredDifference.html) | None | Not support config layout | -| [mindspore.ops.Squeeze](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Squeeze.html) | None | Not support config layout | -| [mindspore.ops.Stack](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Stack.html) | None | Not support config layout | -| [mindspore.ops.StridedSlice](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.StridedSlice.html) | Only support mask with all 0 values; The dimension needs to be split should be all extracted; Split is supported when the strides of dimension is 1. | Not support config layout | -| [mindspore.ops.Slice](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Slice.html) | The dimension needs to be split should be all extracted. | Not support config layout | -| [mindspore.ops.Sub](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Sub.html) | None | Layout configuration is supported. The input layout should be the same or broadcastable. The output layout cannot be configured. | -| [mindspore.ops.Tan](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Tan.html) | None | Not support config layout | -| [mindspore.ops.Tanh](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Tanh.html) | None | Not support config layout | -| [mindspore.ops.Tile](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Tile.html) | Only support configuring shard strategy for dims. | Support config input and output layout. When dim (replication number) = 1, the input and output layout for this dim should be the same; when dim > 1, the input of this dim can't be split for the data accuracy, this dim must be divisible by the output split strategies. | -| [mindspore.ops.TopK](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.TopK.html) | The input_x can't be split into the last dimension, otherwise it's inconsistent with the single machine in the mathematical logic. | Not support config layout | -| [mindspore.ops.Transpose](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Transpose.html) | None | Support config layout, and the output layout cannot be configured. | -| [mindspore.ops.TruncateDiv](https://mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.TruncateDiv.html) | None | Not support config layout | -| [mindspore.ops.TruncateMod](https://mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.TruncateMod.html) | None | Not support config layout | -| [mindspore.ops.Unique](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Unique.html) | Only support the repeat calculate shard strategy (1,). | Not support config layout | -| [mindspore.ops.UnsortedSegmentSum](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.UnsortedSegmentSum.html) | The shard of input_x and segment_ids must be the same as the dimension of segment_ids. 
| Not support config layout | -| [mindspore.ops.UnsortedSegmentMin](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.UnsortedSegmentMin.html) | The shard of input_x and segment_ids must be the same as the dimension of segment_ids. Note that if the segment id i is missing, then the output[i] will be filled with the maximum of the input type. The user needs to mask the maximum value to avoid value overflow. The communication operation such as AllReudce will raise an Run Task Error due to overflow. | Not support config layout | -| [mindspore.ops.UnsortedSegmentMax](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.UnsortedSegmentMax.html) | The shard of input_x and segment_ids must be the same as the dimension of segment_ids. Note that if the segment id i is missing, then the output[i] will be filled with the minimum of the input type. The user needs to mask the minimum value to avoid value overflow. The communication operation such as AllReudce will raise an Run Task Error due to overflow. | Not support config layout | -| [mindspore.ops.Xdivy](https://mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Xdivy.html) | None | Not support config layout | -| [mindspore.ops.Xlogy](https://mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Xlogy.html) | None | Not support config layout | -| [mindspore.ops.ZerosLike](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ZerosLike.html) | None | Not support config layout | +| [mindspore.ops.Abs](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Abs.html) | None | Not support config layout | +| [mindspore.ops.ACos](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ACos.html) | None | Not support config layout | +| [mindspore.ops.Acosh](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Acosh.html) | None | Not support config layout | +| [mindspore.ops.Add](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Add.html) | None | Layout configuration is supported. The input layout should be the same or broadcastable. The output layout cannot be configured. | +| [mindspore.ops.AddN](https://mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.AddN.html) | None | Not support config layout | +| [mindspore.ops.ApproximateEqual](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ApproximateEqual.html) | None | Not support config layout | +| [mindspore.ops.ArgMaxWithValue](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ArgMaxWithValue.html) | When the input_x is split on the axis dimension, the distributed result may be inconsistent with that on the single machine. | Not support config layout | +| [mindspore.ops.ArgMinWithValue](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ArgMinWithValue.html) | When the input_x is split on the axis dimension, the distributed result may be inconsistent with that on the single machine. 
| Not support config layout | +| [mindspore.ops.Asin](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Asin.html) | None | Not support config layout | +| [mindspore.ops.Asinh](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Asinh.html) | None | Not support config layout | +| [mindspore.ops.Assign](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Assign.html) | None | Not support config layout | +| [mindspore.ops.AssignAdd](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.AssignAdd.html) | None | Not support config layout | +| [mindspore.ops.AssignSub](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.AssignSub.html) | None | Not support config layout | +| [mindspore.ops.Atan](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Atan.html) | None | Not support config layout | +| [mindspore.ops.Atan2](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Atan2.html) | None | Not support config layout | +| [mindspore.ops.Atanh](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Atanh.html) | None | Not support config layout | +| [mindspore.ops.AvgPool](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.AvgPool.html) | 1. The data format only supports 'NCHW';
2. The shapes of output H/W dimension must be divisible by the split strategies of input H/W dimension;
3. If H/W is split:
1) If the kernel_size <= stride, the input slice size must be divisible by stride;
2) It does not support kernel_size > stride;
4. In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | +| [mindspore.ops.AvgPool3D](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.AvgPool3D.html) | 1. The data format only supports 'NCDHW';
2. If data exchange between adjacent nodes is involved, only Ascend is supported;
3. The W dimension can not be split;&#13;
4. The output shape of D/H dimension must be divisible by the strategy of input D/H dimensions;
5. In valid mode: If D/H dimension is split:
1) When the kernel_size <= stride, the input's slice shape of D/H dimension must be divisible by stride;&#13;
2) It does not support that kernel_size > stride;
6. In the same/pad mode: If D/H dimension is split:
1) If kernel_size >= stride, (Total input length including pad - kernel_size) must be divisible by stride. Otherwise, the pad must be 0 and the slice shape of D/H dimension must be divisible by stride;
2) (Output length * stride - input length) must be divisible by the strategy;&#13;
3) The length of data sent and received between adjacent cards must be greater than or equal to 0 and less than or equal to the slice size;
7. In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | +| [mindspore.ops.BatchMatMul](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.BatchMatMul.html) | The splitting rule for the last two dimensions of each input and output is the same as that of the MatMul operator. |Support config layout. The splitting rule for the last two dimensions of each input and output is the same as that of the MatMul operator. +| [mindspore.ops.BatchNorm](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.BatchNorm.html) | It does not support GPU. | Not support config layout | +| [mindspore.ops.BesselI0e](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.BesselI0e.html) | None | Not support config layout | +| [mindspore.ops.BesselI1e](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.BesselI1e.html) | None | Not support config layout | +| [mindspore.ops.BiasAdd](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.BiasAdd.html) | None | Support config layout. The second input, bias, should have the same tensor layout as the last dimension of input_x. Output Layout is not open for configuration. | +| [mindspore.ops.BitwiseAnd](https://mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.BitwiseAnd.html) | None | Not support config layout | +| [mindspore.ops.BitwiseOr](https://mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.BitwiseOr.html) | None | Not support config layout | +| [mindspore.ops.BitwiseXor](https://mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.BitwiseXor.html) | None | Not support config layout | +| [mindspore.ops.BoundingBoxEncode](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.BoundingBoxEncode.html) | 1. The first dimension of input (anchor_box) and input (groundtruth_box) can be split;
2. The sharding strategies of input (anchor_box) and input (groundtruth_box) must be the same. | Not support config layout | +| [mindspore.ops.BroadcastTo](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.BroadcastTo.html) | None | Not support config layout | +| [mindspore.ops.Cast](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Cast.html) | The shard strategy is ignored in the Auto Parallel and Semi Auto Parallel mode. | Not support config layout | +| [mindspore.ops.Cdist](https://mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Cdist.html) | 1. The strategy for 'B' dimension must be the same;
2. `M` dimension can't be split. | Not support config layout | +| [mindspore.ops.Ceil](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Ceil.html) | None | Not support config layout | +| [mindspore.ops.Concat](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Concat.html) | The input_x can't be split into the dimension of axis, otherwise it's inconsistent with the single machine in the mathematical logic. | Not support config layout | +| [mindspore.ops.Conv2D](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Conv2D.html) | 1. The data format only supports 'NCHW';&#13;
2. If data exchange between adjacent nodes is involved, only Ascend is supported;
3. When the value of group is not 1, can not split C-in or C-out;
4. The last two dimensions of weight can not be split;
5. The output shape of H/W dimension must be divisible by the strategy of input H/W dimensions;
6. In valid mode: If H/W dimension is split:
1) When the kernel_size <= stride (kernel_size is dilation * (kernel_size - 1) + 1, the same below), the input's slice shape of H/W dimension must be divisible by stride;&#13;
2) It does not support that kernel_size > stride;
7. In the same/pad mode: If H/W dimension is split:
1) If kernel_size >= stride, (Total input length including pad - kernel_size) must be divisible by stride. Otherwise, the pad must be 0 and the slice shape of H/W dimension must be divisible by stride;
2) (Output length * stride - input length) must be divisible by the strategy;&#13;
3) The length of data sent and received between adjacent cards must be greater than or equal to 0 and less than or equal to the slice size; | Not support config layout | +| [mindspore.ops.Conv3D](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Conv3D.html) | 1. The data format only supports 'NCDHW';
2. If data exchange between adjacent nodes is involved, only Ascend is supported;
3. When the value of group is not 1, can not split C-in or C-out;
4. The W dimension and the last three dimensions of weight can not be split;
5. The output shape of D/H dimension must be divisible by the strategy of input D/H dimensions;
6. In valid mode: If D/H dimension is split:
1) When the kernel_size <= stride (kernel_size is dilation * (kernel_size - 1) + 1, the same below), the input's slice shape of D/H dimension must be divisible by stride;&#13;
2) It does not support that kernel_size > stride;
7. In the same/pad mode: If D/H dimension is split:
1) If kernel_size >= stride, (Total input length including pad - kernel_size) must be divisible by stride. Otherwise, the pad must be 0 and the slice shape of D/H dimension must be divisible by stride;
2) (Output length * stride - input length) must be divisible by the strategy;&#13;
3) The length of data sent and received between adjacent cards must be greater than or equal to 0 and less than or equal to the slice size;
8. In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | +| [mindspore.ops.Cos](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Cos.html) | None | Not support config layout | +| [mindspore.ops.Cosh](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Cosh.html) | None | Not support config layout | +| [mindspore.ops.CropAndResize](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.CropAndResize.html) | 1. Sharding of the H/W dimension of input (x) and the second dimension of input (boxes) is not supported.
2. The shard strategy for the first dimension of inputs (boxes) and (box_index) must be the same. | Not support config layout | +| [mindspore.ops.CumProd](https://mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.CumProd.html) | The `axis` dimension for `input` can't be split. | Not support config layout | +| [mindspore.ops.CumSum](https://mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.CumSum.html) | The same as CumProd. | Not support config layout | +| [mindspore.ops.Div](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Div.html) | None | Layout configuration is supported. The input layout should be the same or broadcastable. The output layout cannot be configured. | +| [mindspore.ops.DivNoNan](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.DivNoNan.html) | None | Not support config layout | +| [mindspore.ops.Dropout](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Dropout.html) | None | Not support config layout | +| [mindspore.ops.Elu](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Elu.html) | None | Not support config layout | +| [mindspore.ops.embedding](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.embedding.html) | 1. padding_idx, max_norm, norm_type, and scale_grad_by_freq only support default values.&#13;
2. The first input does not support splitting.
3. The second input does not support scenarios where it cannot be cut off. | Layout configuration is supported. | +| [mindspore.ops.EmbeddingLookup](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.EmbeddingLookup.html) | The same as Gather. | Not support config layout | +| [mindspore.ops.Equal](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Equal.html) | None | Not support config layout | +| [mindspore.ops.Erf](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Erf.html) | None | Not support config layout | +| [mindspore.ops.Erfc](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Erfc.html) | None | Not support config layout | +| [mindspore.ops.Erfinv](https://mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Erfinv.html) | None | Not support config layout | +| [mindspore.ops.Exp](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Exp.html) | None | Not support config layout | +| [mindspore.ops.ExpandDims](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ExpandDims.html) | None | Not support config layout | +| [mindspore.ops.Expm1](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Expm1.html) | None | Not support config layout | +| [mindspore.ops.Floor](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Floor.html) | None | Not support config layout | +| [mindspore.ops.FloorDiv](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.FloorDiv.html) | None | Not support config layout | +| [mindspore.ops.FloorMod](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.FloorMod.html) | None | Not support config layout | +| [mindspore.ops.Gamma](https://mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Gamma.html) | 1. Set the strategy for `shape`. e.g shape=(8, 16), the corresponding policy can be (2, 4);
2. The strategy for `alpha` and `beta` must be all-1;
3. When the setting for `shard` is not an all-1 strategy, the result is inconsistent with that on the single machine. | Not support config layout | +| [mindspore.ops.Gather](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Gather.html) | 1. When batch_dims > 0:&#13;
1) The axis dimension of input_params can not be split;
2) Non-uniform split is not supported;
2. When batch_dims = 0:
1) If uniform split:
a) If the axis dimension of input_params is not split, other dimensions can be split;&#13;
b) If the axis dimension of input_params is split: The input_params only supports 1 and 2 dimensions; The input_indices can not be a scalar tensor; Splitting input_params and input_indices at the same time is not supported; When axis = 0 and the parameter is split in the dimension of axis, the output strategy can be configured. The legal output shard strategy is (indices_strategy, param_strategy[1:]) or ((indices_strategy[0]*param_strategy[0], indices_strategy[1:]), param_strategy[1:]);&#13;
2) Non-uniform split:
a) Only support axis = 0;
b) The non-uniform split only represents the non-uniformity of the 0th dimension of input_params, and the last dimension of the params slice should be aligned by 32 bytes;
c) The number of slices in the 0th dimension of input_params should be equal to that of the last dimension of input_indices;
d) Each dimension of input_params can be split, but input_indices can only split the last dimension, and does not support repeated calculations;
e) Input_indices shall meet the following requirements: the Tensor value of the next slice shall be greater than that of the previous slice. | Not support config layout | +| [mindspore.ops.GatherD](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.GatherD.html) | The dimension corresponding to dim cannot be split; In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | +| [mindspore.ops.GatherNd](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.GatherNd.html) | The first input can't be split, and the last dimension of the second input can't be split; In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | +| [mindspore.ops.GeLU](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.GeLU.html) | None | Support config input layout. Output layout is not open for configuration. | +| [mindspore.ops.Greater](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Greater.html) | None | Not support config layout | +| [mindspore.ops.GreaterEqual](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.GreaterEqual.html) | None | Not support config layout | +| [mindspore.ops.HShrink](https://mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.HShrink.html) | None | Not support config layout | +| [mindspore.ops.HSigmoid](https://mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.HSigmoid.html) | None | Not support config layout | +| [mindspore.ops.InplaceAdd](https://mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.InplaceAdd.html) | The first dimension of `x` and `input_v` can't be split. | Not support config layout | +| [mindspore.ops.InplaceSub](https://mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.InplaceSub.html) | The same as InplaceAdd. | Not support config layout | +| [mindspore.ops.InplaceUpdate](https://mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.InplaceUpdate.html) | The same as InplaceAdd. | Not support config layout | +| [mindspore.ops.Inv](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Inv.html) | None | Not support config layout | +| [mindspore.ops.IOU](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.IOU.html) | The first dimension of the `anchor_boxes` and `gt_boxes` can be split. | Not support config layout | +| [mindspore.ops.IsFinite](https://mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.IsFinite.html) | None | Not support config layout | +| [mindspore.ops.KLDivLoss](https://mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.KLDivLoss.html) | None | Not support config layout | +| [mindspore.ops.LayerNorm](https://mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.LayerNorm.html) | The strategy for the second input gamma and the third input beta needs to be equal to the input_x_strategy[begin_params.axis:], where input_x_strategy is the strategy for the first input. | Support config layout. The layout configuration for the second input gamma and the third input beta needs to be equal to the input_x_layout_tuple[begin_params.axis:], where input_x_layout_tuple is the layout configuration for the first input. &#13;
| +| [mindspore.ops.L2Loss](https://mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.L2Loss.html) | None | Not support config layout | +| [mindspore.ops.L2Normalize](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.L2Normalize.html) | The input_x can't be split into the dimension of axis, otherwise it's inconsistent with the single machine in the mathematical logic. | Not support config layout | +| [mindspore.ops.Lerp](https://mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Lerp.html) | None | Not support config layout | +| [mindspore.ops.Less](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Less.html) | None | Not support config layout | +| [mindspore.ops.LessEqual](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.LessEqual.html) | None | Not support config layout | +| [mindspore.ops.LinSpace](https://mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.LinSpace.html) | You don't need to configure a strategy for `start` and `end`. You just need to pass in a strategy of length 1 whose value divides `num` evenly. | Not support config layout | +| [mindspore.ops.LogicalAnd](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.LogicalAnd.html) | None | Not support config layout | +| [mindspore.ops.LogicalNot](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.LogicalNot.html) | None | Not support config layout | +| [mindspore.ops.LogicalOr](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.LogicalOr.html) | None | Not support config layout | +| [mindspore.ops.Log](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Log.html) | None | Not support config layout | +| [mindspore.ops.Log1p](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Log1p.html) | None | Not support config layout | +| [mindspore.ops.LogSoftmax](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.LogSoftmax.html) | The logits can't be split into the dimension of axis, otherwise it's inconsistent with the single machine in the mathematical logic. | Not support config layout | +| [mindspore.ops.MaskedFill](https://mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.MaskedFill.html) | None | Not support config layout | +| [mindspore.ops.MatMul](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.MatMul.html) | 1. When `transpose_b=True` is set, the input's split strategy must be in the form of ((A, B), (C, B));&#13;
2. When `transpose_b=False` is set, the input's split strategy must be in the form of ((A, B), (B, C));
3. It is supported to set the output's split strategy; the legal output's split strategy is ((A, C),) or ((A * B, C),) | Support config layout.&#13;
1. When `transpose_b=True` is set, the input's layout configuration must be in the form of (layout(A, B), layout(C, B)), where A/B/C is the alias name of a device axis or a tuple of alias names;&#13;
2. When `transpose_b=False` is set, the input's layout configuration must be in the form of (layout(A, B), layout(B, C)), where A/B/C is the alias name of a device axis or a tuple of alias names;&#13;
3. It is supported to configure the output's layout; the legal output's layout configuration is (layout(A, C),) or (layout((A, B), C),), where A/B/C is the alias name of a device axis; When A is a tuple of alias names (A1, A2), the legal output's layout configuration is (layout((A1, A2), C),) or (layout((A1, A2, B), C),);&#13;
4. In the split strategy, if A/B/C is the alias name of device axis, A/B/C should be different from each other. If there exist alias name tuples in A/B/C, each element in the tuple should also be different from other elements or other device alias names. | +| [mindspore.ops.Maximum](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Maximum.html) | None | Not support config layout | +| [mindspore.ops.MaxPool](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.MaxPool.html) | 1. The data format only supports 'NCHW';
2. The shapes of output H/W dimension must be divisible by the split strategies of input H/W dimension;
3. If H/W is split:
1) If the kernel_size <= stride, the input slice size must be divisible by stride;
2) It does not support kernel_size > stride;
4. In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | +| [mindspore.ops.MaxPool3D](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.MaxPool3D.html) | The same as AvgPool3D. | Not support config layout | +| [mindspore.ops.Minimum](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Minimum.html) | None | Not support config layout | +| [mindspore.ops.Mish](https://mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Mish.html) | None | Not support config layout | +| [mindspore.ops.Mod](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Mod.html) | None | Not support config layout | +| [mindspore.ops.Mul](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Mul.html) | None | Layout configuration is supported. The input layout should be the same or broadcastable. The output layout cannot be configured. | +| [mindspore.ops.MulNoNan](https://mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.MulNoNan.html) | None | Not support config layout | +| [mindspore.ops.Neg](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Neg.html) | None | Not support config layout | +| [mindspore.ops.NotEqual](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.NotEqual.html) | None | Not support config layout | +| [mindspore.ops.OneHot](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.OneHot.html) | Only support 1-dim indices. Must configure strategy for the output and the first and second inputs. | Not support config layout | +| [mindspore.ops.OnesLike](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.OnesLike.html) | None | Not support config layout | +| [mindspore.ops.Pow](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Pow.html) | None | Not support config layout | +| [mindspore.ops.PReLU](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.PReLU.html) | When the shape of weight is not [1], the shard strategy in channel dimension of input_x should be consistent with weight. | Not support config layout | +| [mindspore.ops.RandomChoiceWithMask](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.RandomChoiceWithMask.html) | Only the all-1 strategy is supported. | Not support config layout | +| [mindspore.ops.RealDiv](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.RealDiv.html) | None | Not support config layout | +| [mindspore.ops.Reciprocal](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Reciprocal.html) | None | Not support config layout | +| [mindspore.ops.ReduceMax](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ReduceMax.html) | When the input_x is splited on the axis dimension, the distributed result may be inconsistent with that on the single machine. | Not support config layout | +| [mindspore.ops.ReduceMin](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ReduceMin.html) | When the input_x is splited on the axis dimension, the distributed result may be inconsistent with that on the single machine. 
| Not support config layout | +| [mindspore.ops.ReduceSum](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ReduceSum.html) | None | Not support config layout | +| [mindspore.ops.ReduceMean](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ReduceMean.html) | None | Not support config layout | +| [mindspore.ops.ReLU](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ReLU.html) | None | Support config input layout. Output layout is not open for configuration. | +| [mindspore.ops.ReLU6](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ReLU6.html) | None | Not support config layout | +| [mindspore.ops.Reshape](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Reshape.html) | Configuring sharding strategy is not supported. In auto parallel mode, if multiple operators are followed by the reshape operator, different shard strategys are not allowed to be configured for these operators. | Not support config layout | +| [mindspore.ops.Rint](https://mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Rint.html) | None | Not support config layout | +| [mindspore.ops.ResizeNearestNeighbor](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ResizeNearestNeighbor.html) | When `align_corners=True` is set, only the first dimension and the second dimension can be split. | Not support config layout | +| [mindspore.ops.ROIAlign](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ROIAlign.html) | Sharding the H/W dimension of the input(features) and the second dimension of input(rois) is not supported. | Not support config layout | +| [mindspore.ops.Round](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Round.html) | None | Not support config layout | +| [mindspore.ops.Rsqrt](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Rsqrt.html) | None | Not support config layout | +| [mindspore.ops.ScatterAdd](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ScatterAdd.html) | The second input cannot be split, and the top n dimensions of the third input (n is the dimension of the second input) cannot be split; In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | +| [mindspore.ops.ScatterDiv](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ScatterDiv.html) | The second input cannot be split, and the top n dimensions of the third input (n is the dimension of the second input) cannot be split; In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | +| [mindspore.ops.ScatterMax](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ScatterMax.html) | The first dimension of the first input cannot be split, the second input cannot be split, and the top n dimensions of the third input (n is the dimension of the second input) cannot be split; In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | +| [mindspore.ops.ScatterMin](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ScatterMin.html) | The first dimension of the first input cannot be split, the second input cannot be split, and the top n dimensions of the third input (n is the dimension of the second input) cannot be split; In auto_parallel mode, the dual recursive algorithm is not supported. 
| Not support config layout | +| [mindspore.ops.ScatterMul](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ScatterMul.html) | The second input cannot be split, and the top n dimensions of the third input (n is the dimension of the second input) cannot be split; In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | +| [mindspore.ops.ScatterNdAdd](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ScatterNdAdd.html) | The second input cannot be split, the top n-1 dimension of the third input (n is the dimension of the second input) cannot be split, and the remaining k dimensions (excluding the top n-1 dimension) of the third input are consistent with the last k partitions of the first input; In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | +| [mindspore.ops.ScatterNdSub](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ScatterNdSub.html) | The second input cannot be split, the top n-1 dimension of the third input (n is the dimension of the second input) cannot be split, and the remaining k dimensions (excluding the top n-1 dimension) of the third input are consistent with the last k partitions of the first input; In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | +| [mindspore.ops.ScatterNdUpdate](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ScatterNdUpdate.html) | The top m dimension of the first input cannot be cut (m is the value of the last dimension of the second input indexes [- 1]). The second input cannot be split. The top n-1 dimension of the third input (n is the dimension of the second input) cannot be split. The partitions of the remaining k dimensions (excluding the top n-1 dimension) of the third input is consistent with the last k partitions of the first input; In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | +| [mindspore.ops.ScatterSub](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ScatterSub.html) | The second input cannot be split, and the top n dimensions of the third input (n is the dimension of the second input) cannot be split; In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | +| [mindspore.ops.ScatterUpdate](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ScatterUpdate.html) | The first dimension of first input can not be split, the second input can not be split, and the first n dimensions (n is the dimension size of the second input) of the third input can not be split; In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | +| [mindspore.ops.TensorScatterAdd](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.TensorScatterAdd.html) | The second input cannot be split, the top n-1 dimension of the third input (n is the dimension of the second input) cannot be split, and the remaining k dimensions (excluding the top n-1 dimension) of the third input are consistent with the last k partitions of the first input; In auto_parallel mode, the dual recursive algorithm is not supported. 
| Not support config layout | +| [mindspore.ops.TensorScatterDiv](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.TensorScatterDiv.html) | The second input cannot be split, the top n-1 dimension of the third input (n is the dimension of the second input) cannot be split, and the remaining k dimensions (excluding the top n-1 dimension) of the third input are consistent with the last k partitions of the first input; In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | +| [mindspore.ops.TensorScatterMax](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.TensorScatterMax.html) | The top m dimension of the first input cannot be cut (m is the value of the last dimension of the second input, indices[-1]). The second input cannot be split. The top n-1 dimension of the third input (n is the dimension of the second input) cannot be split. The partitions of the remaining k dimensions (excluding the top n-1 dimension) of the third input is consistent with the last k partitions of the first input; In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | +| [mindspore.ops.TensorScatterMin](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.TensorScatterMin.html) | The top m dimension of the first input cannot be cut (m is the value of the last dimension of the second input, indices[-1]). The second input cannot be split. The top n-1 dimension of the third input (n is the dimension of the second input) cannot be split. The partitions of the remaining k dimensions (excluding the top n-1 dimension) of the third input is consistent with the last k partitions of the first input; In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | +| [mindspore.ops.TensorScatterMul](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.TensorScatterMul.html) | The second input cannot be split, the top n-1 dimension of the third input (n is the dimension of the second input) cannot be split, and the remaining k dimensions (excluding the top n-1 dimension) of the third input are consistent with the last k partitions of the first input; In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | +| [mindspore.ops.TensorScatterSub](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.TensorScatterSub.html) | The second input cannot be split, the top n-1 dimension of the third input (n is the dimension of the second input) cannot be split, and the remaining k dimensions (excluding the top n-1 dimension) of the third input are consistent with the last k partitions of the first input; In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | +| [mindspore.ops.TensorScatterUpdate](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.TensorScatterUpdate.html) | The top m dimension of the first input cannot be cut (m is the value of the last dimension of the second input, indices[-1]). The second input cannot be split. The top n-1 dimension of the third input (n is the dimension of the second input) cannot be split. The partitions of the remaining k dimensions (excluding the top n-1 dimension) of the third input is consistent with the last k partitions of the first input; In auto_parallel mode, the dual recursive algorithm is not supported. 
| Not support config layout | +| [mindspore.ops.Select](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Select.html) | In auto_parallel mode, the dual recursive algorithm is not supported. | Not support config layout | +| [mindspore.ops.SeLU](https://mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.SeLU.html) | None | Not support config layout | +| [mindspore.ops.Sigmoid](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Sigmoid.html) | None | Support config input layout. Output layout is not open for configuration. | +| [mindspore.ops.SigmoidCrossEntropyWithLogits](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.SigmoidCrossEntropyWithLogits.html) | None | Not support config layout | +| [mindspore.ops.Sign](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Sign.html) | None | Not support config layout | +| [mindspore.ops.Sin](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Sin.html) | None | Not support config layout | +| [mindspore.ops.Sinh](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Sinh.html) | None | Not support config layout | +| [mindspore.ops.Softmax](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Softmax.html) | The logits can't be split into the dimension of axis, otherwise it's inconsistent with the single machine in the mathematical logic. | Support config input layout. Output layout is not open for configuration, and can't config layout on the dimension of axis. | +| [mindspore.ops.SoftmaxCrossEntropyWithLogits](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.SoftmaxCrossEntropyWithLogits.html) | The last dimension of logits and labels can't be splited; Only supports using output[0]. | Not support config layout | +| [mindspore.ops.Softplus](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Softplus.html) | None | Not support config layout | +| [mindspore.ops.Softsign](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Softsign.html) | None | Not support config layout | +| [mindspore.ops.SoftShrink](https://mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.SoftShrink.html) | None | Not support config layout | +| [mindspore.ops.SparseGatherV2](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.SparseGatherV2.html) | The same as Gather. | Not support config layout | +| [mindspore.ops.Split](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Split.html) | The input_x can't be split into the dimension of axis, otherwise it's inconsistent with the single machine in the mathematical logic. | Support config layout, and can't config layout on the dimension of axis. 
| +| [mindspore.ops.Sqrt](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Sqrt.html) | None | Not support config layout | +| [mindspore.ops.Square](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Square.html) | None | Not support config layout | +| [mindspore.ops.SquaredDifference](https://mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.SquaredDifference.html) | None | Not support config layout | +| [mindspore.ops.Squeeze](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Squeeze.html) | None | Not support config layout | +| [mindspore.ops.Stack](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Stack.html) | None | Not support config layout | +| [mindspore.ops.StridedSlice](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.StridedSlice.html) | Only support mask with all 0 values; The dimension needs to be split should be all extracted; Split is supported when the strides of dimension is 1. | Not support config layout | +| [mindspore.ops.Slice](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Slice.html) | The dimension needs to be split should be all extracted. | Not support config layout | +| [mindspore.ops.Sub](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Sub.html) | None | Layout configuration is supported. The input layout should be the same or broadcastable. The output layout cannot be configured. | +| [mindspore.ops.Tan](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Tan.html) | None | Not support config layout | +| [mindspore.ops.Tanh](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Tanh.html) | None | Not support config layout | +| [mindspore.ops.Tile](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Tile.html) | Only support configuring shard strategy for dims. | Support config input and output layout. When dim (replication number) = 1, the input and output layout for this dim should be the same; when dim > 1, the input of this dim can't be split for the data accuracy, this dim must be divisible by the output split strategies. | +| [mindspore.ops.TopK](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.TopK.html) | The input_x can't be split into the last dimension, otherwise it's inconsistent with the single machine in the mathematical logic. | Not support config layout | +| [mindspore.ops.Transpose](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Transpose.html) | None | Support config layout, and the output layout cannot be configured. | +| [mindspore.ops.TruncateDiv](https://mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.TruncateDiv.html) | None | Not support config layout | +| [mindspore.ops.TruncateMod](https://mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.TruncateMod.html) | None | Not support config layout | +| [mindspore.ops.Unique](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Unique.html) | Only support the repeat calculate shard strategy (1,). | Not support config layout | +| [mindspore.ops.UnsortedSegmentSum](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.UnsortedSegmentSum.html) | The shard of input_x and segment_ids must be the same as the dimension of segment_ids. 
| Not support config layout | +| [mindspore.ops.UnsortedSegmentMin](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.UnsortedSegmentMin.html) | The shard of input_x and segment_ids must be the same as the dimension of segment_ids. Note that if the segment id i is missing, then the output[i] will be filled with the maximum of the input type. The user needs to mask the maximum value to avoid value overflow. Communication operations such as AllReduce will raise a Run Task Error due to overflow. | Not support config layout | +| [mindspore.ops.UnsortedSegmentMax](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.UnsortedSegmentMax.html) | The shard of input_x and segment_ids must be the same as the dimension of segment_ids. Note that if the segment id i is missing, then the output[i] will be filled with the minimum of the input type. The user needs to mask the minimum value to avoid value overflow. Communication operations such as AllReduce will raise a Run Task Error due to overflow. | Not support config layout | +| [mindspore.ops.Xdivy](https://mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Xdivy.html) | None | Not support config layout | +| [mindspore.ops.Xlogy](https://mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Xlogy.html) | None | Not support config layout | +| [mindspore.ops.ZerosLike](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ZerosLike.html) | None | Not support config layout | > Repeated calculation means that the device is not fully used. For example, if the cluster has 8 devices to run distributed training but the splitting strategy only cuts the input into 4 copies, repeated calculation will occur. diff --git a/docs/mindspore/source_en/faq/data_processing.md index 6a2b535937..e2e9d65a1b 100644 --- a/docs/mindspore/source_en/faq/data_processing.md +++ b/docs/mindspore/source_en/faq/data_processing.md @@ -1,10 +1,10 @@ # Data Processing -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/faq/data_processing.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/faq/data_processing.md) ## Q: How do I offload data if I do not use high-level APIs? -A: You can implement by referring to the [test_tdt_data_transfer.py](https://gitee.com/mindspore/mindspore/blob/master/tests/st/data_transfer/test_tdt_data_transfer.py) example of the manual offloading mode without using the `model.train` API. Currently, the GPU-based and Ascend-based hardware is supported. +A: You can implement this by referring to the [test_tdt_data_transfer.py](https://gitee.com/mindspore/mindspore/blob/br_base/tests/st/data_transfer/test_tdt_data_transfer.py) example of the manual offloading mode without using the `model.train` API. Currently, GPU-based and Ascend-based hardware are supported.
@@ -38,7 +38,7 @@ A: You can refer to the following steps to reduce CPU consumption (mainly due to ## Q:  Why there is no difference between the parameter `shuffle` in `GeneratorDataset`, and `shuffle=True` and `shuffle=False` when the task is run? -A: If `shuffle` is enabled, the input `Dataset` must support random access (for example, the user-defined `Dataset` has the `getitem` method). If data is returned in `yeild` mode in the user-defined `Dataset`, random access is not supported. For details, see section [GeneratorDataset example](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.GeneratorDataset.html). +A: If `shuffle` is enabled, the input `Dataset` must support random access (for example, the user-defined `Dataset` has the `getitem` method). If data is returned in `yield` mode in the user-defined `Dataset`, random access is not supported. For details, see the [GeneratorDataset example](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.GeneratorDataset.html).
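For illustration, a minimal sketch of a random-access dataset that `GeneratorDataset` can shuffle (the class and column names are arbitrary): because `__getitem__` and `__len__` are defined, `shuffle=True` works, whereas a generator that only yields values cannot be shuffled this way.

```python
import numpy as np
import mindspore.dataset as ds

class RandomAccessDataset:
    """Supports random access via __getitem__, so shuffle=True is allowed."""
    def __init__(self, n=10):
        self._data = [np.array([i], dtype=np.int32) for i in range(n)]

    def __getitem__(self, index):
        return (self._data[index],)

    def __len__(self):
        return len(self._data)

dataset = ds.GeneratorDataset(RandomAccessDataset(), column_names=["col1"], shuffle=True)
for item in dataset.create_dict_iterator(output_numpy=True):
    print(item["col1"])
```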
@@ -77,7 +77,7 @@ A: `c_transforms` is recommended. Its performance is better because it is execut Principle: The underlying layer of `c_transform` uses `opencv/jpeg-turbo` of the C version for data processing, and `py_transform` uses `Pillow` of the Python version for data processing. -Data augmentation APIs are unified in MindSpore 1.8. Transformations of `c_transforms` and `py_transforms` will be selected automatically due to input tensor type instead of importing them manually. `c_transforms` is set to default option since its performance is better. More details please refer to [Latest API doc and import note](https://mindspore.cn/docs/en/master/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.vision). +Data augmentation APIs are unified in MindSpore 1.8. Transformations from `c_transforms` and `py_transforms` are selected automatically based on the input tensor type instead of being imported manually. `c_transforms` is the default option since its performance is better. For more details, please refer to the [Latest API doc and import note](https://mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.vision).
@@ -160,7 +160,7 @@ A: You can refer to the usage of YOLOv3 which contains the resizing of different A: [build_seg_data.py](https://gitee.com/mindspore/models/blob/master/research/cv/FCN8s/src/data/build_seg_data.py) is the script of MindRecords generated by the dataset. You can directly use or adapt it to your dataset. Alternatively, you can use `GeneratorDataset` to customize the dataset loading if you want to implement the dataset reading by yourself. -[GeneratorDataset example](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.GeneratorDataset.html) +[GeneratorDataset example](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.GeneratorDataset.html)
@@ -189,7 +189,7 @@ ds.GeneratorDataset(..., num_shards=8, shard_id=7, ...) A: The data schema can be defined as follows:`cv_schema_json = {"label": {"type": "int32", "shape": [-1]}, "data": {"type": "bytes"}}` Note: A label is an array of the numpy type, where label values 1, 1, 0, 1, 0, 1 are stored. These label values correspond to the same data, that is, the binary value of the same image. -For details, see [Converting Dataset to MindRecord](https://www.mindspore.cn/tutorials/en/master/dataset/record.html#converting-dataset-to-record-format). +For details, see [Converting Dataset to MindRecord](https://www.mindspore.cn/tutorials/en/br_base/dataset/record.html#converting-dataset-to-record-format).
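As a sketch (the file names here are placeholders), the schema above can be written with `mindspore.mindrecord.FileWriter`, storing the numpy label array and the raw image bytes in one sample:

```python
import numpy as np
from mindspore.mindrecord import FileWriter

cv_schema_json = {"label": {"type": "int32", "shape": [-1]}, "data": {"type": "bytes"}}

writer = FileWriter(file_name="demo.mindrecord", shard_num=1)
writer.add_schema(cv_schema_json, "demo_schema")

# "img.jpg" is a placeholder image file; its binary content goes into "data".
with open("img.jpg", "rb") as f:
    img_bytes = f.read()

sample = {"label": np.array([1, 1, 0, 1, 0, 1], dtype=np.int32), "data": img_bytes}
writer.write_raw_data([sample])
writer.commit()
```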
@@ -201,7 +201,7 @@ A: The MNIST gray scale image dataset is used for MindSpore training. Therefore, ## Q: Can you introduce the data processing framework in MindSpore? -A: MindSpore Dataset module makes it easy for users to define data preprocessing pipelines and transform samples efficiently with multiprocessing or multithreading. MindSpore Dataset also provides variable APIs for users to load and process datasets, more introduction please refer to [MindSpore Dataset](https://mindspore.cn/docs/en/master/api_python/mindspore.dataset.html). If you want to further study the performance optimization of dataset pipeline, please read [Optimizing Data Processing](https://www.mindspore.cn/tutorials/en/master/dataset/optimize.html). +A: The MindSpore Dataset module makes it easy for users to define data preprocessing pipelines and transform samples efficiently with multiprocessing or multithreading. MindSpore Dataset also provides various APIs for users to load and process datasets; for more introduction, please refer to [MindSpore Dataset](https://mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.html). If you want to further study the performance optimization of the dataset pipeline, please read [Optimizing Data Processing](https://www.mindspore.cn/tutorials/en/br_base/dataset/optimize.html).
@@ -213,7 +213,7 @@ A: Firstly, above error refers to failed sending data to the device through the 2. **When error raised in the graph compiling stage, as training has not started** (for example, the loss has not been printed in the log), please check the error log if there are errors reported by the network related operators or the environment configuration resulted Errors (such as hccl.json is incorrect, resulted abnormal initialization of multi-card communication) -3. **When error raised during the training process**, usually this is caused by the mismatch between the amount of data (batch number) has been sent and the amount of data (step number) required for network training. You can print and check the number of batches of an epoch with [get_dataset_size](https://www.mindspore.cn/docs/en/master/api_python/dataset/dataset_method/attribute/mindspore.dataset.Dataset.get_dataset_size.html) interface, several possible reason are as follows: +3. **When error raised during the training process**, usually this is caused by the mismatch between the amount of data (batch number) has been sent and the amount of data (step number) required for network training. You can print and check the number of batches of an epoch with [get_dataset_size](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/dataset_method/attribute/mindspore.dataset.Dataset.get_dataset_size.html) interface, several possible reason are as follows: - With checking the print times of loss to figure out that when data amount(trained steps) is just an integer multiple of the batches number in an epoch, there may be a processing existence problem in the data processing part involving Epoch processing, such as the following case: @@ -312,7 +312,7 @@ dataset3 = dataset2.map(***) ## Q: What is the API corresponding to DataLoader in MindSpore? -A: If the DataLoader is considered as an API for receiving user-defined datasets, the GeneratorDataset in the MindSpore data processing API is similar to that in the DataLoader and can receive user-defined datasets. For details about how to use the GeneratorDataset, see the [GeneratorDataset example](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.GeneratorDataset.html), and for details about the differences, see the [API Mapping](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html). +A: If the DataLoader is considered as an API for receiving user-defined datasets, the GeneratorDataset in the MindSpore data processing API is similar to that in the DataLoader and can receive user-defined datasets. For details about how to use the GeneratorDataset, see the [GeneratorDataset example](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.GeneratorDataset.html), and for details about the differences, see the [API Mapping](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html).
@@ -331,7 +331,7 @@ for item in Dataset: ## Q: Can the data processing operation and network computing operator be used together? A: Generally, if the data processing operation and network computing operator are used together, the performance deteriorates. If the corresponding data processing operation is unavailable and the user-defined py_transforms operation is inappropriate, you can try to use the data processing operation and network computing operator together. Note that because the inputs required are different, the input of the data processing operation is Numpy array or PIL Image, but the input of the network computing operator must be MindSpore.Tensor. -To use these two together, ensure that the output format of the previous one is the same as the input format of the next. Data processing operations refer to APIs in [mindspore.dataset](https://www.mindspore.cn/docs/en/master/api_python/mindspore.dataset.html) module on the official website, for example, [mindspore.dataset.vision.CenterCrop](https://www.mindspore.cn/docs/en/master/api_python/dataset_vision/mindspore.dataset.vision.CenterCrop.html). Network computing operators include operators in the mindspore.nn and mindspore.ops modules. +To use these two together, ensure that the output format of the previous one is the same as the input format of the next. Data processing operations refer to APIs in [mindspore.dataset](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.html) module on the official website, for example, [mindspore.dataset.vision.CenterCrop](https://www.mindspore.cn/docs/en/br_base/api_python/dataset_vision/mindspore.dataset.vision.CenterCrop.html). Network computing operators include operators in the mindspore.nn and mindspore.ops modules.
@@ -498,7 +498,7 @@ A: When using the data sinking mode (where `data preprocessing` -> `sending queu 2022-05-09-11:36:01.893.412 -> 2022-05-09-11:36:02.006.771 ``` - Improvement method: View the time difference between the last item of `push_end_time` and GetNext error reporting time. If the default GetNext timeout is exceeded (default: 1900s, and can be modified through `mindspore.device_context.ascend.op_debug.execute_timeout(xx)`), it indicates poor data preprocessing performance. Please refer to [Optimizing the Data Processing](https://www.mindspore.cn/tutorials/en/master/dataset/optimize.html) to improve data preprocessing performance. + Improvement method: View the time difference between the last item of `push_end_time` and GetNext error reporting time. If the default GetNext timeout is exceeded (default: 1900s, and can be modified through `mindspore.device_context.ascend.op_debug.execute_timeout(xx)`), it indicates poor data preprocessing performance. Please refer to [Optimizing the Data Processing](https://www.mindspore.cn/tutorials/en/br_base/dataset/optimize.html) to improve data preprocessing performance. 4. When the log output is similar to the following, it indicates that data preprocessing has generated 182 batches of data and the 183st batch of data is being sent to the device. And the `device_queue` shows that there is sufficient data cache on the device side. @@ -548,7 +548,7 @@ A: When using the data sinking mode (where `data preprocessing` -> `sending queu 2022-05-09-14:31:04.064.571 -> ``` - Improvement method: Please refer to [Optimizing the Data Processing](https://www.mindspore.cn/tutorials/en/master/dataset/optimize.html) to improve data preprocessing performance. + Improvement method: Please refer to [Optimizing the Data Processing](https://www.mindspore.cn/tutorials/en/br_base/dataset/optimize.html) to improve data preprocessing performance.
diff --git a/docs/mindspore/source_en/faq/distributed_parallel.md b/docs/mindspore/source_en/faq/distributed_parallel.md index a9bd1d4698..6905c17ca5 100644 --- a/docs/mindspore/source_en/faq/distributed_parallel.md +++ b/docs/mindspore/source_en/faq/distributed_parallel.md @@ -1,6 +1,6 @@ # Distributed Parallel -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/faq/distributed_parallel.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/faq/distributed_parallel.md) ## Q: What should I do if the error message `Init plugin so failed, ret = 1343225860` is displayed during the HCCL distributed training? @@ -49,7 +49,7 @@ Solution: Manually `kill` the training process and then restart the training tas [CRITICAL] DISTRIBUTED [mindspore/ccsrc/distributed/cluster/cluster_context.cc:130] InitNodeRole] Role name is invalid... ``` -A: In the case where the user does not start the process using `mpirun` but still calls the `init()` method, MindSpore requires the user to configure several environment variables and verify according to training and [dynamic cluster startup methods](https://www.mindspore.cn/tutorials/en/master/parallel/dynamic_cluster.html). If without configuring, MindSpore may display the above error message. Therefore, it is suggested that only when performing distributed training, `mindspore.communication.init` is called, and in the case of not using `mpirun`, it is configured the correct environment variables according to the documentation to start distributed training. +A: If the user does not start the process using `mpirun` but still calls the `init()` method, MindSpore requires the user to configure several environment variables and verify them according to the training mode and the [dynamic cluster startup methods](https://www.mindspore.cn/tutorials/en/br_base/parallel/dynamic_cluster.html). If they are not configured, MindSpore may display the above error message. Therefore, it is suggested that `mindspore.communication.init` be called only when performing distributed training, and that, when `mpirun` is not used, the correct environment variables be configured according to the documentation before starting distributed training.
diff --git a/docs/mindspore/source_en/faq/feature_advice.md b/docs/mindspore/source_en/faq/feature_advice.md index 4d4bd3d41c..ac5e3452d8 100644 --- a/docs/mindspore/source_en/faq/feature_advice.md +++ b/docs/mindspore/source_en/faq/feature_advice.md @@ -1,6 +1,6 @@ # Feature Advice -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/faq/feature_advice.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/faq/feature_advice.md) ## Q: Is the `input=np.random.uniform(...)` format fixed when the MindIR format is exported? @@ -10,7 +10,7 @@ A: The format is not fixed. This step is to create an input for constructing the ## Q: What framework models and formats can be directly read by MindSpore? Can the PTH Model obtained through training in PyTorch be loaded to the MindSpore framework for use? -A: MindSpore uses Protobuf to store training parameters and cannot directly read framework models. A model file stores parameters and their values. You can use APIs of other frameworks to read parameters, obtain the key-value pairs of parameters, and load the key-value pairs to MindSpore. If you want to use the .ckpt file trained by other framework, read the parameters and then call the [save_checkpoint](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.save_checkpoint.html) API of MindSpore to save the file as a .ckpt file that can be read by MindSpore. +A: MindSpore uses Protobuf to store training parameters and cannot directly read framework models. A model file stores parameters and their values. You can use APIs of other frameworks to read parameters, obtain the key-value pairs of parameters, and load the key-value pairs to MindSpore. If you want to use the .ckpt file trained by other framework, read the parameters and then call the [save_checkpoint](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.save_checkpoint.html) API of MindSpore to save the file as a .ckpt file that can be read by MindSpore.
diff --git a/docs/mindspore/source_en/faq/implement_problem.md b/docs/mindspore/source_en/faq/implement_problem.md index 23b5f02254..504b935b84 100644 --- a/docs/mindspore/source_en/faq/implement_problem.md +++ b/docs/mindspore/source_en/faq/implement_problem.md @@ -1,6 +1,6 @@ # Implement Problem -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/faq/implement_problem.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/faq/implement_problem.md) ## Q: How do I use MindSpore to implement multi-scale training? @@ -16,7 +16,7 @@ A: In PyNative mode, if `numpy` is used for computation, gradient transfer will ## Q: How do I modify the `weight` and `bias` of the fully-connected layer like `torch.nn.functional.linear()`? -A: The [nn.Dense](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.Dense.html) interface is similar to `torch.nn.functional.linear()`. `nn.Dense` can specify the initial values of `weight` and `bias`. Subsequent changes are automatically updated by the optimizer. During the training, you do not need to change the values of the two parameters. +A: The [nn.Dense](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.Dense.html) interface is similar to `torch.nn.functional.linear()`. `nn.Dense` can specify the initial values of `weight` and `bias`. Subsequent changes are automatically updated by the optimizer. During the training, you do not need to change the values of the two parameters.
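For example, a minimal sketch of specifying the initial `weight` and `bias` of `nn.Dense` (the shapes are illustrative; the weight shape is `[out_channels, in_channels]`):

```python
import numpy as np
from mindspore import nn, Tensor

weight = Tensor(np.random.randn(3, 4).astype(np.float32))  # [out_channels, in_channels]
bias = Tensor(np.zeros(3).astype(np.float32))
dense = nn.Dense(4, 3, weight_init=weight, bias_init=bias)

x = Tensor(np.ones((2, 4)).astype(np.float32))
print(dense(x).shape)  # (2, 3)
```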
@@ -52,7 +52,7 @@ In most cases, the training network and parameters can be directly reused. Note net.set_train(False) ``` -The optimizer is not required in the `eval` phase. However, if the [model.eval](https://www.mindspore.cn/docs/en/master/api_python/train/mindspore.train.Model.html#mindspore.train.Model.eval) API of MindSpore needs to be used, the `loss function` needs to be configured. For example: +The optimizer is not required in the `eval` phase. However, if the [model.eval](https://www.mindspore.cn/docs/en/br_base/api_python/train/mindspore.train.Model.html#mindspore.train.Model.eval) API of MindSpore needs to be used, the `loss function` needs to be configured. For example: ```python # Define a model. @@ -65,7 +65,7 @@ res = model.eval(dataset) ## Q: How do I use `param_group` in SGD to reduce the learning rate? -A: To change the value according to `epoch`, use [Dynamic LR Function](https://www.mindspore.cn/docs/en/master/api_python/mindspore.nn.html#dynamic-lr-function) and set `step_per_epoch` to `step_size`. To change the value according to `step`, set `step_per_epoch` to 1. You can also use [LearningRateSchedule](https://www.mindspore.cn/docs/en/master/api_python/mindspore.nn.html#learningrateschedule-class). +A: To change the value according to `epoch`, use [Dynamic LR Function](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.nn.html#dynamic-lr-function) and set `step_per_epoch` to `step_size`. To change the value according to `step`, set `step_per_epoch` to 1. You can also use [LearningRateSchedule](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.nn.html#learningrateschedule-class).
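A minimal sketch, assuming a small stand-in network defined on the spot: an epoch-wise decaying learning rate generated by `nn.exponential_decay_lr` and passed to `nn.SGD`; setting `step_per_epoch` to 1 decays it per step instead.

```python
from mindspore import nn

net = nn.Dense(4, 3)  # stand-in network for illustration
step_per_epoch = 100  # set to 1 to decay the learning rate per step

lr_list = nn.exponential_decay_lr(0.1, 0.9, total_step=1000,
                                  step_per_epoch=step_per_epoch, decay_epoch=1)
optimizer = nn.SGD(net.trainable_params(), learning_rate=lr_list)
```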
@@ -123,7 +123,7 @@ model.train(epoch_size, ds_train, callbacks=[stop_cb]) ## Q: How do I obtain `feature map` with the expected size when `nn.Conv2d` is used? -A: For details about how to derive the `Conv2d shape`, click [here](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.Conv2d.html#mindspore.nn.Conv2d). Change `pad_mode` of `Conv2d` to `same`. Alternatively, you can calculate the `pad` based on the `Conv2d shape` derivation formula to keep the `shape` unchanged. Generally, the pad is `(kernel_size-1)//2`. +A: For details about how to derive the `Conv2d shape`, click [here](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.Conv2d.html#mindspore.nn.Conv2d). Change `pad_mode` of `Conv2d` to `same`. Alternatively, you can calculate the `pad` based on the `Conv2d shape` derivation formula to keep the `shape` unchanged. Generally, the pad is `(kernel_size-1)//2`.
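For example, a small sketch of both options (shapes are illustrative): `pad_mode="same"` keeps H/W unchanged automatically, and an explicit pad of `(kernel_size-1)//2` with `pad_mode="pad"` gives the same result for stride 1.

```python
import numpy as np
import mindspore as ms
from mindspore import nn, Tensor

x = Tensor(np.ones((1, 3, 32, 32)), ms.float32)
conv_same = nn.Conv2d(3, 16, kernel_size=3, pad_mode="same")
conv_pad = nn.Conv2d(3, 16, kernel_size=3, pad_mode="pad", padding=(3 - 1) // 2)
print(conv_same(x).shape, conv_pad(x).shape)  # both (1, 16, 32, 32)
```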
@@ -143,7 +143,7 @@ model = ms.train.Model(net=train_net, loss_fn=None, optimizer=None) ## Q: How does MindSpore implement the early stopping function? -A: You can refer to [EarlyStopping](https://www.mindspore.cn/docs/en/master/api_python/train/mindspore.train.EarlyStopping.html). +A: You can refer to [EarlyStopping](https://www.mindspore.cn/docs/en/br_base/api_python/train/mindspore.train.EarlyStopping.html).
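A minimal usage sketch, assuming training is driven by `Model.fit` with an evaluation dataset; the monitored metric name depends on the metrics configured on the model.

```python
from mindspore.train import EarlyStopping

# Stop training if the monitored metric has not improved for 5 epochs.
early_stop = EarlyStopping(monitor="eval_loss", patience=5, mode="min")
# model.fit(num_epochs, train_dataset, valid_dataset, callbacks=[early_stop])
```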
@@ -243,7 +243,7 @@ print(network.layers) ## Q: When MindSpore is used for model training, there are four input parameters for `CTCLoss`: `inputs`, `labels_indices`, `labels_values`, and `sequence_length`. How do I use `CTCLoss` for model training? -A: The `dataset` received by the defined `model.train` API can consist of multiple pieces of data, for example, (`data1`, `data2`, `data3`, ...). Therefore, the `dataset` can contain `inputs`, `labels_indices`, `labels_values`, and `sequence_length` information. You only need to define the dataset in the corresponding format and transfer it to `model.train`. For details, see [Data Processing API](https://www.mindspore.cn/docs/en/master/features/index.html). +A: The `dataset` received by the defined `model.train` API can consist of multiple pieces of data, for example, (`data1`, `data2`, `data3`, ...). Therefore, the `dataset` can contain `inputs`, `labels_indices`, `labels_values`, and `sequence_length` information. You only need to define the dataset in the corresponding format and transfer it to `model.train`. For details, see [Data Processing API](https://www.mindspore.cn/docs/en/br_base/features/index.html).
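For illustration, a sketch of a `GeneratorDataset` that yields the four pieces of data as one row (the shapes and column names here are illustrative; they only need to match what the loss expects):

```python
import numpy as np
import mindspore.dataset as ds

def ctc_data_generator():
    for _ in range(8):
        inputs = np.random.randn(50, 1, 26).astype(np.float32)
        labels_indices = np.array([[0, 0], [0, 1]], dtype=np.int64)
        labels_values = np.array([3, 7], dtype=np.int32)
        sequence_length = np.array([50], dtype=np.int32)
        yield inputs, labels_indices, labels_values, sequence_length

dataset = ds.GeneratorDataset(ctc_data_generator,
                              column_names=["inputs", "labels_indices",
                                            "labels_values", "sequence_length"])
```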
@@ -491,7 +491,7 @@ In addition, CANN may throw some Inner Errors, for example, the error code is "E ## Q: How to control the Tensor value printed by the `print` method? -A: In PyNative dynamic graph mode, you can use numpy native methods such as ` set_ Printoptions ` to control the output value. In the Graph static graph mode, because the `print` method needs to be converted into an operator, the output value cannot be controlled temporarily. For specific usage of print operator, see [Reference](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Print.html). +A: In PyNative dynamic graph mode, you can use NumPy native methods such as `np.set_printoptions` to control the output value. In Graph static graph mode, because the `print` method needs to be converted into an operator, the output value cannot be controlled for now. For the specific usage of the Print operator, see [Reference](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Print.html).
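For example, in PyNative mode the tensor can be converted to NumPy before printing, so `np.set_printoptions` takes effect (a sketch; the option values are arbitrary):

```python
import numpy as np
import mindspore as ms
from mindspore import Tensor

np.set_printoptions(threshold=6, precision=3)
x = Tensor(np.random.randn(1000), ms.float32)
print(x.asnumpy())  # printed through NumPy, so the options above apply
```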
## Q: How does `Tensor.asnumpy()` share the underlying storage with Tensor? diff --git a/docs/mindspore/source_en/faq/inference.md b/docs/mindspore/source_en/faq/inference.md index 65c8385e66..75adc43d94 100644 --- a/docs/mindspore/source_en/faq/inference.md +++ b/docs/mindspore/source_en/faq/inference.md @@ -1,6 +1,6 @@ # Inference -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/faq/inference.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/faq/inference.md) ## Q: In the previous version, Atlas 200/300/500 inference product inference is performed based on the MindSpore installation package. However, the MindSpore release package of the new version does not support Atlas 200/300/500 inference product inference. How do I use Atlas 200/300/500 inference product for inference? (Changes in the MindSpore Atlas 200/300/500 Inference Product Inference Release Package) diff --git a/docs/mindspore/source_en/faq/installation.md b/docs/mindspore/source_en/faq/installation.md index af831ff924..e9fb5dfdfc 100644 --- a/docs/mindspore/source_en/faq/installation.md +++ b/docs/mindspore/source_en/faq/installation.md @@ -1,6 +1,6 @@ # Installation -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/faq/installation.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/faq/installation.md) ## Installing by Using Pip @@ -194,7 +194,7 @@ If you want to run on a GPU+Windows environment, you can try to use WSL+docker, > Since CUDA on WSL is still a preview feature, pay attention to the description of the Windows version requirements in the reference link, and the version is not enough to be upgraded. -3. Referring to [Docker Image](https://gitee.com/mindspore/mindspore/blob/master/README.md#docker-image), take MindSpore-GPU images. For example, take the MindSpore1.0.0 version container, and execute `docker pull mindspore/mindspore-gpu:1.0.0` to execute the container in WSL Ubuntu18.04: +3. Referring to [Docker Image](https://gitee.com/mindspore/mindspore/blob/br_base/README.md#docker-image), take MindSpore-GPU images. 
For example, take the MindSpore1.0.0 version container, and execute `docker pull mindspore/mindspore-gpu:1.0.0` to execute the container in WSL Ubuntu18.04: ```docker docker run -it --runtime=nvidia mindspore/mindspore-gpu:1.0.0 /bin/bash diff --git a/docs/mindspore/source_en/faq/network_compilation.md b/docs/mindspore/source_en/faq/network_compilation.md index 2c27b842d5..767ee0c694 100644 --- a/docs/mindspore/source_en/faq/network_compilation.md +++ b/docs/mindspore/source_en/faq/network_compilation.md @@ -1,16 +1,16 @@ # Network Compilation -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/faq/network_compilation.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/faq/network_compilation.md) ## Q: What is the set of syntaxes supported by static graph mode? -A: Static graph mode can support a subset of common Python syntax to support the construction and training of neural networks. Some Python syntax is not supported yet. For more detailed supported syntax set, please refer to [Static Graph Syntax Support](https://www.mindspore.cn/tutorials/en/master/compile/static_graph.html). In order to facilitate users to choose whether to extend the static graph syntax, the static graph mode provides JIT syntax support level options. For some network scenarios, it is recommended to use basic syntax (nn/ops, etc.) rather than extended syntax (such as numpy third-party library). In addition, it is recommended to use [Advanced Programming Techniques with Static Graphs](https://www.mindspore.cn/tutorials/en/master/compile/static_graph_expert_programming.html) to optimize compilation performance. +A: Static graph mode can support a subset of common Python syntax to support the construction and training of neural networks. Some Python syntax is not supported yet. For more detailed supported syntax set, please refer to [Static Graph Syntax Support](https://www.mindspore.cn/tutorials/en/br_base/compile/static_graph.html). In order to facilitate users to choose whether to extend the static graph syntax, the static graph mode provides JIT syntax support level options. For some network scenarios, it is recommended to use basic syntax (nn/ops, etc.) rather than extended syntax (such as numpy third-party library). In addition, it is recommended to use [Advanced Programming Techniques with Static Graphs](https://www.mindspore.cn/tutorials/en/br_base/compile/static_graph_expert_programming.html) to optimize compilation performance.
## Q: What can I do if an error "'self.xx' should be initialized as a 'Parameter' type in the '`__init__`' function" is reported? -A: If you want to assign for a class member such as `self.xx` in the function `construct`, `self.xx` must have been defined as a [Parameter]() type in the `__init__` function while the other types are not supported. But the local variable `xx` is not under the regulation. +A: If you want to assign a value to a class member such as `self.xx` in the `construct` function, `self.xx` must have been defined as a [Parameter]() type in the `__init__` function; other types are not supported. The local variable `xx` is not subject to this restriction.
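A small sketch of the rule: `self.count` is declared as a `Parameter` in `__init__`, so it can be assigned inside `construct`, while the local variable `y` is unrestricted.

```python
import mindspore as ms
from mindspore import nn, Parameter, Tensor

class Net(nn.Cell):
    def __init__(self):
        super().__init__()
        self.count = Parameter(Tensor(0.0, ms.float32), name="count")

    def construct(self, x):
        self.count = self.count + 1  # allowed: self.count is a Parameter
        y = x * 2                    # local variables are not restricted
        return y, self.count
```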
@@ -300,7 +300,7 @@ If the match, you need to check if a non-Tensor scenario in the exported paramet When the exported data input is a non-Tensor, the exported input will be solidified into MindIR as a constant, making the input in MindIR less than the Construct input for network construction. -If the data is a scalar type, you can export the scalar to Tensor type, and if the data is Tuple or List type, you can use the [mutable](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.mutable.html) interface to encapsulate it and export it. +If the data is a scalar type, you can export the scalar to Tensor type, and if the data is Tuple or List type, you can use the [mutable](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.mutable.html) interface to encapsulate it and export it.
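A minimal export sketch under that assumption (the network and file name are placeholders): wrapping the tuple input with `mutable` keeps it as a variable input in the exported MindIR instead of folding it into a constant.

```python
import mindspore as ms
from mindspore import nn, Tensor, mutable

class AddNet(nn.Cell):
    def construct(self, x):
        return x[0] + x[1]

net = AddNet()
inputs = mutable((Tensor([1.0], ms.float32), Tensor([2.0], ms.float32)))
ms.export(net, inputs, file_name="add_net", file_format="MINDIR")
```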
@@ -533,7 +533,7 @@ net = Net() out = net(Tensor(x)) ``` -3) If a function decorated with a @jit decorator is called in a custom class, an error will be reported. In this scenario, it is recommended to add @jit_class decorators to custom classes in the network and avoid the JIT Fallback feature. For more use of custom classes, please refer to [Supporting the Use of Custom Classes](https://www.mindspore.cn/tutorials/en/master/compile/static_graph.html#supporting-the-use-of-custom-classes). The use of jit_class decorators can be referred to [Use jit_class](https://www.mindspore.cn/tutorials/en/master/compile/static_graph_expert_programming.html#using-jit-class). +3) If a function decorated with a @jit decorator is called in a custom class, an error will be reported. In this scenario, it is recommended to add @jit_class decorators to custom classes in the network and avoid the JIT Fallback feature. For more use of custom classes, please refer to [Supporting the Use of Custom Classes](https://www.mindspore.cn/tutorials/en/br_base/compile/static_graph.html#supporting-the-use-of-custom-classes). The use of jit_class decorators can be referred to [Use jit_class](https://www.mindspore.cn/tutorials/en/br_base/compile/static_graph_expert_programming.html#using-jit-class). ```python import mindspore as ms @@ -768,13 +768,13 @@ A: The following scenarios will trigger recompilation: - The length of Tuple or List changes. -- When the input of network is tuple[Tensor], list[Tensor] or Dict[Tensor], even if the shape and dtype of the Tensor inside do not change. For more details, please refer to [mutable](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.mutable.html). +- When the input of network is tuple[Tensor], list[Tensor] or Dict[Tensor], even if the shape and dtype of the Tensor inside do not change. For more details, please refer to [mutable](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.mutable.html).
## Q: How to determine how many graphs there are in static graph mode? When will the subgraph be divided? What is the impact of multiple subgraphs? How to avoid multiple subgraphs? -A: 1. The number of subgraphs can be obtained by viewing the IR file and searching for "Total subgraphs". For how to view and analyze IR files, please refer to [MindSpore IR Introduction](https://www.mindspore.cn/tutorials/en/master/debug/error_analysis/mindir.html) +A: 1. The number of subgraphs can be obtained by viewing the IR file and searching for "Total subgraphs". For how to view and analyze IR files, please refer to [MindSpore IR Introduction](https://www.mindspore.cn/tutorials/en/br_base/debug/error_analysis/mindir.html) 2. Subgraph segmentation in static graph mode is common in control flow scenarios, such as if/while. In addition to manual writing by users, the control flow syntax within the MindSpore may also lead to dividing into multiple subgraphs. diff --git a/docs/mindspore/source_en/faq/operators_compile.md b/docs/mindspore/source_en/faq/operators_compile.md index 831a345d1b..899e04da1d 100644 --- a/docs/mindspore/source_en/faq/operators_compile.md +++ b/docs/mindspore/source_en/faq/operators_compile.md @@ -1,10 +1,10 @@ # Operators Compile -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/faq/operators_compile.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/faq/operators_compile.md) ## Q: When the `ops.concat` operator is used, the error message `Error:Input and (output + workspace) num should <=192!` is displayed, which indicates that the data volume is large. What can I do? -A: The `shape` of the [ops.concat](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.concat.html) operator is too large. You are advised to set the output to `numpy` when creating an iterator for the `dataset` object. The setting is as follows: +A: The `shape` of the [ops.concat](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.concat.html) operator is too large. You are advised to set the output to `numpy` when creating an iterator for the `dataset` object. The setting is as follows: ```python gallaryloader.create_dict_iterator(output_numpy=True) @@ -16,7 +16,7 @@ In the post-processing phase (in a non-network calculation process, that is, in ## Q: In the `construct` function of the static graph mode, how do I remove all negative values contained in a `tensor`? -A: You are advised to use the [ops.clip_by_value](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.clip_by_value.html) interface to change all negative numbers to 0 for computation. +A: You are advised to use the [ops.clip_by_value](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.clip_by_value.html) interface to change all negative numbers to 0 for computation.
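For example (a sketch; the upper bound is an arbitrarily large value), clamping negative values to 0:

```python
import mindspore as ms
from mindspore import Tensor, ops

x = Tensor([[-1.5, 2.0], [3.0, -0.5]], ms.float32)
y = ops.clip_by_value(x, clip_value_min=Tensor(0.0, ms.float32),
                      clip_value_max=Tensor(1e9, ms.float32))
print(y)  # negative values are replaced by 0.0
```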
@@ -34,19 +34,19 @@ A: The number of tensors to be concatenated at a time cannot exceed 192 accordin ## Q: When `Conv2D` is used to define convolution, the `group` parameter is used. Is it necessary to ensure that the value of `group` can be exactly divided by the input and output dimensions? How is the `group` parameter transferred? -A: The `Conv2d` operator has the following constraint: When the value of `group` is greater than 1, the value must be the same as the number of input and output channels. Do not use [ops.Conv2D](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Conv2D.html). Currently, this operator does not support a value of `group` that is greater than 1. Only the [nn.Conv2D](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.Conv2d.html) API of MindSpore supports group convolution. However, the number of `group` must be the same as the number of input and output channels. +A: The `Conv2d` operator has the following constraint: when the value of `group` is greater than 1, it must equal both the number of input channels and the number of output channels. Do not use [ops.Conv2D](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Conv2D.html); currently, this operator does not support a value of `group` greater than 1. Only the [nn.Conv2d](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.Conv2d.html) API of MindSpore supports group convolution, and the value of `group` must be the same as the number of input and output channels.
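For illustration, a depthwise-style configuration that satisfies this constraint might look as follows (the channel counts are chosen arbitrarily):

```python
import numpy as np
import mindspore as ms
from mindspore import Tensor, nn

# group equals both in_channels and out_channels, as the constraint above requires.
conv = nn.Conv2d(in_channels=8, out_channels=8, kernel_size=3, group=8)
x = Tensor(np.ones((1, 8, 32, 32)), ms.float32)
print(conv(x).shape)  # (1, 8, 32, 32) with the default 'same' pad mode
```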
## Q: Does MindSpore support matrix transposition? -A: Yes. For details, see [mindspore.ops.Transpose](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Transpose.html#mindspore.ops.Transpose). +A: Yes. For details, see [mindspore.ops.Transpose](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Transpose.html#mindspore.ops.Transpose).
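A minimal usage sketch of the operator mentioned above:

```python
import numpy as np
import mindspore as ms
from mindspore import Tensor, ops

x = Tensor(np.arange(6).reshape(2, 3), ms.float32)
transpose = ops.Transpose()
y = transpose(x, (1, 0))  # permute the two axes
print(y.shape)  # (3, 2)
```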
## Q: Can MindSpore calculate the variance of any `tensor`? -A: You can use the mindspore.Tensor.var interface to calculate the variance of a Tensor. You can refer to [mindspore.Tensor.var(axis=None, ddof=0, keepdims=False)](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.var.html#mindspore.Tensor.var) to realize it. +A: Yes. You can use the mindspore.Tensor.var interface to calculate the variance of a Tensor. For details, refer to [mindspore.Tensor.var(axis=None, ddof=0, keepdims=False)](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.var.html#mindspore.Tensor.var).
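For example (input values chosen arbitrarily):

```python
import numpy as np
import mindspore as ms
from mindspore import Tensor

x = Tensor(np.array([[1.0, 2.0], [3.0, 4.0]]), ms.float32)
print(x.var())        # variance over all elements
print(x.var(axis=0))  # variance along each column
print(x.var(ddof=1))  # sample variance, dividing by N - 1
```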
@@ -59,7 +59,7 @@ In MindSpore, you can manually initialize the weight corresponding to the `paddi ## Q: When the `Tile` operator in operations executes `__infer__`, the `value` is `None`. Why is the value lost? -A: The `multiples input` of the `Tile` operator must be a constant (The value cannot directly or indirectly come from the input of the graph). Otherwise, the `None` data will be obtained during graph composition because the graph input is transferred only during graph execution and the input data cannot be obtained during graph composition. For the detailed information, refer to [Static Graph Syntax Support](https://www.mindspore.cn/tutorials/en/master/compile/static_graph.html). +A: The `multiples` input of the `Tile` operator must be a constant (its value cannot come, directly or indirectly, from the input of the graph). Otherwise, `None` is obtained during graph composition, because the graph input is passed in only during graph execution and its value is not available while the graph is being composed. For detailed information, refer to [Static Graph Syntax Support](https://www.mindspore.cn/tutorials/en/br_base/compile/static_graph.html).
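The constraint can be sketched as follows; the function below is hypothetical and only illustrates that `multiples` has to be a compile-time constant rather than a graph input:

```python
import numpy as np
import mindspore as ms
from mindspore import Tensor, ops

@ms.jit
def tile_ok(x):
    # (2, 3) is a Python constant, so its value is known during graph composition
    # and the output shape can be inferred.
    return ops.tile(x, (2, 3))

x = Tensor(np.ones((1, 2)), ms.float32)
print(tile_ok(x).shape)  # (2, 6)

# By contrast, a function such as
#     def tile_bad(x, multiples): return ops.tile(x, multiples)
# receives multiples through the graph input, so its value is None during
# graph composition and shape inference fails.
```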
@@ -71,7 +71,7 @@ A: TBE (Tensor Boost Engine) operator is Huawei's self-developed Ascend operator ## Q: Has MindSpore implemented the anti-pooling operation similar to `nn.MaxUnpool2d`? -A: Currently, MindSpore does not provide anti-pooling APIs but you can customize the operator to implement the operation. For details, refer to [Customize Operators](https://www.mindspore.cn/tutorials/en/master/custom_program/op_custom.html). +A: Currently, MindSpore does not provide anti-pooling APIs but you can customize the operator to implement the operation. For details, refer to [Customize Operators](https://www.mindspore.cn/tutorials/en/br_base/custom_program/op_custom.html).
@@ -81,7 +81,7 @@ A: In this case, 1. Make sure if these operators are fusion operators. The operator pre-compiling may change the value of operator's attribute `fusion_type`, the attr will affect the fusion of operator. The performance of the fused operator is not necessarily better than that of small operator. -2. If these operators are not fusion operators, using the environment variable `MS_COMPILER_OP_LEVEL` to generate the operator debug info, and then ask the operator developer for help. For specific configuration instructions, see [Environment Variables](https://www.mindspore.cn/docs/en/master/api_python/env_var_list.html). +2. If these operators are not fusion operators, use the environment variable `MS_COMPILER_OP_LEVEL` to generate the operator debug info, and then ask the operator developer for help. For specific configuration instructions, see [Environment Variables](https://www.mindspore.cn/docs/en/br_base/api_python/env_var_list.html).
@@ -109,13 +109,13 @@ A: The `Ascend` backend operators can be divided into AI CORE operators and AI C 1. If the `AI CORE` operator's candidates list is empty, it may be that all operator information failed to pass the verification in the `check support` stage. You can search the keyword `CheckSupport` in the log to find the reason for the failure. Modify the shape or data type according to the specific information, or ask the developer to further locate the problem. 2. If the `AI CPU` candidate operator information is not empty, or the candidate operator information of `AI CORE` and `AI CPU` are both not empty, it may be that the given input data type was not in the candidate list and was filtered out in the selection stage. Try to modify the input data type of the operator according to the candidate list. -You can select a proper mode and writing method to complete the training by referring to the [official website tutorial](https://www.mindspore.cn/tutorials/en/master/beginner/accelerate_with_static_graph.html). +You can select a proper mode and writing method to complete the training by referring to the [official website tutorial](https://www.mindspore.cn/tutorials/en/br_base/beginner/accelerate_with_static_graph.html).
## Q: What are the type conversion rules for the inputs of MindSpore's operator? If there is a zero-dimensional Tensor in the inputs, do we follow the rules? -A: For the type conversion rules for the inputs of MindSpore's operator, please refer to [Type Conversion Rules](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.dtype.html#mindspore.dtype). Different from PyTorch, MindSpore also follows this rule when there is a zero-dimensional Tensor in the inputs. The sample code is as follows. +A: For the type conversion rules for the inputs of MindSpore's operator, please refer to [Type Conversion Rules](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.dtype.html#mindspore.dtype). Different from PyTorch, MindSpore also follows this rule when there is a zero-dimensional Tensor in the inputs. The sample code is as follows. ```python import torch diff --git a/docs/mindspore/source_en/faq/performance_tuning.md b/docs/mindspore/source_en/faq/performance_tuning.md index 9f428c3df4..eaee852c22 100644 --- a/docs/mindspore/source_en/faq/performance_tuning.md +++ b/docs/mindspore/source_en/faq/performance_tuning.md @@ -1,11 +1,11 @@ # Performance Tuning -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/faq/performance_tuning.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/faq/performance_tuning.md) ## Q: What can I do if the network performance is abnormal and weight initialization takes a long time during training after MindSpore is installed? A: The `scipy 1.4` series versions may be used in the environment. Run the `pip list | grep scipy` command to view the scipy version and change the `scipy` version to that required by MindSpore. You can view the third-party library dependency in the `requirement.txt` file. - + ## Q: How to choose the batchsize to achieve the best performance when training models on the Ascend chip? diff --git a/docs/mindspore/source_en/faq/precision_tuning.md b/docs/mindspore/source_en/faq/precision_tuning.md index 3f4d4b852a..fe5f48d8df 100644 --- a/docs/mindspore/source_en/faq/precision_tuning.md +++ b/docs/mindspore/source_en/faq/precision_tuning.md @@ -1,6 +1,6 @@ # Precision Tuning -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/faq/precision_tuning.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/faq/precision_tuning.md) ## Q: Why is the loss value not converged or why does the accuracy not meet the requirement? How can I locate and optimize the loss value? 
diff --git a/docs/mindspore/source_en/features/compile/graph_construction.md b/docs/mindspore/source_en/features/compile/graph_construction.md index bc30d9fe52..6375ccc49c 100644 --- a/docs/mindspore/source_en/features/compile/graph_construction.md +++ b/docs/mindspore/source_en/features/compile/graph_construction.md @@ -1,16 +1,16 @@ # Graph Construction (Compilation) -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/features/compile/graph_construction.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/features/compile/graph_construction.md) MindSpore provides JIT (just-in-time) technology to optimize the performance. The JIT mode parses the code into an intermediate representation (IR) graph by means of AST tree parsing, Python bytecode parsing or code execution tracing, which serves as a unique representation of the code, and the compiler optimizes the code by optimizing the IR graph to improve the runtime performance. In contrast to the dynamic graph model, this JIT compilation model is called the static graph model. -Based on JIT technology, MindSpore provides a dynamic-static combination approach to improve the operational efficiency of the user's network. The combination of dynamic and static, that is, in the overall run as a dynamic graph, specifies certain code blocks to run as a static graph. Code blocks that run as static graphs are compiled first and then executed, and global optimizations are performed during the compilation period to obtain performance gains during the execution period. Users can modify functions with the `@jit` decorator to specify that they execute according to the pattern of a static graph. For the documentation on the `@jit` decorator, refer to [jit API documentation](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.jit.html#mindspore.jit). +Based on JIT technology, MindSpore provides a dynamic-static combination approach to improve the operational efficiency of the user's network. The combination of dynamic and static, that is, in the overall run as a dynamic graph, specifies certain code blocks to run as a static graph. Code blocks that run as static graphs are compiled first and then executed, and global optimizations are performed during the compilation period to obtain performance gains during the execution period. Users can modify functions with the `@jit` decorator to specify that they execute according to the pattern of a static graph. For the documentation on the `@jit` decorator, refer to [jit API documentation](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.jit.html#mindspore.jit). MindSpore provides three JIT compilation methods, namely, ast, bytecode and trace. The ast converts the functions that are identified by the users manually and need to be executed in accordance with the ast into a static graph through the AST tree parsing. The bytecode is through the Python bytecode parsing, in the dynamic graph as much as possible to build a static graph. The part that can not be converted to a static graph will be in accordance with the dynamic graph for the purpose of combining static and dynamic. 
The trace constructs a static graph by tracing the execution path of Python code and is currently an experimental feature. Subsequent introduction will explain in detail the difference among the three principles and their respective characteristics. ## Ast -In dynamic graph mode, the user can modify a function to execute in ast mode by using the `@jit(capture_mode=“ast”)` decorator. The syntax and data structures used inside the functions which decorated by ast mode need to strictly follow the [Static Graph Syntax Specification](https://www.mindspore.cn/tutorials/en/master/compile/static_graph.html). The ast approach compiles Python code via a source-to-source method, which first parses the Python source code of model definitions into an Abstract Syntax Tree (AST), then converts the AST into MindIR. For example, the following Python code: +In dynamic graph mode, the user can modify a function to execute in ast mode by using the `@jit(capture_mode="ast")` decorator. The syntax and data structures used inside functions decorated in ast mode must strictly follow the [Static Graph Syntax Specification](https://www.mindspore.cn/tutorials/en/br_base/compile/static_graph.html). The ast approach compiles Python code via a source-to-source method, which first parses the Python source code of model definitions into an Abstract Syntax Tree (AST), then converts the AST into MindIR. For example, the following Python code: ```python @jit @@ -21,7 +21,7 @@ def foo(x, y): The corresponding AST is as follows: -![image](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/docs/mindspore/source_zh_cn/features/compile/images/ast.png) +![image](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/docs/mindspore/source_zh_cn/features/compile/images/ast.png) By parsing the above AST, we obtain the following MindIR: @@ -83,7 +83,7 @@ In the above use case, the tensor_cal function is modified by the @jit decorator - The vast majority of calculations and optimizations for MindSpore static graphs are based on optimizations for Tensor calculations, so we recommend that the functions that are modified should be the kind of functions that are used to perform real data calculations, rather than simple scalar calculations or transformations of data structures. -- Functions modified by `@jit` that have constants in their inputs will result in a recompile each time that the function input value changes. See [Constants and Variables Within JIT](https://www.mindspore.cn/tutorials/en/master/compile/static_graph.html#constants-and-variables-within-jit) for the concept of variable constants. Therefore, it is recommended that the modified function takes as input Tensor or data modified by Mutable. Avoid additional performance loss due to multiple compilations. +- Functions modified by `@jit` that have constants in their inputs will result in a recompile each time the function input value changes. See [Constants and Variables Within JIT](https://www.mindspore.cn/tutorials/en/br_base/compile/static_graph.html#constants-and-variables-within-jit) for the concepts of constants and variables within JIT. Therefore, it is recommended that the decorated function take Tensors, or data wrapped by mutable, as input, to avoid additional performance loss from repeated compilation, as illustrated in the sketch below.
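A minimal sketch of the recompilation behavior described in the last point above (the function and values are hypothetical):

```python
import numpy as np
import mindspore as ms
from mindspore import Tensor

@ms.jit
def scale(x, factor):
    return x * factor

x = Tensor(np.ones((2, 2)), ms.float32)

# A Python scalar is folded into the graph as a constant, so each new value
# (2.0, then 3.0) triggers a separate compilation of `scale`.
scale(x, 2.0)
scale(x, 3.0)

# Passing the factor as a Tensor keeps it a variable, so the compiled graph is
# reused across calls with different values.
scale(x, Tensor(2.0, ms.float32))
scale(x, Tensor(3.0, ms.float32))
```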
## Bytecode @@ -100,7 +100,7 @@ In addition to ast, MindSpore provides another static acceleration mechanism, by The compilation process of bytecode is illustrated in the following diagram: -![image](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/docs/mindspore/source_zh_cn/features/compile/images/bytecode.png) +![image](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/docs/mindspore/source_zh_cn/features/compile/images/bytecode.png) **bytecode Usage** diff --git a/docs/mindspore/source_en/features/compile/graph_optimization.md b/docs/mindspore/source_en/features/compile/graph_optimization.md index 00bb22bb19..19b31d0c8e 100644 --- a/docs/mindspore/source_en/features/compile/graph_optimization.md +++ b/docs/mindspore/source_en/features/compile/graph_optimization.md @@ -1,6 +1,6 @@ # Graph Optimization (Compilation) -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/features/compile/graph_optimization.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/features/compile/graph_optimization.md) Similar to traditional compilers, MindSpore also performs compilation optimization after graph construction. The main purpose of compilation optimization is to analyze and transform MindSpore's intermediate representation MindIR by static analysis techniques to achieve goals such as reducing the size of the target code, improving execution efficiency, lowering runtime resource consumption, or enhancing other performance metrics. Compilation optimization is a crucial part of the graph compilation system and plays an extremely important role in improving the performance and resource utilization of the entire neural network model. Compared with the original code that has not been optimized, compilation optimization can bring several times or even tens of times performance improvement. diff --git a/docs/mindspore/source_en/features/compile/multi_level_compilation.md b/docs/mindspore/source_en/features/compile/multi_level_compilation.md index 5ca3e25b7a..e50e916c37 100644 --- a/docs/mindspore/source_en/features/compile/multi_level_compilation.md +++ b/docs/mindspore/source_en/features/compile/multi_level_compilation.md @@ -1,6 +1,6 @@ # Multi-Level Compilation Introduction (Compilation) -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/features/compile/multi_level_compilation.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/features/compile/multi_level_compilation.md) ## Background @@ -16,13 +16,13 @@ With the arrival of the era of deep learning large models, the bigger the networ ![jit_level_framework](./images/multi_level_compilation/jit_level_framework.png) -1. 
Multi-level compilation external interface: configure multi-level compilation level through [mindspore.jit(jit_level="O0/O1")](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.jit.html#mindspore.jit), jit_level defaults to O0. We usually recommend that users use O0 mode for network debugging tuning. After debugging is ready, for better performance you can turn on O1 to run the network. +1. Multi-level compilation external interface: configure multi-level compilation level through [mindspore.jit(jit_level="O0/O1")](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.jit.html#mindspore.jit), jit_level defaults to O0. We usually recommend that users use O0 mode for network debugging tuning. After debugging is ready, for better performance you can turn on O1 to run the network. 2. Backend graph compilation: According to the configured multi-level compilation level, different compilation modes are selected. O0 is the most basic native composition and compilation, and O1 adds automatic operator fusion function on the basis of O0, with the main functions of graph optimization, graph-operator fusion, operator selection, and execution sequence scheduling, of which graph-operator fusion is a unique function in O1 mode. 3. Backend graph execution: The O0 and O1 modes are the same at the execution level, and both use a single operator way of scheduling execution, with the main functions of multi-stream concurrency, multi-level streaming, HAL management, and memory management. ## Introduction to the O0 Model -O0 is the basic graph compilation and execution mode, except for the necessary impact on the functionality of the optimization, other optimizations are turned off, the use of native graph structure for compilation and execution, easy to debug and tuning, with better compilation performance. The following mainly introduces the functions related to backend graph compilation, and the functions related to backend graph execution are detailed in [runtime](https://www.mindspore.cn/docs/en/master/features/runtime/memory_manager.html). +O0 is the basic graph compilation and execution mode, except for the necessary impact on the functionality of the optimization, other optimizations are turned off, the use of native graph structure for compilation and execution, easy to debug and tuning, with better compilation performance. The following mainly introduces the functions related to backend graph compilation, and the functions related to backend graph execution are detailed in [runtime](https://www.mindspore.cn/docs/en/br_base/features/runtime/memory_manager.html). ### Graph Optimization @@ -70,7 +70,7 @@ Execution order scheduling is a complex problem of solving optimal operator conc - First, the optimization module needs to address the complexity of solving for optimal operator concurrency. Due to the large number of operators in the computational graph and their interdependencies, finding an execution order that maximizes concurrency while maintaining the logical correctness of the computational graph is a challenging task. - Second, memory constraints are a critical factor that cannot be ignored in execution order optimization. Increasing concurrency, while improving computational efficiency, tends to significantly increase peak memory requirements, which may lead to Overflow of Memory (OOM) errors, especially in resource-constrained environments. 
Therefore, the optimization module must weigh the relationship between concurrency and memory usage to ensure that concurrency is increased without exceeding the memory capacity of the system. -- MindSpore's execution order adjustment module combines rule-based and heuristic-based strategies to provide both bfs/dfs execution order orchestration algorithms [mindspore.jit(option={“exec_order”: “bfs/dfs”})](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.jit.html#mindspore.jit) to achieve fine-grained adjustment of the execution order of the computation graph, so as to effectively deal with multiple challenges such as memory constraints and system stability while ensuring computational efficiency. +- MindSpore's execution order adjustment module combines rule-based and heuristic-based strategies to provide both bfs/dfs execution order orchestration algorithms [mindspore.jit(option={"exec_order": "bfs/dfs"})](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.jit.html#mindspore.jit) to achieve fine-grained adjustment of the execution order of the computation graph, so as to effectively deal with multiple challenges such as memory constraints and system stability while ensuring computational efficiency. ## Introduction to the O1 Model @@ -101,7 +101,7 @@ The overall architecture of graph-kernel fusion is shown in the figure below. Th The optimized computational graph is passed to MindSpore AKG as a subgraph for further back-end optimization and target code generation. -![graphkernel](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/docs/mindspore/source_zh_cn/features/images/graphkernel.png) +![graphkernel](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/docs/mindspore/source_zh_cn/features/images/graphkernel.png) By following these steps, we can obtain two aspects of performance gains: diff --git a/docs/mindspore/source_en/features/data_engine.md b/docs/mindspore/source_en/features/data_engine.md index 256403c7b0..684659aa07 100644 --- a/docs/mindspore/source_en/features/data_engine.md +++ b/docs/mindspore/source_en/features/data_engine.md @@ -1,6 +1,6 @@ # High Performance Data Processing Engine -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/features/data_engine.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/features/data_engine.md) ## Background Introduction @@ -14,9 +14,9 @@ The core of MindSpore training data processing engine is to efficiently and flex - Provide an automatic data augmentation mode, and perform automatic data augmentation on images based on specific strategies. - Provide single-node data caching capability to solve the problem of repeated loading and processing of data, reduce data processing overhead, and improve device-to-device training efficiency.
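A typical pipeline combining these capabilities might be sketched as follows; the in-memory data source and the chosen transforms are illustrative only:

```python
import numpy as np
import mindspore.dataset as ds
import mindspore.dataset.vision as vision

# An in-memory source standing in for a real dataset: (image, label) pairs.
data = [(np.random.randint(0, 255, (32, 32, 3), dtype=np.uint8), i % 10) for i in range(100)]

dataset = ds.GeneratorDataset(data, column_names=["image", "label"], shuffle=True)
dataset = dataset.map(operations=[vision.RandomHorizontalFlip()], input_columns=["image"])
dataset = dataset.batch(16)

for image, label in dataset.create_tuple_iterator(output_numpy=True):
    print(image.shape, label.shape)  # (16, 32, 32, 3) (16,)
    break
```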
-Please refer to the instructions for usage: [Data Loading And Processing](https://www.mindspore.cn/tutorials/en/master/dataset/overview.html) +Please refer to the instructions for usage: [Data Loading And Processing](https://www.mindspore.cn/tutorials/en/br_base/dataset/overview.html) -![image](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/docs/mindspore/source_en/features/images/data/data_engine_en.png) +![image](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/docs/mindspore/source_en/features/images/data/data_engine_en.png) MindSpore training data engine also provides efficient loading and sampling capabilities of datasets in fields, such as scientific computing-electromagnetic simulation, remote sensing large-format image processing, helping MindSpore achieve full-scene support. @@ -26,7 +26,7 @@ MindSpore training data engine also provides efficient loading and sampling capa The design of MindSpore considers the efficiency, flexibility and adaptability of data processing in different scenarios. The whole data processing subsystem is divided into the following modules: -![image](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/docs/mindspore/source_zh_cn/features/images/data/architecture.png) +![image](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/docs/mindspore/source_zh_cn/features/images/data/architecture.png) - API: The data processing process is represented in MindSpore in the form of a graph, called a data graph. MindSpore provides Python API to define data graphs externally and implement graph optimization and graph execution internally. - Data Processing Pipeline: Data loading and pre-processing multi-step parallel pipeline, which consists of the following components. @@ -83,9 +83,9 @@ Users often have diverse needs for data processing, and processing logic that is To address the challenge of having a wide variety of datasets with different formats and organization, MindSpore provides three different methods of loading datasets: - - For common datasets in each domain, they can be loaded directly by using MindSpore built-in API interface. MindSpore provides [CelebADataset](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.CelebADataset.html), [Cifar10Dataset](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.Cifar10Dataset.html), [CocoDataset](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.CocoDataset.html), [ImageFolderDataset](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.ImageFolderDataset.html), [MnistDataset](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.MnistDataset.html), [VOCDataset](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.VOCDataset.html) and other common dataset loading interfaces to ensure performance while enabling users to use them out of the box. - - For datasets that do not support direct loading at the moment, they can be converted to MindSpore data format, i.e. MindRecord, and then loaded through the [MindDataset](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.MindDataset.html) interface. MindRecord can normalize different dataset formats, with various advantages such as aggregated storage, efficient reading, fast coding and decoding, and flexible control of partition size. 
- - Users can also write custom dataset reading classes in Python and then use the [GeneratorDataset](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.GeneratorDataset.html) interface for dataset loading. This method allows for quick integration of existing code, but requires additional attention to data loading performance as it is a Python IO Reader. + - For common datasets in each domain, they can be loaded directly by using MindSpore built-in API interface. MindSpore provides [CelebADataset](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.CelebADataset.html), [Cifar10Dataset](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.Cifar10Dataset.html), [CocoDataset](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.CocoDataset.html), [ImageFolderDataset](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.ImageFolderDataset.html), [MnistDataset](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.MnistDataset.html), [VOCDataset](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.VOCDataset.html) and other common dataset loading interfaces to ensure performance while enabling users to use them out of the box. + - For datasets that do not support direct loading at the moment, they can be converted to MindSpore data format, i.e. MindRecord, and then loaded through the [MindDataset](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.MindDataset.html) interface. MindRecord can normalize different dataset formats, with various advantages such as aggregated storage, efficient reading, fast coding and decoding, and flexible control of partition size. + - Users can also write custom dataset reading classes in Python and then use the [GeneratorDataset](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.GeneratorDataset.html) interface for dataset loading. This method allows for quick integration of existing code, but requires additional attention to data loading performance as it is a Python IO Reader. - Support more operations by Python layer customization and C++ layer plug-in @@ -99,7 +99,7 @@ Users often have diverse needs for data processing, and processing logic that is To support AutoAugment, an automatic data augmentation strategy, MindSpore provides the following interfaces. - - [RandomChoice](https://www.mindspore.cn/docs/en/master/api_python/dataset_transforms/mindspore.dataset.transforms.RandomChoice.html), or random selection, allows the user to define a list of data augmentation operations, and the data processing process will select one data augmentation operation from the list with equal probability for each image. + - [RandomChoice](https://www.mindspore.cn/docs/en/br_base/api_python/dataset_transforms/mindspore.dataset.transforms.RandomChoice.html), or random selection, allows the user to define a list of data augmentation operations, and the data processing process will select one data augmentation operation from the list with equal probability for each image. 
```python from mindspore.dataset.transforms import RandomChoice @@ -110,7 +110,7 @@ Users often have diverse needs for data processing, and processing logic that is RandomRotation((90, 90))]) ``` - - [RandomApply](https://www.mindspore.cn/docs/en/master/api_python/dataset_transforms/mindspore.dataset.transforms.RandomApply.html), a random probability execution, allows the user to define a list of data augmentation operations and the corresponding probabilities, and the data augmentation operations in the list will be executed for each image with the specified probability, either all or none. + - [RandomApply](https://www.mindspore.cn/docs/en/br_base/api_python/dataset_transforms/mindspore.dataset.transforms.RandomApply.html), a random probability execution, allows the user to define a list of data augmentation operations and the corresponding probabilities, and the data augmentation operations in the list will be executed for each image with the specified probability, either all or none. ```python from mindspore.dataset.transforms import RandomApply @@ -121,7 +121,7 @@ Users often have diverse needs for data processing, and processing logic that is RandomRotation((90, 90))], 0.8) ``` - - [RandomSelectSubpolicy](https://www.mindspore.cn/docs/en/master/api_python/dataset_vision/mindspore.dataset.vision.RandomSelectSubpolicy.html), a random subpolicy selection, allows users to define multiple lists of data augmentation operation subpolices and specify the probability of execution for each data augmentation operation in the subpolicy. During data processing, a subpolicy is first selected with equal probability for each image, and then whether each data augmentation operation is performed is decided in order according to the probability. + - [RandomSelectSubpolicy](https://www.mindspore.cn/docs/en/br_base/api_python/dataset_vision/mindspore.dataset.vision.RandomSelectSubpolicy.html), a random subpolicy selection, allows users to define multiple lists of data augmentation operation subpolices and specify the probability of execution for each data augmentation operation in the subpolicy. During data processing, a subpolicy is first selected with equal probability for each image, and then whether each data augmentation operation is performed is decided in order according to the probability. ```python from mindspore.dataset.vision import RandomSelectSubpolicy, RandomRotation, RandomVerticalFlip, \ diff --git a/docs/mindspore/source_en/features/overview.md b/docs/mindspore/source_en/features/overview.md index 2899bbfb4b..215ecc6066 100644 --- a/docs/mindspore/source_en/features/overview.md +++ b/docs/mindspore/source_en/features/overview.md @@ -1,6 +1,6 @@ # MindSpore Design Overview -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/features/overview.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/features/overview.md) ## Introduction @@ -55,7 +55,7 @@ Static graph mode can effectively perceive the relationships between operators a MindSpore builds the graph structure of neural networks based on Python, which provides more usable and flexible expressiveness compared to traditional static graph modes. 
MindSpore innovatively builds source code conversion capabilities, constructing computational graphs based on Python statements by extracting AST, thus supporting developers' use of native Python syntax (conditions/loops, etc.) and other operations such as tuples, lists, and lambda expressions to build computational graphs and perform automatic differentiation. Therefore, MindSpore can better accommodate both dynamic and static graph programming interfaces, maintaining consistency at the code level, such as control flow writing. -Native Python expressions can directly enable static graph mode execution based on Python control flow keywords, making the programming unification of dynamic and static graphs higher. At the same time, developers can flexibly control Python code fragments in dynamic and static graph modes based on MindSpore's interfaces. That is, local functions can be executed in static graph mode ([mindspore.jit](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.jit.html)) while other functions are executed in dynamic graph mode. This allows developers to flexibly specify function fragments for static graph optimization and acceleration when interleaving with common Python libraries and custom Python functions, without sacrificing the programming ease of interleaved execution. +Native Python expressions can directly enable static graph mode execution based on Python control flow keywords, making the programming unification of dynamic and static graphs higher. At the same time, developers can flexibly control Python code fragments in dynamic and static graph modes based on MindSpore's interfaces. That is, local functions can be executed in static graph mode ([mindspore.jit](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.jit.html)) while other functions are executed in dynamic graph mode. This allows developers to flexibly specify function fragments for static graph optimization and acceleration when interleaving with common Python libraries and custom Python functions, without sacrificing the programming ease of interleaved execution. ### Distributed Parallel Computing @@ -71,7 +71,7 @@ At the same time, MindSpore also provides various parallel strategies such as pi Based on compilation technology, MindSpore provides rich hardware-independent optimizations such as IR fusion, algebraic simplification, constant folding, and common subexpression elimination. At the same time, it also provides various hardware optimization capabilities for different hardware such as NPU and GPU, thereby better leveraging the large-scale computational acceleration capabilities of hardware. -#### [Graph-Algorithm Fusion](https://www.mindspore.cn/docs/en/master/features/compile/multi_level_compilation.html#graph-kernel-fusion) +#### [Graph-Algorithm Fusion](https://www.mindspore.cn/docs/en/br_base/features/compile/multi_level_compilation.html#graph-kernel-fusion) Mainstream AI computing frameworks like MindSpore typically define operators from the perspective of developer understanding and ease of use. Each operator carries varying amounts of computation and computational complexity. However, from a hardware execution perspective, this natural operator computational division based on the developer's perspective is not efficient and cannot fully utilize hardware computational capabilities. 
This is mainly reflected in: @@ -101,6 +101,6 @@ MindSpore is an AI framework that integrates training and inference, supporting According to actual execution environments and business requirements, MindSpore provides multiple specification versions, supporting deployment on cloud, servers, mobile and other embedded devices, and ultra-lightweight devices such as earphones. -### [Third-Party Hardware Integration](https://www.mindspore.cn/docs/en/master/features/runtime/pluggable_device.html) +### [Third-Party Hardware Integration](https://www.mindspore.cn/docs/en/br_base/features/runtime/pluggable_device.html) Based on the unified MindIR, MindSpore has built an open AI architecture that supports third-party chip plugins, standardization, and low-cost rapid integration, which can connect to GPU series chips as well as various DSA chips. MindSpore provides two chip integration methods: Kernel mode and Graph mode, allowing chip manufacturers to choose the integration method according to their own characteristics. diff --git a/docs/mindspore/source_en/features/parallel/auto_parallel.rst b/docs/mindspore/source_en/features/parallel/auto_parallel.rst index 897b5ae711..4afb421508 100644 --- a/docs/mindspore/source_en/features/parallel/auto_parallel.rst +++ b/docs/mindspore/source_en/features/parallel/auto_parallel.rst @@ -1,8 +1,8 @@ Automatic Parallel Strategy Search ==================================== -.. image:: https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg - :target: https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/features/parallel/auto_parallel.rst +.. image:: https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg + :target: https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/features/parallel/auto_parallel.rst :alt: View Source On Gitee The auto-parallel mode allows the user to automatically build the cost model and find a parallel strategy with shorter training time without paying attention to the strategy configuration. Currently MindSpore supports the following two different auto-parallel schemes: @@ -69,9 +69,9 @@ The sharding strategy propagation algorithm means that the user only needs to ma Related interfaces: -1. `mindspore.parallel.auto_parallel.AutoParallel(net, parallel_mode="sharding_propagation") `_: Set the parallel mode and select the Strategy Propagation Algorithm via ``parallel_mode``. +1. `mindspore.parallel.auto_parallel.AutoParallel(net, parallel_mode="sharding_propagation") `_: Set the parallel mode and select the Strategy Propagation Algorithm via ``parallel_mode``. -2. `mindspore.nn.Cell.shard() `_ and `mindspore.ops.Primitive.shard() `_ : Specifies the operator sharding strategy, and the strategy for the rest of the operators is derived by the propagation algorithm. Currently the ``mindspore.nn.Cell.shard()`` interface can be used in PyNative mode and Graph mode; The ``mindspore.ops.Primitive.shard()`` interface can only be used in Graph mode. +2. `mindspore.nn.Cell.shard() `_ and `mindspore.ops.Primitive.shard() `_ : Specifies the operator sharding strategy, and the strategy for the rest of the operators is derived by the propagation algorithm. Currently the ``mindspore.nn.Cell.shard()`` interface can be used in PyNative mode and Graph mode; The ``mindspore.ops.Primitive.shard()`` interface can only be used in Graph mode. 
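A compact sketch of how these two interfaces work together is shown below; the network, the ``((2, 1), (1, 4))`` strategy and the implied 8-card layout are illustrative assumptions only.

.. code-block:: python

    from mindspore import nn, ops
    from mindspore.parallel.auto_parallel import AutoParallel

    class MatMulNet(nn.Cell):
        def __init__(self):
            super().__init__()
            self.matmul = ops.MatMul()
            # Manually configure only the key operator; strategies for the
            # remaining operators are derived by the propagation algorithm.
            self.matmul.shard(((2, 1), (1, 4)))

        def construct(self, x, w):
            return self.matmul(x, w)

    # Wrap the top-level network and select the sharding propagation algorithm.
    parallel_net = AutoParallel(MatMulNet(), parallel_mode="sharding_propagation")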
In summary, the sharding strategy propagation algorithm requires the user to manually configure the sharding strategy of the key operator. diff --git a/docs/mindspore/source_en/features/parallel/data_parallel.md b/docs/mindspore/source_en/features/parallel/data_parallel.md index 3fa7ff6c14..2947296067 100644 --- a/docs/mindspore/source_en/features/parallel/data_parallel.md +++ b/docs/mindspore/source_en/features/parallel/data_parallel.md @@ -1,6 +1,6 @@ # Data Parallel -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/features/parallel/data_parallel.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/features/parallel/data_parallel.md) ## Overview @@ -10,24 +10,24 @@ Data parallel is the most commonly used parallel training approach for accelerat Related interfaces are as follows: -1. [mindspore.set_auto_parallel_context(parallel_mode=ParallelMode.DATA_PARALLEL)](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.set_auto_parallel_context.html): Set the data parallel mode. -2. [mindspore.nn.DistributedGradReducer()](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.DistributedGradReducer.html): Perform multi-card gradient aggregation. +1. [mindspore.set_auto_parallel_context(parallel_mode=ParallelMode.DATA_PARALLEL)](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.set_auto_parallel_context.html): Set the data parallel mode. +2. [mindspore.nn.DistributedGradReducer()](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.DistributedGradReducer.html): Perform multi-card gradient aggregation. ## Overall Process -![Overall Process](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/docs/mindspore/source_zh_cn/features/images/data_parallel.png) +![Overall Process](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/docs/mindspore/source_zh_cn/features/images/data_parallel.png) 1. Environmental dependencies - Before starting parallel training, the communication resources are initialized by calling the [mindspore.communication.init](https://www.mindspore.cn/docs/en/master/api_python/communication/mindspore.communication.init.html) interface and the global communication group `WORLD_COMM_GROUP` is automatically created. The communication group enables communication operators to distribute messages between cards and machines, and the global communication group is the largest one, including all devices in current training. The current mode is set to data parallel mode by calling `mindspore.set_auto_parallel_context(parallel_mode=ParallelMode.DATA_PARALLEL)`. + Before starting parallel training, the communication resources are initialized by calling the [mindspore.communication.init](https://www.mindspore.cn/docs/en/br_base/api_python/communication/mindspore.communication.init.html) interface and the global communication group `WORLD_COMM_GROUP` is automatically created. The communication group enables communication operators to distribute messages between cards and machines, and the global communication group is the largest one, including all devices in current training. 
The current mode is set to data parallel mode by calling `mindspore.set_auto_parallel_context(parallel_mode=ParallelMode.DATA_PARALLEL)`. 2. Data distribution - The core of data parallel lies in splitting the dataset in sample dimensions and sending it down to different cards. In all dataset loading interfaces provided by the [mindspore.dataset](https://www.mindspore.cn/docs/en/master/api_python/mindspore.dataset.html) module, there are `num_shards` and `shard_id` parameters which are used to split the dataset into multiple copies and cycle through the samples in a way that collects `batch` data to their respective cards, and will start from the beginning when there is a shortage of data. + The core of data parallel lies in splitting the dataset in sample dimensions and sending it down to different cards. In all dataset loading interfaces provided by the [mindspore.dataset](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.html) module, there are `num_shards` and `shard_id` parameters which are used to split the dataset into multiple copies and cycle through the samples in a way that collects `batch` data to their respective cards, and will start from the beginning when there is a shortage of data. 3. Network composition - The data parallel network is written in a way that does not differ from the single-card network, due to the fact that during forward propagation & backward propagation the models of each card are executed independently from each other, only the same network structure is maintained. The only thing we need to pay special attention to is that in order to ensure the training synchronization between cards, the corresponding network parameter initialization values should be the same. In `DATA_PARALLEL` mode, we can use [mindspore.set_seed](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.set_seed.html) to set the seed or enable `parameter_broadcast` in `mindspore.set_auto_parallel_context` to achieve the same initialization of weights between multiple cards. + The data parallel network is written in a way that does not differ from the single-card network, due to the fact that during forward propagation & backward propagation the models of each card are executed independently from each other, only the same network structure is maintained. The only thing we need to pay special attention to is that in order to ensure the training synchronization between cards, the corresponding network parameter initialization values should be the same. In `DATA_PARALLEL` mode, we can use [mindspore.set_seed](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.set_seed.html) to set the seed or enable `parameter_broadcast` in `mindspore.set_auto_parallel_context` to achieve the same initialization of weights between multiple cards. 4. 
Gradient aggregation diff --git a/docs/mindspore/source_en/features/parallel/operator_parallel.md b/docs/mindspore/source_en/features/parallel/operator_parallel.md index b01560c0f9..e85c0792c1 100644 --- a/docs/mindspore/source_en/features/parallel/operator_parallel.md +++ b/docs/mindspore/source_en/features/parallel/operator_parallel.md @@ -1,6 +1,6 @@ # Operator-level Parallelism -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/features/parallel/operator_parallel.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/features/parallel/operator_parallel.md) ## Overview @@ -8,9 +8,9 @@ With the development of deep learning, network models are becoming larger and la Operator-level parallelism is achieved by slicing the tensor involved in each operator in the network model. Logical data parallelism is used when only the data dimension is sliced, while logical model parallelism is used when only the model dimension is silced. The training of large models is enabled by reducing the memory consumption of a single device. -MindSpore provides two operator-level parallelism capabilities: [Operator-level Parallelism](#basic-principle) and [Higher-order Operator-level Parallelism](#higher-order-operator-level-parallelism). Operator-level Parallelism uses simple tensor dimension splitting strategies to describe tensor distribution, meeting the requirements of most common scenarios. Higher-order Operator-level Parallelism enables complex partitioning scenarios by opening device arrangement descriptions, supporting: Non-contiguous device allocation, Multi-dimensional hybrid partitioning and so on. Both ops and mint operators are supported for the operator-level parallel capability of the two granularities. This chapter only introduces the operator-level parallelism and high-order operator-level parallelism based on ops operators. For the configuration method of operator-level parallelism based on mint operators, please refer to the mint Operator Parallel Practice and Higher-Order mint Operator Parallel Practice in the [Operator-level Parallelism Tutorial](https://www.mindspore.cn/tutorials/en/master/parallel/operator_parallel.html). +MindSpore provides two operator-level parallelism capabilities: [Operator-level Parallelism](#basic-principle) and [Higher-order Operator-level Parallelism](#higher-order-operator-level-parallelism). Operator-level Parallelism uses simple tensor dimension splitting strategies to describe tensor distribution, meeting the requirements of most common scenarios. Higher-order Operator-level Parallelism enables complex partitioning scenarios by opening device arrangement descriptions, supporting: Non-contiguous device allocation, Multi-dimensional hybrid partitioning and so on. Both ops and mint operators are supported for the operator-level parallel capability of the two granularities. This chapter only introduces the operator-level parallelism and high-order operator-level parallelism based on ops operators. 
For the configuration method of operator-level parallelism based on mint operators, please refer to the mint Operator Parallel Practice and Higher-Order mint Operator Parallel Practice in the [Operator-level Parallelism Tutorial](https://www.mindspore.cn/tutorials/en/br_base/parallel/operator_parallel.html). -For a list of operators that currently support parallelism, see [Usage Constraints During Operator Parallel](https://www.mindspore.cn/docs/en/master/api_python/operator_list_parallel.html). +For a list of operators that currently support parallelism, see [Usage Constraints During Operator Parallel](https://www.mindspore.cn/docs/en/br_base/api_python/operator_list_parallel.html). > Hardware platforms supported by the operator-level parallel model include Ascend, GPU, and need to be run in Graph mode. @@ -77,15 +77,15 @@ The configuration of operator-level parallelism in MindSpore is implemented thro To cope with these complex scenarios, this tutorial introduces a higher-order operator-level parallel configuration method with an open device arrangement description. -[Operator-level Parallelism](https://www.mindspore.cn/tutorials/en/master/parallel/operator_parallel.html) describes MindSpore basic slicing logic for tensors, but cannot express all the slicing scenarios. For example, for a 2D tensor "[[a0, a1, a2, a3], [a4, a5, a6, a7]]", the tensor layout is shown below: +[Operator-level Parallelism](https://www.mindspore.cn/tutorials/en/br_base/parallel/operator_parallel.html) describes MindSpore basic slicing logic for tensors, but cannot express all the slicing scenarios. For example, for a 2D tensor "[[a0, a1, a2, a3], [a4, a5, a6, a7]]", the tensor layout is shown below: -![image](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/docs/mindspore/source_zh_cn/features/parallel/images/advanced_operator_parallel_view1.PNG) +![image](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/docs/mindspore/source_zh_cn/features/parallel/images/advanced_operator_parallel_view1.PNG) *Figure: Schematic of 2D tensor arrangement* It can be seen that the 0-axis of the tensor, e.g. "[a0, a1, a2, a3]" slices to the discontinuous card "[Rank0, Rank4, Rank2, Rank6]" and the tensor is sliced according to strategy=(2, 4), the arrangement should be as follows: -![image](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/docs/mindspore/source_zh_cn/features/parallel/images/advanced_operator_parallel_view2.PNG) +![image](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/docs/mindspore/source_zh_cn/features/parallel/images/advanced_operator_parallel_view2.PNG) *Figure: Schematic of a 2D tensor arranged according to a sharding strategy* @@ -93,9 +93,9 @@ Therefore, directly slicing the input and output tensor of the operator accordin ### Interface Configuration -In order to express sharding as in the above scenario, functional extensions are made to the [shard](https://www.mindspore.cn/docs/en/master/api_python/parallel/mindspore.parallel.shard.html) interface. +In order to express sharding as in the above scenario, functional extensions are made to the [shard](https://www.mindspore.cn/docs/en/br_base/api_python/parallel/mindspore.parallel.shard.html) interface. -The parameters in_strategy and out_strategy both additionally receive the new quantity type tuple(Layout) type. 
[Layout](https://www.mindspore.cn/docs/en/master/api_python/parallel/mindspore.parallel.Layout.html) is initialized using the device matrix, while requiring an alias for each axis of the device matrix. For example: "layout = Layout((8, 4, 4), name = ("dp", "sp", "mp"))" means that the device has 128 cards in total, which are arranged in the shape of (8, 4, 4), and aliases "dp", "sp", "mp" are given to each axis. +The parameters in_strategy and out_strategy both additionally receive the new quantity type tuple(Layout) type. [Layout](https://www.mindspore.cn/docs/en/br_base/api_python/parallel/mindspore.parallel.Layout.html) is initialized using the device matrix, while requiring an alias for each axis of the device matrix. For example: "layout = Layout((8, 4, 4), name = ("dp", "sp", "mp"))" means that the device has 128 cards in total, which are arranged in the shape of (8, 4, 4), and aliases "dp", "sp", "mp" are given to each axis. For the specific meaning of Layout and the configuration derivation method, please refer to the following two technical documents: @@ -121,7 +121,7 @@ a_strategy = layout("mp", ("sp", "dp")) It can be seen that the "[a0, a1, a2, a3]" of the tensor a is sliced twice to the "sp" and "dp" axes of the device, so that the result comes out as: -![image](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/docs/mindspore/source_zh_cn/features/parallel/images/advanced_operator_parallel_view1.PNG) +![image](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/docs/mindspore/source_zh_cn/features/parallel/images/advanced_operator_parallel_view1.PNG) The following is exemplified by a concrete example in which the user computes a two-dimensional matrix multiplication over 8 cards: `Y = (X * W)` , where the devices are organized according to `2 * 2 * 2`, and the cut of X coincides with the cut of the tensor a. The code is as follows: diff --git a/docs/mindspore/source_en/features/parallel/optimizer_parallel.md b/docs/mindspore/source_en/features/parallel/optimizer_parallel.md index d5d069cdde..1f3e624068 100644 --- a/docs/mindspore/source_en/features/parallel/optimizer_parallel.md +++ b/docs/mindspore/source_en/features/parallel/optimizer_parallel.md @@ -1,6 +1,6 @@ # Optimizer Parallel -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/features/parallel/optimizer_parallel.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/features/parallel/optimizer_parallel.md) ## Overview @@ -18,11 +18,11 @@ In either mode, the optimizer parallelism does not affect the compute graph of t Related interfaces: -1. [mindspore.parallel.auto_parallel.AutoParallel(network, parallel_mode="semi_auto")](https://www.mindspore.cn/docs/en/master/api_python/parallel/mindspore.parallel.auto_parallel.AutoParallel.html): Encapsulates the specified parallel mode via static graph parallelism, where `network` is the top-level `Cell` or function to be encapsulated, and `parallel_mode` takes the value `semi_auto`, indicating a semi-automatic parallel mode. The interface returns a `Cell` encapsulated with parallel configuration. +1. 
[mindspore.parallel.auto_parallel.AutoParallel(network, parallel_mode="semi_auto")](https://www.mindspore.cn/docs/en/br_base/api_python/parallel/mindspore.parallel.auto_parallel.AutoParallel.html): Encapsulates the specified parallel mode via static graph parallelism, where `network` is the top-level `Cell` or function to be encapsulated, and `parallel_mode` takes the value `semi_auto`, indicating a semi-automatic parallel mode. The interface returns a `Cell` encapsulated with parallel configuration. -2. [mindspore.parallel.auto_parallel.AutoParallel.hsdp(shard_size=-1, threshold=64, optimizer_level="level1")](https://www.mindspore.cn/docs/en/master/api_python/parallel/mindspore.parallel.auto_parallel.AutoParallel.html#mindspore.parallel.auto_parallel.AutoParallel.hsdp): Configures and enables optimizer parallelism through this interface. `shard_size` specifies the size of the communication group for optimizer weight sharding. `threshold` defines the minimum memory size (in KB) required for a parameter to be sharded. Parameters smaller than this threshold will not be sharded during parameter partitioning. `optimizer_level` is used to specify the splitting level for optimizer sharding. When optimizer_level=`level1`, splitting is performed on weights and optimizer state. When optimizer_level=`level2`, splitting is performed on weights, optimizer state, and gradients. When optimizer_level=`level3`, splitting is performed on weights, optimizer state, and gradients; additionally, before the backward pass, an extra allgather communication is applied to the weights to release the memory used by the forward-pass allgather. +2. [mindspore.parallel.auto_parallel.AutoParallel.hsdp(shard_size=-1, threshold=64, optimizer_level="level1")](https://www.mindspore.cn/docs/en/br_base/api_python/parallel/mindspore.parallel.auto_parallel.AutoParallel.html#mindspore.parallel.auto_parallel.AutoParallel.hsdp): Configures and enables optimizer parallelism through this interface. `shard_size` specifies the size of the communication group for optimizer weight sharding. `threshold` defines the minimum memory size (in KB) required for a parameter to be sharded. Parameters smaller than this threshold will not be sharded during parameter partitioning. `optimizer_level` is used to specify the splitting level for optimizer sharding. When optimizer_level=`level1`, splitting is performed on weights and optimizer state. When optimizer_level=`level2`, splitting is performed on weights, optimizer state, and gradients. When optimizer_level=`level3`, splitting is performed on weights, optimizer state, and gradients; additionally, before the backward pass, an extra allgather communication is applied to the weights to release the memory used by the forward-pass allgather. -3. [mindspore.nn.Cell.set_comm_fusion(fusion_type=NUM)](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.set_comm_fusion): In automatic/semi-automatic mode, each parameter generates a corresponding AllGather operation and ReduceScatter operation. These communication operators are automatically inserted by the auto-parallel framework. However, as the number of parameters increases, the number of corresponding communication operators also increases, and the scheduling and startup of operators generated by communication operations incur more overhead.
Therefore, it is possible to manually configure fusion markers NUM for the AllGather and ReduceScatter operations corresponding to parameters within each `Cell` through the `set_comm_fusion` method provided by `Cell` in order to improve communication efficiency. MindSpore will fuse the communication operators corresponding to the same NUM parameters to minimize communication overhead. +3. [mindspore.nn.Cell.set_comm_fusion(fusion_type=NUM)](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.set_comm_fusion): In automatic/semi-automatic mode, each parameter generates a corresponding AllGather operation and ReduceScatter operation. These communication operators are automatically inserted by the auto-parallel framework. However, as the number of parameters increases, the number of corresponding communication operators also increases, and the scheduling and startup of operators generated by communication operations incur more overhead. Therefore, it is possible to manually configure fusion markers NUM for the AllGather and ReduceScatter operations corresponding to parameters within each `Cell` through the `set_comm_fusion` method provided by `Cell` in order to improve communication efficiency. MindSpore will fuse the communication operators corresponding to the same NUM parameters to minimize communication overhead. ## Basic Principles @@ -34,7 +34,7 @@ If you want to implement parallel computing for the optimizer, there are two imp Weights grouping does an inter-layer division of the parameters and gradients within the optimizer, and the general training flow is shown in Figure 1. The parameters and gradients are grouped onto different cards to be updated, and then the updated weights are shared among devices through a communication broadcast operation. The memory and performance gains of the solution depend on the group with the largest proportion of parameters. When the parameters are divided evenly, the theoretical positive gains are (N-1)/N of optimizer runtime and dynamic memory, and (N-1)/N of the memory size for optimizer state parameters, where N denotes the number of devices. The negative gain introduced is the communication time spent sharing the network weights. -![images](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/docs/mindspore/source_zh_cn/features/parallel/images/optimizer_parallel_image_0_zh.png) +![images](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/docs/mindspore/source_zh_cn/features/parallel/images/optimizer_parallel_image_0_zh.png) *Figure 1: Schematic diagram of the parameter grouping training process* @@ -46,7 +46,7 @@ Another way to implement parameter slicing is to do intra-layer division of para Combining the above characteristics, the implementation scheme of parameter slicing is shown in Figure 2.
-![image](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/docs/mindspore/source_zh_cn/features/parallel/images/optimizer_parallel_image_1_zh.png) +![image](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/docs/mindspore/source_zh_cn/features/parallel/images/optimizer_parallel_image_1_zh.png) *Figure 2: Schematic diagram of the parameter slicing training process* diff --git a/docs/mindspore/source_en/features/parallel/pipeline_parallel.md b/docs/mindspore/source_en/features/parallel/pipeline_parallel.md index e42b37461b..685fa2a93a 100644 --- a/docs/mindspore/source_en/features/parallel/pipeline_parallel.md +++ b/docs/mindspore/source_en/features/parallel/pipeline_parallel.md @@ -1,6 +1,6 @@ # Pipeline Parallel -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/features/parallel/pipeline_parallel.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/features/parallel/pipeline_parallel.md) ## Overview @@ -10,15 +10,15 @@ In recent years, the scale of neural networks has increased exponentially. Limit Related interfaces: -1. [mindspore.parallel.auto_parallel.AutoParallel(network, parallel_mode="semi_auto")](https://www.mindspore.cn/docs/en/master/api_python/parallel/mindspore.parallel.auto_parallel.AutoParallel.html): Encapsulates the specified parallel mode via static graph parallelism, where `network` is the top-level `Cell` or function to be encapsulated, and `parallel_mode` takes the value `semi_auto`, indicating a semi-automatic parallel mode. The interface returns a `Cell` encapsulated with parallel configuration. +1. [mindspore.parallel.auto_parallel.AutoParallel(network, parallel_mode="semi_auto")](https://www.mindspore.cn/docs/en/br_base/api_python/parallel/mindspore.parallel.auto_parallel.AutoParallel.html): Encapsulates the specified parallel mode via static graph parallelism, where `network` is the top-level `Cell` or function to be encapsulated, and `parallel_mode` takes the value `semi_auto`, indicating a semi-automatic parallel mode. The interface returns a `Cell` encapsulated with parallel configuration. -2. [mindspore.parallel.auto_parallel.AutoParallel.pipeline(stages=1, output_broadcast=False, interleave=False, scheduler='1f1b')](https://www.mindspore.cn/docs/en/master/api_python/parallel/mindspore.parallel.auto_parallel.AutoParallel.html#mindspore.parallel.auto_parallel.AutoParallel.pipeline): Configures pipeline parallelism settings. `stages` specifies the total number of partitions for pipeline parallelism. If using `WithLossCell` to encapsulate `net`, the name of the `Cell` will be changed and the `_backbone` prefix will be added. `output_broadcast` determines whether to broadcast the output of the final pipeline stage to all other stages during inference. `interleave` shows that whether to enable interleaving scheduling.`scheduler` defines the pipeline scheduling strategy. Supported values: `gpipe`/`1f1b`/`seqpipe`/`seqvpp`/`seqsmartvpp`/`zero_bubble_v`. +2. 
[mindspore.parallel.auto_parallel.AutoParallel.pipeline(stages=1, output_broadcast=False, interleave=False, scheduler='1f1b')](https://www.mindspore.cn/docs/en/br_base/api_python/parallel/mindspore.parallel.auto_parallel.AutoParallel.html#mindspore.parallel.auto_parallel.AutoParallel.pipeline): Configures pipeline parallelism settings. `stages` specifies the total number of partitions for pipeline parallelism. If using `WithLossCell` to encapsulate `net`, the name of the `Cell` will be changed and the `_backbone` prefix will be added. `output_broadcast` determines whether to broadcast the output of the final pipeline stage to all other stages during inference. `interleave` indicates whether to enable interleaved scheduling. `scheduler` defines the pipeline scheduling strategy. Supported values: `gpipe`/`1f1b`/`seqpipe`/`seqvpp`/`seqsmartvpp`/`zero_bubble_v`. -3. [mindspore.parallel.Pipeline(network, micro_size=1, stage_config={"cell1":0, "cell2":1})](https://www.mindspore.cn/docs/en/master/api_python/parallel/mindspore.parallel.nn.Pipeline.html): Pipeline parallelism requires wrapping the `network` with an additional layer of `Pipeline`. `micro_size` specifies the number of MicroBatches, which are finer-grained splits of a MiniBatch to improve hardware utilization. If using `WithLossCell` to encapsulate `network`, the name of the `Cell` will be changed and the `_backbone` prefix will be added. The final loss is the accumulation of losses from all MicroBatches. `stage_config` indicates the stage assignment for each Cell in the network. `micro_size` must be greater than or equal to the number of `stages`. +3. [mindspore.parallel.Pipeline(network, micro_size=1, stage_config={"cell1":0, "cell2":1})](https://www.mindspore.cn/docs/en/br_base/api_python/parallel/mindspore.parallel.nn.Pipeline.html): Pipeline parallelism requires wrapping the `network` with an additional layer of `Pipeline`. `micro_size` specifies the number of MicroBatches, which are finer-grained splits of a MiniBatch to improve hardware utilization. If using `WithLossCell` to encapsulate `network`, the name of the `Cell` will be changed and the `_backbone` prefix will be added. The final loss is the accumulation of losses from all MicroBatches. `stage_config` indicates the stage assignment for each Cell in the network. `micro_size` must be greater than or equal to the number of `stages`. -4. [mindspore.parallel.PipelineGradReducer(parameters, scale_sense=1.0, opt_shard=None)](https://www.mindspore.cn/docs/en/master/api_python/parallel/mindspore.parallel.nn.PipelineGradReducer.html): Pipeline parallelism requires using `PipelineGradReducer` for gradient reduction, because the output of pipeline parallelism is obtained by summing the outputs of several micro-batches, and the gradients are accumulated in the same way. +4. [mindspore.parallel.PipelineGradReducer(parameters, scale_sense=1.0, opt_shard=None)](https://www.mindspore.cn/docs/en/br_base/api_python/parallel/mindspore.parallel.nn.PipelineGradReducer.html): Pipeline parallelism requires using `PipelineGradReducer` for gradient reduction, because the output of pipeline parallelism is obtained by summing the outputs of several micro-batches, and the gradients are accumulated in the same way. -5. [mindspore.parallel.sync_pipeline_shared_parameters(net)](https://www.mindspore.cn/docs/en/master/api_python/parallel/mindspore.parallel.sync_pipeline_shared_parameters.html): Synchronize pipeline parallel stage shared parameters. +5.
[mindspore.parallel.sync_pipeline_shared_parameters(net)](https://www.mindspore.cn/docs/en/br_base/api_python/parallel/mindspore.parallel.sync_pipeline_shared_parameters.html): Synchronize pipeline parallel stage shared parameters. ## Basic Principle @@ -26,7 +26,7 @@ Pipeline parallel is the splitting of operators in a neural network into multipl As shown in Figure 1, the network of 4 layers of MatMul is split into 4 stages and distributed to 4 devices. In forward calculations, each machine sends the result to the next machine through the communication operator after calculating the MatMul on the machine, and at the same time, the next machine receives (Receive) the MatMul result of the previous machine through the communication operator, and starts to calculate the MatMul on the machine; In reverse calculation, after the gradient of the last machine is calculated, the result is sent to the previous machine, and at the same time, the previous machine receives the gradient result of the last machine and begins to calculate the reverse of the current machine. -![](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/docs/mindspore/source_zh_cn/features/parallel/images/pipeline_parallel_image_0_zh.png) +![](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/docs/mindspore/source_zh_cn/features/parallel/images/pipeline_parallel_image_0_zh.png) *Figure 1: Schematic diagram of graph splitting in pipeline parallel* @@ -36,7 +36,7 @@ Simply splitting the model onto multiple devices does not bring about a performa As shown in Figure 2. The small batches are cut into 4 micro-batches, and the 4 micro-batches are executed on 4 groups to form a pipeline. The gradient aggregation of the micro-batch is used to update the parameters, where each device only stores and updates the parameters of the corresponding group. where the white ordinal number represents the index of the micro-batch. -![](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/docs/mindspore/source_zh_cn/features/parallel/images/pipeline_parallel_image_1_zh.png) +![](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/docs/mindspore/source_zh_cn/features/parallel/images/pipeline_parallel_image_1_zh.png) *Figure 2: Schematic diagram of a pipeline parallel execution timeline with MicroBatch* @@ -46,7 +46,7 @@ In MindSpore's pipeline parallel implementation, the execution order has been ad As shown in Figure 3, the reverse of the MicroBatch numbered 0 is performed immediately after its forward execution, so that the memory of the intermediate result of the numbered 0 MicroBatch is freed earlier (compared to Figure 2), thus ensuring that the peak memory usage is lower than in the way of Figure 2. -![](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/docs/mindspore/source_zh_cn/features/parallel/images/pipeline_parallel_image_2_zh.png) +![](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/docs/mindspore/source_zh_cn/features/parallel/images/pipeline_parallel_image_2_zh.png) *Figure 3: MindSpore Pipeline Parallel Execution Timeline Diagram* @@ -54,7 +54,7 @@ As shown in Figure 3, the reverse of the MicroBatch numbered 0 is performed imme In order to improve the efficiency of pipeline parallelism and reduce the proportion of bubbles, Megatron LM proposes a new pipeline parallel scheduling strategy called "interleaved pipeline". 
Traditional pipeline parallelism typically places several consecutive model layers (such as Transformer layers) on a stage, as shown in Figure 3. In the scheduling of interleaved pipeline, each stage performs interleaved calculations on non-continuous model layers to further reduce the proportion of bubbles with more communication, as shown in Figure 4. For example, in traditional pipeline parallelism, each stage has 2 model layers, namely: stage 0 has layers 0 and 1, stage 1 has layers 2 and 3, stage 2 has layers 4 and 5, and stage 3 has layers 6 and 7, while in interleaved pipeline, stage 0 has layers 0 and 4, stage 1 has layers 1 and 5, stage 2 has layers 2 and 6, and stage 3 has layers 3 and 7. -![mpp2.png](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/docs/mindspore/source_zh_cn/features/parallel/images/megatron.png) +![mpp2.png](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/docs/mindspore/source_zh_cn/features/parallel/images/megatron.png) *Figure 4: Scheduler of Interleaved Pipeline* @@ -62,7 +62,7 @@ In order to improve the efficiency of pipeline parallelism and reduce the propor MindSpore has made memory optimization based on Megatron LM interleaved pipeline scheduling by moving some forward execution sequences back, as shown in Figure 5, which can accumulate less MicroBatch memory during memory peak hours. -![mpp2.png](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/docs/mindspore/source_zh_cn/features/parallel/images/mindspore.png) +![mpp2.png](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/docs/mindspore/source_zh_cn/features/parallel/images/mindspore.png) *Figure 5: MindSpore Scheduler of Interleaved Pipeline* @@ -70,6 +70,6 @@ MindSpore has made memory optimization based on Megatron LM interleaved pipeline As shown in Figure 6, zero_bubble_v pipeline parallelism further improves pipeline parallel efficiency and reduces bubble rate by dividing the backward computation into gradient computation and parameter update. For consecutive model layers, the stage value first increases and then decreases, the pipeline_segment of the first half of layers is 0, and the pipeline_segment of the second half of layers is 1. For example, for 8 layers, when the stage size is 4, stage0 has layer0 and layer7, stage1 has layer1 and layer6, stage2 has layer2 and layer5, stage 3 has layer3 and layer4, the pipeline_segment of layer0 to layer3 is 0, and the pipeline_segment of layer4 to layer7 is 1. 
-![mpp2.png](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/docs/mindspore/source_zh_cn/features/parallel/images/zero_bubble_v.png) +![mpp2.png](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/docs/mindspore/source_zh_cn/features/parallel/images/zero_bubble_v.png) *Figure 6: zero_bubble_v Pipeline Scheduler* diff --git a/docs/mindspore/source_en/features/runtime/memory_manager.md b/docs/mindspore/source_en/features/runtime/memory_manager.md index c51fcf785c..58bbc4795e 100644 --- a/docs/mindspore/source_en/features/runtime/memory_manager.md +++ b/docs/mindspore/source_en/features/runtime/memory_manager.md @@ -1,6 +1,6 @@ # Memory Management -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/features/runtime/memory_manager.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/features/runtime/memory_manager.md) ## Overview @@ -9,20 +9,20 @@ Device memory (hereinafter referred to as memory) is the most important resource 1. Memory pool serves as a base for memory management and can effectively avoid the overhead of frequent dynamic allocation of memory. 2. Memory reuse algorithm, as a core competency in memory management, needs to have efficient memory reuse results as well as minimal memory fragmentation. -![memory_manager](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/docs/mindspore/source_en/features/compile/images/multi_level_compilation/jit_level_memory_manage.png) +![memory_manager](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/docs/mindspore/source_en/features/compile/images/multi_level_compilation/jit_level_memory_manage.png) ## Interfaces -The memory management-related interfaces are detailed in [runtime interfaces](https://www.mindspore.cn/docs/en/master/api_python/mindspore.runtime.html#memory), of which the two most important are the memory settings interface and the memory fragmentation management interface: +The memory management-related interfaces are detailed in [runtime interfaces](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.runtime.html#memory), of which the two most important are the memory settings interface and the memory fragmentation management interface: -1. Memory settings interface: [mindspore.runtime.set_memory](https://www.mindspore.cn/docs/en/master/api_python/runtime/mindspore.runtime.set_memory.html#mindspore.runtime.set_memory), which sets the memory parameters to be managed by the memory pool and the memory reuse algorithm. -2. Memory fragmentation management interface: [environment variable MS_ALLOC_CONF](https://www.mindspore.cn/docs/en/master/api_python/env_var_list.html#graph-compilation-and-execution). The behavior is determined by whether the hardware driver can map virtual memory to physical memory: if it can, the feature is turned on by default; otherwise it is turned off by default. It can be forcibly turned off with `export MS_ALLOC_CONF="enable_vmm:false"`. +1.
Memory settings interface: [mindspore.runtime.set_memory](https://www.mindspore.cn/docs/en/br_base/api_python/runtime/mindspore.runtime.set_memory.html#mindspore.runtime.set_memory), which sets the memory parameters to be managed by the memory pool and the memory reuse algorithm. +2. Memory fragmentation management interface: [environment variable MS_ALLOC_CONF](https://www.mindspore.cn/docs/en/br_base/api_python/env_var_list.html#graph-compilation-and-execution). The behavior is determined by whether the hardware driver can map virtual memory to physical memory: if it can, the feature is turned on by default; otherwise it is turned off by default. It can be forcibly turned off with `export MS_ALLOC_CONF="enable_vmm:false"`. ## Memory Pool -The core idea of memory pool as a base for memory management is to pre-allocate a large block of contiguous memory, allocate it directly from the pool when applying for memory, and return it to the pool for reuse when releasing it, instead of frequently calling the memory application and release interfaces in the system, which reduces the overhead of frequent dynamic allocations, and improves system performance. MindSpore mainly uses the BestFit memory allocation algorithm, supports dynamic expansion of memory blocks and defragmentation, and sets the initialization parameters of the memory pool through the interface [mindspore.runtime.set_memory(init_size,increase_size,max_size)](https://www.mindspore.cn/docs/en/master/api_python/runtime/mindspore.runtime.set_memory.html) to control the dynamic expansion size and maximum memory usage. +The core idea of memory pool as a base for memory management is to pre-allocate a large block of contiguous memory, allocate it directly from the pool when applying for memory, and return it to the pool for reuse when releasing it, instead of frequently calling the memory application and release interfaces in the system, which reduces the overhead of frequent dynamic allocations, and improves system performance. MindSpore mainly uses the BestFit memory allocation algorithm, supports dynamic expansion of memory blocks and defragmentation, and sets the initialization parameters of the memory pool through the interface [mindspore.runtime.set_memory(init_size,increase_size,max_size)](https://www.mindspore.cn/docs/en/br_base/api_python/runtime/mindspore.runtime.set_memory.html) to control the dynamic expansion size and maximum memory usage. -![memory_pool](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/docs/mindspore/source_en/features/compile/images/multi_level_compilation/jit_level_memory_pool.png) +![memory_pool](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/docs/mindspore/source_en/features/compile/images/multi_level_compilation/jit_level_memory_pool.png) 1. Slicing operation: When memory is allocated, free areas are sorted according to their sizes, the first free area that meets the requirements is found, allocated on demand, the excess is cut, and a new block of free memory is inserted. 2. Merge operation: When memory is reclaimed, neighboring free memory blocks are reclaimed and merged into one large free memory block.
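As a minimal usage sketch of the two interfaces above (the concrete sizes and the `"GB"` string format are illustrative assumptions, not recommendations; consult the linked set_memory and MS_ALLOC_CONF pages for the authoritative options):

```python
import os

# Assumption: MS_ALLOC_CONF must be exported before MindSpore initializes the
# device; the value string mirrors the form quoted above.
os.environ["MS_ALLOC_CONF"] = "enable_vmm:false"

import mindspore as ms

# Illustrative memory-pool settings: a 2GB initial pool that grows in 2GB steps
# up to an 8GB ceiling.
ms.runtime.set_memory(init_size="2GB", increase_size="2GB", max_size="8GB")
```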
diff --git a/docs/mindspore/source_en/features/runtime/multilevel_pipeline.md b/docs/mindspore/source_en/features/runtime/multilevel_pipeline.md index 252fd765a0..014347196b 100644 --- a/docs/mindspore/source_en/features/runtime/multilevel_pipeline.md +++ b/docs/mindspore/source_en/features/runtime/multilevel_pipeline.md @@ -1,18 +1,18 @@ # Multi-level Pipeline -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/features/runtime/multilevel_pipeline.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/features/runtime/multilevel_pipeline.md) ## Overview Runtime scheduling for an operator mainly consists of the operations InferShape (including updating the shape), Resize (including tiling calculation and updating the memory size) and Launch (including memory request and release); an operator can only be dispatched to the device (NPU/GPU) after the host completes these operations. When the host processing speed cannot keep up with the operator execution time on the device, bubbles appear on the device side, so the device compute capability cannot be fully utilized, which affects the overall performance. For this reason, MindSpore proposed multi-stage pipelined dispatch at runtime to take full advantage of the host's multi-threading resources: these host operations are decomposed into separate operation units and dispatched in a pipelined fashion, which greatly improves the efficiency of host dispatch. -![rt_running](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/docs/mindspore/source_zh_cn/features/runtime/images/rt_running.png) +![rt_running](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/docs/mindspore/source_zh_cn/features/runtime/images/rt_running.png) ## Basic Principle The multi-stage pipeline is a key runtime performance optimization: it improves runtime scheduling efficiency through task decomposition and parallel, pipelined dispatch, giving full play to CPU multi-core performance. The main flow is as follows: -![rt_pipeline](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/docs/mindspore/source_en/features/compile/images/multi_level_compilation/jit_level_rt_pipeline.png) +![rt_pipeline](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/docs/mindspore/source_en/features/compile/images/multi_level_compilation/jit_level_rt_pipeline.png) 1. Task decomposition: operator scheduling is decomposed into three tasks: InferShape, Resize and Launch. 2. Queue creation: Create three queues, Infer Queue, Resize Queue and Launch Queue, for taking over the three tasks in step 1.
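The following is a purely conceptual sketch (plain Python threads and queues, not MindSpore's actual runtime code) of the idea behind this decomposition: once each stage runs in its own worker connected by a queue, the InferShape, Resize and Launch work of different operators can overlap on the host.

```python
# Conceptual illustration only: a three-stage host pipeline connected by queues.
import queue
import threading

def stage(in_q, out_q, work):
    while True:
        op = in_q.get()
        if op is None:          # sentinel: shut the pipeline down
            if out_q is not None:
                out_q.put(None)
            break
        work(op)                # this stage's share of the scheduling work
        if out_q is not None:
            out_q.put(op)       # hand the operator to the next stage

infer_q, resize_q, launch_q = queue.Queue(), queue.Queue(), queue.Queue()
threads = [
    threading.Thread(target=stage, args=(infer_q, resize_q, lambda op: print("InferShape", op))),
    threading.Thread(target=stage, args=(resize_q, launch_q, lambda op: print("Resize", op))),
    threading.Thread(target=stage, args=(launch_q, None, lambda op: print("Launch", op))),
]
for t in threads:
    t.start()
for op in ["matmul_0", "add_1", "relu_2"]:
    infer_q.put(op)             # the scheduler only enqueues; stages run concurrently
infer_q.put(None)
for t in threads:
    t.join()
```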
diff --git a/docs/mindspore/source_en/features/runtime/multistream_concurrency.md b/docs/mindspore/source_en/features/runtime/multistream_concurrency.md index db9291024e..8bab6a914d 100644 --- a/docs/mindspore/source_en/features/runtime/multistream_concurrency.md +++ b/docs/mindspore/source_en/features/runtime/multistream_concurrency.md @@ -1,6 +1,6 @@ # Multi-stream Concurrency -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/features/runtime/multistream_concurrency.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/features/runtime/multistream_concurrency.md) ## Overview @@ -10,7 +10,7 @@ During the training of large-scale deep learning models, the importance of commu Traditional multi-stream concurrency methods usually rely on manual configuration, which is not only cumbersome and error-prone, but also often difficult to achieve optimal concurrency when faced with complex computational graphs. MindSpore's automatic stream assignment feature automatically identifies and assigns concurrency opportunities in the computational graph by means of an intelligent algorithm, and assigns different operators to different streams for execution. This automated allocation process not only simplifies user operations, but also enables dynamic adjustment of stream allocation policies at runtime to accommodate different computing environments and resource conditions. -![multi_stream](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/docs/mindspore/source_en/features/compile/images/multi_level_compilation/jit_level_multi_stream.png) +![multi_stream](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/docs/mindspore/source_en/features/compile/images/multi_level_compilation/jit_level_multi_stream.png) The principles are as follows: @@ -22,7 +22,7 @@ The principles are as follows: In order to achieve the above effect of concurrent execution of multiple streams, multi-stream management is an important technique aimed at efficiently managing and scheduling the streams (Streams) on the computing devices to optimize the execution efficiency and resource utilization of the computational graph. Device multi-stream management ensures efficient concurrent execution of computing and communication tasks in a multi-computing resource environment through intelligent stream allocation and scheduling policies, thus improving overall performance. -![stream_manager](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/docs/mindspore/source_zh_cn/features/runtime/images/stream_manager.png) +![stream_manager](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/docs/mindspore/source_zh_cn/features/runtime/images/stream_manager.png) **Stream Manager** plays a central role. It is responsible for the creation, distribution and destruction of streams, ensuring that each computational task is executed on the appropriate stream. The stream manager schedules tasks to different streams based on the type and priority of the task and the load on the device to achieve optimal resource utilization and task concurrency. 
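As a toy illustration of the allocation idea only (ordinary Python, not the runtime's real stream manager), automatic stream assignment can be thought of as mapping communication and computation operators to different stream IDs and recording a synchronization event wherever a dependency crosses streams:

```python
# Toy sketch: assign operators to streams by type and mark cross-stream dependencies.
def assign_streams(ops, deps):
    """ops: {name: "compute" | "communication"}; deps: list of (producer, consumer)."""
    stream_of = {name: (1 if kind == "communication" else 0) for name, kind in ops.items()}
    # a dependency that crosses streams needs a record/wait event pair
    events = [(p, c) for p, c in deps if stream_of[p] != stream_of[c]]
    return stream_of, events

ops = {"allgather_w": "communication", "matmul": "compute", "relu": "compute"}
deps = [("allgather_w", "matmul"), ("matmul", "relu")]
streams, events = assign_streams(ops, deps)
print(streams)  # {'allgather_w': 1, 'matmul': 0, 'relu': 0}
print(events)   # [('allgather_w', 'matmul')]
```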
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_api_mapping.md b/docs/mindspore/source_en/note/api_mapping/pytorch_api_mapping.md index 81a23c966e..04b34417ed 100644 --- a/docs/mindspore/source_en/note/api_mapping/pytorch_api_mapping.md +++ b/docs/mindspore/source_en/note/api_mapping/pytorch_api_mapping.md @@ -1,6 +1,6 @@ # PyTorch and MindSpore API Mapping Table -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_api_mapping.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_api_mapping.md) Mapping between PyTorch APIs and MindSpore APIs, which is provided by the community. There may be differences in parameters, inputs, outputs, logic functions, and specific scenarios. For details, see the description of each API or the difference comparison provided. @@ -18,7 +18,7 @@ The API mapping is also consistent in the following exception scenarios: (2) MindSpore API does not support passing parameters of complex type. -**Exception Scenario 2**: Compared to MindSpore APIs, the extra parameters of the PyTorch API are [general difference parameters](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#general-difference-parameter-table). General difference parameters exist because PyTorch has some parameters that are added for non-functional purposes such as performance optimization, and the performance optimization mechanism of MindSpore is different from that of PyTorch. +**Exception Scenario 2**: Compared to MindSpore APIs, the extra parameters of the PyTorch API are [general difference parameters](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#general-difference-parameter-table). General difference parameters exist because PyTorch has some parameters that are added for non-functional purposes such as performance optimization, and the performance optimization mechanism of MindSpore is different from that of PyTorch. **Exception Scenario 3**: If the MindSpore API can implement the same functionality as the PyTorch API when it keeps the default configuration (that is, the user does not configure the extra parameters), the fact that the MindSpore API has more parameters than the PyTorch API is not considered a difference.
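For a concrete sense of what a "consistent" entry in the mapping table below means, here is a minimal sketch comparing one such pair; it assumes both PyTorch 2.1 and MindSpore are installed and that a backend supported by the mint interfaces (typically Ascend) is available.

```python
import numpy as np
import torch
import mindspore as ms
from mindspore import mint

x = np.array([-1.5, 0.0, 2.0], dtype=np.float32)

# PyTorch 2.1
print(torch.abs(torch.from_numpy(x)))   # tensor([1.5000, 0.0000, 2.0000])

# MindSpore counterpart from the table below
print(mint.abs(ms.Tensor(x)))           # [1.5 0.  2. ]
```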
@@ -50,472 +50,472 @@ Because of the framework mechanism, MindSpore does not provide the following par | PyTorch 2.1 APIs | MindSpore APIs | Descriptions | | ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | -| [torch.abs](https://pytorch.org/docs/2.1/generated/torch.abs.html) | [mindspore.mint.abs](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.abs.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.acos](https://pytorch.org/docs/2.1/generated/torch.acos.html) | [mindspore.mint.acos](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.acos.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.acosh](https://pytorch.org/docs/2.1/generated/torch.acosh.html)| [mindspore.mint.acosh](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.acosh.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.add](https://pytorch.org/docs/2.1/generated/torch.add.html)| [mindspore.mint.add](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.add.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.addbmm](https://pytorch.org/docs/2.1/generated/torch.addbmm.html)| [mindspore.mint.addbmm](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.addbmm.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.addmm](https://pytorch.org/docs/2.1/generated/torch.addmm.html)| [mindspore.mint.addmm](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.addmm.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.addmv](https://pytorch.org/docs/2.1/generated/torch.addmv.html)| [mindspore.mint.addmv](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.addmv.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.all](https://pytorch.org/docs/2.1/generated/torch.all.html#torch.all) | [mindspore.mint.all](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.all.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.allclose](https://pytorch.org/docs/2.1/generated/torch.allclose.html)| [mindspore.mint.allclose](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.allclose.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.amax](https://pytorch.org/docs/2.1/generated/torch.amax.html)| [mindspore.mint.amax](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.amax.html)|The functions are consistent, but the default value of dim is different.| -| 
[torch.amin](https://pytorch.org/docs/2.1/generated/torch.amin.html)| [mindspore.mint.amin](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.amin.html)|The functions are consistent, but the default value of dim is different.| -| [torch.any](https://pytorch.org/docs/2.1/generated/torch.any.html#torch.any) | [mindspore.mint.any](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.any.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.arange](https://pytorch.org/docs/2.1/generated/torch.arange.html)| [mindspore.mint.arange](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.arange.html)|The functions are consistent, but the default value of end is different.| -| [torch.arccos](https://pytorch.org/docs/2.1/generated/torch.arccos.html) | [mindspore.mint.arccos](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.arccos.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.arccosh](https://pytorch.org/docs/2.1/generated/torch.arccosh.html) | [mindspore.mint.arccosh](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.arccosh.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.arcsin](https://pytorch.org/docs/2.1/generated/torch.arcsin.html) | [mindspore.mint.arcsin](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.arcsin.html) | Consistent functions, inconsistent parameter names. | -| [torch.arcsinh](https://pytorch.org/docs/2.1/generated/torch.arcsinh.html) | [mindspore.mint.arcsinh](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.arcsinh.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.arctan](https://pytorch.org/docs/2.1/generated/torch.arctan.html) | [mindspore.mint.arctan](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.arctan.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.arctan2](https://pytorch.org/docs/2.1/generated/torch.arctan2.html)| [mindspore.mint.arctan2](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.arctan2.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.arctanh](https://pytorch.org/docs/2.1/generated/torch.arctanh.html) | [mindspore.mint.arctanh](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.arctanh.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.argmax](https://pytorch.org/docs/2.1/generated/torch.argmax.html) | [mindspore.mint.argmax](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.argmax.html) |[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.argmin](https://pytorch.org/docs/2.1/generated/torch.argmin.html) | 
[mindspore.mint.argmin](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.argmin.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.argsort](https://pytorch.org/docs/2.1/generated/torch.argsort.html)| [mindspore.mint.argsort](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.argsort.html)| [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.asin](https://pytorch.org/docs/2.1/generated/torch.asin.html) | [mindspore.mint.asin](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.asin.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.asinh](https://pytorch.org/docs/2.1/generated/torch.asinh.html)| [mindspore.mint.asinh](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.asinh.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.atan](https://pytorch.org/docs/2.1/generated/torch.atan.html) | [mindspore.mint.atan](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.atan.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.atan2](https://pytorch.org/docs/2.1/generated/torch.atan2.html) | [mindspore.mint.atan2](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.atan2.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.atanh](https://pytorch.org/docs/2.1/generated/torch.atanh.html)| [mindspore.mint.atanh](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.atanh.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.baddbmm](https://pytorch.org/docs/1.8.1/generated/torch.baddbmm.html) | [mindspore.mint.baddbmm](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.baddbmm.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.bernoulli](https://pytorch.org/docs/2.1/generated/torch.bernoulli.html)| [mindspore.mint.bernoulli](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.bernoulli.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.bincount](https://pytorch.org/docs/2.1/generated/torch.bincount.html)| [mindspore.mint.bincount](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.bincount.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.bitwise_and](https://pytorch.org/docs/2.1/generated/torch.bitwise_and.html) | [mindspore.mint.bitwise_and](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.bitwise_and.html) | 
[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.bitwise_or](https://pytorch.org/docs/2.1/generated/torch.bitwise_or.html) | [mindspore.mint.bitwise_or](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.bitwise_or.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.bitwise_xor](https://pytorch.org/docs/2.1/generated/torch.bitwise_xor.html) | [mindspore.mint.bitwise_xor](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.bitwise_xor.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.bmm](https://pytorch.org/docs/2.1/generated/torch.bmm.html) | [mindspore.mint.bmm](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.bmm.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.broadcast_to](https://pytorch.org/docs/2.1/generated/torch.broadcast_to.html) | [mindspore.mint.broadcast_to](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.broadcast_to.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.cat](https://pytorch.org/docs/2.1/generated/torch.cat.html) | [mindspore.mint.cat](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.cat.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.cdist](https://pytorch.org/docs/2.1/generated/torch.cdist.html)| [mindspore.mint.cdist](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.cdist.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.chunk](https://pytorch.org/docs/2.1/generated/torch.chunk.html)| [mindspore.mint.chunk](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.chunk.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.ceil](https://pytorch.org/docs/2.1/generated/torch.ceil.html) | [mindspore.mint.ceil](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.ceil.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.clamp](https://pytorch.org/docs/2.1/generated/torch.clamp.html) | [mindspore.mint.clamp](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.clamp.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.clone](https://pytorch.org/docs/2.1/generated/torch.clone.html)| [mindspore.mint.clone](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.clone.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| 
[torch.concat](https://pytorch.org/docs/2.1/generated/torch.concat.html)| [mindspore.mint.concat](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.concat.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.count_nonzero](https://pytorch.org/docs/2.1/generated/torch.count_nonzero.html)| [mindspore.mint.count_nonzero](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.count_nonzero.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.cos](https://pytorch.org/docs/2.1/generated/torch.cos.html) | [mindspore.mint.cos](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.cos.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.cosh](https://pytorch.org/docs/2.1/generated/torch.cosh.html) | [mindspore.mint.cosh](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.cosh.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.cross](https://pytorch.org/docs/2.1/generated/torch.cross.html) | [mindspore.mint.cross](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.cross.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.cummax](https://pytorch.org/docs/2.1/generated/torch.cummax.html) | [mindspore.mint.cummax](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.cummax.html) |[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.cummin](https://pytorch.org/docs/2.1/generated/torch.cummin.html) | [mindspore.mint.cummin](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.cummin.html) |[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.cumprod](https://pytorch.org/docs/2.1/generated/torch.cumprod.html)| [mindspore.mint.cumprod](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.cumprod.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.cumsum](https://pytorch.org/docs/2.1/generated/torch.cumsum.html) | [mindspore.mint.cumsum](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.cumsum.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.diff](https://pytorch.org/docs/2.1/generated/torch.diff.html)| [mindspore.mint.diff](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.diff.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.div](https://pytorch.org/docs/2.1/generated/torch.div.html) | [mindspore.mint.div](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.div.html) | 
[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.divide](https://pytorch.org/docs/2.1/generated/torch.divide.html) | [mindspore.mint.divide](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.divide.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.dot](https://pytorch.org/docs/2.1/generated/torch.dot.html)| [mindspore.mint.dot](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.dot.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.einsum](https://pytorch.org/docs/2.1/generated/torch.einsum.html)| [mindspore.mint.einsum](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.einsum.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.empty](https://pytorch.org/docs/2.1/generated/torch.empty.html)| [mindspore.mint.empty](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.empty.html)| [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.empty_like](https://pytorch.org/docs/2.1/generated/torch.empty_like.html)| [mindspore.mint.empty_like](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.empty_like.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.equal](https://pytorch.org/docs/2.1/generated/torch.equal.html)| [mindspore.mint.equal](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.equal.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.eq](https://pytorch.org/docs/2.1/generated/torch.eq.html) | [mindspore.mint.eq](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.eq.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.erf](https://pytorch.org/docs/2.1/generated/torch.erf.html) | [mindspore.mint.erf](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.erf.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.erfc](https://pytorch.org/docs/2.1/generated/torch.erfc.html) | [mindspore.mint.erfc](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.erfc.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.erfinv](https://pytorch.org/docs/2.1/generated/torch.erfinv.html) | [mindspore.mint.erfinv](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.erfinv.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.exp](https://pytorch.org/docs/2.1/generated/torch.exp.html) | 
[mindspore.mint.exp](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.exp.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.exp2](https://pytorch.org/docs/2.1/generated/torch.exp2.html) | [mindspore.mint.exp2](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.exp2.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.expm1](https://pytorch.org/docs/2.1/generated/torch.expm1.html) | [mindspore.mint.expm1](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.expm1.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.eye](https://pytorch.org/docs/2.1/generated/torch.eye.html) | [mindspore.mint.eye](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.eye.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.float_power](https://pytorch.org/docs/2.1/generated/torch.float_power.html)| [mindspore.mint.float_power](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.float_power.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.flatten](https://pytorch.org/docs/2.1/generated/torch.flatten.html) | [mindspore.mint.flatten](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.flatten.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.flip](https://pytorch.org/docs/2.1/generated/torch.flip.html) | [mindspore.mint.flip](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.flip.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.floor](https://pytorch.org/docs/2.1/generated/torch.floor.html) | [mindspore.mint.floor](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.floor.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.fmod](https://pytorch.org/docs/2.1/generated/torch.fmod.html)| [mindspore.mint.fmod](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.fmod.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.frac](https://pytorch.org/docs/2.1/generated/torch.frac.html)| [mindspore.mint.frac](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.frac.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.full](https://pytorch.org/docs/2.1/generated/torch.full.html) | [mindspore.mint.full](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.full.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| 
[torch.full_like](https://pytorch.org/docs/2.1/generated/torch.full_like.html)| [mindspore.mint.full_like](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.full_like.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.gather](https://pytorch.org/docs/2.1/generated/torch.gather.html)| [mindspore.mint.gather](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.gather.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.greater](https://pytorch.org/docs/2.1/generated/torch.greater.html) | [mindspore.mint.greater](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.greater.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.greater_equal](https://pytorch.org/docs/2.1/generated/torch.greater_equal.html) | [mindspore.mint.greater_equal](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.greater_equal.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.gt](https://pytorch.org/docs/2.1/generated/torch.gt.html) | [mindspore.mint.gt](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.gt.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.histc](https://pytorch.org/docs/2.1/generated/torch.histc.html)| [mindspore.mint.histc](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.histc.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.index_select](https://pytorch.org/docs/2.1/generated/torch.index_select.html) | [mindspore.mint.index_select](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.index_select.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.inverse](https://pytorch.org/docs/2.1/generated/torch.inverse.html) | [mindspore.mint.inverse](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.inverse.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.isclose](https://pytorch.org/docs/2.1/generated/torch.isclose.html) | [mindspore.mint.isclose](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.isclose.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.isfinite](https://pytorch.org/docs/2.1/generated/torch.isfinite.html) | [mindspore.mint.isfinite](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.isfinite.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.isinf](https://pytorch.org/docs/2.1/generated/torch.isinf.html)| 
[mindspore.mint.isinf](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.isinf.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.isneginf](https://pytorch.org/docs/2.1/generated/torch.isneginf.html)| [mindspore.mint.isneginf](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.isneginf.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.le](https://pytorch.org/docs/2.1/generated/torch.le.html) | [mindspore.mint.le](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.le.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.lerp](https://pytorch.org/docs/2.1/generated/torch.lerp.html)| [mindspore.mint.lerp](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.lerp.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.less](https://pytorch.org/docs/2.1/generated/torch.less.html) | [mindspore.mint.less](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.less.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.less_equal](https://pytorch.org/docs/2.1/generated/torch.less_equal.html) | [mindspore.mint.less_equal](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.less_equal.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.linspace](https://pytorch.org/docs/2.1/generated/torch.linspace.html) | [mindspore.mint.linspace](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.linspace.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.log](https://pytorch.org/docs/2.1/generated/torch.log.html) | [mindspore.mint.log](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.log.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.log2](https://pytorch.org/docs/2.1/generated/torch.log2.html)| [mindspore.mint.log2](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.log2.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.log10](https://pytorch.org/docs/2.1/generated/torch.log10.html)| [mindspore.mint.log10](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.log10.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.logaddexp](https://pytorch.org/docs/2.1/generated/torch.logaddexp.html)| 
[mindspore.mint.logaddexp](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.logaddexp.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.logsumexp](https://pytorch.org/docs/2.1/generated/torch.logsumexp.html)| [mindspore.mint.logsumexp](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.logsumexp.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.log1p](https://pytorch.org/docs/2.1/generated/torch.log1p.html#torch.log1p) | [mindspore.mint.log1p](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.log1p.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.logical_and](https://pytorch.org/docs/2.1/generated/torch.logical_and.html) | [mindspore.mint.logical_and](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.logical_and.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.logical_not](https://pytorch.org/docs/2.1/generated/torch.logical_not.html) | [mindspore.mint.logical_not](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.logical_not.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.logical_or](https://pytorch.org/docs/2.1/generated/torch.logical_or.html) | [mindspore.mint.logical_or](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.logical_or.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.logical_xor](https://pytorch.org/docs/2.1/generated/torch.logical_xor.html) | [mindspore.mint.logical_xor](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.logical_xor.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.lt](https://pytorch.org/docs/2.1/generated/torch.lt.html) | [mindspore.mint.lt](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.lt.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.masked_select](https://pytorch.org/docs/2.1/generated/torch.masked_select.html) | [mindspore.mint.masked_select](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.masked_select.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.matmul](https://pytorch.org/docs/2.1/generated/torch.matmul.html#torch.matmul) | [mindspore.mint.matmul](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.matmul.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.max](https://pytorch.org/docs/2.1/generated/torch.max.html) | 
[mindspore.mint.max](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.max.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.maximum](https://pytorch.org/docs/2.1/generated/torch.maximum.html) | [mindspore.mint.maximum](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.maximum.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.mean](https://pytorch.org/docs/2.1/generated/torch.mean.html) | [mindspore.mint.mean](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.mean.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.median](https://pytorch.org/docs/2.1/generated/torch.median.html) | [mindspore.mint.median](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.median.html) | The functions are consistent, but the default value of dim is different. | -| [torch.meshgrid](https://pytorch.org/docs/2.1/generated/torch.meshgrid.html)| [mindspore.mint.meshgrid](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.meshgrid.html)| The functions are consistent, but the default value of indexing is different. | -| [torch.mul](https://pytorch.org/docs/2.1/generated/torch.mul.html#torch.mul) | [mindspore.mint.mul](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.mul.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.min](https://pytorch.org/docs/2.1/generated/torch.min.html#torch.min) | [mindspore.mint.min](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.min.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.minimum](https://pytorch.org/docs/2.1/generated/torch.minimum.html) | [mindspore.mint.minimum](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.minimum.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.mm](https://pytorch.org/docs/2.1/generated/torch.mm.html) | [mindspore.mint.mm](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.mm.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.multinomial](https://pytorch.org/docs/2.1/generated/torch.multinomial.html) | [mindspore.mint.multinomial](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.multinomial.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.mv](https://pytorch.org/docs/2.1/generated/torch.mv.html) | [mindspore.mint.mv](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.mv.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.nansum](https://pytorch.org/docs/2.1/generated/torch.nansum.html) | 
[mindspore.mint.nansum](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nansum.html) | The functions are consistent, but the default value of end is different. | -| [torch.nan_to_num](https://pytorch.org/docs/2.1/generated/torch.nan_to_num.html) | [mindspore.mint.nan_to_num](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nan_to_num.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.narrow](https://pytorch.org/docs/2.1/generated/torch.narrow.html) | [mindspore.mint.narrow](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.narrow.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.ne](https://pytorch.org/docs/2.1/generated/torch.ne.html)| [mindspore.mint.ne](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.ne.html)| [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.neg](https://pytorch.org/docs/2.1/generated/torch.neg.html)| [mindspore.mint.neg](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.neg.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.negative](https://pytorch.org/docs/2.1/generated/torch.negative.html) | [mindspore.mint.negative](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.negative.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.nonzero](https://pytorch.org/docs/2.1/generated/torch.nonzero.html) | [mindspore.mint.nonzero](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nonzero.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.normal](https://pytorch.org/docs/2.1/generated/torch.normal.html) | [mindspore.mint.normal](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.normal.html) | The parameters of interface overloading are different. 
| -| [torch.norm](https://pytorch.org/docs/2.1/generated/torch.norm.html) | [mindspore.mint.norm](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.norm.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.ones](https://pytorch.org/docs/2.1/generated/torch.ones.html) | [mindspore.mint.ones](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.ones.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.ones_like](https://pytorch.org/docs/2.1/torch.html#torch.ones_like) | [mindspore.mint.ones_like](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.ones_like.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.outer](https://pytorch.org/docs/2.1/generated/torch.outer.html) | [mindspore.mint.outer](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.outer.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.permute](https://pytorch.org/docs/2.1/generated/torch.permute.html) | [mindspore.mint.permute](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.permute.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.polar](https://pytorch.org/docs/2.1/generated/torch.polar.html) | [mindspore.mint.polar](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.polar.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.pow](https://pytorch.org/docs/2.1/generated/torch.pow.html) | [mindspore.mint.pow](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.pow.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.prod](https://pytorch.org/docs/2.1/generated/torch.prod.html#torch.prod) | [mindspore.mint.prod](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.prod.html) |[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.rand](https://pytorch.org/docs/2.1/generated/torch.rand.html) | [mindspore.mint.rand](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.rand.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.rand_like](https://pytorch.org/docs/2.1/generated/torch.rand_like.html) | [mindspore.mint.rand_like](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.rand_like.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.randint](https://pytorch.org/docs/2.1/generated/torch.randint.html) | [mindspore.mint.randint](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.randint.html) | 
[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.randint_like](https://pytorch.org/docs/2.1/generated/torch.randint_like.html) | [mindspore.mint.randint_like](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.randint_like.html) | The functions are consistent, but the default value of low is different. | -| [torch.randn](https://pytorch.org/docs/2.1/generated/torch.randn.html) | [mindspore.mint.randn](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.randn.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.randn_like](https://pytorch.org/docs/2.1/generated/torch.randn_like.html) | [mindspore.mint.randn_like](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.randn_like.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.randperm](https://pytorch.org/docs/2.1/generated/torch.randperm.html) | [mindspore.mint.randperm](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.randperm.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.ravel](https://pytorch.org/docs/2.1/generated/torch.ravel.html) | [mindspore.mint.ravel](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.ravel.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.reciprocal](https://pytorch.org/docs/2.1/generated/torch.reciprocal.html) | [mindspore.mint.reciprocal](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.reciprocal.html) |[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.remainder](https://pytorch.org/docs/2.1/generated/torch.remainder.html) | [mindspore.mint.remainder](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.remainder.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.repeat_interleave](https://pytorch.org/docs/2.1/generated/torch.repeat_interleave.html) | [mindspore.mint.repeat_interleave](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.repeat_interleave.html) | Consistent functions, PyTorch involves overloading. 
| -| [torch.reshape](https://pytorch.org/docs/2.1/generated/torch.reshape.html) | [mindspore.mint.reshape](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.reshape.html) |[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.roll](https://pytorch.org/docs/2.1/generated/torch.roll.html) | [mindspore.mint.roll](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.roll.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.round](https://pytorch.org/docs/2.1/generated/torch.round.html)| [mindspore.mint.round](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.round.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.rsqrt](https://pytorch.org/docs/2.1/generated/torch.rsqrt.html) | [mindspore.mint.rsqrt](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.rsqrt.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.scatter](https://pytorch.org/docs/2.1/generated/torch.scatter.html) | [mindspore.mint.scatter](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.scatter.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.scatter_add](https://pytorch.org/docs/2.1/generated/torch.scatter_add.html) | [mindspore.mint.scatter_add](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.scatter_add.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.searchsorted](https://pytorch.org/docs/2.1/generated/torch.searchsorted.html) | [mindspore.mint.searchsorted](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.searchsorted.html) | The functions are consistent, but the default value of side is different. 
| -| [torch.select](https://pytorch.org/docs/2.1/generated/torch.select.html) | [mindspore.mint.select](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.select.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.sigmoid](https://pytorch.org/docs/2.1/generated/torch.sigmoid.html) | [mindspore.mint.sigmoid](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.sigmoid.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.sign](https://pytorch.org/docs/2.1/generated/torch.sign.html) | [mindspore.mint.sign](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.sign.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.sin](https://pytorch.org/docs/2.1/generated/torch.sin.html)| [mindspore.mint.sin](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.sin.html)| [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.sinc](https://pytorch.org/docs/2.1/generated/torch.sinc.html)| [mindspore.mint.sinc](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.sinc.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.sinh](https://pytorch.org/docs/2.1/generated/torch.sinh.html)| [mindspore.mint.sinh](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.sinh.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.sort](https://pytorch.org/docs/2.1/generated/torch.sort.html) | [mindspore.mint.sort](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.sort.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.split](https://pytorch.org/docs/2.1/generated/torch.split.html) | [mindspore.mint.split](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.split.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.sqrt](https://pytorch.org/docs/2.1/generated/torch.sqrt.html) | [mindspore.mint.sqrt](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.sqrt.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.square](https://pytorch.org/docs/2.1/generated/torch.square.html)| [mindspore.mint.square](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.square.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.squeeze](https://pytorch.org/docs/2.1/generated/torch.squeeze.html) | [mindspore.mint.squeeze](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.squeeze.html) | 
[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.stack](https://pytorch.org/docs/2.1/generated/torch.stack.html) | [mindspore.mint.stack](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.stack.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.std](https://pytorch.org/docs/2.1/generated/torch.std.html) | [mindspore.mint.std](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.std.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.std_mean](https://pytorch.org/docs/2.1/generated/torch.std_mean.html) | [mindspore.mint.std_mean](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.std_mean.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.sub](https://pytorch.org/docs/2.1/generated/torch.sub.html#torch.sub) | [mindspore.mint.sub](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.sub.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.sum](https://pytorch.org/docs/2.1/generated/torch.sum.html) | [mindspore.mint.sum](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.sum.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.swapaxes](https://pytorch.org/docs/2.1/generated/torch.swapaxes.html) | [mindspore.mint.swapaxes](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.swapaxes.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.t](https://pytorch.org/docs/2.1/generated/torch.t.html) | [mindspore.mint.t](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.t.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.tan](https://pytorch.org/docs/2.1/generated/torch.tan.html)| [mindspore.mint.tan](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.tan.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.tanh](https://pytorch.org/docs/2.1/generated/torch.tanh.html) | [mindspore.mint.tanh](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.tanh.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.tile](https://pytorch.org/docs/2.1/generated/torch.tile.html) | [mindspore.mint.tile](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.tile.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.topk](https://pytorch.org/docs/2.1/generated/torch.topk.html#torch.topk) | 
[mindspore.mint.topk](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.topk.html) | The functions are consistent, but the default value of dim is different. | -| [torch.trace](https://pytorch.org/docs/2.1/generated/torch.trace.html) | [mindspore.mint.trace](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.trace.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.transpose](https://pytorch.org/docs/2.1/generated/torch.transpose.html) | [mindspore.mint.transpose](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.transpose.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.tril](https://pytorch.org/docs/2.1/generated/torch.tril.html) | [mindspore.mint.tril](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.tril.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.triu](https://pytorch.org/docs/2.1/generated/torch.triu.html) | [mindspore.mint.triu](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.triu.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.trunc](https://pytorch.org/docs/2.1/generated/torch.trunc.html)| [mindspore.mint.trunc](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.trunc.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.unbind](https://pytorch.org/docs/2.1/generated/torch.unbind.html) | [mindspore.mint.unbind](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.unbind.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.unique](https://pytorch.org/docs/2.1/generated/torch.unique.html#torch.unique) | [mindspore.mint.unique](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.unique.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.unique_consecutive](https://pytorch.org/docs/2.1/generated/torch.unique_consecutive.html) | [mindspore.mint.unique_consecutive](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.unique_consecutive.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.unsqueeze](https://pytorch.org/docs/2.1/generated/torch.unsqueeze.html) | [mindspore.mint.unsqueeze](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.unsqueeze.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.var](https://pytorch.org/docs/2.1/generated/torch.var.html) | [mindspore.mint.var](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.var.html) | 
[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.var_mean](https://pytorch.org/docs/2.1/generated/torch.var_mean.html) | [mindspore.mint.var_mean](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.var_mean.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.where](https://pytorch.org/docs/2.1/generated/torch.where.html) | [mindspore.mint.where](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.where.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.xlogy](https://pytorch.org/docs/2.1/generated/torch.xlogy.html) | [mindspore.mint.xlogy](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.xlogy.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.zeros](https://pytorch.org/docs/2.1/generated/torch.zeros.html) | [mindspore.mint.zeros](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.zeros.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.zeros_like](https://pytorch.org/docs/2.1/generated/torch.zeros_like.html#torch-zeros-like) | [mindspore.mint.zeros_like](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.zeros_like.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.abs](https://pytorch.org/docs/2.1/generated/torch.abs.html) | [mindspore.mint.abs](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.abs.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.acos](https://pytorch.org/docs/2.1/generated/torch.acos.html) | [mindspore.mint.acos](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.acos.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.acosh](https://pytorch.org/docs/2.1/generated/torch.acosh.html)| [mindspore.mint.acosh](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.acosh.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.add](https://pytorch.org/docs/2.1/generated/torch.add.html)| [mindspore.mint.add](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.add.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.addbmm](https://pytorch.org/docs/2.1/generated/torch.addbmm.html)| [mindspore.mint.addbmm](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.addbmm.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.addmm](https://pytorch.org/docs/2.1/generated/torch.addmm.html)| 
[mindspore.mint.addmm](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.addmm.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.addmv](https://pytorch.org/docs/2.1/generated/torch.addmv.html)| [mindspore.mint.addmv](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.addmv.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.all](https://pytorch.org/docs/2.1/generated/torch.all.html#torch.all) | [mindspore.mint.all](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.all.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.allclose](https://pytorch.org/docs/2.1/generated/torch.allclose.html)| [mindspore.mint.allclose](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.allclose.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.amax](https://pytorch.org/docs/2.1/generated/torch.amax.html)| [mindspore.mint.amax](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.amax.html)|The functions are consistent, but the default value of dim is different.| +| [torch.amin](https://pytorch.org/docs/2.1/generated/torch.amin.html)| [mindspore.mint.amin](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.amin.html)|The functions are consistent, but the default value of dim is different.| +| [torch.any](https://pytorch.org/docs/2.1/generated/torch.any.html#torch.any) | [mindspore.mint.any](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.any.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.arange](https://pytorch.org/docs/2.1/generated/torch.arange.html)| [mindspore.mint.arange](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.arange.html)|The functions are consistent, but the default value of end is different.| +| [torch.arccos](https://pytorch.org/docs/2.1/generated/torch.arccos.html) | [mindspore.mint.arccos](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.arccos.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.arccosh](https://pytorch.org/docs/2.1/generated/torch.arccosh.html) | [mindspore.mint.arccosh](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.arccosh.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.arcsin](https://pytorch.org/docs/2.1/generated/torch.arcsin.html) | [mindspore.mint.arcsin](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.arcsin.html) | Consistent functions, inconsistent parameter names. 
| +| [torch.arcsinh](https://pytorch.org/docs/2.1/generated/torch.arcsinh.html) | [mindspore.mint.arcsinh](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.arcsinh.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.arctan](https://pytorch.org/docs/2.1/generated/torch.arctan.html) | [mindspore.mint.arctan](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.arctan.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.arctan2](https://pytorch.org/docs/2.1/generated/torch.arctan2.html)| [mindspore.mint.arctan2](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.arctan2.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.arctanh](https://pytorch.org/docs/2.1/generated/torch.arctanh.html) | [mindspore.mint.arctanh](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.arctanh.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.argmax](https://pytorch.org/docs/2.1/generated/torch.argmax.html) | [mindspore.mint.argmax](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.argmax.html) |[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.argmin](https://pytorch.org/docs/2.1/generated/torch.argmin.html) | [mindspore.mint.argmin](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.argmin.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.argsort](https://pytorch.org/docs/2.1/generated/torch.argsort.html)| [mindspore.mint.argsort](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.argsort.html)| [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.asin](https://pytorch.org/docs/2.1/generated/torch.asin.html) | [mindspore.mint.asin](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.asin.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.asinh](https://pytorch.org/docs/2.1/generated/torch.asinh.html)| [mindspore.mint.asinh](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.asinh.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.atan](https://pytorch.org/docs/2.1/generated/torch.atan.html) | [mindspore.mint.atan](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.atan.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.atan2](https://pytorch.org/docs/2.1/generated/torch.atan2.html) | [mindspore.mint.atan2](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.atan2.html) | 
[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.atanh](https://pytorch.org/docs/2.1/generated/torch.atanh.html)| [mindspore.mint.atanh](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.atanh.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.baddbmm](https://pytorch.org/docs/2.1/generated/torch.baddbmm.html) | [mindspore.mint.baddbmm](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.baddbmm.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.bernoulli](https://pytorch.org/docs/2.1/generated/torch.bernoulli.html)| [mindspore.mint.bernoulli](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.bernoulli.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.bincount](https://pytorch.org/docs/2.1/generated/torch.bincount.html)| [mindspore.mint.bincount](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.bincount.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.bitwise_and](https://pytorch.org/docs/2.1/generated/torch.bitwise_and.html) | [mindspore.mint.bitwise_and](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.bitwise_and.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.bitwise_or](https://pytorch.org/docs/2.1/generated/torch.bitwise_or.html) | [mindspore.mint.bitwise_or](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.bitwise_or.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.bitwise_xor](https://pytorch.org/docs/2.1/generated/torch.bitwise_xor.html) | [mindspore.mint.bitwise_xor](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.bitwise_xor.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.bmm](https://pytorch.org/docs/2.1/generated/torch.bmm.html) | [mindspore.mint.bmm](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.bmm.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.broadcast_to](https://pytorch.org/docs/2.1/generated/torch.broadcast_to.html) | [mindspore.mint.broadcast_to](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.broadcast_to.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.cat](https://pytorch.org/docs/2.1/generated/torch.cat.html) | [mindspore.mint.cat](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.cat.html) | 
[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.cdist](https://pytorch.org/docs/2.1/generated/torch.cdist.html)| [mindspore.mint.cdist](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.cdist.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.chunk](https://pytorch.org/docs/2.1/generated/torch.chunk.html)| [mindspore.mint.chunk](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.chunk.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.ceil](https://pytorch.org/docs/2.1/generated/torch.ceil.html) | [mindspore.mint.ceil](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.ceil.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.clamp](https://pytorch.org/docs/2.1/generated/torch.clamp.html) | [mindspore.mint.clamp](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.clamp.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.clone](https://pytorch.org/docs/2.1/generated/torch.clone.html)| [mindspore.mint.clone](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.clone.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.concat](https://pytorch.org/docs/2.1/generated/torch.concat.html)| [mindspore.mint.concat](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.concat.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.count_nonzero](https://pytorch.org/docs/2.1/generated/torch.count_nonzero.html)| [mindspore.mint.count_nonzero](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.count_nonzero.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.cos](https://pytorch.org/docs/2.1/generated/torch.cos.html) | [mindspore.mint.cos](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.cos.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.cosh](https://pytorch.org/docs/2.1/generated/torch.cosh.html) | [mindspore.mint.cosh](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.cosh.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.cross](https://pytorch.org/docs/2.1/generated/torch.cross.html) | [mindspore.mint.cross](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.cross.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.cummax](https://pytorch.org/docs/2.1/generated/torch.cummax.html) | 
[mindspore.mint.cummax](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.cummax.html) |[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.cummin](https://pytorch.org/docs/2.1/generated/torch.cummin.html) | [mindspore.mint.cummin](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.cummin.html) |[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.cumprod](https://pytorch.org/docs/2.1/generated/torch.cumprod.html)| [mindspore.mint.cumprod](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.cumprod.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.cumsum](https://pytorch.org/docs/2.1/generated/torch.cumsum.html) | [mindspore.mint.cumsum](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.cumsum.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.diff](https://pytorch.org/docs/2.1/generated/torch.diff.html)| [mindspore.mint.diff](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.diff.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.div](https://pytorch.org/docs/2.1/generated/torch.div.html) | [mindspore.mint.div](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.div.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.divide](https://pytorch.org/docs/2.1/generated/torch.divide.html) | [mindspore.mint.divide](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.divide.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.dot](https://pytorch.org/docs/2.1/generated/torch.dot.html)| [mindspore.mint.dot](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.dot.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.einsum](https://pytorch.org/docs/2.1/generated/torch.einsum.html)| [mindspore.mint.einsum](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.einsum.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.empty](https://pytorch.org/docs/2.1/generated/torch.empty.html)| [mindspore.mint.empty](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.empty.html)| [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.empty_like](https://pytorch.org/docs/2.1/generated/torch.empty_like.html)| 
[mindspore.mint.empty_like](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.empty_like.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.equal](https://pytorch.org/docs/2.1/generated/torch.equal.html)| [mindspore.mint.equal](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.equal.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.eq](https://pytorch.org/docs/2.1/generated/torch.eq.html) | [mindspore.mint.eq](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.eq.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.erf](https://pytorch.org/docs/2.1/generated/torch.erf.html) | [mindspore.mint.erf](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.erf.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.erfc](https://pytorch.org/docs/2.1/generated/torch.erfc.html) | [mindspore.mint.erfc](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.erfc.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.erfinv](https://pytorch.org/docs/2.1/generated/torch.erfinv.html) | [mindspore.mint.erfinv](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.erfinv.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.exp](https://pytorch.org/docs/2.1/generated/torch.exp.html) | [mindspore.mint.exp](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.exp.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.exp2](https://pytorch.org/docs/2.1/generated/torch.exp2.html) | [mindspore.mint.exp2](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.exp2.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.expm1](https://pytorch.org/docs/2.1/generated/torch.expm1.html) | [mindspore.mint.expm1](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.expm1.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.eye](https://pytorch.org/docs/2.1/generated/torch.eye.html) | [mindspore.mint.eye](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.eye.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.float_power](https://pytorch.org/docs/2.1/generated/torch.float_power.html)| 
[mindspore.mint.float_power](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.float_power.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.flatten](https://pytorch.org/docs/2.1/generated/torch.flatten.html) | [mindspore.mint.flatten](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.flatten.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.flip](https://pytorch.org/docs/2.1/generated/torch.flip.html) | [mindspore.mint.flip](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.flip.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.floor](https://pytorch.org/docs/2.1/generated/torch.floor.html) | [mindspore.mint.floor](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.floor.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.fmod](https://pytorch.org/docs/2.1/generated/torch.fmod.html)| [mindspore.mint.fmod](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.fmod.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.frac](https://pytorch.org/docs/2.1/generated/torch.frac.html)| [mindspore.mint.frac](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.frac.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.full](https://pytorch.org/docs/2.1/generated/torch.full.html) | [mindspore.mint.full](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.full.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.full_like](https://pytorch.org/docs/2.1/generated/torch.full_like.html)| [mindspore.mint.full_like](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.full_like.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.gather](https://pytorch.org/docs/2.1/generated/torch.gather.html)| [mindspore.mint.gather](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.gather.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.greater](https://pytorch.org/docs/2.1/generated/torch.greater.html) | [mindspore.mint.greater](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.greater.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.greater_equal](https://pytorch.org/docs/2.1/generated/torch.greater_equal.html) | [mindspore.mint.greater_equal](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.greater_equal.html) | 
[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.gt](https://pytorch.org/docs/2.1/generated/torch.gt.html) | [mindspore.mint.gt](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.gt.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.histc](https://pytorch.org/docs/2.1/generated/torch.histc.html)| [mindspore.mint.histc](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.histc.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.index_select](https://pytorch.org/docs/2.1/generated/torch.index_select.html) | [mindspore.mint.index_select](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.index_select.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.inverse](https://pytorch.org/docs/2.1/generated/torch.inverse.html) | [mindspore.mint.inverse](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.inverse.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.isclose](https://pytorch.org/docs/2.1/generated/torch.isclose.html) | [mindspore.mint.isclose](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.isclose.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.isfinite](https://pytorch.org/docs/2.1/generated/torch.isfinite.html) | [mindspore.mint.isfinite](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.isfinite.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.isinf](https://pytorch.org/docs/2.1/generated/torch.isinf.html)| [mindspore.mint.isinf](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.isinf.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.isneginf](https://pytorch.org/docs/2.1/generated/torch.isneginf.html)| [mindspore.mint.isneginf](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.isneginf.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.le](https://pytorch.org/docs/2.1/generated/torch.le.html) | [mindspore.mint.le](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.le.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.lerp](https://pytorch.org/docs/2.1/generated/torch.lerp.html)| [mindspore.mint.lerp](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.lerp.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| 
[torch.less](https://pytorch.org/docs/2.1/generated/torch.less.html) | [mindspore.mint.less](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.less.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.less_equal](https://pytorch.org/docs/2.1/generated/torch.less_equal.html) | [mindspore.mint.less_equal](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.less_equal.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.linspace](https://pytorch.org/docs/2.1/generated/torch.linspace.html) | [mindspore.mint.linspace](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.linspace.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.log](https://pytorch.org/docs/2.1/generated/torch.log.html) | [mindspore.mint.log](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.log.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.log2](https://pytorch.org/docs/2.1/generated/torch.log2.html)| [mindspore.mint.log2](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.log2.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.log10](https://pytorch.org/docs/2.1/generated/torch.log10.html)| [mindspore.mint.log10](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.log10.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.logaddexp](https://pytorch.org/docs/2.1/generated/torch.logaddexp.html)| [mindspore.mint.logaddexp](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.logaddexp.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.logsumexp](https://pytorch.org/docs/2.1/generated/torch.logsumexp.html)| [mindspore.mint.logsumexp](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.logsumexp.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.log1p](https://pytorch.org/docs/2.1/generated/torch.log1p.html#torch.log1p) | [mindspore.mint.log1p](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.log1p.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.logical_and](https://pytorch.org/docs/2.1/generated/torch.logical_and.html) | [mindspore.mint.logical_and](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.logical_and.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.logical_not](https://pytorch.org/docs/2.1/generated/torch.logical_not.html) | 
[mindspore.mint.logical_not](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.logical_not.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.logical_or](https://pytorch.org/docs/2.1/generated/torch.logical_or.html) | [mindspore.mint.logical_or](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.logical_or.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.logical_xor](https://pytorch.org/docs/2.1/generated/torch.logical_xor.html) | [mindspore.mint.logical_xor](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.logical_xor.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.lt](https://pytorch.org/docs/2.1/generated/torch.lt.html) | [mindspore.mint.lt](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.lt.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.masked_select](https://pytorch.org/docs/2.1/generated/torch.masked_select.html) | [mindspore.mint.masked_select](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.masked_select.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.matmul](https://pytorch.org/docs/2.1/generated/torch.matmul.html#torch.matmul) | [mindspore.mint.matmul](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.matmul.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.max](https://pytorch.org/docs/2.1/generated/torch.max.html) | [mindspore.mint.max](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.max.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.maximum](https://pytorch.org/docs/2.1/generated/torch.maximum.html) | [mindspore.mint.maximum](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.maximum.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.mean](https://pytorch.org/docs/2.1/generated/torch.mean.html) | [mindspore.mint.mean](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.mean.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.median](https://pytorch.org/docs/2.1/generated/torch.median.html) | [mindspore.mint.median](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.median.html) | The functions are consistent, but the default value of dim is different. | +| [torch.meshgrid](https://pytorch.org/docs/2.1/generated/torch.meshgrid.html)| [mindspore.mint.meshgrid](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.meshgrid.html)| The functions are consistent, but the default value of indexing is different. 
| +| [torch.mul](https://pytorch.org/docs/2.1/generated/torch.mul.html#torch.mul) | [mindspore.mint.mul](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.mul.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.min](https://pytorch.org/docs/2.1/generated/torch.min.html#torch.min) | [mindspore.mint.min](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.min.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.minimum](https://pytorch.org/docs/2.1/generated/torch.minimum.html) | [mindspore.mint.minimum](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.minimum.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.mm](https://pytorch.org/docs/2.1/generated/torch.mm.html) | [mindspore.mint.mm](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.mm.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.multinomial](https://pytorch.org/docs/2.1/generated/torch.multinomial.html) | [mindspore.mint.multinomial](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.multinomial.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.mv](https://pytorch.org/docs/2.1/generated/torch.mv.html) | [mindspore.mint.mv](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.mv.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.nansum](https://pytorch.org/docs/2.1/generated/torch.nansum.html) | [mindspore.mint.nansum](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nansum.html) | The functions are consistent, but the default value of end is different. 
| +| [torch.nan_to_num](https://pytorch.org/docs/2.1/generated/torch.nan_to_num.html) | [mindspore.mint.nan_to_num](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nan_to_num.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.narrow](https://pytorch.org/docs/2.1/generated/torch.narrow.html) | [mindspore.mint.narrow](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.narrow.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.ne](https://pytorch.org/docs/2.1/generated/torch.ne.html)| [mindspore.mint.ne](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.ne.html)| [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.neg](https://pytorch.org/docs/2.1/generated/torch.neg.html)| [mindspore.mint.neg](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.neg.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.negative](https://pytorch.org/docs/2.1/generated/torch.negative.html) | [mindspore.mint.negative](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.negative.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.nonzero](https://pytorch.org/docs/2.1/generated/torch.nonzero.html) | [mindspore.mint.nonzero](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nonzero.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.normal](https://pytorch.org/docs/2.1/generated/torch.normal.html) | [mindspore.mint.normal](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.normal.html) | The functions are consistent, but the parameters of the overloaded interfaces differ. 
| +| [torch.norm](https://pytorch.org/docs/2.1/generated/torch.norm.html) | [mindspore.mint.norm](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.norm.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.ones](https://pytorch.org/docs/2.1/generated/torch.ones.html) | [mindspore.mint.ones](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.ones.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.ones_like](https://pytorch.org/docs/2.1/torch.html#torch.ones_like) | [mindspore.mint.ones_like](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.ones_like.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.outer](https://pytorch.org/docs/2.1/generated/torch.outer.html) | [mindspore.mint.outer](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.outer.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.permute](https://pytorch.org/docs/2.1/generated/torch.permute.html) | [mindspore.mint.permute](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.permute.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.polar](https://pytorch.org/docs/2.1/generated/torch.polar.html) | [mindspore.mint.polar](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.polar.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.pow](https://pytorch.org/docs/2.1/generated/torch.pow.html) | [mindspore.mint.pow](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.pow.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.prod](https://pytorch.org/docs/2.1/generated/torch.prod.html#torch.prod) | [mindspore.mint.prod](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.prod.html) |[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.rand](https://pytorch.org/docs/2.1/generated/torch.rand.html) | [mindspore.mint.rand](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.rand.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.rand_like](https://pytorch.org/docs/2.1/generated/torch.rand_like.html) | [mindspore.mint.rand_like](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.rand_like.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.randint](https://pytorch.org/docs/2.1/generated/torch.randint.html) | [mindspore.mint.randint](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.randint.html) | 
[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.randint_like](https://pytorch.org/docs/2.1/generated/torch.randint_like.html) | [mindspore.mint.randint_like](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.randint_like.html) | The functions are consistent, but the default value of low is different. | +| [torch.randn](https://pytorch.org/docs/2.1/generated/torch.randn.html) | [mindspore.mint.randn](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.randn.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.randn_like](https://pytorch.org/docs/2.1/generated/torch.randn_like.html) | [mindspore.mint.randn_like](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.randn_like.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.randperm](https://pytorch.org/docs/2.1/generated/torch.randperm.html) | [mindspore.mint.randperm](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.randperm.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.ravel](https://pytorch.org/docs/2.1/generated/torch.ravel.html) | [mindspore.mint.ravel](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.ravel.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.reciprocal](https://pytorch.org/docs/2.1/generated/torch.reciprocal.html) | [mindspore.mint.reciprocal](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.reciprocal.html) |[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.remainder](https://pytorch.org/docs/2.1/generated/torch.remainder.html) | [mindspore.mint.remainder](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.remainder.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.repeat_interleave](https://pytorch.org/docs/2.1/generated/torch.repeat_interleave.html) | [mindspore.mint.repeat_interleave](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.repeat_interleave.html) | The functions are consistent, but PyTorch involves interface overloading. 
| +| [torch.reshape](https://pytorch.org/docs/2.1/generated/torch.reshape.html) | [mindspore.mint.reshape](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.reshape.html) |[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.roll](https://pytorch.org/docs/2.1/generated/torch.roll.html) | [mindspore.mint.roll](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.roll.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.round](https://pytorch.org/docs/2.1/generated/torch.round.html)| [mindspore.mint.round](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.round.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.rsqrt](https://pytorch.org/docs/2.1/generated/torch.rsqrt.html) | [mindspore.mint.rsqrt](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.rsqrt.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.scatter](https://pytorch.org/docs/2.1/generated/torch.scatter.html) | [mindspore.mint.scatter](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.scatter.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.scatter_add](https://pytorch.org/docs/2.1/generated/torch.scatter_add.html) | [mindspore.mint.scatter_add](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.scatter_add.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.searchsorted](https://pytorch.org/docs/2.1/generated/torch.searchsorted.html) | [mindspore.mint.searchsorted](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.searchsorted.html) | The functions are consistent, but the default value of side is different. 
| +| [torch.select](https://pytorch.org/docs/2.1/generated/torch.select.html) | [mindspore.mint.select](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.select.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.sigmoid](https://pytorch.org/docs/2.1/generated/torch.sigmoid.html) | [mindspore.mint.sigmoid](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.sigmoid.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.sign](https://pytorch.org/docs/2.1/generated/torch.sign.html) | [mindspore.mint.sign](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.sign.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.sin](https://pytorch.org/docs/2.1/generated/torch.sin.html)| [mindspore.mint.sin](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.sin.html)| [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.sinc](https://pytorch.org/docs/2.1/generated/torch.sinc.html)| [mindspore.mint.sinc](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.sinc.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.sinh](https://pytorch.org/docs/2.1/generated/torch.sinh.html)| [mindspore.mint.sinh](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.sinh.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.sort](https://pytorch.org/docs/2.1/generated/torch.sort.html) | [mindspore.mint.sort](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.sort.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.split](https://pytorch.org/docs/2.1/generated/torch.split.html) | [mindspore.mint.split](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.split.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.sqrt](https://pytorch.org/docs/2.1/generated/torch.sqrt.html) | [mindspore.mint.sqrt](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.sqrt.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.square](https://pytorch.org/docs/2.1/generated/torch.square.html)| [mindspore.mint.square](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.square.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.squeeze](https://pytorch.org/docs/2.1/generated/torch.squeeze.html) | [mindspore.mint.squeeze](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.squeeze.html) | 
[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.stack](https://pytorch.org/docs/2.1/generated/torch.stack.html) | [mindspore.mint.stack](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.stack.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.std](https://pytorch.org/docs/2.1/generated/torch.std.html) | [mindspore.mint.std](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.std.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.std_mean](https://pytorch.org/docs/2.1/generated/torch.std_mean.html) | [mindspore.mint.std_mean](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.std_mean.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.sub](https://pytorch.org/docs/2.1/generated/torch.sub.html#torch.sub) | [mindspore.mint.sub](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.sub.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.sum](https://pytorch.org/docs/2.1/generated/torch.sum.html) | [mindspore.mint.sum](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.sum.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.swapaxes](https://pytorch.org/docs/2.1/generated/torch.swapaxes.html) | [mindspore.mint.swapaxes](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.swapaxes.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.t](https://pytorch.org/docs/2.1/generated/torch.t.html) | [mindspore.mint.t](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.t.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.tan](https://pytorch.org/docs/2.1/generated/torch.tan.html)| [mindspore.mint.tan](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.tan.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.tanh](https://pytorch.org/docs/2.1/generated/torch.tanh.html) | [mindspore.mint.tanh](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.tanh.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.tile](https://pytorch.org/docs/2.1/generated/torch.tile.html) | [mindspore.mint.tile](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.tile.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.topk](https://pytorch.org/docs/2.1/generated/torch.topk.html#torch.topk) | 
[mindspore.mint.topk](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.topk.html) | The functions are consistent, but the default value of dim is different. | +| [torch.trace](https://pytorch.org/docs/2.1/generated/torch.trace.html) | [mindspore.mint.trace](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.trace.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.transpose](https://pytorch.org/docs/2.1/generated/torch.transpose.html) | [mindspore.mint.transpose](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.transpose.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.tril](https://pytorch.org/docs/2.1/generated/torch.tril.html) | [mindspore.mint.tril](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.tril.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.triu](https://pytorch.org/docs/2.1/generated/torch.triu.html) | [mindspore.mint.triu](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.triu.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.trunc](https://pytorch.org/docs/2.1/generated/torch.trunc.html)| [mindspore.mint.trunc](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.trunc.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.unbind](https://pytorch.org/docs/2.1/generated/torch.unbind.html) | [mindspore.mint.unbind](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.unbind.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.unique](https://pytorch.org/docs/2.1/generated/torch.unique.html#torch.unique) | [mindspore.mint.unique](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.unique.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.unique_consecutive](https://pytorch.org/docs/2.1/generated/torch.unique_consecutive.html) | [mindspore.mint.unique_consecutive](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.unique_consecutive.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.unsqueeze](https://pytorch.org/docs/2.1/generated/torch.unsqueeze.html) | [mindspore.mint.unsqueeze](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.unsqueeze.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.var](https://pytorch.org/docs/2.1/generated/torch.var.html) | [mindspore.mint.var](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.var.html) | 
[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.var_mean](https://pytorch.org/docs/2.1/generated/torch.var_mean.html) | [mindspore.mint.var_mean](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.var_mean.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.where](https://pytorch.org/docs/2.1/generated/torch.where.html) | [mindspore.mint.where](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.where.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.xlogy](https://pytorch.org/docs/2.1/generated/torch.xlogy.html) | [mindspore.mint.xlogy](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.xlogy.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.zeros](https://pytorch.org/docs/2.1/generated/torch.zeros.html) | [mindspore.mint.zeros](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.zeros.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.zeros_like](https://pytorch.org/docs/2.1/generated/torch.zeros_like.html#torch-zeros-like) | [mindspore.mint.zeros_like](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.zeros_like.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | ## torch.linalg | PyTorch 2.1 APIs | MindSpore APIs | Descriptions | | -------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------- | ---- | -| [torch.linalg.inv](https://pytorch.org/docs/2.1/generated/torch.linalg.inv.html) | [mindspore.mint.linalg.inv](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.linalg.inv.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.linalg.matrix_norm](https://pytorch.org/docs/2.1/generated/torch.linalg.matrix_norm.html) | [mindspore.mint.linalg.matrix_norm](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.linalg.matrix_norm.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.linalg.norm](https://pytorch.org/docs/2.1/generated/torch.linalg.norm.html) | [mindspore.mint.linalg.norm](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.linalg.norm.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.linalg.vector_norm](https://pytorch.org/docs/2.1/generated/torch.linalg.vector_norm.html) | [mindspore.mint.linalg.vector_norm](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.linalg.vector_norm.html) | 
[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.linalg.inv](https://pytorch.org/docs/2.1/generated/torch.linalg.inv.html) | [mindspore.mint.linalg.inv](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.linalg.inv.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.linalg.matrix_norm](https://pytorch.org/docs/2.1/generated/torch.linalg.matrix_norm.html) | [mindspore.mint.linalg.matrix_norm](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.linalg.matrix_norm.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.linalg.norm](https://pytorch.org/docs/2.1/generated/torch.linalg.norm.html) | [mindspore.mint.linalg.norm](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.linalg.norm.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.linalg.vector_norm](https://pytorch.org/docs/2.1/generated/torch.linalg.vector_norm.html) | [mindspore.mint.linalg.vector_norm](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.linalg.vector_norm.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | ## torch.distributed | PyTorch 2.1 APIs | MindSpore APIs | Descriptions | | ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | -| [torch.distributed.P2POp](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.P2POp) | [mindspore.mint.distributed.P2POp](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.distributed.P2POp.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.distributed.all_gather](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.all_gather) | [mindspore.mint.distributed.all_gather](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.distributed.all_gather.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.distributed.all_gather_into_tensor](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.all_gather_into_tensor) | [mindspore.mint.distributed.all_gather_into_tensor](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.distributed.all_gather_into_tensor.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.distributed.all_gather_object](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.all_gather_object) | [mindspore.mint.distributed.all_gather_object](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.distributed.all_gather_object.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| 
[torch.distributed.all_reduce](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.all_reduce) | [mindspore.mint.distributed.all_reduce](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.distributed.all_reduce.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.distributed.all_to_all_single](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.all_to_all_single) | [mindspore.mint.distributed.all_to_all_single](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.distributed.all_to_all_single.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.distributed.all_to_all](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.all_to_all) | [mindspore.mint.distributed.all_to_all](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.distributed.all_to_all.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.distributed.barrier](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.barrier) | [mindspore.mint.distributed.barrier](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.distributed.barrier.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.distributed.batch_isend_irecv](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.batch_isend_irecv) | [mindspore.mint.distributed.batch_isend_irecv](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.distributed.batch_isend_irecv.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.distributed.broadcast](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.broadcast) | [mindspore.mint.distributed.broadcast](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.distributed.broadcast.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.distributed.broadcast_object_list](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.broadcast_object_list) | [mindspore.mint.distributed.broadcast_object_list](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.distributed.broadcast_object_list.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| []() | [mindspore.mint.distributed.destroy_process_group](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.distributed.destroy_process_group.html) | Unique to MindSpore| -| [torch.distributed.gather](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.gather) | [mindspore.mint.distributed.gather](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.distributed.gather.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| 
[torch.distributed.gather_object](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.gather_object) | [mindspore.mint.distributed.gather_object](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.distributed.gather_object.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.distributed.get_backend](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.get_backend) | [mindspore.mint.distributed.get_backend](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.distributed.get_backend.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.distributed.get_global_rank](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.get_global_rank) | [mindspore.mint.distributed.get_global_rank](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.distributed.get_global_rank.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.distributed.get_group_rank](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.get_group_rank) | [mindspore.mint.distributed.get_group_rank](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.distributed.get_group_rank.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.distributed.get_process_group_ranks](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.get_process_group_ranks) | [mindspore.mint.distributed.get_process_group_ranks](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.distributed.get_process_group_ranks.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.distributed.get_rank](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.get_rank) | [mindspore.mint.distributed.get_rank](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.distributed.get_rank.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.distributed.get_world_size](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.get_world_size) | [mindspore.mint.distributed.get_world_size](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.distributed.get_world_size.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.distributed.init_process_group](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.init_process_group) | [mindspore.mint.distributed.init_process_group](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.distributed.init_process_group.html) | Consistent functions, inconsistent parameter names. 
| -| [torch.distributed.irecv](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.irecv) | [mindspore.mint.distributed.irecv](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.distributed.irecv.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.distributed.isend](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.isend) | [mindspore.mint.distributed.isend](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.distributed.isend.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.distributed.new_group](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.new_group) | [mindspore.mint.distributed.new_group](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.distributed.new_group.html) | Consistent functions, MindSpore has an additional parameter group_desc = None. | -| [torch.distributed.recv](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.recv) | [mindspore.mint.distributed.recv](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.distributed.recv.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.distributed.reduce](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.reduce) | [mindspore.mint.distributed.reduce](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.distributed.reduce.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.distributed.reduce_scatter](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.reduce_scatter) | [mindspore.mint.distributed.reduce_scatter](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.distributed.reduce_scatter.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.distributed.reduce_scatter_tensor](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.reduce_scatter_tensor) | [mindspore.mint.distributed.reduce_scatter_tensor](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.distributed.reduce_scatter_tensor.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.distributed.scatter](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.scatter) | [mindspore.mint.distributed.scatter](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.distributed.scatter.html) |The functions are consistent, but the default value of scatter_list is different. 
| -| [torch.distributed.scatter_object_list](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.scatter_object_list) | [mindspore.mint.distributed.scatter_object_list](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.distributed.scatter_object_list.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.distributed.send](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.send) | [mindspore.mint.distributed.send](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.distributed.send.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.distributed.P2POp](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.P2POp) | [mindspore.mint.distributed.P2POp](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.distributed.P2POp.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.distributed.all_gather](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.all_gather) | [mindspore.mint.distributed.all_gather](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.distributed.all_gather.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.distributed.all_gather_into_tensor](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.all_gather_into_tensor) | [mindspore.mint.distributed.all_gather_into_tensor](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.distributed.all_gather_into_tensor.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.distributed.all_gather_object](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.all_gather_object) | [mindspore.mint.distributed.all_gather_object](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.distributed.all_gather_object.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.distributed.all_reduce](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.all_reduce) | [mindspore.mint.distributed.all_reduce](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.distributed.all_reduce.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.distributed.all_to_all_single](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.all_to_all_single) | [mindspore.mint.distributed.all_to_all_single](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.distributed.all_to_all_single.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.distributed.all_to_all](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.all_to_all) | 
[mindspore.mint.distributed.all_to_all](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.distributed.all_to_all.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.distributed.barrier](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.barrier) | [mindspore.mint.distributed.barrier](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.distributed.barrier.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.distributed.batch_isend_irecv](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.batch_isend_irecv) | [mindspore.mint.distributed.batch_isend_irecv](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.distributed.batch_isend_irecv.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.distributed.broadcast](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.broadcast) | [mindspore.mint.distributed.broadcast](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.distributed.broadcast.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.distributed.broadcast_object_list](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.broadcast_object_list) | [mindspore.mint.distributed.broadcast_object_list](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.distributed.broadcast_object_list.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| []() | [mindspore.mint.distributed.destroy_process_group](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.distributed.destroy_process_group.html) | Unique to MindSpore| +| [torch.distributed.gather](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.gather) | [mindspore.mint.distributed.gather](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.distributed.gather.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.distributed.gather_object](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.gather_object) | [mindspore.mint.distributed.gather_object](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.distributed.gather_object.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.distributed.get_backend](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.get_backend) | [mindspore.mint.distributed.get_backend](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.distributed.get_backend.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.distributed.get_global_rank](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.get_global_rank) | 
[mindspore.mint.distributed.get_global_rank](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.distributed.get_global_rank.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.distributed.get_group_rank](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.get_group_rank) | [mindspore.mint.distributed.get_group_rank](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.distributed.get_group_rank.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.distributed.get_process_group_ranks](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.get_process_group_ranks) | [mindspore.mint.distributed.get_process_group_ranks](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.distributed.get_process_group_ranks.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.distributed.get_rank](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.get_rank) | [mindspore.mint.distributed.get_rank](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.distributed.get_rank.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.distributed.get_world_size](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.get_world_size) | [mindspore.mint.distributed.get_world_size](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.distributed.get_world_size.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.distributed.init_process_group](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.init_process_group) | [mindspore.mint.distributed.init_process_group](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.distributed.init_process_group.html) | Consistent functions, inconsistent parameter names. | +| [torch.distributed.irecv](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.irecv) | [mindspore.mint.distributed.irecv](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.distributed.irecv.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.distributed.isend](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.isend) | [mindspore.mint.distributed.isend](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.distributed.isend.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.distributed.new_group](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.new_group) | [mindspore.mint.distributed.new_group](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.distributed.new_group.html) | Consistent functions, MindSpore has an additional parameter group_desc = None. 
| +| [torch.distributed.recv](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.recv) | [mindspore.mint.distributed.recv](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.distributed.recv.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.distributed.reduce](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.reduce) | [mindspore.mint.distributed.reduce](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.distributed.reduce.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.distributed.reduce_scatter](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.reduce_scatter) | [mindspore.mint.distributed.reduce_scatter](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.distributed.reduce_scatter.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.distributed.reduce_scatter_tensor](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.reduce_scatter_tensor) | [mindspore.mint.distributed.reduce_scatter_tensor](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.distributed.reduce_scatter_tensor.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.distributed.scatter](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.scatter) | [mindspore.mint.distributed.scatter](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.distributed.scatter.html) |The functions are consistent, but the default value of scatter_list is different. 
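> The mint.distributed rows above (together with the scatter_object_list and send rows that follow) are meant as drop-in counterparts of torch.distributed. The snippet below is a minimal, hedged sketch of that mapping only; the msrun/HCCL launch environment, the Ascend backend, and the use of default arguments are assumptions and are not part of this table.

```python
# Hypothetical usage sketch of the mindspore.mint.distributed counterparts listed above.
# Assumes the script is launched per-rank (e.g. with msrun) on an Ascend/HCCL setup.
import mindspore as ms
from mindspore.mint import distributed as dist

dist.init_process_group()            # counterpart of torch.distributed.init_process_group (parameter names differ)
rank = dist.get_rank()               # counterpart of torch.distributed.get_rank
size = dist.get_world_size()         # counterpart of torch.distributed.get_world_size

x = ms.Tensor([float(rank)], ms.float32)
dist.all_reduce(x)                   # counterpart of torch.distributed.all_reduce: sums x across all ranks

sub = dist.new_group(ranks=list(range(size)))   # MindSpore-only argument group_desc defaults to None
dist.barrier()                       # counterpart of torch.distributed.barrier
dist.destroy_process_group()         # listed above as unique to MindSpore
```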
| +| [torch.distributed.scatter_object_list](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.scatter_object_list) | [mindspore.mint.distributed.scatter_object_list](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.distributed.scatter_object_list.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.distributed.send](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.send) | [mindspore.mint.distributed.send](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.distributed.send.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| ## torch.nn | PyTorch 2.1 APIs | MindSpore APIs | Descriptions | | ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | -| [torch.nn.AdaptiveAvgPool1d](https://PyTorch.org/docs/2.1/generated/torch.nn.AdaptiveAvgPool1d.html) | [mindspore.mint.nn.AdaptiveAvgPool1d](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.AdaptiveAvgPool1d.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.nn.AdaptiveAvgPool2d](https://PyTorch.org/docs/2.1/generated/torch.nn.AdaptiveAvgPool2d.html) | [mindspore.mint.nn.AdaptiveAvgPool2d](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.AdaptiveAvgPool2d.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.nn.AdaptiveAvgPool3d](https://PyTorch.org/docs/2.1/generated/torch.nn.AdaptiveAvgPool3d.html) | [mindspore.mint.nn.AdaptiveAvgPool3d](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.AdaptiveAvgPool3d.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.nn.AvgPool2d](https://PyTorch.org/docs/2.1/generated/torch.nn.AvgPool2d.html) | [mindspore.mint.nn.AvgPool2d](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.AvgPool2d.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.BCELoss](https://PyTorch.org/docs/2.1/generated/torch.nn.BCELoss.html) | [mindspore.mint.nn.BCELoss](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.BCELoss.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.BCEWithLogitsLoss](https://pytorch.org/docs/2.1/generated/torch.nn.BCEWithLogitsLoss.html) | [mindspore.mint.nn.BCEWithLogitsLoss](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.BCEWithLogitsLoss.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.BatchNorm1d](https://PyTorch.org/docs/2.1/generated/torch.nn.BatchNorm1d.html) | [mindspore.mint.nn.BatchNorm1d](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.BatchNorm1d.html) 
| Consistent functions, MindSpore is in inference mode by default. | -| [torch.nn.BatchNorm2d](https://PyTorch.org/docs/2.1/generated/torch.nn.BatchNorm2d.html) | [mindspore.mint.nn.BatchNorm2d](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.BatchNorm2d.html) | Consistent functions, MindSpore is in inference mode by default. | -| [torch.nn.BatchNorm3d](https://PyTorch.org/docs/2.1/generated/torch.nn.BatchNorm3d.html) | [mindspore.mint.nn.BatchNorm3d](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.BatchNorm3d.html) | Consistent functions, MindSpore is in inference mode by default. | -| [torch.nn.ConstantPad1d](https://pytorch.org/docs/2.1/generated/torch.nn.ConstantPad1d.html) | [mindspore.mint.nn.ConstantPad1d](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.ConstantPad1d.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.ConstantPad2d](https://pytorch.org/docs/2.1/generated/torch.nn.ConstantPad2d.html) | [mindspore.mint.nn.ConstantPad2d](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.ConstantPad2d.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.ConstantPad3d](https://pytorch.org/docs/2.1/generated/torch.nn.ConstantPad3d.html) | [mindspore.mint.nn.ConstantPad3d](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.ConstantPad3d.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.Conv2d](https://pytorch.org/docs/2.1/generated/torch.nn.Conv2d.html) | [mindspore.mint.nn.Conv2d](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.Conv2d.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.Conv3d](https://pytorch.org/docs/2.1/generated/torch.nn.Conv3d.html) | [mindspore.mint.nn.Conv3d](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.Conv3d.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.ConvTranspose2d](https://pytorch.org/docs/2.1/generated/torch.nn.ConvTranspose2d.html) | [mindspore.mint.nn.ConvTranspose2d](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.ConvTranspose2d.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.CrossEntropyLoss](https://pytorch.org/docs/2.1/generated/torch.nn.CrossEntropyLoss.html) | [mindspore.mint.nn.CrossEntropyLoss](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.CrossEntropyLoss.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.Dropout](https://pytorch.org/docs/2.1/generated/torch.nn.Dropout.html) | [mindspore.mint.nn.Dropout](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.Dropout.html) | Consistent functions, MindSpore is in inference mode by default. 
| -| [torch.nn.Dropout2d](https://pytorch.org/docs/2.1/generated/torch.nn.Dropout2d.html) | [mindspore.mint.nn.Dropout2d](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.Dropout2d.html) | Consistent functions, MindSpore is in inference mode by default. | -| [torch.nn.ELU](https://pytorch.org/docs/2.1/generated/torch.nn.ELU.html) | [mindspore.mint.nn.ELU](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.ELU.html) |Consistent functions, MindSpore has no parameter inplace. | -| [torch.nn.Embedding](https://pytorch.org/docs/2.1/generated/torch.nn.Embedding.html) | [mindspore.mint.nn.Embedding](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.Embedding.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.Flatten](https://pytorch.org/docs/2.1/generated/torch.nn.Flatten.html) | [mindspore.mint.flatten](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.flatten.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.Fold](https://pytorch.org/docs/2.1/generated/torch.nn.Fold.html) | [mindspore.mint.nn.Fold](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.Fold.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.GELU](https://pytorch.org/docs/2.1/generated/torch.nn.GELU.html) | [mindspore.mint.nn.GELU](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.GELU.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.GroupNorm](https://pytorch.org/docs/2.1/generated/torch.nn.GroupNorm.html) | [mindspore.mint.nn.GroupNorm](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.GroupNorm.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.Hardshrink](https://pytorch.org/docs/2.1/generated/torch.nn.Hardshrink.html#torch.nn.Hardshrink) | [mindspore.mint.nn.Hardshrink](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.Hardshrink.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.Hardsigmoid](https://pytorch.org/docs/2.1/generated/torch.nn.Hardsigmoid.html) | [mindspore.mint.nn.Hardsigmoid](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.Hardsigmoid.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.Hardswish](https://pytorch.org/docs/2.1/generated/torch.nn.Hardswish.html) | [mindspore.mint.nn.Hardswish](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.Hardswish.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.Identity](https://pytorch.org/docs/2.1/generated/torch.nn.Identity.html) | 
[mindspore.mint.nn.Identity](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.Identity.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.L1Loss](https://pytorch.org/docs/2.1/generated/torch.nn.L1Loss.html#torch.nn.L1Loss) | [mindspore.mint.nn.L1Loss](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.L1Loss.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.LayerNorm](https://pytorch.org/docs/2.1/generated/torch.nn.LayerNorm.html) | [mindspore.mint.nn.LayerNorm](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.LayerNorm.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.Linear](https://pytorch.org/docs/2.1/generated/torch.nn.Linear.html) | [mindspore.mint.nn.Linear](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.Linear.html) | Consistent function, MindSpore has two different parameters: weight_init = None and bias_init = None. | -| [torch.nn.LogSigmoid](https://pytorch.org/docs/2.1/generated/torch.nn.LogSigmoid.html) | [mindspore.mint.nn.LogSigmoid](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.LogSigmoid.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.LogSoftMax](https://pytorch.org/docs/2.1/generated/torch.nn.LogSoftmax.html) | [mindspore.mint.nn.LogSoftmax](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.LogSoftmax.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.MSELoss](https://pytorch.org/docs/2.1/generated/torch.nn.MSELoss.html) | [mindspore.mint.nn.MSELoss](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.MSELoss.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.MaxUnpool2d](https://pytorch.org/docs/2.1/generated/torch.nn.MaxUnpool2d.html) | [mindspore.mint.nn.MaxUnpool2d](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.MaxUnpool2d.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.Mish](https://pytorch.org/docs/2.1/generated/torch.nn.Mish.html) | [mindspore.mint.nn.Mish](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.Mish.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.NLLLoss](https://pytorch.org/docs/2.1/generated/torch.nn.NLLLoss.html) | [mindspore.mint.nn.NLLLoss](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.NLLLoss.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.PReLU](https://pytorch.org/docs/2.1/generated/torch.nn.PReLU.html) | 
[mindspore.mint.nn.PReLU](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.PReLU.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.ReLU](https://pytorch.org/docs/2.1/generated/torch.nn.ReLU.html) | [mindspore.mint.nn.ReLU](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.ReLU.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.nn.ReLU6](https://pytorch.org/docs/2.1/generated/torch.nn.ReLU6.html) | [mindspore.mint.nn.ReLU6](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.ReLU6.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.ReflectionPad1d](https://pytorch.org/docs/2.1/generated/torch.nn.ReflectionPad1d.html) | [mindspore.mint.nn.ReflectionPad1d](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.ReflectionPad1d.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.ReflectionPad2d](https://pytorch.org/docs/2.1/generated/torch.nn.ReflectionPad2d.html) | [mindspore.mint.nn.ReflectionPad2d](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.ReflectionPad2d.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.ReflectionPad3d](https://pytorch.org/docs/2.1/generated/torch.nn.ReflectionPad3d.html) | [mindspore.mint.nn.ReflectionPad3d](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.ReflectionPad3d.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.ReplicationPad1d](https://pytorch.org/docs/2.1/generated/torch.nn.ReplicationPad1d.html) | [mindspore.mint.nn.ReplicationPad1d](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.ReplicationPad1d.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.ReplicationPad2d](https://pytorch.org/docs/2.1/generated/torch.nn.ReplicationPad2d.html) | [mindspore.mint.nn.ReplicationPad2d](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.ReplicationPad2d.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.ReplicationPad3d](https://pytorch.org/docs/2.1/generated/torch.nn.ReplicationPad3d.html) | [mindspore.mint.nn.ReplicationPad3d](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.ReplicationPad3d.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.SeLU](https://pytorch.org/docs/2.1/generated/torch.nn.SELU.html) | [mindspore.mint.nn.SELU](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.SELU.html) | 
[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.SiLU](https://pytorch.org/docs/2.1/generated/torch.nn.SiLU.html) | [mindspore.mint.nn.SiLU](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.SiLU.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.SmoothL1Loss](https://pytorch.org/docs/2.1/generated/torch.nn.SmoothL1Loss.html) | [mindspore.mint.nn.SmoothL1Loss](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.SmoothL1Loss.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.Softmax](https://pytorch.org/docs/2.1/generated/torch.nn.Softmax.html) | [mindspore.mint.nn.Softmax](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.Softmax.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.nn.Softshrink](https://pytorch.org/docs/2.1/generated/torch.nn.Softshrink.html) | [mindspore.mint.nn.Softshrink](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.Softshrink.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.nn.SyncBatchNorm](https://pytorch.org/docs/2.1/generated/torch.nn.SyncBatchNorm.html) | [mindspore.mint.nn.SyncBatchNorm](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.SyncBatchNorm.html) | Consistent functions, MindSpore is in inference mode by default. 
| -| [torch.nn.Tanh](https://pytorch.org/docs/2.1/generated/torch.nn.Tanh.html) | [mindspore.mint.nn.Tanh](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.Tanh.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.Unfold](https://pytorch.org/docs/2.1/generated/torch.nn.Unfold.html) | [mindspore.mint.nn.Unfold](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.Unfold.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.nn.Upsample](https://pytorch.org/docs/2.1/generated/torch.nn.Upsample.html) | [mindspore.mint.nn.Upsample](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.Upsample.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.ZeroPad1d](https://pytorch.org/docs/2.1/generated/torch.nn.ZeroPad1d.html) | [mindspore.mint.nn.ZeroPad1d](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.ZeroPad1d.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.ZeroPad2d](https://pytorch.org/docs/2.1/generated/torch.nn.ZeroPad2d.html) | [mindspore.mint.nn.ZeroPad2d](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.ZeroPad2d.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.ZeroPad3d](https://pytorch.org/docs/2.1/generated/torch.nn.ZeroPad3d.html) | [mindspore.mint.nn.ZeroPad3d](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.ZeroPad3d.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.AdaptiveAvgPool1d](https://PyTorch.org/docs/2.1/generated/torch.nn.AdaptiveAvgPool1d.html) | [mindspore.mint.nn.AdaptiveAvgPool1d](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.AdaptiveAvgPool1d.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.nn.AdaptiveAvgPool2d](https://PyTorch.org/docs/2.1/generated/torch.nn.AdaptiveAvgPool2d.html) | [mindspore.mint.nn.AdaptiveAvgPool2d](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.AdaptiveAvgPool2d.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.nn.AdaptiveAvgPool3d](https://PyTorch.org/docs/2.1/generated/torch.nn.AdaptiveAvgPool3d.html) | [mindspore.mint.nn.AdaptiveAvgPool3d](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.AdaptiveAvgPool3d.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.nn.AvgPool2d](https://PyTorch.org/docs/2.1/generated/torch.nn.AvgPool2d.html) | [mindspore.mint.nn.AvgPool2d](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.AvgPool2d.html) | 
[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.BCELoss](https://PyTorch.org/docs/2.1/generated/torch.nn.BCELoss.html) | [mindspore.mint.nn.BCELoss](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.BCELoss.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.BCEWithLogitsLoss](https://pytorch.org/docs/2.1/generated/torch.nn.BCEWithLogitsLoss.html) | [mindspore.mint.nn.BCEWithLogitsLoss](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.BCEWithLogitsLoss.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.BatchNorm1d](https://PyTorch.org/docs/2.1/generated/torch.nn.BatchNorm1d.html) | [mindspore.mint.nn.BatchNorm1d](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.BatchNorm1d.html) | Consistent functions, MindSpore is in inference mode by default. | +| [torch.nn.BatchNorm2d](https://PyTorch.org/docs/2.1/generated/torch.nn.BatchNorm2d.html) | [mindspore.mint.nn.BatchNorm2d](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.BatchNorm2d.html) | Consistent functions, MindSpore is in inference mode by default. | +| [torch.nn.BatchNorm3d](https://PyTorch.org/docs/2.1/generated/torch.nn.BatchNorm3d.html) | [mindspore.mint.nn.BatchNorm3d](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.BatchNorm3d.html) | Consistent functions, MindSpore is in inference mode by default. | +| [torch.nn.ConstantPad1d](https://pytorch.org/docs/2.1/generated/torch.nn.ConstantPad1d.html) | [mindspore.mint.nn.ConstantPad1d](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.ConstantPad1d.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.ConstantPad2d](https://pytorch.org/docs/2.1/generated/torch.nn.ConstantPad2d.html) | [mindspore.mint.nn.ConstantPad2d](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.ConstantPad2d.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.ConstantPad3d](https://pytorch.org/docs/2.1/generated/torch.nn.ConstantPad3d.html) | [mindspore.mint.nn.ConstantPad3d](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.ConstantPad3d.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.Conv2d](https://pytorch.org/docs/2.1/generated/torch.nn.Conv2d.html) | [mindspore.mint.nn.Conv2d](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.Conv2d.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.Conv3d](https://pytorch.org/docs/2.1/generated/torch.nn.Conv3d.html) | [mindspore.mint.nn.Conv3d](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.Conv3d.html) | 
[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.ConvTranspose2d](https://pytorch.org/docs/2.1/generated/torch.nn.ConvTranspose2d.html) | [mindspore.mint.nn.ConvTranspose2d](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.ConvTranspose2d.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.CrossEntropyLoss](https://pytorch.org/docs/2.1/generated/torch.nn.CrossEntropyLoss.html) | [mindspore.mint.nn.CrossEntropyLoss](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.CrossEntropyLoss.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.Dropout](https://pytorch.org/docs/2.1/generated/torch.nn.Dropout.html) | [mindspore.mint.nn.Dropout](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.Dropout.html) | Consistent functions, MindSpore is in inference mode by default. | +| [torch.nn.Dropout2d](https://pytorch.org/docs/2.1/generated/torch.nn.Dropout2d.html) | [mindspore.mint.nn.Dropout2d](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.Dropout2d.html) | Consistent functions, MindSpore is in inference mode by default. | +| [torch.nn.ELU](https://pytorch.org/docs/2.1/generated/torch.nn.ELU.html) | [mindspore.mint.nn.ELU](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.ELU.html) |Consistent functions, MindSpore has no parameter inplace. | +| [torch.nn.Embedding](https://pytorch.org/docs/2.1/generated/torch.nn.Embedding.html) | [mindspore.mint.nn.Embedding](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.Embedding.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.Flatten](https://pytorch.org/docs/2.1/generated/torch.nn.Flatten.html) | [mindspore.mint.flatten](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.flatten.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.Fold](https://pytorch.org/docs/2.1/generated/torch.nn.Fold.html) | [mindspore.mint.nn.Fold](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.Fold.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.GELU](https://pytorch.org/docs/2.1/generated/torch.nn.GELU.html) | [mindspore.mint.nn.GELU](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.GELU.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.GroupNorm](https://pytorch.org/docs/2.1/generated/torch.nn.GroupNorm.html) | [mindspore.mint.nn.GroupNorm](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.GroupNorm.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| 
[torch.nn.Hardshrink](https://pytorch.org/docs/2.1/generated/torch.nn.Hardshrink.html#torch.nn.Hardshrink) | [mindspore.mint.nn.Hardshrink](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.Hardshrink.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.Hardsigmoid](https://pytorch.org/docs/2.1/generated/torch.nn.Hardsigmoid.html) | [mindspore.mint.nn.Hardsigmoid](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.Hardsigmoid.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.Hardswish](https://pytorch.org/docs/2.1/generated/torch.nn.Hardswish.html) | [mindspore.mint.nn.Hardswish](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.Hardswish.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.Identity](https://pytorch.org/docs/2.1/generated/torch.nn.Identity.html) | [mindspore.mint.nn.Identity](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.Identity.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.L1Loss](https://pytorch.org/docs/2.1/generated/torch.nn.L1Loss.html#torch.nn.L1Loss) | [mindspore.mint.nn.L1Loss](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.L1Loss.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.LayerNorm](https://pytorch.org/docs/2.1/generated/torch.nn.LayerNorm.html) | [mindspore.mint.nn.LayerNorm](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.LayerNorm.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.Linear](https://pytorch.org/docs/2.1/generated/torch.nn.Linear.html) | [mindspore.mint.nn.Linear](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.Linear.html) | Consistent function, MindSpore has two different parameters: weight_init = None and bias_init = None. 
| +| [torch.nn.LogSigmoid](https://pytorch.org/docs/2.1/generated/torch.nn.LogSigmoid.html) | [mindspore.mint.nn.LogSigmoid](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.LogSigmoid.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.LogSoftmax](https://pytorch.org/docs/2.1/generated/torch.nn.LogSoftmax.html) | [mindspore.mint.nn.LogSoftmax](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.LogSoftmax.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.MSELoss](https://pytorch.org/docs/2.1/generated/torch.nn.MSELoss.html) | [mindspore.mint.nn.MSELoss](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.MSELoss.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.MaxUnpool2d](https://pytorch.org/docs/2.1/generated/torch.nn.MaxUnpool2d.html) | [mindspore.mint.nn.MaxUnpool2d](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.MaxUnpool2d.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.Mish](https://pytorch.org/docs/2.1/generated/torch.nn.Mish.html) | [mindspore.mint.nn.Mish](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.Mish.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.NLLLoss](https://pytorch.org/docs/2.1/generated/torch.nn.NLLLoss.html) | [mindspore.mint.nn.NLLLoss](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.NLLLoss.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.PReLU](https://pytorch.org/docs/2.1/generated/torch.nn.PReLU.html) | [mindspore.mint.nn.PReLU](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.PReLU.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.ReLU](https://pytorch.org/docs/2.1/generated/torch.nn.ReLU.html) | [mindspore.mint.nn.ReLU](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.ReLU.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.nn.ReLU6](https://pytorch.org/docs/2.1/generated/torch.nn.ReLU6.html) | [mindspore.mint.nn.ReLU6](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.ReLU6.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.ReflectionPad1d](https://pytorch.org/docs/2.1/generated/torch.nn.ReflectionPad1d.html) | [mindspore.mint.nn.ReflectionPad1d](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.ReflectionPad1d.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | 
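> Several rows above repeat the note "Consistent functions, MindSpore is in inference mode by default": unlike their torch.nn counterparts, the mint.nn Cells (BatchNorm, Dropout, SyncBatchNorm) start out in inference mode and are switched with set_train(). The sketch below is illustrative only; the input shape and the Ascend backend it assumes are not part of the table.

```python
# Illustrative only: mint.nn layers are Cells and start in inference mode,
# whereas the corresponding torch.nn modules start in training mode.
import numpy as np
import mindspore as ms
from mindspore import mint

net = mint.nn.BatchNorm2d(3)     # inference mode by default (torch.nn.BatchNorm2d defaults to training mode)
drop = mint.nn.Dropout(p=0.5)    # likewise: acts as an identity until training mode is enabled

x = ms.Tensor(np.random.randn(2, 3, 4, 4), ms.float32)
y_eval = drop(net(x))            # running statistics are used; dropout does nothing

net.set_train(True)              # rough equivalent of module.train() in PyTorch
drop.set_train(True)
y_train = drop(net(x))           # batch statistics are updated; dropout masks activations
```

> The extra weight_init/bias_init parameters noted for mindspore.mint.nn.Linear appear to affect only initialization and can be left at their None defaults.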
+| [torch.nn.ReflectionPad2d](https://pytorch.org/docs/2.1/generated/torch.nn.ReflectionPad2d.html) | [mindspore.mint.nn.ReflectionPad2d](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.ReflectionPad2d.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.ReflectionPad3d](https://pytorch.org/docs/2.1/generated/torch.nn.ReflectionPad3d.html) | [mindspore.mint.nn.ReflectionPad3d](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.ReflectionPad3d.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.ReplicationPad1d](https://pytorch.org/docs/2.1/generated/torch.nn.ReplicationPad1d.html) | [mindspore.mint.nn.ReplicationPad1d](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.ReplicationPad1d.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.ReplicationPad2d](https://pytorch.org/docs/2.1/generated/torch.nn.ReplicationPad2d.html) | [mindspore.mint.nn.ReplicationPad2d](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.ReplicationPad2d.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.ReplicationPad3d](https://pytorch.org/docs/2.1/generated/torch.nn.ReplicationPad3d.html) | [mindspore.mint.nn.ReplicationPad3d](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.ReplicationPad3d.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.SELU](https://pytorch.org/docs/2.1/generated/torch.nn.SELU.html) | [mindspore.mint.nn.SELU](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.SELU.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.SiLU](https://pytorch.org/docs/2.1/generated/torch.nn.SiLU.html) | [mindspore.mint.nn.SiLU](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.SiLU.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.SmoothL1Loss](https://pytorch.org/docs/2.1/generated/torch.nn.SmoothL1Loss.html) | [mindspore.mint.nn.SmoothL1Loss](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.SmoothL1Loss.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.Softmax](https://pytorch.org/docs/2.1/generated/torch.nn.Softmax.html) | [mindspore.mint.nn.Softmax](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.Softmax.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.nn.Softshrink](https://pytorch.org/docs/2.1/generated/torch.nn.Softshrink.html) | [mindspore.mint.nn.Softshrink](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.Softshrink.html) | 
[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.nn.SyncBatchNorm](https://pytorch.org/docs/2.1/generated/torch.nn.SyncBatchNorm.html) | [mindspore.mint.nn.SyncBatchNorm](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.SyncBatchNorm.html) | Consistent functions, MindSpore is in inference mode by default. | +| [torch.nn.Tanh](https://pytorch.org/docs/2.1/generated/torch.nn.Tanh.html) | [mindspore.mint.nn.Tanh](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.Tanh.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.Unfold](https://pytorch.org/docs/2.1/generated/torch.nn.Unfold.html) | [mindspore.mint.nn.Unfold](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.Unfold.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.nn.Upsample](https://pytorch.org/docs/2.1/generated/torch.nn.Upsample.html) | [mindspore.mint.nn.Upsample](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.Upsample.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.ZeroPad1d](https://pytorch.org/docs/2.1/generated/torch.nn.ZeroPad1d.html) | [mindspore.mint.nn.ZeroPad1d](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.ZeroPad1d.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.ZeroPad2d](https://pytorch.org/docs/2.1/generated/torch.nn.ZeroPad2d.html) | [mindspore.mint.nn.ZeroPad2d](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.ZeroPad2d.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.ZeroPad3d](https://pytorch.org/docs/2.1/generated/torch.nn.ZeroPad3d.html) | [mindspore.mint.nn.ZeroPad3d](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.ZeroPad3d.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | ## torch.nn.functional | PyTorch 2.1 APIs | MindSpore APIs | Descriptions | | ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | -| [torch.nn.functional.adaptive_avg_pool1d](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.adaptive_avg_pool1d) | [mindspore.mint.nn.functional.adaptive_avg_pool1d](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.adaptive_avg_pool1d.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.functional.adaptive_avg_pool2d](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.adaptive_avg_pool2d) | 
[mindspore.mint.nn.functional.adaptive_avg_pool2d](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.adaptive_avg_pool2d.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.functional.avg_pool1d](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.avg_pool1d) | [mindspore.mint.nn.functional.avg_pool1d](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.avg_pool1d.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.functional.avg_pool2d](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.avg_pool2d) | [mindspore.mint.nn.functional.avg_pool2d](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.avg_pool2d.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.functional.batch_norm](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.batch_norm) | [mindspore.mint.nn.functional.batch_norm](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.batch_norm.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.nn.functional.binary_cross_entropy](https://pytorch.org/docs/2.1/nn.functional.html#binary-cross-entropy) | [mindspore.mint.nn.functional.binary_cross_entropy](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.binary_cross_entropy.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.functional.binary_cross_entropy_with_logits](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.binary_cross_entropy_with_logits) | [mindspore.mint.nn.functional.binary_cross_entropy_with_logits](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.binary_cross_entropy_with_logits.html) |[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.functional.conv2d](https://pytorch.org/docs/2.1/generated/torch.nn.functional.conv2d.html) | [mindspore.mint.nn.functional.conv2d](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.conv2d.html)| [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.functional.conv3d](https://pytorch.org/docs/2.1/generated/torch.nn.functional.conv3d.html) | [mindspore.mint.nn.functional.conv3d](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.conv3d.html)| [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.functional.conv_transpose2d](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.conv_transpose2d) | [mindspore.mint.nn.functional.conv_transpose2d](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.conv_transpose2d.html) | 
[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.functional.dropout](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.dropout) | [mindspore.mint.nn.functional.dropout](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.dropout.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.functional.dropout2d](https://pytorch.org/docs/2.1/generated/torch.nn.functional.dropout2d.html#torch.nn.functional.dropout2d) | [mindspore.mint.nn.functional.dropout2d](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.dropout2d.html) | Consistent functions, MindSpore does not contain the parameter inplace. | -| [torch.nn.functional.elu](https://pytorch.org/docs/2.1/nn.functional.html#elu) | [mindspore.mint.nn.functional.elu](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.elu.html) | Consistent functions, MindSpore does not contain the parameter inplace. | -| [torch.nn.functional.embedding](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.embedding) | [mindspore.mint.nn.functional.embedding](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.embedding.html) | Consistent functions, MindSpore does not contain the parameter sparse. | -| [torch.nn.functional.fold](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.fold) | [mindspore.mint.nn.functional.fold](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.fold.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.nn.functional.gelu](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.gelu) | [mindspore.mint.nn.functional.gelu](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.gelu.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.nn.functional.grid_sample](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.grid_sample) | [mindspore.mint.nn.functional.grid_sample](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.grid_sample.html) | Consistent functions, but the default value of align_corners is different. | -| [torch.nn.functional.hardshrink](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.hardshrink) | [mindspore.mint.nn.functional.hardshrink](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.hardshrink.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.functional.hardsigmoid](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.hardsigmoid) | [mindspore.mint.nn.functional.hardsigmoid](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.hardsigmoid.html) |Consistent functions, MindSpore has no parameter inplace. 
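> The recurring note "MindSpore has no parameter inplace" means the mint.nn.functional ops always return a new tensor; code that relied on PyTorch's inplace=True simply re-binds the result instead. An illustrative sketch of that pattern (the input values are arbitrary and an Ascend backend is assumed):

```python
# Hedged sketch of the inplace difference noted in the rows above.
import numpy as np
import mindspore as ms
from mindspore import mint

x = ms.Tensor(np.random.randn(8), ms.float32)

# PyTorch allows e.g. torch.nn.functional.hardswish(t, inplace=True) to mutate t;
# the mint counterparts take no such flag, so the output is reassigned instead.
x = mint.nn.functional.hardsigmoid(x)
x = mint.nn.functional.hardswish(x)
x = mint.nn.functional.elu(x, alpha=1.0)
```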
| -| [torch.nn.functional.hardswish](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.hardswish) | [mindspore.mint.nn.functional.hardswish](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.hardswish.html) |Consistent functions, MindSpore has no parameter inplace. | -| [torch.nn.functional.interpolate](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.interpolate) | [mindspore.mint.nn.functional.interpolate](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.interpolate.html) | Consistent functions, MindSpore has no parameter antialias. | -| [torch.nn.functional.l1_loss](https://pytorch.org/docs/2.1/nn.functional.html#l1-loss) | [mindspore.mint.nn.functional.l1_loss](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.l1_loss.html) | Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.functional.leaky_relu](https://pytorch.org/docs/2.1/nn.functional.html#leaky-relu) | [mindspore.mint.nn.functional.leaky_relu](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.leaky_relu.html) |Consistent functions, MindSpore has no parameter inplace. | -| [torch.nn.functional.linear](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.linear) | [mindspore.mint.nn.functional.linear](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.linear.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.functional.log_softmax](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.log_softmax) | [mindspore.mint.nn.functional.log_softmax](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.log_softmax.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.functional.logsigmoid](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.logsigmoid) | [mindspore.mint.nn.functional.logsigmoid](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.logsigmoid.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.functional.max_pool2d](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.max_pool2d) | [mindspore.mint.nn.functional.max_pool2d](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.max_pool2d.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.functional.max_unpool2d](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.max_unpool2d) | [mindspore.mint.nn.functional.max_unpool2d](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.max_unpool2d.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.functional.mish](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.mish) | 
[mindspore.mint.nn.functional.mish](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.mish.html) |Consistent functions, MindSpore has no parameter inplace. | -| [torch.nn.functional.mse_loss](https://pytorch.org/docs/2.1/nn.functional.html#mse-loss) | [mindspore.mint.nn.functional.mse_loss](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.mse_loss.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.functional.nll_loss](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.nll_loss) | [mindspore.mint.nn.functional.nll_loss](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.nll_loss.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.functional.normalize](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.normalize) | [mindspore.mint.nn.functional.normalize](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.normalize.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.functional.one_hot](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.one_hot) | [mindspore.mint.nn.functional.one_hot](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.one_hot.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.functional.pad](https://pytorch.org/docs/2.1/nn.functional.html#pad) | [mindspore.mint.nn.functional.pad](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.pad.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.functional.prelu](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.prelu) | [mindspore.mint.nn.functional.prelu](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.prelu.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.functional.relu](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.relu) | [mindspore.mint.nn.functional.relu](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.relu.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.functional.relu6](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.relu6) | [mindspore.mint.nn.functional.relu6](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.relu6.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.functional.relu_](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.relu_) | [mindspore.mint.nn.functional.relu_](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.relu_.html) | 
[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.functional.selu](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.selu) | [mindspore.mint.nn.functional.selu](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.selu.html) | Consistent functions, MindSpore does not contain parameter inplace. | -| [torch.nn.functional.sigmoid](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.sigmoid) | [mindspore.mint.nn.functional.sigmoid](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.sigmoid.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.functional.silu](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.silu) | [mindspore.mint.nn.functional.silu](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.silu.html) | Consistent functions, MindSpore does not contain parameter inplace. | -| [torch.nn.functional.smooth_l1_loss](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.smooth_l1_loss) | [mindspore.mint.nn.functional.smooth_l1_loss](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.smooth_l1_loss.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.functional.softmax](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.softmax) | [mindspore.mint.nn.functional.softmax](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.softmax.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.nn.functional.softplus](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.softplus) | [mindspore.mint.nn.functional.softplus](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.softplus.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.functional.softshrink](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.softshrink) | [mindspore.mint.nn.functional.softshrink](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.softshrink.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.functional.tanh](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.tanh) | [mindspore.mint.nn.functional.tanh](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.tanh.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.nn.functional.unfold](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.unfold) | [mindspore.mint.nn.functional.unfold](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.nn.functional.unfold.html) | 
[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.functional.adaptive_avg_pool1d](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.adaptive_avg_pool1d) | [mindspore.mint.nn.functional.adaptive_avg_pool1d](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.adaptive_avg_pool1d.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.functional.adaptive_avg_pool2d](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.adaptive_avg_pool2d) | [mindspore.mint.nn.functional.adaptive_avg_pool2d](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.adaptive_avg_pool2d.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.functional.avg_pool1d](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.avg_pool1d) | [mindspore.mint.nn.functional.avg_pool1d](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.avg_pool1d.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.functional.avg_pool2d](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.avg_pool2d) | [mindspore.mint.nn.functional.avg_pool2d](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.avg_pool2d.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.functional.batch_norm](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.batch_norm) | [mindspore.mint.nn.functional.batch_norm](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.batch_norm.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.nn.functional.binary_cross_entropy](https://pytorch.org/docs/2.1/nn.functional.html#binary-cross-entropy) | [mindspore.mint.nn.functional.binary_cross_entropy](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.binary_cross_entropy.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.functional.binary_cross_entropy_with_logits](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.binary_cross_entropy_with_logits) | [mindspore.mint.nn.functional.binary_cross_entropy_with_logits](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.binary_cross_entropy_with_logits.html) |[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.functional.conv2d](https://pytorch.org/docs/2.1/generated/torch.nn.functional.conv2d.html) | [mindspore.mint.nn.functional.conv2d](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.conv2d.html)| 
[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.functional.conv3d](https://pytorch.org/docs/2.1/generated/torch.nn.functional.conv3d.html) | [mindspore.mint.nn.functional.conv3d](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.conv3d.html)| [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.functional.conv_transpose2d](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.conv_transpose2d) | [mindspore.mint.nn.functional.conv_transpose2d](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.conv_transpose2d.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.functional.dropout](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.dropout) | [mindspore.mint.nn.functional.dropout](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.dropout.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.functional.dropout2d](https://pytorch.org/docs/2.1/generated/torch.nn.functional.dropout2d.html#torch.nn.functional.dropout2d) | [mindspore.mint.nn.functional.dropout2d](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.dropout2d.html) | Consistent functions, MindSpore does not contain the parameter inplace. | +| [torch.nn.functional.elu](https://pytorch.org/docs/2.1/nn.functional.html#elu) | [mindspore.mint.nn.functional.elu](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.elu.html) | Consistent functions, MindSpore does not contain the parameter inplace. | +| [torch.nn.functional.embedding](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.embedding) | [mindspore.mint.nn.functional.embedding](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.embedding.html) | Consistent functions, MindSpore does not contain the parameter sparse. | +| [torch.nn.functional.fold](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.fold) | [mindspore.mint.nn.functional.fold](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.fold.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.nn.functional.gelu](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.gelu) | [mindspore.mint.nn.functional.gelu](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.gelu.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.nn.functional.grid_sample](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.grid_sample) | [mindspore.mint.nn.functional.grid_sample](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.grid_sample.html) | Consistent functions, but the default value of align_corners is different. 
| +| [torch.nn.functional.hardshrink](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.hardshrink) | [mindspore.mint.nn.functional.hardshrink](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.hardshrink.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.functional.hardsigmoid](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.hardsigmoid) | [mindspore.mint.nn.functional.hardsigmoid](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.hardsigmoid.html) |Consistent functions, MindSpore has no parameter inplace. | +| [torch.nn.functional.hardswish](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.hardswish) | [mindspore.mint.nn.functional.hardswish](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.hardswish.html) |Consistent functions, MindSpore has no parameter inplace. | +| [torch.nn.functional.interpolate](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.interpolate) | [mindspore.mint.nn.functional.interpolate](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.interpolate.html) | Consistent functions, MindSpore has no parameter antialias. | +| [torch.nn.functional.l1_loss](https://pytorch.org/docs/2.1/nn.functional.html#l1-loss) | [mindspore.mint.nn.functional.l1_loss](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.l1_loss.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.functional.leaky_relu](https://pytorch.org/docs/2.1/nn.functional.html#leaky-relu) | [mindspore.mint.nn.functional.leaky_relu](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.leaky_relu.html) |Consistent functions, MindSpore has no parameter inplace. 
| +| [torch.nn.functional.linear](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.linear) | [mindspore.mint.nn.functional.linear](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.linear.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.functional.log_softmax](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.log_softmax) | [mindspore.mint.nn.functional.log_softmax](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.log_softmax.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.functional.logsigmoid](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.logsigmoid) | [mindspore.mint.nn.functional.logsigmoid](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.logsigmoid.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.functional.max_pool2d](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.max_pool2d) | [mindspore.mint.nn.functional.max_pool2d](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.max_pool2d.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.functional.max_unpool2d](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.max_unpool2d) | [mindspore.mint.nn.functional.max_unpool2d](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.max_unpool2d.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.functional.mish](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.mish) | [mindspore.mint.nn.functional.mish](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.mish.html) |Consistent functions, MindSpore has no parameter inplace. 
| +| [torch.nn.functional.mse_loss](https://pytorch.org/docs/2.1/nn.functional.html#mse-loss) | [mindspore.mint.nn.functional.mse_loss](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.mse_loss.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.functional.nll_loss](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.nll_loss) | [mindspore.mint.nn.functional.nll_loss](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.nll_loss.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.functional.normalize](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.normalize) | [mindspore.mint.nn.functional.normalize](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.normalize.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.functional.one_hot](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.one_hot) | [mindspore.mint.nn.functional.one_hot](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.one_hot.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.functional.pad](https://pytorch.org/docs/2.1/nn.functional.html#pad) | [mindspore.mint.nn.functional.pad](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.pad.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.functional.prelu](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.prelu) | [mindspore.mint.nn.functional.prelu](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.prelu.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.functional.relu](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.relu) | [mindspore.mint.nn.functional.relu](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.relu.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.functional.relu6](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.relu6) | [mindspore.mint.nn.functional.relu6](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.relu6.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.functional.relu_](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.relu_) | [mindspore.mint.nn.functional.relu_](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.relu_.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| 
[torch.nn.functional.selu](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.selu) | [mindspore.mint.nn.functional.selu](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.selu.html) | Consistent functions, MindSpore does not contain parameter inplace. | +| [torch.nn.functional.sigmoid](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.sigmoid) | [mindspore.mint.nn.functional.sigmoid](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.sigmoid.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.functional.silu](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.silu) | [mindspore.mint.nn.functional.silu](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.silu.html) | Consistent functions, MindSpore does not contain parameter inplace. | +| [torch.nn.functional.smooth_l1_loss](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.smooth_l1_loss) | [mindspore.mint.nn.functional.smooth_l1_loss](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.smooth_l1_loss.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.functional.softmax](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.softmax) | [mindspore.mint.nn.functional.softmax](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.softmax.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.nn.functional.softplus](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.softplus) | [mindspore.mint.nn.functional.softplus](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.softplus.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.functional.softshrink](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.softshrink) | [mindspore.mint.nn.functional.softshrink](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.softshrink.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.functional.tanh](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.tanh) | [mindspore.mint.nn.functional.tanh](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.tanh.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.nn.functional.unfold](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.unfold) | [mindspore.mint.nn.functional.unfold](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.nn.functional.unfold.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | ## torch.special | PyTorch 2.1 APIs | MindSpore APIs | Descriptions | | 
-------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------- | ---- | -| [torch.special.erfc](https://pytorch.org/docs/2.1/special.html#torch.special.erfc) | [mindspore.mint.special.erfc](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.special.erfc.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.special.exp2](https://pytorch.org/docs/2.1/special.html#torch.special.exp2) | [mindspore.mint.special.exp2](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.special.exp2.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.special.expm1](https://pytorch.org/docs/2.1/special.html#torch.special.expm1) | [mindspore.mint.special.expm1](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.special.expm1.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.special.log1p](https://pytorch.org/docs/2.1/special.html#torch.special.log1p) | [mindspore.mint.special.log1p](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.special.log1p.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.special.log_softmax](https://pytorch.org/docs/2.1/special.html#torch.special.log_softmax) | [mindspore.mint.special.log_softmax](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.special.log_softmax.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.special.round](https://pytorch.org/docs/2.1/special.html#torch.special.round) | [mindspore.mint.special.round](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.special.round.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.special.sinc](https://pytorch.org/docs/2.1/special.html#torch.special.sinc) | [mindspore.mint.special.sinc](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.special.sinc.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.special.erfc](https://pytorch.org/docs/2.1/special.html#torch.special.erfc) | [mindspore.mint.special.erfc](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.special.erfc.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.special.exp2](https://pytorch.org/docs/2.1/special.html#torch.special.exp2) | [mindspore.mint.special.exp2](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.special.exp2.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| 
[torch.special.expm1](https://pytorch.org/docs/2.1/special.html#torch.special.expm1) | [mindspore.mint.special.expm1](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.special.expm1.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.special.log1p](https://pytorch.org/docs/2.1/special.html#torch.special.log1p) | [mindspore.mint.special.log1p](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.special.log1p.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.special.log_softmax](https://pytorch.org/docs/2.1/special.html#torch.special.log_softmax) | [mindspore.mint.special.log_softmax](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.special.log_softmax.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.special.round](https://pytorch.org/docs/2.1/special.html#torch.special.round) | [mindspore.mint.special.round](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.special.round.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.special.sinc](https://pytorch.org/docs/2.1/special.html#torch.special.sinc) | [mindspore.mint.special.sinc](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.special.sinc.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | ## torch.Tensor | PyTorch 2.1 APIs | MindSpore APIs | Descriptions | | ------------------------------------------------------------ | ------------------------------------------------------------ |---------------------------------------------| -| [torch.Tensor.abs](https://pytorch.org/docs/2.1/generated/torch.Tensor.abs.html) | [mindspore.Tensor.abs](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.abs.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.Tensor.absolute](https://pytorch.org/docs/2.1/generated/torch.Tensor.absolute.html) | [mindspore.Tensor.absolute](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.absolute.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.Tensor.add](https://pytorch.org/docs/2.1/generated/torch.Tensor.add.html)| [mindspore.Tensor.add](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.add.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.Tensor.all](https://pytorch.org/docs/2.1/generated/torch.Tensor.all.html) | [mindspore.Tensor.all](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.all.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| 
[torch.Tensor.any](https://pytorch.org/docs/2.1/generated/torch.Tensor.any.html) | [mindspore.Tensor.any](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.any.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.Tensor.argmax](https://pytorch.org/docs/2.1/generated/torch.Tensor.argmax.html) | [mindspore.Tensor.argmax](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.argmax.html#mindspore.Tensor.argmax) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.arctan2](https://pytorch.org/docs/2.1/generated/torch.Tensor.arctan2.html) | [mindspore.Tensor.arctan2](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.arctan2.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.argmin](https://pytorch.org/docs/2.1/generated/torch.Tensor.argmin.html) | [mindspore.Tensor.argmin](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.argmin.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.Tensor.atan2](https://pytorch.org/docs/2.1/generated/torch.Tensor.atan2.html) | [mindspore.Tensor.atan2](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.atan2.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.bool](https://pytorch.org/docs/2.1/generated/torch.Tensor.bool.html)| [mindspore.Tensor.bool](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.bool.html)| [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.ceil](https://pytorch.org/docs/2.1/generated/torch.Tensor.ceil.html) | [mindspore.Tensor.ceil](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.ceil.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.chunk](https://pytorch.org/docs/2.1/generated/torch.Tensor.chunk.html) | [mindspore.Tensor.chunk](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.chunk.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.clamp](https://pytorch.org/docs/2.1/generated/torch.Tensor.clamp.html) | [mindspore.Tensor.clamp](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.clamp.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.clamp_](https://pytorch.org/docs/2.1/generated/torch.Tensor.clamp_.html) | [mindspore.Tensor.clamp_](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.clamp_.html) | 
[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.clip](https://pytorch.org/docs/2.1/generated/torch.Tensor.clip.html) | [mindspore.Tensor.clip](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.clip.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.cos](https://pytorch.org/docs/2.1/generated/torch.Tensor.cos.html) | [mindspore.Tensor.cos](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.cos.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.cumsum](https://pytorch.org/docs/2.1/generated/torch.Tensor.cumsum.html) | [mindspore.Tensor.cumsum](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.cumsum.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.div](https://pytorch.org/docs/2.1/generated/torch.Tensor.div.html) | [mindspore.Tensor.div](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.div.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.divide](https://pytorch.org/docs/2.1/generated/torch.Tensor.divide.html) | [mindspore.Tensor.divide](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.divide.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.eq](https://pytorch.org/docs/2.1/generated/torch.Tensor.eq.html) | [mindspore.Tensor.eq](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.eq.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.erf](https://pytorch.org/docs/2.1/generated/torch.Tensor.erf.html) | [mindspore.Tensor.erf](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.erf.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.Tensor.exp](https://pytorch.org/docs/2.1/generated/torch.Tensor.exp.html) | [mindspore.Tensor.exp](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.exp.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.Tensor.expand_as](https://pytorch.org/docs/2.1/generated/torch.Tensor.expand_as.html) | [mindspore.Tensor.expand_as](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.expand_as.html#mindspore.Tensor.expand_as) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.flatten](https://pytorch.org/docs/2.1/generated/torch.Tensor.flatten.html) | 
[mindspore.Tensor.flatten](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.flatten.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.Tensor.float](https://pytorch.org/docs/2.1/generated/torch.Tensor.float.html) | [mindspore.Tensor.float](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.float.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.floor](https://pytorch.org/docs/2.1/generated/torch.Tensor.floor.html) | [mindspore.Tensor.floor](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.floor.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.gather](https://pytorch.org/docs/2.1/generated/torch.Tensor.gather.html) | [mindspore.Tensor.gather](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.gather.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.greater](https://pytorch.org/docs/2.1/generated/torch.Tensor.greater.html) | [mindspore.Tensor.greater](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.greater.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.gt](https://pytorch.org/docs/2.1/generated/torch.Tensor.gt.html) | [mindspore.Tensor.gt](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.gt.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.half](https://pytorch.org/docs/2.1/generated/torch.Tensor.half.html) | [mindspore.Tensor.half](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.half.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.index_select](https://pytorch.org/docs/2.1/generated/torch.Tensor.index_select.html) | [mindspore.Tensor.index_select](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.index_select.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.int](https://pytorch.org/docs/2.1/generated/torch.Tensor.int.html) | [mindspore.Tensor.int](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.int.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.isfinite](https://pytorch.org/docs/2.1/generated/torch.Tensor.isfinite.html) | [mindspore.Tensor.isfinite](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.isfinite.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| 
[torch.Tensor.isnan](https://pytorch.org/docs/2.1/generated/torch.Tensor.isnan.html) | [mindspore.Tensor.isnan](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.isnan.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.item](https://pytorch.org/docs/2.1/generated/torch.Tensor.item.html) | [mindspore.Tensor.item](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.item.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.le](https://pytorch.org/docs/2.1/generated/torch.Tensor.le.html) | [mindspore.Tensor.le](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.le.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.less](https://pytorch.org/docs/2.1/generated/torch.Tensor.less.html) | [mindspore.Tensor.less](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.less.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.less_equal](https://pytorch.org/docs/2.1/generated/torch.Tensor.less_equal.html) | [mindspore.Tensor.less_equal](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.less_equal.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.log](https://pytorch.org/docs/2.1/generated/torch.Tensor.log.html) | [mindspore.Tensor.log](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.log.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.logical_and](https://pytorch.org/docs/2.1/generated/torch.Tensor.logical_and.html) | [mindspore.Tensor.logical_and](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.logical_and.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.logical_not](https://pytorch.org/docs/2.1/generated/torch.Tensor.logical_not.html) | [mindspore.Tensor.logical_not](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.logical_not.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.logical_or](https://pytorch.org/docs/2.1/generated/torch.Tensor.logical_or.html) | [mindspore.Tensor.logical_or](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.logical_or.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.long](https://pytorch.org/docs/2.1/generated/torch.Tensor.long.html) | [mindspore.Tensor.long](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.long.html) | 
[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.lt](https://pytorch.org/docs/2.1/generated/torch.Tensor.lt.html) | [mindspore.Tensor.lt](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.lt.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.masked_fill](https://pytorch.org/docs/2.1/generated/torch.Tensor.masked_fill.html) | [mindspore.Tensor.masked_fill](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.masked_fill.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.masked_select](https://pytorch.org/docs/2.1/generated/torch.Tensor.masked_select.html) | [mindspore.Tensor.masked_select](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.masked_select.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.matmul](https://pytorch.org/docs/2.1/generated/torch.Tensor.matmul.html) | [mindspore.Tensor.matmul](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.matmul.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.max](https://pytorch.org/docs/2.1/generated/torch.Tensor.max.html) | [mindspore.Tensor.max](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.max.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.maximum](https://pytorch.org/docs/2.1/generated/torch.Tensor.maximum.html) | [mindspore.Tensor.maximum](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.maximum.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.mean](https://pytorch.org/docs/2.1/generated/torch.Tensor.mean.html) | [mindspore.Tensor.mean](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.mean.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.min](https://pytorch.org/docs/2.1/generated/torch.Tensor.min.html) | [mindspore.Tensor.min](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.min.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.minimum](https://pytorch.org/docs/2.1/generated/torch.Tensor.minimum.html) | [mindspore.Tensor.minimum](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.minimum.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.mul](https://pytorch.org/docs/2.1/generated/torch.Tensor.mul.html) | 
[mindspore.Tensor.mul](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.mul.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.Tensor.nan_to_num](https://pytorch.org/docs/2.1/generated/torch.Tensor.nan_to_num.html) | [mindspore.Tensor.nan_to_num](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.nan_to_num.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.Tensor.ne](https://pytorch.org/docs/2.1/generated/torch.Tensor.ne.html) | [mindspore.Tensor.ne](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.ne.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.neg](https://pytorch.org/docs/2.1/generated/torch.Tensor.neg.html) | [mindspore.Tensor.neg](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.neg.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.negative](https://pytorch.org/docs/2.1/generated/torch.Tensor.negative.html) | [mindspore.Tensor.negative](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.negative.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.not_equal](https://pytorch.org/docs/2.1/generated/torch.Tensor.not_equal.html) | [mindspore.Tensor.not_equal](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.not_equal.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.pow](https://pytorch.org/docs/2.1/generated/torch.Tensor.pow.html) | [mindspore.Tensor.pow](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.pow.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.Tensor.prod](https://pytorch.org/docs/2.1/generated/torch.Tensor.prod.html) | [mindspore.Tensor.prod](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.prod.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.reciprocal](https://pytorch.org/docs/2.1/generated/torch.Tensor.reciprocal.html) | [mindspore.Tensor.reciprocal](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.reciprocal.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.remainder](https://pytorch.org/docs/2.1/generated/torch.Tensor.remainder.html) | [mindspore.Tensor.remainder](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.remainder.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | 
-| [torch.Tensor.repeat_interleave](https://pytorch.org/docs/2.1/generated/torch.Tensor.repeat_interleave.html) | [mindspore.Tensor.repeat_interleave](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.repeat_interleave.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.reshape](https://pytorch.org/docs/2.1/generated/torch.Tensor.reshape.html) | [mindspore.Tensor.reshape](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.reshape.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.round](https://pytorch.org/docs/2.1/generated/torch.Tensor.round.html)| [mindspore.Tensor.round](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.round.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.Tensor.rsqrt](https://pytorch.org/docs/2.1/generated/torch.Tensor.rsqrt.html) | [mindspore.Tensor.rsqrt](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.rsqrt.html) |[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.scatter](https://pytorch.org/docs/2.1/generated/torch.Tensor.scatter.html) | [mindspore.Tensor.scatter](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.scatter.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.scatter_add](https://pytorch.org/docs/2.1/generated/torch.Tensor.scatter_add.html) | [mindspore.Tensor.scatter_add](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.scatter_add.html) |[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.sigmoid](https://pytorch.org/docs/2.1/generated/torch.Tensor.sigmoid.html) | [mindspore.Tensor.sigmoid](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.sigmoid.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.Tensor.sin](https://pytorch.org/docs/2.1/generated/torch.Tensor.sin.html)| [mindspore.Tensor.sin](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.sin.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.Tensor.sort](https://pytorch.org/docs/2.1/generated/torch.Tensor.sort.html) | [mindspore.Tensor.sort](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.sort.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.split](https://pytorch.org/docs/2.1/generated/torch.Tensor.split.html) | [mindspore.Tensor.split](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.split.html) | 
[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.Tensor.sqrt](https://pytorch.org/docs/2.1/generated/torch.Tensor.sqrt.html) | [mindspore.Tensor.sqrt](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.sqrt.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.square](https://pytorch.org/docs/2.1/generated/torch.Tensor.square.html)| [mindspore.Tensor.square](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.square.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.Tensor.sub](https://pytorch.org/docs/2.1/generated/torch.Tensor.sub.html) | [mindspore.Tensor.sub](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.sub.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.Tensor.sum](https://pytorch.org/docs/2.1/generated/torch.Tensor.sum.html) | [mindspore.Tensor.sum](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.sum.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.t](https://pytorch.org/docs/2.1/generated/torch.Tensor.t.html) | [mindspore.Tensor.t](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.t.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.tanh](https://pytorch.org/docs/2.1/generated/torch.Tensor.tanh.html) | [mindspore.Tensor.tanh](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.tanh.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.tile](https://pytorch.org/docs/2.1/generated/torch.Tensor.tile.html) | [mindspore.Tensor.tile](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.tile.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.topk](https://pytorch.org/docs/2.1/generated/torch.Tensor.topk.html)| [mindspore.Tensor.topk](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.topk.html)|[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.Tensor.tril](https://pytorch.org/docs/2.1/generated/torch.Tensor.tril.html) | [mindspore.Tensor.tril](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.tril.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.trunc](https://pytorch.org/docs/2.1/generated/torch.Tensor.trunc.html) | [mindspore.Tensor.trunc](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.trunc.html) | 
[Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| -| [torch.Tensor.view_as](https://pytorch.org/docs/2.1/generated/torch.Tensor.view_as.html) | [mindspore.Tensor.view_as](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.view_as.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | -| [torch.Tensor.where](https://pytorch.org/docs/2.1/generated/torch.Tensor.where.html) | [mindspore.Tensor.where](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.where.html) | [Consistent](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.abs](https://pytorch.org/docs/2.1/generated/torch.Tensor.abs.html) | [mindspore.Tensor.abs](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.abs.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.Tensor.absolute](https://pytorch.org/docs/2.1/generated/torch.Tensor.absolute.html) | [mindspore.Tensor.absolute](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.absolute.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.Tensor.add](https://pytorch.org/docs/2.1/generated/torch.Tensor.add.html)| [mindspore.Tensor.add](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.add.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.Tensor.all](https://pytorch.org/docs/2.1/generated/torch.Tensor.all.html) | [mindspore.Tensor.all](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.all.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.Tensor.any](https://pytorch.org/docs/2.1/generated/torch.Tensor.any.html) | [mindspore.Tensor.any](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.any.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.Tensor.argmax](https://pytorch.org/docs/2.1/generated/torch.Tensor.argmax.html) | [mindspore.Tensor.argmax](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.argmax.html#mindspore.Tensor.argmax) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.arctan2](https://pytorch.org/docs/2.1/generated/torch.Tensor.arctan2.html) | [mindspore.Tensor.arctan2](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.arctan2.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.argmin](https://pytorch.org/docs/2.1/generated/torch.Tensor.argmin.html) | 
[mindspore.Tensor.argmin](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.argmin.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.Tensor.atan2](https://pytorch.org/docs/2.1/generated/torch.Tensor.atan2.html) | [mindspore.Tensor.atan2](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.atan2.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.bool](https://pytorch.org/docs/2.1/generated/torch.Tensor.bool.html)| [mindspore.Tensor.bool](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.bool.html)| [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.ceil](https://pytorch.org/docs/2.1/generated/torch.Tensor.ceil.html) | [mindspore.Tensor.ceil](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.ceil.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.chunk](https://pytorch.org/docs/2.1/generated/torch.Tensor.chunk.html) | [mindspore.Tensor.chunk](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.chunk.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.clamp](https://pytorch.org/docs/2.1/generated/torch.Tensor.clamp.html) | [mindspore.Tensor.clamp](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.clamp.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.clamp_](https://pytorch.org/docs/2.1/generated/torch.Tensor.clamp_.html) | [mindspore.Tensor.clamp_](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.clamp_.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.clip](https://pytorch.org/docs/2.1/generated/torch.Tensor.clip.html) | [mindspore.Tensor.clip](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.clip.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.cos](https://pytorch.org/docs/2.1/generated/torch.Tensor.cos.html) | [mindspore.Tensor.cos](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.cos.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.cumsum](https://pytorch.org/docs/2.1/generated/torch.Tensor.cumsum.html) | [mindspore.Tensor.cumsum](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.cumsum.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| 
[torch.Tensor.div](https://pytorch.org/docs/2.1/generated/torch.Tensor.div.html) | [mindspore.Tensor.div](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.div.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.divide](https://pytorch.org/docs/2.1/generated/torch.Tensor.divide.html) | [mindspore.Tensor.divide](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.divide.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.eq](https://pytorch.org/docs/2.1/generated/torch.Tensor.eq.html) | [mindspore.Tensor.eq](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.eq.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.erf](https://pytorch.org/docs/2.1/generated/torch.Tensor.erf.html) | [mindspore.Tensor.erf](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.erf.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.Tensor.exp](https://pytorch.org/docs/2.1/generated/torch.Tensor.exp.html) | [mindspore.Tensor.exp](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.exp.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.Tensor.expand_as](https://pytorch.org/docs/2.1/generated/torch.Tensor.expand_as.html) | [mindspore.Tensor.expand_as](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.expand_as.html#mindspore.Tensor.expand_as) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.flatten](https://pytorch.org/docs/2.1/generated/torch.Tensor.flatten.html) | [mindspore.Tensor.flatten](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.flatten.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.Tensor.float](https://pytorch.org/docs/2.1/generated/torch.Tensor.float.html) | [mindspore.Tensor.float](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.float.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.floor](https://pytorch.org/docs/2.1/generated/torch.Tensor.floor.html) | [mindspore.Tensor.floor](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.floor.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.gather](https://pytorch.org/docs/2.1/generated/torch.Tensor.gather.html) | [mindspore.Tensor.gather](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.gather.html) | 
[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.greater](https://pytorch.org/docs/2.1/generated/torch.Tensor.greater.html) | [mindspore.Tensor.greater](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.greater.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.gt](https://pytorch.org/docs/2.1/generated/torch.Tensor.gt.html) | [mindspore.Tensor.gt](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.gt.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.half](https://pytorch.org/docs/2.1/generated/torch.Tensor.half.html) | [mindspore.Tensor.half](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.half.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.index_select](https://pytorch.org/docs/2.1/generated/torch.Tensor.index_select.html) | [mindspore.Tensor.index_select](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.index_select.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.int](https://pytorch.org/docs/2.1/generated/torch.Tensor.int.html) | [mindspore.Tensor.int](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.int.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.isfinite](https://pytorch.org/docs/2.1/generated/torch.Tensor.isfinite.html) | [mindspore.Tensor.isfinite](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.isfinite.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.isnan](https://pytorch.org/docs/2.1/generated/torch.Tensor.isnan.html) | [mindspore.Tensor.isnan](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.isnan.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.item](https://pytorch.org/docs/2.1/generated/torch.Tensor.item.html) | [mindspore.Tensor.item](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.item.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.le](https://pytorch.org/docs/2.1/generated/torch.Tensor.le.html) | [mindspore.Tensor.le](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.le.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.less](https://pytorch.org/docs/2.1/generated/torch.Tensor.less.html) | 
[mindspore.Tensor.less](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.less.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.less_equal](https://pytorch.org/docs/2.1/generated/torch.Tensor.less_equal.html) | [mindspore.Tensor.less_equal](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.less_equal.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.log](https://pytorch.org/docs/2.1/generated/torch.Tensor.log.html) | [mindspore.Tensor.log](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.log.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.logical_and](https://pytorch.org/docs/2.1/generated/torch.Tensor.logical_and.html) | [mindspore.Tensor.logical_and](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.logical_and.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.logical_not](https://pytorch.org/docs/2.1/generated/torch.Tensor.logical_not.html) | [mindspore.Tensor.logical_not](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.logical_not.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.logical_or](https://pytorch.org/docs/2.1/generated/torch.Tensor.logical_or.html) | [mindspore.Tensor.logical_or](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.logical_or.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.long](https://pytorch.org/docs/2.1/generated/torch.Tensor.long.html) | [mindspore.Tensor.long](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.long.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.lt](https://pytorch.org/docs/2.1/generated/torch.Tensor.lt.html) | [mindspore.Tensor.lt](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.lt.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.masked_fill](https://pytorch.org/docs/2.1/generated/torch.Tensor.masked_fill.html) | [mindspore.Tensor.masked_fill](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.masked_fill.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.masked_select](https://pytorch.org/docs/2.1/generated/torch.Tensor.masked_select.html) | [mindspore.Tensor.masked_select](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.masked_select.html) | 
[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.matmul](https://pytorch.org/docs/2.1/generated/torch.Tensor.matmul.html) | [mindspore.Tensor.matmul](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.matmul.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.max](https://pytorch.org/docs/2.1/generated/torch.Tensor.max.html) | [mindspore.Tensor.max](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.max.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.maximum](https://pytorch.org/docs/2.1/generated/torch.Tensor.maximum.html) | [mindspore.Tensor.maximum](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.maximum.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.mean](https://pytorch.org/docs/2.1/generated/torch.Tensor.mean.html) | [mindspore.Tensor.mean](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.mean.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.min](https://pytorch.org/docs/2.1/generated/torch.Tensor.min.html) | [mindspore.Tensor.min](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.min.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.minimum](https://pytorch.org/docs/2.1/generated/torch.Tensor.minimum.html) | [mindspore.Tensor.minimum](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.minimum.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.mul](https://pytorch.org/docs/2.1/generated/torch.Tensor.mul.html) | [mindspore.Tensor.mul](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.mul.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.Tensor.nan_to_num](https://pytorch.org/docs/2.1/generated/torch.Tensor.nan_to_num.html) | [mindspore.Tensor.nan_to_num](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.nan_to_num.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.Tensor.ne](https://pytorch.org/docs/2.1/generated/torch.Tensor.ne.html) | [mindspore.Tensor.ne](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.ne.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.neg](https://pytorch.org/docs/2.1/generated/torch.Tensor.neg.html) | 
[mindspore.Tensor.neg](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.neg.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.negative](https://pytorch.org/docs/2.1/generated/torch.Tensor.negative.html) | [mindspore.Tensor.negative](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.negative.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.not_equal](https://pytorch.org/docs/2.1/generated/torch.Tensor.not_equal.html) | [mindspore.Tensor.not_equal](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.not_equal.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.pow](https://pytorch.org/docs/2.1/generated/torch.Tensor.pow.html) | [mindspore.Tensor.pow](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.pow.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.Tensor.prod](https://pytorch.org/docs/2.1/generated/torch.Tensor.prod.html) | [mindspore.Tensor.prod](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.prod.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.reciprocal](https://pytorch.org/docs/2.1/generated/torch.Tensor.reciprocal.html) | [mindspore.Tensor.reciprocal](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.reciprocal.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.remainder](https://pytorch.org/docs/2.1/generated/torch.Tensor.remainder.html) | [mindspore.Tensor.remainder](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.remainder.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.repeat_interleave](https://pytorch.org/docs/2.1/generated/torch.Tensor.repeat_interleave.html) | [mindspore.Tensor.repeat_interleave](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.repeat_interleave.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.reshape](https://pytorch.org/docs/2.1/generated/torch.Tensor.reshape.html) | [mindspore.Tensor.reshape](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.reshape.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.round](https://pytorch.org/docs/2.1/generated/torch.Tensor.round.html)| 
[mindspore.Tensor.round](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.round.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.Tensor.rsqrt](https://pytorch.org/docs/2.1/generated/torch.Tensor.rsqrt.html) | [mindspore.Tensor.rsqrt](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.rsqrt.html) |[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.scatter](https://pytorch.org/docs/2.1/generated/torch.Tensor.scatter.html) | [mindspore.Tensor.scatter](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.scatter.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.scatter_add](https://pytorch.org/docs/2.1/generated/torch.Tensor.scatter_add.html) | [mindspore.Tensor.scatter_add](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.scatter_add.html) |[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.sigmoid](https://pytorch.org/docs/2.1/generated/torch.Tensor.sigmoid.html) | [mindspore.Tensor.sigmoid](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.sigmoid.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.Tensor.sin](https://pytorch.org/docs/2.1/generated/torch.Tensor.sin.html)| [mindspore.Tensor.sin](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.sin.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.Tensor.sort](https://pytorch.org/docs/2.1/generated/torch.Tensor.sort.html) | [mindspore.Tensor.sort](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.sort.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.split](https://pytorch.org/docs/2.1/generated/torch.Tensor.split.html) | [mindspore.Tensor.split](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.split.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.Tensor.sqrt](https://pytorch.org/docs/2.1/generated/torch.Tensor.sqrt.html) | [mindspore.Tensor.sqrt](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.sqrt.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.square](https://pytorch.org/docs/2.1/generated/torch.Tensor.square.html)| [mindspore.Tensor.square](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.square.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| 
[torch.Tensor.sub](https://pytorch.org/docs/2.1/generated/torch.Tensor.sub.html) | [mindspore.Tensor.sub](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.sub.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.Tensor.sum](https://pytorch.org/docs/2.1/generated/torch.Tensor.sum.html) | [mindspore.Tensor.sum](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.sum.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.t](https://pytorch.org/docs/2.1/generated/torch.Tensor.t.html) | [mindspore.Tensor.t](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.t.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.tanh](https://pytorch.org/docs/2.1/generated/torch.Tensor.tanh.html) | [mindspore.Tensor.tanh](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.tanh.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.tile](https://pytorch.org/docs/2.1/generated/torch.Tensor.tile.html) | [mindspore.Tensor.tile](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.tile.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.topk](https://pytorch.org/docs/2.1/generated/torch.Tensor.topk.html)| [mindspore.Tensor.topk](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.topk.html)|[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.Tensor.tril](https://pytorch.org/docs/2.1/generated/torch.Tensor.tril.html) | [mindspore.Tensor.tril](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.tril.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.trunc](https://pytorch.org/docs/2.1/generated/torch.Tensor.trunc.html) | [mindspore.Tensor.trunc](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.trunc.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions)| +| [torch.Tensor.view_as](https://pytorch.org/docs/2.1/generated/torch.Tensor.view_as.html) | [mindspore.Tensor.view_as](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.view_as.html) | [Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) | +| [torch.Tensor.where](https://pytorch.org/docs/2.1/generated/torch.Tensor.where.html) | [mindspore.Tensor.where](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.where.html) | 
[Consistent](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#api-mapping-consistency-criteria-and-exceptions) |
## torch.optim
| PyTorch 2.1 APIs | MindSpore APIs | Descriptions |
|------------------------------------------------------------------------------|------------------------------------------------------------------------------|------------------------------------------------------------|
-| [torch.optim.Adam](https://pytorch.org/docs/2.1/optim.html#torch.optim.Adam) | [mindspore.mint.optim.Adam](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.optim.Adam.html) | The functions are consistent, but PyTorch has some optimization parameters |
-| [torch.optim.AdamW](https://pytorch.org/docs/2.1/optim.html#torch.optim.AdamW) | [mindspore.mint.optim.AdamW](https://www.mindspore.cn/docs/en/master/api_python/mint/mindspore.mint.optim.AdamW.html) | The functions are consistent, but PyTorch has some optimization parameters |
+| [torch.optim.Adam](https://pytorch.org/docs/2.1/optim.html#torch.optim.Adam) | [mindspore.mint.optim.Adam](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.optim.Adam.html) | The functions are consistent, but PyTorch has some optimization parameters |
+| [torch.optim.AdamW](https://pytorch.org/docs/2.1/optim.html#torch.optim.AdamW) | [mindspore.mint.optim.AdamW](https://www.mindspore.cn/docs/en/br_base/api_python/mint/mindspore.mint.optim.AdamW.html) | The functions are consistent, but PyTorch has some optimization parameters |
## torch.utils
| PyTorch 1.8.1 APIs | MindSpore APIs | Descriptions |
| ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ |
-| [torch.utils.data.DataLoader](https://pytorch.org/docs/1.8.1/data.html#torch.utils.data.DataLoader) | [mindspore.dataset.GeneratorDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.GeneratorDataset.html#mindspore.dataset.GeneratorDataset) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/DataLoader.html) |
-| [torch.utils.data.distributed.DistributedSampler](https://pytorch.org/docs/1.8.1/data.html#torch.utils.data.distributed.DistributedSampler) | [mindspore.dataset.DistributedSampler](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.DistributedSampler.html#mindspore.dataset.DistributedSampler) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/DistributedSampler.html) |
-| [torch.utils.data.RandomSampler](https://pytorch.org/docs/1.8.1/data.html#torch.utils.data.RandomSampler) | [mindspore.dataset.RandomSampler](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.RandomSampler.html#mindspore.dataset.RandomSampler) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/RandomSampler.html) |
-| [torch.utils.data.SequentialSampler](https://pytorch.org/docs/1.8.1/data.html#torch.utils.data.SequentialSampler) | [mindspore.dataset.SequentialSampler](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.SequentialSampler.html#mindspore.dataset.SequentialSampler) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/SequentialSampler.html) |
-|
[torch.utils.data.SubsetRandomSampler](https://pytorch.org/docs/1.8.1/data.html#torch.utils.data.SubsetRandomSampler) | [mindspore.dataset.SubsetRandomSampler](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.SubsetRandomSampler.html#mindspore.dataset.SubsetRandomSampler) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/SubsetRandomSampler.html) | -| [torch.utils.data.WeightedRandomSampler](https://pytorch.org/docs/1.8.1/data.html#torch.utils.data.WeightedRandomSampler) | [mindspore.dataset.WeightedRandomSampler](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.WeightedRandomSampler.html#mindspore.dataset.WeightedRandomSampler) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/WeightedRandomSampler.html) | | -| [torch.utils.checkpoint.checkpoint](https://pytorch.org/docs/1.8.1/checkpoint.html#torch.utils.checkpoint.checkpoint) | [mindspore.nn.Cell.recompute](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.recompute) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/checkpoint.html) | +| [torch.utils.data.DataLoader](https://pytorch.org/docs/1.8.1/data.html#torch.utils.data.DataLoader) | [mindspore.dataset.GeneratorDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.GeneratorDataset.html#mindspore.dataset.GeneratorDataset) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/DataLoader.html) | +| [torch.utils.data.distributed.DistributedSampler](https://pytorch.org/docs/1.8.1/data.html#torch.utils.data.distributed.DistributedSampler) | [mindspore.dataset.DistributedSampler](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.DistributedSampler.html#mindspore.dataset.DistributedSampler) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/DistributedSampler.html) | +| [torch.utils.data.RandomSampler](https://pytorch.org/docs/1.8.1/data.html#torch.utils.data.RandomSampler) | [mindspore.dataset.RandomSampler](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.RandomSampler.html#mindspore.dataset.RandomSampler) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/RandomSampler.html) | +| [torch.utils.data.SequentialSampler](https://pytorch.org/docs/1.8.1/data.html#torch.utils.data.SequentialSampler) | [mindspore.dataset.SequentialSampler](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.SequentialSampler.html#mindspore.dataset.SequentialSampler) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/SequentialSampler.html) | +| [torch.utils.data.SubsetRandomSampler](https://pytorch.org/docs/1.8.1/data.html#torch.utils.data.SubsetRandomSampler) | [mindspore.dataset.SubsetRandomSampler](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.SubsetRandomSampler.html#mindspore.dataset.SubsetRandomSampler) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/SubsetRandomSampler.html) | +| [torch.utils.data.WeightedRandomSampler](https://pytorch.org/docs/1.8.1/data.html#torch.utils.data.WeightedRandomSampler) | [mindspore.dataset.WeightedRandomSampler](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.WeightedRandomSampler.html#mindspore.dataset.WeightedRandomSampler) | 
[diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/WeightedRandomSampler.html) | |
+| [torch.utils.checkpoint.checkpoint](https://pytorch.org/docs/1.8.1/checkpoint.html#torch.utils.checkpoint.checkpoint) | [mindspore.nn.Cell.recompute](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.recompute) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/checkpoint.html) |
## torchaudio
@@ -523,33 +523,33 @@ Because of the framework mechanism, MindSpore does not provide the following par
| TorchAudio 0.8.1 APIs | MindSpore APIs | Descriptions |
| ----------------------- | ------------------------- | ------------------ |
-| [torchaudio.datasets.CMUARCTIC](https://pytorch.org/audio/0.8.0/datasets.html#cmuarctic) | [mindspore.dataset.CMUArcticDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.CMUArcticDataset.html#mindspore.dataset.CMUArcticDataset) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/CMUARCTIC.html) |
-| [torchaudio.datasets.GTZAN](https://pytorch.org/audio/0.8.0/datasets.html#gtzan) | [mindspore.dataset.GTZANDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.GTZANDataset.html#mindspore.dataset.GTZANDataset) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/GTZAN.html) |
-| [torchaudio.datasets.LIBRITTS](https://pytorch.org/audio/0.8.0/datasets.html#libritts) | [mindspore.dataset.LibriTTSDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.LibriTTSDataset.html#mindspore.dataset.LibriTTSDataset) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/LIBRITTS.html) |
-| [torchaudio.datasets.LJSPEECH](https://pytorch.org/audio/0.8.0/datasets.html#ljspeech) | [mindspore.dataset.LJSpeechDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.LJSpeechDataset.html#mindspore.dataset.LJSpeechDataset) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/LJSPEECH.html) |
-| [torchaudio.datasets.SPEECHCOMMANDS](https://pytorch.org/audio/0.8.0/datasets.html#speechcommands) | [mindspore.dataset.SpeechCommandsDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.SpeechCommandsDataset.html#mindspore.dataset.SpeechCommandsDataset) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/SPEECHCOMMANDS.html) |
-| [torchaudio.datasets.TEDLIUM](https://pytorch.org/audio/0.8.0/datasets.html#tedlium) | [mindspore.dataset.TedliumDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.TedliumDataset.html#mindspore.dataset.TedliumDataset) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/TEDLIUM.html) |
-| [torchaudio.datasets.YESNO](https://pytorch.org/audio/0.8.0/datasets.html#yesno) | [mindspore.dataset.YesNoDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.YesNoDataset.html#mindspore.dataset.YesNoDataset) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/YESNO.html) |
-| [torchaudio.transforms.AmplitudeToDB](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.AmplitudeToDB.html) | [mindspore.dataset.audio.AmplitudeToDB](https://mindspore.cn/docs/en/master/api_python/dataset_audio/mindspore.dataset.audio.AmplitudeToDB.html#mindspore.dataset.audio.AmplitudeToDB) |
[diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/AmplitudeToDB.html) | -| [torchaudio.transforms.ComplexNorm](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.ComplexNorm.html) | [mindspore.dataset.audio.ComplexNorm](https://mindspore.cn/docs/en/master/api_python/dataset_audio/mindspore.dataset.audio.ComplexNorm.html#mindspore.dataset.audio.ComplexNorm) | Consistent | -| [torchaudio.transforms.ComputeDeltas](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.ComputeDeltas.html) | [mindspore.dataset.audio.ComputeDeltas](https://mindspore.cn/docs/en/master/api_python/dataset_audio/mindspore.dataset.audio.ComputeDeltas.html#mindspore.dataset.audio.ComputeDeltas) | The functions are consistent, but the parameter names are inconsistent. | -| [torchaudio.transforms.Fade](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.Fade.html) | [mindspore.dataset.audio.Fade](https://mindspore.cn/docs/en/master/api_python/dataset_audio/mindspore.dataset.audio.Fade.html#mindspore.dataset.audio.Fade) | Consistent | -| [torchaudio.transforms.FrequencyMasking](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.FrequencyMasking.html) | [mindspore.dataset.audio.FrequencyMasking](https://mindspore.cn/docs/en/master/api_python/dataset_audio/mindspore.dataset.audio.FrequencyMasking.html#mindspore.dataset.audio.FrequencyMasking) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/FrequencyMasking.html) | -| [torchaudio.transforms.GriffinLim](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.GriffinLim.html) | [mindspore.dataset.audio.GriffinLim](https://mindspore.cn/docs/en/master/api_python/dataset_audio/mindspore.dataset.audio.GriffinLim.html#mindspore.dataset.audio.GriffinLim) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/GriffinLim.html) | -| [torchaudio.transforms.InverseMelScale](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.InverseMelScale.html) | [mindspore.dataset.audio.InverseMelScale](https://mindspore.cn/docs/en/master/api_python/dataset_audio/mindspore.dataset.audio.InverseMelScale.html#mindspore.dataset.audio.InverseMelScale) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/InverseMelScale.html) | -| [torchaudio.transforms.MelScale](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.MelScale.html) | [mindspore.dataset.audio.MelScale](https://mindspore.cn/docs/en/master/api_python/dataset_audio/mindspore.dataset.audio.MelScale.html#mindspore.dataset.audio.MelScale) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/MelScale.html) | -| [torchaudio.transforms.MelSpectrogram](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.MelSpectrogram.html) | [mindspore.dataset.audio.MelSpectrogram](https://mindspore.cn/docs/en/master/api_python/dataset_audio/mindspore.dataset.audio.MelSpectrogram.html#mindspore.dataset.audio.MelSpectrogram) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/MelSpectrogram.html) | -| [torchaudio.transforms.MFCC](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.MFCC.html) | [mindspore.dataset.audio.MFCC](https://mindspore.cn/docs/en/master/api_python/dataset_audio/mindspore.dataset.audio.MFCC.html#mindspore.dataset.audio.MFCC) | Consistent | -| 
[torchaudio.transforms.MuLawEncoding](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.MuLawEncoding.html) | [mindspore.dataset.audio.MuLawEncoding](https://mindspore.cn/docs/en/master/api_python/dataset_audio/mindspore.dataset.audio.MuLawEncoding.html#mindspore.dataset.audio.MuLawEncoding) | Consistent | -| [torchaudio.transforms.MuLawDecoding](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.MuLawDecoding.html) | [mindspore.dataset.audio.MuLawDecoding](https://mindspore.cn/docs/en/master/api_python/dataset_audio/mindspore.dataset.audio.MuLawDecoding.html#mindspore.dataset.audio.MuLawDecoding) | Consistent | -| [torchaudio.transforms.Resample](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.Resample.html) | [mindspore.dataset.audio.Resample](https://mindspore.cn/docs/en/master/api_python/dataset_audio/mindspore.dataset.audio.Resample.html#mindspore.dataset.audio.Resample) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/Resample.html) | -| [torchaudio.transforms.SlidingWindowCmn](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.SlidingWindowCmn.html) | [mindspore.dataset.audio.SlidingWindowCmn](https://mindspore.cn/docs/en/master/api_python/dataset_audio/mindspore.dataset.audio.SlidingWindowCmn.html#mindspore.dataset.audio.SlidingWindowCmn) | Consistent | -| [torchaudio.transforms.SpectralCentroid](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.SpectralCentroid.html) | [mindspore.dataset.audio.SpectralCentroid](https://mindspore.cn/docs/en/master/api_python/dataset_audio/mindspore.dataset.audio.SpectralCentroid.html#mindspore.dataset.audio.SpectralCentroid) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/SpectralCentroid.html) | -| [torchaudio.transforms.Spectrogram](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.Spectrogram.html) | [mindspore.dataset.audio.Spectrogram](https://mindspore.cn/docs/en/master/api_python/dataset_audio/mindspore.dataset.audio.Spectrogram.html#mindspore.dataset.audio.Spectrogram) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/Spectrogram.html) | -| [torchaudio.transforms.TimeMasking](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.TimeMasking.html) | [mindspore.dataset.audio.TimeMasking](https://mindspore.cn/docs/en/master/api_python/dataset_audio/mindspore.dataset.audio.TimeMasking.html#mindspore.dataset.audio.TimeMasking) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/TimeMasking.html) | -| [torchaudio.transforms.TimeStretch](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.TimeStretch.html) | [mindspore.dataset.audio.TimeStretch](https://mindspore.cn/docs/en/master/api_python/dataset_audio/mindspore.dataset.audio.TimeStretch.html#mindspore.dataset.audio.TimeStretch) | Consistent | -| [torchaudio.transforms.Vad](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.Vad.html) | [mindspore.dataset.audio.Vad](https://mindspore.cn/docs/en/master/api_python/dataset_audio/mindspore.dataset.audio.Vad.html#mindspore.dataset.audio.Vad) | Consistent | -| [torchaudio.transforms.Vol](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.Vol.html) | [mindspore.dataset.audio.Vol](https://mindspore.cn/docs/en/master/api_python/dataset_audio/mindspore.dataset.audio.Vol.html#mindspore.dataset.audio.Vol) | Consistent | +| 
[torchaudio.datasets.CMUARCTIC](https://pytorch.org/audio/0.8.0/datasets.html#cmuarctic) | [mindspore.dataset.CMUArcticDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.CMUArcticDataset.html#mindspore.dataset.CMUArcticDataset) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/CMUARCTIC.html) | +| [torchaudio.datasets.GTZAN](https://pytorch.org/audio/0.8.0/datasets.html#gtzan) | [mindspore.dataset.GTZANDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.GTZANDataset.html#mindspore.dataset.GTZANDataset) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/GTZAN.html) | +| [torchaudio.datasets.LIBRITTS](https://pytorch.org/audio/0.8.0/datasets.html#libritts) | [mindspore.dataset.LibriTTSDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.LibriTTSDataset.html#mindspore.dataset.LibriTTSDataset) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/LIBRITTS.html) | +| [torchaudio.datasets.LJSPEECH](https://pytorch.org/audio/0.8.0/datasets.html#ljspeech) | [mindspore.dataset.LJSpeechDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.LJSpeechDataset.html#mindspore.dataset.LJSpeechDataset) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/LJSPEECH.html) | +| [torchaudio.datasets.SPEECHCOMMANDS](https://pytorch.org/audio/0.8.0/datasets.html#speechcommands) | [mindspore.dataset.SpeechCommandsDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.SpeechCommandsDataset.html#mindspore.dataset.SpeechCommandsDataset) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/SPEECHCOMMANDS.html) | +| [torchaudio.datasets.TEDLIUM](https://pytorch.org/audio/0.8.0/datasets.html#tedlium) | [mindspore.dataset.TedliumDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.TedliumDataset.html#mindspore.dataset.TedliumDataset) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/TEDLIUM.html) | +| [torchaudio.datasets.YESNO](https://pytorch.org/audio/0.8.0/datasets.html#yesno) | [mindspore.dataset.YesNoDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.YesNoDataset.html#mindspore.dataset.YesNoDataset) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/YESNO.html) | +| [torchaudio.transforms.AmplitudeToDB](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.AmplitudeToDB.html) | [mindspore.dataset.audio.AmplitudeToDB](https://mindspore.cn/docs/en/br_base/api_python/dataset_audio/mindspore.dataset.audio.AmplitudeToDB.html#mindspore.dataset.audio.AmplitudeToDB) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/AmplitudeToDB.html) | +| [torchaudio.transforms.ComplexNorm](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.ComplexNorm.html) | [mindspore.dataset.audio.ComplexNorm](https://mindspore.cn/docs/en/br_base/api_python/dataset_audio/mindspore.dataset.audio.ComplexNorm.html#mindspore.dataset.audio.ComplexNorm) | Consistent | +| [torchaudio.transforms.ComputeDeltas](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.ComputeDeltas.html) | [mindspore.dataset.audio.ComputeDeltas](https://mindspore.cn/docs/en/br_base/api_python/dataset_audio/mindspore.dataset.audio.ComputeDeltas.html#mindspore.dataset.audio.ComputeDeltas) | The 
functions are consistent, but the parameter names are inconsistent. | +| [torchaudio.transforms.Fade](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.Fade.html) | [mindspore.dataset.audio.Fade](https://mindspore.cn/docs/en/br_base/api_python/dataset_audio/mindspore.dataset.audio.Fade.html#mindspore.dataset.audio.Fade) | Consistent | +| [torchaudio.transforms.FrequencyMasking](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.FrequencyMasking.html) | [mindspore.dataset.audio.FrequencyMasking](https://mindspore.cn/docs/en/br_base/api_python/dataset_audio/mindspore.dataset.audio.FrequencyMasking.html#mindspore.dataset.audio.FrequencyMasking) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/FrequencyMasking.html) | +| [torchaudio.transforms.GriffinLim](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.GriffinLim.html) | [mindspore.dataset.audio.GriffinLim](https://mindspore.cn/docs/en/br_base/api_python/dataset_audio/mindspore.dataset.audio.GriffinLim.html#mindspore.dataset.audio.GriffinLim) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/GriffinLim.html) | +| [torchaudio.transforms.InverseMelScale](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.InverseMelScale.html) | [mindspore.dataset.audio.InverseMelScale](https://mindspore.cn/docs/en/br_base/api_python/dataset_audio/mindspore.dataset.audio.InverseMelScale.html#mindspore.dataset.audio.InverseMelScale) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/InverseMelScale.html) | +| [torchaudio.transforms.MelScale](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.MelScale.html) | [mindspore.dataset.audio.MelScale](https://mindspore.cn/docs/en/br_base/api_python/dataset_audio/mindspore.dataset.audio.MelScale.html#mindspore.dataset.audio.MelScale) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/MelScale.html) | +| [torchaudio.transforms.MelSpectrogram](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.MelSpectrogram.html) | [mindspore.dataset.audio.MelSpectrogram](https://mindspore.cn/docs/en/br_base/api_python/dataset_audio/mindspore.dataset.audio.MelSpectrogram.html#mindspore.dataset.audio.MelSpectrogram) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/MelSpectrogram.html) | +| [torchaudio.transforms.MFCC](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.MFCC.html) | [mindspore.dataset.audio.MFCC](https://mindspore.cn/docs/en/br_base/api_python/dataset_audio/mindspore.dataset.audio.MFCC.html#mindspore.dataset.audio.MFCC) | Consistent | +| [torchaudio.transforms.MuLawEncoding](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.MuLawEncoding.html) | [mindspore.dataset.audio.MuLawEncoding](https://mindspore.cn/docs/en/br_base/api_python/dataset_audio/mindspore.dataset.audio.MuLawEncoding.html#mindspore.dataset.audio.MuLawEncoding) | Consistent | +| [torchaudio.transforms.MuLawDecoding](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.MuLawDecoding.html) | [mindspore.dataset.audio.MuLawDecoding](https://mindspore.cn/docs/en/br_base/api_python/dataset_audio/mindspore.dataset.audio.MuLawDecoding.html#mindspore.dataset.audio.MuLawDecoding) | Consistent | +| [torchaudio.transforms.Resample](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.Resample.html) | 
[mindspore.dataset.audio.Resample](https://mindspore.cn/docs/en/br_base/api_python/dataset_audio/mindspore.dataset.audio.Resample.html#mindspore.dataset.audio.Resample) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/Resample.html) | +| [torchaudio.transforms.SlidingWindowCmn](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.SlidingWindowCmn.html) | [mindspore.dataset.audio.SlidingWindowCmn](https://mindspore.cn/docs/en/br_base/api_python/dataset_audio/mindspore.dataset.audio.SlidingWindowCmn.html#mindspore.dataset.audio.SlidingWindowCmn) | Consistent | +| [torchaudio.transforms.SpectralCentroid](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.SpectralCentroid.html) | [mindspore.dataset.audio.SpectralCentroid](https://mindspore.cn/docs/en/br_base/api_python/dataset_audio/mindspore.dataset.audio.SpectralCentroid.html#mindspore.dataset.audio.SpectralCentroid) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/SpectralCentroid.html) | +| [torchaudio.transforms.Spectrogram](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.Spectrogram.html) | [mindspore.dataset.audio.Spectrogram](https://mindspore.cn/docs/en/br_base/api_python/dataset_audio/mindspore.dataset.audio.Spectrogram.html#mindspore.dataset.audio.Spectrogram) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/Spectrogram.html) | +| [torchaudio.transforms.TimeMasking](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.TimeMasking.html) | [mindspore.dataset.audio.TimeMasking](https://mindspore.cn/docs/en/br_base/api_python/dataset_audio/mindspore.dataset.audio.TimeMasking.html#mindspore.dataset.audio.TimeMasking) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/TimeMasking.html) | +| [torchaudio.transforms.TimeStretch](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.TimeStretch.html) | [mindspore.dataset.audio.TimeStretch](https://mindspore.cn/docs/en/br_base/api_python/dataset_audio/mindspore.dataset.audio.TimeStretch.html#mindspore.dataset.audio.TimeStretch) | Consistent | +| [torchaudio.transforms.Vad](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.Vad.html) | [mindspore.dataset.audio.Vad](https://mindspore.cn/docs/en/br_base/api_python/dataset_audio/mindspore.dataset.audio.Vad.html#mindspore.dataset.audio.Vad) | Consistent | +| [torchaudio.transforms.Vol](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.Vol.html) | [mindspore.dataset.audio.Vol](https://mindspore.cn/docs/en/br_base/api_python/dataset_audio/mindspore.dataset.audio.Vol.html#mindspore.dataset.audio.Vol) | Consistent | ## torchtext @@ -557,31 +557,31 @@ Because of the framework mechanism, MindSpore does not provide the following par | TorchText 0.9.1 APIs | MindSpore APIs | Descriptions | | ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | -| [torchtext.data.functional.custom_replace](https://pytorch.org/text/0.9.0/data_functional.html#custom-replace) | [mindspore.dataset.text.RegexReplace](https://mindspore.cn/docs/en/master/api_python/dataset_text/mindspore.dataset.text.RegexReplace.html#mindspore.dataset.text.RegexReplace) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/RegexReplace.html) | -| 
[torchtext.data.functional.load_sp_model](https://pytorch.org/text/0.9.0/data_functional.html#load-sp-model) | [mindspore.dataset.text.SentencePieceTokenizer](https://www.mindspore.cn/docs/en/master/api_python/dataset_text/mindspore.dataset.text.SentencePieceTokenizer.html#mindspore.dataset.text.SentencePieceTokenizer) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/load_sp_model.html) | -| [torchtext.data.functional.numericalize_tokens_from_iterator](https://pytorch.org/text/0.9.0/data_functional.html#numericalize-tokens-from-iterator) | [mindspore.dataset.text.Lookup](https://mindspore.cn/docs/en/master/api_python/dataset_text/mindspore.dataset.text.Lookup.html#mindspore.dataset.text.Lookup) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/Lookup.html) | -| [torchtext.data.functional.sentencepiece_numericalizer](https://pytorch.org/text/0.9.0/data_functional.html#sentencepiece-numericalizer) | [mindspore.dataset.text.SentencePieceTokenizer](https://mindspore.cn/docs/en/master/api_python/dataset_text/mindspore.dataset.text.SentencePieceTokenizer.html#mindspore.dataset.text.SentencePieceTokenizer) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/SentencePieceTokenizer_Out_INT.html) | -| [torchtext.data.functional.sentencepiece_tokenizer](https://pytorch.org/text/0.9.0/data_functional.html#sentencepiece-tokenizer) | [mindspore.dataset.text.SentencePieceTokenizer](https://mindspore.cn/docs/en/master/api_python/dataset_text/mindspore.dataset.text.SentencePieceTokenizer.html#mindspore.dataset.text.SentencePieceTokenizer) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/SentencePieceTokenizer_Out_STRING.html) | -| [torchtext.data.functional.simple_space_split](https://pytorch.org/text/0.9.0/data_functional.html#simple-space-split) | [mindspore.dataset.text.WhitespaceTokenizer](https://mindspore.cn/docs/en/master/api_python/dataset_text/mindspore.dataset.text.WhitespaceTokenizer.html#mindspore.dataset.text.WhitespaceTokenizer) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/WhitespaceTokenizer.html) | -| [torchtext.data.utils.ngrams_iterator](https://pytorch.org/text/0.9.0/data_utils.html#ngrams-iterator) | [mindspore.dataset.text.Ngram](https://mindspore.cn/docs/en/master/api_python/dataset_text/mindspore.dataset.text.Ngram.html#mindspore.dataset.text.Ngram) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/Ngram.html) | -| [torchtext.datasets.AG_NEWS](https://pytorch.org/text/0.9.0/datasets.html#ag-news) | [mindspore.dataset.AGNewsDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.AGNewsDataset.html#mindspore.dataset.AGNewsDataset) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/AGNEWS.html) | -| [torchtext.datasets.AmazonReviewFull](https://pytorch.org/text/0.9.0/datasets.html#amazonreviewfull) | [mindspore.dataset.AmazonReviewDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.AmazonReviewDataset.html#mindspore.dataset.AmazonReviewDataset) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/AmazonReviewFull.html) | -| [torchtext.datasets.AmazonReviewPolarity](https://pytorch.org/text/0.9.0/datasets.html#amazonreviewpolarity) | 
[mindspore.dataset.AmazonReviewDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.AmazonReviewDataset.html#mindspore.dataset.AmazonReviewDataset) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/AmazonReviewPolarity.html) | -| [torchtext.datasets.CoNLL2000Chunking](https://pytorch.org/text/0.9.0/datasets.html#conll2000chunking) | [mindspore.dataset.CoNLL2000Dataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.CoNLL2000Dataset.html#mindspore.dataset.CoNLL2000Dataset) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/CoNLL2000Chunking.html) | -| [torchtext.datasets.DBpedia](https://pytorch.org/text/0.9.0/datasets.html#dbpedia) | [mindspore.dataset.DBpediaDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.DBpediaDataset.html#mindspore.dataset.DBpediaDataset) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/DBpedia.html) | -| [torchtext.datasets.IMDB](https://pytorch.org/text/0.9.0/datasets.html#imdb) | [mindspore.dataset.IMDBDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.IMDBDataset.html#mindspore.dataset.IMDBDataset) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/IMDB.html) | -| [torchtext.datasets.IWSLT2016](https://pytorch.org/text/0.9.0/datasets.html#iwslt2016) | [mindspore.dataset.IWSLT2016Dataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.IWSLT2016Dataset.html#mindspore.dataset.IWSLT2016Dataset) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/IWSLT2016.html) | -| [torchtext.datasets.IWSLT2017](https://pytorch.org/text/0.9.0/datasets.html#iwslt2017) | [mindspore.dataset.IWSLT2017Dataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.IWSLT2017Dataset.html#mindspore.dataset.IWSLT2017Dataset) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/IWSLT2017.html) | -| [torchtext.datasets.PennTreebank](https://pytorch.org/text/0.9.0/datasets.html#penntreebank) | [mindspore.dataset.PennTreebankDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.PennTreebankDataset.html#mindspore.dataset.PennTreebankDataset) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/PennTreebank.html) | -| [torchtext.datasets.SogouNews](https://pytorch.org/text/0.9.0/datasets.html#sogounews) | [mindspore.dataset.SogouNewsDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.SogouNewsDataset.html#mindspore.dataset.SogouNewsDataset) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/SogouNews.html) | -| [torchtext.datasets.SQuAD1](https://pytorch.org/text/0.9.0/datasets.html#torchtext.datasets.SQuAD1) | [mindspore.dataset.SQuADDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.SQuADDataset.html#mindspore.dataset.SQuADDataset) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/SQuAD1.html) | -| [torchtext.datasets.SQuAD2](https://pytorch.org/text/0.9.0/datasets.html#torchtext.datasets.SQuAD2) | [mindspore.dataset.SQuADDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.SQuADDataset.html#mindspore.dataset.SQuADDataset) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/SQuAD2.html) | -| 
[torchtext.datasets.UDPOS](https://pytorch.org/text/0.9.0/datasets.html#udpos) | [mindspore.dataset.UDPOSDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.UDPOSDataset.html#mindspore.dataset.UDPOSDataset) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/UDPOS.html) | -| [torchtext.datasets.WikiText103](https://pytorch.org/text/0.9.0/datasets.html#wikitext103) | [mindspore.dataset.WikiTextDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.WikiTextDataset.html#mindspore.dataset.WikiTextDataset) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/WikiText103.html) | -| [torchtext.datasets.WikiText2](https://pytorch.org/text/0.9.0/datasets.html#wikitext-2) | [mindspore.dataset.WikiTextDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.WikiTextDataset.html#mindspore.dataset.WikiTextDataset) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/WikiText2.html) | -| [torchtext.datasets.YahooAnswers](https://pytorch.org/text/0.9.0/datasets.html#yahooanswers) | [mindspore.dataset.YahooAnswersDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.YahooAnswersDataset.html#mindspore.dataset.YahooAnswersDataset) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/YahooAnswers.html) | -| [torchtext.datasets.YelpReviewFull](https://pytorch.org/text/0.9.0/datasets.html#yelpreviewfull) | [mindspore.dataset.YelpReviewDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.YelpReviewDataset.html#mindspore.dataset.YelpReviewDataset) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/YelpReviewFull.html) | -| [torchtext.datasets.YelpReviewPolarity](https://pytorch.org/text/0.9.0/datasets.html#yelpreviewpolarity) | [mindspore.dataset.YelpReviewDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.YelpReviewDataset.html#mindspore.dataset.YelpReviewDataset) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/YelpReviewPolarity.html) | +| [torchtext.data.functional.custom_replace](https://pytorch.org/text/0.9.0/data_functional.html#custom-replace) | [mindspore.dataset.text.RegexReplace](https://mindspore.cn/docs/en/br_base/api_python/dataset_text/mindspore.dataset.text.RegexReplace.html#mindspore.dataset.text.RegexReplace) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/RegexReplace.html) | +| [torchtext.data.functional.load_sp_model](https://pytorch.org/text/0.9.0/data_functional.html#load-sp-model) | [mindspore.dataset.text.SentencePieceTokenizer](https://www.mindspore.cn/docs/en/br_base/api_python/dataset_text/mindspore.dataset.text.SentencePieceTokenizer.html#mindspore.dataset.text.SentencePieceTokenizer) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/load_sp_model.html) | +| [torchtext.data.functional.numericalize_tokens_from_iterator](https://pytorch.org/text/0.9.0/data_functional.html#numericalize-tokens-from-iterator) | [mindspore.dataset.text.Lookup](https://mindspore.cn/docs/en/br_base/api_python/dataset_text/mindspore.dataset.text.Lookup.html#mindspore.dataset.text.Lookup) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/Lookup.html) | +| [torchtext.data.functional.sentencepiece_numericalizer](https://pytorch.org/text/0.9.0/data_functional.html#sentencepiece-numericalizer) | 
[mindspore.dataset.text.SentencePieceTokenizer](https://mindspore.cn/docs/en/br_base/api_python/dataset_text/mindspore.dataset.text.SentencePieceTokenizer.html#mindspore.dataset.text.SentencePieceTokenizer) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/SentencePieceTokenizer_Out_INT.html) | +| [torchtext.data.functional.sentencepiece_tokenizer](https://pytorch.org/text/0.9.0/data_functional.html#sentencepiece-tokenizer) | [mindspore.dataset.text.SentencePieceTokenizer](https://mindspore.cn/docs/en/br_base/api_python/dataset_text/mindspore.dataset.text.SentencePieceTokenizer.html#mindspore.dataset.text.SentencePieceTokenizer) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/SentencePieceTokenizer_Out_STRING.html) | +| [torchtext.data.functional.simple_space_split](https://pytorch.org/text/0.9.0/data_functional.html#simple-space-split) | [mindspore.dataset.text.WhitespaceTokenizer](https://mindspore.cn/docs/en/br_base/api_python/dataset_text/mindspore.dataset.text.WhitespaceTokenizer.html#mindspore.dataset.text.WhitespaceTokenizer) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/WhitespaceTokenizer.html) | +| [torchtext.data.utils.ngrams_iterator](https://pytorch.org/text/0.9.0/data_utils.html#ngrams-iterator) | [mindspore.dataset.text.Ngram](https://mindspore.cn/docs/en/br_base/api_python/dataset_text/mindspore.dataset.text.Ngram.html#mindspore.dataset.text.Ngram) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/Ngram.html) | +| [torchtext.datasets.AG_NEWS](https://pytorch.org/text/0.9.0/datasets.html#ag-news) | [mindspore.dataset.AGNewsDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.AGNewsDataset.html#mindspore.dataset.AGNewsDataset) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/AGNEWS.html) | +| [torchtext.datasets.AmazonReviewFull](https://pytorch.org/text/0.9.0/datasets.html#amazonreviewfull) | [mindspore.dataset.AmazonReviewDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.AmazonReviewDataset.html#mindspore.dataset.AmazonReviewDataset) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/AmazonReviewFull.html) | +| [torchtext.datasets.AmazonReviewPolarity](https://pytorch.org/text/0.9.0/datasets.html#amazonreviewpolarity) | [mindspore.dataset.AmazonReviewDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.AmazonReviewDataset.html#mindspore.dataset.AmazonReviewDataset) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/AmazonReviewPolarity.html) | +| [torchtext.datasets.CoNLL2000Chunking](https://pytorch.org/text/0.9.0/datasets.html#conll2000chunking) | [mindspore.dataset.CoNLL2000Dataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.CoNLL2000Dataset.html#mindspore.dataset.CoNLL2000Dataset) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/CoNLL2000Chunking.html) | +| [torchtext.datasets.DBpedia](https://pytorch.org/text/0.9.0/datasets.html#dbpedia) | [mindspore.dataset.DBpediaDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.DBpediaDataset.html#mindspore.dataset.DBpediaDataset) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/DBpedia.html) | +| [torchtext.datasets.IMDB](https://pytorch.org/text/0.9.0/datasets.html#imdb) | 
[mindspore.dataset.IMDBDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.IMDBDataset.html#mindspore.dataset.IMDBDataset) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/IMDB.html) | +| [torchtext.datasets.IWSLT2016](https://pytorch.org/text/0.9.0/datasets.html#iwslt2016) | [mindspore.dataset.IWSLT2016Dataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.IWSLT2016Dataset.html#mindspore.dataset.IWSLT2016Dataset) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/IWSLT2016.html) | +| [torchtext.datasets.IWSLT2017](https://pytorch.org/text/0.9.0/datasets.html#iwslt2017) | [mindspore.dataset.IWSLT2017Dataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.IWSLT2017Dataset.html#mindspore.dataset.IWSLT2017Dataset) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/IWSLT2017.html) | +| [torchtext.datasets.PennTreebank](https://pytorch.org/text/0.9.0/datasets.html#penntreebank) | [mindspore.dataset.PennTreebankDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.PennTreebankDataset.html#mindspore.dataset.PennTreebankDataset) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/PennTreebank.html) | +| [torchtext.datasets.SogouNews](https://pytorch.org/text/0.9.0/datasets.html#sogounews) | [mindspore.dataset.SogouNewsDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.SogouNewsDataset.html#mindspore.dataset.SogouNewsDataset) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/SogouNews.html) | +| [torchtext.datasets.SQuAD1](https://pytorch.org/text/0.9.0/datasets.html#torchtext.datasets.SQuAD1) | [mindspore.dataset.SQuADDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.SQuADDataset.html#mindspore.dataset.SQuADDataset) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/SQuAD1.html) | +| [torchtext.datasets.SQuAD2](https://pytorch.org/text/0.9.0/datasets.html#torchtext.datasets.SQuAD2) | [mindspore.dataset.SQuADDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.SQuADDataset.html#mindspore.dataset.SQuADDataset) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/SQuAD2.html) | +| [torchtext.datasets.UDPOS](https://pytorch.org/text/0.9.0/datasets.html#udpos) | [mindspore.dataset.UDPOSDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.UDPOSDataset.html#mindspore.dataset.UDPOSDataset) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/UDPOS.html) | +| [torchtext.datasets.WikiText103](https://pytorch.org/text/0.9.0/datasets.html#wikitext103) | [mindspore.dataset.WikiTextDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.WikiTextDataset.html#mindspore.dataset.WikiTextDataset) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/WikiText103.html) | +| [torchtext.datasets.WikiText2](https://pytorch.org/text/0.9.0/datasets.html#wikitext-2) | [mindspore.dataset.WikiTextDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.WikiTextDataset.html#mindspore.dataset.WikiTextDataset) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/WikiText2.html) | +| 
[torchtext.datasets.YahooAnswers](https://pytorch.org/text/0.9.0/datasets.html#yahooanswers) | [mindspore.dataset.YahooAnswersDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.YahooAnswersDataset.html#mindspore.dataset.YahooAnswersDataset) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/YahooAnswers.html) | +| [torchtext.datasets.YelpReviewFull](https://pytorch.org/text/0.9.0/datasets.html#yelpreviewfull) | [mindspore.dataset.YelpReviewDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.YelpReviewDataset.html#mindspore.dataset.YelpReviewDataset) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/YelpReviewFull.html) | +| [torchtext.datasets.YelpReviewPolarity](https://pytorch.org/text/0.9.0/datasets.html#yelpreviewpolarity) | [mindspore.dataset.YelpReviewDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.YelpReviewDataset.html#mindspore.dataset.YelpReviewDataset) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/YelpReviewPolarity.html) | ## torchvision @@ -589,40 +589,40 @@ Because of the framework mechanism, MindSpore does not provide the following par | TorchVision 0.9.1 APIs | MindSpore APIs | Descriptions | | ---------------------------------------- | --------------------------------- | --------------------------------------- | -| [torchvision.datasets.CelebA](https://pytorch.org/vision/0.9/datasets.html#torchvision.datasets.CelebA) | [mindspore.dataset.CelebADataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.CelebADataset.html#mindspore.dataset.CelebADataset) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/CelebA.html) | -| [torchvision.datasets.Cityscapes](https://pytorch.org/vision/0.9/datasets.html#cityscapes) | [mindspore.dataset.CityscapesDataset](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.CityscapesDataset.html) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/Cityscapes.html) | -| [torchvision.datasets.CIFAR10](https://pytorch.org/vision/0.9/datasets.html#torchvision.datasets.CIFAR10) | [mindspore.dataset.Cifar10Dataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.Cifar10Dataset.html#mindspore.dataset.Cifar10Dataset) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/CIFAR10.html) | -| [torchvision.datasets.CIFAR100](https://pytorch.org/vision/0.9/datasets.html#torchvision.datasets.CIFAR100) | [mindspore.dataset.Cifar100Dataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.Cifar100Dataset.html#mindspore.dataset.Cifar100Dataset) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/CIFAR100.html) | -| [torchvision.datasets.CocoDetection](https://pytorch.org/vision/0.9/datasets.html#torchvision.datasets.CocoDetection) | [mindspore.dataset.CocoDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.CocoDataset.html#mindspore.dataset.CocoDataset) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/CocoDataset.html) | -| [torchvision.datasets.ImageFolder](https://pytorch.org/vision/0.9/datasets.html#torchvision.datasets.ImageFolder) | [mindspore.dataset.ImageFolderDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.ImageFolderDataset.html#mindspore.dataset.ImageFolderDataset) | 
[diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/ImageFolder.html) | -| [torchvision.datasets.MNIST](https://pytorch.org/vision/0.9/datasets.html#torchvision.datasets.MNIST) | [mindspore.dataset.MnistDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.MnistDataset.html#mindspore.dataset.MnistDataset) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/MNIST.html) | -| [torchvision.datasets.VOCDetection](https://pytorch.org/vision/0.9/datasets.html#torchvision.datasets.VOCDetection) | [mindspore.dataset.VOCDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.VOCDataset.html#mindspore.dataset.VOCDataset) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/VOCDetection.html) | -| [torchvision.datasets.VOCSegmentation](https://pytorch.org/vision/0.9/datasets.html#torchvision.datasets.VOCSegmentation) | [mindspore.dataset.VOCDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.VOCDataset.html#mindspore.dataset.VOCDataset) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/VOCSegmentation.html) | -| [torchvision.ops.nms](https://pytorch.org/vision/0.9/ops.html#torchvision.ops.nms.html#torchvision.ops.nms) | [mindspore.ops.NMSWithMask](https://mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.NMSWithMask.html#mindspore.ops.NMSWithMask) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/nms.html) | -| [torchvision.ops.roi_align](https://pytorch.org/vision/0.9/ops.html#torchvision.ops.roi_align.html#torchvision.ops.roi_align) | [mindspore.ops.ROIAlign](https://mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ROIAlign.html#mindspore.ops.ROIAlign) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/roi_align.html) | -| [torchvision.transforms.CenterCrop](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.CenterCrop) | [mindspore.dataset.vision.CenterCrop](https://mindspore.cn/docs/en/master/api_python/dataset_vision/mindspore.dataset.vision.CenterCrop.html#mindspore.dataset.vision.CenterCrop) | Consistent | -| [torchvision.transforms.ColorJitter](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.ColorJitter) | [mindspore.dataset.vision.RandomColorAdjust](https://mindspore.cn/docs/en/master/api_python/dataset_vision/mindspore.dataset.vision.RandomColorAdjust.html#mindspore.dataset.vision.RandomColorAdjust) | Consistent | -| [torchvision.transforms.Compose](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.Compose) | [mindspore.dataset.transforms.Compose](https://mindspore.cn/docs/en/master/api_python/dataset_transforms/mindspore.dataset.transforms.Compose.html#mindspore.dataset.transforms.Compose) | Consistent | -| [torchvision.transforms.ConvertImageDtype](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.ConvertImageDtype) | [mindspore.dataset.transforms.TypeCast](https://mindspore.cn/docs/en/master/api_python/dataset_transforms/mindspore.dataset.transforms.TypeCast.html#mindspore.dataset.transforms.TypeCast) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/TypeCast.html) | -| [torchvision.transforms.FiveCrop](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.FiveCrop) | 
[mindspore.dataset.vision.FiveCrop](https://mindspore.cn/docs/en/master/api_python/dataset_vision/mindspore.dataset.vision.FiveCrop.html#mindspore.dataset.vision.FiveCrop) | Consistent | -| [torchvision.transforms.GaussianBlur](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.GaussianBlur) | [mindspore.dataset.vision.GaussianBlur](https://mindspore.cn/docs/en/master/api_python/dataset_vision/mindspore.dataset.vision.GaussianBlur.html#mindspore.dataset.vision.GaussianBlur) | Consistent | -| [torchvision.transforms.Grayscale](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.Grayscale) | [mindspore.dataset.vision.Grayscale](https://mindspore.cn/docs/en/master/api_python/dataset_vision/mindspore.dataset.vision.Grayscale.html#mindspore.dataset.vision.Grayscale) | Consistent | -| [torchvision.transforms.LinearTransformation](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.LinearTransformation) | [mindspore.dataset.vision.LinearTransformation](https://mindspore.cn/docs/en/master/api_python/dataset_vision/mindspore.dataset.vision.LinearTransformation.html#mindspore.dataset.vision.LinearTransformation) | Consistent | -| [torchvision.transforms.Normalize](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.Normalize) | [mindspore.dataset.vision.Normalize](https://mindspore.cn/docs/en/master/api_python/dataset_vision/mindspore.dataset.vision.Normalize.html#mindspore.dataset.vision.Normalize) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/Normalize.html) | -| [torchvision.transforms.Pad](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.Pad) | [mindspore.dataset.vision.Pad](https://mindspore.cn/docs/en/master/api_python/dataset_vision/mindspore.dataset.vision.Pad.html#mindspore.dataset.vision.Pad) | The functions are consistent, but the parameter names are inconsistent. | -| [torchvision.transforms.RandomAffine](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomAffine) | [mindspore.dataset.vision.RandomAffine](https://mindspore.cn/docs/en/master/api_python/dataset_vision/mindspore.dataset.vision.RandomAffine.html#mindspore.dataset.vision.RandomAffine) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/RandomAffine.html) | -| [torchvision.transforms.RandomApply](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomApply) | [mindspore.dataset.transforms.RandomApply](https://mindspore.cn/docs/en/master/api_python/dataset_transforms/mindspore.dataset.transforms.RandomApply.html#mindspore.dataset.transforms.RandomApply) | The functions are consistent, but the parameter names are inconsistent. | -| [torchvision.transforms.RandomChoice](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomChoice) | [mindspore.dataset.transforms.RandomChoice](https://mindspore.cn/docs/en/master/api_python/dataset_transforms/mindspore.dataset.transforms.RandomChoice.html#mindspore.dataset.transforms.RandomChoice) | Consistent | -| [torchvision.transforms.RandomCrop](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomCrop) | [mindspore.dataset.vision.RandomCrop](https://mindspore.cn/docs/en/master/api_python/dataset_vision/mindspore.dataset.vision.RandomCrop.html#mindspore.dataset.vision.RandomCrop) | The functions are consistent, but the parameter names are inconsistent. 
| -| [torchvision.transforms.RandomGrayscale](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomGrayscale) | [mindspore.dataset.vision.RandomGrayscale](https://mindspore.cn/docs/en/master/api_python/dataset_vision/mindspore.dataset.vision.RandomGrayscale.html#mindspore.dataset.vision.RandomGrayscale) | The functions are consistent, but the parameter names are inconsistent. | -| [torchvision.transforms.RandomHorizontalFlip](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomHorizontalFlip) | [mindspore.dataset.vision.RandomHorizontalFlip](https://mindspore.cn/docs/en/master/api_python/dataset_vision/mindspore.dataset.vision.RandomHorizontalFlip.html#mindspore.dataset.vision.RandomHorizontalFlip) | The functions are consistent, but the parameter names are inconsistent. | -| [torchvision.transforms.RandomOrder](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomOrder) | [mindspore.dataset.transforms.RandomOrder](https://mindspore.cn/docs/en/master/api_python/dataset_transforms/mindspore.dataset.transforms.RandomOrder.html#mindspore.dataset.transforms.RandomOrder) | Consistent | -| [torchvision.transforms.RandomPerspective](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomPerspective) | [mindspore.dataset.vision.RandomPerspective](https://mindspore.cn/docs/en/master/api_python/dataset_vision/mindspore.dataset.vision.RandomPerspective.html#mindspore.dataset.vision.RandomPerspective) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/RandomPerspective.html) | -| [torchvision.transforms.RandomResizedCrop](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomResizedCrop) | [mindspore.dataset.vision.RandomResizedCrop](https://mindspore.cn/docs/en/master/api_python/dataset_vision/mindspore.dataset.vision.RandomResizedCrop.html#mindspore.dataset.vision.RandomResizedCrop) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/RandomResizedCrop.html) | -| [torchvision.transforms.RandomRotation](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomRotation) | [mindspore.dataset.vision.RandomRotation](https://mindspore.cn/docs/en/master/api_python/dataset_vision/mindspore.dataset.vision.RandomRotation.html#mindspore.dataset.vision.RandomRotation) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/RandomRotation.html) | -| [torchvision.transforms.RandomVerticalFlip](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomVerticalFlip) | [mindspore.dataset.vision.RandomVerticalFlip](https://mindspore.cn/docs/en/master/api_python/dataset_vision/mindspore.dataset.vision.RandomVerticalFlip.html#mindspore.dataset.vision.RandomVerticalFlip) | The functions are consistent, but the parameter names are inconsistent. | -| [torchvision.transforms.Resize](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.Resize) | [mindspore.dataset.vision.Resize](https://mindspore.cn/docs/en/master/api_python/dataset_vision/mindspore.dataset.vision.Resize.html#mindspore.dataset.vision.Resize) | Consistent | -| [torchvision.transforms.TenCrop](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.TenCrop) | [mindspore.dataset.vision.TenCrop](https://mindspore.cn/docs/en/master/api_python/dataset_vision/mindspore.dataset.vision.TenCrop.html#mindspore.dataset.vision.TenCrop) | The functions are consistent, but the parameter names are inconsistent. 
| -| [torchvision.transforms.ToPILImage](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.ToPILImage) | [mindspore.dataset.vision.ToPIL](https://mindspore.cn/docs/en/master/api_python/dataset_vision/mindspore.dataset.vision.ToPIL.html#mindspore.dataset.vision.ToPIL) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/ToPIL.html) | -| [torchvision.transforms.ToTensor](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.ToTensor) | [mindspore.dataset.vision.ToTensor](https://mindspore.cn/docs/en/master/api_python/dataset_vision/mindspore.dataset.vision.ToTensor.html#mindspore.dataset.vision.ToTensor) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/ToTensor.html) | -| [torchvision.ops.deform_conv2d](https://pytorch.org/vision/main/generated/torchvision.ops.deform_conv2d.html#deform-conv2d) | [mindspore.ops.deformable_conv2d](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.deformable_conv2d.html#mindspore-ops-deformable-conv2d) | [diff](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/deform_conv2d.html) | +| [torchvision.datasets.CelebA](https://pytorch.org/vision/0.9/datasets.html#torchvision.datasets.CelebA) | [mindspore.dataset.CelebADataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.CelebADataset.html#mindspore.dataset.CelebADataset) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/CelebA.html) | +| [torchvision.datasets.Cityscapes](https://pytorch.org/vision/0.9/datasets.html#cityscapes) | [mindspore.dataset.CityscapesDataset](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.CityscapesDataset.html) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/Cityscapes.html) | +| [torchvision.datasets.CIFAR10](https://pytorch.org/vision/0.9/datasets.html#torchvision.datasets.CIFAR10) | [mindspore.dataset.Cifar10Dataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.Cifar10Dataset.html#mindspore.dataset.Cifar10Dataset) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/CIFAR10.html) | +| [torchvision.datasets.CIFAR100](https://pytorch.org/vision/0.9/datasets.html#torchvision.datasets.CIFAR100) | [mindspore.dataset.Cifar100Dataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.Cifar100Dataset.html#mindspore.dataset.Cifar100Dataset) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/CIFAR100.html) | +| [torchvision.datasets.CocoDetection](https://pytorch.org/vision/0.9/datasets.html#torchvision.datasets.CocoDetection) | [mindspore.dataset.CocoDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.CocoDataset.html#mindspore.dataset.CocoDataset) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/CocoDataset.html) | +| [torchvision.datasets.ImageFolder](https://pytorch.org/vision/0.9/datasets.html#torchvision.datasets.ImageFolder) | [mindspore.dataset.ImageFolderDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.ImageFolderDataset.html#mindspore.dataset.ImageFolderDataset) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/ImageFolder.html) | +| [torchvision.datasets.MNIST](https://pytorch.org/vision/0.9/datasets.html#torchvision.datasets.MNIST) | 
[mindspore.dataset.MnistDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.MnistDataset.html#mindspore.dataset.MnistDataset) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/MNIST.html) | +| [torchvision.datasets.VOCDetection](https://pytorch.org/vision/0.9/datasets.html#torchvision.datasets.VOCDetection) | [mindspore.dataset.VOCDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.VOCDataset.html#mindspore.dataset.VOCDataset) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/VOCDetection.html) | +| [torchvision.datasets.VOCSegmentation](https://pytorch.org/vision/0.9/datasets.html#torchvision.datasets.VOCSegmentation) | [mindspore.dataset.VOCDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.VOCDataset.html#mindspore.dataset.VOCDataset) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/VOCSegmentation.html) | +| [torchvision.ops.nms](https://pytorch.org/vision/0.9/ops.html#torchvision.ops.nms) | [mindspore.ops.NMSWithMask](https://mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.NMSWithMask.html#mindspore.ops.NMSWithMask) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/nms.html) | +| [torchvision.ops.roi_align](https://pytorch.org/vision/0.9/ops.html#torchvision.ops.roi_align) | [mindspore.ops.ROIAlign](https://mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ROIAlign.html#mindspore.ops.ROIAlign) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/roi_align.html) | +| [torchvision.transforms.CenterCrop](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.CenterCrop) | [mindspore.dataset.vision.CenterCrop](https://mindspore.cn/docs/en/br_base/api_python/dataset_vision/mindspore.dataset.vision.CenterCrop.html#mindspore.dataset.vision.CenterCrop) | Consistent | +| [torchvision.transforms.ColorJitter](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.ColorJitter) | [mindspore.dataset.vision.RandomColorAdjust](https://mindspore.cn/docs/en/br_base/api_python/dataset_vision/mindspore.dataset.vision.RandomColorAdjust.html#mindspore.dataset.vision.RandomColorAdjust) | Consistent | +| [torchvision.transforms.Compose](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.Compose) | [mindspore.dataset.transforms.Compose](https://mindspore.cn/docs/en/br_base/api_python/dataset_transforms/mindspore.dataset.transforms.Compose.html#mindspore.dataset.transforms.Compose) | Consistent | +| [torchvision.transforms.ConvertImageDtype](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.ConvertImageDtype) | [mindspore.dataset.transforms.TypeCast](https://mindspore.cn/docs/en/br_base/api_python/dataset_transforms/mindspore.dataset.transforms.TypeCast.html#mindspore.dataset.transforms.TypeCast) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/TypeCast.html) | +| [torchvision.transforms.FiveCrop](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.FiveCrop) | [mindspore.dataset.vision.FiveCrop](https://mindspore.cn/docs/en/br_base/api_python/dataset_vision/mindspore.dataset.vision.FiveCrop.html#mindspore.dataset.vision.FiveCrop) | Consistent | +| [torchvision.transforms.GaussianBlur](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.GaussianBlur) | 
[mindspore.dataset.vision.GaussianBlur](https://mindspore.cn/docs/en/br_base/api_python/dataset_vision/mindspore.dataset.vision.GaussianBlur.html#mindspore.dataset.vision.GaussianBlur) | Consistent | +| [torchvision.transforms.Grayscale](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.Grayscale) | [mindspore.dataset.vision.Grayscale](https://mindspore.cn/docs/en/br_base/api_python/dataset_vision/mindspore.dataset.vision.Grayscale.html#mindspore.dataset.vision.Grayscale) | Consistent | +| [torchvision.transforms.LinearTransformation](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.LinearTransformation) | [mindspore.dataset.vision.LinearTransformation](https://mindspore.cn/docs/en/br_base/api_python/dataset_vision/mindspore.dataset.vision.LinearTransformation.html#mindspore.dataset.vision.LinearTransformation) | Consistent | +| [torchvision.transforms.Normalize](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.Normalize) | [mindspore.dataset.vision.Normalize](https://mindspore.cn/docs/en/br_base/api_python/dataset_vision/mindspore.dataset.vision.Normalize.html#mindspore.dataset.vision.Normalize) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/Normalize.html) | +| [torchvision.transforms.Pad](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.Pad) | [mindspore.dataset.vision.Pad](https://mindspore.cn/docs/en/br_base/api_python/dataset_vision/mindspore.dataset.vision.Pad.html#mindspore.dataset.vision.Pad) | The functions are consistent, but the parameter names are inconsistent. | +| [torchvision.transforms.RandomAffine](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomAffine) | [mindspore.dataset.vision.RandomAffine](https://mindspore.cn/docs/en/br_base/api_python/dataset_vision/mindspore.dataset.vision.RandomAffine.html#mindspore.dataset.vision.RandomAffine) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/RandomAffine.html) | +| [torchvision.transforms.RandomApply](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomApply) | [mindspore.dataset.transforms.RandomApply](https://mindspore.cn/docs/en/br_base/api_python/dataset_transforms/mindspore.dataset.transforms.RandomApply.html#mindspore.dataset.transforms.RandomApply) | The functions are consistent, but the parameter names are inconsistent. | +| [torchvision.transforms.RandomChoice](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomChoice) | [mindspore.dataset.transforms.RandomChoice](https://mindspore.cn/docs/en/br_base/api_python/dataset_transforms/mindspore.dataset.transforms.RandomChoice.html#mindspore.dataset.transforms.RandomChoice) | Consistent | +| [torchvision.transforms.RandomCrop](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomCrop) | [mindspore.dataset.vision.RandomCrop](https://mindspore.cn/docs/en/br_base/api_python/dataset_vision/mindspore.dataset.vision.RandomCrop.html#mindspore.dataset.vision.RandomCrop) | The functions are consistent, but the parameter names are inconsistent. | +| [torchvision.transforms.RandomGrayscale](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomGrayscale) | [mindspore.dataset.vision.RandomGrayscale](https://mindspore.cn/docs/en/br_base/api_python/dataset_vision/mindspore.dataset.vision.RandomGrayscale.html#mindspore.dataset.vision.RandomGrayscale) | The functions are consistent, but the parameter names are inconsistent. 
| +| [torchvision.transforms.RandomHorizontalFlip](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomHorizontalFlip) | [mindspore.dataset.vision.RandomHorizontalFlip](https://mindspore.cn/docs/en/br_base/api_python/dataset_vision/mindspore.dataset.vision.RandomHorizontalFlip.html#mindspore.dataset.vision.RandomHorizontalFlip) | The functions are consistent, but the parameter names are inconsistent. | +| [torchvision.transforms.RandomOrder](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomOrder) | [mindspore.dataset.transforms.RandomOrder](https://mindspore.cn/docs/en/br_base/api_python/dataset_transforms/mindspore.dataset.transforms.RandomOrder.html#mindspore.dataset.transforms.RandomOrder) | Consistent | +| [torchvision.transforms.RandomPerspective](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomPerspective) | [mindspore.dataset.vision.RandomPerspective](https://mindspore.cn/docs/en/br_base/api_python/dataset_vision/mindspore.dataset.vision.RandomPerspective.html#mindspore.dataset.vision.RandomPerspective) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/RandomPerspective.html) | +| [torchvision.transforms.RandomResizedCrop](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomResizedCrop) | [mindspore.dataset.vision.RandomResizedCrop](https://mindspore.cn/docs/en/br_base/api_python/dataset_vision/mindspore.dataset.vision.RandomResizedCrop.html#mindspore.dataset.vision.RandomResizedCrop) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/RandomResizedCrop.html) | +| [torchvision.transforms.RandomRotation](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomRotation) | [mindspore.dataset.vision.RandomRotation](https://mindspore.cn/docs/en/br_base/api_python/dataset_vision/mindspore.dataset.vision.RandomRotation.html#mindspore.dataset.vision.RandomRotation) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/RandomRotation.html) | +| [torchvision.transforms.RandomVerticalFlip](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomVerticalFlip) | [mindspore.dataset.vision.RandomVerticalFlip](https://mindspore.cn/docs/en/br_base/api_python/dataset_vision/mindspore.dataset.vision.RandomVerticalFlip.html#mindspore.dataset.vision.RandomVerticalFlip) | The functions are consistent, but the parameter names are inconsistent. | +| [torchvision.transforms.Resize](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.Resize) | [mindspore.dataset.vision.Resize](https://mindspore.cn/docs/en/br_base/api_python/dataset_vision/mindspore.dataset.vision.Resize.html#mindspore.dataset.vision.Resize) | Consistent | +| [torchvision.transforms.TenCrop](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.TenCrop) | [mindspore.dataset.vision.TenCrop](https://mindspore.cn/docs/en/br_base/api_python/dataset_vision/mindspore.dataset.vision.TenCrop.html#mindspore.dataset.vision.TenCrop) | The functions are consistent, but the parameter names are inconsistent. 
| +| [torchvision.transforms.ToPILImage](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.ToPILImage) | [mindspore.dataset.vision.ToPIL](https://mindspore.cn/docs/en/br_base/api_python/dataset_vision/mindspore.dataset.vision.ToPIL.html#mindspore.dataset.vision.ToPIL) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/ToPIL.html) | +| [torchvision.transforms.ToTensor](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.ToTensor) | [mindspore.dataset.vision.ToTensor](https://mindspore.cn/docs/en/br_base/api_python/dataset_vision/mindspore.dataset.vision.ToTensor.html#mindspore.dataset.vision.ToTensor) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/ToTensor.html) | +| [torchvision.ops.deform_conv2d](https://pytorch.org/vision/main/generated/torchvision.ops.deform_conv2d.html#deform-conv2d) | [mindspore.ops.deformable_conv2d](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.deformable_conv2d.html#mindspore-ops-deformable-conv2d) | [diff](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/deform_conv2d.html) | diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/AGNEWS.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/AGNEWS.md index e1f325d6ba..1acead798f 100644 --- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/AGNEWS.md +++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/AGNEWS.md @@ -1,6 +1,6 @@ # Differences with torchtext.datasets.AG_NEWS -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/AGNEWS.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/AGNEWS.md) ## torchtext.datasets.AG_NEWS @@ -26,7 +26,7 @@ class mindspore.dataset.AGNewsDataset( cache=None) ``` -For more information, see [mindspore.dataset.AGNewsDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.AGNewsDataset.html#mindspore.dataset.AGNewsDataset). +For more information, see [mindspore.dataset.AGNewsDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.AGNewsDataset.html#mindspore.dataset.AGNewsDataset). 
## Differences diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/AmazonReviewFull.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/AmazonReviewFull.md index 8d597f0878..0e43e75162 100644 --- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/AmazonReviewFull.md +++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/AmazonReviewFull.md @@ -1,6 +1,6 @@ # Differences with torchtext.datasets.AmazonReviewFull -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/AmazonReviewFull.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/AmazonReviewFull.md) ## torchtext.datasets.AmazonReviewFull @@ -26,7 +26,7 @@ class mindspore.dataset.AmazonReviewDataset( cache=None) ``` -For more information, see [mindspore.dataset.AmazonReviewDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.AmazonReviewDataset.html#mindspore.dataset.AmazonReviewDataset). +For more information, see [mindspore.dataset.AmazonReviewDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.AmazonReviewDataset.html#mindspore.dataset.AmazonReviewDataset). ## Differences diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/AmazonReviewPolarity.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/AmazonReviewPolarity.md index 61dc574aca..2dfbc1c36f 100644 --- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/AmazonReviewPolarity.md +++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/AmazonReviewPolarity.md @@ -1,6 +1,6 @@ # Differences with torchtext.datasets.AmazonReviewPolarity -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/AmazonReviewPolarity.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/AmazonReviewPolarity.md) ## torchtext.datasets.AmazonReviewPolarity @@ -26,7 +26,7 @@ class mindspore.dataset.AmazonReviewDataset( cache=None) ``` -For more information, see [mindspore.dataset.AmazonReviewDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.AmazonReviewDataset.html#mindspore.dataset.AmazonReviewDataset). +For more information, see [mindspore.dataset.AmazonReviewDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.AmazonReviewDataset.html#mindspore.dataset.AmazonReviewDataset). 
 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/AmplitudeToDB.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/AmplitudeToDB.md
index 5275aad4e4..8f7ede468d 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/AmplitudeToDB.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/AmplitudeToDB.md
@@ -1,6 +1,6 @@
 # Differences with torchaudio.transforms.AmplitudeToDB

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/AmplitudeToDB.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/AmplitudeToDB.md)

 ## torchaudio.transforms.AmplitudeToDB
@@ -16,7 +16,7 @@ For more information, see [torchaudio.transforms.AmplitudeToDB](https://pytorch.
 class mindspore.dataset.audio.AmplitudeToDB(stype=ScaleType.POWER, ref_value=1.0, amin=1e-10, top_db=80.0)
 ```

-For more information, see [mindspore.dataset.audio.AmplitudeToDB](https://mindspore.cn/docs/en/master/api_python/dataset_audio/mindspore.dataset.audio.AmplitudeToDB.html#mindspore.dataset.audio.AmplitudeToDB).
+For more information, see [mindspore.dataset.audio.AmplitudeToDB](https://mindspore.cn/docs/en/br_base/api_python/dataset_audio/mindspore.dataset.audio.AmplitudeToDB.html#mindspore.dataset.audio.AmplitudeToDB).

 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/CIFAR10.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/CIFAR10.md
index 41f957b4ef..44eb70b68b 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/CIFAR10.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/CIFAR10.md
@@ -1,6 +1,6 @@
 # Differences with torchvision.datasets.CIFAR10

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/CIFAR10.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/CIFAR10.md)

 ## torchvision.datasets.CIFAR10
@@ -30,7 +30,7 @@ class mindspore.dataset.Cifar10Dataset(
     cache=None)
 ```

-For more information, see [mindspore.dataset.Cifar10Dataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.Cifar10Dataset.html#mindspore.dataset.Cifar10Dataset).
+For more information, see [mindspore.dataset.Cifar10Dataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.Cifar10Dataset.html#mindspore.dataset.Cifar10Dataset).
 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/CIFAR100.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/CIFAR100.md
index 0a1ce795d2..4bfc55c0ee 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/CIFAR100.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/CIFAR100.md
@@ -1,6 +1,6 @@
 # Differences with torchvision.datasets.CIFAR100

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/CIFAR100.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/CIFAR100.md)

 ## torchvision.datasets.CIFAR100
@@ -30,7 +30,7 @@ class mindspore.dataset.Cifar100Dataset(
     cache=None)
 ```

-For more information, see [mindspore.dataset.Cifar100Dataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.Cifar100Dataset.html#mindspore.dataset.Cifar100Dataset).
+For more information, see [mindspore.dataset.Cifar100Dataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.Cifar100Dataset.html#mindspore.dataset.Cifar100Dataset).

 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/CMUARCTIC.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/CMUARCTIC.md
index 5760172c9a..a804c67c50 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/CMUARCTIC.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/CMUARCTIC.md
@@ -1,6 +1,6 @@
 # Differences with torchaudio.datasets.CMUARCTIC

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/CMUARCTIC.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/CMUARCTIC.md)

 ## torchaudio.datasets.CMUARCTIC
@@ -29,7 +29,7 @@ class mindspore.dataset.CMUArcticDataset(
     cache=None)
 ```

-For more information, see [mindspore.dataset.CMUArcticDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.CMUArcticDataset.html#mindspore.dataset.CMUArcticDataset).
+For more information, see [mindspore.dataset.CMUArcticDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.CMUArcticDataset.html#mindspore.dataset.CMUArcticDataset).
 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/CelebA.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/CelebA.md
index ad7f540aaa..925ef83604 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/CelebA.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/CelebA.md
@@ -1,6 +1,6 @@
 # Differences with torchvision.datasets.CelebA

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/CelebA.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/CelebA.md)

 ## torchvision.datasets.CelebA
@@ -34,7 +34,7 @@ class mindspore.dataset.CelebADataset(
     decrypt=None)
 ```

-For more information, see [mindspore.dataset.CelebADataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.CelebADataset.html#mindspore.dataset.CelebADataset).
+For more information, see [mindspore.dataset.CelebADataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.CelebADataset.html#mindspore.dataset.CelebADataset).

 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/Cityscapes.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/Cityscapes.md
index 8309340932..5770c1258f 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/Cityscapes.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/Cityscapes.md
@@ -1,6 +1,6 @@
 # Differences with torchvision.datasets.Cityscapes

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/Cityscapes.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/Cityscapes.md)

 ## torchvision.datasets.Cityscapes
@@ -37,7 +37,7 @@ class mindspore.dataset.CityscapesDataset(
     )
 ```

-For more information, see [mindspore.dataset.CityscapesDataset](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.CityscapesDataset.html).
+For more information, see [mindspore.dataset.CityscapesDataset](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.CityscapesDataset.html).
 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/CoNLL2000Chunking.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/CoNLL2000Chunking.md
index 1501151182..262bb1bace 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/CoNLL2000Chunking.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/CoNLL2000Chunking.md
@@ -1,6 +1,6 @@
 # Differences with torchtext.datasets.CoNLL2000Chunking

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/CoNLL2000Chunking.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/CoNLL2000Chunking.md)

 ## torchtext.datasets.CoNLL2000Chunking
@@ -26,7 +26,7 @@ class mindspore.dataset.CoNLL2000Dataset(
     cache=None)
 ```

-For more information, see [mindspore.dataset.CoNLL2000Dataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.CoNLL2000Dataset.html#mindspore.dataset.CoNLL2000Dataset).
+For more information, see [mindspore.dataset.CoNLL2000Dataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.CoNLL2000Dataset.html#mindspore.dataset.CoNLL2000Dataset).

 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/CocoDataset.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/CocoDataset.md
index 34a3645022..0d0ba66fd9 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/CocoDataset.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/CocoDataset.md
@@ -1,6 +1,6 @@
 # Differences with torch.torchvision.datasets.CocoDetection

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/CocoDataset.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/CocoDataset.md)

 ## torchvision.datasets.CocoDetection
@@ -36,7 +36,7 @@ class mindspore.dataset.CocoDataset(
     )
 ```

-For more information, see [mindspore.dataset.CocoDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.CocoDataset.html#mindspore.dataset.CocoDataset).
+For more information, see [mindspore.dataset.CocoDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.CocoDataset.html#mindspore.dataset.CocoDataset).
 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/DBpedia.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/DBpedia.md
index df4cd65609..3e4ac70462 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/DBpedia.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/DBpedia.md
@@ -1,6 +1,6 @@
 # Differences with torchtext.datasets.DBpedia

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/DBpedia.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/DBpedia.md)

 ## torchtext.datasets.DBpedia
@@ -26,7 +26,7 @@ class mindspore.dataset.DBpediaDataset(
     cache=None)
 ```

-For more information, see [mindspore.dataset.DBpediaDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.DBpediaDataset.html#mindspore.dataset.DBpediaDataset).
+For more information, see [mindspore.dataset.DBpediaDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.DBpediaDataset.html#mindspore.dataset.DBpediaDataset).

 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/DataLoader.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/DataLoader.md
index c27b387a41..43f4fcaa89 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/DataLoader.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/DataLoader.md
@@ -1,6 +1,6 @@
 # Differences with torch.utils.data.DataLoader

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/DataLoader.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/DataLoader.md)

 ## torch.utils.data.DataLoader
@@ -23,7 +23,7 @@ class mindspore.dataset.GeneratorDataset(
     num_shards=None, shard_id=None, python_multiprocessing=True, max_rowsize=None)
 ```

-For more information, see [mindspore.dataset.GeneratorDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.GeneratorDataset.html#mindspore.dataset.GeneratorDataset).
+For more information, see [mindspore.dataset.GeneratorDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.GeneratorDataset.html#mindspore.dataset.GeneratorDataset).
 ## Difference
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/DistributedSampler.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/DistributedSampler.md
index 324a220511..ce9240fda3 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/DistributedSampler.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/DistributedSampler.md
@@ -1,6 +1,6 @@
 # Differences with torch.utils.data.distributed.DistributedSampler

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/DistributedSampler.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/DistributedSampler.md)

 ## torch.utils.data.distributed.DistributedSampler
@@ -16,7 +16,7 @@ For more information, see [torch.utils.data.distributed.DistributedSampler](http
 class mindspore.dataset.DistributedSampler(num_shards, shard_id, shuffle=True, num_samples=None, offset=-1)
 ```

-For more information, see [mindspore.dataset.DistributedSampler](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.DistributedSampler.html).
+For more information, see [mindspore.dataset.DistributedSampler](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.DistributedSampler.html).

 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/FrequencyMasking.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/FrequencyMasking.md
index 2f84cf2b66..9201ee89c9 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/FrequencyMasking.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/FrequencyMasking.md
@@ -1,6 +1,6 @@
 # Differences with torchaudio.transforms.FrequencyMasking

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/FrequencyMasking.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/FrequencyMasking.md)

 ## torchaudio.transforms.FrequencyMasking
@@ -16,7 +16,7 @@ For more information, see [torchaudio.transforms.FrequencyMasking](https://pytor
 class mindspore.dataset.audio.FrequencyMasking(iid_masks=False, freq_mask_param=0, mask_start=0, mask_value=0.0)
 ```

-For more information, see [mindspore.dataset.audio.FrequencyMasking](https://mindspore.cn/docs/en/master/api_python/dataset_audio/mindspore.dataset.audio.FrequencyMasking.html#mindspore.dataset.audio.FrequencyMasking).
+For more information, see [mindspore.dataset.audio.FrequencyMasking](https://mindspore.cn/docs/en/br_base/api_python/dataset_audio/mindspore.dataset.audio.FrequencyMasking.html#mindspore.dataset.audio.FrequencyMasking).
 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/GTZAN.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/GTZAN.md
index ed7ba0c2a5..44268eb4e0 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/GTZAN.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/GTZAN.md
@@ -1,6 +1,6 @@
 # Differences with torchaudio.datasets.GTZAN

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/GTZAN.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/GTZAN.md)

 ## torchaudio.datasets.GTZAN
@@ -30,7 +30,7 @@ class mindspore.dataset.GTZANDataset(
     cache=None)
 ```

-For more information, see [mindspore.dataset.GTZANDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.GTZANDataset.html#mindspore.dataset.GTZANDataset).
+For more information, see [mindspore.dataset.GTZANDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.GTZANDataset.html#mindspore.dataset.GTZANDataset).

 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/GriffinLim.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/GriffinLim.md
index 89edc78765..f3a5852072 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/GriffinLim.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/GriffinLim.md
@@ -1,6 +1,6 @@
 # Differences with torchaudio.transforms.GriffinLim

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/GriffinLim.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/GriffinLim.md)

 ## torchaudio.transforms.GriffinLim
@@ -21,7 +21,7 @@ class mindspore.dataset.audio.GriffinLim(n_fft=400, n_iter=32, win_length=None,
     momentum=0.99, length=None, rand_init=True)
 ```

-For more information, see [mindspore.dataset.audio.GriffinLim](https://mindspore.cn/docs/en/master/api_python/dataset_audio/mindspore.dataset.audio.GriffinLim.html#mindspore.dataset.audio.GriffinLim).
+For more information, see [mindspore.dataset.audio.GriffinLim](https://mindspore.cn/docs/en/br_base/api_python/dataset_audio/mindspore.dataset.audio.GriffinLim.html#mindspore.dataset.audio.GriffinLim).
 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/IMDB.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/IMDB.md
index 89999f2271..be736244cb 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/IMDB.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/IMDB.md
@@ -1,6 +1,6 @@
 # Differences with torchtext.datasets.IMDB

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/IMDB.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/IMDB.md)

 ## torchtext.datasets.IMDB
@@ -27,7 +27,7 @@ class mindspore.dataset.IMDBDataset(
     cache=None)
 ```

-For more information, see [mindspore.dataset.IMDBDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.IMDBDataset.html#mindspore.dataset.IMDBDataset).
+For more information, see [mindspore.dataset.IMDBDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.IMDBDataset.html#mindspore.dataset.IMDBDataset).

 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/IWSLT2016.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/IWSLT2016.md
index eb3170fbed..f1d160581f 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/IWSLT2016.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/IWSLT2016.md
@@ -1,6 +1,6 @@
 # Differences with torchtext.datasets.IWSLT2016

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/IWSLT2016.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/IWSLT2016.md)

 ## torchtext.datasets.IWSLT2016
@@ -32,7 +32,7 @@ class mindspore.dataset.IWSLT2016Dataset(
     cache=None)
 ```

-For more information, see [mindspore.dataset.IWSLT2016Dataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.IWSLT2016Dataset.html#mindspore.dataset.IWSLT2016Dataset).
+For more information, see [mindspore.dataset.IWSLT2016Dataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.IWSLT2016Dataset.html#mindspore.dataset.IWSLT2016Dataset).
 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/IWSLT2017.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/IWSLT2017.md
index b6e1f8b9e7..d84b29b6c2 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/IWSLT2017.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/IWSLT2017.md
@@ -1,6 +1,6 @@
 # Differences with torchtext.datasets.IWSLT2017

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/IWSLT2017.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/IWSLT2017.md)

 ## torchtext.datasets.IWSLT2017
@@ -28,7 +28,7 @@ class mindspore.dataset.IWSLT2017Dataset(
     cache=None)
 ```

-For more information, see [mindspore.dataset.IWSLT2017Dataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.IWSLT2017Dataset.html#mindspore.dataset.IWSLT2017Dataset).
+For more information, see [mindspore.dataset.IWSLT2017Dataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.IWSLT2017Dataset.html#mindspore.dataset.IWSLT2017Dataset).

 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/ImageFolder.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/ImageFolder.md
index c31a898181..af04cee6ac 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/ImageFolder.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/ImageFolder.md
@@ -1,6 +1,6 @@
 # Differences with torchvision.datasets.ImageFolder

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/ImageFolder.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/ImageFolder.md)

 ## torchvision.datasets.ImageFolder
@@ -33,7 +33,7 @@ class mindspore.dataset.ImageFolderDataset(
     decrypt=None)
 ```

-For more information, see [mindspore.dataset.ImageFolderDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.ImageFolderDataset.html#mindspore.dataset.ImageFolderDataset).
+For more information, see [mindspore.dataset.ImageFolderDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.ImageFolderDataset.html#mindspore.dataset.ImageFolderDataset).
 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/InverseMelScale.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/InverseMelScale.md
index 9ba4295dbb..8f2898e8de 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/InverseMelScale.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/InverseMelScale.md
@@ -1,6 +1,6 @@
 # Differences with torchaudio.transforms.InverseMelScale

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/InverseMelScale.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/InverseMelScale.md)

 ## torchaudio.transforms.InverseMelScale
@@ -20,7 +20,7 @@ class mindspore.dataset.audio.InverseMelScale(n_stft, n_mels=128, sample_rate=16
     norm=NormType.NONE, mel_type=MelType.HTK)
 ```

-For more information, see [mindspore.dataset.audio.InverseMelScale](https://mindspore.cn/docs/en/master/api_python/dataset_audio/mindspore.dataset.audio.InverseMelScale.html#mindspore.dataset.audio.InverseMelScale).
+For more information, see [mindspore.dataset.audio.InverseMelScale](https://mindspore.cn/docs/en/br_base/api_python/dataset_audio/mindspore.dataset.audio.InverseMelScale.html#mindspore.dataset.audio.InverseMelScale).

 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/LIBRITTS.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/LIBRITTS.md
index ecb007459f..85061932f3 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/LIBRITTS.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/LIBRITTS.md
@@ -1,6 +1,6 @@
 # Differences with torchaudio.datasets.LIBRITTS

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/LIBRITTS.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/LIBRITTS.md)

 ## torchaudio.datasets.LIBRITTS
@@ -29,7 +29,7 @@ class mindspore.dataset.LibriTTSDataset(
     cache=None)
 ```

-For more information, see [mindspore.dataset.LibriTTSDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.LibriTTSDataset.html#mindspore.dataset.LibriTTSDataset).
+For more information, see [mindspore.dataset.LibriTTSDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.LibriTTSDataset.html#mindspore.dataset.LibriTTSDataset).
 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/LJSPEECH.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/LJSPEECH.md
index 44207629c4..150444b2bb 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/LJSPEECH.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/LJSPEECH.md
@@ -1,6 +1,6 @@
 # Differences with torchaudio.datasets.LJSPEECH

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/LJSPEECH.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/LJSPEECH.md)

 ## torchaudio.datasets.LJSPEECH
@@ -28,7 +28,7 @@ class mindspore.dataset.LJSpeechDataset(
     cache=None)
 ```

-For more information, see [mindspore.dataset.LJSpeechDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.LJSpeechDataset.html#mindspore.dataset.LJSpeechDataset).
+For more information, see [mindspore.dataset.LJSpeechDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.LJSpeechDataset.html#mindspore.dataset.LJSpeechDataset).

 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/Lookup.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/Lookup.md
index f5bbcc5621..33629f79d3 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/Lookup.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/Lookup.md
@@ -1,6 +1,6 @@
 # Differences with torchtext.data.functional.numericalize_tokens_from_iterator

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/Lookup.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/Lookup.md)

 ## torchtext.data.functional.numericalize_tokens_from_iterator
@@ -24,7 +24,7 @@ class mindspore.dataset.text.Lookup(
     )
 ```

-For more information, see [mindspore.dataset.text.Lookup](https://mindspore.cn/docs/en/master/api_python/dataset_text/mindspore.dataset.text.Lookup.html#mindspore.dataset.text.Lookup).
+For more information, see [mindspore.dataset.text.Lookup](https://mindspore.cn/docs/en/br_base/api_python/dataset_text/mindspore.dataset.text.Lookup.html#mindspore.dataset.text.Lookup).
 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/MNIST.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/MNIST.md
index 8c6b2b09a8..64c2f427e5 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/MNIST.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/MNIST.md
@@ -1,6 +1,6 @@
 # Differences with torchvision.datasets.MNIST

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/MNIST.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/MNIST.md)

 ## torchvision.datasets.MNIST
@@ -30,7 +30,7 @@ class mindspore.dataset.MnistDataset(
     cache=None)
 ```

-For more information, see [mindspore.dataset.MnistDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.MnistDataset.html#mindspore.dataset.MnistDataset).
+For more information, see [mindspore.dataset.MnistDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.MnistDataset.html#mindspore.dataset.MnistDataset).

 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/MelScale.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/MelScale.md
index c846b24b31..1dd9e8c7c1 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/MelScale.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/MelScale.md
@@ -1,6 +1,6 @@
 # Differences with torchaudio.transforms.MelScale

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/MelScale.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/MelScale.md)

 ## torchaudio.transforms.MelScale
@@ -18,7 +18,7 @@ class mindspore.dataset.audio.MelScale(n_mels=128, sample_rate=16000, f_min=0.0,
     n_stft=201, norm=NormType.NONE, mel_type=MelType.HTK)
 ```

-For more information, see [mindspore.dataset.audio.MelScale](https://mindspore.cn/docs/en/master/api_python/dataset_audio/mindspore.dataset.audio.MelScale.html#mindspore.dataset.audio.MelScale).
+For more information, see [mindspore.dataset.audio.MelScale](https://mindspore.cn/docs/en/br_base/api_python/dataset_audio/mindspore.dataset.audio.MelScale.html#mindspore.dataset.audio.MelScale).
 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/MelSpectrogram.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/MelSpectrogram.md
index 29a084dc14..70dc8ce454 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/MelSpectrogram.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/MelSpectrogram.md
@@ -1,6 +1,6 @@
 # Differences with torchaudio.transforms.MelSpectrogram

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/MelSpectrogram.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/MelSpectrogram.md)

 ## torchaudio.transforms.MelSpectrogram
@@ -23,7 +23,7 @@ class mindspore.dataset.audio.MelSpectrogram(sample_rate=16000, n_fft=400, win_l
     center=True, pad_mode=BorderType.REFLECT, onesided=True, norm=NormType.NONE, mel_scale=MelType.HTK)
 ```

-For more information, see [mindspore.dataset.audio.MelSpectrogram](https://mindspore.cn/docs/en/master/api_python/dataset_audio/mindspore.dataset.audio.MelSpectrogram.html#mindspore.dataset.audio.MelSpectrogram).
+For more information, see [mindspore.dataset.audio.MelSpectrogram](https://mindspore.cn/docs/en/br_base/api_python/dataset_audio/mindspore.dataset.audio.MelSpectrogram.html#mindspore.dataset.audio.MelSpectrogram).

 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/Ngram.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/Ngram.md
index aaf93acfa2..c5b156e1c5 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/Ngram.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/Ngram.md
@@ -1,6 +1,6 @@
 # Differences with torchtext.data.utils.ngrams_iterator

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/Ngram.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/Ngram.md)

 ## torchtext.data.utils.ngrams_iterator
@@ -24,7 +24,7 @@ class mindspore.dataset.text.Ngram(
     )
 ```

-For more information, see [mindspore.dataset.text.Ngram](https://mindspore.cn/docs/en/master/api_python/dataset_text/mindspore.dataset.text.Ngram.html#mindspore.dataset.text.Ngram).
+For more information, see [mindspore.dataset.text.Ngram](https://mindspore.cn/docs/en/br_base/api_python/dataset_text/mindspore.dataset.text.Ngram.html#mindspore.dataset.text.Ngram).
 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/Normalize.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/Normalize.md
index cd05ce2257..fd44731997 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/Normalize.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/Normalize.md
@@ -1,6 +1,6 @@
 # Differences with torchvision.transforms.Normalize

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/Normalize.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/Normalize.md)

 ## torchvision.transforms.Normalize
@@ -16,7 +16,7 @@ For more information, see [torchvision.transforms.Normalize](https://pytorch.org
 class mindspore.dataset.vision.Normalize(mean, std, is_hwc=True)
 ```

-For more information, see [mindspore.dataset.vision.Normalize](https://mindspore.cn/docs/en/master/api_python/dataset_vision/mindspore.dataset.vision.Normalize.html).
+For more information, see [mindspore.dataset.vision.Normalize](https://mindspore.cn/docs/en/br_base/api_python/dataset_vision/mindspore.dataset.vision.Normalize.html).

 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/PennTreebank.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/PennTreebank.md
index 389a8f7a69..044f77c990 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/PennTreebank.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/PennTreebank.md
@@ -1,6 +1,6 @@
 # Differences with torchtext.datasets.PennTreebank

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/PennTreebank.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/PennTreebank.md)

 ## torchtext.datasets.PennTreebank
@@ -26,7 +26,7 @@ class mindspore.dataset.PennTreebankDataset(
     cache=None)
 ```

-For more information, see [mindspore.dataset.PennTreebankDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.PennTreebankDataset.html#mindspore.dataset.PennTreebankDataset).
+For more information, see [mindspore.dataset.PennTreebankDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.PennTreebankDataset.html#mindspore.dataset.PennTreebankDataset).
 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/RandomAffine.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/RandomAffine.md
index 084126da9d..ed1eb96291 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/RandomAffine.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/RandomAffine.md
@@ -1,6 +1,6 @@
 # Differences with torchvision.transforms.RandomAffine

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/RandomAffine.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/RandomAffine.md)

 ## torchvision.transforms.RandomAffine
@@ -16,7 +16,7 @@ For more information, see [torchvision.transforms.RandomAffine](https://pytorch.
 class mindspore.dataset.vision.RandomAffine(degrees, translate=None, scale=None, shear=None, resample=Inter.NEAREST, fill_value=0)
 ```

-For more information, see [mindspore.dataset.vision.RandomAffine](https://mindspore.cn/docs/en/master/api_python/dataset_vision/mindspore.dataset.vision.RandomAffine.html).
+For more information, see [mindspore.dataset.vision.RandomAffine](https://mindspore.cn/docs/en/br_base/api_python/dataset_vision/mindspore.dataset.vision.RandomAffine.html).

 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/RandomPerspective.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/RandomPerspective.md
index 6ebe676496..043e70c23d 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/RandomPerspective.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/RandomPerspective.md
@@ -1,6 +1,6 @@
 # Differences with torchvision.transforms.RandomPerspective

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/RandomPerspective.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/RandomPerspective.md)

 ## torchvision.transforms.RandomPerspective
@@ -16,7 +16,7 @@ For more information, see [torchvision.transforms.RandomPerspective](https://pyt
 class mindspore.dataset.vision.RandomPerspective(distortion_scale=0.5, prob=0.5, interpolation=Inter.BICUBIC)
 ```

-For more information, see [mindspore.dataset.vision.RandomPerspective](https://mindspore.cn/docs/en/master/api_python/dataset_vision/mindspore.dataset.vision.RandomPerspective.html).
+For more information, see [mindspore.dataset.vision.RandomPerspective](https://mindspore.cn/docs/en/br_base/api_python/dataset_vision/mindspore.dataset.vision.RandomPerspective.html).
 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/RandomResizedCrop.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/RandomResizedCrop.md
index 66642a5ef6..accac14a87 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/RandomResizedCrop.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/RandomResizedCrop.md
@@ -1,6 +1,6 @@
 # Differences with torchvision.transforms.RandomResizedCrop

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/RandomResizedCrop.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/RandomResizedCrop.md)

 ## torchvision.transforms.RandomResizedCrop
@@ -16,7 +16,7 @@ For more information, see [torchvision.transforms.RandomResizedCrop](https://pyt
 class mindspore.dataset.vision.RandomResizedCrop(size, scale=(0.08, 1.0), ratio=(3. / 4., 4. / 3.), interpolation=Inter.BILINEAR, max_attempts=10)
 ```

-For more information, see [mindspore.dataset.vision.RandomResizedCrop](https://mindspore.cn/docs/en/master/api_python/dataset_vision/mindspore.dataset.vision.RandomResizedCrop.html).
+For more information, see [mindspore.dataset.vision.RandomResizedCrop](https://mindspore.cn/docs/en/br_base/api_python/dataset_vision/mindspore.dataset.vision.RandomResizedCrop.html).

 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/RandomRotation.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/RandomRotation.md
index 1cf35767a0..e12123bc78 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/RandomRotation.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/RandomRotation.md
@@ -1,6 +1,6 @@
 # Differences with torchvision.transforms.RandomRotation

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/RandomRotation.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/RandomRotation.md)

 ## torchvision.transforms.RandomRotation
@@ -16,7 +16,7 @@ For more information, see [torchvision.transforms.RandomRotation](https://pytorc
 class mindspore.dataset.vision.RandomRotation(degrees, resample=Inter.NEAREST, expand=False, center=None, fill_value=0)
 ```

-For more information, see [mindspore.dataset.vision.RandomRotation](https://mindspore.cn/docs/en/master/api_python/dataset_vision/mindspore.dataset.vision.RandomRotation.html).
+For more information, see [mindspore.dataset.vision.RandomRotation](https://mindspore.cn/docs/en/br_base/api_python/dataset_vision/mindspore.dataset.vision.RandomRotation.html).
 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/RandomSampler.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/RandomSampler.md
index a18dd0542d..a64060bfb3 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/RandomSampler.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/RandomSampler.md
@@ -1,6 +1,6 @@
 # Differences with torch.utils.data.RandomSampler

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/RandomSampler.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/RandomSampler.md)

 ## torch.utils.data.RandomSampler
@@ -16,7 +16,7 @@ For more information, see [torch.utils.data.RandomSampler](https://pytorch.org/d
 class mindspore.dataset.RandomSampler(replacement=False, num_samples=None)
 ```

-For more information, see [mindspore.dataset.RandomSampler](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.RandomSampler.html).
+For more information, see [mindspore.dataset.RandomSampler](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.RandomSampler.html).

 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/RegexReplace.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/RegexReplace.md
index 16df240f98..91a51c0694 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/RegexReplace.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/RegexReplace.md
@@ -1,6 +1,6 @@
 # Differences with torchtext.data.functional.custom_replace

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/RegexReplace.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/RegexReplace.md)

 ## torchtext.data.functional.custom_replace
@@ -16,7 +16,7 @@ For more information, see [torchtext.data.functional.custom_replace](https://pyt
 class mindspore.dataset.text.RegexReplace(pattern, replace, replace_all=True)
 ```

-For more information, see [mindspore.dataset.text.RegexReplace](https://www.mindspore.cn/docs/en/master/api_python/dataset_text/mindspore.dataset.text.RegexReplace.html#mindspore.dataset.text.RegexReplace).
+For more information, see [mindspore.dataset.text.RegexReplace](https://www.mindspore.cn/docs/en/br_base/api_python/dataset_text/mindspore.dataset.text.RegexReplace.html#mindspore.dataset.text.RegexReplace).
 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/Resample.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/Resample.md
index 5562ea6fc9..808378a8f4 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/Resample.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/Resample.md
@@ -1,6 +1,6 @@
 # Differences with torchaudio.transforms.Resample

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/Resample.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/Resample.md)

 ## torchaudio.transforms.Resample
@@ -17,7 +17,7 @@ class mindspore.dataset.audio.Resample(orig_freq=16000, new_freq=16000, resample
     lowpass_filter_width=6, rolloff=0.99, beta=None)
 ```

-For more information, see [mindspore.dataset.audio.Resample](https://mindspore.cn/docs/en/master/api_python/dataset_audio/mindspore.dataset.audio.Resample.html#mindspore.dataset.audio.Resample).
+For more information, see [mindspore.dataset.audio.Resample](https://mindspore.cn/docs/en/br_base/api_python/dataset_audio/mindspore.dataset.audio.Resample.html#mindspore.dataset.audio.Resample).

 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SPEECHCOMMANDS.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SPEECHCOMMANDS.md
index 231e5cb078..9de276ffc0 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SPEECHCOMMANDS.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SPEECHCOMMANDS.md
@@ -1,6 +1,6 @@
 # Differences with torchaudio.datasets.SPEECHCOMMANDS

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SPEECHCOMMANDS.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SPEECHCOMMANDS.md)

 ## torchaudio.datasets.SPEECHCOMMANDS
@@ -30,7 +30,7 @@ class mindspore.dataset.SpeechCommandsDataset(
     cache=None)
 ```

-For more information, see [mindspore.dataset.SpeechCommandsDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.SpeechCommandsDataset.html#mindspore.dataset.SpeechCommandsDataset).
+For more information, see [mindspore.dataset.SpeechCommandsDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.SpeechCommandsDataset.html#mindspore.dataset.SpeechCommandsDataset).
 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SQuAD1.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SQuAD1.md
index b5564bfb0d..4c34cfede0 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SQuAD1.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SQuAD1.md
@@ -1,6 +1,6 @@
 # Differences with torchtext.datasets.SQuAD1

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SQuAD1.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SQuAD1.md)

 ## torchtext.datasets.SQuAD1
@@ -26,7 +26,7 @@ class mindspore.dataset.SQuADDataset(
     cache=None)
 ```

-For more information, see [mindspore.dataset.SQuADDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.SQuADDataset.html#mindspore.dataset.SQuADDataset).
+For more information, see [mindspore.dataset.SQuADDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.SQuADDataset.html#mindspore.dataset.SQuADDataset).

 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SQuAD2.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SQuAD2.md
index dd41c3dbc2..cd53450ab3 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SQuAD2.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SQuAD2.md
@@ -1,6 +1,6 @@
 # Differences with torchtext.datasets.SQuAD2

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SQuAD2.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SQuAD2.md)

 ## torchtext.datasets.SQuAD2
@@ -26,7 +26,7 @@ class mindspore.dataset.SQuADDataset(
     cache=None)
 ```

-For more information, see [mindspore.dataset.SQuADDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.SQuADDataset.html#mindspore.dataset.SQuADDataset).
+For more information, see [mindspore.dataset.SQuADDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.SQuADDataset.html#mindspore.dataset.SQuADDataset).
 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SentencePieceTokenizer_Out_INT.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SentencePieceTokenizer_Out_INT.md
index 23d3aaf202..36495f2a3a 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SentencePieceTokenizer_Out_INT.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SentencePieceTokenizer_Out_INT.md
@@ -1,6 +1,6 @@
 # Differences with torchtext.data.functional.sentencepiece_numericalizer

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SentencePieceTokenizer_Out_INT.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SentencePieceTokenizer_Out_INT.md)

 ## torchtext.data.functional.sentencepiece_numericalizer
@@ -21,7 +21,7 @@ class mindspore.dataset.text.SentencePieceTokenizer(
     )
 ```

-For more information, see [mindspore.dataset.text.SentencePieceTokenizer](https://mindspore.cn/docs/en/master/api_python/dataset_text/mindspore.dataset.text.SentencePieceTokenizer.html#mindspore.dataset.text.SentencePieceTokenizer).
+For more information, see [mindspore.dataset.text.SentencePieceTokenizer](https://mindspore.cn/docs/en/br_base/api_python/dataset_text/mindspore.dataset.text.SentencePieceTokenizer.html#mindspore.dataset.text.SentencePieceTokenizer).

 ## Differences
diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SentencePieceTokenizer_Out_STRING.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SentencePieceTokenizer_Out_STRING.md
index 84fb0b3d08..e4112aa056 100644
--- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SentencePieceTokenizer_Out_STRING.md
+++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SentencePieceTokenizer_Out_STRING.md
@@ -1,6 +1,6 @@
 # Differences with torchtext.data.functional.sentencepiece_tokenizer

-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SentencePieceTokenizer_Out_INT.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SentencePieceTokenizer_Out_INT.md)

 ## torchtext.data.functional.sentencepiece_tokenizer
@@ -21,7 +21,7 @@ class mindspore.dataset.text.SentencePieceTokenizer(
     )
 ```

-For more information, see [mindspore.dataset.text.SentencePieceTokenizer](https://mindspore.cn/docs/en/master/api_python/dataset_text/mindspore.dataset.text.SentencePieceTokenizer.html#mindspore.dataset.text.SentencePieceTokenizer).
+For more information, see [mindspore.dataset.text.SentencePieceTokenizer](https://mindspore.cn/docs/en/br_base/api_python/dataset_text/mindspore.dataset.text.SentencePieceTokenizer.html#mindspore.dataset.text.SentencePieceTokenizer).
## Differences diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SequentialSampler.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SequentialSampler.md index 24df99bc9a..1135aeb0ef 100644 --- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SequentialSampler.md +++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SequentialSampler.md @@ -1,6 +1,6 @@ # Differences with torch.utils.data.SequentialSampler -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SequentialSampler.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SequentialSampler.md) ## torch.utils.data.SequentialSampler @@ -16,7 +16,7 @@ For more information, see [torch.utils.data.SequentialSampler](https://pytorch.o class mindspore.dataset.SequentialSampler(start_index=None, num_samples=None) ``` -For more information, see [mindspore.dataset.SequentialSampler](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.SequentialSampler.html). +For more information, see [mindspore.dataset.SequentialSampler](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.SequentialSampler.html). ## Differences diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SogouNews.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SogouNews.md index e0f7b305b0..729cd93625 100644 --- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SogouNews.md +++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SogouNews.md @@ -1,6 +1,6 @@ # Differences with torchtext.datasets.SogouNews -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SogouNews.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SogouNews.md) ## torchtext.datasets.SogouNews @@ -26,7 +26,7 @@ class mindspore.dataset.SogouNewsDataset( cache=None) ``` -For more information, see [mindspore.dataset.SogouNewsDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.SogouNewsDataset.html#mindspore.dataset.SogouNewsDataset). +For more information, see [mindspore.dataset.SogouNewsDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.SogouNewsDataset.html#mindspore.dataset.SogouNewsDataset). 
## Differences diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SpectralCentroid.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SpectralCentroid.md index 31a14241eb..d74dcc7bfe 100644 --- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SpectralCentroid.md +++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SpectralCentroid.md @@ -1,6 +1,6 @@ # Differences with torchaudio.transforms.SpectralCentroid -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SpectralCentroid.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SpectralCentroid.md) ## torchaudio.transforms.SpectralCentroid @@ -20,7 +20,7 @@ class mindspore.dataset.audio.SpectralCentroid(sample_rate, n_fft=400, win_lengt pad=0, window=WindowType.HANN) ``` -For more information, see [mindspore.dataset.audio.SpectralCentroid](https://mindspore.cn/docs/en/master/api_python/dataset_audio/mindspore.dataset.audio.SpectralCentroid.html#mindspore.dataset.audio.SpectralCentroid). +For more information, see [mindspore.dataset.audio.SpectralCentroid](https://mindspore.cn/docs/en/br_base/api_python/dataset_audio/mindspore.dataset.audio.SpectralCentroid.html#mindspore.dataset.audio.SpectralCentroid). ## Differences diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/Spectrogram.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/Spectrogram.md index fc14ee69a6..c7a9f458cd 100644 --- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/Spectrogram.md +++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/Spectrogram.md @@ -1,6 +1,6 @@ # Differences with torchaudio.transforms.Spectrogram -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/Spectrogram.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/Spectrogram.md) ## torchaudio.transforms.Spectrogram @@ -22,7 +22,7 @@ class mindspore.dataset.audio.Spectrogram(n_fft=400, win_length=None, hop_length center=True, pad_mode=BorderType.REFLECT, onesided=True) ``` -For more information, see [mindspore.dataset.audio.Spectrogram](https://mindspore.cn/docs/en/master/api_python/dataset_audio/mindspore.dataset.audio.Spectrogram.html#mindspore.dataset.audio.Spectrogram). +For more information, see [mindspore.dataset.audio.Spectrogram](https://mindspore.cn/docs/en/br_base/api_python/dataset_audio/mindspore.dataset.audio.Spectrogram.html#mindspore.dataset.audio.Spectrogram). 
## Differences diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SubsetRandomSampler.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SubsetRandomSampler.md index 5bac945335..984324312a 100644 --- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SubsetRandomSampler.md +++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SubsetRandomSampler.md @@ -1,6 +1,6 @@ # Differences with torch.utils.data.SubsetRandomSampler -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SubsetRandomSampler.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/SubsetRandomSampler.md) ## torch.utils.data.SubsetRandomSampler @@ -16,7 +16,7 @@ For more information, see [torch.utils.data.SubsetRandomSampler](https://pytorch class mindspore.dataset.SubsetRandomSampler(indices, num_samples=None) ``` -For more information, see [mindspore.dataset.SubsetRandomSampler](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.SubsetRandomSampler.html). +For more information, see [mindspore.dataset.SubsetRandomSampler](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.SubsetRandomSampler.html). ## Differences diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/TEDLIUM.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/TEDLIUM.md index b67ff746e2..eb5baa3f34 100644 --- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/TEDLIUM.md +++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/TEDLIUM.md @@ -1,6 +1,6 @@ # Differences with torchaudio.datasets.TEDLIUM -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/TEDLIUM.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/TEDLIUM.md) ## torchaudio.datasets.TEDLIUM @@ -32,7 +32,7 @@ class mindspore.dataset.TedliumDataset( cache=None) ``` -For more information, see [mindspore.dataset.TedliumDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.TedliumDataset.html#mindspore.dataset.TedliumDataset). +For more information, see [mindspore.dataset.TedliumDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.TedliumDataset.html#mindspore.dataset.TedliumDataset). 
## Differences diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/TimeMasking.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/TimeMasking.md index 203fb7f5e1..b250f62249 100644 --- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/TimeMasking.md +++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/TimeMasking.md @@ -1,6 +1,6 @@ # Differences with torchaudio.transforms.TimeMasking -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/TimeMasking.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/TimeMasking.md) ## torchaudio.transforms.TimeMasking @@ -16,7 +16,7 @@ For more information, see [torchaudio.transforms.TimeMasking](https://pytorch.or class mindspore.dataset.audio.TimeMasking(iid_masks=False, time_mask_param=0, mask_start=0, mask_value=0.0) ``` -For more information, see [mindspore.dataset.audio.TimeMasking](https://mindspore.cn/docs/en/master/api_python/dataset_audio/mindspore.dataset.audio.TimeMasking.html#mindspore.dataset.audio.TimeMasking). +For more information, see [mindspore.dataset.audio.TimeMasking](https://mindspore.cn/docs/en/br_base/api_python/dataset_audio/mindspore.dataset.audio.TimeMasking.html#mindspore.dataset.audio.TimeMasking). ## Differences diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/ToPIL.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/ToPIL.md index 49cb2183dd..20fc42e9c4 100644 --- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/ToPIL.md +++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/ToPIL.md @@ -1,6 +1,6 @@ # Differences with torchvision.transforms.ToPILImage -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/ToPIL.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/ToPIL.md) ## torchvision.transforms.ToPILImage @@ -18,7 +18,7 @@ For more information, see [torchvision.transforms.ToPILImage](https://pytorch.or class mindspore.dataset.vision.ToPIL ``` -For more information, see [mindspore.dataset.vision.ToPIL](https://mindspore.cn/docs/en/master/api_python/dataset_vision/mindspore.dataset.vision.ToPIL.html#mindspore.dataset.vision.ToPIL). +For more information, see [mindspore.dataset.vision.ToPIL](https://mindspore.cn/docs/en/br_base/api_python/dataset_vision/mindspore.dataset.vision.ToPIL.html#mindspore.dataset.vision.ToPIL). 
## Differences diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/ToTensor.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/ToTensor.md index 4006860845..02a0982b5f 100644 --- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/ToTensor.md +++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/ToTensor.md @@ -1,6 +1,6 @@ # Differences with torchvision.transforms.ToTensor -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/ToTensor.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/ToTensor.md) ## torchvision.transforms.ToTensor @@ -18,7 +18,7 @@ class mindspore.dataset.vision.ToTensor( ) ``` -For more information, see [mindspore.dataset.vision.ToTensor](https://mindspore.cn/docs/en/master/api_python/dataset_vision/mindspore.dataset.vision.ToTensor.html#mindspore.dataset.vision.ToTensor). +For more information, see [mindspore.dataset.vision.ToTensor](https://mindspore.cn/docs/en/br_base/api_python/dataset_vision/mindspore.dataset.vision.ToTensor.html#mindspore.dataset.vision.ToTensor). ## Differences diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/TypeCast.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/TypeCast.md index 812cfcff9a..b2378c9ee1 100644 --- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/TypeCast.md +++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/TypeCast.md @@ -1,6 +1,6 @@ # Differences with torchvision.transforms.ConvertImageDtype -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/TypeCast.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/TypeCast.md) ## torchvision.transforms.ConvertImageDtype @@ -20,7 +20,7 @@ class mindspore.dataset.transforms.TypeCast( ) ``` -For more information, see [mindspore.dataset.transforms.TypeCast](https://mindspore.cn/docs/en/master/api_python/dataset_transforms/mindspore.dataset.transforms.TypeCast.html#mindspore.dataset.transforms.TypeCast). +For more information, see [mindspore.dataset.transforms.TypeCast](https://mindspore.cn/docs/en/br_base/api_python/dataset_transforms/mindspore.dataset.transforms.TypeCast.html#mindspore.dataset.transforms.TypeCast). 
## Differences diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/UDPOS.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/UDPOS.md index 8c7aa45623..0e9ed1df21 100644 --- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/UDPOS.md +++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/UDPOS.md @@ -1,6 +1,6 @@ # Differences with torchtext.datasets.UDPOS -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/UDPOS.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/UDPOS.md) ## torchtext.datasets.UDPOS @@ -26,7 +26,7 @@ class mindspore.dataset.UDPOSDataset( cache=None) ``` -For more information, see [mindspore.dataset.UDPOSDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.UDPOSDataset.html#mindspore.dataset.UDPOSDataset). +For more information, see [mindspore.dataset.UDPOSDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.UDPOSDataset.html#mindspore.dataset.UDPOSDataset). ## Differences diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/VOCDetection.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/VOCDetection.md index 0c7b70f090..2e8cc5cc10 100644 --- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/VOCDetection.md +++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/VOCDetection.md @@ -1,6 +1,6 @@ # Differences with torchvision.datasets.VOCDetection -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/VOCDetection.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/VOCDetection.md) ## torchvision.datasets.VOCDetection @@ -39,7 +39,7 @@ class mindspore.dataset.VOCDataset( ) ``` -For more information, see [mindspore.dataset.VOCDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.VOCDataset.html#mindspore.dataset.VOCDataset). +For more information, see [mindspore.dataset.VOCDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.VOCDataset.html#mindspore.dataset.VOCDataset). 
## Differences diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/VOCSegmentation.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/VOCSegmentation.md index aa12c4d01b..f11759d178 100644 --- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/VOCSegmentation.md +++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/VOCSegmentation.md @@ -1,6 +1,6 @@ # Differences with torchvision.datasets.VOCSegmentation -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/VOCSegmentation.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/VOCSegmentation.md) ## torchvision.datasets.VOCSegmentation @@ -39,7 +39,7 @@ class mindspore.dataset.VOCDataset( ) ``` -For more information, see [mindspore.dataset.VOCDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.VOCDataset.html#mindspore.dataset.VOCDataset). +For more information, see [mindspore.dataset.VOCDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.VOCDataset.html#mindspore.dataset.VOCDataset). ## Differences diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/WeightedRandomSampler.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/WeightedRandomSampler.md index 21de0964c0..ed172db7bb 100644 --- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/WeightedRandomSampler.md +++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/WeightedRandomSampler.md @@ -1,6 +1,6 @@ # Differences with torch.utils.data.WeightedRandomSampler -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/WeightedRandomSampler.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/WeightedRandomSampler.md) ## torch.utils.data.WeightedRandomSampler @@ -16,7 +16,7 @@ For more information, see [torch.utils.data.WeightedRandomSampler](https://pytor class mindspore.dataset.WeightedRandomSampler(weights, num_samples=None, replacement=True) ``` -For more information, see [mindspore.dataset.WeightedRandomSampler](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.WeightedRandomSampler.html). +For more information, see [mindspore.dataset.WeightedRandomSampler](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.WeightedRandomSampler.html). 
## Differences diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/WhitespaceTokenizer.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/WhitespaceTokenizer.md index 9530993e2d..7f0b411239 100644 --- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/WhitespaceTokenizer.md +++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/WhitespaceTokenizer.md @@ -1,6 +1,6 @@ # Differences with torchtext.data.functional.simple_space_split -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/WhitespaceTokenizer.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/WhitespaceTokenizer.md) ## torchtext.data.functional.simple_space_split @@ -16,7 +16,7 @@ For more information, see [torchtext.data.functional.simple_space_split](https:/ class mindspore.dataset.text.WhitespaceTokenizer(with_offsets=False) ``` -For more information, see [mindspore.dataset.text.WhitespaceTokenizer](https://www.mindspore.cn/docs/en/master/api_python/dataset_text/mindspore.dataset.text.WhitespaceTokenizer.html#mindspore.dataset.text.WhitespaceTokenizer). +For more information, see [mindspore.dataset.text.WhitespaceTokenizer](https://www.mindspore.cn/docs/en/br_base/api_python/dataset_text/mindspore.dataset.text.WhitespaceTokenizer.html#mindspore.dataset.text.WhitespaceTokenizer). ## Differences diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/WikiText103.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/WikiText103.md index 7377273b6f..07263b6879 100644 --- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/WikiText103.md +++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/WikiText103.md @@ -1,6 +1,6 @@ # Differences with torchtext.datasets.WikiText103 -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/WikiText103.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/WikiText103.md) ## torchtext.datasets.WikiText103 @@ -26,7 +26,7 @@ class mindspore.dataset.WikiTextDataset( cache=None) ``` -For more information, see [mindspore.dataset.WikiTextDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.WikiTextDataset.html#mindspore.dataset.WikiTextDataset). +For more information, see [mindspore.dataset.WikiTextDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.WikiTextDataset.html#mindspore.dataset.WikiTextDataset). 
## Differences diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/WikiText2.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/WikiText2.md index 0c525fc445..79d6a981d2 100644 --- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/WikiText2.md +++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/WikiText2.md @@ -1,6 +1,6 @@ # Differences with torchtext.datasets.WikiText2 -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/WikiText2.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/WikiText2.md) ## torchtext.datasets.WikiText2 @@ -26,7 +26,7 @@ class mindspore.dataset.WikiTextDataset( cache=None) ``` -For more information, see [mindspore.dataset.WikiTextDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.WikiTextDataset.html#mindspore.dataset.WikiTextDataset). +For more information, see [mindspore.dataset.WikiTextDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.WikiTextDataset.html#mindspore.dataset.WikiTextDataset). ## Differences diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/YESNO.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/YESNO.md index 12f843742d..53eec4e87f 100644 --- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/YESNO.md +++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/YESNO.md @@ -1,6 +1,6 @@ # Differences with torchaudio.datasets.YESNO -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/YESNO.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/YESNO.md) ## torchaudio.datasets.YESNO @@ -28,7 +28,7 @@ class mindspore.dataset.YesNoDataset( cache=None) ``` -For more information, see [mindspore.dataset.YesNoDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.YesNoDataset.html#mindspore.dataset.YesNoDataset). +For more information, see [mindspore.dataset.YesNoDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.YesNoDataset.html#mindspore.dataset.YesNoDataset). 
## Differences diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/YahooAnswers.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/YahooAnswers.md index 2e53a315f1..a6c52e7675 100644 --- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/YahooAnswers.md +++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/YahooAnswers.md @@ -1,6 +1,6 @@ # Differences with torchtext.datasets.YahooAnswers -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/YahooAnswers.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/YahooAnswers.md) ## torchtext.datasets.YahooAnswers @@ -26,7 +26,7 @@ class mindspore.dataset.YahooAnswersDataset( cache=None) ``` -For more information, see [mindspore.dataset.YahooAnswersDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.YahooAnswersDataset.html#mindspore.dataset.YahooAnswersDataset). +For more information, see [mindspore.dataset.YahooAnswersDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.YahooAnswersDataset.html#mindspore.dataset.YahooAnswersDataset). ## Differences diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/YelpReviewFull.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/YelpReviewFull.md index 75b43b50bd..0bdad74b01 100644 --- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/YelpReviewFull.md +++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/YelpReviewFull.md @@ -1,6 +1,6 @@ # Differences with torchtext.datasets.YelpReviewFull -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/YelpReviewFull.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/YelpReviewFull.md) ## torchtext.datasets.YelpReviewFull @@ -26,7 +26,7 @@ class mindspore.dataset.YelpReviewDataset( cache=None) ``` -For more information, see [mindspore.dataset.YelpReviewDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.YelpReviewDataset.html#mindspore.dataset.YelpReviewDataset). +For more information, see [mindspore.dataset.YelpReviewDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.YelpReviewDataset.html#mindspore.dataset.YelpReviewDataset). 
## Differences diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/YelpReviewPolarity.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/YelpReviewPolarity.md index 4974b88b59..124ef00c9a 100644 --- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/YelpReviewPolarity.md +++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/YelpReviewPolarity.md @@ -1,6 +1,6 @@ # Differences with torchtext.datasets.YelpReviewPolarity -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/YelpReviewPolarity.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/YelpReviewPolarity.md) ## torchtext.datasets.YelpReviewPolarity @@ -26,7 +26,7 @@ class mindspore.dataset.YelpReviewDataset( cache=None) ``` -For more information, see [mindspore.dataset.YelpReviewDataset](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.YelpReviewDataset.html#mindspore.dataset.YelpReviewDataset). +For more information, see [mindspore.dataset.YelpReviewDataset](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.YelpReviewDataset.html#mindspore.dataset.YelpReviewDataset). ## Differences diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/checkpoint.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/checkpoint.md index ae019ffdee..8f947b1aff 100644 --- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/checkpoint.md +++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/checkpoint.md @@ -1,6 +1,6 @@ # Differences with torch.utils.checkpoint.checkpoint -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/checkpoint.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/checkpoint.md) ## torch.utils.checkpoint.checkpoint @@ -16,7 +16,7 @@ For more information, see [torch.utils.checkpoint.checkpoint](https://pytorch.or mindspore.nn.Cell.recompute(mp_comm_recompute=True, parallel_optimizer_comm_recompute=False) ``` -For more information, see [mindspore.nn.Cell.recompute](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.recompute). +For more information, see [mindspore.nn.Cell.recompute](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.recompute). 
## Differences diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/deform_conv2d.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/deform_conv2d.md index ac917c040a..d70bd27c85 100644 --- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/deform_conv2d.md +++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/deform_conv2d.md @@ -1,6 +1,6 @@ # Differences with torchvision.ops.deform_conv2d -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/deform_conv2d.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/deform_conv2d.md) ## torchvision.ops.deform_conv2d @@ -37,7 +37,7 @@ class mindspore.ops.deformable_conv2d( ) ``` -For more information, see [mindspore.ops.deformable_conv2d](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.deformable_conv2d.html). +For more information, see [mindspore.ops.deformable_conv2d](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.deformable_conv2d.html). ## Differences diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/load_sp_model.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/load_sp_model.md index f021df0a89..b321462299 100644 --- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/load_sp_model.md +++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/load_sp_model.md @@ -1,6 +1,6 @@ # Differences with torchtext.data.functional.load_sp_model -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/load_sp_model.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/load_sp_model.md) ## torchtext.data.functional.load_sp_model @@ -18,7 +18,7 @@ For more information, see [torchtext.data.functional.load_sp_model](https://pyto class mindspore.dataset.text.SentencePieceTokenizer(mode, out_type) ``` -For more information, see [mindspore.dataset.text.SentencePieceTokenizer](https://www.mindspore.cn/docs/en/master/api_python/dataset_text/mindspore.dataset.text.SentencePieceTokenizer.html#mindspore.dataset.text.SentencePieceTokenizer). +For more information, see [mindspore.dataset.text.SentencePieceTokenizer](https://www.mindspore.cn/docs/en/br_base/api_python/dataset_text/mindspore.dataset.text.SentencePieceTokenizer.html#mindspore.dataset.text.SentencePieceTokenizer). 
## Differences diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/nms.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/nms.md index 8e3246e523..66021c0b8a 100644 --- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/nms.md +++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/nms.md @@ -1,6 +1,6 @@ # Differences with torchvision.ops.nms -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/nms.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/nms.md) ## torchvision.ops.nms @@ -16,7 +16,7 @@ For more information, see [torchvision.ops.nms](https://pytorch.org/vision/0.9/o class mindspore.ops.NMSWithMask(iou_threshold=0.5)(bboxes) ``` -For more information, see [mindspore.ops.NMSWithMask](https://mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.NMSWithMask.html). +For more information, see [mindspore.ops.NMSWithMask](https://mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.NMSWithMask.html). ## Differences diff --git a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/roi_align.md b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/roi_align.md index 06201913c6..4827fac80c 100644 --- a/docs/mindspore/source_en/note/api_mapping/pytorch_diff/roi_align.md +++ b/docs/mindspore/source_en/note/api_mapping/pytorch_diff/roi_align.md @@ -1,6 +1,6 @@ # Differences with torchvision.ops.roi_align -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_en/note/api_mapping/pytorch_diff/roi_align.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_en/note/api_mapping/pytorch_diff/roi_align.md) ## torchvision.ops.roi_align @@ -16,7 +16,7 @@ For more information, see [torchvision.ops.roi_align](https://pytorch.org/vision class mindspore.ops.ROIAlign(pooled_height, pooled_width, spatial_scale, sample_num=2, roi_end_mode=1)(features, rois) ``` -For more information, see [mindspore.ops.ROIAlign](https://mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ROIAlign.html). +For more information, see [mindspore.ops.ROIAlign](https://mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ROIAlign.html). 
## Differences diff --git a/docs/mindspore/source_zh_cn/api_python/bfloat16_support.md b/docs/mindspore/source_zh_cn/api_python/bfloat16_support.md index 4b1a0bd0f9..6deb599153 100644 --- a/docs/mindspore/source_zh_cn/api_python/bfloat16_support.md +++ b/docs/mindspore/source_zh_cn/api_python/bfloat16_support.md @@ -1,6 +1,6 @@ # bfloat16 数据类型支持情况 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/api_python/bfloat16_support.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/api_python/bfloat16_support.md) ## 概述 @@ -15,38 +15,38 @@ FP16 格式有 5 位指数和 10 位尾数,而 BF16 有 8 位指数和 7 位 |API名称|Ascend|说明| |:----|:---------|:---------| -|[mindspore.Tensor.asnumpy](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.asnumpy.html)|❌|由于numpy不支持bfloat16数据类型,无法将bfloat16类型的Tensor转换为numpy类型。| -|[mindspore.amp.auto_mixed_precision](https://www.mindspore.cn/docs/zh-CN/master/api_python/amp/mindspore.amp.auto_mixed_precision.html)|✔️|使用自动混合精度接口时,支持将低精度的数据类型指定为bfloat16。| -|[mindspore.amp.custom_mixed_precision](https://www.mindspore.cn/docs/zh-CN/master/api_python/amp/mindspore.amp.custom_mixed_precision.html)|✔️|使用自定义混合精度接口时,支持将低精度的数据类型指定为bfloat16。| -|[mindspore.load_checkpoint](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.load_checkpoint.html)|✔️|| -|[mindspore.save_checkpoint](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.save_checkpoint.html)|✔️|| -|[mindspore.ops.Add](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Add.html)|✔️|| -|[mindspore.ops.AddN](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.AddN.html)|✔️|| -|[mindspore.ops.AllGather](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.AllGather.html)|✔️|| -|[mindspore.ops.AllReduce](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.AllReduce.html)|✔️|| -|[mindspore.ops.AssignAdd](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.AssignAdd.html)|✔️|| -|[mindspore.ops.BatchMatMul](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.BatchMatMul.html)|✔️|| -|[mindspore.ops.Broadcast](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Broadcast.html)|✔️|| -|[mindspore.ops.Cast](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Cast.html)|✔️|| -|[mindspore.ops.Equal](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Equal.html)|✔️|| -|[mindspore.ops.Exp](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Exp.html)|✔️|| -|[mindspore.ops.FastGeLU](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.FastGeLU.html)|✔️|| -|[mindspore.ops.GreaterEqual](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.GreaterEqual.html)|✔️|| -|[mindspore.ops.LayerNorm](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.LayerNorm.html)|✔️|| -|[mindspore.ops.LessEqual](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.LessEqual.html)|✔️|| -|[mindspore.ops.MatMul](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.MatMul.html)|✔️|| 
-|[mindspore.ops.Maximum](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Maximum.html)|✔️|| -|[mindspore.ops.Minimum](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Minimum.html)|✔️|| -|[mindspore.ops.Mul](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Mul.html)|✔️|| -|[mindspore.ops.NotEqual](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.NotEqual.html)|✔️|| -|[mindspore.ops.RealDiv](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.RealDiv.html)|✔️|| -|[mindspore.ops.ReduceMean](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ReduceMean.html)|✔️|| -|[mindspore.ops.ReduceScatter](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ReduceScatter.html)|✔️|| -|[mindspore.ops.ReduceSum](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ReduceSum.html)|✔️|| -|[mindspore.ops.Select](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Select.html)|✔️|| -|[mindspore.ops.Softmax](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Softmax.html)|✔️|| -|[mindspore.ops.Sqrt](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Sqrt.html)|✔️|| -|[mindspore.ops.Square](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Square.html)|✔️|| -|[mindspore.ops.Sub](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Sub.html)|✔️|| -|[mindspore.ops.Tile](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Tile.html)|✔️|| -|[mindspore.ops.Transpose](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Transpose.html)|✔️|| +|[mindspore.Tensor.asnumpy](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.asnumpy.html)|❌|由于numpy不支持bfloat16数据类型,无法将bfloat16类型的Tensor转换为numpy类型。| +|[mindspore.amp.auto_mixed_precision](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/amp/mindspore.amp.auto_mixed_precision.html)|✔️|使用自动混合精度接口时,支持将低精度的数据类型指定为bfloat16。| +|[mindspore.amp.custom_mixed_precision](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/amp/mindspore.amp.custom_mixed_precision.html)|✔️|使用自定义混合精度接口时,支持将低精度的数据类型指定为bfloat16。| +|[mindspore.load_checkpoint](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.load_checkpoint.html)|✔️|| +|[mindspore.save_checkpoint](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.save_checkpoint.html)|✔️|| +|[mindspore.ops.Add](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Add.html)|✔️|| +|[mindspore.ops.AddN](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.AddN.html)|✔️|| +|[mindspore.ops.AllGather](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.AllGather.html)|✔️|| +|[mindspore.ops.AllReduce](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.AllReduce.html)|✔️|| +|[mindspore.ops.AssignAdd](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.AssignAdd.html)|✔️|| +|[mindspore.ops.BatchMatMul](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.BatchMatMul.html)|✔️|| +|[mindspore.ops.Broadcast](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Broadcast.html)|✔️|| +|[mindspore.ops.Cast](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Cast.html)|✔️|| 
+|[mindspore.ops.Equal](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Equal.html)|✔️|| +|[mindspore.ops.Exp](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Exp.html)|✔️|| +|[mindspore.ops.FastGeLU](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.FastGeLU.html)|✔️|| +|[mindspore.ops.GreaterEqual](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.GreaterEqual.html)|✔️|| +|[mindspore.ops.LayerNorm](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.LayerNorm.html)|✔️|| +|[mindspore.ops.LessEqual](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.LessEqual.html)|✔️|| +|[mindspore.ops.MatMul](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.MatMul.html)|✔️|| +|[mindspore.ops.Maximum](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Maximum.html)|✔️|| +|[mindspore.ops.Minimum](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Minimum.html)|✔️|| +|[mindspore.ops.Mul](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Mul.html)|✔️|| +|[mindspore.ops.NotEqual](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.NotEqual.html)|✔️|| +|[mindspore.ops.RealDiv](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.RealDiv.html)|✔️|| +|[mindspore.ops.ReduceMean](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ReduceMean.html)|✔️|| +|[mindspore.ops.ReduceScatter](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ReduceScatter.html)|✔️|| +|[mindspore.ops.ReduceSum](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ReduceSum.html)|✔️|| +|[mindspore.ops.Select](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Select.html)|✔️|| +|[mindspore.ops.Softmax](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Softmax.html)|✔️|| +|[mindspore.ops.Sqrt](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Sqrt.html)|✔️|| +|[mindspore.ops.Square](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Square.html)|✔️|| +|[mindspore.ops.Sub](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Sub.html)|✔️|| +|[mindspore.ops.Tile](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Tile.html)|✔️|| +|[mindspore.ops.Transpose](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Transpose.html)|✔️|| diff --git a/docs/mindspore/source_zh_cn/api_python/dynamic_shape_func.md b/docs/mindspore/source_zh_cn/api_python/dynamic_shape_func.md index 8cda19067a..aed6d6c238 100644 --- a/docs/mindspore/source_zh_cn/api_python/dynamic_shape_func.md +++ b/docs/mindspore/source_zh_cn/api_python/dynamic_shape_func.md @@ -1,8 +1,8 @@ # functional接口动态shape支持情况 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/api_python/dynamic_shape_func.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/api_python/dynamic_shape_func.md) -> 
以下列表列举了PYNATIVE模式下支持动态shape功能的functional接口。其中部分functional接口可能会存在数据类型支持不全的问题,如遇到此类问题,可以通过主动插入[Cast](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Cast.html)算子解决。 +> 以下列表列举了PYNATIVE模式下支持动态shape功能的functional接口。其中部分functional接口可能会存在数据类型支持不全的问题,如遇到此类问题,可以通过主动插入[Cast](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Cast.html)算子解决。 > > 列表以外的functional接口对动态shape功能支持尚不完善,可能会执行失败。另外,图模式下,动态shape功能支持也不完善,可能会执行失败。 > @@ -10,242 +10,242 @@ | API名称 | Ascend | GPU | CPU | | :--- |:-------- | :------- |:---------| -|[mindspore.ops.abs](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.abs.html)|✔️|✔️|✔️| -|[mindspore.ops.acos](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.acos.html)|✔️|✔️|✔️| -|[mindspore.ops.acosh](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.acosh.html)|✔️|✔️|✔️| -|[mindspore.ops.add](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.add.html)|✔️|✔️|✔️| -|[mindspore.ops.addcdiv](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.addcdiv.html)|✔️|✔️|✔️| -|[mindspore.ops.addcmul](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.addcmul.html)|✔️|✔️|✔️| -|[mindspore.ops.addmm](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.addmm.html)|✔️|✔️|✔️| -|[mindspore.ops.addn](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.addn.html)|✔️|✔️|✔️| -|[mindspore.ops.all](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.all.html)|✔️|✔️|✔️| -|[mindspore.ops.amax](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.amax.html)|✔️|✔️|✔️| -|[mindspore.ops.amin](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.amin.html)|✔️|✔️|✔️| -|[mindspore.ops.angle](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.angle.html)|✔️|✔️|✔️| -|[mindspore.ops.any](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.any.html)|✔️|✔️|✔️| -|[mindspore.ops.argmax](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.argmax.html)|✔️|✔️|✔️| -|[mindspore.ops.argmin](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.argmin.html)|✔️|✔️|✔️| -|[mindspore.ops.argsort](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.argsort.html)|✔️|✔️|✔️| -|[mindspore.ops.asin](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.asin.html)|✔️|✔️|✔️| -|[mindspore.ops.asinh](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.asinh.html)|✔️|✔️|✔️| -|[mindspore.ops.assign](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.assign.html)|✔️|✔️|✔️| -|[mindspore.ops.assign_add](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.assign_add.html)|✔️|✔️|✔️| -|[mindspore.ops.atan](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.atan.html)|✔️|✔️|✔️| -|[mindspore.ops.atan2](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.atan2.html)|✔️|✔️|✔️| -|[mindspore.ops.atanh](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.atanh.html)|✔️|✔️|✔️| -|[mindspore.ops.baddbmm](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.baddbmm.html)|✔️|✔️|✔️| -|[mindspore.ops.bernoulli](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.bernoulli.html)|❌|✔️|✔️| 
-|[mindspore.ops.bessel_i0](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.bessel_i0.html)|❌|✔️|✔️| -|[mindspore.ops.bessel_i0e](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.bessel_i0e.html)|✔️|✔️|✔️| -|[mindspore.ops.bessel_i1](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.bessel_i1.html)|❌|✔️|✔️| -|[mindspore.ops.bessel_i1e](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.bessel_i1e.html)|✔️|✔️|✔️| -|[mindspore.ops.bessel_j0](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.bessel_j0.html)|❌|✔️|✔️| -|[mindspore.ops.bessel_j1](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.bessel_j1.html)|❌|✔️|✔️| -|[mindspore.ops.bias_add](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.bias_add.html)|❌|✔️|✔️| -|[mindspore.ops.bincount](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.bincount.html)|✔️|✔️|✔️| -|[mindspore.ops.bitwise_and](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.bitwise_and.html)|✔️|✔️|✔️| -|[mindspore.ops.bitwise_left_shift](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.bitwise_left_shift.html)|✔️|✔️|✔️| -|[mindspore.ops.bitwise_or](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.bitwise_or.html)|✔️|✔️|✔️| -|[mindspore.ops.bitwise_right_shift](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.bitwise_right_shift.html)|✔️|✔️|✔️| -|[mindspore.ops.bitwise_xor](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.bitwise_xor.html)|✔️|✔️|✔️| -|[mindspore.ops.bmm](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.bmm.html)|✔️|✔️|✔️| -|[mindspore.ops.broadcast_to](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.broadcast_to.html)|✔️|✔️|✔️| -|[mindspore.ops.ceil](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ceil.html)|✔️|✔️|✔️| -|[mindspore.ops.celu](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.celu.html)|✔️|✔️|✔️| -|[mindspore.ops.chunk](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.chunk.html)|❌|✔️|✔️| -|[mindspore.ops.clamp](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.clamp.html)|✔️|✔️|✔️| -|[mindspore.ops.clip_by_global_norm](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.clip_by_global_norm.html)|✔️|✔️|✔️| -|[mindspore.ops.clip_by_value](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.clip_by_value.html)|✔️|✔️|✔️| -|[mindspore.ops.concat](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.concat.html)|✔️|✔️|✔️| -|[mindspore.ops.conj](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.conj.html)|❌|✔️|✔️| -|[mindspore.ops.cos](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.cos.html)|✔️|✔️|✔️| -|[mindspore.ops.cosh](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.cosh.html)|✔️|✔️|✔️| -|[mindspore.ops.cross](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.cross.html)|✔️|❌|✔️| -|[mindspore.ops.cross_entropy](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.cross_entropy.html)|✔️|✔️|✔️| -|[mindspore.ops.cummax](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.cummax.html)|❌|✔️|✔️| 
-|[mindspore.ops.cummin](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.cummin.html)|✔️|✔️|✔️| -|[mindspore.ops.cumprod](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.cumprod.html)|❌|✔️|✔️| -|[mindspore.ops.cumsum](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.cumsum.html)|❌|✔️|✔️| -|[mindspore.ops.diag](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.diag.html)|✔️|✔️|✔️| -|[mindspore.ops.diag_embed](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.diag_embed.html)|✔️|✔️|✔️| -|[mindspore.ops.diagonal](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.diagonal.html)|✔️|✔️|✔️| -|[mindspore.ops.dist](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.dist.html)|✔️|✔️|✔️| -|[mindspore.ops.div](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.div.html)|✔️|✔️|✔️| -|[mindspore.ops.dot](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.dot.html)|✔️|✔️|✔️| -|[mindspore.ops.dropout](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.dropout.html)|✔️|✔️|✔️| -|[mindspore.ops.dropout2d](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.dropout2d.html)|✔️|✔️|✔️| -|[mindspore.ops.dropout3d](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.dropout3d.html)|✔️|✔️|✔️| -|[mindspore.ops.einsum](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.einsum.html)|❌|✔️|❌| -|[mindspore.ops.elu](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.elu.html)|✔️|✔️|✔️| -|[mindspore.ops.equal](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.equal.html)|✔️|✔️|✔️| -|[mindspore.ops.erf](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.erf.html)|✔️|✔️|✔️| -|[mindspore.ops.erfc](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.erfc.html)|✔️|✔️|✔️| -|[mindspore.ops.erfinv](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.erfinv.html)|✔️|✔️|✔️| -|[mindspore.ops.exp](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.exp.html)|✔️|✔️|✔️| -|[mindspore.ops.expand_dims](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.expand_dims.html)|✔️|✔️|✔️| -|[mindspore.ops.expm1](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.expm1.html)|✔️|✔️|✔️| -|[mindspore.ops.eye](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.eye.html)|✔️|✔️|✔️| -|[mindspore.ops.fill](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.fill.html)|✔️|✔️|✔️| -|[mindspore.ops.flatten](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.flatten.html)|✔️|✔️|✔️| -|[mindspore.ops.flip](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.flip.html)|✔️|✔️|✔️| -|[mindspore.ops.floor](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.floor.html)|✔️|✔️|✔️| -|[mindspore.ops.floor_div](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.floor_div.html)|✔️|✔️|✔️| -|[mindspore.ops.floor_mod](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.floor_mod.html)|✔️|✔️|✔️| -|[mindspore.ops.fmod](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.fmod.html)|✔️|✔️|✔️| 
-|[mindspore.ops.fold](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.fold.html)|✔️|✔️|✔️| -|[mindspore.ops.full](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.full.html)|✔️|✔️|✔️| -|[mindspore.ops.full_like](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.full_like.html)|✔️|✔️|✔️| -|[mindspore.ops.gather](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.gather.html)|✔️|✔️|✔️| -|[mindspore.ops.gather_elements](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.gather_elements.html)|✔️|✔️|✔️| -|[mindspore.ops.gather_nd](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.gather_nd.html)|✔️|✔️|✔️| -|[mindspore.ops.gcd](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.gcd.html)|✔️|✔️|✔️| -|[mindspore.ops.ge](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ge.html)|✔️|✔️|✔️| -|[mindspore.ops.gelu](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.gelu.html)|✔️|✔️|✔️| -|[mindspore.ops.geqrf](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.geqrf.html)|✔️|✔️|✔️| -|[mindspore.ops.ger](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ger.html)|✔️|✔️|✔️| -|[mindspore.ops.glu](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.glu.html)|✔️|✔️|✔️| -|[mindspore.ops.greater](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.greater.html)|✔️|✔️|✔️| -|[mindspore.ops.greater_equal](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.greater_equal.html)|✔️|✔️|✔️| -|[mindspore.ops.grid_sample](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.grid_sample.html)|✔️|✔️|✔️| -|[mindspore.ops.gt](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.gt.html)|✔️|✔️|✔️| -|[mindspore.ops.gumbel_softmax](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.gumbel_softmax.html)|✔️|✔️|✔️| -|[mindspore.ops.hardshrink](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.hardshrink.html)|✔️|✔️|✔️| -|[mindspore.ops.hardsigmoid](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.hardsigmoid.html)|✔️|✔️|✔️| -|[mindspore.ops.hardswish](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.hardswish.html)|✔️|✔️|✔️| -|[mindspore.ops.hardtanh](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.hardtanh.html)|✔️|✔️|✔️| -|[mindspore.ops.heaviside](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.heaviside.html)|✔️|✔️|✔️| -|[mindspore.ops.hypot](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.hypot.html)|✔️|✔️|✔️| -|[mindspore.ops.igammac](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.igammac.html)|✔️|✔️|✔️| -|[mindspore.ops.imag](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.imag.html)|✔️|✔️|✔️| -|[mindspore.ops.index_select](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.index_select.html)|✔️|✔️|✔️| -|[mindspore.ops.interpolate](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.interpolate.html)|❌|✔️|✔️| -|[mindspore.ops.inverse](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.inverse.html)|❌|✔️|✔️| 
-|[mindspore.ops.invert](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.invert.html)|✔️|✔️|✔️| -|[mindspore.ops.isfinite](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.isfinite.html)|✔️|✔️|✔️| -|[mindspore.ops.isinf](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.isinf.html)|✔️|✔️|✔️| -|[mindspore.ops.isnan](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.isnan.html)|✔️|✔️|✔️| -|[mindspore.ops.l1_loss](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.l1_loss.html)|✔️|✔️|✔️| -|[mindspore.ops.lcm](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.lcm.html)|✔️|✔️|✔️| -|[mindspore.ops.le](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.le.html)|✔️|✔️|✔️| -|[mindspore.ops.lerp](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.lerp.html)|✔️|✔️|✔️| -|[mindspore.ops.less](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.less.html)|✔️|✔️|✔️| -|[mindspore.ops.less_equal](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.less_equal.html)|✔️|✔️|✔️| -|[mindspore.ops.linspace](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.linspace.html)|✔️|✔️|✔️| -|[mindspore.ops.log](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.log.html)|✔️|✔️|✔️| -|[mindspore.ops.log_softmax](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.log_softmax.html)|✔️|✔️|✔️| -|[mindspore.ops.log10](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.log10.html)|✔️|✔️|✔️| -|[mindspore.ops.log1p](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.log1p.html)|✔️|✔️|✔️| -|[mindspore.ops.log2](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.log2.html)|✔️|✔️|✔️| -|[mindspore.ops.logical_and](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.logical_and.html)|✔️|✔️|✔️| -|[mindspore.ops.logical_not](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.logical_not.html)|✔️|✔️|✔️| -|[mindspore.ops.logical_or](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.logical_or.html)|✔️|✔️|✔️| -|[mindspore.ops.logical_xor](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.logical_xor.html)|✔️|❌|✔️| -|[mindspore.ops.logit](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.logit.html)|✔️|✔️|✔️| -|[mindspore.ops.logsumexp](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.logsumexp.html)|✔️|✔️|✔️| -|[mindspore.ops.lt](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.lt.html)|✔️|✔️|✔️| -|[mindspore.ops.margin_ranking_loss](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.margin_ranking_loss.html)|✔️|✔️|✔️| -|[mindspore.ops.masked_fill](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.masked_fill.html)|✔️|✔️|✔️| -|[mindspore.ops.masked_select](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.masked_select.html)|✔️|✔️|✔️| -|[mindspore.ops.matmul](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.matmul.html)|✔️|✔️|✔️| -|[mindspore.ops.matrix_solve](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.matrix_solve.html)|✔️|❌|✔️| 
-|[mindspore.ops.max](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.max.html)|✔️|✔️|✔️| -|[mindspore.ops.maximum](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.maximum.html)|✔️|✔️|✔️| -|[mindspore.ops.mean](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.mean.html)|✔️|✔️|✔️| -|[mindspore.ops.median](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.median.html)|❌|✔️|✔️| -|[mindspore.ops.meshgrid](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.meshgrid.html)|✔️|✔️|✔️| -|[mindspore.ops.min](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.min.html)|✔️|✔️|✔️| -|[mindspore.ops.minimum](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.minimum.html)|✔️|✔️|✔️| -|[mindspore.ops.mish](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.mish.html)|❌|✔️|✔️| -|[mindspore.ops.mse_loss](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.mse_loss.html)|✔️|✔️|✔️| -|[mindspore.ops.mul](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.mul.html)|✔️|✔️|✔️| -|[mindspore.ops.multinomial](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.multinomial.html)|✔️|✔️|✔️| -|[mindspore.ops.mv](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.mv.html)|✔️|✔️|✔️| -|[mindspore.ops.mvlgamma](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.mvlgamma.html)|✔️|✔️|✔️| -|[mindspore.ops.nan_to_num](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.nan_to_num.html)|✔️|❌|✔️| -|[mindspore.ops.narrow](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.narrow.html)|✔️|✔️|✔️| -|[mindspore.ops.ne](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ne.html)|✔️|✔️|✔️| -|[mindspore.ops.neg](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.neg.html)|✔️|✔️|✔️| -|[mindspore.ops.nll_loss](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.nll_loss.html)|✔️|✔️|✔️| -|[mindspore.ops.nonzero](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.nonzero.html)|✔️|✔️|✔️| -|[mindspore.ops.norm](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.norm.html)|❌|✔️|✔️| -|[mindspore.ops.normal](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.normal.html)|✔️|✔️|✔️| -|[mindspore.ops.numel](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.numel.html)|✔️|✔️|✔️| -|[mindspore.ops.one_hot](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.one_hot.html)|✔️|✔️|✔️| -|[mindspore.ops.ones](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ones.html)|✔️|✔️|✔️| -|[mindspore.ops.ones_like](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ones_like.html)|✔️|✔️|✔️| -|[mindspore.ops.pad](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.pad.html)|✔️|✔️|✔️| -|[mindspore.ops.polar](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.polar.html)|❌|✔️|✔️| -|[mindspore.ops.polygamma](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.polygamma.html)|❌|✔️|✔️| -|[mindspore.ops.pow](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.pow.html)|✔️|✔️|✔️| 
-|[mindspore.ops.prelu](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.prelu.html)|✔️|✔️|✔️| -|[mindspore.ops.prod](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.prod.html)|✔️|✔️|✔️| -|[mindspore.ops.rand](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.rand.html)|✔️|✔️|✔️| -|[mindspore.ops.rand_like](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.rand_like.html)|✔️|✔️|✔️| -|[mindspore.ops.randint](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.randint.html)|✔️|✔️|✔️| -|[mindspore.ops.randn](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.randn.html)|✔️|✔️|✔️| -|[mindspore.ops.randn_like](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.randn_like.html)|✔️|✔️|✔️| -|[mindspore.ops.randperm](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.randperm.html)|❌|❌|✔️| -|[mindspore.ops.range](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.range.html)|❌|✔️|✔️| -|[mindspore.ops.ravel](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ravel.html)|✔️|✔️|✔️| -|[mindspore.ops.real](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.real.html)|❌|✔️|✔️| -|[mindspore.ops.reciprocal](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.reciprocal.html)|✔️|✔️|✔️| -|[mindspore.ops.relu](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.relu.html)|✔️|✔️|✔️| -|[mindspore.ops.relu6](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.relu6.html)|✔️|✔️|✔️| -|[mindspore.ops.remainder](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.remainder.html)|✔️|✔️|✔️| -|[mindspore.ops.repeat_interleave](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.repeat_interleave.html)|✔️|✔️|✔️| -|[mindspore.ops.reshape](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.reshape.html)|✔️|✔️|✔️| -|[mindspore.ops.reverse_sequence](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.reverse_sequence.html)|✔️|✔️|✔️| -|[mindspore.ops.roll](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.roll.html)|❌|✔️|❌| -|[mindspore.ops.round](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.round.html)|✔️|✔️|✔️| -|[mindspore.ops.rsqrt](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.rsqrt.html)|✔️|✔️|✔️| -|[mindspore.ops.scatter_nd](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.scatter_nd.html)|✔️|✔️|✔️| -|[mindspore.ops.scatter_nd_add](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.scatter_nd_add.html)|✔️|✔️|✔️| -|[mindspore.ops.scatter_nd_max](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.scatter_nd_max.html)|❌|✔️|✔️| -|[mindspore.ops.scatter_nd_min](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.scatter_nd_min.html)|✔️|✔️|✔️| -|[mindspore.ops.scatter_nd_mul](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.scatter_nd_mul.html)|❌|✔️|✔️| -|[mindspore.ops.scatter_nd_sub](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.scatter_nd_sub.html)|✔️|✔️|✔️| -|[mindspore.ops.scatter_update](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.scatter_update.html)|✔️|✔️|✔️| 
-|[mindspore.ops.select](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.select.html)|✔️|✔️|✔️| -|[mindspore.ops.selu](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.selu.html)|✔️|✔️|✔️| -|[mindspore.ops.sigmoid](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.sigmoid.html)|✔️|✔️|✔️| -|[mindspore.ops.sign](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.sign.html)|✔️|✔️|✔️| -|[mindspore.ops.silu](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.silu.html)|✔️|✔️|✔️| -|[mindspore.ops.sin](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.sin.html)|✔️|✔️|✔️| -|[mindspore.ops.sinc](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.sinc.html)|✔️|✔️|✔️| -|[mindspore.ops.sinh](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.sinh.html)|✔️|✔️|✔️| -|[mindspore.ops.slice](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.slice.html)|✔️|✔️|✔️| -|[mindspore.ops.softmax](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.softmax.html)|✔️|✔️|✔️| -|[mindspore.ops.softshrink](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.softshrink.html)|✔️|✔️|✔️| -|[mindspore.ops.sort](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.sort.html)|✔️|❌|✔️|| -|[mindspore.ops.split](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.split.html)|❌|✔️|✔️| -|[mindspore.ops.sqrt](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.sqrt.html)|✔️|✔️|✔️| -|[mindspore.ops.square](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.square.html)|✔️|✔️|✔️| -|[mindspore.ops.squeeze](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.squeeze.html)|✔️|✔️|✔️| -|[mindspore.ops.stack](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.stack.html)|✔️|✔️|✔️| -|[mindspore.ops.std](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.std.html)|✔️|❌|✔️|| -|[mindspore.ops.strided_slice](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.strided_slice.html)|✔️|✔️|✔️| -|[mindspore.ops.sub](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.sub.html)|✔️|✔️|✔️| -|[mindspore.ops.sum](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.sum.html)|✔️|✔️|✔️| -|[mindspore.ops.svd](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.svd.html)|❌|✔️|✔️| -|[mindspore.ops.tan](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.tan.html)|✔️|✔️|✔️| -|[mindspore.ops.tanh](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.tanh.html)|✔️|✔️|✔️| -|[mindspore.ops.tile](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.tile.html)|✔️|✔️|✔️| -|[mindspore.ops.topk](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.topk.html)|✔️|✔️|✔️| -|[mindspore.ops.trace](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.trace.html)|✔️|✔️|✔️| -|[mindspore.ops.transpose](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.transpose.html)|✔️|✔️|✔️| -|[mindspore.ops.tril](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.tril.html)|✔️|✔️|✔️| -|[mindspore.ops.triu](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.triu.html)|✔️|✔️|✔️| 
-|[mindspore.ops.trunc](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.trunc.html)|✔️|✔️|✔️| -|[mindspore.ops.unfold](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.unfold.html)|✔️|✔️|✔️| -|[mindspore.ops.uniform](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.uniform.html)|❌|✔️|✔️| -|[mindspore.ops.unique](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.unique.html)|✔️|✔️|✔️| -|[mindspore.ops.unsorted_segment_sum](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.unsorted_segment_sum.html)|✔️|✔️|✔️| -|[mindspore.ops.unsqueeze](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.unsqueeze.html)|✔️|✔️|✔️| -|[mindspore.ops.unstack](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.unstack.html)|✔️|✔️|✔️| -|[mindspore.ops.where](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.where.html)|✔️|✔️|✔️| -|[mindspore.ops.xlogy](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.xlogy.html)|✔️|✔️|✔️| -|[mindspore.ops.zeros](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.zeros.html)|✔️|✔️|✔️| -|[mindspore.ops.zeros_like](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.zeros_like.html)|✔️|✔️|✔️| -|[mindspore.ops.zeta](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.zeta.html)|✔️|✔️|✔️| -|[mindspore.mint](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.mint.html#mindspore-mint)|✔️|❌|❌| -|[mindspore.mint.nn.functional](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.mint.html#mindspore-mint-nn-functional)|✔️|❌|❌| +|[mindspore.ops.abs](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.abs.html)|✔️|✔️|✔️| +|[mindspore.ops.acos](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.acos.html)|✔️|✔️|✔️| +|[mindspore.ops.acosh](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.acosh.html)|✔️|✔️|✔️| +|[mindspore.ops.add](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.add.html)|✔️|✔️|✔️| +|[mindspore.ops.addcdiv](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.addcdiv.html)|✔️|✔️|✔️| +|[mindspore.ops.addcmul](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.addcmul.html)|✔️|✔️|✔️| +|[mindspore.ops.addmm](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.addmm.html)|✔️|✔️|✔️| +|[mindspore.ops.addn](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.addn.html)|✔️|✔️|✔️| +|[mindspore.ops.all](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.all.html)|✔️|✔️|✔️| +|[mindspore.ops.amax](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.amax.html)|✔️|✔️|✔️| +|[mindspore.ops.amin](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.amin.html)|✔️|✔️|✔️| +|[mindspore.ops.angle](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.angle.html)|✔️|✔️|✔️| +|[mindspore.ops.any](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.any.html)|✔️|✔️|✔️| +|[mindspore.ops.argmax](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.argmax.html)|✔️|✔️|✔️| +|[mindspore.ops.argmin](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.argmin.html)|✔️|✔️|✔️| 
+|[mindspore.ops.argsort](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.argsort.html)|✔️|✔️|✔️| +|[mindspore.ops.asin](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.asin.html)|✔️|✔️|✔️| +|[mindspore.ops.asinh](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.asinh.html)|✔️|✔️|✔️| +|[mindspore.ops.assign](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.assign.html)|✔️|✔️|✔️| +|[mindspore.ops.assign_add](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.assign_add.html)|✔️|✔️|✔️| +|[mindspore.ops.atan](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.atan.html)|✔️|✔️|✔️| +|[mindspore.ops.atan2](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.atan2.html)|✔️|✔️|✔️| +|[mindspore.ops.atanh](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.atanh.html)|✔️|✔️|✔️| +|[mindspore.ops.baddbmm](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.baddbmm.html)|✔️|✔️|✔️| +|[mindspore.ops.bernoulli](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.bernoulli.html)|❌|✔️|✔️| +|[mindspore.ops.bessel_i0](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.bessel_i0.html)|❌|✔️|✔️| +|[mindspore.ops.bessel_i0e](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.bessel_i0e.html)|✔️|✔️|✔️| +|[mindspore.ops.bessel_i1](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.bessel_i1.html)|❌|✔️|✔️| +|[mindspore.ops.bessel_i1e](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.bessel_i1e.html)|✔️|✔️|✔️| +|[mindspore.ops.bessel_j0](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.bessel_j0.html)|❌|✔️|✔️| +|[mindspore.ops.bessel_j1](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.bessel_j1.html)|❌|✔️|✔️| +|[mindspore.ops.bias_add](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.bias_add.html)|❌|✔️|✔️| +|[mindspore.ops.bincount](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.bincount.html)|✔️|✔️|✔️| +|[mindspore.ops.bitwise_and](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.bitwise_and.html)|✔️|✔️|✔️| +|[mindspore.ops.bitwise_left_shift](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.bitwise_left_shift.html)|✔️|✔️|✔️| +|[mindspore.ops.bitwise_or](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.bitwise_or.html)|✔️|✔️|✔️| +|[mindspore.ops.bitwise_right_shift](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.bitwise_right_shift.html)|✔️|✔️|✔️| +|[mindspore.ops.bitwise_xor](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.bitwise_xor.html)|✔️|✔️|✔️| +|[mindspore.ops.bmm](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.bmm.html)|✔️|✔️|✔️| +|[mindspore.ops.broadcast_to](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.broadcast_to.html)|✔️|✔️|✔️| +|[mindspore.ops.ceil](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ceil.html)|✔️|✔️|✔️| +|[mindspore.ops.celu](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.celu.html)|✔️|✔️|✔️| +|[mindspore.ops.chunk](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.chunk.html)|❌|✔️|✔️| 
+|[mindspore.ops.clamp](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.clamp.html)|✔️|✔️|✔️| +|[mindspore.ops.clip_by_global_norm](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.clip_by_global_norm.html)|✔️|✔️|✔️| +|[mindspore.ops.clip_by_value](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.clip_by_value.html)|✔️|✔️|✔️| +|[mindspore.ops.concat](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.concat.html)|✔️|✔️|✔️| +|[mindspore.ops.conj](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.conj.html)|❌|✔️|✔️| +|[mindspore.ops.cos](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.cos.html)|✔️|✔️|✔️| +|[mindspore.ops.cosh](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.cosh.html)|✔️|✔️|✔️| +|[mindspore.ops.cross](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.cross.html)|✔️|❌|✔️| +|[mindspore.ops.cross_entropy](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.cross_entropy.html)|✔️|✔️|✔️| +|[mindspore.ops.cummax](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.cummax.html)|❌|✔️|✔️| +|[mindspore.ops.cummin](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.cummin.html)|✔️|✔️|✔️| +|[mindspore.ops.cumprod](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.cumprod.html)|❌|✔️|✔️| +|[mindspore.ops.cumsum](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.cumsum.html)|❌|✔️|✔️| +|[mindspore.ops.diag](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.diag.html)|✔️|✔️|✔️| +|[mindspore.ops.diag_embed](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.diag_embed.html)|✔️|✔️|✔️| +|[mindspore.ops.diagonal](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.diagonal.html)|✔️|✔️|✔️| +|[mindspore.ops.dist](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.dist.html)|✔️|✔️|✔️| +|[mindspore.ops.div](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.div.html)|✔️|✔️|✔️| +|[mindspore.ops.dot](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.dot.html)|✔️|✔️|✔️| +|[mindspore.ops.dropout](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.dropout.html)|✔️|✔️|✔️| +|[mindspore.ops.dropout2d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.dropout2d.html)|✔️|✔️|✔️| +|[mindspore.ops.dropout3d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.dropout3d.html)|✔️|✔️|✔️| +|[mindspore.ops.einsum](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.einsum.html)|❌|✔️|❌| +|[mindspore.ops.elu](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.elu.html)|✔️|✔️|✔️| +|[mindspore.ops.equal](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.equal.html)|✔️|✔️|✔️| +|[mindspore.ops.erf](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.erf.html)|✔️|✔️|✔️| +|[mindspore.ops.erfc](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.erfc.html)|✔️|✔️|✔️| +|[mindspore.ops.erfinv](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.erfinv.html)|✔️|✔️|✔️| +|[mindspore.ops.exp](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.exp.html)|✔️|✔️|✔️| 
+|[mindspore.ops.expand_dims](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.expand_dims.html)|✔️|✔️|✔️| +|[mindspore.ops.expm1](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.expm1.html)|✔️|✔️|✔️| +|[mindspore.ops.eye](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.eye.html)|✔️|✔️|✔️| +|[mindspore.ops.fill](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.fill.html)|✔️|✔️|✔️| +|[mindspore.ops.flatten](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.flatten.html)|✔️|✔️|✔️| +|[mindspore.ops.flip](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.flip.html)|✔️|✔️|✔️| +|[mindspore.ops.floor](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.floor.html)|✔️|✔️|✔️| +|[mindspore.ops.floor_div](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.floor_div.html)|✔️|✔️|✔️| +|[mindspore.ops.floor_mod](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.floor_mod.html)|✔️|✔️|✔️| +|[mindspore.ops.fmod](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.fmod.html)|✔️|✔️|✔️| +|[mindspore.ops.fold](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.fold.html)|✔️|✔️|✔️| +|[mindspore.ops.full](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.full.html)|✔️|✔️|✔️| +|[mindspore.ops.full_like](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.full_like.html)|✔️|✔️|✔️| +|[mindspore.ops.gather](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.gather.html)|✔️|✔️|✔️| +|[mindspore.ops.gather_elements](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.gather_elements.html)|✔️|✔️|✔️| +|[mindspore.ops.gather_nd](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.gather_nd.html)|✔️|✔️|✔️| +|[mindspore.ops.gcd](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.gcd.html)|✔️|✔️|✔️| +|[mindspore.ops.ge](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ge.html)|✔️|✔️|✔️| +|[mindspore.ops.gelu](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.gelu.html)|✔️|✔️|✔️| +|[mindspore.ops.geqrf](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.geqrf.html)|✔️|✔️|✔️| +|[mindspore.ops.ger](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ger.html)|✔️|✔️|✔️| +|[mindspore.ops.glu](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.glu.html)|✔️|✔️|✔️| +|[mindspore.ops.greater](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.greater.html)|✔️|✔️|✔️| +|[mindspore.ops.greater_equal](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.greater_equal.html)|✔️|✔️|✔️| +|[mindspore.ops.grid_sample](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.grid_sample.html)|✔️|✔️|✔️| +|[mindspore.ops.gt](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.gt.html)|✔️|✔️|✔️| +|[mindspore.ops.gumbel_softmax](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.gumbel_softmax.html)|✔️|✔️|✔️| +|[mindspore.ops.hardshrink](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.hardshrink.html)|✔️|✔️|✔️| +|[mindspore.ops.hardsigmoid](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.hardsigmoid.html)|✔️|✔️|✔️| 
+|[mindspore.ops.hardswish](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.hardswish.html)|✔️|✔️|✔️| +|[mindspore.ops.hardtanh](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.hardtanh.html)|✔️|✔️|✔️| +|[mindspore.ops.heaviside](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.heaviside.html)|✔️|✔️|✔️| +|[mindspore.ops.hypot](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.hypot.html)|✔️|✔️|✔️| +|[mindspore.ops.igammac](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.igammac.html)|✔️|✔️|✔️| +|[mindspore.ops.imag](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.imag.html)|✔️|✔️|✔️| +|[mindspore.ops.index_select](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.index_select.html)|✔️|✔️|✔️| +|[mindspore.ops.interpolate](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.interpolate.html)|❌|✔️|✔️| +|[mindspore.ops.inverse](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.inverse.html)|❌|✔️|✔️| +|[mindspore.ops.invert](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.invert.html)|✔️|✔️|✔️| +|[mindspore.ops.isfinite](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.isfinite.html)|✔️|✔️|✔️| +|[mindspore.ops.isinf](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.isinf.html)|✔️|✔️|✔️| +|[mindspore.ops.isnan](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.isnan.html)|✔️|✔️|✔️| +|[mindspore.ops.l1_loss](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.l1_loss.html)|✔️|✔️|✔️| +|[mindspore.ops.lcm](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.lcm.html)|✔️|✔️|✔️| +|[mindspore.ops.le](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.le.html)|✔️|✔️|✔️| +|[mindspore.ops.lerp](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.lerp.html)|✔️|✔️|✔️| +|[mindspore.ops.less](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.less.html)|✔️|✔️|✔️| +|[mindspore.ops.less_equal](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.less_equal.html)|✔️|✔️|✔️| +|[mindspore.ops.linspace](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.linspace.html)|✔️|✔️|✔️| +|[mindspore.ops.log](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.log.html)|✔️|✔️|✔️| +|[mindspore.ops.log_softmax](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.log_softmax.html)|✔️|✔️|✔️| +|[mindspore.ops.log10](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.log10.html)|✔️|✔️|✔️| +|[mindspore.ops.log1p](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.log1p.html)|✔️|✔️|✔️| +|[mindspore.ops.log2](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.log2.html)|✔️|✔️|✔️| +|[mindspore.ops.logical_and](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.logical_and.html)|✔️|✔️|✔️| +|[mindspore.ops.logical_not](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.logical_not.html)|✔️|✔️|✔️| +|[mindspore.ops.logical_or](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.logical_or.html)|✔️|✔️|✔️| 
+|[mindspore.ops.logical_xor](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.logical_xor.html)|✔️|❌|✔️| +|[mindspore.ops.logit](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.logit.html)|✔️|✔️|✔️| +|[mindspore.ops.logsumexp](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.logsumexp.html)|✔️|✔️|✔️| +|[mindspore.ops.lt](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.lt.html)|✔️|✔️|✔️| +|[mindspore.ops.margin_ranking_loss](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.margin_ranking_loss.html)|✔️|✔️|✔️| +|[mindspore.ops.masked_fill](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.masked_fill.html)|✔️|✔️|✔️| +|[mindspore.ops.masked_select](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.masked_select.html)|✔️|✔️|✔️| +|[mindspore.ops.matmul](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.matmul.html)|✔️|✔️|✔️| +|[mindspore.ops.matrix_solve](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.matrix_solve.html)|✔️|❌|✔️| +|[mindspore.ops.max](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.max.html)|✔️|✔️|✔️| +|[mindspore.ops.maximum](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.maximum.html)|✔️|✔️|✔️| +|[mindspore.ops.mean](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.mean.html)|✔️|✔️|✔️| +|[mindspore.ops.median](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.median.html)|❌|✔️|✔️| +|[mindspore.ops.meshgrid](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.meshgrid.html)|✔️|✔️|✔️| +|[mindspore.ops.min](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.min.html)|✔️|✔️|✔️| +|[mindspore.ops.minimum](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.minimum.html)|✔️|✔️|✔️| +|[mindspore.ops.mish](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.mish.html)|❌|✔️|✔️| +|[mindspore.ops.mse_loss](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.mse_loss.html)|✔️|✔️|✔️| +|[mindspore.ops.mul](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.mul.html)|✔️|✔️|✔️| +|[mindspore.ops.multinomial](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.multinomial.html)|✔️|✔️|✔️| +|[mindspore.ops.mv](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.mv.html)|✔️|✔️|✔️| +|[mindspore.ops.mvlgamma](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.mvlgamma.html)|✔️|✔️|✔️| +|[mindspore.ops.nan_to_num](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.nan_to_num.html)|✔️|❌|✔️| +|[mindspore.ops.narrow](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.narrow.html)|✔️|✔️|✔️| +|[mindspore.ops.ne](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ne.html)|✔️|✔️|✔️| +|[mindspore.ops.neg](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.neg.html)|✔️|✔️|✔️| +|[mindspore.ops.nll_loss](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.nll_loss.html)|✔️|✔️|✔️| +|[mindspore.ops.nonzero](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.nonzero.html)|✔️|✔️|✔️| 
+|[mindspore.ops.norm](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.norm.html)|❌|✔️|✔️| +|[mindspore.ops.normal](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.normal.html)|✔️|✔️|✔️| +|[mindspore.ops.numel](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.numel.html)|✔️|✔️|✔️| +|[mindspore.ops.one_hot](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.one_hot.html)|✔️|✔️|✔️| +|[mindspore.ops.ones](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ones.html)|✔️|✔️|✔️| +|[mindspore.ops.ones_like](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ones_like.html)|✔️|✔️|✔️| +|[mindspore.ops.pad](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.pad.html)|✔️|✔️|✔️| +|[mindspore.ops.polar](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.polar.html)|❌|✔️|✔️| +|[mindspore.ops.polygamma](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.polygamma.html)|❌|✔️|✔️| +|[mindspore.ops.pow](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.pow.html)|✔️|✔️|✔️| +|[mindspore.ops.prelu](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.prelu.html)|✔️|✔️|✔️| +|[mindspore.ops.prod](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.prod.html)|✔️|✔️|✔️| +|[mindspore.ops.rand](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.rand.html)|✔️|✔️|✔️| +|[mindspore.ops.rand_like](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.rand_like.html)|✔️|✔️|✔️| +|[mindspore.ops.randint](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.randint.html)|✔️|✔️|✔️| +|[mindspore.ops.randn](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.randn.html)|✔️|✔️|✔️| +|[mindspore.ops.randn_like](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.randn_like.html)|✔️|✔️|✔️| +|[mindspore.ops.randperm](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.randperm.html)|❌|❌|✔️| +|[mindspore.ops.range](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.range.html)|❌|✔️|✔️| +|[mindspore.ops.ravel](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ravel.html)|✔️|✔️|✔️| +|[mindspore.ops.real](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.real.html)|❌|✔️|✔️| +|[mindspore.ops.reciprocal](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.reciprocal.html)|✔️|✔️|✔️| +|[mindspore.ops.relu](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.relu.html)|✔️|✔️|✔️| +|[mindspore.ops.relu6](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.relu6.html)|✔️|✔️|✔️| +|[mindspore.ops.remainder](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.remainder.html)|✔️|✔️|✔️| +|[mindspore.ops.repeat_interleave](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.repeat_interleave.html)|✔️|✔️|✔️| +|[mindspore.ops.reshape](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.reshape.html)|✔️|✔️|✔️| +|[mindspore.ops.reverse_sequence](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.reverse_sequence.html)|✔️|✔️|✔️| +|[mindspore.ops.roll](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.roll.html)|❌|✔️|❌| 
+|[mindspore.ops.round](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.round.html)|✔️|✔️|✔️|
+|[mindspore.ops.rsqrt](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.rsqrt.html)|✔️|✔️|✔️|
+|[mindspore.ops.scatter_nd](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.scatter_nd.html)|✔️|✔️|✔️|
+|[mindspore.ops.scatter_nd_add](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.scatter_nd_add.html)|✔️|✔️|✔️|
+|[mindspore.ops.scatter_nd_max](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.scatter_nd_max.html)|❌|✔️|✔️|
+|[mindspore.ops.scatter_nd_min](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.scatter_nd_min.html)|✔️|✔️|✔️|
+|[mindspore.ops.scatter_nd_mul](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.scatter_nd_mul.html)|❌|✔️|✔️|
+|[mindspore.ops.scatter_nd_sub](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.scatter_nd_sub.html)|✔️|✔️|✔️|
+|[mindspore.ops.scatter_update](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.scatter_update.html)|✔️|✔️|✔️|
+|[mindspore.ops.select](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.select.html)|✔️|✔️|✔️|
+|[mindspore.ops.selu](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.selu.html)|✔️|✔️|✔️|
+|[mindspore.ops.sigmoid](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.sigmoid.html)|✔️|✔️|✔️|
+|[mindspore.ops.sign](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.sign.html)|✔️|✔️|✔️|
+|[mindspore.ops.silu](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.silu.html)|✔️|✔️|✔️|
+|[mindspore.ops.sin](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.sin.html)|✔️|✔️|✔️|
+|[mindspore.ops.sinc](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.sinc.html)|✔️|✔️|✔️|
+|[mindspore.ops.sinh](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.sinh.html)|✔️|✔️|✔️|
+|[mindspore.ops.slice](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.slice.html)|✔️|✔️|✔️|
+|[mindspore.ops.softmax](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.softmax.html)|✔️|✔️|✔️|
+|[mindspore.ops.softshrink](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.softshrink.html)|✔️|✔️|✔️|
+|[mindspore.ops.sort](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.sort.html)|✔️|❌|✔️|
+|[mindspore.ops.split](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.split.html)|❌|✔️|✔️|
+|[mindspore.ops.sqrt](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.sqrt.html)|✔️|✔️|✔️|
+|[mindspore.ops.square](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.square.html)|✔️|✔️|✔️|
+|[mindspore.ops.squeeze](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.squeeze.html)|✔️|✔️|✔️|
+|[mindspore.ops.stack](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.stack.html)|✔️|✔️|✔️|
+|[mindspore.ops.std](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.std.html)|✔️|❌|✔️|
+|[mindspore.ops.strided_slice](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.strided_slice.html)|✔️|✔️|✔️|
+|[mindspore.ops.sub](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.sub.html)|✔️|✔️|✔️| +|[mindspore.ops.sum](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.sum.html)|✔️|✔️|✔️| +|[mindspore.ops.svd](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.svd.html)|❌|✔️|✔️| +|[mindspore.ops.tan](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.tan.html)|✔️|✔️|✔️| +|[mindspore.ops.tanh](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.tanh.html)|✔️|✔️|✔️| +|[mindspore.ops.tile](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.tile.html)|✔️|✔️|✔️| +|[mindspore.ops.topk](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.topk.html)|✔️|✔️|✔️| +|[mindspore.ops.trace](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.trace.html)|✔️|✔️|✔️| +|[mindspore.ops.transpose](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.transpose.html)|✔️|✔️|✔️| +|[mindspore.ops.tril](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.tril.html)|✔️|✔️|✔️| +|[mindspore.ops.triu](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.triu.html)|✔️|✔️|✔️| +|[mindspore.ops.trunc](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.trunc.html)|✔️|✔️|✔️| +|[mindspore.ops.unfold](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.unfold.html)|✔️|✔️|✔️| +|[mindspore.ops.uniform](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.uniform.html)|❌|✔️|✔️| +|[mindspore.ops.unique](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.unique.html)|✔️|✔️|✔️| +|[mindspore.ops.unsorted_segment_sum](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.unsorted_segment_sum.html)|✔️|✔️|✔️| +|[mindspore.ops.unsqueeze](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.unsqueeze.html)|✔️|✔️|✔️| +|[mindspore.ops.unstack](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.unstack.html)|✔️|✔️|✔️| +|[mindspore.ops.where](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.where.html)|✔️|✔️|✔️| +|[mindspore.ops.xlogy](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.xlogy.html)|✔️|✔️|✔️| +|[mindspore.ops.zeros](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.zeros.html)|✔️|✔️|✔️| +|[mindspore.ops.zeros_like](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.zeros_like.html)|✔️|✔️|✔️| +|[mindspore.ops.zeta](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.zeta.html)|✔️|✔️|✔️| +|[mindspore.mint](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.mint.html#mindspore-mint)|✔️|❌|❌| +|[mindspore.mint.nn.functional](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.mint.html#mindspore-mint-nn-functional)|✔️|❌|❌| diff --git a/docs/mindspore/source_zh_cn/api_python/dynamic_shape_nn.md b/docs/mindspore/source_zh_cn/api_python/dynamic_shape_nn.md index 348759b330..ea10aba0d4 100644 --- a/docs/mindspore/source_zh_cn/api_python/dynamic_shape_nn.md +++ b/docs/mindspore/source_zh_cn/api_python/dynamic_shape_nn.md @@ -1,8 +1,8 @@ # nn接口动态shape支持情况 
-[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/api_python/dynamic_shape_nn.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/api_python/dynamic_shape_nn.md) -> 以下列表列举了PYNATIVE模式下支持动态shape功能的nn接口。其中部分nn接口可能会存在数据类型支持不全的问题,如遇到此类问题,可以通过主动插入[Cast](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Cast.html)算子解决。 +> 以下列表列举了PYNATIVE模式下支持动态shape功能的nn接口。其中部分nn接口可能会存在数据类型支持不全的问题,如遇到此类问题,可以通过主动插入[Cast](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Cast.html)算子解决。 > > 列表以外的nn接口对动态shape功能支持尚不完善,可能会执行失败。另外,图模式下,动态shape功能支持也不完善,可能会执行失败。 > @@ -10,58 +10,58 @@ | 算子名称 | Ascend | GPU | CPU | | :--- |:-------- | :------- |:---------| -|[mindspore.nn.Adam](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.Adam.html)|✔️|✔️|✔️| -|[mindspore.nn.AdaptiveAvgPool1d](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.AdaptiveAvgPool1d.html)|✔️|✔️|✔️| -|[mindspore.nn.AdaptiveAvgPool2d](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.AdaptiveAvgPool2d.html)|✔️|✔️|✔️| -|[mindspore.nn.AdaptiveAvgPool3d](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.AdaptiveAvgPool3d.html)|✔️|✔️|✔️| -|[mindspore.nn.AdaptiveMaxPool1d](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.AdaptiveMaxPool1d.html)|✔️|✔️|✔️| -|[mindspore.nn.AvgPool1d](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.AvgPool1d.html)|✔️|✔️|✔️| -|[mindspore.nn.AvgPool2d](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.AvgPool2d.html)|✔️|✔️|✔️| -|[mindspore.nn.AvgPool3d](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.AvgPool3d.html)|✔️|✔️|✔️| -|[mindspore.nn.BatchNorm1d](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.BatchNorm1d.html)|✔️|✔️|✔️| -|[mindspore.nn.BatchNorm2d](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.BatchNorm2d.html)|✔️|✔️|✔️| -|[mindspore.nn.BatchNorm3d](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.BatchNorm3d.html)|✔️|✔️|✔️| -|[mindspore.nn.BCELoss](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.BCELoss.html)|✔️|✔️|✔️| -|[mindspore.nn.BCEWithLogitsLoss](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.BCEWithLogitsLoss.html)|✔️|✔️|✔️| -|[mindspore.nn.ConstantPad1d](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.ConstantPad1d.html)|✔️|✔️|✔️| -|[mindspore.nn.ConstantPad2d](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.ConstantPad2d.html)|✔️|✔️|✔️| -|[mindspore.nn.Conv1d](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.Conv1d.html)|✔️|✔️|✔️| -|[mindspore.nn.Conv1dTranspose](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.Conv1dTranspose.html)|✔️|✔️|✔️| -|[mindspore.nn.Conv2d](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.Conv2d.html)|✔️|✔️|✔️| -|[mindspore.nn.Conv2dTranspose](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.Conv2dTranspose.html)|✔️|✔️|✔️| -|[mindspore.nn.Conv3d](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.Conv3d.html)|✔️|✔️|✔️| 
-|[mindspore.nn.Conv3dTranspose](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.Conv3dTranspose.html)|✔️|✔️|✔️| -|[mindspore.nn.CosineEmbeddingLoss](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.CosineEmbeddingLoss.html)|✔️|✔️|✔️| -|[mindspore.nn.CrossEntropyLoss](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.CrossEntropyLoss.html)|✔️|✔️|✔️| -|[mindspore.nn.CTCLoss](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.CTCLoss.html)|✔️|✔️|✔️| -|[mindspore.nn.Dense](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.Dense.html)|✔️|✔️|✔️| -|[mindspore.nn.Embedding](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.Embedding.html)|✔️|✔️|✔️| -|[mindspore.nn.EmbeddingLookup](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.EmbeddingLookup.html)|✔️|✔️|✔️| -|[mindspore.nn.GLU](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.GLU.html)|✔️|✔️|✔️| -|[mindspore.nn.GroupNorm](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.GroupNorm.html)|✔️|✔️|✔️| -|[mindspore.nn.GRU](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.GRU.html)|❌|❌|✔️| -|[mindspore.nn.GRUCell](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.GRUCell.html)|✔️|✔️|✔️| -|[mindspore.nn.InstanceNorm1d](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.InstanceNorm1d.html)|❌|✔️|❌| -|[mindspore.nn.InstanceNorm2d](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.InstanceNorm2d.html)|❌|✔️|❌| -|[mindspore.nn.InstanceNorm3d](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.InstanceNorm3d.html)|❌|✔️|❌| -|[mindspore.nn.KLDivLoss](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.KLDivLoss.html)|✔️|✔️|✔️| -|[mindspore.nn.L1Loss](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.L1Loss.html)|✔️|✔️|✔️| -|[mindspore.nn.LeakyReLU](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.LeakyReLU.html)|✔️|✔️|✔️| -|[mindspore.nn.LRN](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.LRN.html)|✔️|✔️|✔️| -|[mindspore.nn.LSTM](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.LSTM.html)|✔️|✔️|✔️| -|[mindspore.nn.MarginRankingLoss](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.MarginRankingLoss.html)|✔️|✔️|✔️| -|[mindspore.nn.MaxPool1d](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.MaxPool1d.html)|✔️|✔️|✔️| -|[mindspore.nn.MaxPool2d](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.MaxPool2d.html)|✔️|✔️|✔️| -|[mindspore.nn.MaxPool3d](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.MaxPool3d.html)|✔️|✔️|✔️| -|[mindspore.nn.MaxUnpool2d](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.MaxUnpool2d.html)|❌|✔️|✔️| -|[mindspore.nn.MSELoss](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.MSELoss.html)|✔️|✔️|✔️| -|[mindspore.nn.MultiLabelSoftMarginLoss](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.MultiLabelSoftMarginLoss.html)|✔️|✔️|✔️| -|[mindspore.nn.PixelShuffle](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.PixelShuffle.html)|✔️|✔️|✔️| -|[mindspore.nn.ReflectionPad1d](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.ReflectionPad1d.html)|✔️|❌|✔️| 
-|[mindspore.nn.ReplicationPad2d](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.ReplicationPad2d.html)|❌|✔️|❌| -|[mindspore.nn.RReLU](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.RReLU.html)|✔️|✔️|✔️| -|[mindspore.nn.SmoothL1Loss](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.SmoothL1Loss.html)|✔️|✔️|✔️| -|[mindspore.nn.Softmax2d](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.Softmax2d.html)|✔️|✔️|✔️| -|[mindspore.nn.SoftmaxCrossEntropyWithLogits](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.SoftmaxCrossEntropyWithLogits.html)|✔️|✔️|✔️| -|[mindspore.nn.ZeroPad2d](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.ZeroPad2d.html)|✔️|✔️|✔️| -|[mindspore.mint.nn](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.mint.html#mindspore-mint-nn)|✔️|❌|❌| +|[mindspore.nn.Adam](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.Adam.html)|✔️|✔️|✔️| +|[mindspore.nn.AdaptiveAvgPool1d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.AdaptiveAvgPool1d.html)|✔️|✔️|✔️| +|[mindspore.nn.AdaptiveAvgPool2d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.AdaptiveAvgPool2d.html)|✔️|✔️|✔️| +|[mindspore.nn.AdaptiveAvgPool3d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.AdaptiveAvgPool3d.html)|✔️|✔️|✔️| +|[mindspore.nn.AdaptiveMaxPool1d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.AdaptiveMaxPool1d.html)|✔️|✔️|✔️| +|[mindspore.nn.AvgPool1d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.AvgPool1d.html)|✔️|✔️|✔️| +|[mindspore.nn.AvgPool2d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.AvgPool2d.html)|✔️|✔️|✔️| +|[mindspore.nn.AvgPool3d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.AvgPool3d.html)|✔️|✔️|✔️| +|[mindspore.nn.BatchNorm1d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.BatchNorm1d.html)|✔️|✔️|✔️| +|[mindspore.nn.BatchNorm2d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.BatchNorm2d.html)|✔️|✔️|✔️| +|[mindspore.nn.BatchNorm3d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.BatchNorm3d.html)|✔️|✔️|✔️| +|[mindspore.nn.BCELoss](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.BCELoss.html)|✔️|✔️|✔️| +|[mindspore.nn.BCEWithLogitsLoss](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.BCEWithLogitsLoss.html)|✔️|✔️|✔️| +|[mindspore.nn.ConstantPad1d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.ConstantPad1d.html)|✔️|✔️|✔️| +|[mindspore.nn.ConstantPad2d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.ConstantPad2d.html)|✔️|✔️|✔️| +|[mindspore.nn.Conv1d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.Conv1d.html)|✔️|✔️|✔️| +|[mindspore.nn.Conv1dTranspose](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.Conv1dTranspose.html)|✔️|✔️|✔️| +|[mindspore.nn.Conv2d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.Conv2d.html)|✔️|✔️|✔️| +|[mindspore.nn.Conv2dTranspose](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.Conv2dTranspose.html)|✔️|✔️|✔️| +|[mindspore.nn.Conv3d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.Conv3d.html)|✔️|✔️|✔️| 
+|[mindspore.nn.Conv3dTranspose](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.Conv3dTranspose.html)|✔️|✔️|✔️| +|[mindspore.nn.CosineEmbeddingLoss](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.CosineEmbeddingLoss.html)|✔️|✔️|✔️| +|[mindspore.nn.CrossEntropyLoss](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.CrossEntropyLoss.html)|✔️|✔️|✔️| +|[mindspore.nn.CTCLoss](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.CTCLoss.html)|✔️|✔️|✔️| +|[mindspore.nn.Dense](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.Dense.html)|✔️|✔️|✔️| +|[mindspore.nn.Embedding](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.Embedding.html)|✔️|✔️|✔️| +|[mindspore.nn.EmbeddingLookup](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.EmbeddingLookup.html)|✔️|✔️|✔️| +|[mindspore.nn.GLU](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.GLU.html)|✔️|✔️|✔️| +|[mindspore.nn.GroupNorm](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.GroupNorm.html)|✔️|✔️|✔️| +|[mindspore.nn.GRU](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.GRU.html)|❌|❌|✔️| +|[mindspore.nn.GRUCell](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.GRUCell.html)|✔️|✔️|✔️| +|[mindspore.nn.InstanceNorm1d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.InstanceNorm1d.html)|❌|✔️|❌| +|[mindspore.nn.InstanceNorm2d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.InstanceNorm2d.html)|❌|✔️|❌| +|[mindspore.nn.InstanceNorm3d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.InstanceNorm3d.html)|❌|✔️|❌| +|[mindspore.nn.KLDivLoss](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.KLDivLoss.html)|✔️|✔️|✔️| +|[mindspore.nn.L1Loss](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.L1Loss.html)|✔️|✔️|✔️| +|[mindspore.nn.LeakyReLU](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.LeakyReLU.html)|✔️|✔️|✔️| +|[mindspore.nn.LRN](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.LRN.html)|✔️|✔️|✔️| +|[mindspore.nn.LSTM](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.LSTM.html)|✔️|✔️|✔️| +|[mindspore.nn.MarginRankingLoss](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.MarginRankingLoss.html)|✔️|✔️|✔️| +|[mindspore.nn.MaxPool1d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.MaxPool1d.html)|✔️|✔️|✔️| +|[mindspore.nn.MaxPool2d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.MaxPool2d.html)|✔️|✔️|✔️| +|[mindspore.nn.MaxPool3d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.MaxPool3d.html)|✔️|✔️|✔️| +|[mindspore.nn.MaxUnpool2d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.MaxUnpool2d.html)|❌|✔️|✔️| +|[mindspore.nn.MSELoss](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.MSELoss.html)|✔️|✔️|✔️| +|[mindspore.nn.MultiLabelSoftMarginLoss](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.MultiLabelSoftMarginLoss.html)|✔️|✔️|✔️| +|[mindspore.nn.PixelShuffle](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.PixelShuffle.html)|✔️|✔️|✔️| +|[mindspore.nn.ReflectionPad1d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.ReflectionPad1d.html)|✔️|❌|✔️| 
+|[mindspore.nn.ReplicationPad2d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.ReplicationPad2d.html)|❌|✔️|❌| +|[mindspore.nn.RReLU](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.RReLU.html)|✔️|✔️|✔️| +|[mindspore.nn.SmoothL1Loss](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.SmoothL1Loss.html)|✔️|✔️|✔️| +|[mindspore.nn.Softmax2d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.Softmax2d.html)|✔️|✔️|✔️| +|[mindspore.nn.SoftmaxCrossEntropyWithLogits](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.SoftmaxCrossEntropyWithLogits.html)|✔️|✔️|✔️| +|[mindspore.nn.ZeroPad2d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.ZeroPad2d.html)|✔️|✔️|✔️| +|[mindspore.mint.nn](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.mint.html#mindspore-mint-nn)|✔️|❌|❌| diff --git a/docs/mindspore/source_zh_cn/api_python/dynamic_shape_primitive.md b/docs/mindspore/source_zh_cn/api_python/dynamic_shape_primitive.md index 8127782c83..a6714ebdf1 100644 --- a/docs/mindspore/source_zh_cn/api_python/dynamic_shape_primitive.md +++ b/docs/mindspore/source_zh_cn/api_python/dynamic_shape_primitive.md @@ -1,8 +1,8 @@ # 算子动态shape支持情况 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/api_python/dynamic_shape_primitive.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/api_python/dynamic_shape_primitive.md) -> 以下列表列举了PyNative模式下支持动态shape功能的算子。其中部分算子可能会存在数据类型支持不全的问题,如遇到此类问题,可以通过主动插入[Cast](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Cast.html)算子解决。 +> 以下列表列举了PyNative模式下支持动态shape功能的算子。其中部分算子可能会存在数据类型支持不全的问题,如遇到此类问题,可以通过主动插入[Cast](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Cast.html)算子解决。 > > 列表以外的算子对动态shape功能支持尚不完善,可能会执行失败。另外,图模式下,动态shape功能支持也不完善,可能会执行失败。 > @@ -10,215 +10,215 @@ | API名称 | Ascend | GPU | CPU | | :--- |:-------- | :------- |:---------| -|[mindspore.Abs](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Abs.html)|✔️|✔️|✔️| -|[mindspore.Acosh](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Acosh.html)|✔️|✔️|✔️| -|[mindspore.Adam](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Adam.html)|✔️|✔️|✔️| -|[mindspore.AdaptiveAvgPool2D](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.AdaptiveAvgPool2D.html)|✔️|✔️|✔️| -|[mindspore.AdaptiveAvgPool3D](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.AdaptiveAvgPool3D.html)|✔️|✔️|✔️| -|[mindspore.Add](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Add.html)|✔️|✔️|✔️| -|[mindspore.Addcmul](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Addcmul.html)|✔️|✔️|✔️| -|[mindspore.AddN](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.AddN.html)|✔️|✔️|✔️| -|[mindspore.Angle](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Angle.html)|✔️|✔️|✔️| -|[mindspore.ArgMaxWithValue](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ArgMaxWithValue.html)|✔️|✔️|✔️| 
-|[mindspore.ArgMinWithValue](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ArgMinWithValue.html)|✔️|✔️|✔️| -|[mindspore.Asin](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Asin.html)|✔️|✔️|✔️| -|[mindspore.Asinh](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Asinh.html)|✔️|✔️|✔️| -|[mindspore.Assign](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Assign.html)|✔️|✔️|✔️| -|[mindspore.AssignAdd](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.AssignAdd.html)|✔️|✔️|✔️| -|[mindspore.Atan](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Atan.html)|✔️|✔️|✔️| -|[mindspore.Atan2](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Atan2.html)|✔️|✔️|✔️| -|[mindspore.Atanh](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Atanh.html)|✔️|✔️|✔️| -|[mindspore.AvgPool](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.AvgPool.html)|✔️|✔️|✔️| -|[mindspore.AvgPool3D](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.AvgPool3D.html)|✔️|✔️|✔️| -|[mindspore.BatchNorm](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.BatchNorm.html)|✔️|✔️|✔️| -|[mindspore.BCEWithLogitsLoss](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.BCEWithLogitsLoss.html)|✔️|✔️|✔️| -|[mindspore.Bernoulli](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Bernoulli.html)|❌|✔️|✔️| -|[mindspore.BesselI0](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.BesselI0.html)|❌|✔️|✔️| -|[mindspore.BesselI0e](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.BesselI0e.html)|✔️|✔️|✔️| -|[mindspore.BesselI1](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.BesselI1.html)|❌|✔️|✔️| -|[mindspore.BesselI1e](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.BesselI1e.html)|✔️|✔️|✔️| -|[mindspore.BesselJ0](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.BesselJ0.html)|❌|✔️|✔️| -|[mindspore.BesselJ1](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.BesselJ1.html)|❌|✔️|✔️| -|[mindspore.BiasAdd](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.BiasAdd.html)|✔️|✔️|️❌| -|[mindspore.BinaryCrossEntropy](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.BinaryCrossEntropy.html)|✔️|✔️|✔️| -|[mindspore.BitwiseAnd](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.BitwiseAnd.html)|✔️|✔️|✔️| -|[mindspore.BitwiseOr](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.BitwiseOr.html)|✔️|✔️|✔️| -|[mindspore.BitwiseXor](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.BitwiseXor.html)|✔️|✔️|✔️| -|[mindspore.BroadcastTo](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.BroadcastTo.html)|✔️|✔️|✔️| -|[mindspore.Cast](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Cast.html)|✔️|✔️|✔️| -|[mindspore.Ceil](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Ceil.html)|✔️|✔️|✔️| -|[mindspore.Col2Im](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Col2Im.html)|✔️|✔️|✔️| -|[mindspore.Complex](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Complex.html)|✔️|✔️|✔️| 
-|[mindspore.Concat](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Concat.html)|✔️|✔️|✔️| -|[mindspore.Conj](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Conj.html)|❌|✔️|✔️| -|[mindspore.Conv2D](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Conv2D.html)|✔️|✔️|✔️| -|[mindspore.Conv2DTranspose](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Conv2DTranspose.html)|✔️|✔️|✔️| -|[mindspore.Conv3D](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Conv3D.html)|✔️|✔️|✔️| -|[mindspore.Conv3DTranspose](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Conv3DTranspose.html)|✔️|✔️|✔️| -|[mindspore.Cos](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Cos.html)|✔️|✔️|✔️| -|[mindspore.Cosh](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Cosh.html)|✔️|✔️|✔️| -|[mindspore.Cross](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Cross.html)|✔️|❌|✔️| -|[mindspore.CTCLossV2](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.CTCLossV2.html)|✔️|✔️|✔️| -|[mindspore.Cummax](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Cummax.html)|❌|✔️|✔️| -|[mindspore.Cummin](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Cummin.html)|✔️|✔️|✔️| -|[mindspore.CumSum](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.CumSum.html)|✔️|✔️|️❌| -|[mindspore.Diag](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Diag.html)|✔️|✔️|✔️| -|[mindspore.Digamma](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Digamma.html)|❌|✔️|✔️| -|[mindspore.Div](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Div.html)|✔️|✔️|✔️| -|[mindspore.Dropout](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Dropout.html)|✔️|✔️|✔️| -|[mindspore.Dropout2D](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Dropout2D.html)|✔️|✔️|✔️| -|[mindspore.Dropout3D](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Dropout3D.html)|✔️|✔️|✔️| -|[mindspore.DynamicGRUV2](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.DynamicGRUV2.html)|✔️|❌|❌| -|[mindspore.Einsum](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Einsum.html)|❌|✔️|❌| -|[mindspore.Elu](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Elu.html)|✔️|✔️|✔️| -|[mindspore.Equal](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Equal.html)|✔️|✔️|✔️| -|[mindspore.Erf](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Erf.html)|✔️|✔️|✔️| -|[mindspore.Erfc](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Erfc.html)|✔️|✔️|✔️| -|[mindspore.Exp](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Exp.html)|✔️|✔️|✔️| -|[mindspore.ExpandDims](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ExpandDims.html)|✔️|✔️|✔️| -|[mindspore.Expm1](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Expm1.html)|✔️|✔️|✔️| -|[mindspore.Eye](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Eye.html)|✔️|✔️|✔️| -|[mindspore.FFTWithSize](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.FFTWithSize.html)|✔️|✔️|✔️| 
-|[mindspore.Fill](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Fill.html)|✔️|✔️|✔️| -|[mindspore.FillV2](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.FillV2.html)|✔️|✔️|✔️| -|[mindspore.Flatten](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Flatten.html)|✔️|✔️|✔️| -|[mindspore.Floor](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Floor.html)|✔️|✔️|✔️| -|[mindspore.FloorDiv](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.FloorDiv.html)|✔️|✔️|✔️| -|[mindspore.FloorMod](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.FloorMod.html)|✔️|✔️|✔️| -|[mindspore.Gather](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Gather.html)|✔️|✔️|✔️| -|[mindspore.GatherD](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.GatherD.html)|✔️|✔️|✔️| -|[mindspore.GatherNd](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.GatherNd.html)|✔️|✔️|✔️| -|[mindspore.Gcd](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Gcd.html)|✔️|✔️|✔️| -|[mindspore.Geqrf](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Geqrf.html)|✔️|✔️|✔️| -|[mindspore.Ger](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Ger.html)|✔️|✔️|✔️| -|[mindspore.Greater](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Greater.html)|✔️|✔️|✔️| -|[mindspore.GreaterEqual](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.GreaterEqual.html)|✔️|✔️|✔️| -|[mindspore.GridSampler2D](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.GridSampler2D.html)|✔️|✔️|✔️| -|[mindspore.GridSampler3D](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.GridSampler3D.html)|✔️|✔️|✔️| -|[mindspore.Heaviside](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Heaviside.html)|✔️|✔️|✔️| -|[mindspore.HSwish](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.HSwish.html)|✔️|✔️|✔️| -|[mindspore.Hypot](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Hypot.html)|✔️|✔️|✔️| -|[mindspore.Identity](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Identity.html)|✔️|✔️|✔️| -|[mindspore.Igammac](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Igammac.html)|✔️|✔️|✔️| -|[mindspore.Imag](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Imag.html)|✔️|✔️|✔️| -|[mindspore.Invert](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Invert.html)|✔️|✔️|✔️| -|[mindspore.IsFinite](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.IsFinite.html)|✔️|✔️|✔️| -|[mindspore.IsInf](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.IsInf.html)|✔️|✔️|✔️| -|[mindspore.IsNan](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.IsNan.html)|✔️|✔️|✔️| -|[mindspore.KLDivLoss](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.KLDivLoss.html)|✔️|✔️|✔️| -|[mindspore.LayerNorm](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.LayerNorm.html)|✔️|✔️|✔️| -|[mindspore.Lcm](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Lcm.html)|✔️|✔️|✔️| -|[mindspore.LeftShift](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.LeftShift.html)|✔️|✔️|✔️| 
-|[mindspore.Lerp](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Lerp.html)|✔️|✔️|✔️| -|[mindspore.Less](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Less.html)|✔️|✔️|✔️| -|[mindspore.LessEqual](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.LessEqual.html)|✔️|✔️|✔️| -|[mindspore.LinSpace](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.LinSpace.html)|✔️|✔️|✔️| -|[mindspore.Log](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Log.html)|✔️|✔️|✔️| -|[mindspore.Log1p](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Log1p.html)|✔️|✔️|✔️| -|[mindspore.LogicalAnd](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.LogicalAnd.html)|✔️|✔️|✔️| -|[mindspore.LogicalNot](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.LogicalNot.html)|✔️|✔️|✔️| -|[mindspore.LogicalOr](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.LogicalOr.html)|✔️|✔️|✔️| -|[mindspore.LogicalXor](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.LogicalXor.html)|✔️|❌|✔️| -|[mindspore.Logit](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Logit.html)|✔️|✔️|✔️| -|[mindspore.LogSoftmax](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.LogSoftmax.html)|✔️|✔️|✔️| -|[mindspore.LpNorm](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.LpNorm.html)|✔️|✔️|✔️| -|[mindspore.LRN](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.LRN.html)|✔️|✔️|✔️| -|[mindspore.MaskedFill](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.MaskedFill.html)|✔️|✔️|✔️| -|[mindspore.MaskedSelect](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.MaskedSelect.html)|✔️|✔️|✔️| -|[mindspore.MatrixInverse](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.MatrixInverse.html)|❌|✔️|✔️| -|[mindspore.MatrixSolve](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.MatrixSolve.html)|✔️|❌|✔️| -|[mindspore.Maximum](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Maximum.html)|✔️|✔️|✔️| -|[mindspore.MaxPool](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.MaxPool.html)|✔️|✔️|✔️| -|[mindspore.MaxPool3D](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.MaxPool3D.html)|✔️|✔️|✔️| -|[mindspore.MaxUnpool2D](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.MaxUnpool2D.html)|❌|✔️|✔️| -|[mindspore.Median](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Median.html)|❌|✔️|✔️| -|[mindspore.Meshgrid](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Meshgrid.html)|✔️|✔️|✔️| -|[mindspore.Minimum](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Minimum.html)|✔️|✔️|✔️| -|[mindspore.MirrorPad](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.MirrorPad.html)|✔️|❌|✔️| -|[mindspore.Mish](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Mish.html)|❌|✔️|✔️| -|[mindspore.Mod](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Mod.html)|✔️|✔️|✔️| -|[mindspore.Mul](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Mul.html)|✔️|✔️|✔️| 
-|[mindspore.Multinomial](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Multinomial.html)|✔️|✔️|✔️| -|[mindspore.Mvlgamma](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Mvlgamma.html)|✔️|✔️|✔️| -|[mindspore.NanToNum](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.NanToNum.html)|✔️|❌|✔️| -|[mindspore.Neg](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Neg.html)|✔️|✔️|✔️| -|[mindspore.NextAfter](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.NextAfter.html)|✔️|✔️|✔️| -|[mindspore.NLLLoss](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.NLLLoss.html)|✔️|✔️|✔️| -|[mindspore.nonzero](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.nonzero.html)|✔️|✔️|✔️| -|[mindspore.NotEqual](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.NotEqual.html)|✔️|✔️|✔️| -|[mindspore.OneHot](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.OneHot.html)|✔️|✔️|✔️| -|[mindspore.OnesLike](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.OnesLike.html)|✔️|✔️|✔️| -|[mindspore.Pad](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Pad.html)|✔️|✔️|✔️| -|[mindspore.Polar](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Polar.html)|❌|✔️|✔️| -|[mindspore.Polygamma](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Polygamma.html)|❌|✔️|✔️| -|[mindspore.Pow](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Pow.html)|✔️|✔️|✔️| -|[mindspore.PReLU](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.PReLU.html)|✔️|✔️|✔️| -|[mindspore.RandpermV2](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.RandpermV2.html)|❌|❌|✔️| -|[mindspore.Range](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Range.html)|❌|✔️|✔️| -|[mindspore.Real](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Real.html)|❌|✔️|✔️| -|[mindspore.RealDiv](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.RealDiv.html)|✔️|✔️|✔️| -|[mindspore.Reciprocal](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Reciprocal.html)|✔️|✔️|✔️| -|[mindspore.ReduceAll](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ReduceAll.html)|✔️|✔️|✔️| -|[mindspore.ReduceAny](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ReduceAny.html)|✔️|✔️|✔️| -|[mindspore.ReduceMax](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ReduceMax.html)|✔️|✔️|✔️| -|[mindspore.ReduceMean](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ReduceMean.html)|✔️|✔️|✔️| -|[mindspore.ReduceMin](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ReduceMin.html)|✔️|✔️|✔️| -|[mindspore.ReduceProd](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ReduceProd.html)|✔️|✔️|✔️| -|[mindspore.ReduceSum](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ReduceSum.html)|✔️|✔️|✔️| -|[mindspore.Reshape](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Reshape.html)|✔️|✔️|✔️| -|[mindspore.ResizeBicubic](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ResizeBicubic.html)|✔️|✔️|✔️| 
-|[mindspore.ResizeBilinearV2](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ResizeBilinearV2.html)|✔️|✔️|️❌| -|[mindspore.ReverseSequence](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ReverseSequence.html)|✔️|✔️|✔️| -|[mindspore.ReverseV2](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ReverseV2.html)|✔️|✔️|✔️| -|[mindspore.RightShift](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.RightShift.html)|✔️|✔️|✔️| -|[mindspore.Rint](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Rint.html)|✔️|✔️|✔️| -|[mindspore.Round](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Round.html)|✔️|✔️|✔️| -|[mindspore.Rsqrt](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Rsqrt.html)|✔️|✔️|✔️| -|[mindspore.ScatterNd](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ScatterNd.html)|✔️|✔️|✔️| -|[mindspore.ScatterNdAdd](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ScatterNdAdd.html)|✔️|✔️|✔️| -|[mindspore.ScatterNdMax](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ScatterNdMax.html)|❌|✔️|✔️| -|[mindspore.ScatterNdMin](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ScatterNdMin.html)|✔️|✔️|✔️| -|[mindspore.ScatterNdMul](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ScatterNdMul.html)|❌|✔️|✔️| -|[mindspore.ScatterNdSub](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ScatterNdSub.html)|✔️|✔️|✔️| -|[mindspore.ScatterNdUpdate](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ScatterNdUpdate.html)|✔️|✔️|✔️| -|[mindspore.ScatterSub](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ScatterSub.html)|✔️|✔️|✔️| -|[mindspore.ScatterUpdate](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ScatterUpdate.html)|✔️|✔️|✔️| -|[mindspore.Select](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Select.html)|✔️|✔️|✔️| -|[mindspore.Sigmoid](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Sigmoid.html)|✔️|✔️|✔️| -|[mindspore.Sign](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Sign.html)|✔️|✔️|✔️| -|[mindspore.Sin](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Sin.html)|✔️|✔️|✔️| -|[mindspore.Sinc](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Sinc.html)|✔️|✔️|✔️| -|[mindspore.Sinh](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Sinh.html)|✔️|✔️|✔️| -|[mindspore.Slice](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Slice.html)|✔️|✔️|✔️| -|[mindspore.SmoothL1Loss](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.SmoothL1Loss.html)|✔️|✔️|✔️| -|[mindspore.Softmax](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Softmax.html)|✔️|✔️|✔️| -|[mindspore.SoftmaxCrossEntropyWithLogits](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.SoftmaxCrossEntropyWithLogits.html)|✔️|✔️|✔️| -|[mindspore.Softplus](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Softplus.html)|✔️|✔️|✔️| -|[mindspore.SoftShrink](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.SoftShrink.html)|✔️|✔️|✔️| 
-|[mindspore.Sort](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Sort.html)|✔️|❌|✔️| -|[mindspore.Split](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Split.html)|✔️|✔️|️❌| -|[mindspore.Sqrt](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Sqrt.html)|✔️|✔️|✔️| -|[mindspore.Square](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Square.html)|✔️|✔️|✔️| -|[mindspore.Squeeze](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Squeeze.html)|✔️|✔️|✔️| -|[mindspore.Stack](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Stack.html)|✔️|✔️|✔️| -|[mindspore.StandardNormal](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.StandardNormal.html)|✔️|✔️|✔️| -|[mindspore.StridedSlice](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.StridedSlice.html)|✔️|✔️|✔️| -|[mindspore.Sub](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Sub.html)|✔️|✔️|✔️| -|[mindspore.Svd](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Svd.html)|❌|✔️|✔️| -|[mindspore.Tan](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Tan.html)|✔️|✔️|✔️| -|[mindspore.Tanh](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Tanh.html)|✔️|✔️|✔️| -|[mindspore.TensorScatterUpdate](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.TensorScatterUpdate.html)|✔️|✔️|✔️| -|[mindspore.Tile](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Tile.html)|✔️|✔️|✔️| -|[mindspore.TopK](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.TopK.html)|✔️|✔️|✔️| -|[mindspore.Trace](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Trace.html)|✔️|✔️|✔️| -|[mindspore.Transpose](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Transpose.html)|✔️|✔️|✔️| -|[mindspore.Tril](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Tril.html)|✔️|✔️|✔️| -|[mindspore.triu](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.triu.html)|✔️|✔️|✔️| -|[mindspore.Trunc](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Trunc.html)|✔️|✔️|✔️| -|[mindspore.TruncateDiv](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.TruncateDiv.html)|✔️|✔️|️❌| -|[mindspore.UniformInt](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.UniformInt.html)|❌|✔️|✔️| -|[mindspore.UniformReal](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.UniformReal.html)|❌|✔️|✔️| -|[mindspore.Unique](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Unique.html)|✔️|✔️|✔️| -|[mindspore.UnsortedSegmentSum](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.UnsortedSegmentSum.html)|✔️|✔️|✔️| -|[mindspore.Xlogy](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Xlogy.html)|✔️|✔️|✔️| -|[mindspore.ZerosLike](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ZerosLike.html)|✔️|✔️|✔️| +|[mindspore.Abs](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Abs.html)|✔️|✔️|✔️| +|[mindspore.Acosh](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Acosh.html)|✔️|✔️|✔️| +|[mindspore.Adam](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Adam.html)|✔️|✔️|✔️| 
+|[mindspore.AdaptiveAvgPool2D](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.AdaptiveAvgPool2D.html)|✔️|✔️|✔️| +|[mindspore.AdaptiveAvgPool3D](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.AdaptiveAvgPool3D.html)|✔️|✔️|✔️| +|[mindspore.Add](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Add.html)|✔️|✔️|✔️| +|[mindspore.Addcmul](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Addcmul.html)|✔️|✔️|✔️| +|[mindspore.AddN](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.AddN.html)|✔️|✔️|✔️| +|[mindspore.Angle](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Angle.html)|✔️|✔️|✔️| +|[mindspore.ArgMaxWithValue](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ArgMaxWithValue.html)|✔️|✔️|✔️| +|[mindspore.ArgMinWithValue](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ArgMinWithValue.html)|✔️|✔️|✔️| +|[mindspore.Asin](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Asin.html)|✔️|✔️|✔️| +|[mindspore.Asinh](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Asinh.html)|✔️|✔️|✔️| +|[mindspore.Assign](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Assign.html)|✔️|✔️|✔️| +|[mindspore.AssignAdd](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.AssignAdd.html)|✔️|✔️|✔️| +|[mindspore.Atan](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Atan.html)|✔️|✔️|✔️| +|[mindspore.Atan2](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Atan2.html)|✔️|✔️|✔️| +|[mindspore.Atanh](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Atanh.html)|✔️|✔️|✔️| +|[mindspore.AvgPool](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.AvgPool.html)|✔️|✔️|✔️| +|[mindspore.AvgPool3D](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.AvgPool3D.html)|✔️|✔️|✔️| +|[mindspore.BatchNorm](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.BatchNorm.html)|✔️|✔️|✔️| +|[mindspore.BCEWithLogitsLoss](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.BCEWithLogitsLoss.html)|✔️|✔️|✔️| +|[mindspore.Bernoulli](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Bernoulli.html)|❌|✔️|✔️| +|[mindspore.BesselI0](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.BesselI0.html)|❌|✔️|✔️| +|[mindspore.BesselI0e](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.BesselI0e.html)|✔️|✔️|✔️| +|[mindspore.BesselI1](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.BesselI1.html)|❌|✔️|✔️| +|[mindspore.BesselI1e](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.BesselI1e.html)|✔️|✔️|✔️| +|[mindspore.BesselJ0](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.BesselJ0.html)|❌|✔️|✔️| +|[mindspore.BesselJ1](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.BesselJ1.html)|❌|✔️|✔️| +|[mindspore.BiasAdd](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.BiasAdd.html)|✔️|✔️|️❌| +|[mindspore.BinaryCrossEntropy](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.BinaryCrossEntropy.html)|✔️|✔️|✔️| +|[mindspore.BitwiseAnd](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.BitwiseAnd.html)|✔️|✔️|✔️| 
+|[mindspore.BitwiseOr](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.BitwiseOr.html)|✔️|✔️|✔️| +|[mindspore.BitwiseXor](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.BitwiseXor.html)|✔️|✔️|✔️| +|[mindspore.BroadcastTo](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.BroadcastTo.html)|✔️|✔️|✔️| +|[mindspore.Cast](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Cast.html)|✔️|✔️|✔️| +|[mindspore.Ceil](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Ceil.html)|✔️|✔️|✔️| +|[mindspore.Col2Im](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Col2Im.html)|✔️|✔️|✔️| +|[mindspore.Complex](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Complex.html)|✔️|✔️|✔️| +|[mindspore.Concat](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Concat.html)|✔️|✔️|✔️| +|[mindspore.Conj](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Conj.html)|❌|✔️|✔️| +|[mindspore.Conv2D](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Conv2D.html)|✔️|✔️|✔️| +|[mindspore.Conv2DTranspose](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Conv2DTranspose.html)|✔️|✔️|✔️| +|[mindspore.Conv3D](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Conv3D.html)|✔️|✔️|✔️| +|[mindspore.Conv3DTranspose](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Conv3DTranspose.html)|✔️|✔️|✔️| +|[mindspore.Cos](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Cos.html)|✔️|✔️|✔️| +|[mindspore.Cosh](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Cosh.html)|✔️|✔️|✔️| +|[mindspore.Cross](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Cross.html)|✔️|❌|✔️| +|[mindspore.CTCLossV2](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.CTCLossV2.html)|✔️|✔️|✔️| +|[mindspore.Cummax](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Cummax.html)|❌|✔️|✔️| +|[mindspore.Cummin](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Cummin.html)|✔️|✔️|✔️| +|[mindspore.CumSum](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.CumSum.html)|✔️|✔️|️❌| +|[mindspore.Diag](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Diag.html)|✔️|✔️|✔️| +|[mindspore.Digamma](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Digamma.html)|❌|✔️|✔️| +|[mindspore.Div](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Div.html)|✔️|✔️|✔️| +|[mindspore.Dropout](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Dropout.html)|✔️|✔️|✔️| +|[mindspore.Dropout2D](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Dropout2D.html)|✔️|✔️|✔️| +|[mindspore.Dropout3D](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Dropout3D.html)|✔️|✔️|✔️| +|[mindspore.DynamicGRUV2](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.DynamicGRUV2.html)|✔️|❌|❌| +|[mindspore.Einsum](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Einsum.html)|❌|✔️|❌| +|[mindspore.Elu](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Elu.html)|✔️|✔️|✔️| 
+|[mindspore.Equal](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Equal.html)|✔️|✔️|✔️| +|[mindspore.Erf](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Erf.html)|✔️|✔️|✔️| +|[mindspore.Erfc](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Erfc.html)|✔️|✔️|✔️| +|[mindspore.Exp](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Exp.html)|✔️|✔️|✔️| +|[mindspore.ExpandDims](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ExpandDims.html)|✔️|✔️|✔️| +|[mindspore.Expm1](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Expm1.html)|✔️|✔️|✔️| +|[mindspore.Eye](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Eye.html)|✔️|✔️|✔️| +|[mindspore.FFTWithSize](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.FFTWithSize.html)|✔️|✔️|✔️| +|[mindspore.Fill](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Fill.html)|✔️|✔️|✔️| +|[mindspore.FillV2](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.FillV2.html)|✔️|✔️|✔️| +|[mindspore.Flatten](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Flatten.html)|✔️|✔️|✔️| +|[mindspore.Floor](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Floor.html)|✔️|✔️|✔️| +|[mindspore.FloorDiv](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.FloorDiv.html)|✔️|✔️|✔️| +|[mindspore.FloorMod](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.FloorMod.html)|✔️|✔️|✔️| +|[mindspore.Gather](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Gather.html)|✔️|✔️|✔️| +|[mindspore.GatherD](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.GatherD.html)|✔️|✔️|✔️| +|[mindspore.GatherNd](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.GatherNd.html)|✔️|✔️|✔️| +|[mindspore.Gcd](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Gcd.html)|✔️|✔️|✔️| +|[mindspore.Geqrf](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Geqrf.html)|✔️|✔️|✔️| +|[mindspore.Ger](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Ger.html)|✔️|✔️|✔️| +|[mindspore.Greater](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Greater.html)|✔️|✔️|✔️| +|[mindspore.GreaterEqual](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.GreaterEqual.html)|✔️|✔️|✔️| +|[mindspore.GridSampler2D](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.GridSampler2D.html)|✔️|✔️|✔️| +|[mindspore.GridSampler3D](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.GridSampler3D.html)|✔️|✔️|✔️| +|[mindspore.Heaviside](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Heaviside.html)|✔️|✔️|✔️| +|[mindspore.HSwish](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.HSwish.html)|✔️|✔️|✔️| +|[mindspore.Hypot](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Hypot.html)|✔️|✔️|✔️| +|[mindspore.Identity](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Identity.html)|✔️|✔️|✔️| +|[mindspore.Igammac](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Igammac.html)|✔️|✔️|✔️| +|[mindspore.Imag](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Imag.html)|✔️|✔️|✔️| 
+|[mindspore.Invert](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Invert.html)|✔️|✔️|✔️| +|[mindspore.IsFinite](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.IsFinite.html)|✔️|✔️|✔️| +|[mindspore.IsInf](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.IsInf.html)|✔️|✔️|✔️| +|[mindspore.IsNan](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.IsNan.html)|✔️|✔️|✔️| +|[mindspore.KLDivLoss](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.KLDivLoss.html)|✔️|✔️|✔️| +|[mindspore.LayerNorm](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.LayerNorm.html)|✔️|✔️|✔️| +|[mindspore.Lcm](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Lcm.html)|✔️|✔️|✔️| +|[mindspore.LeftShift](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.LeftShift.html)|✔️|✔️|✔️| +|[mindspore.Lerp](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Lerp.html)|✔️|✔️|✔️| +|[mindspore.Less](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Less.html)|✔️|✔️|✔️| +|[mindspore.LessEqual](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.LessEqual.html)|✔️|✔️|✔️| +|[mindspore.LinSpace](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.LinSpace.html)|✔️|✔️|✔️| +|[mindspore.Log](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Log.html)|✔️|✔️|✔️| +|[mindspore.Log1p](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Log1p.html)|✔️|✔️|✔️| +|[mindspore.LogicalAnd](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.LogicalAnd.html)|✔️|✔️|✔️| +|[mindspore.LogicalNot](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.LogicalNot.html)|✔️|✔️|✔️| +|[mindspore.LogicalOr](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.LogicalOr.html)|✔️|✔️|✔️| +|[mindspore.LogicalXor](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.LogicalXor.html)|✔️|❌|✔️| +|[mindspore.Logit](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Logit.html)|✔️|✔️|✔️| +|[mindspore.LogSoftmax](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.LogSoftmax.html)|✔️|✔️|✔️| +|[mindspore.LpNorm](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.LpNorm.html)|✔️|✔️|✔️| +|[mindspore.LRN](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.LRN.html)|✔️|✔️|✔️| +|[mindspore.MaskedFill](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.MaskedFill.html)|✔️|✔️|✔️| +|[mindspore.MaskedSelect](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.MaskedSelect.html)|✔️|✔️|✔️| +|[mindspore.MatrixInverse](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.MatrixInverse.html)|❌|✔️|✔️| +|[mindspore.MatrixSolve](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.MatrixSolve.html)|✔️|❌|✔️| +|[mindspore.Maximum](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Maximum.html)|✔️|✔️|✔️| +|[mindspore.MaxPool](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.MaxPool.html)|✔️|✔️|✔️| +|[mindspore.MaxPool3D](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.MaxPool3D.html)|✔️|✔️|✔️| 
+|[mindspore.MaxUnpool2D](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.MaxUnpool2D.html)|❌|✔️|✔️| +|[mindspore.Median](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Median.html)|❌|✔️|✔️| +|[mindspore.Meshgrid](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Meshgrid.html)|✔️|✔️|✔️| +|[mindspore.Minimum](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Minimum.html)|✔️|✔️|✔️| +|[mindspore.MirrorPad](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.MirrorPad.html)|✔️|❌|✔️| +|[mindspore.Mish](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Mish.html)|❌|✔️|✔️| +|[mindspore.Mod](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Mod.html)|✔️|✔️|✔️| +|[mindspore.Mul](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Mul.html)|✔️|✔️|✔️| +|[mindspore.Multinomial](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Multinomial.html)|✔️|✔️|✔️| +|[mindspore.Mvlgamma](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Mvlgamma.html)|✔️|✔️|✔️| +|[mindspore.NanToNum](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.NanToNum.html)|✔️|❌|✔️| +|[mindspore.Neg](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Neg.html)|✔️|✔️|✔️| +|[mindspore.NextAfter](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.NextAfter.html)|✔️|✔️|✔️| +|[mindspore.NLLLoss](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.NLLLoss.html)|✔️|✔️|✔️| +|[mindspore.nonzero](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.nonzero.html)|✔️|✔️|✔️| +|[mindspore.NotEqual](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.NotEqual.html)|✔️|✔️|✔️| +|[mindspore.OneHot](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.OneHot.html)|✔️|✔️|✔️| +|[mindspore.OnesLike](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.OnesLike.html)|✔️|✔️|✔️| +|[mindspore.Pad](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Pad.html)|✔️|✔️|✔️| +|[mindspore.Polar](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Polar.html)|❌|✔️|✔️| +|[mindspore.Polygamma](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Polygamma.html)|❌|✔️|✔️| +|[mindspore.Pow](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Pow.html)|✔️|✔️|✔️| +|[mindspore.PReLU](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.PReLU.html)|✔️|✔️|✔️| +|[mindspore.RandpermV2](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.RandpermV2.html)|❌|❌|✔️| +|[mindspore.Range](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Range.html)|❌|✔️|✔️| +|[mindspore.Real](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Real.html)|❌|✔️|✔️| +|[mindspore.RealDiv](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.RealDiv.html)|✔️|✔️|✔️| +|[mindspore.Reciprocal](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Reciprocal.html)|✔️|✔️|✔️| +|[mindspore.ReduceAll](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ReduceAll.html)|✔️|✔️|✔️| +|[mindspore.ReduceAny](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ReduceAny.html)|✔️|✔️|✔️| 
+|[mindspore.ReduceMax](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ReduceMax.html)|✔️|✔️|✔️| +|[mindspore.ReduceMean](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ReduceMean.html)|✔️|✔️|✔️| +|[mindspore.ReduceMin](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ReduceMin.html)|✔️|✔️|✔️| +|[mindspore.ReduceProd](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ReduceProd.html)|✔️|✔️|✔️| +|[mindspore.ReduceSum](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ReduceSum.html)|✔️|✔️|✔️| +|[mindspore.Reshape](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Reshape.html)|✔️|✔️|✔️| +|[mindspore.ResizeBicubic](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ResizeBicubic.html)|✔️|✔️|✔️| +|[mindspore.ResizeBilinearV2](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ResizeBilinearV2.html)|✔️|✔️|️❌| +|[mindspore.ReverseSequence](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ReverseSequence.html)|✔️|✔️|✔️| +|[mindspore.ReverseV2](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ReverseV2.html)|✔️|✔️|✔️| +|[mindspore.RightShift](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.RightShift.html)|✔️|✔️|✔️| +|[mindspore.Rint](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Rint.html)|✔️|✔️|✔️| +|[mindspore.Round](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Round.html)|✔️|✔️|✔️| +|[mindspore.Rsqrt](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Rsqrt.html)|✔️|✔️|✔️| +|[mindspore.ScatterNd](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ScatterNd.html)|✔️|✔️|✔️| +|[mindspore.ScatterNdAdd](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ScatterNdAdd.html)|✔️|✔️|✔️| +|[mindspore.ScatterNdMax](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ScatterNdMax.html)|❌|✔️|✔️| +|[mindspore.ScatterNdMin](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ScatterNdMin.html)|✔️|✔️|✔️| +|[mindspore.ScatterNdMul](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ScatterNdMul.html)|❌|✔️|✔️| +|[mindspore.ScatterNdSub](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ScatterNdSub.html)|✔️|✔️|✔️| +|[mindspore.ScatterNdUpdate](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ScatterNdUpdate.html)|✔️|✔️|✔️| +|[mindspore.ScatterSub](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ScatterSub.html)|✔️|✔️|✔️| +|[mindspore.ScatterUpdate](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ScatterUpdate.html)|✔️|✔️|✔️| +|[mindspore.Select](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Select.html)|✔️|✔️|✔️| +|[mindspore.Sigmoid](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Sigmoid.html)|✔️|✔️|✔️| +|[mindspore.Sign](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Sign.html)|✔️|✔️|✔️| +|[mindspore.Sin](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Sin.html)|✔️|✔️|✔️| +|[mindspore.Sinc](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Sinc.html)|✔️|✔️|✔️| 
+|[mindspore.Sinh](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Sinh.html)|✔️|✔️|✔️| +|[mindspore.Slice](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Slice.html)|✔️|✔️|✔️| +|[mindspore.SmoothL1Loss](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.SmoothL1Loss.html)|✔️|✔️|✔️| +|[mindspore.Softmax](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Softmax.html)|✔️|✔️|✔️| +|[mindspore.SoftmaxCrossEntropyWithLogits](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.SoftmaxCrossEntropyWithLogits.html)|✔️|✔️|✔️| +|[mindspore.Softplus](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Softplus.html)|✔️|✔️|✔️| +|[mindspore.SoftShrink](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.SoftShrink.html)|✔️|✔️|✔️| +|[mindspore.Sort](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Sort.html)|✔️|❌|✔️| +|[mindspore.Split](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Split.html)|✔️|✔️|️❌| +|[mindspore.Sqrt](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Sqrt.html)|✔️|✔️|✔️| +|[mindspore.Square](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Square.html)|✔️|✔️|✔️| +|[mindspore.Squeeze](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Squeeze.html)|✔️|✔️|✔️| +|[mindspore.Stack](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Stack.html)|✔️|✔️|✔️| +|[mindspore.StandardNormal](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.StandardNormal.html)|✔️|✔️|✔️| +|[mindspore.StridedSlice](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.StridedSlice.html)|✔️|✔️|✔️| +|[mindspore.Sub](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Sub.html)|✔️|✔️|✔️| +|[mindspore.Svd](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Svd.html)|❌|✔️|✔️| +|[mindspore.Tan](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Tan.html)|✔️|✔️|✔️| +|[mindspore.Tanh](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Tanh.html)|✔️|✔️|✔️| +|[mindspore.TensorScatterUpdate](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.TensorScatterUpdate.html)|✔️|✔️|✔️| +|[mindspore.Tile](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Tile.html)|✔️|✔️|✔️| +|[mindspore.TopK](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.TopK.html)|✔️|✔️|✔️| +|[mindspore.Trace](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Trace.html)|✔️|✔️|✔️| +|[mindspore.Transpose](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Transpose.html)|✔️|✔️|✔️| +|[mindspore.Tril](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Tril.html)|✔️|✔️|✔️| +|[mindspore.triu](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.triu.html)|✔️|✔️|✔️| +|[mindspore.Trunc](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Trunc.html)|✔️|✔️|✔️| +|[mindspore.TruncateDiv](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.TruncateDiv.html)|✔️|✔️|️❌| +|[mindspore.UniformInt](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.UniformInt.html)|❌|✔️|✔️| 
+|[mindspore.UniformReal](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.UniformReal.html)|❌|✔️|✔️| +|[mindspore.Unique](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Unique.html)|✔️|✔️|✔️| +|[mindspore.UnsortedSegmentSum](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.UnsortedSegmentSum.html)|✔️|✔️|✔️| +|[mindspore.Xlogy](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Xlogy.html)|✔️|✔️|✔️| +|[mindspore.ZerosLike](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ZerosLike.html)|✔️|✔️|✔️| diff --git a/docs/mindspore/source_zh_cn/api_python/env_var_list.rst b/docs/mindspore/source_zh_cn/api_python/env_var_list.rst index b99fbcf457..f5aa8feaad 100644 --- a/docs/mindspore/source_zh_cn/api_python/env_var_list.rst +++ b/docs/mindspore/source_zh_cn/api_python/env_var_list.rst @@ -1,8 +1,8 @@ 环境变量 ======== -.. image:: https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg - :target: https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/api_python/env_var_list.rst +.. image:: https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg + :target: https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/api_python/env_var_list.rst :alt: 查看源文件 本文介绍MindSpore的环境变量。 @@ -73,8 +73,8 @@ false: 关闭pipeline树优化 - -具体用法详见 `单节点数据缓存 `_ -和 `数据处理性能优化 `_ 。 +具体用法详见 `单节点数据缓存 `_ +和 `数据处理性能优化 `_ 。 图编译执行 ---------- @@ -251,7 +251,7 @@ - 1:使用进程优雅退出功能。 不设置或者其他值: 不使用进程优雅退出功能。 - - 使能进程优雅退出功能,依赖callback函数,具体请参考 `进程优雅退出用例 `_ 。 + - 使能进程优雅退出功能,依赖callback函数,具体请参考 `进程优雅退出用例 `_ 。 * - MS_DEV_BOOST_INFER - 针对前端图编译提供编译优化开关。该开关可加速类型推导模块,以加速网络编译。 - Integer @@ -341,7 +341,7 @@ enable_debug_mode:在图算kernelmod launch前后插同步,并在launch失败时打印调试信息,仅支持GPU后端。默认值: `False` 。 path:指定读取json配置。当设置该选项时,忽略以上选项。 - - 详细说明参考 `自定义融合 `_ + - 详细说明参考 `自定义融合 `_ * - MS_DEV_DISABLE_AUTO_H2D - 控制动态图流程算子输入是否隐式拷贝。开启后,将关闭动态图算子输入隐式拷贝。 @@ -410,13 +410,13 @@ Dump调试 - 取值 - 说明 * - MINDSPORE_DUMP_CONFIG - - 指定 `云侧Dump功能 `_ + - 指定 `云侧Dump功能 `_ 或 `端侧Dump功能 `_ 所依赖的配置文件的路径 - String - 文件路径,支持相对路径与绝对路径 - * - MS_DIAGNOSTIC_DATA_PATH - - 使用 `云侧Dump功能 `_ 时, + - 使用 `云侧Dump功能 `_ 时, 如果Dump配置文件没有设置 `path` 字段或者设置为空字符串,则 `$MS_DIAGNOSTIC_DATA_PATH` `/debug_dump` 就会被当做path的值。 若Dump配置文件中设置了 `path` 字段,则仍以该字段的实际取值为准。 - String @@ -464,7 +464,7 @@ Dump调试 - 0~600,单位:秒,默认值为0。当取值为0时,表示使用默认超时时间,即 `mindspore.get_context("op_timeout")` 的取值。 - 该环境变量仅仅在MS_DUMP_SLICE_SIZE不为零的情况下生效。目前二阶段的等待时间无法超过mindspore.get_context("op_timeout")的值。 -具体用法详见 `Dump功能调试 `_ 。 +具体用法详见 `Dump功能调试 `_ 。 分布式并行 ----------- @@ -629,7 +629,7 @@ Dump调试 - -动态组网相关的具体用法详见 `动态组网 `_ 。 +动态组网相关的具体用法详见 `动态组网 `_ 。 算子编译 -------- @@ -698,7 +698,7 @@ Dump调试 默认:空。 - -常见问题详见 `FAQ `_ 。 +常见问题详见 `FAQ `_ 。 日志 ---- @@ -809,7 +809,7 @@ Dump调试 其中被指定子模块的日志级别将覆盖 `GLOG_v` 在此模块内的设置, 此处子模块的日志级别 `LogLevel` 与 `GLOG_v` 的日志级别含义相同, - MindSpore子模块列表详见 `sub-module_names `_。 + MindSpore子模块列表详见 `sub-module_names `_。 例如可以通过 `GLOG_v=1 MS_SUBMODULE_LOG_v="{PARSER:2,ANALYZER:2}"` 把 `PARSER` 和 `ANALYZER` 模块的日志级别设为WARNING,其他模块的日志级别设为INFO @@ -903,7 +903,7 @@ Dump调试 1:使能CheckSum检测静默故障 - 目前本特性仅支持Atlas A2训练系列产品,仅支持在O0或O1模式下,对bfloat16数据类型的MatMul算子进行CheckSum校验 -特征值检测的更多内容详见 `特征值检测 `_ 。 +特征值检测的更多内容详见 `特征值检测 `_ 。 三方库 ------ @@ -1026,7 +1026,7 @@ Profiler profiler_level (str, 可选) - 设置采集性能数据级别。默认值:Level0。可取值:Level0、Level1、Level2。 - 其他参数可参考 `MindSpore profile参数详解 `_ 。 + 其他参数可参考 `MindSpore profile参数详解 `_ 。 - 
此环境变量使能与输入参数实例化Profiler方式使能性能数据采集的方式二选一。 * - PROFILING_MODE diff --git a/docs/mindspore/source_zh_cn/api_python/operator_list_parallel.md b/docs/mindspore/source_zh_cn/api_python/operator_list_parallel.md index 309870af95..1372827599 100644 --- a/docs/mindspore/source_zh_cn/api_python/operator_list_parallel.md +++ b/docs/mindspore/source_zh_cn/api_python/operator_list_parallel.md @@ -1,176 +1,176 @@ # 算子级并行使用约束 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/api_python/operator_list_parallel.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/api_python/operator_list_parallel.md) | 操作名 | 约束 | Layout配置约束 | | :----------------------------------------------------------- | :----------------------------------------------------------- | :----------------------------------------------------------- | -| [mindspore.ops.Abs](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Abs.html) | 无 | 不支持配置Layout | -| [mindspore.ops.ACos](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ACos.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Acosh](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Acosh.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Add](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Add.html) | 无 | 支持配置Layout,输入的Layout 需要相同或能广播,不支持配置输出的Layout | -| [mindspore.ops.AddN](https://mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.AddN.html) | 无 | 不支持配置Layout | -| [mindspore.ops.ApproximateEqual](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ApproximateEqual.html) | 无 | 不支持配置Layout | -| [mindspore.ops.ArgMaxWithValue](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ArgMaxWithValue.html) | 输入在轴(axis)的维度进行切分时,分布式结果可能会和单机不一致 | 不支持配置Layout | -| [mindspore.ops.ArgMinWithValue](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ArgMinWithValue.html) | 输入在轴(axis)的维度进行切分时,分布式结果可能会和单机不一致 | 不支持配置Layout | -| [mindspore.ops.Asin](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Asin.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Asinh](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Asinh.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Assign](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Assign.html) | 无 | 不支持配置Layout | -| [mindspore.ops.AssignAdd](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.AssignAdd.html) | 无 | 不支持配置Layout | -| [mindspore.ops.AssignSub](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.AssignSub.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Atan](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Atan.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Atan2](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Atan2.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Atanh](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Atanh.html) | 无 | 不支持配置Layout | -| [mindspore.ops.AvgPool](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.AvgPool.html) | 1. 数据格式只支持‘NCHW’;
2. 输出的H/W维的shape必须能被输入的H/W维的切分策略整除;
3. 如果切分H/W:
1)当kernel_size <= stride时,输入切片大小需能被stride整除;
2)不支持kernel_size > stride;
4. 在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | -| [mindspore.ops.AvgPool3D](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.AvgPool3D.html) | 1. 数据格式只支持‘NCDHW’;
2. 如果涉及相邻节点数据交换,只支持Ascend;
3. W维不支持切分;
4. 输出的D/H维的shape必须能被输入的D/H维的切分策略整除;
5. valid模式下:如果切分D/H:
1)当kernel_size <= stride时,输入切片大小需能被stride整除;
2)不支持kernel_size > stride;
6. same/pad模式下:如果切分D/H:
1)kernel_size >= stride时,(包含pad的输入总长度 - kernel_size)需能被stride整除;kernel_size < stride时,pad需等于0且分片能被stride整除;
2)(输出总长度*stride - 输入总长度)需能被切分策略整除;
3)相邻卡间发送接收的数据长度需大于等于0且小于等于切片大小;
7. 在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | -| [mindspore.ops.BatchMatMul](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.BatchMatMul.html) | 每个输入和输出的最后两个维度的切分规则与MatMul算子的切分规则相同 | 支持配置Layout; 每个输入和输出的最后两个维度的切分规则与MatMul算子的切分规则相同 | -| [mindspore.ops.BatchNorm](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.BatchNorm.html) | 不支持GPU | 不支持配置Layout | -| [mindspore.ops.BesselI0e](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.BesselI0e.html) | 无 | 不支持配置Layout | -| [mindspore.ops.BesselI1e](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.BesselI1e.html) | 无 | 不支持配置Layout | -| [mindspore.ops.BiasAdd](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.BiasAdd.html) | 无 | 支持配置Layout,输入参数bias的Layout需要和input_x的最后一维度相同,不支持配置输出的Layout | -| [mindspore.ops.BitwiseAnd](https://mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.BitwiseAnd.html) | 无 | 不支持配置Layout | -| [mindspore.ops.BitwiseOr](https://mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.BitwiseOr.html) | 无 | 不支持配置Layout | -| [mindspore.ops.BitwiseXor](https://mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.BitwiseXor.html) | 无 | 不支持配置Layout | -| [mindspore.ops.BoundingBoxEncode](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.BoundingBoxEncode.html) | 1. 支持对输入(anchor_box)和输入(groundtruth_box)的第一维进行切分;
2. 输入(anchor_box)和输入(groundtruth_box)的切分策略必须一致 | 不支持配置Layout | -| [mindspore.ops.BroadcastTo](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.BroadcastTo.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Cast](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Cast.html) | Auto Parallel和Semi Auto Parallel模式下,配置策略不生效 | 不支持配置Layout | -| [mindspore.ops.Cdist](https://mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Cdist.html) | 1. 当两输入都含有Batch维时,这一维的切分策略必须相等;
2.`M`维度不支持切分 | 不支持配置Layout | -| [mindspore.ops.Ceil](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Ceil.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Concat](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Concat.html) | 输入(input_x)在轴(axis)所对应的维度不能切分,切分后,在数学逻辑上和单机不等价 | 不支持配置Layout | -| [mindspore.ops.Conv2D](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Conv2D.html) | 1. 数据格式只支持‘NCHW’;
2. 如果涉及相邻节点数据交换,只支持Ascend;
3. 当group的值不为1时,不支持切分C-in/C-out;
4. weight的后两维不支持切分;
5. 输出的H/W维的shape必须能被输入的H/W维的切分策略整除;
6. valid模式下:如果切分H/W:
1)当kernel_size <= stride时其中(kernel_size=dilation *(kernel_size - 1) + 1,下同),输入切片大小需能被stride整除;
2)不支持kernel_size > stride;
7. same/pad模式下:如果切分H/W:
1)kernel_size >= stride时,(包含pad的输入总长度 - kernel_size)需能被stride整除;kernel_size < stride时,pad需等于0且分片能被stride整除;
2)( 输出总长度*stride - 输入总长度)需能被切分策略整除;
3)相邻卡间发送接收的数据长度需大于等于0且小于等于切片大小; | 不支持配置Layout | -| [mindspore.ops.Conv3D](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Conv3D.html) | 1. 数据格式只支持‘NCDHW’;
2. 如果涉及相邻节点数据交换,只支持Ascend;
3. 当group的值不为1时,不支持切分C-in/C-out;
4. W维不支持切分,weight的后三维不支持切分;
5. 输出的D/H维的shape必须能被输入的D/H维的切分策略整除;
6. valid模式下:如果切分D/H:
1)当kernel_size <= stride时(kernel_size=dilation *(kernel_size - 1) + 1,下同),输入切片大小需能被stride整除;
2)不支持kernel_size > stride;
7. same/pad模式下:如果切分D/H:
1)kernel_size >= stride时,(包含pad的输入总长度 - kernel_size)需能被stride整除;kernel_size < stride时,pad需等于0且分片能被stride整除;
2)( 输出总长度*stride - 输入总长度)需能被切分策略整除;
3)相邻卡间发送接收的数据长度需大于等于0且小于等于切片大小;
8. 在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | -| [mindspore.ops.Cos](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Cos.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Cosh](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Cosh.html) | 无 | 不支持配置Layout | -| [mindspore.ops.CropAndResize](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.CropAndResize.html) | 1. 不支持对输入(x)的H/W维和输入(boxes)的第二维进行切分;
2. 输入(boxes)和输入(box_index)第一维的切分策略必须一致 | 不支持配置Layout | -| [mindspore.ops.CumProd](https://mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.CumProd.html) | 不支持`axis维`切分 | 不支持配置Layout | -| [mindspore.ops.CumSum](https://mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.CumSum.html) | 不支持`axis维`切分 | 不支持配置Layout | -| [mindspore.ops.Div](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Div.html) | 无 | 支持配置Layout,输入的Layout 需要相同或能广播,不支持配置输出的Layout | -| [mindspore.ops.DivNoNan](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.DivNoNan.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Dropout](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Dropout.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Elu](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Elu.html) | 无 | 不支持配置Layout | -| [mindspore.ops.embedding](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.embedding.html) | 1. padding_idx、max_norm、norm_type和scale_gradid_by_freq仅支持默认值;
2. 第一个输入不支持切分;
3. 第二个输入不支持切不满的情况。 | 支持配置Layout | -| [mindspore.ops.EmbeddingLookup](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.EmbeddingLookup.html) | 同Gather | 不支持配置Layout | -| [mindspore.ops.Equal](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Equal.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Erf](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Erf.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Erfc](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Erfc.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Erfinv](https://mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Erfinv.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Exp](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Exp.html) | 无 | 不支持配置Layout | -| [mindspore.ops.ExpandDims](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ExpandDims.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Expm1](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Expm1.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Floor](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Floor.html) | 无 | 不支持配置Layout | -| [mindspore.ops.FloorDiv](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.FloorDiv.html) | 无 | 不支持配置Layout | -| [mindspore.ops.FloorMod](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.FloorMod.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Gamma](https://mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Gamma.html) | 1. `shape`为tuple,但可以对`shape`进行切分,如shape=(8, 16),对应的策略可以为(2, 4);
2. `alpha`和`beta`对应的策略必须为全1;
3. 当`shape`切分策略不为全1时,结果和单卡不一致 | 不支持配置Layout | -| [mindspore.ops.Gather](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Gather.html) | 1. batch_dims > 0时:
1)input_params的axis维度不支持切分;
2)不支持非均匀切分;
2. batch_dims = 0时:
1)均匀切分时:
a)如果input_params的axis维度不切分,则其他维度可以任意切分;
b)input_params的axis维度切分时:input_params只支持1维和2维,input_indices不支持标量,不支持input_indices和input_params同时进行切分;axis=0且参数在轴(axis)所在维度切分时,支持配置输出切分策略,合法的输出切分策略为(indices_strategy, param_strategy[1:]) 或 ((indices_strategy[0]*param_strategy[0], indices_strategy[1:]), param_strategy[1:])
2)非均匀切分时:
a)axis仅支持为0;
b)非均匀切分仅表示对input_params的第零维非均匀切分;
c)对input_params第零维的切分份数要等于对input_indices最后一维的切分份数;
d)input_params的每个维度都能切分,但input_indices只能切分最后一维,且不支持重复计算;
e)input_indices需满足:后一个切片的Tensor值需大于前一个分片的Tensor值。 | 不支持配置Layout | -| [mindspore.ops.GatherD](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.GatherD.html) | dim所对应的维度不能切分;在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | -| [mindspore.ops.GatherNd](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.GatherNd.html) | 第一个输入不能切分,第二个输入的最后一维不能切分;在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | -| [mindspore.ops.GeLU](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.GeLU.html) | 无 | 支持配置输入的Layout,不支持配置输出的Layout | -| [mindspore.ops.Greater](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Greater.html) | 无 | 不支持配置Layout | -| [mindspore.ops.GreaterEqual](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.GreaterEqual.html) | 无 | 不支持配置Layout | -| [mindspore.ops.HShrink](https://mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.HShrink.html) | 无 | 不支持配置Layout | -| [mindspore.ops.HSigmoid](https://mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.HSigmoid.html) | 无 | 不支持配置Layout | -| [mindspore.ops.InplaceAdd](https://mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.InplaceAdd.html) | 不支持对`x`和`input_v`的第一维切分 | 不支持配置Layout | -| [mindspore.ops.InplaceSub](https://mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.InplaceSub.html) | 同InplaceAdd | 不支持配置Layout | -| [mindspore.ops.InplaceUpdate](https://mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.InplaceUpdate.html) | 同InplaceAdd | 不支持配置Layout | -| [mindspore.ops.Inv](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Inv.html) | 无 | 不支持配置Layout | -| [mindspore.ops.IOU](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.IOU.html) | 支持对`anchor_boxes`和`gt_boxes`的第一维切分 | 不支持配置Layout | -| [mindspore.ops.IsFinite](https://mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.IsFinite.html) | 无 | 不支持配置Layout | -| [mindspore.ops.KLDivLoss](https://mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.KLDivLoss.html) | 无 | 不支持配置Layout | -| [mindspore.ops.LayerNorm](https://mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.LayerNorm.html) | 第二个输入gamma以及第三个输入beta的切分策略需要等于input_x_strategy[begin_params_axis:],input_x_strategy是第一个输入的切分策略 | 支持配置Layout。第二个输入gamma以及第三个输入beta的Layout配置需要等于input_x_layout_tuple[begin_params_axis:],input_x_layout_tuple是第一个输入的layout配置 | -| [mindspore.ops.L2Loss](https://mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.L2Loss.html) | 无 | 不支持配置Layout | -| [mindspore.ops.L2Normalize](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.L2Normalize.html) | 输入(input_x)在轴(axis)对应的维度不能切,切分后,在数学逻辑上和单机不等价 | 不支持配置Layout | -| [mindspore.ops.Lerp](https://mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Lerp.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Less](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Less.html) | 无 | 不支持配置Layout | -| [mindspore.ops.LessEqual](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.LessEqual.html) | 无 | 不支持配置Layout | -| [mindspore.ops.LinSpace](https://mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.LinSpace.html) | 不需要为`start`和`end`配置策略,只需要传入一个长度为1的策略,其数值能整除`num` | 不支持配置Layout | -| [mindspore.ops.LogicalAnd](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.LogicalAnd.html) | 无 | 不支持配置Layout | -| 
[mindspore.ops.LogicalNot](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.LogicalNot.html) | 无 | 不支持配置Layout | -| [mindspore.ops.LogicalOr](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.LogicalOr.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Log](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Log.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Log1p](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Log1p.html) | 无 | 不支持配置Layout | -| [mindspore.ops.LogSoftmax](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.LogSoftmax.html) | 输入(logits)在轴(axis)对应的维度不可切分,切分后,在数学逻辑上和单机不等价 | 不支持配置Layout | -| [mindspore.ops.MaskedFill](https://mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.MaskedFill.html) | 无 | 不支持配置Layout | -| [mindspore.ops.MatMul](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.MatMul.html) | 1. 当`transpose_b=True`时,输入的切分策略需是 ((A, B), (C, B)) 的形式
2. 当`transpose_b=False`时,输入的切分策略需是 ((A, B), (B, C)) 的形式;
3. 支持设置输出切分策略,合法的输出切分策略为 ((A, C),) 或 ((A * B, C),) 。 | 支持配置Layout。
1. 当`transpose_b=True`时,输入的Layout配置需是 (layout(A, B), layout(C, B)) 的形式,其中A/B/C可以是设备别名或者是设备别名元组
2. 当`transpose_b=False`时,输入的layout配置需是 (layout(A, B), layout(B, C)) 的形式,其中A/B/C可以是设备别名或者是设备别名元组;
3. 支持配置输出Layout,合法的输出Layout为 (layout(A, C),) 或 (layout((A, B), C),),这里A/B/C均为设备别名;如若A是别名元组(A1, A2),则合法的输出Layout为 (layout((A1, A2), C),) 或 (layout((A1, A2, B), C),)
4. 切分策略中,若A/B/C为设备别名,A/B/C应互不相同;若A/B/C中存在设备别名元组,则元组中的每个子项也应该与其余子项或其他设备别名互不相同。 | -| [mindspore.ops.Maximum](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Maximum.html) | 无 | 不支持配置Layout | -| [mindspore.ops.MaxPool](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.MaxPool.html) | 1. 数据格式只支持‘NCHW’;
2. 输出的H/W维的shape必须能被输入的H/W维的切分策略整除;
3. 如果切分H/W:
1)当kernel_size <= stride时,输入切片大小需能被stride整除;
2)不支持kernel_size > stride;
4. 在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | -| [mindspore.ops.MaxPool3D](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.MaxPool3D.html) | 同AvgPool3D | 不支持配置Layout | -| [mindspore.ops.Minimum](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Minimum.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Mish](https://mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Mish.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Mod](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Mod.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Mul](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Mul.html) | 无 | 支持配置Layout,输入的Layout 需要相同或能广播,不支持配置输出的Layout | -| [mindspore.ops.MulNoNan](https://mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.MulNoNan.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Neg](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Neg.html) | 无 | 不支持配置Layout | -| [mindspore.ops.NotEqual](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.NotEqual.html) | 无 | 不支持配置Layout | -| [mindspore.ops.OneHot](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.OneHot.html) | 仅支持输入(indices)是一维的Tensor,切分策略要配置输出的切分策略,以及第1和第2个输入的切分策略 | 不支持配置Layout | -| [mindspore.ops.OnesLike](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.OnesLike.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Pow](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Pow.html) | 无 | 不支持配置Layout | -| [mindspore.ops.PReLU](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.PReLU.html) | weight的shape在非[1]的情况下,输入(input_x)的Channel维要和weight的切分方式一致 | 不支持配置Layout | -| [mindspore.ops.RandomChoiceWithMask](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.RandomChoiceWithMask.html) | 不支持切分,仅支持全1策略 | 不支持配置Layout | -| [mindspore.ops.RealDiv](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.RealDiv.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Reciprocal](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Reciprocal.html) | 无 | 不支持配置Layout | -| [mindspore.ops.ReduceMax](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ReduceMax.html) | 输入在轴(axis)的维度进行切分时,分布式结果可能会和单机不一致 | 不支持配置Layout | -| [mindspore.ops.ReduceMin](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ReduceMin.html) | 输入在轴(axis)的维度进行切分时,分布式结果可能会和单机不一致 | 不支持配置Layout | -| [mindspore.ops.ReduceSum](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ReduceSum.html) | 无 | 不支持配置Layout | -| [mindspore.ops.ReduceMean](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ReduceMean.html) | 无 | 不支持配置Layout | -| [mindspore.ops.ReLU](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ReLU.html) | 无 | 支持配置输入的Layout,不支持配置输出的Layout | -| [mindspore.ops.ReLU6](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ReLU6.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Reshape](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Reshape.html) | 不支持配置切分策略,并且,在自动并行模式下,当reshape API后接有多个API,不允许对这些API配置不同的切分策略 | 不支持配置Layout | -| [mindspore.ops.ResizeNearestNeighbor](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ResizeNearestNeighbor.html) | 在`align_corners=True`时只支持切分第一维和第二维。 | 不支持配置Layout | -| 
[mindspore.ops.Rint](https://mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Rint.html) | 无 | 不支持配置Layout | -| [mindspore.ops.ROIAlign](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ROIAlign.html) | 不支持对输入(features)的H/W维和输入(rois)的第二维进行切分 | 不支持配置Layout | -| [mindspore.ops.Round](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Round.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Rsqrt](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Rsqrt.html) | 无 | 不支持配置Layout | -| [mindspore.ops.ScatterAdd](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ScatterAdd.html) | 第二个输入不能切分,第三个输入的前n维(n为第二个输入的维度)不能切分;在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | -| [mindspore.ops.ScatterDiv](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ScatterDiv.html) | 第二个输入不能切分,第三个输入的前n维(n为第二个输入的维度)不能切分;在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | -| [mindspore.ops.ScatterMax](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ScatterMax.html) | 第一个输入的第一维不能切分,第二个输入不能切分,第三个输入的前n维(n为第二个输入的维度)不能切分;在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | -| [mindspore.ops.ScatterMin](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ScatterMin.html) | 第一个输入的第一维不能切分,第二个输入不能切分,第三个输入的前n维(n为第二个输入的维度)不能切分;在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | -| [mindspore.ops.ScatterMul](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ScatterMul.html) | 第二个输入不能切分,第三个输入的前n维(n为第二个输入的维度)不能切分;在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | -| [mindspore.ops.ScatterNdAdd](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ScatterNdAdd.html) | 第二个输入不能切分,第三个输入的前n-1维(n为第二个输入的维度)不能切分,第三个输入的剩下k个维度(除去前n-1维度)的切分与第一个输入的最后k个切分一致;在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | -| [mindspore.ops.ScatterNdSub](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ScatterNdSub.html) | 第二个输入不能切分,第三个输入的前n-1维(n为第二个输入的维度)不能切分,第三个输入的剩下k个维度(除去前n-1维度)的切分与第一个输入的最后k个切分一致;在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | -| [mindspore.ops.ScatterNdUpdate](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ScatterNdUpdate.html) | 第一个输入前m维度不能切(m为第二个输入indices的最后一维的值indices[-1])第二个输入不能切分,第三个输入的前n-1维(n为第二个输入的维度)不能切分,第三个输入的剩下k个维度(除去前n-1维度)的切分与第一个输入的最后k个切分一致;在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | -| [mindspore.ops.ScatterSub](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ScatterSub.html) | 第二个输入不能切分,第三个输入的前n维(n为第二个输入的维度)不能切分;在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | -| [mindspore.ops.ScatterUpdate](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ScatterUpdate.html) | 第一个输入的第一维不能切分,第二个输入不能切分,第三个输入的前n维(n为第二个输入的维度)不能切分;在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | -| [mindspore.ops.TensorScatterAdd](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.TensorScatterAdd.html) | 第二个输入不能切分,第三个输入的前n-1维(n为第二个输入的维度)不能切分,第三个输入的剩下k个维度(除去前n-1维度)的切分与第一个输入的最后k个切分一致;在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | -| [mindspore.ops.TensorScatterDiv](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.TensorScatterDiv.html) | 第二个输入不能切分,第三个输入的前n-1维(n为第二个输入的维度)不能切分,第三个输入的剩下k个维度(除去前n-1维度)的切分与第一个输入的最后k个切分一致;在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | -| [mindspore.ops.TensorScatterMax](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.TensorScatterMax.html) | 
第一个输入前m维度不能切(m为第二个输入indices的最后一维的值indices[-1])第二个输入不能切分,第三个输入的前n-1维(n为第二个输入的维度)不能切分,第三个输入的剩下k个维度(除去前n-1维度)的切分与第一个输入的最后k个切分一致;在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | -| [mindspore.ops.TensorScatterMax](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.TensorScatterMin.html) | 第一个输入前m维度不能切(m为第二个输入indices的最后一维的值indices[-1])第二个输入不能切分,第三个输入的前n-1维(n为第二个输入的维度)不能切分,第三个输入的剩下k个维度(除去前n-1维度)的切分与第一个输入的最后k个切分一致;在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | -| [mindspore.ops.TensorScatterMul](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.TensorScatterMul.html) | 第二个输入不能切分,第三个输入的前n-1维(n为第二个输入的维度)不能切分,第三个输入的剩下k个维度(除去前n-1维度)的切分与第一个输入的最后k个切分一致;在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | -| [mindspore.ops.TensorScatterAdd](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.TensorScatterSub.html) | 第二个输入不能切分,第三个输入的前n-1维(n为第二个输入的维度)不能切分,第三个输入的剩下k个维度(除去前n-1维度)的切分与第一个输入的最后k个切分一致;在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | -| [mindspore.ops.TensorScatterUpdate](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.TensorScatterUpdate.html) | 第一个输入前m维度不能切(m为第二个输入indices的最后一维的值indices[-1])第二个输入不能切分,第三个输入的前n-1维(n为第二个输入的维度)不能切分,第三个输入的剩下k个维度(除去前n-1维度)的切分与第一个输入的最后k个切分一致;在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | -| [mindspore.ops.Select](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Select.html) | 在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | -| [mindspore.ops.SeLU](https://mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.SeLU.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Sigmoid](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Sigmoid.html) | 无 | 支持配置输入的Layout,不支持配置输出的Layout | -| [mindspore.ops.SigmoidCrossEntropyWithLogits](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.SigmoidCrossEntropyWithLogits.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Sign](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Sign.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Sin](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Sin.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Sinh](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Sinh.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Softmax](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Softmax.html) | 输入(logits)在轴(axis)对应的维度不可切分,切分后,在数学逻辑上和单机不等价 | 支持配置输入的Layout,不支持配置输出的Layout,并且不能在轴(axis)对应的维度配置Layout | -| [mindspore.ops.SoftmaxCrossEntropyWithLogits](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.SoftmaxCrossEntropyWithLogits.html) | 输入(logits、labels)的最后一维不能切分;有两个输出,正向的loss只支持取[0] | 不支持配置Layout | -| [mindspore.ops.Softplus](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Softplus.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Softsign](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Softsign.html) | 无 | 不支持配置Layout | -| [mindspore.ops.SoftShrink](https://mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.SoftShrink.html) | 无 | 不支持配置Layout | -| [mindspore.ops.SparseGatherV2](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.SparseGatherV2.html) | 同Gather | 不支持配置Layout | -| [mindspore.ops.Split](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Split.html) | 轴(axis)所对应的维度不能切分,切分后,在数学逻辑上和单机不等价 | 支持配置Layout,并且不能在轴(axis)所对应的维度配置 | -| 
[mindspore.ops.Sqrt](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Sqrt.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Square](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Square.html) | 无 | 不支持配置Layout | -| [mindspore.ops.SquaredDifference](https://mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.SquaredDifference.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Squeeze](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Squeeze.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Stack](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Stack.html) | 无 | 不支持配置Layout | -| [mindspore.ops.StridedSlice](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.StridedSlice.html) | 仅支持值为全0的mask;需要切分的维度必须全部提取;输入在strides不为1对应的维度不支持切分 | 不支持配置Layout | -| [mindspore.ops.Slice](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Slice.html) | 需要切分的维度必须全部提取 | 不支持配置Layout | -| [mindspore.ops.Sub](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Sub.html) | 无 | 支持配置Layout,输入的Layout 需要相同或能广播,不支持配置输出的Layout | -| [mindspore.ops.Tan](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Tan.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Tanh](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Tanh.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Tile](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Tile.html) | 仅支持对dims配置切分策略 | 支持配置输入与输出的Layout,dim (复制次数) 为1的维度,输入与输出中此维度切分策略需相同;dim>1的维度,输入中此维度不允许切分以防止复制后数据乱序,输出中对应dim需要能被切分数整除 | -| [mindspore.ops.TopK](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.TopK.html) | 最后一维不支持切分,切分后,在数学逻辑上和单机不等价 | 不支持配置Layout | -| [mindspore.ops.Transpose](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Transpose.html) | 无 | 支持配置Layout,不支持配置输出的Layout | -| [mindspore.ops.TruncateDiv](https://mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.TruncateDiv.html) | 无 | 不支持配置Layout | -| [mindspore.ops.TruncateMod](https://mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.TruncateMod.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Unique](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Unique.html) | 只支持重复计算的策略(1,) | 不支持配置Layout | -| [mindspore.ops.UnsortedSegmentSum](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.UnsortedSegmentSum.html) | 输入input_x和segment_ids的切分配置必须在segment_ids的维度上保持一致 | 不支持配置Layout | -| [mindspore.ops.UnsortedSegmentMin](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.UnsortedSegmentMin.html) | 输入input_x和segment_ids的切分配置必须在segment_ids的维度上保持一致。注意:在segment id为空时,输出向量的对应位置会填充为输入类型的最大值。需要用户进行掩码处理,将最大值转换成0。否则容易造成数值溢出,导致通信API上溢错误,从而引发Run Task Error | 不支持配置Layout | -| [mindspore.ops.UnsortedSegmentMax](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.UnsortedSegmentMax.html) | 输入input_x和segment_ids的切分配置必须在segment_ids的维度上保持一致。注意:在segment id为空时,输出向量的对应位置会填充为输入类型的最小值。需要用户进行掩码处理,将最小值转换成0。否则容易造成数值溢出,导致通信API上溢错误,从而引发Run Task Error | 不支持配置Layout | -| [mindspore.ops.Xdivy](https://mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Xdivy.html) | 无 | 不支持配置Layout | -| [mindspore.ops.Xlogy](https://mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Xlogy.html) | 无 | 不支持配置Layout | -| [mindspore.ops.ZerosLike](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ZerosLike.html) | 无 | 不支持配置Layout | 
+| [mindspore.ops.Abs](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Abs.html) | 无 | 不支持配置Layout | +| [mindspore.ops.ACos](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ACos.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Acosh](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Acosh.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Add](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Add.html) | 无 | 支持配置Layout,输入的Layout 需要相同或能广播,不支持配置输出的Layout | +| [mindspore.ops.AddN](https://mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.AddN.html) | 无 | 不支持配置Layout | +| [mindspore.ops.ApproximateEqual](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ApproximateEqual.html) | 无 | 不支持配置Layout | +| [mindspore.ops.ArgMaxWithValue](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ArgMaxWithValue.html) | 输入在轴(axis)的维度进行切分时,分布式结果可能会和单机不一致 | 不支持配置Layout | +| [mindspore.ops.ArgMinWithValue](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ArgMinWithValue.html) | 输入在轴(axis)的维度进行切分时,分布式结果可能会和单机不一致 | 不支持配置Layout | +| [mindspore.ops.Asin](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Asin.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Asinh](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Asinh.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Assign](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Assign.html) | 无 | 不支持配置Layout | +| [mindspore.ops.AssignAdd](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.AssignAdd.html) | 无 | 不支持配置Layout | +| [mindspore.ops.AssignSub](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.AssignSub.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Atan](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Atan.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Atan2](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Atan2.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Atanh](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Atanh.html) | 无 | 不支持配置Layout | +| [mindspore.ops.AvgPool](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.AvgPool.html) | 1. 数据格式只支持‘NCHW’;
2. 输出的H/W维的shape必须能被输入的H/W维的切分策略整除;
3. 如果切分H/W:
1)当kernel_size <= stride时,输入切片大小需能被stride整除;
2)不支持kernel_size > stride;
4. 在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | +| [mindspore.ops.AvgPool3D](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.AvgPool3D.html) | 1. 数据格式只支持‘NCDHW’;
2. 如果涉及相邻节点数据交换,只支持Ascend;
3. W维不支持切分;
4. 输出的D/H维的shape必须能被输入的D/H维的切分策略整除;
5. valid模式下:如果切分D/H:
1)当kernel_size <= stride时,输入切片大小需能被stride整除;
2)不支持kernel_size > stride;
6. same/pad模式下:如果切分D/H:
1)kernel_size >= stride时,(包含pad的输入总长度 - kernel_size)需能被stride整除;kernel_size < stride时,pad需等于0且分片能被stride整除;
2)(输出总长度*stride - 输入总长度)需能被切分策略整除;
3)相邻卡间发送接收的数据长度需大于等于0且小于等于切片大小;
7. 在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | +| [mindspore.ops.BatchMatMul](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.BatchMatMul.html) | 每个输入和输出的最后两个维度的切分规则与MatMul算子的切分规则相同 | 支持配置Layout; 每个输入和输出的最后两个维度的切分规则与MatMul算子的切分规则相同 | +| [mindspore.ops.BatchNorm](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.BatchNorm.html) | 不支持GPU | 不支持配置Layout | +| [mindspore.ops.BesselI0e](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.BesselI0e.html) | 无 | 不支持配置Layout | +| [mindspore.ops.BesselI1e](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.BesselI1e.html) | 无 | 不支持配置Layout | +| [mindspore.ops.BiasAdd](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.BiasAdd.html) | 无 | 支持配置Layout,输入参数bias的Layout需要和input_x的最后一维度相同,不支持配置输出的Layout | +| [mindspore.ops.BitwiseAnd](https://mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.BitwiseAnd.html) | 无 | 不支持配置Layout | +| [mindspore.ops.BitwiseOr](https://mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.BitwiseOr.html) | 无 | 不支持配置Layout | +| [mindspore.ops.BitwiseXor](https://mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.BitwiseXor.html) | 无 | 不支持配置Layout | +| [mindspore.ops.BoundingBoxEncode](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.BoundingBoxEncode.html) | 1. 支持对输入(anchor_box)和输入(groundtruth_box)的第一维进行切分;
2. 输入(anchor_box)和输入(groundtruth_box)的切分策略必须一致 | 不支持配置Layout | +| [mindspore.ops.BroadcastTo](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.BroadcastTo.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Cast](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Cast.html) | Auto Parallel和Semi Auto Parallel模式下,配置策略不生效 | 不支持配置Layout | +| [mindspore.ops.Cdist](https://mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Cdist.html) | 1. 当两输入都含有Batch维时,这一维的切分策略必须相等;
2.`M`维度不支持切分 | 不支持配置Layout | +| [mindspore.ops.Ceil](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Ceil.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Concat](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Concat.html) | 输入(input_x)在轴(axis)所对应的维度不能切分,切分后,在数学逻辑上和单机不等价 | 不支持配置Layout | +| [mindspore.ops.Conv2D](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Conv2D.html) | 1. 数据格式只支持‘NCHW’;
2. 如果涉及相邻节点数据交换,只支持Ascend;
3. 当group的值不为1时,不支持切分C-in/C-out;
4. weight的后两维不支持切分;
5. 输出的H/W维的shape必须能被输入的H/W维的切分策略整除;
6. valid模式下:如果切分H/W:
1)当kernel_size <= stride时(其中kernel_size=dilation *(kernel_size - 1) + 1,下同),输入切片大小需能被stride整除;<br/>
2)不支持kernel_size > stride;
7. same/pad模式下:如果切分H/W:
1)kernel_size >= stride时,(包含pad的输入总长度 - kernel_size)需能被stride整除;kernel_size < stride时,pad需等于0且分片能被stride整除;
2)( 输出总长度*stride - 输入总长度)需能被切分策略整除;
3)相邻卡间发送接收的数据长度需大于等于0且小于等于切片大小; | 不支持配置Layout | +| [mindspore.ops.Conv3D](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Conv3D.html) | 1. 数据格式只支持‘NCDHW’;
2. 如果涉及相邻节点数据交换,只支持Ascend;
3. 当group的值不为1时,不支持切分C-in/C-out;
4. W维不支持切分,weight的后三维不支持切分;
5. 输出的D/H维的shape必须能被输入的D/H维的切分策略整除;
6. valid模式下:如果切分D/H:
1)当kernel_size <= stride时(kernel_size=dilation *(kernel_size - 1) + 1,下同),输入切片大小需能被stride整除;
2)不支持kernel_size > stride;
7. same/pad模式下:如果切分D/H:
1)kernel_size >= stride时,(包含pad的输入总长度 - kernel_size)需能被stride整除;kernel_size < stride时,pad需等于0且分片能被stride整除;
2)( 输出总长度*stride - 输入总长度)需能被切分策略整除;
3)相邻卡间发送接收的数据长度需大于等于0且小于等于切片大小;
8. 在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | +| [mindspore.ops.Cos](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Cos.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Cosh](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Cosh.html) | 无 | 不支持配置Layout | +| [mindspore.ops.CropAndResize](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.CropAndResize.html) | 1. 不支持对输入(x)的H/W维和输入(boxes)的第二维进行切分;
2. 输入(boxes)和输入(box_index)第一维的切分策略必须一致 | 不支持配置Layout | +| [mindspore.ops.CumProd](https://mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.CumProd.html) | 不支持`axis维`切分 | 不支持配置Layout | +| [mindspore.ops.CumSum](https://mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.CumSum.html) | 不支持`axis维`切分 | 不支持配置Layout | +| [mindspore.ops.Div](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Div.html) | 无 | 支持配置Layout,输入的Layout 需要相同或能广播,不支持配置输出的Layout | +| [mindspore.ops.DivNoNan](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.DivNoNan.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Dropout](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Dropout.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Elu](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Elu.html) | 无 | 不支持配置Layout | +| [mindspore.ops.embedding](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.embedding.html) | 1. padding_idx、max_norm、norm_type和scale_gradid_by_freq仅支持默认值;
2. 第一个输入不支持切分;
3. 第二个输入不支持切不满的情况。 | 支持配置Layout | +| [mindspore.ops.EmbeddingLookup](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.EmbeddingLookup.html) | 同Gather | 不支持配置Layout | +| [mindspore.ops.Equal](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Equal.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Erf](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Erf.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Erfc](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Erfc.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Erfinv](https://mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Erfinv.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Exp](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Exp.html) | 无 | 不支持配置Layout | +| [mindspore.ops.ExpandDims](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ExpandDims.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Expm1](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Expm1.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Floor](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Floor.html) | 无 | 不支持配置Layout | +| [mindspore.ops.FloorDiv](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.FloorDiv.html) | 无 | 不支持配置Layout | +| [mindspore.ops.FloorMod](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.FloorMod.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Gamma](https://mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Gamma.html) | 1. `shape`为tuple,但可以对`shape`进行切分,如shape=(8, 16),对应的策略可以为(2, 4);
2. `alpha`和`beta`对应的策略必须为全1;
3. 当`shape`切分策略不为全1时,结果和单卡不一致 | 不支持配置Layout | +| [mindspore.ops.Gather](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Gather.html) | 1. batch_dims > 0时:
1)input_params的axis维度不支持切分;
2)不支持非均匀切分;
2. batch_dims = 0时:
1)均匀切分时:
a)如果input_params的axis维度不切分,则其他维度可以任意切分;
b)input_params的axis维度切分时:input_params只支持1维和2维,input_indices不支持标量,不支持input_indices和input_params同时进行切分;axis=0且参数在轴(axis)所在维度切分时,支持配置输出切分策略,合法的输出切分策略为(indices_strategy, param_strategy[1:]) 或 ((indices_strategy[0]*param_strategy[0], indices_strategy[1:]), param_strategy[1:])
2)非均匀切分时:
a)axis仅支持为0;
b)非均匀切分仅表示对input_params的第零维非均匀切分;
c)对input_params第零维的切分份数要等于对input_indices最后一维的切分份数;
d)input_params的每个维度都能切分,但input_indices只能切分最后一维,且不支持重复计算;
e)input_indices需满足:后一个切片的Tensor值需大于前一个分片的Tensor值。 | 不支持配置Layout | +| [mindspore.ops.GatherD](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.GatherD.html) | dim所对应的维度不能切分;在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | +| [mindspore.ops.GatherNd](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.GatherNd.html) | 第一个输入不能切分,第二个输入的最后一维不能切分;在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | +| [mindspore.ops.GeLU](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.GeLU.html) | 无 | 支持配置输入的Layout,不支持配置输出的Layout | +| [mindspore.ops.Greater](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Greater.html) | 无 | 不支持配置Layout | +| [mindspore.ops.GreaterEqual](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.GreaterEqual.html) | 无 | 不支持配置Layout | +| [mindspore.ops.HShrink](https://mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.HShrink.html) | 无 | 不支持配置Layout | +| [mindspore.ops.HSigmoid](https://mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.HSigmoid.html) | 无 | 不支持配置Layout | +| [mindspore.ops.InplaceAdd](https://mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.InplaceAdd.html) | 不支持对`x`和`input_v`的第一维切分 | 不支持配置Layout | +| [mindspore.ops.InplaceSub](https://mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.InplaceSub.html) | 同InplaceAdd | 不支持配置Layout | +| [mindspore.ops.InplaceUpdate](https://mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.InplaceUpdate.html) | 同InplaceAdd | 不支持配置Layout | +| [mindspore.ops.Inv](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Inv.html) | 无 | 不支持配置Layout | +| [mindspore.ops.IOU](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.IOU.html) | 支持对`anchor_boxes`和`gt_boxes`的第一维切分 | 不支持配置Layout | +| [mindspore.ops.IsFinite](https://mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.IsFinite.html) | 无 | 不支持配置Layout | +| [mindspore.ops.KLDivLoss](https://mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.KLDivLoss.html) | 无 | 不支持配置Layout | +| [mindspore.ops.LayerNorm](https://mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.LayerNorm.html) | 第二个输入gamma以及第三个输入beta的切分策略需要等于input_x_strategy[begin_params_axis:],input_x_strategy是第一个输入的切分策略 | 支持配置Layout。第二个输入gamma以及第三个输入beta的Layout配置需要等于input_x_layout_tuple[begin_params_axis:],input_x_layout_tuple是第一个输入的layout配置 | +| [mindspore.ops.L2Loss](https://mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.L2Loss.html) | 无 | 不支持配置Layout | +| [mindspore.ops.L2Normalize](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.L2Normalize.html) | 输入(input_x)在轴(axis)对应的维度不能切,切分后,在数学逻辑上和单机不等价 | 不支持配置Layout | +| [mindspore.ops.Lerp](https://mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Lerp.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Less](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Less.html) | 无 | 不支持配置Layout | +| [mindspore.ops.LessEqual](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.LessEqual.html) | 无 | 不支持配置Layout | +| [mindspore.ops.LinSpace](https://mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.LinSpace.html) | 不需要为`start`和`end`配置策略,只需要传入一个长度为1的策略,其数值能整除`num` | 不支持配置Layout | +| [mindspore.ops.LogicalAnd](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.LogicalAnd.html) | 无 | 不支持配置Layout | +| 
[mindspore.ops.LogicalNot](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.LogicalNot.html) | 无 | 不支持配置Layout | +| [mindspore.ops.LogicalOr](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.LogicalOr.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Log](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Log.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Log1p](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Log1p.html) | 无 | 不支持配置Layout | +| [mindspore.ops.LogSoftmax](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.LogSoftmax.html) | 输入(logits)在轴(axis)对应的维度不可切分,切分后,在数学逻辑上和单机不等价 | 不支持配置Layout | +| [mindspore.ops.MaskedFill](https://mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.MaskedFill.html) | 无 | 不支持配置Layout | +| [mindspore.ops.MatMul](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.MatMul.html) | 1. 当`transpose_b=True`时,输入的切分策略需是 ((A, B), (C, B)) 的形式
2. 当`transpose_b=False`时,输入的切分策略需是 ((A, B), (B, C)) 的形式;
3. 支持设置输出切分策略,合法的输出切分策略为 ((A, C),) 或 ((A * B, C),) 。 | 支持配置Layout。
1. 当`transpose_b=True`时,输入的Layout配置需是 (layout(A, B), layout(C, B)) 的形式,其中A/B/C可以是设备别名或者是设备别名元组
2. 当`transpose_b=False`时,输入的layout配置需是 (layout(A, B), layout(B, C)) 的形式,其中A/B/C可以是设备别名或者是设备别名元组;
3. 支持配置输出Layout,合法的输出Layout为 (layout(A, C),) 或 (layout((A, B), C),),这里A/B/C均为设备别名;如若A是别名元组(A1, A2),则合法的输出Layout为 (layout((A1, A2), C),) 或 (layout((A1, A2, B), C),)
4. 切分策略中,若A/B/C为设备别名,A/B/C应互不相同;若A/B/C中存在设备别名元组,则元组中的每个子项也应该与其余子项或其他设备别名互不相同。 | +| [mindspore.ops.Maximum](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Maximum.html) | 无 | 不支持配置Layout | +| [mindspore.ops.MaxPool](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.MaxPool.html) | 1. 数据格式只支持‘NCHW’;
2. 输出的H/W维的shape必须能被输入的H/W维的切分策略整除;
3. 如果切分H/W:
1)当kernel_size <= stride时,输入切片大小需能被stride整除;
2)不支持kernel_size > stride;
4. 在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | +| [mindspore.ops.MaxPool3D](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.MaxPool3D.html) | 同AvgPool3D | 不支持配置Layout | +| [mindspore.ops.Minimum](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Minimum.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Mish](https://mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Mish.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Mod](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Mod.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Mul](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Mul.html) | 无 | 支持配置Layout,输入的Layout 需要相同或能广播,不支持配置输出的Layout | +| [mindspore.ops.MulNoNan](https://mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.MulNoNan.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Neg](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Neg.html) | 无 | 不支持配置Layout | +| [mindspore.ops.NotEqual](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.NotEqual.html) | 无 | 不支持配置Layout | +| [mindspore.ops.OneHot](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.OneHot.html) | 仅支持输入(indices)是一维的Tensor,切分策略要配置输出的切分策略,以及第1和第2个输入的切分策略 | 不支持配置Layout | +| [mindspore.ops.OnesLike](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.OnesLike.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Pow](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Pow.html) | 无 | 不支持配置Layout | +| [mindspore.ops.PReLU](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.PReLU.html) | weight的shape在非[1]的情况下,输入(input_x)的Channel维要和weight的切分方式一致 | 不支持配置Layout | +| [mindspore.ops.RandomChoiceWithMask](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.RandomChoiceWithMask.html) | 不支持切分,仅支持全1策略 | 不支持配置Layout | +| [mindspore.ops.RealDiv](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.RealDiv.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Reciprocal](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Reciprocal.html) | 无 | 不支持配置Layout | +| [mindspore.ops.ReduceMax](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ReduceMax.html) | 输入在轴(axis)的维度进行切分时,分布式结果可能会和单机不一致 | 不支持配置Layout | +| [mindspore.ops.ReduceMin](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ReduceMin.html) | 输入在轴(axis)的维度进行切分时,分布式结果可能会和单机不一致 | 不支持配置Layout | +| [mindspore.ops.ReduceSum](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ReduceSum.html) | 无 | 不支持配置Layout | +| [mindspore.ops.ReduceMean](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ReduceMean.html) | 无 | 不支持配置Layout | +| [mindspore.ops.ReLU](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ReLU.html) | 无 | 支持配置输入的Layout,不支持配置输出的Layout | +| [mindspore.ops.ReLU6](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ReLU6.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Reshape](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Reshape.html) | 不支持配置切分策略,并且,在自动并行模式下,当reshape API后接有多个API,不允许对这些API配置不同的切分策略 | 不支持配置Layout | +| [mindspore.ops.ResizeNearestNeighbor](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ResizeNearestNeighbor.html) | 在`align_corners=True`时只支持切分第一维和第二维。 | 不支持配置Layout | +| 
[mindspore.ops.Rint](https://mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Rint.html) | 无 | 不支持配置Layout | +| [mindspore.ops.ROIAlign](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ROIAlign.html) | 不支持对输入(features)的H/W维和输入(rois)的第二维进行切分 | 不支持配置Layout | +| [mindspore.ops.Round](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Round.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Rsqrt](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Rsqrt.html) | 无 | 不支持配置Layout | +| [mindspore.ops.ScatterAdd](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ScatterAdd.html) | 第二个输入不能切分,第三个输入的前n维(n为第二个输入的维度)不能切分;在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | +| [mindspore.ops.ScatterDiv](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ScatterDiv.html) | 第二个输入不能切分,第三个输入的前n维(n为第二个输入的维度)不能切分;在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | +| [mindspore.ops.ScatterMax](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ScatterMax.html) | 第一个输入的第一维不能切分,第二个输入不能切分,第三个输入的前n维(n为第二个输入的维度)不能切分;在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | +| [mindspore.ops.ScatterMin](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ScatterMin.html) | 第一个输入的第一维不能切分,第二个输入不能切分,第三个输入的前n维(n为第二个输入的维度)不能切分;在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | +| [mindspore.ops.ScatterMul](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ScatterMul.html) | 第二个输入不能切分,第三个输入的前n维(n为第二个输入的维度)不能切分;在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | +| [mindspore.ops.ScatterNdAdd](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ScatterNdAdd.html) | 第二个输入不能切分,第三个输入的前n-1维(n为第二个输入的维度)不能切分,第三个输入的剩下k个维度(除去前n-1维度)的切分与第一个输入的最后k个切分一致;在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | +| [mindspore.ops.ScatterNdSub](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ScatterNdSub.html) | 第二个输入不能切分,第三个输入的前n-1维(n为第二个输入的维度)不能切分,第三个输入的剩下k个维度(除去前n-1维度)的切分与第一个输入的最后k个切分一致;在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | +| [mindspore.ops.ScatterNdUpdate](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ScatterNdUpdate.html) | 第一个输入前m维度不能切(m为第二个输入indices的最后一维的值indices[-1])第二个输入不能切分,第三个输入的前n-1维(n为第二个输入的维度)不能切分,第三个输入的剩下k个维度(除去前n-1维度)的切分与第一个输入的最后k个切分一致;在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | +| [mindspore.ops.ScatterSub](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ScatterSub.html) | 第二个输入不能切分,第三个输入的前n维(n为第二个输入的维度)不能切分;在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | +| [mindspore.ops.ScatterUpdate](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ScatterUpdate.html) | 第一个输入的第一维不能切分,第二个输入不能切分,第三个输入的前n维(n为第二个输入的维度)不能切分;在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | +| [mindspore.ops.TensorScatterAdd](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.TensorScatterAdd.html) | 第二个输入不能切分,第三个输入的前n-1维(n为第二个输入的维度)不能切分,第三个输入的剩下k个维度(除去前n-1维度)的切分与第一个输入的最后k个切分一致;在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | +| [mindspore.ops.TensorScatterDiv](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.TensorScatterDiv.html) | 第二个输入不能切分,第三个输入的前n-1维(n为第二个输入的维度)不能切分,第三个输入的剩下k个维度(除去前n-1维度)的切分与第一个输入的最后k个切分一致;在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | +| [mindspore.ops.TensorScatterMax](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.TensorScatterMax.html) | 
第一个输入前m维度不能切(m为第二个输入indices的最后一维的值indices[-1])第二个输入不能切分,第三个输入的前n-1维(n为第二个输入的维度)不能切分,第三个输入的剩下k个维度(除去前n-1维度)的切分与第一个输入的最后k个切分一致;在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | +| [mindspore.ops.TensorScatterMax](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.TensorScatterMin.html) | 第一个输入前m维度不能切(m为第二个输入indices的最后一维的值indices[-1])第二个输入不能切分,第三个输入的前n-1维(n为第二个输入的维度)不能切分,第三个输入的剩下k个维度(除去前n-1维度)的切分与第一个输入的最后k个切分一致;在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | +| [mindspore.ops.TensorScatterMul](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.TensorScatterMul.html) | 第二个输入不能切分,第三个输入的前n-1维(n为第二个输入的维度)不能切分,第三个输入的剩下k个维度(除去前n-1维度)的切分与第一个输入的最后k个切分一致;在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | +| [mindspore.ops.TensorScatterAdd](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.TensorScatterSub.html) | 第二个输入不能切分,第三个输入的前n-1维(n为第二个输入的维度)不能切分,第三个输入的剩下k个维度(除去前n-1维度)的切分与第一个输入的最后k个切分一致;在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | +| [mindspore.ops.TensorScatterUpdate](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.TensorScatterUpdate.html) | 第一个输入前m维度不能切(m为第二个输入indices的最后一维的值indices[-1])第二个输入不能切分,第三个输入的前n-1维(n为第二个输入的维度)不能切分,第三个输入的剩下k个维度(除去前n-1维度)的切分与第一个输入的最后k个切分一致;在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | +| [mindspore.ops.Select](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Select.html) | 在auto_parallel模式下,不支持双递归算法。 | 不支持配置Layout | +| [mindspore.ops.SeLU](https://mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.SeLU.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Sigmoid](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Sigmoid.html) | 无 | 支持配置输入的Layout,不支持配置输出的Layout | +| [mindspore.ops.SigmoidCrossEntropyWithLogits](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.SigmoidCrossEntropyWithLogits.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Sign](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Sign.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Sin](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Sin.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Sinh](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Sinh.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Softmax](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Softmax.html) | 输入(logits)在轴(axis)对应的维度不可切分,切分后,在数学逻辑上和单机不等价 | 支持配置输入的Layout,不支持配置输出的Layout,并且不能在轴(axis)对应的维度配置Layout | +| [mindspore.ops.SoftmaxCrossEntropyWithLogits](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.SoftmaxCrossEntropyWithLogits.html) | 输入(logits、labels)的最后一维不能切分;有两个输出,正向的loss只支持取[0] | 不支持配置Layout | +| [mindspore.ops.Softplus](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Softplus.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Softsign](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Softsign.html) | 无 | 不支持配置Layout | +| [mindspore.ops.SoftShrink](https://mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.SoftShrink.html) | 无 | 不支持配置Layout | +| [mindspore.ops.SparseGatherV2](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.SparseGatherV2.html) | 同Gather | 不支持配置Layout | +| [mindspore.ops.Split](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Split.html) | 轴(axis)所对应的维度不能切分,切分后,在数学逻辑上和单机不等价 | 支持配置Layout,并且不能在轴(axis)所对应的维度配置 | +| 
[mindspore.ops.Sqrt](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Sqrt.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Square](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Square.html) | 无 | 不支持配置Layout | +| [mindspore.ops.SquaredDifference](https://mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.SquaredDifference.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Squeeze](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Squeeze.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Stack](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Stack.html) | 无 | 不支持配置Layout | +| [mindspore.ops.StridedSlice](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.StridedSlice.html) | 仅支持值为全0的mask;需要切分的维度必须全部提取;输入在strides不为1对应的维度不支持切分 | 不支持配置Layout | +| [mindspore.ops.Slice](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Slice.html) | 需要切分的维度必须全部提取 | 不支持配置Layout | +| [mindspore.ops.Sub](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Sub.html) | 无 | 支持配置Layout,输入的Layout 需要相同或能广播,不支持配置输出的Layout | +| [mindspore.ops.Tan](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Tan.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Tanh](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Tanh.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Tile](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Tile.html) | 仅支持对dims配置切分策略 | 支持配置输入与输出的Layout,dim (复制次数) 为1的维度,输入与输出中此维度切分策略需相同;dim>1的维度,输入中此维度不允许切分以防止复制后数据乱序,输出中对应dim需要能被切分数整除 | +| [mindspore.ops.TopK](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.TopK.html) | 最后一维不支持切分,切分后,在数学逻辑上和单机不等价 | 不支持配置Layout | +| [mindspore.ops.Transpose](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Transpose.html) | 无 | 支持配置Layout,不支持配置输出的Layout | +| [mindspore.ops.TruncateDiv](https://mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.TruncateDiv.html) | 无 | 不支持配置Layout | +| [mindspore.ops.TruncateMod](https://mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.TruncateMod.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Unique](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Unique.html) | 只支持重复计算的策略(1,) | 不支持配置Layout | +| [mindspore.ops.UnsortedSegmentSum](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.UnsortedSegmentSum.html) | 输入input_x和segment_ids的切分配置必须在segment_ids的维度上保持一致 | 不支持配置Layout | +| [mindspore.ops.UnsortedSegmentMin](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.UnsortedSegmentMin.html) | 输入input_x和segment_ids的切分配置必须在segment_ids的维度上保持一致。注意:在segment id为空时,输出向量的对应位置会填充为输入类型的最大值。需要用户进行掩码处理,将最大值转换成0。否则容易造成数值溢出,导致通信API上溢错误,从而引发Run Task Error | 不支持配置Layout | +| [mindspore.ops.UnsortedSegmentMax](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.UnsortedSegmentMax.html) | 输入input_x和segment_ids的切分配置必须在segment_ids的维度上保持一致。注意:在segment id为空时,输出向量的对应位置会填充为输入类型的最小值。需要用户进行掩码处理,将最小值转换成0。否则容易造成数值溢出,导致通信API上溢错误,从而引发Run Task Error | 不支持配置Layout | +| [mindspore.ops.Xdivy](https://mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Xdivy.html) | 无 | 不支持配置Layout | +| [mindspore.ops.Xlogy](https://mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Xlogy.html) | 无 | 不支持配置Layout | +| 
[mindspore.ops.ZerosLike](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ZerosLike.html) | 无 | 不支持配置Layout | > 重复计算是指,机器没有用满,比如:集群有8张卡跑分布式训练,切分策略只对输入切成了4份。这种情况下会发生重复计算。 diff --git a/docs/mindspore/source_zh_cn/faq/data_processing.md b/docs/mindspore/source_zh_cn/faq/data_processing.md index 49e9179d72..a39fbcdc92 100644 --- a/docs/mindspore/source_zh_cn/faq/data_processing.md +++ b/docs/mindspore/source_zh_cn/faq/data_processing.md @@ -1,10 +1,10 @@ # 数据处理 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/faq/data_processing.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/faq/data_processing.md) ## Q: 请问如果不使用高阶API,怎么实现数据下沉? -A: 可以参考此手动下沉方式的[test_tdt_data_transfer.py](https://gitee.com/mindspore/mindspore/blob/master/tests/st/data_transfer/test_tdt_data_transfer.py)示例实现,不用借助`model.train`接口,目前支持:GPU和Ascend硬件使用。 +A: 可以参考此手动下沉方式的[test_tdt_data_transfer.py](https://gitee.com/mindspore/mindspore/blob/br_base/tests/st/data_transfer/test_tdt_data_transfer.py)示例实现,不用借助`model.train`接口,目前支持:GPU和Ascend硬件使用。
@@ -38,7 +38,7 @@ A: 可以参考如下几个步骤来降低CPU占用,进一步提升性能, ## Q: 在`GeneratorDataset`中,看到有参数`shuffle`,在跑任务时发现`shuffle=True`和`shuffle=False`,两者没有区别,这是为什么? -A: 开启`shuffle`,需要传入的`Dataset`是支持随机访问的(例如自定义的`Dataset`有`getitem`方法),如果是在自定义的`Dataset`里面通过`yeild`方式返回回来的数据,是不支持随机访问的,具体可查看[GeneratorDataset 示例](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.GeneratorDataset.html)章节。 +A: 开启`shuffle`,需要传入的`Dataset`是支持随机访问的(例如自定义的`Dataset`有`getitem`方法),如果是在自定义的`Dataset`里面通过`yield`方式返回的数据,是不支持随机访问的,具体可查看[GeneratorDataset 示例](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.GeneratorDataset.html)章节。
@@ -77,7 +77,7 @@ A: 推荐使用`c_transforms`,因为纯C层执行,所以性能会更好。 原理:`c_transform`底层使用的是C版本`opencv/jpeg-turbo`进行的数据处理,`py_transform`使用的是Python版本的`Pillow`进行数据处理。 -在MindSpore1.8开始,数据增强API进行了合并,用户无需显式感知`c_transforms`和`py_transforms`,MindSpore将根据传入数据增强API的数据类型决定使用何种后端,默认使用`c_transforms`,因其性能更佳。详细可以参考[最新API文档与import说明](https://gitee.com/mindspore/mindspore/blob/master/docs/api/api_python/mindspore.dataset.transforms.rst#视觉)。 +在MindSpore1.8开始,数据增强API进行了合并,用户无需显式感知`c_transforms`和`py_transforms`,MindSpore将根据传入数据增强API的数据类型决定使用何种后端,默认使用`c_transforms`,因其性能更佳。详细可以参考[最新API文档与import说明](https://gitee.com/mindspore/mindspore/blob/br_base/docs/api/api_python/mindspore.dataset.transforms.rst#视觉)。
@@ -160,7 +160,7 @@ A: 你可以参考yolov3对于此场景的使用,里面有对于图像的不 A: [build_seg_data.py](https://gitee.com/mindspore/models/blob/master/research/cv/FCN8s/src/data/build_seg_data.py)是将数据集生成MindRecord的脚本,可以直接使用/适配下你的数据集。或者如果你想尝试自己实现数据集的读取,可以使用`GeneratorDataset`自定义数据集加载。 -[GeneratorDataset 示例](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.GeneratorDataset.html) +[GeneratorDataset 示例](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.GeneratorDataset.html)
@@ -189,7 +189,7 @@ ds.GeneratorDataset(..., num_shards=8, shard_id=7, ...) A: 数据Schema可以按如下方式定义: `cv_schema_json = {"label": {"type": "int32", "shape": [-1]}, "data": {"type": "bytes"}}` 说明: label是一个数组,numpy类型,这里面可以存 1,1,0,1,0,1 这么多label值,这些label值对应同一个data,即: 同一个图像的二进制值。 -可以参考[将数据集转换为MindRecord](https://www.mindspore.cn/tutorials/zh-CN/master/dataset/record.html#转换成record格式)教程。 +可以参考[将数据集转换为MindRecord](https://www.mindspore.cn/tutorials/zh-CN/br_base/dataset/record.html#转换成record格式)教程。
@@ -201,7 +201,7 @@ A: 首先MindSpore训练使用的灰度图MNIST数据集。所以模型使用时 ## Q: MindSpore设计了专门用于数据处理的框架,有相关的设计和用法介绍? -A: MindSpore Dataset模块使得用户很简便地定义数据预处理Pipeline,并以高效(多进程/多线程)的方式处理数据集中样本,同时MindSpore Dataset也提供了多样化的API加载和处理数据集,详细介绍请参阅[数据处理Pipeline介绍](https://mindspore.cn/docs/zh-CN/master/api_python/mindspore.dataset.loading.html#%E6%95%B0%E6%8D%AE%E5%A4%84%E7%90%86pipeline%E4%BB%8B%E7%BB%8D)。如果想进一步对数据处理Pipeline进行性能调优,请参阅[数据处理性能优化](https://www.mindspore.cn/tutorials/zh-CN/master/dataset/optimize.html)。 +A: MindSpore Dataset模块使得用户很简便地定义数据预处理Pipeline,并以高效(多进程/多线程)的方式处理数据集中样本,同时MindSpore Dataset也提供了多样化的API加载和处理数据集,详细介绍请参阅[数据处理Pipeline介绍](https://mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.dataset.loading.html#%E6%95%B0%E6%8D%AE%E5%A4%84%E7%90%86pipeline%E4%BB%8B%E7%BB%8D)。如果想进一步对数据处理Pipeline进行性能调优,请参阅[数据处理性能优化](https://www.mindspore.cn/tutorials/zh-CN/br_base/dataset/optimize.html)。
@@ -213,7 +213,7 @@ A: 首先上述报错指的是通过训练数据下发通道(TDT,train data 2. **在图编译阶段,训练还没开始报错时**(例如日志中还没打印loss),请先检查下报错(ERROR)日志中是否有网络中涉及的相关算子报错或涉及环境没配置好导致的报错(如hccl.json不对导致多卡通信初始化异常)。 -3. **在中间训练过程中报错时**,通常为下发的数据量(batch数)与网络训练需要的数据量(step数)不匹配导致的,可以通过[get_dataset_size](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/dataset_method/attribute/mindspore.dataset.Dataset.get_dataset_size.html)接口打印一个epoch中包含的batch数,导致异常的部分可能原因如下: +3. **在中间训练过程中报错时**,通常为下发的数据量(batch数)与网络训练需要的数据量(step数)不匹配导致的,可以通过[get_dataset_size](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/dataset_method/attribute/mindspore.dataset.Dataset.get_dataset_size.html)接口打印一个epoch中包含的batch数,导致异常的部分可能原因如下: - 通过查看打印loss次数的等方式判断如果数据量(step数)刚好为一个epoch中batch数的整数倍,则可能是数据处理部分涉及epoch的处理存在问题,如下面这场景: @@ -313,7 +313,7 @@ dataset3 = dataset2.map(***) ## Q: MindSpore中和DataLoader对应的接口是什么? -A:如果将DataLoader考虑为接收自定义Dataset的API接口,MindSpore数据处理API中和Dataloader较为相似的是GeneratorDataset,可接收用户自定义的Dataset,具体使用方式参考[GeneratorDataset 示例](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.GeneratorDataset.html),差异对比也可查看[API算子映射表](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html)。 +A:如果将DataLoader考虑为接收自定义Dataset的API接口,MindSpore数据处理API中和Dataloader较为相似的是GeneratorDataset,可接收用户自定义的Dataset,具体使用方式参考[GeneratorDataset 示例](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.GeneratorDataset.html),差异对比也可查看[API算子映射表](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html)。
@@ -332,7 +332,7 @@ for item in Dataset: ## Q: 数据处理操作与网络计算算子能否混合使用? A:通常数据处理操作与网络计算算子混合使用会导致性能有所降低,在缺少对应的数据处理操作且自定义Python操作不合适时可进行尝试。需要注意的是,因为二者需要的输入不一致,数据处理操作通常输入为numpy array 或 PIL Image,但网络计算算子输入需要是MindSpore.Tensor; -将二者混合使用需要使上一个的输出格式和下一个所需的输入格式一致。数据处理操作指的是官网API文档中[mindspore.dataset](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.dataset.html)模块下的接口,如 [mindspore.dataset.vision.CenterCrop](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset_vision/mindspore.dataset.vision.CenterCrop.html),网络计算算子包含 mindspore.nn、 mindspore.ops等模块下的算子。 +将二者混合使用需要使上一个的输出格式和下一个所需的输入格式一致。数据处理操作指的是官网API文档中[mindspore.dataset](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.dataset.html)模块下的接口,如 [mindspore.dataset.vision.CenterCrop](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset_vision/mindspore.dataset.vision.CenterCrop.html),网络计算算子包含 mindspore.nn、 mindspore.ops等模块下的算子。
@@ -495,7 +495,7 @@ A: 在使用数据下沉模式(此时 `数据预处理` -> `发送队列` -> ` 2022-05-09-11:36:01.893.412 -> 2022-05-09-11:36:02.006.771 ``` - 改进方法:查看最后一条 `push_end_time` 时间与GetNext报错时间,如果超过默认GetNext超时时间(默认:1900s,且可通过 `mindspore.device_context.ascend.op_debug.execute_timeout(xx)`来进行修改),说明数据预处理性能差,可参考 [数据处理性能优化](https://www.mindspore.cn/tutorials/zh-CN/master/dataset/optimize.html) 对数据预处理部分进行优化。 + 改进方法:查看最后一条 `push_end_time` 时间与GetNext报错时间,如果超过默认GetNext超时时间(默认:1900s,且可通过 `mindspore.device_context.ascend.op_debug.execute_timeout(xx)`来进行修改),说明数据预处理性能差,可参考 [数据处理性能优化](https://www.mindspore.cn/tutorials/zh-CN/br_base/dataset/optimize.html) 对数据预处理部分进行优化。 4. 当日志输出类似如下时,表示数据预处理产生了182条数据,正在向设备发送第183条数据,并且 `device_queue` 显示设备侧有充足的数据缓存。 @@ -545,7 +545,7 @@ A: 在使用数据下沉模式(此时 `数据预处理` -> `发送队列` -> ` 2022-05-09-14:31:04.064.571 -> ``` - 改进方法:可参考 [数据处理性能优化](https://www.mindspore.cn/tutorials/zh-CN/master/dataset/optimize.html) 对数据预处理部分进行优化。 + 改进方法:可参考 [数据处理性能优化](https://www.mindspore.cn/tutorials/zh-CN/br_base/dataset/optimize.html) 对数据预处理部分进行优化。
diff --git a/docs/mindspore/source_zh_cn/faq/distributed_parallel.md b/docs/mindspore/source_zh_cn/faq/distributed_parallel.md index a5f59d81bf..ef98f53be9 100644 --- a/docs/mindspore/source_zh_cn/faq/distributed_parallel.md +++ b/docs/mindspore/source_zh_cn/faq/distributed_parallel.md @@ -1,6 +1,6 @@ # 分布式并行 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/faq/distributed_parallel.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/faq/distributed_parallel.md) ## Q: 进行HCCL分布式训练出错:`Init plugin so failed, ret = 1343225860`,该如何处理? @@ -49,7 +49,7 @@ A:此场景下,异常进程由于各种问题退出,其余进程由于GPU [CRITICAL] DISTRIBUTED [mindspore/ccsrc/distributed/cluster/cluster_context.cc:130] InitNodeRole] Role name is invalid... ``` -A:在用户不使用`mpirun`启动进程,但是依然调用了`init()`方法的情况下,MindSpore要求用户按照[动态组网启动方式](https://www.mindspore.cn/tutorials/zh-CN/master/parallel/dynamic_cluster.html)配置若干环境变量并进行校验,若没有配置,MindSpore会给出以上报错提示。因此建议只有在执行分布式训练时调用`mindspore.communication.init`,并在不使用`mpirun`的场景下,根据文档配置正确的环境变量以启动分布式训练。 +A:在用户不使用`mpirun`启动进程,但是依然调用了`init()`方法的情况下,MindSpore要求用户按照[动态组网启动方式](https://www.mindspore.cn/tutorials/zh-CN/br_base/parallel/dynamic_cluster.html)配置若干环境变量并进行校验,若没有配置,MindSpore会给出以上报错提示。因此建议只有在执行分布式训练时调用`mindspore.communication.init`,并在不使用`mpirun`的场景下,根据文档配置正确的环境变量以启动分布式训练。
diff --git a/docs/mindspore/source_zh_cn/faq/feature_advice.md b/docs/mindspore/source_zh_cn/faq/feature_advice.md index 948836ac4d..44bcf4e325 100644 --- a/docs/mindspore/source_zh_cn/faq/feature_advice.md +++ b/docs/mindspore/source_zh_cn/faq/feature_advice.md @@ -1,6 +1,6 @@ # 特性咨询 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/faq/feature_advice.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/faq/feature_advice.md) ## Q: 导出MindIR格式的时候,`input=np.random.uniform(...)`是不是固定格式? @@ -10,7 +10,7 @@ A: 不是固定格式的,这一步操作是为了创建一个输入,以便 ## Q: MindSpore现支持直接读取哪些其他框架的模型和哪些格式呢?比如PyTorch下训练得到的pth模型可以加载到MindSpore框架下使用吗? -A: MindSpore采用Protobuf存储训练参数,无法直接读取其他框架的模型。对于模型文件本质保存的就是参数和对应的值,可以用其他框架的API将参数读取出来之后,拿到参数的键值对,然后再加载到MindSpore中使用。比如想用其他框架训练好的ckpt文件,可以先把参数读取出来,再调用MindSpore的[save_checkpoint](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.save_checkpoint.html)接口,就可以保存成MindSpore可以读取的ckpt文件格式了。 +A: MindSpore采用Protobuf存储训练参数,无法直接读取其他框架的模型。对于模型文件本质保存的就是参数和对应的值,可以用其他框架的API将参数读取出来之后,拿到参数的键值对,然后再加载到MindSpore中使用。比如想用其他框架训练好的ckpt文件,可以先把参数读取出来,再调用MindSpore的[save_checkpoint](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.save_checkpoint.html)接口,就可以保存成MindSpore可以读取的ckpt文件格式了。
diff --git a/docs/mindspore/source_zh_cn/faq/implement_problem.md b/docs/mindspore/source_zh_cn/faq/implement_problem.md index 077c45c60a..a7fbaca5c9 100644 --- a/docs/mindspore/source_zh_cn/faq/implement_problem.md +++ b/docs/mindspore/source_zh_cn/faq/implement_problem.md @@ -1,6 +1,6 @@ # 执行问题 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/faq/implement_problem.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/faq/implement_problem.md) ## Q: 请问使用MindSpore如何实现多尺度训练? @@ -16,7 +16,7 @@ A: 在PyNative模式下,如果中间使用`numpy`计算,会导致梯度传 ## Q: 请问怎样实现类似`torch.nn.functional.linear()`那样能够对全连接层`weight`、`bias`进行修改,应该如何操作? -A: MindSpore与`torch.nn.functional.linear()`功能最接近的接口就是[nn.Dense](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.Dense.html)了。`nn.Dense`能指定`weight`和`bias`的初始值,后续的变化是由优化器自动更新的。训练过程中,用户不需要主动修改这两个参数的值。 +A: MindSpore与`torch.nn.functional.linear()`功能最接近的接口就是[nn.Dense](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.Dense.html)了。`nn.Dense`能指定`weight`和`bias`的初始值,后续的变化是由优化器自动更新的。训练过程中,用户不需要主动修改这两个参数的值。
@@ -52,7 +52,7 @@ A: 在`eval`阶段主要看需要什么,比如图像分类任务`eval`网络 net.set_train(False) ``` -在eval阶段不需要优化器,但是需要使用MindSpore的[model.eval](https://www.mindspore.cn/docs/zh-CN/master/api_python/train/mindspore.train.Model.html#mindspore.train.Model.eval)接口的话需要配置一下`loss function`,如: +在eval阶段不需要优化器,但是需要使用MindSpore的[model.eval](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/train/mindspore.train.Model.html#mindspore.train.Model.eval)接口的话需要配置一下`loss function`,如: ```python # 定义模型 @@ -65,7 +65,7 @@ res = model.eval(dataset) ## Q: 如何使用SGD里的`param_group`来实现学习率的衰减? -A: 如果需要按照`epoch`来变化,可以使用[Dynamic LR](https://mindspore.cn/docs/zh-CN/master/api_python/mindspore.nn.html#dynamic-lr函数),把其中的`step_per_epoch`设置成`step_size`,如果需要按照`step`来变化,可以把其中的`step_per_epoch`设置成1,也可以用[LearningRateSchedule](https://mindspore.cn/docs/zh-CN/master/api_python/mindspore.nn.html#learningrateschedule类)。 +A: 如果需要按照`epoch`来变化,可以使用[Dynamic LR](https://mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.nn.html#dynamic-lr函数),把其中的`step_per_epoch`设置成`step_size`,如果需要按照`step`来变化,可以把其中的`step_per_epoch`设置成1,也可以用[LearningRateSchedule](https://mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.nn.html#learningrateschedule类)。
@@ -123,7 +123,7 @@ model.train(epoch_size, ds_train, callbacks=[stop_cb]) ## Q: 使用`nn.Conv2d`时,怎样获取期望大小的`feature map`? -A: `Conv2d shape`推导方法可以[参考这里](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.Conv2d.html#mindspore.nn.Conv2d),`Conv2d`的`pad_mode`改成`same`,或者可以根据`Conv2d shape`推导公式自行计算`pad`,想要使得`shape`不变,一般pad为`(kernel_size-1)//2`。 +A: `Conv2d shape`推导方法可以[参考这里](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.Conv2d.html#mindspore.nn.Conv2d),`Conv2d`的`pad_mode`改成`same`,或者可以根据`Conv2d shape`推导公式自行计算`pad`,想要使得`shape`不变,一般pad为`(kernel_size-1)//2`。
@@ -143,7 +143,7 @@ model = ms.train.Model(net=train_net, loss_fn=None, optimizer=None) ## Q: MindSpore如何实现早停功能? -A:可以使用[EarlyStopping 方法](https://www.mindspore.cn/docs/zh-CN/master/api_python/train/mindspore.train.EarlyStopping.html)。 +A:可以使用[EarlyStopping 方法](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/train/mindspore.train.EarlyStopping.html)。
@@ -243,7 +243,7 @@ print(network.layers) ## Q: 使用MindSpore进行模型训练时,`CTCLoss`的输入参数有四个: `inputs`、`labels_indices`、`labels_values`、`sequence_length`,如何使用`CTCLoss`进行训练? -A: 定义的`model.train`接口里接收的`dataset`可以是多个数据组成,形如(`data1`、`data2`、`data3`...),所以`dataset`是可以包含`inputs`、`labels_indices`、`labels_values`、`sequence_length`的信息的。只需要定义好相应形式的`dataset`,传入`model.train`里就可以。具体的可以了解下相应的[数据处理接口](https://www.mindspore.cn/docs/zh-CN/master/features/index.html)。 +A: 定义的`model.train`接口里接收的`dataset`可以是多个数据组成,形如(`data1`、`data2`、`data3`...),所以`dataset`是可以包含`inputs`、`labels_indices`、`labels_values`、`sequence_length`的信息的。只需要定义好相应形式的`dataset`,传入`model.train`里就可以。具体的可以了解下相应的[数据处理接口](https://www.mindspore.cn/docs/zh-CN/br_base/features/index.html)。
@@ -491,7 +491,7 @@ tsd client wait response fail, device response code[1]. unknown device error.[F ## Q: 如何控制`print`方法打印出的Tensor值? -A: 在PyNative动态图模式下,可以使用numpy原生方法如`set_printoptions`对输出的值进行控制。在Graph静态图模式下,因为`print`方法需要转化成为算子,所以暂时无法对输出的值进行控制。print算子具体用法可[参考](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Print.html)。 +A: 在PyNative动态图模式下,可以使用numpy原生方法如`set_printoptions`对输出的值进行控制。在Graph静态图模式下,因为`print`方法需要转化成为算子,所以暂时无法对输出的值进行控制。print算子具体用法可[参考](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Print.html)。
diff --git a/docs/mindspore/source_zh_cn/faq/inference.md b/docs/mindspore/source_zh_cn/faq/inference.md index ac143f4adb..7a1e31a507 100644 --- a/docs/mindspore/source_zh_cn/faq/inference.md +++ b/docs/mindspore/source_zh_cn/faq/inference.md @@ -1,6 +1,6 @@ # 推理 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/faq/inference.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/faq/inference.md) ## Q: 原先基于MindSpore安装包进行Atlas 200/300/500推理产品推理,新版本MindSpore发布包不支持Atlas 200/300/500推理产品平台的推理?如何使用Atlas 200/300/500推理产品进行推理?(MindSpore Atlas 200/300/500推理产品推理功能发布包变更说明) diff --git a/docs/mindspore/source_zh_cn/faq/installation.md b/docs/mindspore/source_zh_cn/faq/installation.md index 16bc671e85..2141a5acd8 100644 --- a/docs/mindspore/source_zh_cn/faq/installation.md +++ b/docs/mindspore/source_zh_cn/faq/installation.md @@ -1,6 +1,6 @@ # 安装 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/faq/installation.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/faq/installation.md) ## Pip安装 diff --git a/docs/mindspore/source_zh_cn/faq/network_compilation.md b/docs/mindspore/source_zh_cn/faq/network_compilation.md index 739c48f381..2d9673eac3 100644 --- a/docs/mindspore/source_zh_cn/faq/network_compilation.md +++ b/docs/mindspore/source_zh_cn/faq/network_compilation.md @@ -1,16 +1,16 @@ # 网络编译 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/faq/network_compilation.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/faq/network_compilation.md) ## Q: 静态图模式支持的语法集合是什么? -A: 静态图模式能够支持覆盖Python常用语法子集,以支持神经网络的构建和训练,部分Python语法暂不支持。具体支持的语法集合,请参考[静态图语法支持](https://www.mindspore.cn/tutorials/zh-CN/master/compile/static_graph.html)。静态图模式提供了JIT语法支持级别选项,便于用户选择是否扩展静态图语法,对于一些网络场景,推荐使用基础语法(nn/ops等)而非扩展语法(例如numpy三方库)。此外,推荐使用 [静态图高级编程技巧](https://www.mindspore.cn/tutorials/zh-CN/master/compile/static_graph_expert_programming.html) 优化编译性能。 +A: 静态图模式能够支持覆盖Python常用语法子集,以支持神经网络的构建和训练,部分Python语法暂不支持。具体支持的语法集合,请参考[静态图语法支持](https://www.mindspore.cn/tutorials/zh-CN/br_base/compile/static_graph.html)。静态图模式提供了JIT语法支持级别选项,便于用户选择是否扩展静态图语法,对于一些网络场景,推荐使用基础语法(nn/ops等)而非扩展语法(例如numpy三方库)。此外,推荐使用 [静态图高级编程技巧](https://www.mindspore.cn/tutorials/zh-CN/br_base/compile/static_graph_expert_programming.html) 优化编译性能。
## Q: 编译时报错'self.xx' should be initialized as a 'Parameter' type in the '`__init__`' function怎么办? -A: 在 `construct` 函数内,如果想对类成员 `self.xx` 赋值,那么 `self.xx` 必须已经在 `__init__` 函数中被定义为 [Parameter]() 类型,其他类型则不支持。局部变量 `xx` 不受这个限制。 +A: 在 `construct` 函数内,如果想对类成员 `self.xx` 赋值,那么 `self.xx` 必须已经在 `__init__` 函数中被定义为 [Parameter]() 类型,其他类型则不支持。局部变量 `xx` 不受这个限制。
@@ -298,7 +298,7 @@ A: 首先检查导出参数和导入执行的参数个数是否是匹配的。 因为导出数据输入为非Tensor时,该导出的输入将会变成常量固化到MindIR中,使MindIR中的输入要少于网络构建的Construct入参。 -如果是标量类型,可以将标量转成Tensor类型导出。如果是Tuple或者List类型,可以使用[mutable](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.mutable.html)接口进行包装后及进行导出。 +如果是标量类型,可以将标量转成Tensor类型导出。如果是Tuple或者List类型,可以使用[mutable](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.mutable.html)接口进行包装后再进行导出。
@@ -531,7 +531,7 @@ net = Net() out = net(Tensor(x)) ``` -3) 自定义类中调用了使用@jit装饰器修饰的函数,将会报错。这种场景建议将网络中的自定义类加上@jit_class装饰器,避免使用JIT Fallback特性。自定义类的更多使用可参考[自定义类的使用](https://www.mindspore.cn/tutorials/zh-CN/master/compile/static_graph.html#支持自定义类的使用)。jit_class装饰器的使用可参考[使用jit_class](https://www.mindspore.cn/tutorials/zh-CN/master/compile/static_graph_expert_programming.html#使用jit-class)。 +3) 自定义类中调用了使用@jit装饰器修饰的函数,将会报错。这种场景建议将网络中的自定义类加上@jit_class装饰器,避免使用JIT Fallback特性。自定义类的更多使用可参考[自定义类的使用](https://www.mindspore.cn/tutorials/zh-CN/br_base/compile/static_graph.html#支持自定义类的使用)。jit_class装饰器的使用可参考[使用jit_class](https://www.mindspore.cn/tutorials/zh-CN/br_base/compile/static_graph_expert_programming.html#使用jit-class)。 ```python import mindspore as ms @@ -766,13 +766,13 @@ A: 以下场景会触发重新编译: - Tuple或List的长度发生改变。 -- 网络的输入是tuple[Tensor]、list[Tensor]或Dict[Tensor],即使里面Tensor的shape和dtype没有发生变化。详情请参考 [mutable](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.mutable.html)。 +- 网络的输入是tuple[Tensor]、list[Tensor]或Dict[Tensor],即使里面Tensor的shape和dtype没有发生变化。详情请参考 [mutable](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.mutable.html)。
## Q: 静态图模式如何判断有几张图?什么情况会切分子图?多子图有什么影响?如何避免出现多子图? -A: 1、子图数量可以通过查看IR文件并搜索"Total subgraphs"获取。关于如何查看分析IR文件,请参考 [IR文件分析](https://www.mindspore.cn/tutorials/zh-CN/master/debug/error_analysis/mindir.html)。 +A: 1、子图数量可以通过查看IR文件并搜索"Total subgraphs"获取。关于如何查看分析IR文件,请参考 [IR文件分析](https://www.mindspore.cn/tutorials/zh-CN/br_base/debug/error_analysis/mindir.html)。 2、图模式切分子图,常见于控制流场景,如if/while等。除了用户手动编写,MindSpore框架内部实现的控制流语法也可能会切分出多张子图。 diff --git a/docs/mindspore/source_zh_cn/faq/operators_compile.md b/docs/mindspore/source_zh_cn/faq/operators_compile.md index a54fa17a6b..a7557cbc7a 100644 --- a/docs/mindspore/source_zh_cn/faq/operators_compile.md +++ b/docs/mindspore/source_zh_cn/faq/operators_compile.md @@ -1,10 +1,10 @@ # 算子编译 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/faq/operators_compile.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/faq/operators_compile.md) ## Q: 在使用`ops.concat`算子时,因为数据规模有点大,导致报错`Error:Input and (output + workspace) num should <=192!`,可以怎么处理? -A: 这种报错,主要为[ops.concat](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.concat.html)算子提示`shape`过大。建议对`dataset`对象创建迭代器时可设置输出为`numpy`, 如下设置: +A: 这种报错,主要为[ops.concat](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.concat.html)算子提示`shape`过大。建议对`dataset`对象创建迭代器时可设置输出为`numpy`, 如下设置: ```python gallaryloader.create_dict_iterator(output_numpy=True) @@ -16,7 +16,7 @@ gallaryloader.create_dict_iterator(output_numpy=True) ## Q: 请问在静态图模式的`construct`函数里,如何把一个`tensor`中所含有的负数值全部去除掉? -A: 建议使用[ops.clip_by_value](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.clip_by_value.html)接口,把负数全变成0来进行计算。 +A: 建议使用[ops.clip_by_value](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.clip_by_value.html)接口,把负数全变成0来进行计算。
@@ -34,19 +34,19 @@ A: 这个昇腾算子底层规格限制一次拼接的Tensor个数不能超过19 ## Q: 在使用`Conv2D`进行卷积定义的时候使用到了`group`的参数,`group`的值不是只需要保证可以被输入输出的维度整除即可了吗?`group`参数的传递方式是怎样的呢? -A: `Conv2D`算子是有这个约束条件的: 当`group`大于1 时,其值必须要与输入输出的通道数相等。不要使用[ops.Conv2D](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Conv2D.html),这个算子目前不支持`group`>1。目前MindSpore只有[nn.Conv2D](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.Conv2d.html)接口支持组卷积,但是有`group`要与输入输出的通道数相等的约束。 +A: `Conv2D`算子是有这个约束条件的: 当`group`大于1 时,其值必须要与输入输出的通道数相等。不要使用[ops.Conv2D](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Conv2D.html),这个算子目前不支持`group`>1。目前MindSpore只有[nn.Conv2D](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.Conv2d.html)接口支持组卷积,但是有`group`要与输入输出的通道数相等的约束。
## Q: MindSpore支持矩阵转置吗? -A: 支持,请参考`mindspore.ops.Transpose`的[算子教程](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Transpose.html#mindspore.ops.Transpose)。 +A: 支持,请参考`mindspore.ops.Transpose`的[算子教程](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Transpose.html#mindspore.ops.Transpose)。
## Q: 请问MindSpore能算给定任意一个`tensor`的方差吗? -A: 可以使用mindspore.Tensor.var接口计算Tensor的方差,你可以参考[mindspore.Tensor.var(axis=None, ddof=0, keepdims=False)](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.var.html#mindspore.Tensor.var)来实现。 +A: 可以使用mindspore.Tensor.var接口计算Tensor的方差,你可以参考[mindspore.Tensor.var(axis=None, ddof=0, keepdims=False)](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.var.html#mindspore.Tensor.var)来实现。
@@ -59,7 +59,7 @@ A: 在PyTorch中`padding_idx`的作用是将embedding矩阵中`padding_idx`位 ## Q: Operations中`Tile`算子执行到`__infer__`时`value`值为`None`,丢失了数值是怎么回事? A: `Tile`算子的`multiples input`必须是一个常量(该值不能直接或间接来自于图的输入)。否则构图的时候会拿到一个`None`的数据,因为图的输入是在图执行的时候才传下去的,构图的时候拿不到图的输入数据。 -相关的资料可以看[静态图语法支持](https://www.mindspore.cn/tutorials/zh-CN/master/compile/static_graph.html)。 +相关的资料可以看[静态图语法支持](https://www.mindspore.cn/tutorials/zh-CN/br_base/compile/static_graph.html)。
@@ -71,7 +71,7 @@ A: TBE(Tensor Boost Engine)算子是华为自研的Ascend算子开发工具, ## Q: 请问MindSpore实现了反池化操作了吗?类似于`nn.MaxUnpool2d` 这个反池化操作? -A: 目前 MindSpore 还没有反池化相关的接口。用户可以通过自定义算子的方式自行开发算子,详情请见[自定义算子](https://www.mindspore.cn/tutorials/zh-CN/master/custom_program/op_custom.html)。 +A: 目前 MindSpore 还没有反池化相关的接口。用户可以通过自定义算子的方式自行开发算子,详情请见[自定义算子](https://www.mindspore.cn/tutorials/zh-CN/br_base/custom_program/op_custom.html)。
@@ -81,7 +81,7 @@ A: 遇到这种情况, 1. 看一下这些算子是否为融合算子。因为算子预编译可能会改变算子的fusion_type属性,而该属性会影响算子的融合,导致原本不应该融合的小算子融合成了大算子,这些融合出来的大算子性能不一定比小算子性能好。 -2. 其次,如果排除了上述融合算子的影响,可以尝试使用环境变量`MS_COMPILER_OP_LEVEL`来生成算子编译的debug调试信息,然后找算子开发人员根据这些调试信息进一步定位,具体配置信息可以参考[环境变量](https://www.mindspore.cn/docs/zh-CN/master/api_python/env_var_list.html)。 +2. 其次,如果排除了上述融合算子的影响,可以尝试使用环境变量`MS_COMPILER_OP_LEVEL`来生成算子编译的debug调试信息,然后找算子开发人员根据这些调试信息进一步定位,具体配置信息可以参考[环境变量](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/env_var_list.html)。
@@ -112,13 +112,13 @@ A: Ascend后端,算子有AI CORE算子和AI CPU算子之分,部分算子AI C 1. 如果`AI CORE`候选算子信息为空,则可能是在算子`check support`阶段,所有的算子信息均校验未通过。可以在日志中搜索关键字`CheckSupport`找到未通过的原因,根据具体信息修改shape或data type,或者找开发人员进一步定位; 2. 如果`AI CPU`候选算子信息不为空,或者`AI CORE`和`AI CPU`候选算子信息都不为空,则可能是用户给到该算子的输入数据类型不在候选列表中,在选择阶段被过滤掉导致,可以根据候选列表尝试修改该算子的输入data type。 -用户可以参考[官网教程](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/accelerate_with_static_graph.html)选择合适、统一的模式和写法来完成训练。 +用户可以参考[官网教程](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/accelerate_with_static_graph.html)选择合适、统一的模式和写法来完成训练。
## Q: MindSpore的算子输入的类型转换规则是什么?如果输入中存在零维Tensor,是否遵循这个规则? -A: MindSpore的算子输入的类型转换,可以参考[类型转换规则](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.dtype.html)。与PyTorch不同的是,算子输入中存在零维Tensor时,MindSpore同样遵循这一规则。示例代码如下: +A: MindSpore的算子输入的类型转换,可以参考[类型转换规则](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.dtype.html)。与PyTorch不同的是,算子输入中存在零维Tensor时,MindSpore同样遵循这一规则。示例代码如下: ```python import torch diff --git a/docs/mindspore/source_zh_cn/faq/performance_tuning.md b/docs/mindspore/source_zh_cn/faq/performance_tuning.md index c4a4a24a54..6ccbb634a4 100644 --- a/docs/mindspore/source_zh_cn/faq/performance_tuning.md +++ b/docs/mindspore/source_zh_cn/faq/performance_tuning.md @@ -1,11 +1,11 @@ # 性能调优 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/faq/performance_tuning.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/faq/performance_tuning.md) ## Q: MindSpore安装完成,执行训练时发现网络性能异常,权重初始化耗时过长,怎么办? A:可能与环境中使用了`scipy 1.4`系列版本有关,通过`pip list | grep scipy`命令可查看scipy版本,建议改成MindSpore要求的`scipy`版本。版本第三方库依赖可以在`requirement.txt`中查看。 - +
diff --git a/docs/mindspore/source_zh_cn/faq/precision_tuning.md b/docs/mindspore/source_zh_cn/faq/precision_tuning.md index fd8ae23c4f..60fcbadd8f 100644 --- a/docs/mindspore/source_zh_cn/faq/precision_tuning.md +++ b/docs/mindspore/source_zh_cn/faq/precision_tuning.md @@ -1,6 +1,6 @@ # 精度调优 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/faq/precision_tuning.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/faq/precision_tuning.md) ## Q: 导致Loss值不收敛或者精度不达标的原因有哪些呢,应该怎样定位调优? diff --git a/docs/mindspore/source_zh_cn/features/amp.md b/docs/mindspore/source_zh_cn/features/amp.md index 5e51462526..a93010141b 100644 --- a/docs/mindspore/source_zh_cn/features/amp.md +++ b/docs/mindspore/source_zh_cn/features/amp.md @@ -1,6 +1,6 @@ # 自动混合精度 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/features/amp.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/features/amp.md) 混合精度(Mix Precision)训练是指在训练时,对神经网络不同的运算采用不同的数值精度的运算策略。在神经网络运算中,部分运算对数值精度不敏感,此时使用较低精度可以达到明显的加速效果(如conv、matmul等);而部分运算由于输入和输出的数值差异大,通常需要保留较高精度以保证结果的正确性(如log、softmax等)。 @@ -78,4 +78,4 @@ for epoch in range(epochs): loss = train_step(data, label) ``` -关于自动混合精度,更多细节可以参考[amp.auto_mixed_precision](https://www.mindspore.cn/docs/zh-CN/master/api_python/amp/mindspore.amp.auto_mixed_precision.html)。 +关于自动混合精度,更多细节可以参考[amp.auto_mixed_precision](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/amp/mindspore.amp.auto_mixed_precision.html)。 diff --git a/docs/mindspore/source_zh_cn/features/compile/graph_construction.ipynb b/docs/mindspore/source_zh_cn/features/compile/graph_construction.ipynb index a8ed39198a..0627b23e35 100644 --- a/docs/mindspore/source_zh_cn/features/compile/graph_construction.ipynb +++ b/docs/mindspore/source_zh_cn/features/compile/graph_construction.ipynb @@ -6,7 +6,7 @@ "source": [ "# 构图(编译)\n", "\n", - "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/zh_cn/features/compile/mindspore_graph_construction.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/zh_cn/features/compile/mindspore_graph_construction.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/features/compile/graph_construction.ipynb)\n" + "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/zh_cn/features/compile/mindspore_graph_construction.ipynb) 
[![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/zh_cn/features/compile/mindspore_graph_construction.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/features/compile/graph_construction.ipynb)\n" ] }, { @@ -15,7 +15,7 @@ "source": [ "MindSpore提供JIT(just-in-time)技术来进行性能优化。JIT模式会通过AST树解析、Python字节码解析或追踪代码执行的方式,将代码解析为一张中间表示图(IR,intermediate representation)。IR图作为该代码的唯一表示,编译器通过对该IR图的优化,来达到对代码的优化,提高运行性能。与动态图模式相对应,这种JIT的编译模式被称为静态图模式。\n", "\n", - "基于JIT技术,MindSpore提供了动静结合的方法来提高用户的网络的运行效率。动静结合,即在整体运行为动态图的情况下,指定某些代码块以静态图的方式运行。按照静态图方式运行的代码块会采取先编译后执行的运行模式,在编译期对代码进行全局优化,来获取执行期的性能收益。用户可以通过`@jit`装饰器修饰函数,来指定其按照静态图的模式执行。有关`@jit`装饰器的相关文档请见[jit API文档](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.jit.html#mindspore.jit)。\n", + "基于JIT技术,MindSpore提供了动静结合的方法来提高用户的网络的运行效率。动静结合,即在整体运行为动态图的情况下,指定某些代码块以静态图的方式运行。按照静态图方式运行的代码块会采取先编译后执行的运行模式,在编译期对代码进行全局优化,来获取执行期的性能收益。用户可以通过`@jit`装饰器修饰函数,来指定其按照静态图的模式执行。有关`@jit`装饰器的相关文档请见[jit API文档](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.jit.html#mindspore.jit)。\n", "\n", "MindSpore提供了三种JIT编译方式,分别通过ast、bytecode和trace的方式来构图。ast是通过AST树解析的方式,将用户手工标识需要按照ast方式执行的函数转换成静态图。bytecode则是通过对Python字节码的解析,在动态图中尽可能的构建静态图,无法转换为静态图的部分则会按照动态图进行执行,来达到动静结合的目的。trace是通过追踪Python代码执行的轨迹来构建静态图,当前属于实验性质的特性。后续介绍会详细说明三者原理的不同以及各自的特点。\n" ] @@ -26,7 +26,7 @@ "source": [ "## Ast\n", "\n", - "在动态图模式下,用户可以通过`@jit(capture_mode=\"ast\")`装饰器修饰函数来让该函数以ast方式来执行。用ast方式修饰的函数,其内部使用的语法以及数据结构需要遵守静态图语法规范[静态图语法规范](https://www.mindspore.cn/tutorials/zh-CN/master/compile/static_graph.html)。ast方式通过源到源的方式来编译Python代码,先把模型定义的Python源码解析成抽象语法树,然后把抽象语法树解析为MindIR。例如下面的Python代码:\n", + "在动态图模式下,用户可以通过`@jit(capture_mode=\"ast\")`装饰器修饰函数来让该函数以ast方式来执行。用ast方式修饰的函数,其内部使用的语法以及数据结构需要遵守静态图语法规范[静态图语法规范](https://www.mindspore.cn/tutorials/zh-CN/br_base/compile/static_graph.html)。ast方式通过源到源的方式来编译Python代码,先把模型定义的Python源码解析成抽象语法树,然后把抽象语法树解析为MindIR。例如下面的Python代码:\n", "\n", "```python\n", "@jit\n", @@ -37,7 +37,7 @@ "\n", "它对应的抽象语法树如下:\n", "\n", - "![抽象语法树](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/docs/mindspore/source_zh_cn/features/compile/images/ast.png)\n", + "![抽象语法树](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/docs/mindspore/source_zh_cn/features/compile/images/ast.png)\n", "\n", "通过解析上面的抽象语法树,我们得到下面的MindIR:\n", "\n", @@ -113,7 +113,7 @@ "\n", "- MindSpore静态图绝大部分计算以及优化都是基于对Tensor计算的优化,因此我们建议被修饰的函数应该是那种用来进行真正的数据计算的函数,而不是一些简单的标量计算或者数据结构的变换。\n", "\n", - "- 被`@jit`修饰的函数,若其输入存在常量,那么该函数每次输入值的变化都会导致重新编译,关于变量常量的概念请见[即时编译下的常量与变量](https://www.mindspore.cn/tutorials/zh-CN/master/compile/static_graph.html#%E5%8D%B3%E6%97%B6%E7%BC%96%E8%AF%91%E4%B8%8B%E7%9A%84%E5%B8%B8%E9%87%8F%E4%B8%8E%E5%8F%98%E9%87%8F)。因此,建议被修饰的函数以Tensor或者被Mutable修饰的数据作为输入。避免因多次编译导致的额外性能损耗。" + "- 被`@jit`修饰的函数,若其输入存在常量,那么该函数每次输入值的变化都会导致重新编译,关于变量常量的概念请见[即时编译下的常量与变量](https://www.mindspore.cn/tutorials/zh-CN/br_base/compile/static_graph.html#%E5%8D%B3%E6%97%B6%E7%BC%96%E8%AF%91%E4%B8%8B%E7%9A%84%E5%B8%B8%E9%87%8F%E4%B8%8E%E5%8F%98%E9%87%8F)。因此,建议被修饰的函数以Tensor或者被Mutable修饰的数据作为输入。避免因多次编译导致的额外性能损耗。" ] }, { @@ -135,7 +135,7 @@ "\n", "bytecode的编译流程如下图所示\n", "\n", - 
"![bytecode的编译流程](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/docs/mindspore/source_zh_cn/features/compile/images/bytecode.png)\n", + "![bytecode的编译流程](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/docs/mindspore/source_zh_cn/features/compile/images/bytecode.png)\n", "\n", "**bytecode的使用方式**:\n", "\n", diff --git a/docs/mindspore/source_zh_cn/features/compile/graph_optimization.md b/docs/mindspore/source_zh_cn/features/compile/graph_optimization.md index e3d8f79424..b743aaa2d6 100644 --- a/docs/mindspore/source_zh_cn/features/compile/graph_optimization.md +++ b/docs/mindspore/source_zh_cn/features/compile/graph_optimization.md @@ -1,6 +1,6 @@ # 图优化(编译) -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/features/compile/graph_optimization.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/features/compile/graph_optimization.md) 与传统编译器类似,MindSpore 在进行完构图之后,也会进行编译优化。编译优化的主要目的是通过静态分析技术对 MindSpore 的中间表示 MindIR 进行分析和转换,以达成减小目标代码大小、提升代码执行效率、降低运行时资源开销或者提升其它性能指标的目的。编译优化是图编译系统中的重要一环,对提升整个神经网络模型的性能和资源利用率有着极其重要的意义,相较于未经过编译优化的原始代码,编译优化可能带来数倍甚至数十倍的性能提升。 diff --git a/docs/mindspore/source_zh_cn/features/compile/multi_level_compilation.md b/docs/mindspore/source_zh_cn/features/compile/multi_level_compilation.md index 0675dac4e7..bc4fa9b189 100644 --- a/docs/mindspore/source_zh_cn/features/compile/multi_level_compilation.md +++ b/docs/mindspore/source_zh_cn/features/compile/multi_level_compilation.md @@ -1,6 +1,6 @@ # 多级编译介绍(编译) -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/features/compile/multi_level_compilation.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/features/compile/multi_level_compilation.md) ## 背景 @@ -16,13 +16,13 @@ ![jit_level_framework](./images/multi_level_compilation/jit_level_framework.png) -1. 多级编译对外接口:通过[mindspore.jit(jit_level="O0/O1")](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.jit.html#mindspore.jit)来配置多级编译级别,jit_level默认为O0,通常我们建议用户使用O0模式进行网络调试调优,调试就绪后,为了更好的性能可以一键开启O1运行网络。 +1. 多级编译对外接口:通过[mindspore.jit(jit_level="O0/O1")](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.jit.html#mindspore.jit)来配置多级编译级别,jit_level默认为O0,通常我们建议用户使用O0模式进行网络调试调优,调试就绪后,为了更好的性能可以一键开启O1运行网络。 2. 后端图编译:根据配置的多级编译级别,选择不同的编译模式,O0为最基础的原生构图与编译,O1在O0基础增加了自动算子融合功能,主要功能有图优化、图算融合、算子选择、执行序编排,其中图算融合为O1模式下独有功能。 3. 
后端图执行:O0跟O1模式执行层面是一样的,均使用单算子方式调度执行,主要功能有多流并发、多级流水、HAL管理、内存管理。 ## O0模式介绍 -O0为基础的图编译执行模式,除必要影响功能的优化外,其他优化均关闭,使用原生的图结构进行编译和执行,方便调试调优,具备较好的编译性能。下面主要介绍后端图编译相关功能,后端图执行相关功能详见[运行时](https://www.mindspore.cn/docs/zh-CN/master/features/runtime/memory_manager.html)。 +O0为基础的图编译执行模式,除必要影响功能的优化外,其他优化均关闭,使用原生的图结构进行编译和执行,方便调试调优,具备较好的编译性能。下面主要介绍后端图编译相关功能,后端图执行相关功能详见[运行时](https://www.mindspore.cn/docs/zh-CN/br_base/features/runtime/memory_manager.html)。 ### 图优化 @@ -70,7 +70,7 @@ MindSpore Ascend后端的算子类型有Aclnn kernel/Aclop kernel/Hccl kernel/Cp - 首先,优化模块需要解决求解最优算子并发的复杂性问题。由于计算图中的算子数量庞大且相互依赖,找到一个既能最大化并发又能保持计算图逻辑正确性的执行顺序是一个极具挑战性的任务。 - 其次,内存限制是执行序优化中不可忽视的关键因素。增大并发虽然可以提升计算效率,但往往会显著增加峰值内存需求,从而可能导致内存溢出(OOM)错误,尤其是在资源受限的环境中。因此,优化模块必须权衡并发与内存使用之间的关系,确保在提升并发的同时,不会超出系统的内存容量。 -- MindSpore的执行序调整模块结合了基于规则和基于启发式策略的方式,提供bfs/dfs两种执行序编排算法[mindspore.jit(option={"exec_order":"bfs/dfs"})](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.jit.html#mindspore.jit),以实现对计算图执行顺序的精细调整,从而在保证计算效率的同时,有效应对内存限制和系统稳定性等多重挑战。 +- MindSpore的执行序调整模块结合了基于规则和基于启发式策略的方式,提供bfs/dfs两种执行序编排算法[mindspore.jit(option={"exec_order":"bfs/dfs"})](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.jit.html#mindspore.jit),以实现对计算图执行顺序的精细调整,从而在保证计算效率的同时,有效应对内存限制和系统稳定性等多重挑战。 ## O1模式介绍 diff --git a/docs/mindspore/source_zh_cn/features/data_engine.md b/docs/mindspore/source_zh_cn/features/data_engine.md index 1940e5cbe4..5db69f833b 100644 --- a/docs/mindspore/source_zh_cn/features/data_engine.md +++ b/docs/mindspore/source_zh_cn/features/data_engine.md @@ -1,6 +1,6 @@ # 高性能数据处理引擎 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/features/data_engine.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/features/data_engine.md) ## 背景介绍 @@ -14,7 +14,7 @@ MindSpore训练数据处理引擎核心是将训练样本(数据集)高效 - 提供了自动数据增强模式,能够基于特定策略自动对图像进行数据增强处理; - 提供单节点数据缓存能力,解决重复加载、处理数据的问题,降低数据处理开销,提升端到端训练效率。 -具体用法参考:[数据处理与加载](https://www.mindspore.cn/tutorials/zh-CN/master/dataset/overview.html) +具体用法参考:[数据处理与加载](https://www.mindspore.cn/tutorials/zh-CN/br_base/dataset/overview.html) ![image](./images/data/data_engine.png) @@ -91,9 +91,9 @@ MindSpore的设计充分考虑了数据处理的高效性、灵活性以及在 针对数据集种类繁多、格式与组织方式各异的难题,MindSpore提供了三种不同的数据集加载方法: - - 对于各领域的常用数据集,可以直接使用MindSpore内置的API接口进行加载。MindSpore提供了[CelebADataset](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.CelebADataset.html)、[Cifar10Dataset](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.Cifar10Dataset.html)、[CocoDataset](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.CocoDataset.html)、[ImageFolderDataset](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.ImageFolderDataset.html)、[MnistDataset](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.MnistDataset.html)、[VOCDataset](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.VOCDataset.html)等常用数据集加载接口,在保证性能的同时,能够让用户开箱即用。 - - 对于暂不支持直接加载的数据集,可以先转换为MindSpore数据格式,即MindRecord,再通过[MindDataset](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.MindDataset.html)接口进行加载。MindRecord可以将不同的数据集格式归一化,有聚合存储、高效读取、快速编解码、灵活控制分区大小等多种优势。 - - 
用户也可以通过Python编写自定义数据集读取类,再使用[GeneratorDataset](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.GeneratorDataset.html) 接口进行数据集加载。该方式可以快速集成现有代码,但由于是Python IO Reader,需要额外关注数据加载性能。 + - 对于各领域的常用数据集,可以直接使用MindSpore内置的API接口进行加载。MindSpore提供了[CelebADataset](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.CelebADataset.html)、[Cifar10Dataset](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.Cifar10Dataset.html)、[CocoDataset](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.CocoDataset.html)、[ImageFolderDataset](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.ImageFolderDataset.html)、[MnistDataset](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.MnistDataset.html)、[VOCDataset](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.VOCDataset.html)等常用数据集加载接口,在保证性能的同时,能够让用户开箱即用。 + - 对于暂不支持直接加载的数据集,可以先转换为MindSpore数据格式,即MindRecord,再通过[MindDataset](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.MindDataset.html)接口进行加载。MindRecord可以将不同的数据集格式归一化,有聚合存储、高效读取、快速编解码、灵活控制分区大小等多种优势。 + - 用户也可以通过Python编写自定义数据集读取类,再使用[GeneratorDataset](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.GeneratorDataset.html) 接口进行数据集加载。该方式可以快速集成现有代码,但由于是Python IO Reader,需要额外关注数据加载性能。 - 通过Python层自定义和C++层插件的方式支持更多操作 @@ -111,7 +111,7 @@ MindSpore的设计充分考虑了数据处理的高效性、灵活性以及在 为了支持AutoAugment这种自动数据增强策略,MindSpore提供了以下接口。 - - [RandomChoice](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset_transforms/mindspore.dataset.transforms.RandomChoice.html)即随机选择,允许用户定义一个数据增强操作列表,数据处理过程中将针对每张图像等概率选择列表中的一个数据增强操作执行。 + - [RandomChoice](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset_transforms/mindspore.dataset.transforms.RandomChoice.html)即随机选择,允许用户定义一个数据增强操作列表,数据处理过程中将针对每张图像等概率选择列表中的一个数据增强操作执行。 ```python from mindspore.dataset.transforms import RandomChoice @@ -122,7 +122,7 @@ MindSpore的设计充分考虑了数据处理的高效性、灵活性以及在 RandomRotation((90, 90))]) ``` - - [RandomApply](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset_transforms/mindspore.dataset.transforms.RandomApply.html)即随机概率执行,允许用户定义一个数据增强操作列表和对应概率,数据处理过程中将针对每张图像以指定的概率执行列表中的数据增强操作,要么全都执行,要么全不执行。 + - [RandomApply](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset_transforms/mindspore.dataset.transforms.RandomApply.html)即随机概率执行,允许用户定义一个数据增强操作列表和对应概率,数据处理过程中将针对每张图像以指定的概率执行列表中的数据增强操作,要么全都执行,要么全不执行。 ```python from mindspore.dataset.transforms import RandomApply @@ -133,7 +133,7 @@ MindSpore的设计充分考虑了数据处理的高效性、灵活性以及在 RandomRotation((90, 90))], 0.8) ``` - - [RandomSelectSubpolicy](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset_vision/mindspore.dataset.vision.RandomSelectSubpolicy.html)即随机子策略选择,允许用户定义多个数据增强操作子策略列表,并对子策略中的每个数据增强操作指定执行的概率,数据处理过程中将针对每张图像先等概率选择一个子策略,然后按顺序依照概率决定其中各个数据增强操作是否执行。 + - [RandomSelectSubpolicy](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset_vision/mindspore.dataset.vision.RandomSelectSubpolicy.html)即随机子策略选择,允许用户定义多个数据增强操作子策略列表,并对子策略中的每个数据增强操作指定执行的概率,数据处理过程中将针对每张图像先等概率选择一个子策略,然后按顺序依照概率决定其中各个数据增强操作是否执行。 ```python from mindspore.dataset.vision import RandomSelectSubpolicy, RandomRotation, RandomVerticalFlip, \ diff --git a/docs/mindspore/source_zh_cn/features/mint.md b/docs/mindspore/source_zh_cn/features/mint.md index 9cc38148fe..be47b99259 100644 --- a/docs/mindspore/source_zh_cn/features/mint.md +++ b/docs/mindspore/source_zh_cn/features/mint.md 
@@ -1,6 +1,6 @@ # mint API 介绍 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/features/mint.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/features/mint.md) ## 介绍 @@ -25,7 +25,7 @@ - `layout`: 创建torch tensor时,一般默认layout是stride,即dense tensor。mindspore创建tensor时,默认是dense tensor,与torch 无差异。开发者无需设置。 - `memory_format`: tensor的内存排布,默认都是NCHW格式。torch 提供channel_last格式即NHWC,在一些场景中,这样会有性能提升,但是泛化性和兼容性需要开发者实际测试和验证。使用mindspore开发,可不设置此参数。 -- `requires_grad`: 由于框架自动微分求导机制不同,mindspore在tensor的属性中没有设置此参数。对于是否需要计算梯度,常用的parameter类提供了此参数。如果无需计算梯度,可参考[mindspore.ops.stop_gradient](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.stop_gradient.html)。 +- `requires_grad`: 由于框架自动微分求导机制不同,mindspore在tensor的属性中没有设置此参数。对于是否需要计算梯度,常用的parameter类提供了此参数。如果无需计算梯度,可参考[mindspore.ops.stop_gradient](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.stop_gradient.html)。 - `pin_memory`: 返回的tensor被分配到pinned memory,我们已经规划支持此功能。计划在2.7.1版本推出。 - `out`: 指定输出张量,用于原地操作和内存优化。当提供 `out` 参数时,操作结果会直接写入到指定的张量中,而不是创建新的张量。当前未规划支持此参数。 @@ -162,4 +162,4 @@ torch废弃的参数,不支持,例如: | `group`(ProcessGroup) | `group` (ProcessGroup) | 可选 | | `async_op` (bool) | `async_op` (bool) | 可选 | -更多API支持情况请查阅[mint支持列表](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.mint.html)。 \ No newline at end of file +更多API支持情况请查阅[mint支持列表](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.mint.html)。 \ No newline at end of file diff --git a/docs/mindspore/source_zh_cn/features/overview.md b/docs/mindspore/source_zh_cn/features/overview.md index 4062caf187..eebbda5df5 100644 --- a/docs/mindspore/source_zh_cn/features/overview.md +++ b/docs/mindspore/source_zh_cn/features/overview.md @@ -1,6 +1,6 @@ # MindSpore设计概览 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/features/overview.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/features/overview.md) ## 概述 @@ -55,7 +55,7 @@ MindSpore实现了函数式微分编程,对可被微分求导的函数对象 MindSpore基于Python构建神经网络的图结构,相比于传统的静态图模式,能有更易用、更灵活的表达能力。MindSpore创新性的构建源码转换能力,基于Python语句提取AST进行计算图构建,因此可以支持开发者使用的Python原生语法(条件/循环等)和其他操作,如元组(Tuple)、列表(List)以及Lambda表达来构建计算图,并对计算图进行自动微分。所以MindSpore能更好地兼容动态图和静态图的编程接口,在代码层面保持一致,如控制流写法等。 -原生Python表达可基于Python控制流关键字,直接使能静态图模式的执行,使得动静态图的编程统一性更高。同时开发者基于MindSpore的接口,可以灵活的对Python代码片段进行动静态图模式控制。即可以将程序局部函数以静态图模式执行([mindspore.jit](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.jit.html))而同时其他函数按照动态图模式执行。从而使得在与常用Python库、自定义Python函数进行穿插执行使用时,开发者可以灵活指定函数片段进行静态图优化加速,而不牺牲穿插执行的编程易用性。 +原生Python表达可基于Python控制流关键字,直接使能静态图模式的执行,使得动静态图的编程统一性更高。同时开发者基于MindSpore的接口,可以灵活的对Python代码片段进行动静态图模式控制。即可以将程序局部函数以静态图模式执行([mindspore.jit](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.jit.html))而同时其他函数按照动态图模式执行。从而使得在与常用Python库、自定义Python函数进行穿插执行使用时,开发者可以灵活指定函数片段进行静态图优化加速,而不牺牲穿插执行的编程易用性。 ### 分布式并行 @@ -71,7 +71,7 @@ MindSpore在并行化策略搜索中引入了张量重排布技术(Tensor Redi 
MindSpore基于编译技术,提供了丰富的硬件无关优化,如IR融合、代数化简、常数折叠、公共子表达式消除等。同时针对NPU、GPU等不同硬件,也提供各种硬件优化能力,从而更好的发挥硬件的大规模计算加速能力。 -#### [图算融合](https://www.mindspore.cn/docs/zh-CN/master/features/compile/multi_level_compilation.html#图算融合) +#### [图算融合](https://www.mindspore.cn/docs/zh-CN/br_base/features/compile/multi_level_compilation.html#图算融合) MindSpore等主流AI计算框架对开发者提供的算子通常是从开发中可理解、易使用角度进行定义。每个算子承载的计算量不等,计算复杂度也各不相同。但从硬件执行角度看,这种天然的、基于用开发者角度的算子计算量划分,并不高效,也无法充分发挥硬件资源计算能力。主要体现在: @@ -101,6 +101,6 @@ MindSpore是训推一体的AI框架,同时支持训练和推理等功能。同 MindSpore按照实际执行环境和业务需求,提供多种规格的版本形态,支持部署在云端、服务器端、手机等嵌入式设备端以及耳机等超轻量级设备端上的部署执行。 -### [三方硬件接入](https://www.mindspore.cn/docs/zh-CN/master/features/runtime/pluggable_device.html) +### [三方硬件接入](https://www.mindspore.cn/docs/zh-CN/br_base/features/runtime/pluggable_device.html) MindSpore基于统一MindIR构建了开放式AI架构,支持第三方芯片插件化、标准化、低成本快速对接,可接入GPU系列芯片亦可接入各类DSA芯片。MindSpore提供Kernel模式对接和Graph模式对接两种芯片接入方式,芯片产商可根据自身特点进行接入方式选择。 diff --git a/docs/mindspore/source_zh_cn/features/parallel/auto_parallel.rst b/docs/mindspore/source_zh_cn/features/parallel/auto_parallel.rst index 2e31949db2..f4467b29df 100644 --- a/docs/mindspore/source_zh_cn/features/parallel/auto_parallel.rst +++ b/docs/mindspore/source_zh_cn/features/parallel/auto_parallel.rst @@ -1,8 +1,8 @@ 自动并行策略搜索 ======================== -.. image:: https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg - :target: https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/features/parallel/auto_parallel.rst +.. image:: https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg + :target: https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/features/parallel/auto_parallel.rst :alt: 查看源文件 自动并行策略搜索模式能够让用户无需关心策略配置,自动地建立代价模型,找到训练时间较短的并行策略。当前MindSpore支持如下两种不同的自动并行策略搜索方案: @@ -71,9 +71,9 @@ MindSpore将单机版本的程序转换成并行版本的程序。该转换是 相关接口: -1. `mindspore.parallel.auto_parallel.AutoParallel(net, parallel_mode="sharding_propagation") `_:设置并行模式,可以通过parallel_mode选择策略传播算法。 +1. `mindspore.parallel.auto_parallel.AutoParallel(net, parallel_mode="sharding_propagation") `_:设置并行模式,可以通过parallel_mode选择策略传播算法。 -2. `mindspore.nn.Cell.shard() `_ 以及 `mindspore.ops.Primitive.shard() `_ :指定算子切分策略,其余算子的策略通过传播算法推导得到。目前 ``mindspore.nn.Cell.shard()`` 接口同时支持 PyNative 模式与 Graph 模式; ``mindspore.ops.Primitive.shard()`` 接口仅可在 Graph 模式下使用。 +2. `mindspore.nn.Cell.shard() `_ 以及 `mindspore.ops.Primitive.shard() `_ :指定算子切分策略,其余算子的策略通过传播算法推导得到。目前 ``mindspore.nn.Cell.shard()`` 接口同时支持 PyNative 模式与 Graph 模式; ``mindspore.ops.Primitive.shard()`` 接口仅可在 Graph 模式下使用。 总而言之,切分策略传播算法需要用户手动配置关键算子的切分策略。 diff --git a/docs/mindspore/source_zh_cn/features/parallel/data_parallel.md b/docs/mindspore/source_zh_cn/features/parallel/data_parallel.md index 26bdfeea1f..23e209f1a0 100644 --- a/docs/mindspore/source_zh_cn/features/parallel/data_parallel.md +++ b/docs/mindspore/source_zh_cn/features/parallel/data_parallel.md @@ -1,6 +1,6 @@ # 数据并行 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/features/parallel/data_parallel.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/features/parallel/data_parallel.md) ## 概述 @@ -10,24 +10,24 @@ 相关接口: -1. 
[mindspore.set_auto_parallel_context(parallel_mode=ParallelMode.DATA_PARALLEL)](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.set_auto_parallel_context.html):设置数据并行模式。 -2. [mindspore.nn.DistributedGradReducer()](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.DistributedGradReducer.html):进行多卡梯度聚合。 +1. [mindspore.set_auto_parallel_context(parallel_mode=ParallelMode.DATA_PARALLEL)](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.set_auto_parallel_context.html):设置数据并行模式。 +2. [mindspore.nn.DistributedGradReducer()](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.DistributedGradReducer.html):进行多卡梯度聚合。 ## 整体流程 -![整体流程](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/docs/mindspore/source_zh_cn/features/images/data_parallel.png) +![整体流程](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/docs/mindspore/source_zh_cn/features/images/data_parallel.png) 1. 环境依赖 - 每次开始进行并行训练前,通过调用[mindspore.communication.init](https://www.mindspore.cn/docs/zh-CN/master/api_python/communication/mindspore.communication.init.html)接口初始化通信资源,并自动创建全局通信组`WORLD_COMM_GROUP`。通信组能让通信算子在卡间和机器间进行信息收发,全局通信组是最大的一个通信组,包括了当前训练的所有设备。通过调用`mindspore.set_auto_parallel_context(parallel_mode=ParallelMode.DATA_PARALLEL)`设置当前模式为数据并行模式。 + 每次开始进行并行训练前,通过调用[mindspore.communication.init](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/communication/mindspore.communication.init.html)接口初始化通信资源,并自动创建全局通信组`WORLD_COMM_GROUP`。通信组能让通信算子在卡间和机器间进行信息收发,全局通信组是最大的一个通信组,包括了当前训练的所有设备。通过调用`mindspore.set_auto_parallel_context(parallel_mode=ParallelMode.DATA_PARALLEL)`设置当前模式为数据并行模式。 2. 数据分发(Data distribution) - 数据并行的核心在于将数据集在样本维度拆分并下发到不同的卡上。在[mindspore.dataset](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.dataset.html)模块提供的所有数据集加载接口中都有`num_shards`和`shard_id`两个参数,它们用于将数据集拆分为多份并循环采样的方式,采集`batch`大小的数据到各自的卡上,当出现数据量不足的情况时将会从头开始采样。 + 数据并行的核心在于将数据集在样本维度拆分并下发到不同的卡上。在[mindspore.dataset](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.dataset.html)模块提供的所有数据集加载接口中都有`num_shards`和`shard_id`两个参数,它们用于将数据集拆分为多份并循环采样的方式,采集`batch`大小的数据到各自的卡上,当出现数据量不足的情况时将会从头开始采样。 3. 网络构图 - 数据并行网络的书写方式与单卡网络没有差别,这是因为在正反向传播(Forward propagation & Backward propagation)过程中各卡的模型间是独立执行的,只是保持了相同的网络结构。唯一需要特别注意的是为了保证各卡间训练同步,相应的网络参数初始化值应当是一致的,在`DATA_PARALLEL`模式下可以通过[mindspore.set_seed](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.set_seed.html)接口来设置seed或通过使能`mindspore.set_auto_parallel_context`中的`parameter_broadcast`达到多卡间权重初始化一致的目的。 + 数据并行网络的书写方式与单卡网络没有差别,这是因为在正反向传播(Forward propagation & Backward propagation)过程中各卡的模型间是独立执行的,只是保持了相同的网络结构。唯一需要特别注意的是为了保证各卡间训练同步,相应的网络参数初始化值应当是一致的,在`DATA_PARALLEL`模式下可以通过[mindspore.set_seed](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.set_seed.html)接口来设置seed或通过使能`mindspore.set_auto_parallel_context`中的`parameter_broadcast`达到多卡间权重初始化一致的目的。 4. 
梯度聚合(Gradient aggregation) diff --git a/docs/mindspore/source_zh_cn/features/parallel/operator_parallel.md b/docs/mindspore/source_zh_cn/features/parallel/operator_parallel.md index 4bed8a24d9..3a65bd62f3 100644 --- a/docs/mindspore/source_zh_cn/features/parallel/operator_parallel.md +++ b/docs/mindspore/source_zh_cn/features/parallel/operator_parallel.md @@ -1,6 +1,6 @@ # 算子级并行 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/features/parallel/operator_parallel.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/features/parallel/operator_parallel.md) ## 概述 @@ -8,19 +8,19 @@ 算子级并行是将网络模型中每个算子涉及到的张量进行切分,当仅切分数据维度时,为逻辑上的数据并行;当仅切分模型维度时,为逻辑上的模型并行。通过将张量切分到多个设备上,降低单个设备的内存消耗,从而使大模型的训练成为可能。 -MindSpore提供两种粒度的算子级并行能力:[算子级并行](#基本原理)和[高阶算子级并行](#高阶算子级并行)。算子级并行通过简单切分策略描述张量维度分布,满足大多数场景需求。高阶算子级并行通过开放设备排布描述,支持复杂切分场景(如非连续设备分配、多维混合切分)。两种粒度的算子级并行能力均同时支持ops和mint算子,本章仅介绍基于ops算子的算子级并行和高阶算子级并行,基于mint算子的算子级并行配置方法请参照[算子级并行教程](https://www.mindspore.cn/tutorials/zh-CN/master/parallel/operator_parallel.html)中的mint算子并行和高阶mint算子并行章节。 +MindSpore提供两种粒度的算子级并行能力:[算子级并行](#基本原理)和[高阶算子级并行](#高阶算子级并行)。算子级并行通过简单切分策略描述张量维度分布,满足大多数场景需求。高阶算子级并行通过开放设备排布描述,支持复杂切分场景(如非连续设备分配、多维混合切分)。两种粒度的算子级并行能力均同时支持ops和mint算子,本章仅介绍基于ops算子的算子级并行和高阶算子级并行,基于mint算子的算子级并行配置方法请参照[算子级并行教程](https://www.mindspore.cn/tutorials/zh-CN/br_base/parallel/operator_parallel.html)中的mint算子并行和高阶mint算子并行章节。 -目前,MindSpore支持并行的算子列表,可以参考[算子级并行使用约束](https://www.mindspore.cn/docs/zh-CN/master/api_python/operator_list_parallel.html)。 +目前,MindSpore支持并行的算子列表,可以参考[算子级并行使用约束](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/operator_list_parallel.html)。 > 算子级并行模型支持的硬件平台包括Ascend、GPU,需要在Graph模式下运行。 相关接口: -1. [mindspore.parallel.auto_parallel.AutoParallel(network, parallel_mode="semi_auto")](https://www.mindspore.cn/docs/zh-CN/master/api_python/parallel/mindspore.parallel.auto_parallel.AutoParallel.html):通过静态图并行封装指定并行模式,其中`network`是待封装的顶层`Cell`或函数,`parallel_mode`取值`semi_auto`,表示半自动并行模式。该接口返回封装后包含并行配置的`Cell`。 +1. [mindspore.parallel.auto_parallel.AutoParallel(network, parallel_mode="semi_auto")](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/parallel/mindspore.parallel.auto_parallel.AutoParallel.html):通过静态图并行封装指定并行模式,其中`network`是待封装的顶层`Cell`或函数,`parallel_mode`取值`semi_auto`,表示半自动并行模式。该接口返回封装后包含并行配置的`Cell`。 -2. [mindspore.ops.Primitive.shard()](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Primitive.html#mindspore.ops.Primitive.shard):指定算子切分策略,详细案例请参考本章的[基本原理](#基本原理)。 +2. [mindspore.ops.Primitive.shard()](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Primitive.html#mindspore.ops.Primitive.shard):指定算子切分策略,详细案例请参考本章的[基本原理](#基本原理)。 -3. [`mindspore.ops.Primitive.add_prim_attr()`](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Primitive.html#mindspore.ops.Primitive.add_prim_attr):为满足不同场景诉求,部分算子能通过`add_prim_attr`接口对其分布式实现进行配置,这些配置仅对`SEMI_AUTO_PARALLEL`与`AUTO_PARALLEL`模式适用,例如: +3. 
[`mindspore.ops.Primitive.add_prim_attr()`](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Primitive.html#mindspore.ops.Primitive.add_prim_attr):为满足不同场景诉求,部分算子能通过`add_prim_attr`接口对其分布式实现进行配置,这些配置仅对`SEMI_AUTO_PARALLEL`与`AUTO_PARALLEL`模式适用,例如: - `ops.Gather().add_prim_attr("manual_split", split_tuple)`:该接口配置Gather算子的第一个输入非均匀切分,它仅对axis=0时有效。其中`split_tuple`是一个元素为int类型的元组,元素之和须等于Gather算子第一个输入的第零维的长度,元组个数须等于Gather算子第一个输入的第零维切分份数。 - `ops.Gather().add_prim_attr("primitive_target", "CPU")`:该接口配置Gather算子在CPU上执行,用于异构场景。 @@ -87,7 +87,7 @@ paralell_net = AutoParallel(net, parallel_mode='semi_auto') 为了应对这些复杂场景,本章节将介绍一种开放设备排布描述的高阶算子级并行配置方法。 -[算子级并行](https://www.mindspore.cn/docs/zh-CN/master/features/parallel/operator_parallel.html) 中介绍了MindSpore对张量的基本切分逻辑,但不能表达出所有的切分场景。例如,对于一个二维张量 "[[a0, a1, a2, a3], [a4, a5, a6, a7]]",其张量排布如下图所示: +[算子级并行](https://www.mindspore.cn/docs/zh-CN/br_base/features/parallel/operator_parallel.html) 中介绍了MindSpore对张量的基本切分逻辑,但不能表达出所有的切分场景。例如,对于一个二维张量 "[[a0, a1, a2, a3], [a4, a5, a6, a7]]",其张量排布如下图所示: ![image](images/advanced_operator_parallel_view1.PNG) @@ -103,9 +103,9 @@ paralell_net = AutoParallel(net, parallel_mode='semi_auto') ### 接口配置 -为了表达出如上述场景下的切分,[shard](https://www.mindspore.cn/docs/zh-CN/master/api_python/parallel/mindspore.parallel.shard.html) 接口进行了功能扩展。 +为了表达出如上述场景下的切分,[shard](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/parallel/mindspore.parallel.shard.html) 接口进行了功能扩展。 -入参in_strategy和out_strategy都额外接收新的数量类型——tuple(Layout)。其中[Layout](https://www.mindspore.cn/docs/zh-CN/master/api_python/parallel/mindspore.parallel.Layout.html) 通过设备矩阵进行初始化,并同时要求给设备矩阵的每个轴取一个别名。例如:"layout = Layout((8, 4, 4), name = ("dp", "sp", "mp"))"表示该设备共有128张卡,按照(8, 4, 4)的形状进行排列,并为每个轴分别取了别名"dp"、"sp"、"mp"。 +入参in_strategy和out_strategy都额外接收新的数量类型——tuple(Layout)。其中[Layout](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/parallel/mindspore.parallel.Layout.html) 通过设备矩阵进行初始化,并同时要求给设备矩阵的每个轴取一个别名。例如:"layout = Layout((8, 4, 4), name = ("dp", "sp", "mp"))"表示该设备共有128张卡,按照(8, 4, 4)的形状进行排列,并为每个轴分别取了别名"dp"、"sp"、"mp"。 关于Layout的具体含义与配置推导方法,可参考如下两篇技术文档: diff --git a/docs/mindspore/source_zh_cn/features/parallel/optimizer_parallel.md b/docs/mindspore/source_zh_cn/features/parallel/optimizer_parallel.md index d8f06aacd1..68e667f7d6 100644 --- a/docs/mindspore/source_zh_cn/features/parallel/optimizer_parallel.md +++ b/docs/mindspore/source_zh_cn/features/parallel/optimizer_parallel.md @@ -1,6 +1,6 @@ # 优化器并行 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/features/parallel/optimizer_parallel.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/features/parallel/optimizer_parallel.md) ## 概述 @@ -18,11 +18,11 @@ 相关接口: -1. [mindspore.parallel.auto_parallel.AutoParallel(network, parallel_mode="semi_auto")](https://www.mindspore.cn/docs/zh-CN/master/api_python/parallel/mindspore.parallel.auto_parallel.AutoParallel.html):通过静态图并行封装指定并行模式,其中`network`是待封装的顶层`Cell`或函数,`parallel_mode`取值`semi_auto`,表示半自动并行模式。该接口返回封装后包含并行配置的`Cell`。 +1. 
[mindspore.parallel.auto_parallel.AutoParallel(network, parallel_mode="semi_auto")](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/parallel/mindspore.parallel.auto_parallel.AutoParallel.html):通过静态图并行封装指定并行模式,其中`network`是待封装的顶层`Cell`或函数,`parallel_mode`取值`semi_auto`,表示半自动并行模式。该接口返回封装后包含并行配置的`Cell`。 -2. [mindspore.parallel.auto_parallel.AutoParallel.hsdp(shard_size=-1, threshold=64, optimizer_level="level1")](https://www.mindspore.cn/docs/zh-CN/master/api_python/parallel/mindspore.parallel.auto_parallel.AutoParallel.html#mindspore.parallel.auto_parallel.AutoParallel.hsdp):通过该接口设置优化器并行的配置,并开启优化器并行。其中`shard_size`指定优化器权重切分通信域的大小。`threshold`表示切分参数时,要求目标参数所占内存的最小值。当目标参数小于该值时,将不会被切分。 `optimizer_level`是优化器切分级别,当级别为`level1`时,对权重和优化器状态进行切分;当级别为`level2`时,对权重、优化器状态和梯度进行切分;当级别为`level3`时,除了对权重、优化器状态和梯度进行切分外,在反向传播前,还会对权重进行all gather通信,以释放前向传播allgather占用的内存。 +2. [mindspore.parallel.auto_parallel.AutoParallel.hsdp(shard_size=-1, threshold=64, optimizer_level="level1")](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/parallel/mindspore.parallel.auto_parallel.AutoParallel.html#mindspore.parallel.auto_parallel.AutoParallel.hsdp):通过该接口设置优化器并行的配置,并开启优化器并行。其中`shard_size`指定优化器权重切分通信域的大小。`threshold`表示切分参数时,要求目标参数所占内存的最小值。当目标参数小于该值时,将不会被切分。 `optimizer_level`是优化器切分级别,当级别为`level1`时,对权重和优化器状态进行切分;当级别为`level2`时,对权重、优化器状态和梯度进行切分;当级别为`level3`时,除了对权重、优化器状态和梯度进行切分外,在反向传播前,还会对权重进行all gather通信,以释放前向传播allgather占用的内存。 -3. [mindspore.nn.Cell.set_comm_fusion(fusion_type=NUM)](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.set_comm_fusion):在自动/半自动模式下,每个参数都会产生一个对应的AllGather操作和ReduceScatter操作。这些通信算子是自动并行框架自动插入的。然而,随着参数量增多,对应的通信算子也会增多,通信操作中的算子调度和启动都会产生更多的开销。因此,可以通过`Cell`提供的`set_comm_fusion`方法,手动对每个`Cell`内参数对应的AllGather和ReduceScatter操作配置融合标记NUM,以提高通信效率。MindSpore将融合相同NUM参数对应的通信算子,以减少通信开销。 +3. [mindspore.nn.Cell.set_comm_fusion(fusion_type=NUM)](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.set_comm_fusion):在自动/半自动模式下,每个参数都会产生一个对应的AllGather操作和ReduceScatter操作。这些通信算子是自动并行框架自动插入的。然而,随着参数量增多,对应的通信算子也会增多,通信操作中的算子调度和启动都会产生更多的开销。因此,可以通过`Cell`提供的`set_comm_fusion`方法,手动对每个`Cell`内参数对应的AllGather和ReduceScatter操作配置融合标记NUM,以提高通信效率。MindSpore将融合相同NUM参数对应的通信算子,以减少通信开销。 ## 基本原理 diff --git a/docs/mindspore/source_zh_cn/features/parallel/pipeline_parallel.md b/docs/mindspore/source_zh_cn/features/parallel/pipeline_parallel.md index dd8393cf49..e0891f0aff 100644 --- a/docs/mindspore/source_zh_cn/features/parallel/pipeline_parallel.md +++ b/docs/mindspore/source_zh_cn/features/parallel/pipeline_parallel.md @@ -1,6 +1,6 @@ # 流水线并行 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/features/parallel/pipeline_parallel.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/features/parallel/pipeline_parallel.md) ## 概述 @@ -10,15 +10,15 @@ 相关接口: -1. [mindspore.parallel.auto_parallel.AutoParallel(network, parallel_mode="semi_auto")](https://www.mindspore.cn/docs/zh-CN/master/api_python/parallel/mindspore.parallel.auto_parallel.AutoParallel.html):通过静态图并行封装指定并行模式,其中`network`是待封装的顶层`Cell`或函数,`parallel_mode`取值`semi_auto`,表示半自动并行模式。该接口返回封装后包含并行配置的`Cell`。 +1. 
[mindspore.parallel.auto_parallel.AutoParallel(network, parallel_mode="semi_auto")](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/parallel/mindspore.parallel.auto_parallel.AutoParallel.html):通过静态图并行封装指定并行模式,其中`network`是待封装的顶层`Cell`或函数,`parallel_mode`取值`semi_auto`,表示半自动并行模式。该接口返回封装后包含并行配置的`Cell`。 -2. [mindspore.parallel.auto_parallel.AutoParallel.pipeline(stages=1, output_broadcast=False, interleave=False, scheduler='1f1b')](https://www.mindspore.cn/docs/zh-CN/master/api_python/parallel/mindspore.parallel.auto_parallel.AutoParallel.html#mindspore.parallel.auto_parallel.AutoParallel.pipeline):设置流水线并行配置。`stages`表示流水线并行需要设置的切分总数,`output_broadcast`表示流水线并行推理时,最后一个stage的结果是否广播给其他stage,`interleave`表示是否开启interleave优化策略,`scheduler`表示流水线并行的调度策略,当前支持`gpipe`/`1f1b`/`seqpipe`/`seqvpp`/`seqsmartvpp`/`zero_bubble_v`。 +2. [mindspore.parallel.auto_parallel.AutoParallel.pipeline(stages=1, output_broadcast=False, interleave=False, scheduler='1f1b')](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/parallel/mindspore.parallel.auto_parallel.AutoParallel.html#mindspore.parallel.auto_parallel.AutoParallel.pipeline):设置流水线并行配置。`stages`表示流水线并行需要设置的切分总数,`output_broadcast`表示流水线并行推理时,最后一个stage的结果是否广播给其他stage,`interleave`表示是否开启interleave优化策略,`scheduler`表示流水线并行的调度策略,当前支持`gpipe`/`1f1b`/`seqpipe`/`seqvpp`/`seqsmartvpp`/`zero_bubble_v`。 -3. [mindspore.parallel.Pipeline(network, micro_size=1, stage_config={"cell1":0, "cell2":1})](https://www.mindspore.cn/docs/zh-CN/master/api_python/parallel/mindspore.parallel.nn.Pipeline.html):流水线并行需要需要在`network`外再添加一层`Pipeline`,并通过`micro_size`指定MicroBatch的个数,以及指出网络中各Cell在哪个`stage`中执行。如果对于`network`使用`nn.WithLossCell`封装,则会改变`Cell`的名称,并增加`_backbone`前缀。为了提升机器的利用率,MindSpore将MiniBatch切分成了更细粒度的MicroBatch,最终的loss则是所有MicroBatch计算的loss值累加。其中,micro_size必须大于等于stages的数量。 +3. [mindspore.parallel.Pipeline(network, micro_size=1, stage_config={"cell1":0, "cell2":1})](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/parallel/mindspore.parallel.nn.Pipeline.html):流水线并行需要在`network`外再添加一层`Pipeline`,并通过`micro_size`指定MicroBatch的个数,以及指出网络中各Cell在哪个`stage`中执行。如果对于`network`使用`nn.WithLossCell`封装,则会改变`Cell`的名称,并增加`_backbone`前缀。为了提升机器的利用率,MindSpore将MiniBatch切分成了更细粒度的MicroBatch,最终的loss则是所有MicroBatch计算的loss值累加。其中,micro_size必须大于等于stages的数量。 -4. [mindspore.parallel.PipelineGradReducer(parameters, scale_sense=1.0, opt_shard=None)](https://www.mindspore.cn/docs/zh-CN/master/api_python/parallel/mindspore.parallel.nn.PipelineGradReducer.html):流水线并行需要使用`PipelineGradReducer`来完成梯度聚合。这是因为流水线并行中,其输出是由多个`MicroBatch`的结果相加得到,因此其梯度也需要进行累加。 +4. [mindspore.parallel.PipelineGradReducer(parameters, scale_sense=1.0, opt_shard=None)](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/parallel/mindspore.parallel.nn.PipelineGradReducer.html):流水线并行需要使用`PipelineGradReducer`来完成梯度聚合。这是因为流水线并行中,其输出是由多个`MicroBatch`的结果相加得到,因此其梯度也需要进行累加。 -5. 
[mindspore.parallel.sync_pipeline_shared_parameters(net)](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/parallel/mindspore.parallel.sync_pipeline_shared_parameters.html): 在推理场景下,用于同步不同stage之间共享权重。 ## 基本原理 diff --git a/docs/mindspore/source_zh_cn/features/runtime/memory_manager.md b/docs/mindspore/source_zh_cn/features/runtime/memory_manager.md index 1277066617..62756cc04d 100644 --- a/docs/mindspore/source_zh_cn/features/runtime/memory_manager.md +++ b/docs/mindspore/source_zh_cn/features/runtime/memory_manager.md @@ -1,6 +1,6 @@ # 内存管理 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/features/runtime/memory_manager.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/features/runtime/memory_manager.md) ## 概述 @@ -13,14 +13,14 @@ ## 接口 -内存管理相关接口详见[runtime接口](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.runtime.html#%E5%86%85%E5%AD%98),其中最为重要的两个接口为内存设置接口和内存碎片管理接口: +内存管理相关接口详见[runtime接口](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.runtime.html#%E5%86%85%E5%AD%98),其中最为重要的两个接口为内存设置接口和内存碎片管理接口: -1. 内存设置接口:[mindspore.runtime.set_memory](https://www.mindspore.cn/docs/zh-CN/master/api_python/runtime/mindspore.runtime.set_memory.html#mindspore.runtime.set_memory),设置使用内存池管理的内存参数以及内存复用算法。 -2. 内存碎片管理接口:[环境变量MS_ALLOC_CONF](https://www.mindspore.cn/docs/zh-CN/master/api_python/env_var_list.html#%E5%9B%BE%E7%BC%96%E8%AF%91%E6%89%A7%E8%A1%8C),根据硬件驱动是否具备虚拟内存跟物理内存映射能力来确定行为,如果具备则默认打开,否则默认关闭。可通过export MS_ALLOC_CONF=“enable_vmm:false”强制关闭。 +1. 内存设置接口:[mindspore.runtime.set_memory](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/runtime/mindspore.runtime.set_memory.html#mindspore.runtime.set_memory),设置使用内存池管理的内存参数以及内存复用算法。 +2. 
内存碎片管理接口:[环境变量MS_ALLOC_CONF](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/env_var_list.html#%E5%9B%BE%E7%BC%96%E8%AF%91%E6%89%A7%E8%A1%8C),根据硬件驱动是否具备虚拟内存跟物理内存映射能力来确定行为,如果具备则默认打开,否则默认关闭。可通过export MS_ALLOC_CONF=“enable_vmm:false”强制关闭。 ## 内存池 -内存池作为内存管理的底座,其核心思想是预先分配一大块连续内存,申请内存时直接从池中分配,释放时归还到池中复用,而非频繁调用系统中的内存申请释放接口,减少了频繁动态分配的开销,提升了系统性能。MindSpore主要使用最佳适应(BestFit)内存分配算法,支持动态扩充内存块和碎片整理,通过接口[mindspore.runtime.set_memory(init_size,increase_size,max_size)](https://www.mindspore.cn/docs/zh-CN/master/api_python/runtime/mindspore.runtime.set_memory.html)设置内存池初始化参数用于控制内存池动态扩充大小和最大内存使用量。 +内存池作为内存管理的底座,其核心思想是预先分配一大块连续内存,申请内存时直接从池中分配,释放时归还到池中复用,而非频繁调用系统中的内存申请释放接口,减少了频繁动态分配的开销,提升了系统性能。MindSpore主要使用最佳适应(BestFit)内存分配算法,支持动态扩充内存块和碎片整理,通过接口[mindspore.runtime.set_memory(init_size,increase_size,max_size)](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/runtime/mindspore.runtime.set_memory.html)设置内存池初始化参数用于控制内存池动态扩充大小和最大内存使用量。 ![memory_pool](./images/memory_pool.png) diff --git a/docs/mindspore/source_zh_cn/features/runtime/multilevel_pipeline.md b/docs/mindspore/source_zh_cn/features/runtime/multilevel_pipeline.md index a03302d0cd..272ff2365a 100644 --- a/docs/mindspore/source_zh_cn/features/runtime/multilevel_pipeline.md +++ b/docs/mindspore/source_zh_cn/features/runtime/multilevel_pipeline.md @@ -1,6 +1,6 @@ # 多级流水 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/features/runtime/multilevel_pipeline.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/features/runtime/multilevel_pipeline.md) ## 概述 diff --git a/docs/mindspore/source_zh_cn/features/runtime/multistream_concurrency.md b/docs/mindspore/source_zh_cn/features/runtime/multistream_concurrency.md index 424743dac1..2578f6ad20 100644 --- a/docs/mindspore/source_zh_cn/features/runtime/multistream_concurrency.md +++ b/docs/mindspore/source_zh_cn/features/runtime/multistream_concurrency.md @@ -1,6 +1,6 @@ # 多流并发 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/features/runtime/multistream_concurrency.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/features/runtime/multistream_concurrency.md) ## 概述 diff --git a/docs/mindspore/source_zh_cn/features/view.md b/docs/mindspore/source_zh_cn/features/view.md index 535f516fe3..5cff3be2b3 100644 --- a/docs/mindspore/source_zh_cn/features/view.md +++ b/docs/mindspore/source_zh_cn/features/view.md @@ -1,6 +1,6 @@ ## Tensor View 机制 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/features/view.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/features/view.md) View操作是指创建一个新的张量,该张量与原始张量共享相同的数据存储(data storage),但具有不同的形状或排列方式。换句话说,view操作不会复制数据,而是通过不同的视角来持有现有的数据。 diff --git 
a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_api_mapping.md b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_api_mapping.md index c800555280..ceb2a29f25 100644 --- a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_api_mapping.md +++ b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_api_mapping.md @@ -1,6 +1,6 @@ # PyTorch与MindSpore API映射表 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_api_mapping.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_api_mapping.md) 由社区提供的PyTorch APIs和MindSpore APIs之间的映射,可能在参数、输入、输出、逻辑功能和特定场景等方面存在差异,可详见各API描述或已提供的差异对比。 @@ -18,7 +18,7 @@ API映射一致标准:API功能一致,参数个数或顺序一致,参数 (2)MindSpore的API不支持传入复数类型的参数。 -**例外场景2**:相较于MindSpore的API,PyTorch的API多出的参数是[通用差异参数](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#通用差异参数表)。通用差异参数存在的原因是PyTorch有部分参数是为性能优化等非功能性而增加的参数,MindSpore的性能优化机制与PyTorch不同。 +**例外场景2**:相较于MindSpore的API,PyTorch的API多出的参数是[通用差异参数](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#通用差异参数表)。通用差异参数存在的原因是PyTorch有部分参数是为性能优化等非功能性而增加的参数,MindSpore的性能优化机制与PyTorch不同。 **例外场景3**:如果能保证MindSpore的API在使用默认配置(或用户不配置)的情况下,能够实现与PyTorch对应API完全一致的功能,则MindSpore的API多于PyTorch的API的参数,功能不被认为是差异。 @@ -50,472 +50,472 @@ mindspore.mint.argmax只有一种API形式,即mindspore.mint.argmax(input, dim | PyTorch 2.1 APIs | MindSpore APIs | 说明 | | -------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------ | -| [torch.abs](https://pytorch.org/docs/2.1/generated/torch.abs.html) | [mindspore.mint.abs](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.abs.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.acos](https://pytorch.org/docs/2.1/generated/torch.acos.html) | [mindspore.mint.acos](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.acos.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.acosh](https://pytorch.org/docs/2.1/generated/torch.acosh.html)| [mindspore.mint.acosh](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.acosh.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.add](https://pytorch.org/docs/2.1/generated/torch.add.html)| [mindspore.mint.add](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.add.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.addbmm](https://pytorch.org/docs/2.1/generated/torch.addbmm.html)| [mindspore.mint.addbmm](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.addbmm.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| 
[torch.addmm](https://pytorch.org/docs/2.1/generated/torch.addmm.html)| [mindspore.mint.addmm](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.addmm.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.addmv](https://pytorch.org/docs/2.1/generated/torch.addmv.html)| [mindspore.mint.addmv](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.addmv.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.all](https://pytorch.org/docs/2.1/generated/torch.all.html#torch.all) | [mindspore.mint.all](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.all.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.allclose](https://pytorch.org/docs/2.1/generated/torch.allclose.html)| [mindspore.mint.allclose](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.allclose.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.amax](https://pytorch.org/docs/2.1/generated/torch.amax.html)| [mindspore.mint.amax](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.amax.html)| 功能一致,参数dim默认值不同 | -| [torch.amin](https://pytorch.org/docs/2.1/generated/torch.amin.html)| [mindspore.mint.amin](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.amin.html)| 功能一致,参数dim默认值不同 | -| [torch.any](https://pytorch.org/docs/2.1/generated/torch.any.html#torch.any) | [mindspore.mint.any](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.any.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.arange](https://pytorch.org/docs/2.1/generated/torch.arange.html)| [mindspore.mint.arange](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.arange.html)| 功能一致,参数end默认值不同 | -| [torch.arccos](https://pytorch.org/docs/2.1/generated/torch.arccos.html) | [mindspore.mint.arccos](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.arccos.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.arccosh](https://pytorch.org/docs/2.1/generated/torch.arccosh.html) | [mindspore.mint.arccosh](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.arccosh.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.arcsin](https://pytorch.org/docs/2.1/generated/torch.arcsin.html) | [mindspore.mint.arcsin](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.arcsin.html) | 功能一致,参数名不一致| -| [torch.arcsinh](https://pytorch.org/docs/2.1/generated/torch.arcsinh.html) | [mindspore.mint.arcsinh](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.arcsinh.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.arctan](https://pytorch.org/docs/2.1/generated/torch.arctan.html) | [mindspore.mint.arctan](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.arctan.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| 
[torch.arctan2](https://pytorch.org/docs/2.1/generated/torch.arctan2.html)| [mindspore.mint.arctan2](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.arctan2.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.arctanh](https://pytorch.org/docs/2.1/generated/torch.arctanh.html) | [mindspore.mint.arctanh](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.arctanh.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.argmax](https://pytorch.org/docs/2.1/generated/torch.argmax.html) | [mindspore.mint.argmax](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.argmax.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.argmin](https://pytorch.org/docs/2.1/generated/torch.argmin.html) | [mindspore.mint.argmin](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.argmin.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.argsort](https://pytorch.org/docs/2.1/generated/torch.argsort.html)| [mindspore.mint.argsort](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.argsort.html)| [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.asin](https://pytorch.org/docs/2.1/generated/torch.asin.html) | [mindspore.mint.asin](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.asin.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.asinh](https://pytorch.org/docs/2.1/generated/torch.asinh.html)| [mindspore.mint.asinh](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.asinh.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.atan](https://pytorch.org/docs/2.1/generated/torch.atan.html) | [mindspore.mint.atan](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.atan.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.atan2](https://pytorch.org/docs/2.1/generated/torch.atan2.html) | [mindspore.mint.atan2](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.atan2.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.atanh](https://pytorch.org/docs/2.1/generated/torch.atanh.html)| [mindspore.mint.atanh](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.atanh.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.baddbmm](https://pytorch.org/docs/1.8.1/generated/torch.baddbmm.html) | [mindspore.mint.baddbmm](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.baddbmm.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.bernoulli](https://pytorch.org/docs/2.1/generated/torch.bernoulli.html)| 
[mindspore.mint.bernoulli](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.bernoulli.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.bincount](https://pytorch.org/docs/2.1/generated/torch.bincount.html)| [mindspore.mint.bincount](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.bincount.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.bitwise_and](https://pytorch.org/docs/2.1/generated/torch.bitwise_and.html) | [mindspore.mint.bitwise_and](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.bitwise_and.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.bitwise_or](https://pytorch.org/docs/2.1/generated/torch.bitwise_or.html) | [mindspore.mint.bitwise_or](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.bitwise_or.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.bitwise_xor](https://pytorch.org/docs/2.1/generated/torch.bitwise_xor.html) | [mindspore.mint.bitwise_xor](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.bitwise_xor.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.bmm](https://pytorch.org/docs/2.1/generated/torch.bmm.html) | [mindspore.mint.bmm](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.bmm.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.broadcast_to](https://pytorch.org/docs/2.1/generated/torch.broadcast_to.html) | [mindspore.mint.broadcast_to](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.broadcast_to.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.cat](https://pytorch.org/docs/2.1/generated/torch.cat.html) | [mindspore.mint.cat](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.cat.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.cdist](https://pytorch.org/docs/2.1/generated/torch.cdist.html)| [mindspore.mint.cdist](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.cdist.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.chunk](https://pytorch.org/docs/2.1/generated/torch.chunk.html)| [mindspore.mint.chunk](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.chunk.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.ceil](https://pytorch.org/docs/2.1/generated/torch.ceil.html) | [mindspore.mint.ceil](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.ceil.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.clamp](https://pytorch.org/docs/2.1/generated/torch.clamp.html) | [mindspore.mint.clamp](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.clamp.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| 
[torch.clone](https://pytorch.org/docs/2.1/generated/torch.clone.html)| [mindspore.mint.clone](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.clone.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.concat](https://pytorch.org/docs/2.1/generated/torch.concat.html)| [mindspore.mint.concat](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.concat.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.count_nonzero](https://pytorch.org/docs/2.1/generated/torch.count_nonzero.html)| [mindspore.mint.count_nonzero](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.count_nonzero.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.cos](https://pytorch.org/docs/2.1/generated/torch.cos.html) | [mindspore.mint.cos](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.cos.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.cosh](https://pytorch.org/docs/2.1/generated/torch.cosh.html) | [mindspore.mint.cosh](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.cosh.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.cross](https://pytorch.org/docs/2.1/generated/torch.cross.html) | [mindspore.mint.cross](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.cross.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.cummax](https://pytorch.org/docs/2.1/generated/torch.cummax.html) | [mindspore.mint.cummax](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.cummax.html) |[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.cummin](https://pytorch.org/docs/2.1/generated/torch.cummin.html) | [mindspore.mint.cummin](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.cummin.html) |[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.cumprod](https://pytorch.org/docs/2.1/generated/torch.cumprod.html)| [mindspore.mint.cumprod](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.cumprod.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.cumsum](https://pytorch.org/docs/2.1/generated/torch.cumsum.html) | [mindspore.mint.cumsum](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.cumsum.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.diff](https://pytorch.org/docs/2.1/generated/torch.diff.html)| [mindspore.mint.diff](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.diff.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.div](https://pytorch.org/docs/2.1/generated/torch.div.html) | [mindspore.mint.div](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.div.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| 
[torch.divide](https://pytorch.org/docs/2.1/generated/torch.divide.html) | [mindspore.mint.divide](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.divide.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.dot](https://pytorch.org/docs/2.1/generated/torch.dot.html)| [mindspore.mint.dot](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.dot.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.einsum](https://pytorch.org/docs/2.1/generated/torch.einsum.html)| [mindspore.mint.einsum](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.einsum.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.empty](https://pytorch.org/docs/2.1/generated/torch.empty.html)| [mindspore.mint.empty](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.empty.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.empty_like](https://pytorch.org/docs/2.1/generated/torch.empty_like.html)| [mindspore.mint.empty_like](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.empty_like.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.equal](https://pytorch.org/docs/2.1/generated/torch.equal.html)| [mindspore.mint.equal](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.equal.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.eq](https://pytorch.org/docs/2.1/generated/torch.eq.html) | [mindspore.mint.eq](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.eq.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.erf](https://pytorch.org/docs/2.1/generated/torch.erf.html) | [mindspore.mint.erf](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.erf.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.erfc](https://pytorch.org/docs/2.1/generated/torch.erfc.html) | [mindspore.mint.erfc](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.erfc.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.erfinv](https://pytorch.org/docs/2.1/generated/torch.erfinv.html) | [mindspore.mint.erfinv](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.erfinv.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.exp](https://pytorch.org/docs/2.1/generated/torch.exp.html) | [mindspore.mint.exp](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.exp.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.exp2](https://pytorch.org/docs/2.1/generated/torch.exp2.html) | [mindspore.mint.exp2](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.exp2.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| 
[torch.expm1](https://pytorch.org/docs/2.1/generated/torch.expm1.html) | [mindspore.mint.expm1](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.expm1.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.eye](https://pytorch.org/docs/2.1/generated/torch.eye.html) | [mindspore.mint.eye](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.eye.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.float_power](https://pytorch.org/docs/2.1/generated/torch.float_power.html)| [mindspore.mint.float_power](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.float_power.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.flatten](https://pytorch.org/docs/2.1/generated/torch.flatten.html) | [mindspore.mint.flatten](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.flatten.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.flip](https://pytorch.org/docs/2.1/generated/torch.flip.html) | [mindspore.mint.flip](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.flip.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.floor](https://pytorch.org/docs/2.1/generated/torch.floor.html) | [mindspore.mint.floor](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.floor.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.fmod](https://pytorch.org/docs/2.1/generated/torch.fmod.html)| [mindspore.mint.fmod](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.fmod.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.frac](https://pytorch.org/docs/2.1/generated/torch.frac.html)| [mindspore.mint.frac](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.frac.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.full](https://pytorch.org/docs/2.1/generated/torch.full.html) | [mindspore.mint.full](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.full.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.full_like](https://pytorch.org/docs/2.1/generated/torch.full_like.html)| [mindspore.mint.full_like](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.full_like.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.gather](https://pytorch.org/docs/2.1/generated/torch.gather.html)| [mindspore.mint.gather](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.gather.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.greater](https://pytorch.org/docs/2.1/generated/torch.greater.html) | [mindspore.mint.greater](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.greater.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| 
[torch.greater_equal](https://pytorch.org/docs/2.1/generated/torch.greater_equal.html) | [mindspore.mint.greater_equal](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.greater_equal.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.gt](https://pytorch.org/docs/2.1/generated/torch.gt.html) | [mindspore.mint.gt](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.gt.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.histc](https://pytorch.org/docs/2.1/generated/torch.histc.html)| [mindspore.mint.histc](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.histc.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.index_select](https://pytorch.org/docs/2.1/generated/torch.index_select.html) | [mindspore.mint.index_select](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.index_select.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.inverse](https://pytorch.org/docs/2.1/generated/torch.inverse.html) | [mindspore.mint.inverse](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.inverse.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.isclose](https://pytorch.org/docs/2.1/generated/torch.isclose.html) | [mindspore.mint.isclose](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.isclose.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.isfinite](https://pytorch.org/docs/2.1/generated/torch.isfinite.html) | [mindspore.mint.isfinite](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.isfinite.html) |[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.isinf](https://pytorch.org/docs/2.1/generated/torch.isinf.html)| [mindspore.mint.isinf](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.isinf.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.isneginf](https://pytorch.org/docs/2.1/generated/torch.isneginf.html)| [mindspore.mint.isneginf](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.isneginf.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.le](https://pytorch.org/docs/2.1/generated/torch.le.html) | [mindspore.mint.le](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.le.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.lerp](https://pytorch.org/docs/2.1/generated/torch.lerp.html)| [mindspore.mint.lerp](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.lerp.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.less](https://pytorch.org/docs/2.1/generated/torch.less.html) | [mindspore.mint.less](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.less.html) | 
[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.less_equal](https://pytorch.org/docs/2.1/generated/torch.less_equal.html) | [mindspore.mint.less_equal](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.less_equal.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.linspace](https://pytorch.org/docs/2.1/generated/torch.linspace.html) | [mindspore.mint.linspace](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.linspace.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.log](https://pytorch.org/docs/2.1/generated/torch.log.html) | [mindspore.mint.log](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.log.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.log2](https://pytorch.org/docs/2.1/generated/torch.log2.html)| [mindspore.mint.log2](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.log2.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.log10](https://pytorch.org/docs/2.1/generated/torch.log10.html)| [mindspore.mint.log10](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.log10.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.logaddexp](https://pytorch.org/docs/2.1/generated/torch.logaddexp.html)| [mindspore.mint.logaddexp](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.logaddexp.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.logsumexp](https://pytorch.org/docs/2.1/generated/torch.logsumexp.html)| [mindspore.mint.logsumexp](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.logsumexp.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.log1p](https://pytorch.org/docs/2.1/generated/torch.log1p.html#torch.log1p) | [mindspore.mint.log1p](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.log1p.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.logical_and](https://pytorch.org/docs/2.1/generated/torch.logical_and.html) | [mindspore.mint.logical_and](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.logical_and.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.logical_not](https://pytorch.org/docs/2.1/generated/torch.logical_not.html) | [mindspore.mint.logical_not](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.logical_not.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.logical_or](https://pytorch.org/docs/2.1/generated/torch.logical_or.html) | [mindspore.mint.logical_or](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.logical_or.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.logical_xor](https://pytorch.org/docs/2.1/generated/torch.logical_xor.html) 
| [mindspore.mint.logical_xor](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.logical_xor.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.lt](https://pytorch.org/docs/2.1/generated/torch.lt.html) | [mindspore.mint.lt](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.lt.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.masked_select](https://pytorch.org/docs/2.1/generated/torch.masked_select.html) | [mindspore.mint.masked_select](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.masked_select.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.matmul](https://pytorch.org/docs/2.1/generated/torch.matmul.html#torch.matmul) | [mindspore.mint.matmul](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.matmul.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.max](https://pytorch.org/docs/2.1/generated/torch.max.html) | [mindspore.mint.max](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.max.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.maximum](https://pytorch.org/docs/2.1/generated/torch.maximum.html) | [mindspore.mint.maximum](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.maximum.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.mean](https://pytorch.org/docs/2.1/generated/torch.mean.html) | [mindspore.mint.mean](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.mean.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.median](https://pytorch.org/docs/2.1/generated/torch.median.html) | [mindspore.mint.median](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.median.html) | 功能一致,MindSpore多dim和keepdim两个参数 | -| [torch.meshgrid](https://pytorch.org/docs/2.1/generated/torch.meshgrid.html)| [mindspore.mint.meshgrid](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.meshgrid.html)| 功能一致,参数indexing默认值不同 | -| [torch.mul](https://pytorch.org/docs/2.1/generated/torch.mul.html#torch.mul) | [mindspore.mint.mul](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.mul.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.min](https://pytorch.org/docs/2.1/generated/torch.min.html) | [mindspore.mint.min](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.min.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.minimum](https://pytorch.org/docs/2.1/generated/torch.minimum.html) | [mindspore.mint.minimum](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.minimum.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.mm](https://pytorch.org/docs/2.1/generated/torch.mm.html) | [mindspore.mint.mm](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.mm.html) | 
[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.multinomial](https://pytorch.org/docs/2.1/generated/torch.multinomial.html) | [mindspore.mint.multinomial](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.multinomial.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.mv](https://pytorch.org/docs/2.1/generated/torch.mv.html) | [mindspore.mint.mv](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.mv.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.nansum](https://pytorch.org/docs/2.1/generated/torch.nansum.html) | [mindspore.mint.nansum](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nansum.html) | 功能一致,参数列表不一致 | -| [torch.nan_to_num](https://pytorch.org/docs/2.1/generated/torch.nan_to_num.html) | [mindspore.mint.nan_to_num](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nan_to_num.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.narrow](https://pytorch.org/docs/2.1/generated/torch.narrow.html) | [mindspore.mint.narrow](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.narrow.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.ne](https://pytorch.org/docs/2.1/generated/torch.ne.html)| [mindspore.mint.ne](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.ne.html)| [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.neg](https://pytorch.org/docs/2.1/generated/torch.neg.html)| [mindspore.mint.neg](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.neg.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.negative](https://pytorch.org/docs/2.1/generated/torch.negative.html) | [mindspore.mint.negative](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.negative.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.nonzero](https://pytorch.org/docs/2.1/generated/torch.nonzero.html) | [mindspore.mint.nonzero](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nonzero.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.normal](https://pytorch.org/docs/2.1/generated/torch.normal.html) | [mindspore.mint.normal](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.normal.html) | 接口重载的参数不同 | -| [torch.norm](https://pytorch.org/docs/2.1/generated/torch.norm.html) | [mindspore.mint.norm](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.norm.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.ones](https://pytorch.org/docs/2.1/generated/torch.ones.html) | [mindspore.mint.ones](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.ones.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| 
[torch.ones_like](https://pytorch.org/docs/2.1/torch.html#torch.ones_like) | [mindspore.mint.ones_like](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.ones_like.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.outer](https://pytorch.org/docs/2.1/generated/torch.outer.html) | [mindspore.mint.outer](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.outer.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.permute](https://pytorch.org/docs/2.1/generated/torch.permute.html) | [mindspore.mint.permute](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.permute.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.polar](https://pytorch.org/docs/2.1/generated/torch.polar.html) | [mindspore.mint.polar](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.polar.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.pow](https://pytorch.org/docs/2.1/generated/torch.pow.html) | [mindspore.mint.pow](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.pow.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.prod](https://pytorch.org/docs/2.1/generated/torch.prod.html) | [mindspore.mint.prod](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.prod.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.rand](https://pytorch.org/docs/2.1/generated/torch.rand.html) | [mindspore.mint.rand](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.rand.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.rand_like](https://pytorch.org/docs/2.1/generated/torch.rand_like.html) | [mindspore.mint.rand_like](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.rand_like.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.randint](https://pytorch.org/docs/2.1/generated/torch.randint.html) | [mindspore.mint.randint](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.randint.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.randint_like](https://pytorch.org/docs/2.1/generated/torch.randint_like.html) | [mindspore.mint.randint_like](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.randint_like.html) | 功能一致,参数low默认值不同 | -| [torch.randn](https://pytorch.org/docs/2.1/generated/torch.randn.html) | [mindspore.mint.randn](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.randn.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.randn_like](https://pytorch.org/docs/2.1/generated/torch.randn_like.html) | [mindspore.mint.randn_like](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.randn_like.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| 
[torch.randperm](https://pytorch.org/docs/2.1/generated/torch.randperm.html) | [mindspore.mint.randperm](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.randperm.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.ravel](https://pytorch.org/docs/2.1/generated/torch.ravel.html) | [mindspore.mint.ravel](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.ravel.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.reciprocal](https://pytorch.org/docs/2.1/generated/torch.reciprocal.html) | [mindspore.mint.reciprocal](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.reciprocal.html) |[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.remainder](https://pytorch.org/docs/2.1/generated/torch.remainder.html) | [mindspore.mint.remainder](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.remainder.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.repeat_interleave](https://pytorch.org/docs/2.1/generated/torch.repeat_interleave.html) | [mindspore.mint.repeat_interleave](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.repeat_interleave.html) | 功能一致,PyTorch涉及重载 | -| [torch.reshape](https://pytorch.org/docs/2.1/generated/torch.reshape.html) | [mindspore.mint.reshape](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.reshape.html) |[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.roll](https://pytorch.org/docs/2.1/generated/torch.roll.html) | [mindspore.mint.roll](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.roll.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.round](https://pytorch.org/docs/2.1/generated/torch.round.html)| [mindspore.mint.round](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.round.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.rsqrt](https://pytorch.org/docs/2.1/generated/torch.rsqrt.html) | [mindspore.mint.rsqrt](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.rsqrt.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.scatter](https://pytorch.org/docs/2.1/generated/torch.scatter.html) | [mindspore.mint.scatter](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.scatter.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.scatter_add](https://pytorch.org/docs/2.1/generated/torch.scatter_add.html) | [mindspore.mint.scatter_add](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.scatter_add.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.searchsorted](https://pytorch.org/docs/2.1/generated/torch.searchsorted.html) | [mindspore.mint.searchsorted](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.searchsorted.html) | 功能一致,参数side默认值不同 | -| 
[torch.select](https://pytorch.org/docs/2.1/generated/torch.select.html) | [mindspore.mint.select](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.select.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.sigmoid](https://pytorch.org/docs/2.1/generated/torch.sigmoid.html) | [mindspore.mint.sigmoid](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.sigmoid.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.sign](https://pytorch.org/docs/2.1/generated/torch.sign.html) | [mindspore.mint.sign](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.sign.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.sin](https://pytorch.org/docs/2.1/generated/torch.sin.html)| [mindspore.mint.sin](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.sin.html)| [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.sinc](https://pytorch.org/docs/2.1/generated/torch.sinc.html)| [mindspore.mint.sinc](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.sinc.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.sinh](https://pytorch.org/docs/2.1/generated/torch.sinh.html)| [mindspore.mint.sinh](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.sinh.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.sort](https://pytorch.org/docs/2.1/generated/torch.sort.html) | [mindspore.mint.sort](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.sort.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.split](https://pytorch.org/docs/2.1/generated/torch.split.html) | [mindspore.mint.split](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.split.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.sqrt](https://pytorch.org/docs/2.1/generated/torch.sqrt.html) | [mindspore.mint.sqrt](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.sqrt.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.square](https://pytorch.org/docs/2.1/generated/torch.square.html)| [mindspore.mint.square](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.square.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.squeeze](https://pytorch.org/docs/2.1/generated/torch.squeeze.html) | [mindspore.mint.squeeze](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.squeeze.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.stack](https://pytorch.org/docs/2.1/generated/torch.stack.html) | [mindspore.mint.stack](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.stack.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| 
[torch.std](https://pytorch.org/docs/2.1/generated/torch.std.html) | [mindspore.mint.std](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.std.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.std_mean](https://pytorch.org/docs/2.1/generated/torch.std_mean.html) | [mindspore.mint.std_mean](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.std_mean.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.sub](https://pytorch.org/docs/2.1/generated/torch.sub.html#torch.sub) | [mindspore.mint.sub](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.sub.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.sum](https://pytorch.org/docs/2.1/generated/torch.sum.html) | [mindspore.mint.sum](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.sum.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.swapaxes](https://pytorch.org/docs/2.1/generated/torch.swapaxes.html) | [mindspore.mint.swapaxes](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.swapaxes.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.t](https://pytorch.org/docs/2.1/generated/torch.t.html) | [mindspore.mint.t](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.t.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.tan](https://pytorch.org/docs/2.1/generated/torch.tan.html)| [mindspore.mint.tan](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.tan.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.tanh](https://pytorch.org/docs/2.1/generated/torch.tanh.html) | [mindspore.mint.tanh](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.tanh.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.tile](https://pytorch.org/docs/2.1/generated/torch.tile.html) | [mindspore.mint.tile](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.tile.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.topk](https://pytorch.org/docs/2.1/generated/torch.topk.html) | [mindspore.mint.topk](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.topk.html) | 功能一致,参数dim默认值不同 | -| [torch.trace](https://pytorch.org/docs/2.1/generated/torch.trace.html) | [mindspore.mint.trace](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.trace.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.transpose](https://pytorch.org/docs/2.1/generated/torch.transpose.html) | [mindspore.mint.transpose](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.transpose.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.tril](https://pytorch.org/docs/2.1/generated/torch.tril.html) | 
[mindspore.mint.tril](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.tril.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.triu](https://pytorch.org/docs/2.1/generated/torch.triu.html) | [mindspore.mint.triu](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.triu.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.trunc](https://pytorch.org/docs/2.1/generated/torch.trunc.html)| [mindspore.mint.trunc](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.trunc.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.unbind](https://pytorch.org/docs/2.1/generated/torch.unbind.html) | [mindspore.mint.unbind](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.unbind.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.unique](https://pytorch.org/docs/2.1/generated/torch.unique.html#torch.unique) | [mindspore.mint.unique](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.unique.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.unique_consecutive](https://pytorch.org/docs/2.1/generated/torch.unique_consecutive.html) | [mindspore.mint.unique_consecutive](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.unique_consecutive.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.unsqueeze](https://pytorch.org/docs/2.1/generated/torch.unsqueeze.html) | [mindspore.mint.unsqueeze](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.unsqueeze.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.var](https://pytorch.org/docs/2.1/generated/torch.var.html) | [mindspore.mint.var](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.var.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.var_mean](https://pytorch.org/docs/2.1/generated/torch.var_mean.html) | [mindspore.mint.var_mean](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.var_mean.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.where](https://pytorch.org/docs/2.1/generated/torch.where.html) | [mindspore.mint.where](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.where.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.xlogy](https://pytorch.org/docs/2.1/generated/torch.xlogy.html) | [mindspore.mint.xlogy](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.xlogy.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.zeros](https://pytorch.org/docs/2.1/generated/torch.zeros.html) | [mindspore.mint.zeros](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.zeros.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| 
[torch.zeros_like](https://pytorch.org/docs/2.1/generated/torch.zeros_like.html#torch-zeros-like) | [mindspore.mint.zeros_like](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.zeros_like.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.abs](https://pytorch.org/docs/2.1/generated/torch.abs.html) | [mindspore.mint.abs](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.abs.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.acos](https://pytorch.org/docs/2.1/generated/torch.acos.html) | [mindspore.mint.acos](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.acos.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.acosh](https://pytorch.org/docs/2.1/generated/torch.acosh.html)| [mindspore.mint.acosh](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.acosh.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.add](https://pytorch.org/docs/2.1/generated/torch.add.html)| [mindspore.mint.add](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.add.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.addbmm](https://pytorch.org/docs/2.1/generated/torch.addbmm.html)| [mindspore.mint.addbmm](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.addbmm.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.addmm](https://pytorch.org/docs/2.1/generated/torch.addmm.html)| [mindspore.mint.addmm](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.addmm.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.addmv](https://pytorch.org/docs/2.1/generated/torch.addmv.html)| [mindspore.mint.addmv](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.addmv.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.all](https://pytorch.org/docs/2.1/generated/torch.all.html#torch.all) | [mindspore.mint.all](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.all.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.allclose](https://pytorch.org/docs/2.1/generated/torch.allclose.html)| [mindspore.mint.allclose](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.allclose.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.amax](https://pytorch.org/docs/2.1/generated/torch.amax.html)| [mindspore.mint.amax](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.amax.html)| 功能一致,参数dim默认值不同 | +| [torch.amin](https://pytorch.org/docs/2.1/generated/torch.amin.html)| [mindspore.mint.amin](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.amin.html)| 功能一致,参数dim默认值不同 | +| [torch.any](https://pytorch.org/docs/2.1/generated/torch.any.html#torch.any) | 
[mindspore.mint.any](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.any.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.arange](https://pytorch.org/docs/2.1/generated/torch.arange.html)| [mindspore.mint.arange](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.arange.html)| 功能一致,参数end默认值不同 | +| [torch.arccos](https://pytorch.org/docs/2.1/generated/torch.arccos.html) | [mindspore.mint.arccos](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.arccos.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.arccosh](https://pytorch.org/docs/2.1/generated/torch.arccosh.html) | [mindspore.mint.arccosh](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.arccosh.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.arcsin](https://pytorch.org/docs/2.1/generated/torch.arcsin.html) | [mindspore.mint.arcsin](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.arcsin.html) | 功能一致,参数名不一致| +| [torch.arcsinh](https://pytorch.org/docs/2.1/generated/torch.arcsinh.html) | [mindspore.mint.arcsinh](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.arcsinh.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.arctan](https://pytorch.org/docs/2.1/generated/torch.arctan.html) | [mindspore.mint.arctan](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.arctan.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.arctan2](https://pytorch.org/docs/2.1/generated/torch.arctan2.html)| [mindspore.mint.arctan2](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.arctan2.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.arctanh](https://pytorch.org/docs/2.1/generated/torch.arctanh.html) | [mindspore.mint.arctanh](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.arctanh.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.argmax](https://pytorch.org/docs/2.1/generated/torch.argmax.html) | [mindspore.mint.argmax](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.argmax.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.argmin](https://pytorch.org/docs/2.1/generated/torch.argmin.html) | [mindspore.mint.argmin](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.argmin.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.argsort](https://pytorch.org/docs/2.1/generated/torch.argsort.html)| [mindspore.mint.argsort](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.argsort.html)| [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.asin](https://pytorch.org/docs/2.1/generated/torch.asin.html) | [mindspore.mint.asin](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.asin.html) | 
[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.asinh](https://pytorch.org/docs/2.1/generated/torch.asinh.html)| [mindspore.mint.asinh](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.asinh.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.atan](https://pytorch.org/docs/2.1/generated/torch.atan.html) | [mindspore.mint.atan](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.atan.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.atan2](https://pytorch.org/docs/2.1/generated/torch.atan2.html) | [mindspore.mint.atan2](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.atan2.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.atanh](https://pytorch.org/docs/2.1/generated/torch.atanh.html)| [mindspore.mint.atanh](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.atanh.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.baddbmm](https://pytorch.org/docs/1.8.1/generated/torch.baddbmm.html) | [mindspore.mint.baddbmm](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.baddbmm.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.bernoulli](https://pytorch.org/docs/2.1/generated/torch.bernoulli.html)| [mindspore.mint.bernoulli](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.bernoulli.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.bincount](https://pytorch.org/docs/2.1/generated/torch.bincount.html)| [mindspore.mint.bincount](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.bincount.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.bitwise_and](https://pytorch.org/docs/2.1/generated/torch.bitwise_and.html) | [mindspore.mint.bitwise_and](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.bitwise_and.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.bitwise_or](https://pytorch.org/docs/2.1/generated/torch.bitwise_or.html) | [mindspore.mint.bitwise_or](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.bitwise_or.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.bitwise_xor](https://pytorch.org/docs/2.1/generated/torch.bitwise_xor.html) | [mindspore.mint.bitwise_xor](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.bitwise_xor.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.bmm](https://pytorch.org/docs/2.1/generated/torch.bmm.html) | [mindspore.mint.bmm](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.bmm.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.broadcast_to](https://pytorch.org/docs/2.1/generated/torch.broadcast_to.html) | 
[mindspore.mint.broadcast_to](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.broadcast_to.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.cat](https://pytorch.org/docs/2.1/generated/torch.cat.html) | [mindspore.mint.cat](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.cat.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.cdist](https://pytorch.org/docs/2.1/generated/torch.cdist.html)| [mindspore.mint.cdist](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.cdist.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.chunk](https://pytorch.org/docs/2.1/generated/torch.chunk.html)| [mindspore.mint.chunk](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.chunk.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.ceil](https://pytorch.org/docs/2.1/generated/torch.ceil.html) | [mindspore.mint.ceil](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.ceil.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.clamp](https://pytorch.org/docs/2.1/generated/torch.clamp.html) | [mindspore.mint.clamp](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.clamp.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.clone](https://pytorch.org/docs/2.1/generated/torch.clone.html)| [mindspore.mint.clone](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.clone.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.concat](https://pytorch.org/docs/2.1/generated/torch.concat.html)| [mindspore.mint.concat](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.concat.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.count_nonzero](https://pytorch.org/docs/2.1/generated/torch.count_nonzero.html)| [mindspore.mint.count_nonzero](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.count_nonzero.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.cos](https://pytorch.org/docs/2.1/generated/torch.cos.html) | [mindspore.mint.cos](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.cos.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.cosh](https://pytorch.org/docs/2.1/generated/torch.cosh.html) | [mindspore.mint.cosh](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.cosh.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.cross](https://pytorch.org/docs/2.1/generated/torch.cross.html) | [mindspore.mint.cross](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.cross.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| 
[torch.cummax](https://pytorch.org/docs/2.1/generated/torch.cummax.html) | [mindspore.mint.cummax](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.cummax.html) |[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.cummin](https://pytorch.org/docs/2.1/generated/torch.cummin.html) | [mindspore.mint.cummin](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.cummin.html) |[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.cumprod](https://pytorch.org/docs/2.1/generated/torch.cumprod.html)| [mindspore.mint.cumprod](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.cumprod.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.cumsum](https://pytorch.org/docs/2.1/generated/torch.cumsum.html) | [mindspore.mint.cumsum](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.cumsum.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.diff](https://pytorch.org/docs/2.1/generated/torch.diff.html)| [mindspore.mint.diff](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.diff.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.div](https://pytorch.org/docs/2.1/generated/torch.div.html) | [mindspore.mint.div](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.div.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.divide](https://pytorch.org/docs/2.1/generated/torch.divide.html) | [mindspore.mint.divide](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.divide.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.dot](https://pytorch.org/docs/2.1/generated/torch.dot.html)| [mindspore.mint.dot](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.dot.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.einsum](https://pytorch.org/docs/2.1/generated/torch.einsum.html)| [mindspore.mint.einsum](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.einsum.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.empty](https://pytorch.org/docs/2.1/generated/torch.empty.html)| [mindspore.mint.empty](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.empty.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.empty_like](https://pytorch.org/docs/2.1/generated/torch.empty_like.html)| [mindspore.mint.empty_like](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.empty_like.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.equal](https://pytorch.org/docs/2.1/generated/torch.equal.html)| 
[mindspore.mint.equal](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.equal.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.eq](https://pytorch.org/docs/2.1/generated/torch.eq.html) | [mindspore.mint.eq](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.eq.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.erf](https://pytorch.org/docs/2.1/generated/torch.erf.html) | [mindspore.mint.erf](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.erf.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.erfc](https://pytorch.org/docs/2.1/generated/torch.erfc.html) | [mindspore.mint.erfc](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.erfc.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.erfinv](https://pytorch.org/docs/2.1/generated/torch.erfinv.html) | [mindspore.mint.erfinv](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.erfinv.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.exp](https://pytorch.org/docs/2.1/generated/torch.exp.html) | [mindspore.mint.exp](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.exp.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.exp2](https://pytorch.org/docs/2.1/generated/torch.exp2.html) | [mindspore.mint.exp2](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.exp2.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.expm1](https://pytorch.org/docs/2.1/generated/torch.expm1.html) | [mindspore.mint.expm1](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.expm1.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.eye](https://pytorch.org/docs/2.1/generated/torch.eye.html) | [mindspore.mint.eye](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.eye.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.float_power](https://pytorch.org/docs/2.1/generated/torch.float_power.html)| [mindspore.mint.float_power](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.float_power.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.flatten](https://pytorch.org/docs/2.1/generated/torch.flatten.html) | [mindspore.mint.flatten](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.flatten.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.flip](https://pytorch.org/docs/2.1/generated/torch.flip.html) | [mindspore.mint.flip](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.flip.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.floor](https://pytorch.org/docs/2.1/generated/torch.floor.html) | 
[mindspore.mint.floor](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.floor.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.fmod](https://pytorch.org/docs/2.1/generated/torch.fmod.html)| [mindspore.mint.fmod](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.fmod.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.frac](https://pytorch.org/docs/2.1/generated/torch.frac.html)| [mindspore.mint.frac](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.frac.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.full](https://pytorch.org/docs/2.1/generated/torch.full.html) | [mindspore.mint.full](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.full.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.full_like](https://pytorch.org/docs/2.1/generated/torch.full_like.html)| [mindspore.mint.full_like](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.full_like.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.gather](https://pytorch.org/docs/2.1/generated/torch.gather.html)| [mindspore.mint.gather](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.gather.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.greater](https://pytorch.org/docs/2.1/generated/torch.greater.html) | [mindspore.mint.greater](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.greater.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.greater_equal](https://pytorch.org/docs/2.1/generated/torch.greater_equal.html) | [mindspore.mint.greater_equal](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.greater_equal.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.gt](https://pytorch.org/docs/2.1/generated/torch.gt.html) | [mindspore.mint.gt](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.gt.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.histc](https://pytorch.org/docs/2.1/generated/torch.histc.html)| [mindspore.mint.histc](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.histc.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.index_select](https://pytorch.org/docs/2.1/generated/torch.index_select.html) | [mindspore.mint.index_select](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.index_select.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.inverse](https://pytorch.org/docs/2.1/generated/torch.inverse.html) | [mindspore.mint.inverse](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.inverse.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| 
[torch.isclose](https://pytorch.org/docs/2.1/generated/torch.isclose.html) | [mindspore.mint.isclose](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.isclose.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.isfinite](https://pytorch.org/docs/2.1/generated/torch.isfinite.html) | [mindspore.mint.isfinite](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.isfinite.html) |[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.isinf](https://pytorch.org/docs/2.1/generated/torch.isinf.html)| [mindspore.mint.isinf](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.isinf.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.isneginf](https://pytorch.org/docs/2.1/generated/torch.isneginf.html)| [mindspore.mint.isneginf](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.isneginf.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.le](https://pytorch.org/docs/2.1/generated/torch.le.html) | [mindspore.mint.le](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.le.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.lerp](https://pytorch.org/docs/2.1/generated/torch.lerp.html)| [mindspore.mint.lerp](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.lerp.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.less](https://pytorch.org/docs/2.1/generated/torch.less.html) | [mindspore.mint.less](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.less.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.less_equal](https://pytorch.org/docs/2.1/generated/torch.less_equal.html) | [mindspore.mint.less_equal](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.less_equal.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.linspace](https://pytorch.org/docs/2.1/generated/torch.linspace.html) | [mindspore.mint.linspace](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.linspace.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.log](https://pytorch.org/docs/2.1/generated/torch.log.html) | [mindspore.mint.log](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.log.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.log2](https://pytorch.org/docs/2.1/generated/torch.log2.html)| [mindspore.mint.log2](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.log2.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.log10](https://pytorch.org/docs/2.1/generated/torch.log10.html)| 
[mindspore.mint.log10](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.log10.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.logaddexp](https://pytorch.org/docs/2.1/generated/torch.logaddexp.html)| [mindspore.mint.logaddexp](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.logaddexp.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.logsumexp](https://pytorch.org/docs/2.1/generated/torch.logsumexp.html)| [mindspore.mint.logsumexp](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.logsumexp.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.log1p](https://pytorch.org/docs/2.1/generated/torch.log1p.html#torch.log1p) | [mindspore.mint.log1p](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.log1p.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.logical_and](https://pytorch.org/docs/2.1/generated/torch.logical_and.html) | [mindspore.mint.logical_and](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.logical_and.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.logical_not](https://pytorch.org/docs/2.1/generated/torch.logical_not.html) | [mindspore.mint.logical_not](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.logical_not.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.logical_or](https://pytorch.org/docs/2.1/generated/torch.logical_or.html) | [mindspore.mint.logical_or](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.logical_or.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.logical_xor](https://pytorch.org/docs/2.1/generated/torch.logical_xor.html) | [mindspore.mint.logical_xor](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.logical_xor.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.lt](https://pytorch.org/docs/2.1/generated/torch.lt.html) | [mindspore.mint.lt](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.lt.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.masked_select](https://pytorch.org/docs/2.1/generated/torch.masked_select.html) | [mindspore.mint.masked_select](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.masked_select.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.matmul](https://pytorch.org/docs/2.1/generated/torch.matmul.html#torch.matmul) | [mindspore.mint.matmul](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.matmul.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.max](https://pytorch.org/docs/2.1/generated/torch.max.html) | [mindspore.mint.max](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.max.html) | 
[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.maximum](https://pytorch.org/docs/2.1/generated/torch.maximum.html) | [mindspore.mint.maximum](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.maximum.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.mean](https://pytorch.org/docs/2.1/generated/torch.mean.html) | [mindspore.mint.mean](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.mean.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.median](https://pytorch.org/docs/2.1/generated/torch.median.html) | [mindspore.mint.median](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.median.html) | 功能一致,MindSpore多dim和keepdim两个参数 | +| [torch.meshgrid](https://pytorch.org/docs/2.1/generated/torch.meshgrid.html)| [mindspore.mint.meshgrid](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.meshgrid.html)| 功能一致,参数indexing默认值不同 | +| [torch.mul](https://pytorch.org/docs/2.1/generated/torch.mul.html#torch.mul) | [mindspore.mint.mul](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.mul.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.min](https://pytorch.org/docs/2.1/generated/torch.min.html) | [mindspore.mint.min](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.min.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.minimum](https://pytorch.org/docs/2.1/generated/torch.minimum.html) | [mindspore.mint.minimum](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.minimum.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.mm](https://pytorch.org/docs/2.1/generated/torch.mm.html) | [mindspore.mint.mm](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.mm.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.multinomial](https://pytorch.org/docs/2.1/generated/torch.multinomial.html) | [mindspore.mint.multinomial](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.multinomial.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.mv](https://pytorch.org/docs/2.1/generated/torch.mv.html) | [mindspore.mint.mv](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.mv.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.nansum](https://pytorch.org/docs/2.1/generated/torch.nansum.html) | [mindspore.mint.nansum](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nansum.html) | 功能一致,参数列表不一致 | +| [torch.nan_to_num](https://pytorch.org/docs/2.1/generated/torch.nan_to_num.html) | [mindspore.mint.nan_to_num](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nan_to_num.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.narrow](https://pytorch.org/docs/2.1/generated/torch.narrow.html) | 
[mindspore.mint.narrow](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.narrow.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.ne](https://pytorch.org/docs/2.1/generated/torch.ne.html)| [mindspore.mint.ne](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.ne.html)| [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.neg](https://pytorch.org/docs/2.1/generated/torch.neg.html)| [mindspore.mint.neg](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.neg.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.negative](https://pytorch.org/docs/2.1/generated/torch.negative.html) | [mindspore.mint.negative](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.negative.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.nonzero](https://pytorch.org/docs/2.1/generated/torch.nonzero.html) | [mindspore.mint.nonzero](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nonzero.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.normal](https://pytorch.org/docs/2.1/generated/torch.normal.html) | [mindspore.mint.normal](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.normal.html) | 接口重载的参数不同 | +| [torch.norm](https://pytorch.org/docs/2.1/generated/torch.norm.html) | [mindspore.mint.norm](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.norm.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.ones](https://pytorch.org/docs/2.1/generated/torch.ones.html) | [mindspore.mint.ones](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.ones.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.ones_like](https://pytorch.org/docs/2.1/torch.html#torch.ones_like) | [mindspore.mint.ones_like](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.ones_like.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.outer](https://pytorch.org/docs/2.1/generated/torch.outer.html) | [mindspore.mint.outer](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.outer.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.permute](https://pytorch.org/docs/2.1/generated/torch.permute.html) | [mindspore.mint.permute](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.permute.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.polar](https://pytorch.org/docs/2.1/generated/torch.polar.html) | [mindspore.mint.polar](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.polar.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.pow](https://pytorch.org/docs/2.1/generated/torch.pow.html) | 
[mindspore.mint.pow](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.pow.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.prod](https://pytorch.org/docs/2.1/generated/torch.prod.html) | [mindspore.mint.prod](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.prod.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.rand](https://pytorch.org/docs/2.1/generated/torch.rand.html) | [mindspore.mint.rand](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.rand.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.rand_like](https://pytorch.org/docs/2.1/generated/torch.rand_like.html) | [mindspore.mint.rand_like](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.rand_like.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.randint](https://pytorch.org/docs/2.1/generated/torch.randint.html) | [mindspore.mint.randint](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.randint.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.randint_like](https://pytorch.org/docs/2.1/generated/torch.randint_like.html) | [mindspore.mint.randint_like](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.randint_like.html) | 功能一致,参数low默认值不同 | +| [torch.randn](https://pytorch.org/docs/2.1/generated/torch.randn.html) | [mindspore.mint.randn](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.randn.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.randn_like](https://pytorch.org/docs/2.1/generated/torch.randn_like.html) | [mindspore.mint.randn_like](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.randn_like.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.randperm](https://pytorch.org/docs/2.1/generated/torch.randperm.html) | [mindspore.mint.randperm](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.randperm.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.ravel](https://pytorch.org/docs/2.1/generated/torch.ravel.html) | [mindspore.mint.ravel](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.ravel.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.reciprocal](https://pytorch.org/docs/2.1/generated/torch.reciprocal.html) | [mindspore.mint.reciprocal](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.reciprocal.html) |[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.remainder](https://pytorch.org/docs/2.1/generated/torch.remainder.html) | [mindspore.mint.remainder](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.remainder.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| 
[torch.repeat_interleave](https://pytorch.org/docs/2.1/generated/torch.repeat_interleave.html) | [mindspore.mint.repeat_interleave](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.repeat_interleave.html) | 功能一致,PyTorch涉及重载 | +| [torch.reshape](https://pytorch.org/docs/2.1/generated/torch.reshape.html) | [mindspore.mint.reshape](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.reshape.html) |[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.roll](https://pytorch.org/docs/2.1/generated/torch.roll.html) | [mindspore.mint.roll](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.roll.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.round](https://pytorch.org/docs/2.1/generated/torch.round.html)| [mindspore.mint.round](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.round.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.rsqrt](https://pytorch.org/docs/2.1/generated/torch.rsqrt.html) | [mindspore.mint.rsqrt](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.rsqrt.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.scatter](https://pytorch.org/docs/2.1/generated/torch.scatter.html) | [mindspore.mint.scatter](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.scatter.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.scatter_add](https://pytorch.org/docs/2.1/generated/torch.scatter_add.html) | [mindspore.mint.scatter_add](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.scatter_add.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.searchsorted](https://pytorch.org/docs/2.1/generated/torch.searchsorted.html) | [mindspore.mint.searchsorted](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.searchsorted.html) | 功能一致,参数side默认值不同 | +| [torch.select](https://pytorch.org/docs/2.1/generated/torch.select.html) | [mindspore.mint.select](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.select.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.sigmoid](https://pytorch.org/docs/2.1/generated/torch.sigmoid.html) | [mindspore.mint.sigmoid](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.sigmoid.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.sign](https://pytorch.org/docs/2.1/generated/torch.sign.html) | [mindspore.mint.sign](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.sign.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.sin](https://pytorch.org/docs/2.1/generated/torch.sin.html)| [mindspore.mint.sin](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.sin.html)| [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.sinc](https://pytorch.org/docs/2.1/generated/torch.sinc.html)| 
[mindspore.mint.sinc](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.sinc.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.sinh](https://pytorch.org/docs/2.1/generated/torch.sinh.html)| [mindspore.mint.sinh](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.sinh.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.sort](https://pytorch.org/docs/2.1/generated/torch.sort.html) | [mindspore.mint.sort](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.sort.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.split](https://pytorch.org/docs/2.1/generated/torch.split.html) | [mindspore.mint.split](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.split.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.sqrt](https://pytorch.org/docs/2.1/generated/torch.sqrt.html) | [mindspore.mint.sqrt](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.sqrt.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.square](https://pytorch.org/docs/2.1/generated/torch.square.html)| [mindspore.mint.square](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.square.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.squeeze](https://pytorch.org/docs/2.1/generated/torch.squeeze.html) | [mindspore.mint.squeeze](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.squeeze.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.stack](https://pytorch.org/docs/2.1/generated/torch.stack.html) | [mindspore.mint.stack](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.stack.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.std](https://pytorch.org/docs/2.1/generated/torch.std.html) | [mindspore.mint.std](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.std.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.std_mean](https://pytorch.org/docs/2.1/generated/torch.std_mean.html) | [mindspore.mint.std_mean](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.std_mean.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.sub](https://pytorch.org/docs/2.1/generated/torch.sub.html#torch.sub) | [mindspore.mint.sub](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.sub.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.sum](https://pytorch.org/docs/2.1/generated/torch.sum.html) | [mindspore.mint.sum](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.sum.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.swapaxes](https://pytorch.org/docs/2.1/generated/torch.swapaxes.html) | 
[mindspore.mint.swapaxes](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.swapaxes.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.t](https://pytorch.org/docs/2.1/generated/torch.t.html) | [mindspore.mint.t](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.t.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.tan](https://pytorch.org/docs/2.1/generated/torch.tan.html)| [mindspore.mint.tan](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.tan.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.tanh](https://pytorch.org/docs/2.1/generated/torch.tanh.html) | [mindspore.mint.tanh](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.tanh.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.tile](https://pytorch.org/docs/2.1/generated/torch.tile.html) | [mindspore.mint.tile](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.tile.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.topk](https://pytorch.org/docs/2.1/generated/torch.topk.html) | [mindspore.mint.topk](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.topk.html) | 功能一致,参数dim默认值不同 | +| [torch.trace](https://pytorch.org/docs/2.1/generated/torch.trace.html) | [mindspore.mint.trace](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.trace.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.transpose](https://pytorch.org/docs/2.1/generated/torch.transpose.html) | [mindspore.mint.transpose](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.transpose.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.tril](https://pytorch.org/docs/2.1/generated/torch.tril.html) | [mindspore.mint.tril](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.tril.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.triu](https://pytorch.org/docs/2.1/generated/torch.triu.html) | [mindspore.mint.triu](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.triu.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.trunc](https://pytorch.org/docs/2.1/generated/torch.trunc.html)| [mindspore.mint.trunc](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.trunc.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.unbind](https://pytorch.org/docs/2.1/generated/torch.unbind.html) | [mindspore.mint.unbind](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.unbind.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.unique](https://pytorch.org/docs/2.1/generated/torch.unique.html#torch.unique) | 
[mindspore.mint.unique](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.unique.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)|
+| [torch.unique_consecutive](https://pytorch.org/docs/2.1/generated/torch.unique_consecutive.html) | [mindspore.mint.unique_consecutive](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.unique_consecutive.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)|
+| [torch.unsqueeze](https://pytorch.org/docs/2.1/generated/torch.unsqueeze.html) | [mindspore.mint.unsqueeze](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.unsqueeze.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)|
+| [torch.var](https://pytorch.org/docs/2.1/generated/torch.var.html) | [mindspore.mint.var](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.var.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)|
+| [torch.var_mean](https://pytorch.org/docs/2.1/generated/torch.var_mean.html) | [mindspore.mint.var_mean](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.var_mean.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)|
+| [torch.where](https://pytorch.org/docs/2.1/generated/torch.where.html) | [mindspore.mint.where](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.where.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) |
+| [torch.xlogy](https://pytorch.org/docs/2.1/generated/torch.xlogy.html) | [mindspore.mint.xlogy](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.xlogy.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)|
+| [torch.zeros](https://pytorch.org/docs/2.1/generated/torch.zeros.html) | [mindspore.mint.zeros](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.zeros.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) |
+| [torch.zeros_like](https://pytorch.org/docs/2.1/generated/torch.zeros_like.html#torch-zeros-like) | [mindspore.mint.zeros_like](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.zeros_like.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) |

## torch.linalg

| PyTorch 2.1 APIs | MindSpore APIs | 说明 |
| -------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------- | ---- |
-| [torch.linalg.inv](https://pytorch.org/docs/2.1/generated/torch.linalg.inv.html) | [mindspore.mint.linalg.inv](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.linalg.inv.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)|
-| [torch.linalg.matrix_norm](https://pytorch.org/docs/2.1/generated/torch.linalg.matrix_norm.html) | [mindspore.mint.linalg.matrix_norm](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.linalg.matrix_norm.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)|
-| [torch.linalg.norm](https://pytorch.org/docs/2.1/generated/torch.linalg.norm.html) | [mindspore.mint.linalg.norm](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.linalg.norm.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)|
-| [torch.linalg.vector_norm](https://pytorch.org/docs/2.1/generated/torch.linalg.vector_norm.html) | [mindspore.mint.linalg.vector_norm](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.linalg.vector_norm.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)|
+| [torch.linalg.inv](https://pytorch.org/docs/2.1/generated/torch.linalg.inv.html) | [mindspore.mint.linalg.inv](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.linalg.inv.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)|
+| [torch.linalg.matrix_norm](https://pytorch.org/docs/2.1/generated/torch.linalg.matrix_norm.html) | [mindspore.mint.linalg.matrix_norm](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.linalg.matrix_norm.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)|
+| [torch.linalg.norm](https://pytorch.org/docs/2.1/generated/torch.linalg.norm.html) | [mindspore.mint.linalg.norm](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.linalg.norm.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)|
+| [torch.linalg.vector_norm](https://pytorch.org/docs/2.1/generated/torch.linalg.vector_norm.html) | [mindspore.mint.linalg.vector_norm](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.linalg.vector_norm.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)|

## torch.distributed

| PyTorch 2.1 APIs | MindSpore APIs | 说明 |
| ---------------------------------------------------------------------------------------------------------------------------- |-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------|
-| [torch.distributed.P2POp](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.P2POp) | [mindspore.mint.distributed.P2POp](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.distributed.P2POp.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) |
-| [torch.distributed.all_gather](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.all_gather) | [mindspore.mint.distributed.all_gather](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.distributed.all_gather.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)|
-| 
[torch.distributed.all_gather_into_tensor](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.all_gather_into_tensor) | [mindspore.mint.distributed.all_gather_into_tensor](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.distributed.all_gather_into_tensor.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.distributed.all_gather_object](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.all_gather_object) | [mindspore.mint.distributed.all_gather_object](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.distributed.all_gather_object.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.distributed.all_reduce](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.all_reduce) | [mindspore.mint.distributed.all_reduce](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.distributed.all_reduce.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.distributed.all_to_all_single](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.all_to_all_single) | [mindspore.mint.distributed.all_to_all_single](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.distributed.all_to_all_single.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.distributed.all_to_all](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.all_to_all) | [mindspore.mint.distributed.all_to_all](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.distributed.all_to_all.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.distributed.barrier](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.barrier) | [mindspore.mint.distributed.barrier](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.distributed.barrier.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.distributed.batch_isend_irecv](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.batch_isend_irecv) | [mindspore.mint.distributed.batch_isend_irecv](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.distributed.batch_isend_irecv.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.distributed.broadcast](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.broadcast) | [mindspore.mint.distributed.broadcast](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.distributed.broadcast.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.distributed.broadcast_object_list](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.broadcast_object_list) | [mindspore.mint.distributed.broadcast_object_list](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.distributed.broadcast_object_list.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| []() | 
[mindspore.mint.distributed.destroy_process_group](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.distributed.destroy_process_group.html) | MindSpore独有| -| [torch.distributed.gather](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.gather) | [mindspore.mint.distributed.gather](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.distributed.gather.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.distributed.gather_object](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.gather_object) | [mindspore.mint.distributed.gather_object](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.distributed.gather_object.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.distributed.get_backend](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.get_backend) | [mindspore.mint.distributed.get_backend](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.distributed.get_backend.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.distributed.get_global_rank](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.get_global_rank) | [mindspore.mint.distributed.get_global_rank](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.distributed.get_global_rank.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.distributed.get_group_rank](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.get_group_rank) | [mindspore.mint.distributed.get_group_rank](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.distributed.get_group_rank.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.distributed.get_process_group_ranks](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.get_process_group_ranks) | [mindspore.mint.distributed.get_process_group_ranks](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.distributed.get_process_group_ranks.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.distributed.get_rank](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.get_rank) | [mindspore.mint.distributed.get_rank](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.distributed.get_rank.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.distributed.get_world_size](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.get_world_size) | [mindspore.mint.distributed.get_world_size](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.distributed.get_world_size.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.distributed.init_process_group](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.init_process_group) | [mindspore.mint.distributed.init_process_group](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.distributed.init_process_group.html) | 功能一致,参数名不一致 | -| 
[torch.distributed.irecv](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.irecv) | [mindspore.mint.distributed.irecv](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.distributed.irecv.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.distributed.isend](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.isend) | [mindspore.mint.distributed.isend](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.distributed.isend.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.distributed.new_group](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.new_group) | [mindspore.mint.distributed.new_group](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.distributed.new_group.html) | MindSpore多参数group_desc=None | -| [torch.distributed.recv](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.recv) | [mindspore.mint.distributed.recv](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.distributed.recv.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.distributed.reduce](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.reduce) | [mindspore.mint.distributed.reduce](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.distributed.reduce.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.distributed.reduce_scatter](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.reduce_scatter) | [mindspore.mint.distributed.reduce_scatter](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.distributed.reduce_scatter.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.distributed.reduce_scatter_tensor](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.reduce_scatter_tensor) | [mindspore.mint.distributed.reduce_scatter_tensor](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.distributed.reduce_scatter_tensor.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.distributed.scatter](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.scatter) | [mindspore.mint.distributed.scatter](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.distributed.scatter.html) | 功能一致,参数scatter_list默认值不同 | -| [torch.distributed.scatter_object_list](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.scatter_object_list) | [mindspore.mint.distributed.scatter_object_list](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.distributed.scatter_object_list.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.distributed.send](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.send) | [mindspore.mint.distributed.send](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.distributed.send.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| 
[torch.distributed.P2POp](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.P2POp) | [mindspore.mint.distributed.P2POp](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.distributed.P2POp.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.distributed.all_gather](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.all_gather) | [mindspore.mint.distributed.all_gather](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.distributed.all_gather.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.distributed.all_gather_into_tensor](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.all_gather_into_tensor) | [mindspore.mint.distributed.all_gather_into_tensor](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.distributed.all_gather_into_tensor.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.distributed.all_gather_object](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.all_gather_object) | [mindspore.mint.distributed.all_gather_object](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.distributed.all_gather_object.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.distributed.all_reduce](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.all_reduce) | [mindspore.mint.distributed.all_reduce](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.distributed.all_reduce.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.distributed.all_to_all_single](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.all_to_all_single) | [mindspore.mint.distributed.all_to_all_single](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.distributed.all_to_all_single.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.distributed.all_to_all](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.all_to_all) | [mindspore.mint.distributed.all_to_all](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.distributed.all_to_all.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.distributed.barrier](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.barrier) | [mindspore.mint.distributed.barrier](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.distributed.barrier.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.distributed.batch_isend_irecv](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.batch_isend_irecv) | [mindspore.mint.distributed.batch_isend_irecv](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.distributed.batch_isend_irecv.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.distributed.broadcast](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.broadcast) | 
[mindspore.mint.distributed.broadcast](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.distributed.broadcast.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.distributed.broadcast_object_list](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.broadcast_object_list) | [mindspore.mint.distributed.broadcast_object_list](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.distributed.broadcast_object_list.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| []() | [mindspore.mint.distributed.destroy_process_group](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.distributed.destroy_process_group.html) | MindSpore独有| +| [torch.distributed.gather](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.gather) | [mindspore.mint.distributed.gather](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.distributed.gather.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.distributed.gather_object](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.gather_object) | [mindspore.mint.distributed.gather_object](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.distributed.gather_object.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.distributed.get_backend](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.get_backend) | [mindspore.mint.distributed.get_backend](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.distributed.get_backend.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.distributed.get_global_rank](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.get_global_rank) | [mindspore.mint.distributed.get_global_rank](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.distributed.get_global_rank.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.distributed.get_group_rank](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.get_group_rank) | [mindspore.mint.distributed.get_group_rank](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.distributed.get_group_rank.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.distributed.get_process_group_ranks](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.get_process_group_ranks) | [mindspore.mint.distributed.get_process_group_ranks](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.distributed.get_process_group_ranks.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.distributed.get_rank](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.get_rank) | [mindspore.mint.distributed.get_rank](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.distributed.get_rank.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| 
[torch.distributed.get_world_size](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.get_world_size) | [mindspore.mint.distributed.get_world_size](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.distributed.get_world_size.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.distributed.init_process_group](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.init_process_group) | [mindspore.mint.distributed.init_process_group](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.distributed.init_process_group.html) | 功能一致,参数名不一致 | +| [torch.distributed.irecv](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.irecv) | [mindspore.mint.distributed.irecv](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.distributed.irecv.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.distributed.isend](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.isend) | [mindspore.mint.distributed.isend](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.distributed.isend.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.distributed.new_group](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.new_group) | [mindspore.mint.distributed.new_group](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.distributed.new_group.html) | MindSpore多参数group_desc=None | +| [torch.distributed.recv](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.recv) | [mindspore.mint.distributed.recv](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.distributed.recv.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.distributed.reduce](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.reduce) | [mindspore.mint.distributed.reduce](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.distributed.reduce.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.distributed.reduce_scatter](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.reduce_scatter) | [mindspore.mint.distributed.reduce_scatter](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.distributed.reduce_scatter.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.distributed.reduce_scatter_tensor](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.reduce_scatter_tensor) | [mindspore.mint.distributed.reduce_scatter_tensor](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.distributed.reduce_scatter_tensor.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.distributed.scatter](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.scatter) | [mindspore.mint.distributed.scatter](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.distributed.scatter.html) | 功能一致,参数scatter_list默认值不同 | +| [torch.distributed.scatter_object_list](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.scatter_object_list) | 
[mindspore.mint.distributed.scatter_object_list](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.distributed.scatter_object_list.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)|
+| [torch.distributed.send](https://pytorch.org/docs/2.1/distributed.html#torch.distributed.send) | [mindspore.mint.distributed.send](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.distributed.send.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)|

## torch.nn

| PyTorch 2.1 APIs | MindSpore APIs | 说明 |
| ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ |
-| [torch.nn.AdaptiveAvgPool1d](https://PyTorch.org/docs/2.1/generated/torch.nn.AdaptiveAvgPool1d.html) | [mindspore.mint.nn.AdaptiveAvgPool1d](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.AdaptiveAvgPool1d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)|
-| [torch.nn.AdaptiveAvgPool2d](https://PyTorch.org/docs/2.1/generated/torch.nn.AdaptiveAvgPool2d.html) | [mindspore.mint.nn.AdaptiveAvgPool2d](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.AdaptiveAvgPool2d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)|
-| [torch.nn.AdaptiveAvgPool3d](https://PyTorch.org/docs/2.1/generated/torch.nn.AdaptiveAvgPool3d.html) | [mindspore.mint.nn.AdaptiveAvgPool3d](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.AdaptiveAvgPool3d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)|
-| [torch.nn.AvgPool2d](https://PyTorch.org/docs/2.1/generated/torch.nn.AvgPool2d.html) | [mindspore.mint.nn.AvgPool2d](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.AvgPool2d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) |
-| [torch.nn.BCELoss](https://PyTorch.org/docs/2.1/generated/torch.nn.BCELoss.html) | [mindspore.mint.nn.BCELoss](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.BCELoss.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) |
-| [torch.nn.BCEWithLogitsLoss](https://pytorch.org/docs/2.1/generated/torch.nn.BCEWithLogitsLoss.html) | [mindspore.mint.nn.BCEWithLogitsLoss](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.BCEWithLogitsLoss.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) |
-| [torch.nn.BatchNorm1d](https://PyTorch.org/docs/2.1/generated/torch.nn.BatchNorm1d.html) | [mindspore.mint.nn.BatchNorm1d](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.BatchNorm1d.html) | 功能一致,MindSpore默认为推理模式 |
-| [torch.nn.BatchNorm2d](https://PyTorch.org/docs/2.1/generated/torch.nn.BatchNorm2d.html) | [mindspore.mint.nn.BatchNorm2d](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.BatchNorm2d.html) | 功能一致,MindSpore默认为推理模式 |
-| [torch.nn.BatchNorm3d](https://PyTorch.org/docs/2.1/generated/torch.nn.BatchNorm3d.html) | 
[mindspore.mint.nn.BatchNorm3d](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.BatchNorm3d.html) | 功能一致,MindSpore默认为推理模式 | -| [torch.nn.ConstantPad1d](https://pytorch.org/docs/2.1/generated/torch.nn.ConstantPad1d.html) | [mindspore.mint.nn.ConstantPad1d](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.ConstantPad1d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.ConstantPad2d](https://pytorch.org/docs/2.1/generated/torch.nn.ConstantPad2d.html) | [mindspore.mint.nn.ConstantPad2d](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.ConstantPad2d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.ConstantPad3d](https://pytorch.org/docs/2.1/generated/torch.nn.ConstantPad3d.html) | [mindspore.mint.nn.ConstantPad3d](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.ConstantPad3d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.Conv2d](https://pytorch.org/docs/2.1/generated/torch.nn.Conv2d.html) | [mindspore.mint.nn.Conv2d](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.Conv2d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.Conv3d](https://pytorch.org/docs/2.1/generated/torch.nn.Conv3d.html) | [mindspore.mint.nn.Conv3d](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.Conv3d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.ConvTranspose2d](https://pytorch.org/docs/2.1/generated/torch.nn.ConvTranspose2d.html) | [mindspore.mint.nn.ConvTranspose2d](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.ConvTranspose2d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.CrossEntropyLoss](https://pytorch.org/docs/2.1/generated/torch.nn.CrossEntropyLoss.html) | [mindspore.mint.nn.CrossEntropyLoss](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.CrossEntropyLoss.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.Dropout](https://pytorch.org/docs/2.1/generated/torch.nn.Dropout.html) | [mindspore.mint.nn.Dropout](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.Dropout.html) | 功能一致,MindSpore默认为推理模式 | -| [torch.nn.Dropout2d](https://pytorch.org/docs/2.1/generated/torch.nn.Dropout2d.html) | [mindspore.mint.nn.Dropout2d](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.Dropout2d.html) | 功能一致,MindSpore默认为推理模式 | -| [torch.nn.ELU](https://pytorch.org/docs/2.1/generated/torch.nn.ELU.html) | [mindspore.mint.nn.ELU](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.ELU.html) | 功能一致,MindSpore不含参数inplace| -| [torch.nn.Embedding](https://pytorch.org/docs/2.1/generated/torch.nn.Embedding.html) | [mindspore.mint.nn.Embedding](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.Embedding.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| 
[torch.nn.Flatten](https://pytorch.org/docs/2.1/generated/torch.nn.Flatten.html) | [mindspore.mint.flatten](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.flatten.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.Fold](https://pytorch.org/docs/2.1/generated/torch.nn.Fold.html) | [mindspore.mint.nn.Fold](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.Fold.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.GELU](https://pytorch.org/docs/2.1/generated/torch.nn.GELU.html) | [mindspore.mint.nn.GELU](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.GELU.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.GroupNorm](https://pytorch.org/docs/2.1/generated/torch.nn.GroupNorm.html) | [mindspore.mint.nn.GroupNorm](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.GroupNorm.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.Hardshrink](https://pytorch.org/docs/2.1/generated/torch.nn.Hardshrink.html#torch.nn.Hardshrink) | [mindspore.mint.nn.Hardshrink](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.Hardshrink.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.Hardsigmoid](https://pytorch.org/docs/2.1/generated/torch.nn.Hardsigmoid.html) | [mindspore.mint.nn.Hardsigmoid](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.Hardsigmoid.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.Hardswish](https://pytorch.org/docs/2.1/generated/torch.nn.Hardswish.html) | [mindspore.mint.nn.Hardswish](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.Hardswish.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.Identity](https://pytorch.org/docs/2.1/generated/torch.nn.Identity.html) | [mindspore.mint.nn.Identity](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.Identity.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.L1Loss](https://pytorch.org/docs/2.1/generated/torch.nn.L1Loss.html#torch.nn.L1Loss) | [mindspore.mint.nn.L1Loss](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.L1Loss.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.LayerNorm](https://pytorch.org/docs/2.1/generated/torch.nn.LayerNorm.html) | [mindspore.mint.nn.LayerNorm](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.LayerNorm.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.Linear](https://pytorch.org/docs/2.1/generated/torch.nn.Linear.html) | [mindspore.mint.nn.Linear](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.Linear.html) | 功能一致,MindSpore多weight_init=None和bias_init=None两个参数 | -| [torch.nn.LogSigmoid](https://pytorch.org/docs/2.1/generated/torch.nn.LogSigmoid.html) | 
[mindspore.mint.nn.LogSigmoid](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.LogSigmoid.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.LogSoftMax](https://pytorch.org/docs/2.1/generated/torch.nn.LogSoftmax.html) | [mindspore.mint.nn.LogSoftmax](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.LogSoftmax.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.MSELoss](https://pytorch.org/docs/2.1/generated/torch.nn.MSELoss.html) | [mindspore.mint.nn.MSELoss](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.MSELoss.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.MaxUnpool2d](https://pytorch.org/docs/2.1/generated/torch.nn.MaxUnpool2d.html) | [mindspore.mint.nn.MaxUnpool2d](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.MaxUnpool2d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.Mish](https://pytorch.org/docs/2.1/generated/torch.nn.Mish.html) | [mindspore.mint.nn.Mish](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.Mish.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.NLLLoss](https://pytorch.org/docs/2.1/generated/torch.nn.NLLLoss.html) | [mindspore.mint.nn.NLLLoss](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.NLLLoss.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.PReLU](https://pytorch.org/docs/2.1/generated/torch.nn.PReLU.html) | [mindspore.mint.nn.PReLU](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.PReLU.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.ReLU](https://pytorch.org/docs/2.1/generated/torch.nn.ReLU.html) | [mindspore.mint.nn.ReLU](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.ReLU.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.nn.ReLU6](https://pytorch.org/docs/2.1/generated/torch.nn.ReLU6.html) | [mindspore.mint.nn.ReLU6](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.ReLU6.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.ReflectionPad1d](https://pytorch.org/docs/2.1/generated/torch.nn.ReflectionPad1d.html) | [mindspore.mint.nn.ReflectionPad1d](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.ReflectionPad1d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.ReflectionPad2d](https://pytorch.org/docs/2.1/generated/torch.nn.ReflectionPad2d.html) | [mindspore.mint.nn.ReflectionPad2d](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.ReflectionPad2d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.ReflectionPad3d](https://pytorch.org/docs/2.1/generated/torch.nn.ReflectionPad3d.html) | 
[mindspore.mint.nn.ReflectionPad3d](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.ReflectionPad3d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.ReplicationPad1d](https://pytorch.org/docs/2.1/generated/torch.nn.ReplicationPad1d.html) | [mindspore.mint.nn.ReplicationPad1d](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.ReplicationPad1d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.ReplicationPad2d](https://pytorch.org/docs/2.1/generated/torch.nn.ReplicationPad2d.html) | [mindspore.mint.nn.ReplicationPad2d](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.ReplicationPad2d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.ReplicationPad3d](https://pytorch.org/docs/2.1/generated/torch.nn.ReplicationPad3d.html) | [mindspore.mint.nn.ReplicationPad3d](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.ReplicationPad3d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.SeLU](https://pytorch.org/docs/2.1/generated/torch.nn.SELU.html) | [mindspore.mint.nn.SELU](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.SELU.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.SiLU](https://pytorch.org/docs/2.1/generated/torch.nn.SiLU.html) | [mindspore.mint.nn.SiLU](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.SiLU.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.SmoothL1Loss](https://pytorch.org/docs/2.1/generated/torch.nn.SmoothL1Loss.html) | [mindspore.mint.nn.SmoothL1Loss](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.SmoothL1Loss.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.Softmax](https://pytorch.org/docs/2.1/generated/torch.nn.Softmax.html) | [mindspore.mint.nn.Softmax](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.Softmax.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.nn.Softshrink](https://pytorch.org/docs/2.1/generated/torch.nn.Softshrink.html) | [mindspore.mint.nn.Softshrink](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.Softshrink.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.nn.SyncBatchNorm](https://pytorch.org/docs/2.1/generated/torch.nn.SyncBatchNorm.html) | [mindspore.mint.nn.SyncBatchNorm](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.SyncBatchNorm.html) | 功能一致,MindSpore默认为推理模式 | -| [torch.nn.Tanh](https://pytorch.org/docs/2.1/generated/torch.nn.Tanh.html) | [mindspore.mint.nn.Tanh](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.Tanh.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.Unfold](https://pytorch.org/docs/2.1/generated/torch.nn.Unfold.html) | 
[mindspore.mint.nn.Unfold](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.Unfold.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.nn.Upsample](https://pytorch.org/docs/2.1/generated/torch.nn.Upsample.html) | [mindspore.mint.nn.Upsample](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.Upsample.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.ZeroPad1d](https://pytorch.org/docs/2.1/generated/torch.nn.ZeroPad1d.html) | [mindspore.mint.nn.ZeroPad1d](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.ZeroPad1d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.ZeroPad2d](https://pytorch.org/docs/2.1/generated/torch.nn.ZeroPad2d.html) | [mindspore.mint.nn.ZeroPad2d](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.ZeroPad2d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.ZeroPad3d](https://pytorch.org/docs/2.1/generated/torch.nn.ZeroPad3d.html) | [mindspore.mint.nn.ZeroPad3d](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.ZeroPad3d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.AdaptiveAvgPool1d](https://PyTorch.org/docs/2.1/generated/torch.nn.AdaptiveAvgPool1d.html) | [mindspore.mint.nn.AdaptiveAvgPool1d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.AdaptiveAvgPool1d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.nn.AdaptiveAvgPool2d](https://PyTorch.org/docs/2.1/generated/torch.nn.AdaptiveAvgPool2d.html) | [mindspore.mint.nn.AdaptiveAvgPool2d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.AdaptiveAvgPool2d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.nn.AdaptiveAvgPool3d](https://PyTorch.org/docs/2.1/generated/torch.nn.AdaptiveAvgPool3d.html) | [mindspore.mint.nn.AdaptiveAvgPool3d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.AdaptiveAvgPool3d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.nn.AvgPool2d](https://PyTorch.org/docs/2.1/generated/torch.nn.AvgPool2d.html) | [mindspore.mint.nn.AvgPool2d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.AvgPool2d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.BCELoss](https://PyTorch.org/docs/2.1/generated/torch.nn.BCELoss.html) | [mindspore.mint.nn.BCELoss](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.BCELoss.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.BCEWithLogitsLoss](https://pytorch.org/docs/2.1/generated/torch.nn.BCEWithLogitsLoss.html) | [mindspore.mint.nn.BCEWithLogitsLoss](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.BCEWithLogitsLoss.html) | 
[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.BatchNorm1d](https://PyTorch.org/docs/2.1/generated/torch.nn.BatchNorm1d.html) | [mindspore.mint.nn.BatchNorm1d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.BatchNorm1d.html) | 功能一致,MindSpore默认为推理模式 | +| [torch.nn.BatchNorm2d](https://PyTorch.org/docs/2.1/generated/torch.nn.BatchNorm2d.html) | [mindspore.mint.nn.BatchNorm2d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.BatchNorm2d.html) | 功能一致,MindSpore默认为推理模式 | +| [torch.nn.BatchNorm3d](https://PyTorch.org/docs/2.1/generated/torch.nn.BatchNorm3d.html) | [mindspore.mint.nn.BatchNorm3d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.BatchNorm3d.html) | 功能一致,MindSpore默认为推理模式 | +| [torch.nn.ConstantPad1d](https://pytorch.org/docs/2.1/generated/torch.nn.ConstantPad1d.html) | [mindspore.mint.nn.ConstantPad1d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.ConstantPad1d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.ConstantPad2d](https://pytorch.org/docs/2.1/generated/torch.nn.ConstantPad2d.html) | [mindspore.mint.nn.ConstantPad2d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.ConstantPad2d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.ConstantPad3d](https://pytorch.org/docs/2.1/generated/torch.nn.ConstantPad3d.html) | [mindspore.mint.nn.ConstantPad3d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.ConstantPad3d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.Conv2d](https://pytorch.org/docs/2.1/generated/torch.nn.Conv2d.html) | [mindspore.mint.nn.Conv2d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.Conv2d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.Conv3d](https://pytorch.org/docs/2.1/generated/torch.nn.Conv3d.html) | [mindspore.mint.nn.Conv3d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.Conv3d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.ConvTranspose2d](https://pytorch.org/docs/2.1/generated/torch.nn.ConvTranspose2d.html) | [mindspore.mint.nn.ConvTranspose2d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.ConvTranspose2d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.CrossEntropyLoss](https://pytorch.org/docs/2.1/generated/torch.nn.CrossEntropyLoss.html) | [mindspore.mint.nn.CrossEntropyLoss](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.CrossEntropyLoss.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.Dropout](https://pytorch.org/docs/2.1/generated/torch.nn.Dropout.html) | [mindspore.mint.nn.Dropout](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.Dropout.html) | 功能一致,MindSpore默认为推理模式 | +| [torch.nn.Dropout2d](https://pytorch.org/docs/2.1/generated/torch.nn.Dropout2d.html) | 
[mindspore.mint.nn.Dropout2d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.Dropout2d.html) | 功能一致,MindSpore默认为推理模式 | +| [torch.nn.ELU](https://pytorch.org/docs/2.1/generated/torch.nn.ELU.html) | [mindspore.mint.nn.ELU](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.ELU.html) | 功能一致,MindSpore不含参数inplace| +| [torch.nn.Embedding](https://pytorch.org/docs/2.1/generated/torch.nn.Embedding.html) | [mindspore.mint.nn.Embedding](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.Embedding.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.Flatten](https://pytorch.org/docs/2.1/generated/torch.nn.Flatten.html) | [mindspore.mint.flatten](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.flatten.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.Fold](https://pytorch.org/docs/2.1/generated/torch.nn.Fold.html) | [mindspore.mint.nn.Fold](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.Fold.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.GELU](https://pytorch.org/docs/2.1/generated/torch.nn.GELU.html) | [mindspore.mint.nn.GELU](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.GELU.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.GroupNorm](https://pytorch.org/docs/2.1/generated/torch.nn.GroupNorm.html) | [mindspore.mint.nn.GroupNorm](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.GroupNorm.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.Hardshrink](https://pytorch.org/docs/2.1/generated/torch.nn.Hardshrink.html#torch.nn.Hardshrink) | [mindspore.mint.nn.Hardshrink](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.Hardshrink.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.Hardsigmoid](https://pytorch.org/docs/2.1/generated/torch.nn.Hardsigmoid.html) | [mindspore.mint.nn.Hardsigmoid](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.Hardsigmoid.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.Hardswish](https://pytorch.org/docs/2.1/generated/torch.nn.Hardswish.html) | [mindspore.mint.nn.Hardswish](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.Hardswish.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.Identity](https://pytorch.org/docs/2.1/generated/torch.nn.Identity.html) | [mindspore.mint.nn.Identity](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.Identity.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.L1Loss](https://pytorch.org/docs/2.1/generated/torch.nn.L1Loss.html#torch.nn.L1Loss) | [mindspore.mint.nn.L1Loss](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.L1Loss.html) | 
[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.LayerNorm](https://pytorch.org/docs/2.1/generated/torch.nn.LayerNorm.html) | [mindspore.mint.nn.LayerNorm](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.LayerNorm.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.Linear](https://pytorch.org/docs/2.1/generated/torch.nn.Linear.html) | [mindspore.mint.nn.Linear](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.Linear.html) | 功能一致,MindSpore多weight_init=None和bias_init=None两个参数 | +| [torch.nn.LogSigmoid](https://pytorch.org/docs/2.1/generated/torch.nn.LogSigmoid.html) | [mindspore.mint.nn.LogSigmoid](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.LogSigmoid.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.LogSoftmax](https://pytorch.org/docs/2.1/generated/torch.nn.LogSoftmax.html) | [mindspore.mint.nn.LogSoftmax](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.LogSoftmax.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.MSELoss](https://pytorch.org/docs/2.1/generated/torch.nn.MSELoss.html) | [mindspore.mint.nn.MSELoss](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.MSELoss.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.MaxUnpool2d](https://pytorch.org/docs/2.1/generated/torch.nn.MaxUnpool2d.html) | [mindspore.mint.nn.MaxUnpool2d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.MaxUnpool2d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.Mish](https://pytorch.org/docs/2.1/generated/torch.nn.Mish.html) | [mindspore.mint.nn.Mish](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.Mish.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.NLLLoss](https://pytorch.org/docs/2.1/generated/torch.nn.NLLLoss.html) | [mindspore.mint.nn.NLLLoss](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.NLLLoss.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.PReLU](https://pytorch.org/docs/2.1/generated/torch.nn.PReLU.html) | [mindspore.mint.nn.PReLU](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.PReLU.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.ReLU](https://pytorch.org/docs/2.1/generated/torch.nn.ReLU.html) | [mindspore.mint.nn.ReLU](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.ReLU.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.nn.ReLU6](https://pytorch.org/docs/2.1/generated/torch.nn.ReLU6.html) | [mindspore.mint.nn.ReLU6](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.ReLU6.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| 
[torch.nn.ReflectionPad1d](https://pytorch.org/docs/2.1/generated/torch.nn.ReflectionPad1d.html) | [mindspore.mint.nn.ReflectionPad1d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.ReflectionPad1d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.ReflectionPad2d](https://pytorch.org/docs/2.1/generated/torch.nn.ReflectionPad2d.html) | [mindspore.mint.nn.ReflectionPad2d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.ReflectionPad2d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.ReflectionPad3d](https://pytorch.org/docs/2.1/generated/torch.nn.ReflectionPad3d.html) | [mindspore.mint.nn.ReflectionPad3d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.ReflectionPad3d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.ReplicationPad1d](https://pytorch.org/docs/2.1/generated/torch.nn.ReplicationPad1d.html) | [mindspore.mint.nn.ReplicationPad1d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.ReplicationPad1d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.ReplicationPad2d](https://pytorch.org/docs/2.1/generated/torch.nn.ReplicationPad2d.html) | [mindspore.mint.nn.ReplicationPad2d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.ReplicationPad2d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.ReplicationPad3d](https://pytorch.org/docs/2.1/generated/torch.nn.ReplicationPad3d.html) | [mindspore.mint.nn.ReplicationPad3d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.ReplicationPad3d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.SELU](https://pytorch.org/docs/2.1/generated/torch.nn.SELU.html) | [mindspore.mint.nn.SELU](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.SELU.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.SiLU](https://pytorch.org/docs/2.1/generated/torch.nn.SiLU.html) | [mindspore.mint.nn.SiLU](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.SiLU.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.SmoothL1Loss](https://pytorch.org/docs/2.1/generated/torch.nn.SmoothL1Loss.html) | [mindspore.mint.nn.SmoothL1Loss](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.SmoothL1Loss.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.Softmax](https://pytorch.org/docs/2.1/generated/torch.nn.Softmax.html) | [mindspore.mint.nn.Softmax](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.Softmax.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.nn.Softshrink](https://pytorch.org/docs/2.1/generated/torch.nn.Softshrink.html) | 
[mindspore.mint.nn.Softshrink](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.Softshrink.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.nn.SyncBatchNorm](https://pytorch.org/docs/2.1/generated/torch.nn.SyncBatchNorm.html) | [mindspore.mint.nn.SyncBatchNorm](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.SyncBatchNorm.html) | 功能一致,MindSpore默认为推理模式 | +| [torch.nn.Tanh](https://pytorch.org/docs/2.1/generated/torch.nn.Tanh.html) | [mindspore.mint.nn.Tanh](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.Tanh.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.Unfold](https://pytorch.org/docs/2.1/generated/torch.nn.Unfold.html) | [mindspore.mint.nn.Unfold](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.Unfold.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.nn.Upsample](https://pytorch.org/docs/2.1/generated/torch.nn.Upsample.html) | [mindspore.mint.nn.Upsample](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.Upsample.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.ZeroPad1d](https://pytorch.org/docs/2.1/generated/torch.nn.ZeroPad1d.html) | [mindspore.mint.nn.ZeroPad1d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.ZeroPad1d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.ZeroPad2d](https://pytorch.org/docs/2.1/generated/torch.nn.ZeroPad2d.html) | [mindspore.mint.nn.ZeroPad2d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.ZeroPad2d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.ZeroPad3d](https://pytorch.org/docs/2.1/generated/torch.nn.ZeroPad3d.html) | [mindspore.mint.nn.ZeroPad3d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.ZeroPad3d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | ## torch.nn.functional | PyTorch 2.1 APIs | MindSpore APIs | 说明 | | ---------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------- | -| [torch.nn.functional.adaptive_avg_pool1d](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.adaptive_avg_pool1d) | [mindspore.mint.nn.functional.adaptive_avg_pool1d](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.adaptive_avg_pool1d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.functional.adaptive_avg_pool2d](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.adaptive_avg_pool2d) | 
[mindspore.mint.nn.functional.adaptive_avg_pool2d](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.adaptive_avg_pool2d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.functional.avg_pool1d](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.avg_pool1d) | [mindspore.mint.nn.functional.avg_pool1d](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.avg_pool1d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.functional.avg_pool2d](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.avg_pool2d) | [mindspore.mint.nn.functional.avg_pool2d](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.avg_pool2d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.functional.batch_norm](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.batch_norm) | [mindspore.mint.nn.functional.batch_norm](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.batch_norm.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.nn.functional.binary_cross_entropy](https://pytorch.org/docs/2.1/generated/torch.nn.functional.binary_cross_entropy.html) | [mindspore.mint.nn.functional.binary_cross_entropy](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.binary_cross_entropy.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.functional.binary_cross_entropy_with_logits](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.binary_cross_entropy_with_logits) | [mindspore.mint.nn.functional.binary_cross_entropy_with_logits](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.binary_cross_entropy_with_logits.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.functional.conv2d](https://pytorch.org/docs/2.1/generated/torch.nn.functional.conv2d.html) | [mindspore.mint.nn.functional.conv2d](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.conv2d.html)| [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.functional.conv3d](https://pytorch.org/docs/2.1/generated/torch.nn.functional.conv3d.html) | [mindspore.mint.nn.functional.conv3d](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.conv3d.html)| [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.functional.conv_transpose2d](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.conv_transpose2d) | [mindspore.mint.nn.functional.conv_transpose2d](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.conv_transpose2d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.functional.dropout](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.dropout) | 
[mindspore.mint.nn.functional.dropout](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.dropout.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.functional.dropout2d](https://pytorch.org/docs/2.1/generated/torch.nn.functional.dropout2d.html#torch.nn.functional.dropout2d) | [mindspore.mint.nn.functional.dropout2d](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.dropout2d.html) | 功能一致,MindSpore不含参数inplace | -| [torch.nn.functional.elu](https://pytorch.org/docs/2.1/generated/torch.nn.functional.elu.html) | [mindspore.mint.nn.functional.elu](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.elu.html) | 功能一致,MindSpore不含参数inplace | -| [torch.nn.functional.embedding](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.embedding) | [mindspore.mint.nn.functional.embedding](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.embedding.html) | 功能一致,MindSpore不含参数sparse | -| [torch.nn.functional.fold](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.fold) | [mindspore.mint.nn.functional.fold](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.fold.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.nn.functional.gelu](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.gelu) | [mindspore.mint.nn.functional.gelu](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.gelu.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.nn.functional.grid_sample](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.grid_sample) | [mindspore.mint.nn.functional.grid_sample](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.grid_sample.html) | 功能一致,参数align_corners默认值不同 | -| [torch.nn.functional.hardshrink](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.hardshrink) | [mindspore.mint.nn.functional.hardshrink](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.hardshrink.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.functional.hardsigmoid](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.hardsigmoid) | [mindspore.mint.nn.functional.hardsigmoid](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.hardsigmoid.html) | 功能一致,MindSpore不含参数inplace | -| [torch.nn.functional.hardswish](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.hardswish) | [mindspore.mint.nn.functional.hardswish](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.hardswish.html) | 功能一致,MindSpore不含参数inplace | -| [torch.nn.functional.interpolate](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.interpolate) | [mindspore.mint.nn.functional.interpolate](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.interpolate.html) | 功能一致,MindSpore不含参数antialias | -| [torch.nn.functional.l1_loss](https://pytorch.org/docs/2.1/generated/torch.nn.functional.l1_loss.html) | 
[mindspore.mint.nn.functional.l1_loss](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.l1_loss.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.functional.leaky_relu](https://pytorch.org/docs/2.1/generated/torch.nn.functional.leaky_relu.html) | [mindspore.mint.nn.functional.leaky_relu](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.leaky_relu.html) | 功能一致,MindSpore不含参数inplace | -| [torch.nn.functional.linear](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.linear) | [mindspore.mint.nn.functional.linear](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.linear.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.functional.log_softmax](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.log_softmax) | [mindspore.mint.nn.functional.log_softmax](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.log_softmax.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.functional.logsigmoid](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.logsigmoid) | [mindspore.mint.nn.functional.logsigmoid](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.logsigmoid.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.functional.max_pool2d](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.max_pool2d) | [mindspore.mint.nn.functional.max_pool2d](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.max_pool2d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.functional.max_unpool2d](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.max_unpool2d) | [mindspore.mint.nn.functional.max_unpool2d](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.max_unpool2d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.functional.mish](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.mish) | [mindspore.mint.nn.functional.mish](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.mish.html) | 功能一致,MindSpore不含参数inplace | -| [torch.nn.functional.mse_loss](https://pytorch.org/docs/2.1/generated/torch.nn.functional.mse_loss.html) | [mindspore.mint.nn.functional.mse_loss](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.mse_loss.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.functional.nll_loss](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.nll_loss) | [mindspore.mint.nn.functional.nll_loss](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.nll_loss.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.functional.normalize](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.normalize) | 
[mindspore.mint.nn.functional.normalize](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.normalize.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.functional.one_hot](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.one_hot) | [mindspore.mint.nn.functional.one_hot](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.one_hot.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.functional.pad](https://pytorch.org/docs/2.1/generated/torch.nn.functional.pad.html) | [mindspore.mint.nn.functional.pad](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.pad.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.functional.prelu](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.prelu) | [mindspore.mint.nn.functional.prelu](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.prelu.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.functional.relu](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.relu) | [mindspore.mint.nn.functional.relu](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.relu.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.functional.relu6](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.relu6) | [mindspore.mint.nn.functional.relu6](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.relu6.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.functional.relu_](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.relu_) | [mindspore.mint.nn.functional.relu_](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.relu_.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.functional.selu](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.selu) | [mindspore.mint.nn.functional.selu](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.selu.html) | 功能一致,MindSpore不含参数inplace | -| [torch.nn.functional.sigmoid](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.sigmoid) | [mindspore.mint.nn.functional.sigmoid](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.sigmoid.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.functional.silu](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.silu) | [mindspore.mint.nn.functional.silu](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.silu.html) | 功能一致,MindSpore不含参数inplace | -| [torch.nn.functional.smooth_l1_loss](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.smooth_l1_loss) | 
[mindspore.mint.nn.functional.smooth_l1_loss](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.smooth_l1_loss.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.functional.softmax](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.softmax) | [mindspore.mint.nn.functional.softmax](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.softmax.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.nn.functional.softplus](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.softplus) | [mindspore.mint.nn.functional.softplus](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.softplus.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.functional.softshrink](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.softshrink) | [mindspore.mint.nn.functional.softshrink](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.softshrink.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.functional.tanh](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.tanh) | [mindspore.mint.nn.functional.tanh](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.tanh.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.nn.functional.unfold](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.unfold) | [mindspore.mint.nn.functional.unfold](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.nn.functional.unfold.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.functional.adaptive_avg_pool1d](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.adaptive_avg_pool1d) | [mindspore.mint.nn.functional.adaptive_avg_pool1d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.adaptive_avg_pool1d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.functional.adaptive_avg_pool2d](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.adaptive_avg_pool2d) | [mindspore.mint.nn.functional.adaptive_avg_pool2d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.adaptive_avg_pool2d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.functional.avg_pool1d](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.avg_pool1d) | [mindspore.mint.nn.functional.avg_pool1d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.avg_pool1d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.functional.avg_pool2d](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.avg_pool2d) | [mindspore.mint.nn.functional.avg_pool2d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.avg_pool2d.html) | 
[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.functional.batch_norm](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.batch_norm) | [mindspore.mint.nn.functional.batch_norm](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.batch_norm.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.nn.functional.binary_cross_entropy](https://pytorch.org/docs/2.1/generated/torch.nn.functional.binary_cross_entropy.html) | [mindspore.mint.nn.functional.binary_cross_entropy](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.binary_cross_entropy.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.functional.binary_cross_entropy_with_logits](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.binary_cross_entropy_with_logits) | [mindspore.mint.nn.functional.binary_cross_entropy_with_logits](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.binary_cross_entropy_with_logits.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.functional.conv2d](https://pytorch.org/docs/2.1/generated/torch.nn.functional.conv2d.html) | [mindspore.mint.nn.functional.conv2d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.conv2d.html)| [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.functional.conv3d](https://pytorch.org/docs/2.1/generated/torch.nn.functional.conv3d.html) | [mindspore.mint.nn.functional.conv3d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.conv3d.html)| [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.functional.conv_transpose2d](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.conv_transpose2d) | [mindspore.mint.nn.functional.conv_transpose2d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.conv_transpose2d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.functional.dropout](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.dropout) | [mindspore.mint.nn.functional.dropout](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.dropout.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.functional.dropout2d](https://pytorch.org/docs/2.1/generated/torch.nn.functional.dropout2d.html#torch.nn.functional.dropout2d) | [mindspore.mint.nn.functional.dropout2d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.dropout2d.html) | 功能一致,MindSpore不含参数inplace | +| [torch.nn.functional.elu](https://pytorch.org/docs/2.1/generated/torch.nn.functional.elu.html) | [mindspore.mint.nn.functional.elu](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.elu.html) | 功能一致,MindSpore不含参数inplace | +| [torch.nn.functional.embedding](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.embedding) | 
[mindspore.mint.nn.functional.embedding](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.embedding.html) | 功能一致,MindSpore不含参数sparse | +| [torch.nn.functional.fold](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.fold) | [mindspore.mint.nn.functional.fold](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.fold.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.nn.functional.gelu](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.gelu) | [mindspore.mint.nn.functional.gelu](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.gelu.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.nn.functional.grid_sample](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.grid_sample) | [mindspore.mint.nn.functional.grid_sample](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.grid_sample.html) | 功能一致,参数align_corners默认值不同 | +| [torch.nn.functional.hardshrink](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.hardshrink) | [mindspore.mint.nn.functional.hardshrink](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.hardshrink.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.functional.hardsigmoid](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.hardsigmoid) | [mindspore.mint.nn.functional.hardsigmoid](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.hardsigmoid.html) | 功能一致,MindSpore不含参数inplace | +| [torch.nn.functional.hardswish](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.hardswish) | [mindspore.mint.nn.functional.hardswish](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.hardswish.html) | 功能一致,MindSpore不含参数inplace | +| [torch.nn.functional.interpolate](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.interpolate) | [mindspore.mint.nn.functional.interpolate](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.interpolate.html) | 功能一致,MindSpore不含参数antialias | +| [torch.nn.functional.l1_loss](https://pytorch.org/docs/2.1/generated/torch.nn.functional.l1_loss.html) | [mindspore.mint.nn.functional.l1_loss](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.l1_loss.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.functional.leaky_relu](https://pytorch.org/docs/2.1/generated/torch.nn.functional.leaky_relu.html) | [mindspore.mint.nn.functional.leaky_relu](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.leaky_relu.html) | 功能一致,MindSpore不含参数inplace | +| [torch.nn.functional.linear](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.linear) | [mindspore.mint.nn.functional.linear](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.linear.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| 
[torch.nn.functional.log_softmax](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.log_softmax) | [mindspore.mint.nn.functional.log_softmax](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.log_softmax.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.functional.logsigmoid](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.logsigmoid) | [mindspore.mint.nn.functional.logsigmoid](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.logsigmoid.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.functional.max_pool2d](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.max_pool2d) | [mindspore.mint.nn.functional.max_pool2d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.max_pool2d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.functional.max_unpool2d](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.max_unpool2d) | [mindspore.mint.nn.functional.max_unpool2d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.max_unpool2d.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.functional.mish](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.mish) | [mindspore.mint.nn.functional.mish](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.mish.html) | 功能一致,MindSpore不含参数inplace | +| [torch.nn.functional.mse_loss](https://pytorch.org/docs/2.1/generated/torch.nn.functional.mse_loss.html) | [mindspore.mint.nn.functional.mse_loss](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.mse_loss.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.functional.nll_loss](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.nll_loss) | [mindspore.mint.nn.functional.nll_loss](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.nll_loss.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.functional.normalize](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.normalize) | [mindspore.mint.nn.functional.normalize](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.normalize.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.functional.one_hot](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.one_hot) | [mindspore.mint.nn.functional.one_hot](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.one_hot.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.functional.pad](https://pytorch.org/docs/2.1/generated/torch.nn.functional.pad.html) | [mindspore.mint.nn.functional.pad](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.pad.html) | 
[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.functional.prelu](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.prelu) | [mindspore.mint.nn.functional.prelu](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.prelu.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.functional.relu](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.relu) | [mindspore.mint.nn.functional.relu](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.relu.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.functional.relu6](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.relu6) | [mindspore.mint.nn.functional.relu6](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.relu6.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.functional.relu_](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.relu_) | [mindspore.mint.nn.functional.relu_](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.relu_.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.functional.selu](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.selu) | [mindspore.mint.nn.functional.selu](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.selu.html) | 功能一致,MindSpore不含参数inplace | +| [torch.nn.functional.sigmoid](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.sigmoid) | [mindspore.mint.nn.functional.sigmoid](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.sigmoid.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.functional.silu](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.silu) | [mindspore.mint.nn.functional.silu](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.silu.html) | 功能一致,MindSpore不含参数inplace | +| [torch.nn.functional.smooth_l1_loss](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.smooth_l1_loss) | [mindspore.mint.nn.functional.smooth_l1_loss](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.smooth_l1_loss.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.functional.softmax](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.softmax) | [mindspore.mint.nn.functional.softmax](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.softmax.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.nn.functional.softplus](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.softplus) | [mindspore.mint.nn.functional.softplus](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.softplus.html) | 
[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.functional.softshrink](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.softshrink) | [mindspore.mint.nn.functional.softshrink](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.softshrink.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.functional.tanh](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.tanh) | [mindspore.mint.nn.functional.tanh](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.tanh.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.nn.functional.unfold](https://pytorch.org/docs/2.1/nn.functional.html#torch.nn.functional.unfold) | [mindspore.mint.nn.functional.unfold](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.nn.functional.unfold.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | ## torch.special | PyTorch 2.1 APIs | MindSpore APIs | 说明 | | -------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------- | ---- | -| [torch.special.erfc](https://pytorch.org/docs/2.1/special.html#torch.special.erfc) | [mindspore.mint.special.erfc](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.special.erfc.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.special.exp2](https://pytorch.org/docs/2.1/special.html#torch.special.exp2) | [mindspore.mint.special.exp2](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.special.exp2.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.special.expm1](https://pytorch.org/docs/2.1/special.html#torch.special.expm1) | [mindspore.mint.special.expm1](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.special.expm1.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.special.log1p](https://pytorch.org/docs/2.1/special.html#torch.special.log1p) | [mindspore.mint.special.log1p](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.special.log1p.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.special.log_softmax](https://pytorch.org/docs/2.1/special.html#torch.special.log_softmax) | [mindspore.mint.special.log_softmax](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.special.log_softmax.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.special.round](https://pytorch.org/docs/2.1/special.html#torch.special.round) | [mindspore.mint.special.round](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.special.round.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| 
[torch.special.sinc](https://pytorch.org/docs/2.1/special.html#torch.special.sinc) | [mindspore.mint.special.sinc](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.special.sinc.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.special.erfc](https://pytorch.org/docs/2.1/special.html#torch.special.erfc) | [mindspore.mint.special.erfc](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.special.erfc.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.special.exp2](https://pytorch.org/docs/2.1/special.html#torch.special.exp2) | [mindspore.mint.special.exp2](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.special.exp2.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.special.expm1](https://pytorch.org/docs/2.1/special.html#torch.special.expm1) | [mindspore.mint.special.expm1](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.special.expm1.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.special.log1p](https://pytorch.org/docs/2.1/special.html#torch.special.log1p) | [mindspore.mint.special.log1p](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.special.log1p.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.special.log_softmax](https://pytorch.org/docs/2.1/special.html#torch.special.log_softmax) | [mindspore.mint.special.log_softmax](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.special.log_softmax.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.special.round](https://pytorch.org/docs/2.1/special.html#torch.special.round) | [mindspore.mint.special.round](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.special.round.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.special.sinc](https://pytorch.org/docs/2.1/special.html#torch.special.sinc) | [mindspore.mint.special.sinc](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.special.sinc.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | ## torch.Tensor | PyTorch 2.1 APIs | MindSpore APIs | 说明 | |--------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------| -| [torch.Tensor.abs](https://pytorch.org/docs/2.1/generated/torch.Tensor.abs.html) | [mindspore.Tensor.abs](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.abs.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.Tensor.absolute](https://pytorch.org/docs/2.1/generated/torch.Tensor.absolute.html) | [mindspore.Tensor.absolute](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.absolute.html) | 
[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.Tensor.add](https://pytorch.org/docs/2.1/generated/torch.Tensor.add.html)| [mindspore.Tensor.add](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.add.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.Tensor.all](https://pytorch.org/docs/2.1/generated/torch.Tensor.all.html) | [mindspore.Tensor.all](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.all.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.Tensor.any](https://pytorch.org/docs/2.1/generated/torch.Tensor.any.html) | [mindspore.Tensor.any](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.any.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.Tensor.argmax](https://pytorch.org/docs/2.1/generated/torch.Tensor.argmax.html) | [mindspore.Tensor.argmax](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.argmax.html#mindspore.Tensor.argmax) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.arctan2](https://pytorch.org/docs/2.1/generated/torch.Tensor.arctan2.html) | [mindspore.Tensor.arctan2](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.arctan2.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.argmin](https://pytorch.org/docs/2.1/generated/torch.Tensor.argmin.html) | [mindspore.Tensor.argmin](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.argmin.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.Tensor.atan2](https://pytorch.org/docs/2.1/generated/torch.Tensor.atan2.html) | [mindspore.Tensor.atan2](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.atan2.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.bool](https://pytorch.org/docs/2.1/generated/torch.Tensor.bool.html)| [mindspore.Tensor.bool](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.bool.html)| [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.ceil](https://pytorch.org/docs/2.1/generated/torch.Tensor.ceil.html) | [mindspore.Tensor.ceil](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.ceil.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.chunk](https://pytorch.org/docs/2.1/generated/torch.Tensor.chunk.html) | [mindspore.Tensor.chunk](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.chunk.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.clamp](https://pytorch.org/docs/2.1/generated/torch.Tensor.clamp.html) | 
[mindspore.Tensor.clamp](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.clamp.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.clamp_](https://pytorch.org/docs/2.1/generated/torch.Tensor.clamp_.html) | [mindspore.Tensor.clamp_](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.clamp_.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.clip](https://pytorch.org/docs/2.1/generated/torch.Tensor.clip.html) | [mindspore.Tensor.clip](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.clip.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.cos](https://pytorch.org/docs/2.1/generated/torch.Tensor.cos.html) | [mindspore.Tensor.cos](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.cos.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.cumsum](https://pytorch.org/docs/2.1/generated/torch.Tensor.cumsum.html) | [mindspore.Tensor.cumsum](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.cumsum.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.div](https://pytorch.org/docs/2.1/generated/torch.Tensor.div.html) | [mindspore.Tensor.div](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.div.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.divide](https://pytorch.org/docs/2.1/generated/torch.Tensor.divide.html) | [mindspore.Tensor.divide](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.divide.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.eq](https://pytorch.org/docs/2.1/generated/torch.Tensor.eq.html) | [mindspore.Tensor.eq](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.eq.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.erf](https://pytorch.org/docs/2.1/generated/torch.Tensor.erf.html) | [mindspore.Tensor.erf](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.erf.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.Tensor.exp](https://pytorch.org/docs/2.1/generated/torch.Tensor.exp.html) | [mindspore.Tensor.exp](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.exp.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.Tensor.expand_as](https://pytorch.org/docs/2.1/generated/torch.Tensor.expand_as.html) | [mindspore.Tensor.expand_as](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.expand_as.html#mindspore.Tensor.expand_as) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| 
[torch.Tensor.flatten](https://pytorch.org/docs/2.1/generated/torch.Tensor.flatten.html) | [mindspore.Tensor.flatten](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.flatten.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.Tensor.float](https://pytorch.org/docs/2.1/generated/torch.Tensor.float.html) | [mindspore.Tensor.float](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.float.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.floor](https://pytorch.org/docs/2.1/generated/torch.Tensor.floor.html) | [mindspore.Tensor.floor](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.floor.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.gather](https://pytorch.org/docs/2.1/generated/torch.Tensor.gather.html) | [mindspore.Tensor.gather](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.gather.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.greater](https://pytorch.org/docs/2.1/generated/torch.Tensor.greater.html) | [mindspore.Tensor.greater](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.greater.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.gt](https://pytorch.org/docs/2.1/generated/torch.Tensor.gt.html) | [mindspore.Tensor.gt](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.gt.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.half](https://pytorch.org/docs/2.1/generated/torch.Tensor.half.html) | [mindspore.Tensor.half](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.half.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.index_select](https://pytorch.org/docs/2.1/generated/torch.Tensor.index_select.html) | [mindspore.Tensor.index_select](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.index_select.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.int](https://pytorch.org/docs/2.1/generated/torch.Tensor.int.html) | [mindspore.Tensor.int](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.int.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.isfinite](https://pytorch.org/docs/2.1/generated/torch.Tensor.isfinite.html) | [mindspore.Tensor.isfinite](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.isfinite.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.isnan](https://pytorch.org/docs/2.1/generated/torch.Tensor.isnan.html) | [mindspore.Tensor.isnan](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.isnan.html) | 
[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.item](https://pytorch.org/docs/2.1/generated/torch.Tensor.item.html) | [mindspore.Tensor.item](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.item.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.le](https://pytorch.org/docs/2.1/generated/torch.Tensor.le.html) | [mindspore.Tensor.le](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.le.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.less](https://pytorch.org/docs/2.1/generated/torch.Tensor.less.html) | [mindspore.Tensor.less](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.less.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.less_equal](https://pytorch.org/docs/2.1/generated/torch.Tensor.less_equal.html) | [mindspore.Tensor.less_equal](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.less_equal.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.log](https://pytorch.org/docs/2.1/generated/torch.Tensor.log.html) | [mindspore.Tensor.log](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.log.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.logical_and](https://pytorch.org/docs/2.1/generated/torch.Tensor.logical_and.html) | [mindspore.Tensor.logical_and](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.logical_and.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.logical_not](https://pytorch.org/docs/2.1/generated/torch.Tensor.logical_not.html) | [mindspore.Tensor.logical_not](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.logical_not.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.logical_or](https://pytorch.org/docs/2.1/generated/torch.Tensor.logical_or.html) | [mindspore.Tensor.logical_or](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.logical_or.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.long](https://pytorch.org/docs/2.1/generated/torch.Tensor.long.html) | [mindspore.Tensor.long](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.long.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.lt](https://pytorch.org/docs/2.1/generated/torch.Tensor.lt.html) | [mindspore.Tensor.lt](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.lt.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.masked_fill](https://pytorch.org/docs/2.1/generated/torch.Tensor.masked_fill.html) | 
[mindspore.Tensor.masked_fill](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.masked_fill.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.masked_select](https://pytorch.org/docs/2.1/generated/torch.Tensor.masked_select.html) | [mindspore.Tensor.masked_select](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.masked_select.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.matmul](https://pytorch.org/docs/2.1/generated/torch.Tensor.matmul.html) | [mindspore.Tensor.matmul](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.matmul.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.max](https://pytorch.org/docs/2.1/generated/torch.Tensor.max.html) | [mindspore.Tensor.max](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.max.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.maximum](https://pytorch.org/docs/2.1/generated/torch.Tensor.maximum.html) | [mindspore.Tensor.maximum](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.maximum.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.mean](https://pytorch.org/docs/2.1/generated/torch.Tensor.mean.html) | [mindspore.Tensor.mean](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.mean.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.min](https://pytorch.org/docs/2.1/generated/torch.Tensor.min.html) | [mindspore.Tensor.min](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.min.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.minimum](https://pytorch.org/docs/2.1/generated/torch.Tensor.minimum.html) | [mindspore.Tensor.minimum](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.minimum.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.mul](https://pytorch.org/docs/2.1/generated/torch.Tensor.mul.html) | [mindspore.Tensor.mul](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.mul.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.Tensor.nan_to_num](https://pytorch.org/docs/2.1/generated/torch.Tensor.nan_to_num.html) | [mindspore.Tensor.nan_to_num](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.nan_to_num.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.Tensor.ne](https://pytorch.org/docs/2.1/generated/torch.Tensor.ne.html) | [mindspore.Tensor.ne](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.ne.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| 
[torch.Tensor.neg](https://pytorch.org/docs/2.1/generated/torch.Tensor.neg.html) | [mindspore.Tensor.neg](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.neg.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.negative](https://pytorch.org/docs/2.1/generated/torch.Tensor.negative.html) | [mindspore.Tensor.negative](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.negative.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.not_equal](https://pytorch.org/docs/2.1/generated/torch.Tensor.not_equal.html) | [mindspore.Tensor.not_equal](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.not_equal.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.pow](https://pytorch.org/docs/2.1/generated/torch.Tensor.pow.html) | [mindspore.Tensor.pow](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.pow.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.Tensor.prod](https://pytorch.org/docs/2.1/generated/torch.Tensor.prod.html) | [mindspore.Tensor.prod](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.prod.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.reciprocal](https://pytorch.org/docs/2.1/generated/torch.Tensor.reciprocal.html) | [mindspore.Tensor.reciprocal](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.reciprocal.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.remainder](https://pytorch.org/docs/2.1/generated/torch.Tensor.remainder.html) | [mindspore.Tensor.remainder](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.remainder.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.repeat_interleave](https://pytorch.org/docs/2.1/generated/torch.Tensor.repeat_interleave.html) | [mindspore.Tensor.repeat_interleave](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.repeat_interleave.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.reshape](https://pytorch.org/docs/2.1/generated/torch.Tensor.reshape.html) | [mindspore.Tensor.reshape](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.reshape.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.round](https://pytorch.org/docs/2.1/generated/torch.Tensor.round.html)| [mindspore.Tensor.round](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.round.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.Tensor.rsqrt](https://pytorch.org/docs/2.1/generated/torch.Tensor.rsqrt.html) | 
[mindspore.Tensor.rsqrt](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.rsqrt.html) |[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.scatter](https://pytorch.org/docs/2.1/generated/torch.Tensor.scatter.html) | [mindspore.Tensor.scatter](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.scatter.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.scatter_add](https://pytorch.org/docs/2.1/generated/torch.Tensor.scatter_add.html) | [mindspore.Tensor.scatter_add](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.scatter_add.html) |[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.sigmoid](https://pytorch.org/docs/2.1/generated/torch.Tensor.sigmoid.html) | [mindspore.Tensor.sigmoid](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.sigmoid.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.Tensor.sin](https://pytorch.org/docs/2.1/generated/torch.Tensor.sin.html)| [mindspore.Tensor.sin](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.sin.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.Tensor.sort](https://pytorch.org/docs/2.1/generated/torch.Tensor.sort.html) | [mindspore.Tensor.sort](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.sort.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.split](https://pytorch.org/docs/2.1/generated/torch.Tensor.split.html) | [mindspore.Tensor.split](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.split.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.Tensor.sqrt](https://pytorch.org/docs/2.1/generated/torch.Tensor.sqrt.html) | [mindspore.Tensor.sqrt](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.sqrt.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.square](https://pytorch.org/docs/2.1/generated/torch.Tensor.square.html)| [mindspore.Tensor.square](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.square.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.Tensor.sub](https://pytorch.org/docs/2.1/generated/torch.Tensor.sub.html) | [mindspore.Tensor.sub](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.sub.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.Tensor.sum](https://pytorch.org/docs/2.1/generated/torch.Tensor.sum.html) | [mindspore.Tensor.sum](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.sum.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| 
[torch.Tensor.t](https://pytorch.org/docs/2.1/generated/torch.Tensor.t.html) | [mindspore.Tensor.t](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.t.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.tanh](https://pytorch.org/docs/2.1/generated/torch.Tensor.tanh.html) | [mindspore.Tensor.tanh](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.tanh.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.tile](https://pytorch.org/docs/2.1/generated/torch.Tensor.tile.html) | [mindspore.Tensor.tile](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.tile.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.topk](https://pytorch.org/docs/2.1/generated/torch.Tensor.topk.html)| [mindspore.Tensor.topk](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.topk.html)|[一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.Tensor.tril](https://pytorch.org/docs/2.1/generated/torch.Tensor.tril.html) | [mindspore.Tensor.tril](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.tril.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.trunc](https://pytorch.org/docs/2.1/generated/torch.Tensor.trunc.html) | [mindspore.Tensor.trunc](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.trunc.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| -| [torch.Tensor.view_as](https://pytorch.org/docs/2.1/generated/torch.Tensor.view_as.html) | [mindspore.Tensor.view_as](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.view_as.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | -| [torch.Tensor.where](https://pytorch.org/docs/2.1/generated/torch.Tensor.where.html) | [mindspore.Tensor.where](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.where.html) | [一致](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.abs](https://pytorch.org/docs/2.1/generated/torch.Tensor.abs.html) | [mindspore.Tensor.abs](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.abs.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.Tensor.absolute](https://pytorch.org/docs/2.1/generated/torch.Tensor.absolute.html) | [mindspore.Tensor.absolute](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.absolute.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.Tensor.add](https://pytorch.org/docs/2.1/generated/torch.Tensor.add.html)| [mindspore.Tensor.add](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.add.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| 
[torch.Tensor.all](https://pytorch.org/docs/2.1/generated/torch.Tensor.all.html) | [mindspore.Tensor.all](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.all.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.Tensor.any](https://pytorch.org/docs/2.1/generated/torch.Tensor.any.html) | [mindspore.Tensor.any](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.any.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.Tensor.argmax](https://pytorch.org/docs/2.1/generated/torch.Tensor.argmax.html) | [mindspore.Tensor.argmax](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.argmax.html#mindspore.Tensor.argmax) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.arctan2](https://pytorch.org/docs/2.1/generated/torch.Tensor.arctan2.html) | [mindspore.Tensor.arctan2](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.arctan2.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.argmin](https://pytorch.org/docs/2.1/generated/torch.Tensor.argmin.html) | [mindspore.Tensor.argmin](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.argmin.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.Tensor.atan2](https://pytorch.org/docs/2.1/generated/torch.Tensor.atan2.html) | [mindspore.Tensor.atan2](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.atan2.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.bool](https://pytorch.org/docs/2.1/generated/torch.Tensor.bool.html)| [mindspore.Tensor.bool](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.bool.html)| [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.ceil](https://pytorch.org/docs/2.1/generated/torch.Tensor.ceil.html) | [mindspore.Tensor.ceil](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.ceil.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.chunk](https://pytorch.org/docs/2.1/generated/torch.Tensor.chunk.html) | [mindspore.Tensor.chunk](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.chunk.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.clamp](https://pytorch.org/docs/2.1/generated/torch.Tensor.clamp.html) | [mindspore.Tensor.clamp](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.clamp.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.clamp_](https://pytorch.org/docs/2.1/generated/torch.Tensor.clamp_.html) | [mindspore.Tensor.clamp_](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.clamp_.html) | 
[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.clip](https://pytorch.org/docs/2.1/generated/torch.Tensor.clip.html) | [mindspore.Tensor.clip](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.clip.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.cos](https://pytorch.org/docs/2.1/generated/torch.Tensor.cos.html) | [mindspore.Tensor.cos](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.cos.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.cumsum](https://pytorch.org/docs/2.1/generated/torch.Tensor.cumsum.html) | [mindspore.Tensor.cumsum](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.cumsum.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.div](https://pytorch.org/docs/2.1/generated/torch.Tensor.div.html) | [mindspore.Tensor.div](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.div.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.divide](https://pytorch.org/docs/2.1/generated/torch.Tensor.divide.html) | [mindspore.Tensor.divide](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.divide.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.eq](https://pytorch.org/docs/2.1/generated/torch.Tensor.eq.html) | [mindspore.Tensor.eq](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.eq.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.erf](https://pytorch.org/docs/2.1/generated/torch.Tensor.erf.html) | [mindspore.Tensor.erf](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.erf.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.Tensor.exp](https://pytorch.org/docs/2.1/generated/torch.Tensor.exp.html) | [mindspore.Tensor.exp](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.exp.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.Tensor.expand_as](https://pytorch.org/docs/2.1/generated/torch.Tensor.expand_as.html) | [mindspore.Tensor.expand_as](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.expand_as.html#mindspore.Tensor.expand_as) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.flatten](https://pytorch.org/docs/2.1/generated/torch.Tensor.flatten.html) | [mindspore.Tensor.flatten](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.flatten.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.Tensor.float](https://pytorch.org/docs/2.1/generated/torch.Tensor.float.html) | 
[mindspore.Tensor.float](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.float.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.floor](https://pytorch.org/docs/2.1/generated/torch.Tensor.floor.html) | [mindspore.Tensor.floor](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.floor.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.gather](https://pytorch.org/docs/2.1/generated/torch.Tensor.gather.html) | [mindspore.Tensor.gather](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.gather.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.greater](https://pytorch.org/docs/2.1/generated/torch.Tensor.greater.html) | [mindspore.Tensor.greater](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.greater.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.gt](https://pytorch.org/docs/2.1/generated/torch.Tensor.gt.html) | [mindspore.Tensor.gt](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.gt.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.half](https://pytorch.org/docs/2.1/generated/torch.Tensor.half.html) | [mindspore.Tensor.half](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.half.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.index_select](https://pytorch.org/docs/2.1/generated/torch.Tensor.index_select.html) | [mindspore.Tensor.index_select](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.index_select.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.int](https://pytorch.org/docs/2.1/generated/torch.Tensor.int.html) | [mindspore.Tensor.int](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.int.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.isfinite](https://pytorch.org/docs/2.1/generated/torch.Tensor.isfinite.html) | [mindspore.Tensor.isfinite](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.isfinite.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.isnan](https://pytorch.org/docs/2.1/generated/torch.Tensor.isnan.html) | [mindspore.Tensor.isnan](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.isnan.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.item](https://pytorch.org/docs/2.1/generated/torch.Tensor.item.html) | [mindspore.Tensor.item](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.item.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| 
[torch.Tensor.le](https://pytorch.org/docs/2.1/generated/torch.Tensor.le.html) | [mindspore.Tensor.le](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.le.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.less](https://pytorch.org/docs/2.1/generated/torch.Tensor.less.html) | [mindspore.Tensor.less](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.less.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.less_equal](https://pytorch.org/docs/2.1/generated/torch.Tensor.less_equal.html) | [mindspore.Tensor.less_equal](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.less_equal.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.log](https://pytorch.org/docs/2.1/generated/torch.Tensor.log.html) | [mindspore.Tensor.log](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.log.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.logical_and](https://pytorch.org/docs/2.1/generated/torch.Tensor.logical_and.html) | [mindspore.Tensor.logical_and](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.logical_and.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.logical_not](https://pytorch.org/docs/2.1/generated/torch.Tensor.logical_not.html) | [mindspore.Tensor.logical_not](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.logical_not.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.logical_or](https://pytorch.org/docs/2.1/generated/torch.Tensor.logical_or.html) | [mindspore.Tensor.logical_or](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.logical_or.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.long](https://pytorch.org/docs/2.1/generated/torch.Tensor.long.html) | [mindspore.Tensor.long](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.long.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.lt](https://pytorch.org/docs/2.1/generated/torch.Tensor.lt.html) | [mindspore.Tensor.lt](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.lt.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.masked_fill](https://pytorch.org/docs/2.1/generated/torch.Tensor.masked_fill.html) | [mindspore.Tensor.masked_fill](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.masked_fill.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.masked_select](https://pytorch.org/docs/2.1/generated/torch.Tensor.masked_select.html) | 
[mindspore.Tensor.masked_select](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.masked_select.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.matmul](https://pytorch.org/docs/2.1/generated/torch.Tensor.matmul.html) | [mindspore.Tensor.matmul](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.matmul.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.max](https://pytorch.org/docs/2.1/generated/torch.Tensor.max.html) | [mindspore.Tensor.max](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.max.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.maximum](https://pytorch.org/docs/2.1/generated/torch.Tensor.maximum.html) | [mindspore.Tensor.maximum](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.maximum.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.mean](https://pytorch.org/docs/2.1/generated/torch.Tensor.mean.html) | [mindspore.Tensor.mean](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.mean.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.min](https://pytorch.org/docs/2.1/generated/torch.Tensor.min.html) | [mindspore.Tensor.min](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.min.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.minimum](https://pytorch.org/docs/2.1/generated/torch.Tensor.minimum.html) | [mindspore.Tensor.minimum](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.minimum.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.mul](https://pytorch.org/docs/2.1/generated/torch.Tensor.mul.html) | [mindspore.Tensor.mul](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.mul.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.Tensor.nan_to_num](https://pytorch.org/docs/2.1/generated/torch.Tensor.nan_to_num.html) | [mindspore.Tensor.nan_to_num](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.nan_to_num.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.Tensor.ne](https://pytorch.org/docs/2.1/generated/torch.Tensor.ne.html) | [mindspore.Tensor.ne](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.ne.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.neg](https://pytorch.org/docs/2.1/generated/torch.Tensor.neg.html) | [mindspore.Tensor.neg](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.neg.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| 
[torch.Tensor.negative](https://pytorch.org/docs/2.1/generated/torch.Tensor.negative.html) | [mindspore.Tensor.negative](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.negative.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.not_equal](https://pytorch.org/docs/2.1/generated/torch.Tensor.not_equal.html) | [mindspore.Tensor.not_equal](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.not_equal.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.pow](https://pytorch.org/docs/2.1/generated/torch.Tensor.pow.html) | [mindspore.Tensor.pow](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.pow.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.Tensor.prod](https://pytorch.org/docs/2.1/generated/torch.Tensor.prod.html) | [mindspore.Tensor.prod](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.prod.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.reciprocal](https://pytorch.org/docs/2.1/generated/torch.Tensor.reciprocal.html) | [mindspore.Tensor.reciprocal](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.reciprocal.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.remainder](https://pytorch.org/docs/2.1/generated/torch.Tensor.remainder.html) | [mindspore.Tensor.remainder](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.remainder.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.repeat_interleave](https://pytorch.org/docs/2.1/generated/torch.Tensor.repeat_interleave.html) | [mindspore.Tensor.repeat_interleave](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.repeat_interleave.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.reshape](https://pytorch.org/docs/2.1/generated/torch.Tensor.reshape.html) | [mindspore.Tensor.reshape](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.reshape.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.round](https://pytorch.org/docs/2.1/generated/torch.Tensor.round.html)| [mindspore.Tensor.round](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.round.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.Tensor.rsqrt](https://pytorch.org/docs/2.1/generated/torch.Tensor.rsqrt.html) | [mindspore.Tensor.rsqrt](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.rsqrt.html) |[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.scatter](https://pytorch.org/docs/2.1/generated/torch.Tensor.scatter.html) | 
[mindspore.Tensor.scatter](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.scatter.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.scatter_add](https://pytorch.org/docs/2.1/generated/torch.Tensor.scatter_add.html) | [mindspore.Tensor.scatter_add](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.scatter_add.html) |[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.sigmoid](https://pytorch.org/docs/2.1/generated/torch.Tensor.sigmoid.html) | [mindspore.Tensor.sigmoid](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.sigmoid.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.Tensor.sin](https://pytorch.org/docs/2.1/generated/torch.Tensor.sin.html)| [mindspore.Tensor.sin](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.sin.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.Tensor.sort](https://pytorch.org/docs/2.1/generated/torch.Tensor.sort.html) | [mindspore.Tensor.sort](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.sort.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.split](https://pytorch.org/docs/2.1/generated/torch.Tensor.split.html) | [mindspore.Tensor.split](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.split.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.Tensor.sqrt](https://pytorch.org/docs/2.1/generated/torch.Tensor.sqrt.html) | [mindspore.Tensor.sqrt](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.sqrt.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.square](https://pytorch.org/docs/2.1/generated/torch.Tensor.square.html)| [mindspore.Tensor.square](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.square.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.Tensor.sub](https://pytorch.org/docs/2.1/generated/torch.Tensor.sub.html) | [mindspore.Tensor.sub](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.sub.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.Tensor.sum](https://pytorch.org/docs/2.1/generated/torch.Tensor.sum.html) | [mindspore.Tensor.sum](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.sum.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.t](https://pytorch.org/docs/2.1/generated/torch.Tensor.t.html) | [mindspore.Tensor.t](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.t.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| 
[torch.Tensor.tanh](https://pytorch.org/docs/2.1/generated/torch.Tensor.tanh.html) | [mindspore.Tensor.tanh](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.tanh.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.tile](https://pytorch.org/docs/2.1/generated/torch.Tensor.tile.html) | [mindspore.Tensor.tile](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.tile.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.topk](https://pytorch.org/docs/2.1/generated/torch.Tensor.topk.html)| [mindspore.Tensor.topk](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.topk.html)|[一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.Tensor.tril](https://pytorch.org/docs/2.1/generated/torch.Tensor.tril.html) | [mindspore.Tensor.tril](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.tril.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.trunc](https://pytorch.org/docs/2.1/generated/torch.Tensor.trunc.html) | [mindspore.Tensor.trunc](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.trunc.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景)| +| [torch.Tensor.view_as](https://pytorch.org/docs/2.1/generated/torch.Tensor.view_as.html) | [mindspore.Tensor.view_as](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.view_as.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | +| [torch.Tensor.where](https://pytorch.org/docs/2.1/generated/torch.Tensor.where.html) | [mindspore.Tensor.where](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.where.html) | [一致](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#api映射一致标准及例外场景) | ## torch.optim | PyTorch 2.1 APIs | MindSpore APIs | 说明 | | ------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------ | -| [torch.optim.Adam](https://pytorch.org/docs/2.1/optim.html#torch.optim.Adam) | [mindspore.mint.optim.Adam](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.optim.Adam.html) | 功能一致,PyTorch多一些优化参数 | -| [torch.optim.AdamW](https://pytorch.org/docs/2.1/optim.html#torch.optim.AdamW) | [mindspore.mint.optim.AdamW](https://www.mindspore.cn/docs/zh-CN/master/api_python/mint/mindspore.mint.optim.AdamW.html) | 功能一致,PyTorch多一些优化参数 | +| [torch.optim.Adam](https://pytorch.org/docs/2.1/optim.html#torch.optim.Adam) | [mindspore.mint.optim.Adam](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.optim.Adam.html) | 功能一致,PyTorch多一些优化参数 | +| [torch.optim.AdamW](https://pytorch.org/docs/2.1/optim.html#torch.optim.AdamW) | 
[mindspore.mint.optim.AdamW](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mint/mindspore.mint.optim.AdamW.html) | 功能一致,PyTorch多一些优化参数 | ## torch.utils | PyTorch 1.8.1 APIs | MindSpore APIs | 说明 | | ------------------ | --------------- | ------ | -| [torch.utils.data.DataLoader](https://pytorch.org/docs/1.8.1/data.html#torch.utils.data.DataLoader) | [mindspore.dataset.GeneratorDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.GeneratorDataset.html#mindspore.dataset.GeneratorDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/DataLoader.html) | -| [torch.utils.data.distributed.DistributedSampler](https://pytorch.org/docs/1.8.1/data.html#torch.utils.data.distributed.DistributedSampler) | [mindspore.dataset.DistributedSampler](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.DistributedSampler.html#mindspore.dataset.DistributedSampler) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/DistributedSampler.html) | -| [torch.utils.data.RandomSampler](https://pytorch.org/docs/1.8.1/data.html#torch.utils.data.RandomSampler) | [mindspore.dataset.RandomSampler](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.RandomSampler.html#mindspore.dataset.RandomSampler) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/RandomSampler.html) | -| [torch.utils.data.SequentialSampler](https://pytorch.org/docs/1.8.1/data.html#torch.utils.data.SequentialSampler) | [mindspore.dataset.SequentialSampler](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.SequentialSampler.html#mindspore.dataset.SequentialSampler) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/SequentialSampler.html) | -| [torch.utils.data.SubsetRandomSampler](https://pytorch.org/docs/1.8.1/data.html#torch.utils.data.SubsetRandomSampler) | [mindspore.dataset.SubsetRandomSampler](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.SubsetRandomSampler.html#mindspore.dataset.SubsetRandomSampler) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/SubsetRandomSampler.html) | -| [torch.utils.data.WeightedRandomSampler](https://pytorch.org/docs/1.8.1/data.html#torch.utils.data.WeightedRandomSampler) | [mindspore.dataset.WeightedRandomSampler](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.WeightedRandomSampler.html#mindspore.dataset.WeightedRandomSampler) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/WeightedRandomSampler.html) | -| [torch.utils.checkpoint.checkpoint](https://pytorch.org/docs/1.8.1/checkpoint.html#torch.utils.checkpoint.checkpoint) | [mindspore.nn.Cell.recompute](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.recompute) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/checkpoint.html) | +| [torch.utils.data.DataLoader](https://pytorch.org/docs/1.8.1/data.html#torch.utils.data.DataLoader) | [mindspore.dataset.GeneratorDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.GeneratorDataset.html#mindspore.dataset.GeneratorDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/DataLoader.html) | +| 
[torch.utils.data.distributed.DistributedSampler](https://pytorch.org/docs/1.8.1/data.html#torch.utils.data.distributed.DistributedSampler) | [mindspore.dataset.DistributedSampler](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.DistributedSampler.html#mindspore.dataset.DistributedSampler) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/DistributedSampler.html) | +| [torch.utils.data.RandomSampler](https://pytorch.org/docs/1.8.1/data.html#torch.utils.data.RandomSampler) | [mindspore.dataset.RandomSampler](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.RandomSampler.html#mindspore.dataset.RandomSampler) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/RandomSampler.html) | +| [torch.utils.data.SequentialSampler](https://pytorch.org/docs/1.8.1/data.html#torch.utils.data.SequentialSampler) | [mindspore.dataset.SequentialSampler](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.SequentialSampler.html#mindspore.dataset.SequentialSampler) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/SequentialSampler.html) | +| [torch.utils.data.SubsetRandomSampler](https://pytorch.org/docs/1.8.1/data.html#torch.utils.data.SubsetRandomSampler) | [mindspore.dataset.SubsetRandomSampler](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.SubsetRandomSampler.html#mindspore.dataset.SubsetRandomSampler) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/SubsetRandomSampler.html) | +| [torch.utils.data.WeightedRandomSampler](https://pytorch.org/docs/1.8.1/data.html#torch.utils.data.WeightedRandomSampler) | [mindspore.dataset.WeightedRandomSampler](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.WeightedRandomSampler.html#mindspore.dataset.WeightedRandomSampler) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/WeightedRandomSampler.html) | +| [torch.utils.checkpoint.checkpoint](https://pytorch.org/docs/1.8.1/checkpoint.html#torch.utils.checkpoint.checkpoint) | [mindspore.nn.Cell.recompute](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.recompute) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/checkpoint.html) | ## torchaudio @@ -523,33 +523,33 @@ mindspore.mint.argmax只有一种API形式,即mindspore.mint.argmax(input, dim | TorchAudio 0.8.1 APIs | MindSpore APIs | 说明 | | ----------------------- | ------------------------- | ------------------ | -| [torchaudio.datasets.CMUARCTIC](https://pytorch.org/audio/0.8.0/datasets.html#cmuarctic) | [mindspore.dataset.CMUArcticDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.CMUArcticDataset.html#mindspore.dataset.CMUArcticDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/CMUARCTIC.html) | -| [torchaudio.datasets.GTZAN](https://pytorch.org/audio/0.8.0/datasets.html#gtzan) | [mindspore.dataset.GTZANDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.GTZANDataset.html#mindspore.dataset.GTZANDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/GTZAN.html) | -| [torchaudio.datasets.LIBRITTS](https://pytorch.org/audio/0.8.0/datasets.html#libritts) | 
[mindspore.dataset.LibriTTSDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.LibriTTSDataset.html#mindspore.dataset.LibriTTSDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/LIBRITTS.html) | -| [torchaudio.datasets.LJSPEECH](https://pytorch.org/audio/0.8.0/datasets.html#ljspeech) | [mindspore.dataset.LJSpeechDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.LJSpeechDataset.html#mindspore.dataset.LJSpeechDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/LJSPEECH.html) | -| [torchaudio.datasets.SPEECHCOMMANDS](https://pytorch.org/audio/0.8.0/datasets.html#speechcommands) | [mindspore.dataset.SpeechCommandsDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.SpeechCommandsDataset.html#mindspore.dataset.SpeechCommandsDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/SPEECHCOMMANDS.html) | -| [torchaudio.datasets.TEDLIUM](https://pytorch.org/audio/0.8.0/datasets.html#tedlium) | [mindspore.dataset.TedliumDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.TedliumDataset.html#mindspore.dataset.TedliumDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/TEDLIUM.html) | -| [torchaudio.datasets.YESNO](https://pytorch.org/audio/0.8.0/datasets.html#yesno) | [mindspore.dataset.YesNoDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.YesNoDataset.html#mindspore.dataset.YesNoDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/YESNO.html) | -| [torchaudio.transforms.AmplitudeToDB](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.AmplitudeToDB.html) | [mindspore.dataset.audio.AmplitudeToDB](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_audio/mindspore.dataset.audio.AmplitudeToDB.html#mindspore.dataset.audio.AmplitudeToDB) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/AmplitudeToDB.html) | -| [torchaudio.transforms.ComplexNorm](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.ComplexNorm.html) | [mindspore.dataset.audio.ComplexNorm](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_audio/mindspore.dataset.audio.ComplexNorm.html#mindspore.dataset.audio.ComplexNorm) | 一致 | -| [torchaudio.transforms.ComputeDeltas](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.ComputeDeltas.html) | [mindspore.dataset.audio.ComputeDeltas](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_audio/mindspore.dataset.audio.ComputeDeltas.html#mindspore.dataset.audio.ComputeDeltas) | 功能一致,参数名不同 | -| [torchaudio.transforms.Fade](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.Fade.html) | [mindspore.dataset.audio.Fade](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_audio/mindspore.dataset.audio.Fade.html#mindspore.dataset.audio.Fade) | 一致 | -| [torchaudio.transforms.FrequencyMasking](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.FrequencyMasking.html) | [mindspore.dataset.audio.FrequencyMasking](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_audio/mindspore.dataset.audio.FrequencyMasking.html#mindspore.dataset.audio.FrequencyMasking) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/FrequencyMasking.html) | -| 
[torchaudio.transforms.GriffinLim](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.GriffinLim.html) | [mindspore.dataset.audio.GriffinLim](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_audio/mindspore.dataset.audio.GriffinLim.html#mindspore.dataset.audio.GriffinLim) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/GriffinLim.html) | -| [torchaudio.transforms.InverseMelScale](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.InverseMelScale.html) | [mindspore.dataset.audio.InverseMelScale](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_audio/mindspore.dataset.audio.InverseMelScale.html#mindspore.dataset.audio.InverseMelScale) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/InverseMelScale.html) | -| [torchaudio.transforms.MelScale](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.MelScale.html) | [mindspore.dataset.audio.MelScale](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_audio/mindspore.dataset.audio.MelScale.html#mindspore.dataset.audio.MelScale) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/MelScale.html) | -| [torchaudio.transforms.MelSpectrogram](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.MelSpectrogram.html) | [mindspore.dataset.audio.MelSpectrogram](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_audio/mindspore.dataset.audio.MelSpectrogram.html#mindspore.dataset.audio.MelSpectrogram) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/MelSpectrogram.html) | -| [torchaudio.transforms.MFCC](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.MFCC.html) | [mindspore.dataset.audio.MFCC](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_audio/mindspore.dataset.audio.MFCC.html#mindspore.dataset.audio.MFCC) | 一致 | -| [torchaudio.transforms.MuLawEncoding](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.MuLawEncoding.html) | [mindspore.dataset.audio.MuLawEncoding](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_audio/mindspore.dataset.audio.MuLawEncoding.html#mindspore.dataset.audio.MuLawEncoding) | 一致 | -| [torchaudio.transforms.MuLawDecoding](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.MuLawDecoding.html) | [mindspore.dataset.audio.MuLawDecoding](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_audio/mindspore.dataset.audio.MuLawDecoding.html#mindspore.dataset.audio.MuLawDecoding) | 一致 | -| [torchaudio.transforms.Resample](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.Resample.html) | [mindspore.dataset.audio.Resample](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_audio/mindspore.dataset.audio.Resample.html#mindspore.dataset.audio.Resample) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/Resample.html) | -| [torchaudio.transforms.SlidingWindowCmn](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.SlidingWindowCmn.html) | [mindspore.dataset.audio.SlidingWindowCmn](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_audio/mindspore.dataset.audio.SlidingWindowCmn.html#mindspore.dataset.audio.SlidingWindowCmn) | 一致 | -| [torchaudio.transforms.SpectralCentroid](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.SpectralCentroid.html) | 
[mindspore.dataset.audio.SpectralCentroid](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_audio/mindspore.dataset.audio.SpectralCentroid.html#mindspore.dataset.audio.SpectralCentroid) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/SpectralCentroid.html) | -| [torchaudio.transforms.Spectrogram](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.Spectrogram.html) | [mindspore.dataset.audio.Spectrogram](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_audio/mindspore.dataset.audio.Spectrogram.html#mindspore.dataset.audio.Spectrogram) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/Spectrogram.html) | -| [torchaudio.transforms.TimeMasking](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.TimeMasking.html) | [mindspore.dataset.audio.TimeMasking](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_audio/mindspore.dataset.audio.TimeMasking.html#mindspore.dataset.audio.TimeMasking) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/TimeMasking.html) | -| [torchaudio.transforms.TimeStretch](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.TimeStretch.html) | [mindspore.dataset.audio.TimeStretch](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_audio/mindspore.dataset.audio.TimeStretch.html#mindspore.dataset.audio.TimeStretch) | 一致 | -| [torchaudio.transforms.Vad](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.Vad.html) | [mindspore.dataset.audio.Vad](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_audio/mindspore.dataset.audio.Vad.html#mindspore.dataset.audio.Vad) | 一致 | -| [torchaudio.transforms.Vol](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.Vol.html) | [mindspore.dataset.audio.Vol](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_audio/mindspore.dataset.audio.Vol.html#mindspore.dataset.audio.Vol) | 一致 | +| [torchaudio.datasets.CMUARCTIC](https://pytorch.org/audio/0.8.0/datasets.html#cmuarctic) | [mindspore.dataset.CMUArcticDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.CMUArcticDataset.html#mindspore.dataset.CMUArcticDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/CMUARCTIC.html) | +| [torchaudio.datasets.GTZAN](https://pytorch.org/audio/0.8.0/datasets.html#gtzan) | [mindspore.dataset.GTZANDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.GTZANDataset.html#mindspore.dataset.GTZANDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/GTZAN.html) | +| [torchaudio.datasets.LIBRITTS](https://pytorch.org/audio/0.8.0/datasets.html#libritts) | [mindspore.dataset.LibriTTSDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.LibriTTSDataset.html#mindspore.dataset.LibriTTSDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/LIBRITTS.html) | +| [torchaudio.datasets.LJSPEECH](https://pytorch.org/audio/0.8.0/datasets.html#ljspeech) | [mindspore.dataset.LJSpeechDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.LJSpeechDataset.html#mindspore.dataset.LJSpeechDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/LJSPEECH.html) | +| [torchaudio.datasets.SPEECHCOMMANDS](https://pytorch.org/audio/0.8.0/datasets.html#speechcommands) | 
[mindspore.dataset.SpeechCommandsDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.SpeechCommandsDataset.html#mindspore.dataset.SpeechCommandsDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/SPEECHCOMMANDS.html) | +| [torchaudio.datasets.TEDLIUM](https://pytorch.org/audio/0.8.0/datasets.html#tedlium) | [mindspore.dataset.TedliumDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.TedliumDataset.html#mindspore.dataset.TedliumDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/TEDLIUM.html) | +| [torchaudio.datasets.YESNO](https://pytorch.org/audio/0.8.0/datasets.html#yesno) | [mindspore.dataset.YesNoDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.YesNoDataset.html#mindspore.dataset.YesNoDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/YESNO.html) | +| [torchaudio.transforms.AmplitudeToDB](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.AmplitudeToDB.html) | [mindspore.dataset.audio.AmplitudeToDB](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_audio/mindspore.dataset.audio.AmplitudeToDB.html#mindspore.dataset.audio.AmplitudeToDB) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/AmplitudeToDB.html) | +| [torchaudio.transforms.ComplexNorm](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.ComplexNorm.html) | [mindspore.dataset.audio.ComplexNorm](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_audio/mindspore.dataset.audio.ComplexNorm.html#mindspore.dataset.audio.ComplexNorm) | 一致 | +| [torchaudio.transforms.ComputeDeltas](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.ComputeDeltas.html) | [mindspore.dataset.audio.ComputeDeltas](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_audio/mindspore.dataset.audio.ComputeDeltas.html#mindspore.dataset.audio.ComputeDeltas) | 功能一致,参数名不同 | +| [torchaudio.transforms.Fade](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.Fade.html) | [mindspore.dataset.audio.Fade](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_audio/mindspore.dataset.audio.Fade.html#mindspore.dataset.audio.Fade) | 一致 | +| [torchaudio.transforms.FrequencyMasking](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.FrequencyMasking.html) | [mindspore.dataset.audio.FrequencyMasking](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_audio/mindspore.dataset.audio.FrequencyMasking.html#mindspore.dataset.audio.FrequencyMasking) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/FrequencyMasking.html) | +| [torchaudio.transforms.GriffinLim](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.GriffinLim.html) | [mindspore.dataset.audio.GriffinLim](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_audio/mindspore.dataset.audio.GriffinLim.html#mindspore.dataset.audio.GriffinLim) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/GriffinLim.html) | +| [torchaudio.transforms.InverseMelScale](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.InverseMelScale.html) | [mindspore.dataset.audio.InverseMelScale](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_audio/mindspore.dataset.audio.InverseMelScale.html#mindspore.dataset.audio.InverseMelScale) | 
[差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/InverseMelScale.html) | +| [torchaudio.transforms.MelScale](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.MelScale.html) | [mindspore.dataset.audio.MelScale](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_audio/mindspore.dataset.audio.MelScale.html#mindspore.dataset.audio.MelScale) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/MelScale.html) | +| [torchaudio.transforms.MelSpectrogram](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.MelSpectrogram.html) | [mindspore.dataset.audio.MelSpectrogram](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_audio/mindspore.dataset.audio.MelSpectrogram.html#mindspore.dataset.audio.MelSpectrogram) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/MelSpectrogram.html) | +| [torchaudio.transforms.MFCC](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.MFCC.html) | [mindspore.dataset.audio.MFCC](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_audio/mindspore.dataset.audio.MFCC.html#mindspore.dataset.audio.MFCC) | 一致 | +| [torchaudio.transforms.MuLawEncoding](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.MuLawEncoding.html) | [mindspore.dataset.audio.MuLawEncoding](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_audio/mindspore.dataset.audio.MuLawEncoding.html#mindspore.dataset.audio.MuLawEncoding) | 一致 | +| [torchaudio.transforms.MuLawDecoding](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.MuLawDecoding.html) | [mindspore.dataset.audio.MuLawDecoding](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_audio/mindspore.dataset.audio.MuLawDecoding.html#mindspore.dataset.audio.MuLawDecoding) | 一致 | +| [torchaudio.transforms.Resample](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.Resample.html) | [mindspore.dataset.audio.Resample](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_audio/mindspore.dataset.audio.Resample.html#mindspore.dataset.audio.Resample) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/Resample.html) | +| [torchaudio.transforms.SlidingWindowCmn](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.SlidingWindowCmn.html) | [mindspore.dataset.audio.SlidingWindowCmn](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_audio/mindspore.dataset.audio.SlidingWindowCmn.html#mindspore.dataset.audio.SlidingWindowCmn) | 一致 | +| [torchaudio.transforms.SpectralCentroid](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.SpectralCentroid.html) | [mindspore.dataset.audio.SpectralCentroid](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_audio/mindspore.dataset.audio.SpectralCentroid.html#mindspore.dataset.audio.SpectralCentroid) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/SpectralCentroid.html) | +| [torchaudio.transforms.Spectrogram](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.Spectrogram.html) | [mindspore.dataset.audio.Spectrogram](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_audio/mindspore.dataset.audio.Spectrogram.html#mindspore.dataset.audio.Spectrogram) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/Spectrogram.html) | +| 
[torchaudio.transforms.TimeMasking](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.TimeMasking.html) | [mindspore.dataset.audio.TimeMasking](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_audio/mindspore.dataset.audio.TimeMasking.html#mindspore.dataset.audio.TimeMasking) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/TimeMasking.html) | +| [torchaudio.transforms.TimeStretch](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.TimeStretch.html) | [mindspore.dataset.audio.TimeStretch](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_audio/mindspore.dataset.audio.TimeStretch.html#mindspore.dataset.audio.TimeStretch) | 一致 | +| [torchaudio.transforms.Vad](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.Vad.html) | [mindspore.dataset.audio.Vad](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_audio/mindspore.dataset.audio.Vad.html#mindspore.dataset.audio.Vad) | 一致 | +| [torchaudio.transforms.Vol](https://pytorch.org/audio/0.8.0/transforms.html#torchaudio.transforms.Vol.html) | [mindspore.dataset.audio.Vol](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_audio/mindspore.dataset.audio.Vol.html#mindspore.dataset.audio.Vol) | 一致 | ## torchtext @@ -557,31 +557,31 @@ mindspore.mint.argmax只有一种API形式,即mindspore.mint.argmax(input, dim | TorchText 0.9.1 APIs | MindSpore APIs | 说明 | | ---------------------- | ----------------------------- | ------------------------------ | -| [torchtext.data.functional.custom_replace](https://pytorch.org/text/0.9.0/data_functional.html#custom-replace) | [mindspore.dataset.text.RegexReplace](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_text/mindspore.dataset.text.RegexReplace.html#mindspore.dataset.text.RegexReplace) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/RegexReplace.html) | -| [torchtext.data.functional.load_sp_model](https://pytorch.org/text/0.9.0/data_functional.html#load-sp-model) | [mindspore.dataset.text.SentencePieceTokenizer](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset_text/mindspore.dataset.text.SentencePieceTokenizer.html#mindspore.dataset.text.SentencePieceTokenizer) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/load_sp_model.html) | -| [torchtext.data.functional.numericalize_tokens_from_iterator](https://pytorch.org/text/0.9.0/data_functional.html#numericalize-tokens-from-iterator) | [mindspore.dataset.text.Lookup](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_text/mindspore.dataset.text.Lookup.html#mindspore.dataset.text.Lookup) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/Lookup.html) | -| [torchtext.data.functional.sentencepiece_numericalizer](https://pytorch.org/text/0.9.0/data_functional.html#sentencepiece-numericalizer) | [mindspore.dataset.text.SentencePieceTokenizer](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_text/mindspore.dataset.text.SentencePieceTokenizer.html#mindspore.dataset.text.SentencePieceTokenizer) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/SentencePieceTokenizer_Out_INT.html) | -| [torchtext.data.functional.sentencepiece_tokenizer](https://pytorch.org/text/0.9.0/data_functional.html#sentencepiece-tokenizer) | 
[mindspore.dataset.text.SentencePieceTokenizer](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_text/mindspore.dataset.text.SentencePieceTokenizer.html#mindspore.dataset.text.SentencePieceTokenizer) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/SentencePieceTokenizer_Out_STRING.html) | -| [torchtext.data.functional.simple_space_split](https://pytorch.org/text/0.9.0/data_functional.html#simple-space-split) | [mindspore.dataset.text.WhitespaceTokenizer](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_text/mindspore.dataset.text.WhitespaceTokenizer.html#mindspore.dataset.text.WhitespaceTokenizer) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/WhitespaceTokenizer.html) | -| [torchtext.data.utils.ngrams_iterator](https://pytorch.org/text/0.9.0/data_functional.html#ngrams-iterator) | [mindspore.dataset.text.Ngram](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_text/mindspore.dataset.text.Ngram.html#mindspore.dataset.text.Ngram) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/Ngram.html) | -| [torchtext.datasets.AG_NEWS](https://pytorch.org/text/0.9.0/datasets.html#ag-news) | [mindspore.dataset.AGNewsDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.AGNewsDataset.html#mindspore.dataset.AGNewsDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/AGNEWS.html) | -| [torchtext.datasets.AmazonReviewFull](https://pytorch.org/text/0.9.0/datasets.html#amazonreviewfull) | [mindspore.dataset.AmazonReviewDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.AmazonReviewDataset.html#mindspore.dataset.AmazonReviewDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/AmazonReviewFull.html) | -| [torchtext.datasets.AmazonReviewPolarity](https://pytorch.org/text/0.9.0/datasets.html#amazonreviewpolarity) | [mindspore.dataset.AmazonReviewDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.AmazonReviewDataset.html#mindspore.dataset.AmazonReviewDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/AmazonReviewPolarity.html) | -| [torchtext.datasets.CoNLL2000Chunking](https://pytorch.org/text/0.9.0/datasets.html#conll2000chunking) | [mindspore.dataset.CoNLL2000Dataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.CoNLL2000Dataset.html#mindspore.dataset.CoNLL2000Dataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/CoNLL2000Chunking.html) | -| [torchtext.datasets.DBpedia](https://pytorch.org/text/0.9.0/datasets.html#dbpedia) | [mindspore.dataset.DBpediaDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.DBpediaDataset.html#mindspore.dataset.DBpediaDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/DBpedia.html) | -| [torchtext.datasets.IMDB](https://pytorch.org/text/0.9.0/datasets.html#imdb) | [mindspore.dataset.IMDBDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.IMDBDataset.html#mindspore.dataset.IMDBDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/IMDB.html) | -| [torchtext.datasets.IWSLT2016](https://pytorch.org/text/0.9.0/datasets.html#iwslt2016) | 
[mindspore.dataset.IWSLT2016Dataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.IWSLT2016Dataset.html#mindspore.dataset.IWSLT2016Dataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/IWSLT2016.html) | -| [torchtext.datasets.IWSLT2017](https://pytorch.org/text/0.9.0/datasets.html#iwslt2017) | [mindspore.dataset.IWSLT2017Dataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.IWSLT2017Dataset.html#mindspore.dataset.IWSLT2017Dataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/IWSLT2017.html) | -| [torchtext.datasets.PennTreebank](https://pytorch.org/text/0.9.0/datasets.html#penntreebank) | [mindspore.dataset.PennTreebankDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.PennTreebankDataset.html#mindspore.dataset.PennTreebankDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/PennTreebank.html) | -| [torchtext.datasets.SogouNews](https://pytorch.org/text/0.9.0/datasets.html#sogounews) | [mindspore.dataset.SogouNewsDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.SogouNewsDataset.html#mindspore.dataset.SogouNewsDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/SogouNews.html) | -| [torchtext.datasets.SQuAD1](https://pytorch.org/text/0.9.0/datasets.html#torchtext.datasets.SQuAD1) | [mindspore.dataset.SQuADDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.SQuADDataset.html#mindspore.dataset.SQuADDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/SQuAD1.html) | -| [torchtext.datasets.SQuAD2](https://pytorch.org/text/0.9.0/datasets.html#torchtext.datasets.SQuAD2) | [mindspore.dataset.SQuADDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.SQuADDataset.html#mindspore.dataset.SQuADDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/SQuAD2.html) | -| [torchtext.datasets.UDPOS](https://pytorch.org/text/0.9.0/datasets.html#udpos) | [mindspore.dataset.UDPOSDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.UDPOSDataset.html#mindspore.dataset.UDPOSDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/UDPOS.html) | -| [torchtext.datasets.WikiText103](https://pytorch.org/text/0.9.0/datasets.html#wikitext103) | [mindspore.dataset.WikiTextDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.WikiTextDataset.html#mindspore.dataset.WikiTextDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/WikiText103.html) | -| [torchtext.datasets.WikiText2](https://pytorch.org/text/0.9.0/datasets.html#wikitext-2) | [mindspore.dataset.WikiTextDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.WikiTextDataset.html#mindspore.dataset.WikiTextDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/WikiText2.html) | -| [torchtext.datasets.YahooAnswers](https://pytorch.org/text/0.9.0/datasets.html#yahooanswers) | [mindspore.dataset.YahooAnswersDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.YahooAnswersDataset.html#mindspore.dataset.YahooAnswersDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/YahooAnswers.html) | -| 
[torchtext.datasets.YelpReviewFull](https://pytorch.org/text/0.9.0/datasets.html#yelpreviewfull) | [mindspore.dataset.YelpReviewDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.YelpReviewDataset.html#mindspore.dataset.YelpReviewDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/YelpReviewFull.html) | -| [torchtext.datasets.YelpReviewPolarity](https://pytorch.org/text/0.9.0/datasets.html#yelpreviewpolarity) | [mindspore.dataset.YelpReviewDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.YelpReviewDataset.html#mindspore.dataset.YelpReviewDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/YelpReviewPolarity.html) | +| [torchtext.data.functional.custom_replace](https://pytorch.org/text/0.9.0/data_functional.html#custom-replace) | [mindspore.dataset.text.RegexReplace](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_text/mindspore.dataset.text.RegexReplace.html#mindspore.dataset.text.RegexReplace) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/RegexReplace.html) | +| [torchtext.data.functional.load_sp_model](https://pytorch.org/text/0.9.0/data_functional.html#load-sp-model) | [mindspore.dataset.text.SentencePieceTokenizer](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset_text/mindspore.dataset.text.SentencePieceTokenizer.html#mindspore.dataset.text.SentencePieceTokenizer) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/load_sp_model.html) | +| [torchtext.data.functional.numericalize_tokens_from_iterator](https://pytorch.org/text/0.9.0/data_functional.html#numericalize-tokens-from-iterator) | [mindspore.dataset.text.Lookup](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_text/mindspore.dataset.text.Lookup.html#mindspore.dataset.text.Lookup) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/Lookup.html) | +| [torchtext.data.functional.sentencepiece_numericalizer](https://pytorch.org/text/0.9.0/data_functional.html#sentencepiece-numericalizer) | [mindspore.dataset.text.SentencePieceTokenizer](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_text/mindspore.dataset.text.SentencePieceTokenizer.html#mindspore.dataset.text.SentencePieceTokenizer) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/SentencePieceTokenizer_Out_INT.html) | +| [torchtext.data.functional.sentencepiece_tokenizer](https://pytorch.org/text/0.9.0/data_functional.html#sentencepiece-tokenizer) | [mindspore.dataset.text.SentencePieceTokenizer](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_text/mindspore.dataset.text.SentencePieceTokenizer.html#mindspore.dataset.text.SentencePieceTokenizer) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/SentencePieceTokenizer_Out_STRING.html) | +| [torchtext.data.functional.simple_space_split](https://pytorch.org/text/0.9.0/data_functional.html#simple-space-split) | [mindspore.dataset.text.WhitespaceTokenizer](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_text/mindspore.dataset.text.WhitespaceTokenizer.html#mindspore.dataset.text.WhitespaceTokenizer) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/WhitespaceTokenizer.html) | +| [torchtext.data.utils.ngrams_iterator](https://pytorch.org/text/0.9.0/data_functional.html#ngrams-iterator) | 
[mindspore.dataset.text.Ngram](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_text/mindspore.dataset.text.Ngram.html#mindspore.dataset.text.Ngram) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/Ngram.html) | +| [torchtext.datasets.AG_NEWS](https://pytorch.org/text/0.9.0/datasets.html#ag-news) | [mindspore.dataset.AGNewsDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.AGNewsDataset.html#mindspore.dataset.AGNewsDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/AGNEWS.html) | +| [torchtext.datasets.AmazonReviewFull](https://pytorch.org/text/0.9.0/datasets.html#amazonreviewfull) | [mindspore.dataset.AmazonReviewDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.AmazonReviewDataset.html#mindspore.dataset.AmazonReviewDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/AmazonReviewFull.html) | +| [torchtext.datasets.AmazonReviewPolarity](https://pytorch.org/text/0.9.0/datasets.html#amazonreviewpolarity) | [mindspore.dataset.AmazonReviewDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.AmazonReviewDataset.html#mindspore.dataset.AmazonReviewDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/AmazonReviewPolarity.html) | +| [torchtext.datasets.CoNLL2000Chunking](https://pytorch.org/text/0.9.0/datasets.html#conll2000chunking) | [mindspore.dataset.CoNLL2000Dataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.CoNLL2000Dataset.html#mindspore.dataset.CoNLL2000Dataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/CoNLL2000Chunking.html) | +| [torchtext.datasets.DBpedia](https://pytorch.org/text/0.9.0/datasets.html#dbpedia) | [mindspore.dataset.DBpediaDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.DBpediaDataset.html#mindspore.dataset.DBpediaDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/DBpedia.html) | +| [torchtext.datasets.IMDB](https://pytorch.org/text/0.9.0/datasets.html#imdb) | [mindspore.dataset.IMDBDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.IMDBDataset.html#mindspore.dataset.IMDBDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/IMDB.html) | +| [torchtext.datasets.IWSLT2016](https://pytorch.org/text/0.9.0/datasets.html#iwslt2016) | [mindspore.dataset.IWSLT2016Dataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.IWSLT2016Dataset.html#mindspore.dataset.IWSLT2016Dataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/IWSLT2016.html) | +| [torchtext.datasets.IWSLT2017](https://pytorch.org/text/0.9.0/datasets.html#iwslt2017) | [mindspore.dataset.IWSLT2017Dataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.IWSLT2017Dataset.html#mindspore.dataset.IWSLT2017Dataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/IWSLT2017.html) | +| [torchtext.datasets.PennTreebank](https://pytorch.org/text/0.9.0/datasets.html#penntreebank) | [mindspore.dataset.PennTreebankDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.PennTreebankDataset.html#mindspore.dataset.PennTreebankDataset) | 
[差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/PennTreebank.html) | +| [torchtext.datasets.SogouNews](https://pytorch.org/text/0.9.0/datasets.html#sogounews) | [mindspore.dataset.SogouNewsDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.SogouNewsDataset.html#mindspore.dataset.SogouNewsDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/SogouNews.html) | +| [torchtext.datasets.SQuAD1](https://pytorch.org/text/0.9.0/datasets.html#torchtext.datasets.SQuAD1) | [mindspore.dataset.SQuADDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.SQuADDataset.html#mindspore.dataset.SQuADDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/SQuAD1.html) | +| [torchtext.datasets.SQuAD2](https://pytorch.org/text/0.9.0/datasets.html#torchtext.datasets.SQuAD2) | [mindspore.dataset.SQuADDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.SQuADDataset.html#mindspore.dataset.SQuADDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/SQuAD2.html) | +| [torchtext.datasets.UDPOS](https://pytorch.org/text/0.9.0/datasets.html#udpos) | [mindspore.dataset.UDPOSDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.UDPOSDataset.html#mindspore.dataset.UDPOSDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/UDPOS.html) | +| [torchtext.datasets.WikiText103](https://pytorch.org/text/0.9.0/datasets.html#wikitext103) | [mindspore.dataset.WikiTextDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.WikiTextDataset.html#mindspore.dataset.WikiTextDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/WikiText103.html) | +| [torchtext.datasets.WikiText2](https://pytorch.org/text/0.9.0/datasets.html#wikitext-2) | [mindspore.dataset.WikiTextDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.WikiTextDataset.html#mindspore.dataset.WikiTextDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/WikiText2.html) | +| [torchtext.datasets.YahooAnswers](https://pytorch.org/text/0.9.0/datasets.html#yahooanswers) | [mindspore.dataset.YahooAnswersDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.YahooAnswersDataset.html#mindspore.dataset.YahooAnswersDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/YahooAnswers.html) | +| [torchtext.datasets.YelpReviewFull](https://pytorch.org/text/0.9.0/datasets.html#yelpreviewfull) | [mindspore.dataset.YelpReviewDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.YelpReviewDataset.html#mindspore.dataset.YelpReviewDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/YelpReviewFull.html) | +| [torchtext.datasets.YelpReviewPolarity](https://pytorch.org/text/0.9.0/datasets.html#yelpreviewpolarity) | [mindspore.dataset.YelpReviewDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.YelpReviewDataset.html#mindspore.dataset.YelpReviewDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/YelpReviewPolarity.html) | ## torchvision @@ -589,40 +589,40 @@ mindspore.mint.argmax只有一种API形式,即mindspore.mint.argmax(input, dim | TorchVision 0.9.1 APIs | 
MindSpore APIs | 说明 | | ------------------------------------------------------------------------------------------------------------------------------------------ | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------- | -| [torchvision.datasets.CelebA](https://pytorch.org/vision/0.9/datasets.html#torchvision.datasets.CelebA) | [mindspore.dataset.CelebADataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.CelebADataset.html#mindspore.dataset.CelebADataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/CelebA.html) | -| [torchvision.datasets.Cityscapes](https://pytorch.org/vision/0.9/datasets.html#cityscapes) | [mindspore.dataset.CityscapesDataset](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.CityscapesDataset.html#mindspore.dataset.CityscapesDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/Cityscapes.html) | -| [torchvision.datasets.CIFAR10](https://pytorch.org/vision/0.9/datasets.html#torchvision.datasets.CIFAR10) | [mindspore.dataset.Cifar10Dataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.Cifar10Dataset.html#mindspore.dataset.Cifar10Dataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/CIFAR10.html) | -| [torchvision.datasets.CIFAR100](https://pytorch.org/vision/0.9/datasets.html#torchvision.datasets.CIFAR100) | [mindspore.dataset.Cifar100Dataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.Cifar100Dataset.html#mindspore.dataset.Cifar100Dataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/CIFAR100.html) | -| [torchvision.datasets.CocoDetection](https://pytorch.org/vision/0.9/datasets.html#torchvision.datasets.CocoDetection) | [mindspore.dataset.CocoDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.CocoDataset.html#mindspore.dataset.CocoDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/CocoDataset.html) | -| [torchvision.datasets.ImageFolder](https://pytorch.org/vision/0.9/datasets.html#torchvision.datasets.ImageFolder) | [mindspore.dataset.ImageFolderDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.ImageFolderDataset.html#mindspore.dataset.ImageFolderDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/ImageFolder.html) | -| [torchvision.datasets.MNIST](https://pytorch.org/vision/0.9/datasets.html#torchvision.datasets.MNIST) | [mindspore.dataset.MnistDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.MnistDataset.html#mindspore.dataset.MnistDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/MNIST.html) | -| [torchvision.datasets.VOCDetection](https://pytorch.org/vision/0.9/datasets.html#torchvision.datasets.VOCDetection) | [mindspore.dataset.VOCDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.VOCDataset.html#mindspore.dataset.VOCDataset) | 
[差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/VOCDetection.html) | -| [torchvision.datasets.VOCSegmentation](https://pytorch.org/vision/0.9/datasets.html#torchvision.datasets.VOCSegmentation) | [mindspore.dataset.VOCDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.VOCDataset.html#mindspore.dataset.VOCDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/VOCSegmentation.html) | -| [torchvision.ops.nms](https://pytorch.org/vision/0.9/ops.html#torchvision.ops.nms.html#torchvision.ops.nms) | [mindspore.ops.NMSWithMask](https://mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.NMSWithMask.html#mindspore.ops.NMSWithMask) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/nms.html) | -| [torchvision.ops.roi_align](https://pytorch.org/vision/0.9/ops.html#torchvision.ops.roi_align.html#torchvision.ops.roi_align) | [mindspore.ops.ROIAlign](https://mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ROIAlign.html#mindspore.ops.ROIAlign) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/roi_align.html) | -| [torchvision.transforms.CenterCrop](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.CenterCrop) | [mindspore.dataset.vision.CenterCrop](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_vision/mindspore.dataset.vision.CenterCrop.html#mindspore.dataset.vision.CenterCrop) | 一致 | -| [torchvision.transforms.ColorJitter](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.ColorJitter) | [mindspore.dataset.vision.RandomColorAdjust](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_vision/mindspore.dataset.vision.RandomColorAdjust.html#mindspore.dataset.vision.RandomColorAdjust) | 一致 | -| [torchvision.transforms.Compose](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.Compose) | [mindspore.dataset.transforms.Compose](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_transforms/mindspore.dataset.transforms.Compose.html#mindspore.dataset.transforms.Compose) | 一致 | -| [torchvision.transforms.ConvertImageDtype](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.ConvertImageDtype) | [mindspore.dataset.transforms.TypeCast](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_transforms/mindspore.dataset.transforms.TypeCast.html#mindspore.dataset.transforms.TypeCast) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/TypeCast.html) | -| [torchvision.transforms.FiveCrop](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.FiveCrop) | [mindspore.dataset.vision.FiveCrop](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_vision/mindspore.dataset.vision.FiveCrop.html#mindspore.dataset.vision.FiveCrop) | 一致 | -| [torchvision.transforms.GaussianBlur](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.GaussianBlur) | [mindspore.dataset.vision.GaussianBlur](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_vision/mindspore.dataset.vision.GaussianBlur.html#mindspore.dataset.vision.GaussianBlur) | 一致 | -| [torchvision.transforms.Grayscale](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.Grayscale) | [mindspore.dataset.vision.Grayscale](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_vision/mindspore.dataset.vision.Grayscale.html#mindspore.dataset.vision.Grayscale) | 一致 | -| 
[torchvision.transforms.LinearTransformation](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.LinearTransformation) | [mindspore.dataset.vision.LinearTransformation](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_vision/mindspore.dataset.vision.LinearTransformation.html#mindspore.dataset.vision.LinearTransformation) | 一致 | -| [torchvision.transforms.Normalize](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.Normalize) | [mindspore.dataset.vision.Normalize](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_vision/mindspore.dataset.vision.Normalize.html#mindspore.dataset.vision.Normalize) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/Normalize.html) | -| [torchvision.transforms.Pad](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.Pad) | [mindspore.dataset.vision.Pad](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_vision/mindspore.dataset.vision.Pad.html#mindspore.dataset.vision.Pad) | 功能一致,参数名不同 | -| [torchvision.transforms.RandomAffine](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomAffine) | [mindspore.dataset.vision.RandomAffine](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_vision/mindspore.dataset.vision.RandomAffine.html#mindspore.dataset.vision.RandomAffine) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/RandomAffine.html) | -| [torchvision.transforms.RandomApply](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomApply) | [mindspore.dataset.transforms.RandomApply](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_transforms/mindspore.dataset.transforms.RandomApply.html#mindspore.dataset.transforms.RandomApply) | 功能一致,参数名不同 | -| [torchvision.transforms.RandomChoice](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomChoice) | [mindspore.dataset.transforms.RandomChoice](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_transforms/mindspore.dataset.transforms.RandomChoice.html#mindspore.dataset.transforms.RandomChoice) | 一致 | -| [torchvision.transforms.RandomCrop](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomCrop) | [mindspore.dataset.vision.RandomCrop](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_vision/mindspore.dataset.vision.RandomCrop.html#mindspore.dataset.vision.RandomCrop) | 功能一致,参数名不同 | -| [torchvision.transforms.RandomGrayscale](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomGrayscale) | [mindspore.dataset.vision.RandomGrayscale](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_vision/mindspore.dataset.vision.RandomGrayscale.html#mindspore.dataset.vision.RandomGrayscale) | 功能一致,参数名不同 | -| [torchvision.transforms.RandomHorizontalFlip](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomHorizontalFlip) | [mindspore.dataset.vision.RandomHorizontalFlip](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_vision/mindspore.dataset.vision.RandomHorizontalFlip.html#mindspore.dataset.vision.RandomHorizontalFlip) | 功能一致,参数名不同 | -| [torchvision.transforms.RandomOrder](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomOrder) | [mindspore.dataset.transforms.RandomOrder](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_transforms/mindspore.dataset.transforms.RandomOrder.html#mindspore.dataset.transforms.RandomOrder) | 一致 | -| 
[torchvision.transforms.RandomPerspective](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomPerspective) | [mindspore.dataset.vision.RandomPerspective](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_vision/mindspore.dataset.vision.RandomPerspective.html#mindspore.dataset.vision.RandomPerspective) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/RandomPerspective.html) | -| [torchvision.transforms.RandomResizedCrop](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomResizedCrop) | [mindspore.dataset.vision.RandomResizedCrop](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_vision/mindspore.dataset.vision.RandomResizedCrop.html#mindspore.dataset.vision.RandomResizedCrop) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/RandomResizedCrop.html) | -| [torchvision.transforms.RandomRotation](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomRotation) | [mindspore.dataset.vision.RandomRotation](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_vision/mindspore.dataset.vision.RandomRotation.html#mindspore.dataset.vision.RandomRotation) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/RandomRotation.html) | -| [torchvision.transforms.RandomVerticalFlip](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomVerticalFlip) | [mindspore.dataset.vision.RandomVerticalFlip](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_vision/mindspore.dataset.vision.RandomVerticalFlip.html#mindspore.dataset.vision.RandomVerticalFlip) | 功能一致,参数名不同 | -| [torchvision.transforms.Resize](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.Resize) | [mindspore.dataset.vision.Resize](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_vision/mindspore.dataset.vision.Resize.html#mindspore.dataset.vision.Resize) | 一致 | -| [torchvision.transforms.TenCrop](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.TenCrop) | [mindspore.dataset.vision.TenCrop](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_vision/mindspore.dataset.vision.TenCrop.html#mindspore.dataset.vision.TenCrop) | 功能一致,参数名不同 | -| [torchvision.transforms.ToPILImage](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.ToPILImage) | [mindspore.dataset.vision.ToPIL](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_vision/mindspore.dataset.vision.ToPIL.html#mindspore.dataset.vision.ToPIL) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/ToPIL.html) | -| [torchvision.transforms.ToTensor](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.ToTensor) | [mindspore.dataset.vision.ToTensor](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_vision/mindspore.dataset.vision.ToTensor.html#mindspore.dataset.vision.ToTensor) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/ToTensor.html) | -| [torchvision.ops.deform_conv2d](https://pytorch.org/vision/main/generated/torchvision.ops.deform_conv2d.html#deform-conv2d) | [mindspore.ops.deformable_conv2d](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.deformable_conv2d.html#mindspore-ops-deformable-conv2d) | [差异对比](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/deform_conv2d.html) | +| [torchvision.datasets.CelebA](https://pytorch.org/vision/0.9/datasets.html#torchvision.datasets.CelebA) 
| [mindspore.dataset.CelebADataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.CelebADataset.html#mindspore.dataset.CelebADataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/CelebA.html) | +| [torchvision.datasets.Cityscapes](https://pytorch.org/vision/0.9/datasets.html#cityscapes) | [mindspore.dataset.CityscapesDataset](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.CityscapesDataset.html#mindspore.dataset.CityscapesDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/Cityscapes.html) | +| [torchvision.datasets.CIFAR10](https://pytorch.org/vision/0.9/datasets.html#torchvision.datasets.CIFAR10) | [mindspore.dataset.Cifar10Dataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.Cifar10Dataset.html#mindspore.dataset.Cifar10Dataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/CIFAR10.html) | +| [torchvision.datasets.CIFAR100](https://pytorch.org/vision/0.9/datasets.html#torchvision.datasets.CIFAR100) | [mindspore.dataset.Cifar100Dataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.Cifar100Dataset.html#mindspore.dataset.Cifar100Dataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/CIFAR100.html) | +| [torchvision.datasets.CocoDetection](https://pytorch.org/vision/0.9/datasets.html#torchvision.datasets.CocoDetection) | [mindspore.dataset.CocoDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.CocoDataset.html#mindspore.dataset.CocoDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/CocoDataset.html) | +| [torchvision.datasets.ImageFolder](https://pytorch.org/vision/0.9/datasets.html#torchvision.datasets.ImageFolder) | [mindspore.dataset.ImageFolderDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.ImageFolderDataset.html#mindspore.dataset.ImageFolderDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/ImageFolder.html) | +| [torchvision.datasets.MNIST](https://pytorch.org/vision/0.9/datasets.html#torchvision.datasets.MNIST) | [mindspore.dataset.MnistDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.MnistDataset.html#mindspore.dataset.MnistDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/MNIST.html) | +| [torchvision.datasets.VOCDetection](https://pytorch.org/vision/0.9/datasets.html#torchvision.datasets.VOCDetection) | [mindspore.dataset.VOCDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.VOCDataset.html#mindspore.dataset.VOCDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/VOCDetection.html) | +| [torchvision.datasets.VOCSegmentation](https://pytorch.org/vision/0.9/datasets.html#torchvision.datasets.VOCSegmentation) | [mindspore.dataset.VOCDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.VOCDataset.html#mindspore.dataset.VOCDataset) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/VOCSegmentation.html) | +| [torchvision.ops.nms](https://pytorch.org/vision/0.9/ops.html#torchvision.ops.nms.html#torchvision.ops.nms) | 
[mindspore.ops.NMSWithMask](https://mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.NMSWithMask.html#mindspore.ops.NMSWithMask) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/nms.html) | +| [torchvision.ops.roi_align](https://pytorch.org/vision/0.9/ops.html#torchvision.ops.roi_align.html#torchvision.ops.roi_align) | [mindspore.ops.ROIAlign](https://mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ROIAlign.html#mindspore.ops.ROIAlign) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/roi_align.html) | +| [torchvision.transforms.CenterCrop](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.CenterCrop) | [mindspore.dataset.vision.CenterCrop](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_vision/mindspore.dataset.vision.CenterCrop.html#mindspore.dataset.vision.CenterCrop) | 一致 | +| [torchvision.transforms.ColorJitter](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.ColorJitter) | [mindspore.dataset.vision.RandomColorAdjust](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_vision/mindspore.dataset.vision.RandomColorAdjust.html#mindspore.dataset.vision.RandomColorAdjust) | 一致 | +| [torchvision.transforms.Compose](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.Compose) | [mindspore.dataset.transforms.Compose](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_transforms/mindspore.dataset.transforms.Compose.html#mindspore.dataset.transforms.Compose) | 一致 | +| [torchvision.transforms.ConvertImageDtype](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.ConvertImageDtype) | [mindspore.dataset.transforms.TypeCast](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_transforms/mindspore.dataset.transforms.TypeCast.html#mindspore.dataset.transforms.TypeCast) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/TypeCast.html) | +| [torchvision.transforms.FiveCrop](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.FiveCrop) | [mindspore.dataset.vision.FiveCrop](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_vision/mindspore.dataset.vision.FiveCrop.html#mindspore.dataset.vision.FiveCrop) | 一致 | +| [torchvision.transforms.GaussianBlur](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.GaussianBlur) | [mindspore.dataset.vision.GaussianBlur](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_vision/mindspore.dataset.vision.GaussianBlur.html#mindspore.dataset.vision.GaussianBlur) | 一致 | +| [torchvision.transforms.Grayscale](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.Grayscale) | [mindspore.dataset.vision.Grayscale](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_vision/mindspore.dataset.vision.Grayscale.html#mindspore.dataset.vision.Grayscale) | 一致 | +| [torchvision.transforms.LinearTransformation](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.LinearTransformation) | [mindspore.dataset.vision.LinearTransformation](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_vision/mindspore.dataset.vision.LinearTransformation.html#mindspore.dataset.vision.LinearTransformation) | 一致 | +| [torchvision.transforms.Normalize](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.Normalize) | 
[mindspore.dataset.vision.Normalize](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_vision/mindspore.dataset.vision.Normalize.html#mindspore.dataset.vision.Normalize) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/Normalize.html) | +| [torchvision.transforms.Pad](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.Pad) | [mindspore.dataset.vision.Pad](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_vision/mindspore.dataset.vision.Pad.html#mindspore.dataset.vision.Pad) | 功能一致,参数名不同 | +| [torchvision.transforms.RandomAffine](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomAffine) | [mindspore.dataset.vision.RandomAffine](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_vision/mindspore.dataset.vision.RandomAffine.html#mindspore.dataset.vision.RandomAffine) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/RandomAffine.html) | +| [torchvision.transforms.RandomApply](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomApply) | [mindspore.dataset.transforms.RandomApply](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_transforms/mindspore.dataset.transforms.RandomApply.html#mindspore.dataset.transforms.RandomApply) | 功能一致,参数名不同 | +| [torchvision.transforms.RandomChoice](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomChoice) | [mindspore.dataset.transforms.RandomChoice](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_transforms/mindspore.dataset.transforms.RandomChoice.html#mindspore.dataset.transforms.RandomChoice) | 一致 | +| [torchvision.transforms.RandomCrop](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomCrop) | [mindspore.dataset.vision.RandomCrop](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_vision/mindspore.dataset.vision.RandomCrop.html#mindspore.dataset.vision.RandomCrop) | 功能一致,参数名不同 | +| [torchvision.transforms.RandomGrayscale](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomGrayscale) | [mindspore.dataset.vision.RandomGrayscale](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_vision/mindspore.dataset.vision.RandomGrayscale.html#mindspore.dataset.vision.RandomGrayscale) | 功能一致,参数名不同 | +| [torchvision.transforms.RandomHorizontalFlip](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomHorizontalFlip) | [mindspore.dataset.vision.RandomHorizontalFlip](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_vision/mindspore.dataset.vision.RandomHorizontalFlip.html#mindspore.dataset.vision.RandomHorizontalFlip) | 功能一致,参数名不同 | +| [torchvision.transforms.RandomOrder](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomOrder) | [mindspore.dataset.transforms.RandomOrder](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_transforms/mindspore.dataset.transforms.RandomOrder.html#mindspore.dataset.transforms.RandomOrder) | 一致 | +| [torchvision.transforms.RandomPerspective](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomPerspective) | [mindspore.dataset.vision.RandomPerspective](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_vision/mindspore.dataset.vision.RandomPerspective.html#mindspore.dataset.vision.RandomPerspective) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/RandomPerspective.html) | +| 
[torchvision.transforms.RandomResizedCrop](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomResizedCrop) | [mindspore.dataset.vision.RandomResizedCrop](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_vision/mindspore.dataset.vision.RandomResizedCrop.html#mindspore.dataset.vision.RandomResizedCrop) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/RandomResizedCrop.html) | +| [torchvision.transforms.RandomRotation](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomRotation) | [mindspore.dataset.vision.RandomRotation](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_vision/mindspore.dataset.vision.RandomRotation.html#mindspore.dataset.vision.RandomRotation) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/RandomRotation.html) | +| [torchvision.transforms.RandomVerticalFlip](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.RandomVerticalFlip) | [mindspore.dataset.vision.RandomVerticalFlip](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_vision/mindspore.dataset.vision.RandomVerticalFlip.html#mindspore.dataset.vision.RandomVerticalFlip) | 功能一致,参数名不同 | +| [torchvision.transforms.Resize](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.Resize) | [mindspore.dataset.vision.Resize](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_vision/mindspore.dataset.vision.Resize.html#mindspore.dataset.vision.Resize) | 一致 | +| [torchvision.transforms.TenCrop](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.TenCrop) | [mindspore.dataset.vision.TenCrop](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_vision/mindspore.dataset.vision.TenCrop.html#mindspore.dataset.vision.TenCrop) | 功能一致,参数名不同 | +| [torchvision.transforms.ToPILImage](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.ToPILImage) | [mindspore.dataset.vision.ToPIL](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_vision/mindspore.dataset.vision.ToPIL.html#mindspore.dataset.vision.ToPIL) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/ToPIL.html) | +| [torchvision.transforms.ToTensor](https://pytorch.org/vision/0.9/transforms.html#torchvision.transforms.ToTensor) | [mindspore.dataset.vision.ToTensor](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_vision/mindspore.dataset.vision.ToTensor.html#mindspore.dataset.vision.ToTensor) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/ToTensor.html) | +| [torchvision.ops.deform_conv2d](https://pytorch.org/vision/main/generated/torchvision.ops.deform_conv2d.html#deform-conv2d) | [mindspore.ops.deformable_conv2d](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.deformable_conv2d.html#mindspore-ops-deformable-conv2d) | [差异对比](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/deform_conv2d.html) | diff --git a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/AGNEWS.md b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/AGNEWS.md index b185e23a4a..91af71e913 100644 --- a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/AGNEWS.md +++ b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/AGNEWS.md @@ -1,6 +1,6 @@ # 比较与torchtext.datasets.AG_NEWS的差异 
-[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/AGNEWS.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/AGNEWS.md) ## torchtext.datasets.AG_NEWS @@ -26,7 +26,7 @@ class mindspore.dataset.AGNewsDataset( cache=None) ``` -更多内容详见[mindspore.dataset.AGNewsDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.AGNewsDataset.html#mindspore.dataset.AGNewsDataset)。 +更多内容详见[mindspore.dataset.AGNewsDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.AGNewsDataset.html#mindspore.dataset.AGNewsDataset)。 ## 差异对比 diff --git a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/AmazonReviewFull.md b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/AmazonReviewFull.md index 6494c58929..5f48c6c84f 100644 --- a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/AmazonReviewFull.md +++ b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/AmazonReviewFull.md @@ -1,6 +1,6 @@ # 比较与torchtext.datasets.AmazonReviewFull的差异 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/AmazonReviewFull.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/AmazonReviewFull.md) ## torchtext.datasets.AmazonReviewFull @@ -26,7 +26,7 @@ class mindspore.dataset.AmazonReviewDataset( cache=None) ``` -更多内容详见[mindspore.dataset.AmazonReviewDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.AmazonReviewDataset.html#mindspore.dataset.AmazonReviewDataset)。 +更多内容详见[mindspore.dataset.AmazonReviewDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.AmazonReviewDataset.html#mindspore.dataset.AmazonReviewDataset)。 ## 差异对比 diff --git a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/AmazonReviewPolarity.md b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/AmazonReviewPolarity.md index 862e45356c..b5ec04b0dd 100644 --- a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/AmazonReviewPolarity.md +++ b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/AmazonReviewPolarity.md @@ -1,6 +1,6 @@ # 比较与torchtext.datasets.AmazonReviewPolarity的差异 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/AmazonReviewPolarity.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/AmazonReviewPolarity.md) ## torchtext.datasets.AmazonReviewPolarity @@ -26,7 +26,7 @@ class mindspore.dataset.AmazonReviewDataset( cache=None) ``` 
-更多内容详见[mindspore.dataset.AmazonReviewDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.AmazonReviewDataset.html#mindspore.dataset.AmazonReviewDataset)。 +更多内容详见[mindspore.dataset.AmazonReviewDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.AmazonReviewDataset.html#mindspore.dataset.AmazonReviewDataset)。 ## 差异对比 diff --git a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/AmplitudeToDB.md b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/AmplitudeToDB.md index cec6b55a46..1203e661b7 100644 --- a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/AmplitudeToDB.md +++ b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/AmplitudeToDB.md @@ -1,6 +1,6 @@ # 比较与torchaudio.transforms.AmplitudeToDB的差异 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/AmplitudeToDB.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/AmplitudeToDB.md) ## torchaudio.transforms.AmplitudeToDB @@ -16,7 +16,7 @@ class torchaudio.transforms.AmplitudeToDB(stype: str = 'power', top_db: Optional class mindspore.dataset.audio.AmplitudeToDB(stype=ScaleType.POWER, ref_value=1.0, amin=1e-10, top_db=80.0) ``` -更多内容详见[mindspore.dataset.audio.AmplitudeToDB](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_audio/mindspore.dataset.audio.AmplitudeToDB.html#mindspore.dataset.audio.AmplitudeToDB)。 +更多内容详见[mindspore.dataset.audio.AmplitudeToDB](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_audio/mindspore.dataset.audio.AmplitudeToDB.html#mindspore.dataset.audio.AmplitudeToDB)。 ## 差异对比 diff --git a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/CIFAR10.md b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/CIFAR10.md index f87429e9e4..56982aad93 100644 --- a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/CIFAR10.md +++ b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/CIFAR10.md @@ -1,6 +1,6 @@ # 比较与torchvision.datasets.CIFAR10的差异 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/CIFAR10.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/CIFAR10.md) ## torchvision.datasets.CIFAR10 @@ -30,7 +30,7 @@ class mindspore.dataset.Cifar10Dataset( cache=None) ``` -更多内容详见[mindspore.dataset.Cifar10Dataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.Cifar10Dataset.html#mindspore.dataset.Cifar10Dataset)。 +更多内容详见[mindspore.dataset.Cifar10Dataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.Cifar10Dataset.html#mindspore.dataset.Cifar10Dataset)。 ## 差异对比 diff --git a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/CIFAR100.md b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/CIFAR100.md index 2bf8b9acca..41f73ea8fe 100644 --- 
a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/CIFAR100.md +++ b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/CIFAR100.md @@ -1,6 +1,6 @@ # 比较与torchvision.datasets.CIFAR100的差异 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/CIFAR100.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/CIFAR100.md) ## torchvision.datasets.CIFAR100 @@ -30,7 +30,7 @@ class mindspore.dataset.Cifar100Dataset( cache=None) ``` -更多内容详见[mindspore.dataset.Cifar100Dataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.Cifar100Dataset.html#mindspore.dataset.Cifar100Dataset)。 +更多内容详见[mindspore.dataset.Cifar100Dataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.Cifar100Dataset.html#mindspore.dataset.Cifar100Dataset)。 ## 差异对比 diff --git a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/CMUARCTIC.md b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/CMUARCTIC.md index b0626a48a1..9bcf79eeb8 100644 --- a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/CMUARCTIC.md +++ b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/CMUARCTIC.md @@ -1,6 +1,6 @@ # 比较与torchaudio.datasets.CMUARCTIC的差异 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/CMUARCTIC.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/CMUARCTIC.md) ## torchaudio.datasets.CMUARCTIC @@ -29,7 +29,7 @@ class mindspore.dataset.CMUArcticDataset( cache=None) ``` -更多内容详见[mindspore.dataset.CMUArcticDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.CMUArcticDataset.html#mindspore.dataset.CMUArcticDataset)。 +更多内容详见[mindspore.dataset.CMUArcticDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.CMUArcticDataset.html#mindspore.dataset.CMUArcticDataset)。 ## 差异对比 diff --git a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/CelebA.md b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/CelebA.md index 504c3e2296..c27758ca62 100644 --- a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/CelebA.md +++ b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/CelebA.md @@ -1,6 +1,6 @@ # 比较与torchvision.datasets.CelebA的差异 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/CelebA.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/CelebA.md) ## torchvision.datasets.CelebA @@ -34,7 +34,7 @@ class mindspore.dataset.CelebADataset( decrypt=None) ``` 
-更多内容详见[mindspore.dataset.CelebADataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.CelebADataset.html#mindspore.dataset.CelebADataset)。 +更多内容详见[mindspore.dataset.CelebADataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.CelebADataset.html#mindspore.dataset.CelebADataset)。 ## 差异对比 diff --git a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/Cityscapes.md b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/Cityscapes.md index e4cb0cf4ca..67a5b837a0 100644 --- a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/Cityscapes.md +++ b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/Cityscapes.md @@ -1,6 +1,6 @@ # 比较与torchvision.datasets.Cityscapes的差异 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/Cityscapes.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/Cityscapes.md) ## torchvision.datasets.Cityscapes @@ -37,7 +37,7 @@ class mindspore.dataset.CityscapesDataset( ) ``` -更多内容详见[mindspore.dataset.CityscapesDataset](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.CityscapesDataset.html)。 +更多内容详见[mindspore.dataset.CityscapesDataset](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.CityscapesDataset.html)。 ## 差异对比 diff --git a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/CoNLL2000Chunking.md b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/CoNLL2000Chunking.md index 3ac7a72d6f..711dbfd7f7 100644 --- a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/CoNLL2000Chunking.md +++ b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/CoNLL2000Chunking.md @@ -1,6 +1,6 @@ # 比较与torchtext.datasets.CoNLL2000Chunking的差异 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/CoNLL2000Chunking.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/CoNLL2000Chunking.md) ## torchtext.datasets.CoNLL2000Chunking @@ -26,7 +26,7 @@ class mindspore.dataset.CoNLL2000Dataset( cache=None) ``` -更多内容详见[mindspore.dataset.CoNLL2000Dataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.CoNLL2000Dataset.html#mindspore.dataset.CoNLL2000Dataset)。 +更多内容详见[mindspore.dataset.CoNLL2000Dataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.CoNLL2000Dataset.html#mindspore.dataset.CoNLL2000Dataset)。 ## 差异对比 diff --git a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/CocoDataset.md b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/CocoDataset.md index 2958195fdd..4bca584fcf 100644 --- a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/CocoDataset.md +++ b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/CocoDataset.md @@ -1,6 +1,6 @@ # 比较与torchvision.datasets.CocoDetection的差异 
-[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/CocoDataset.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/CocoDataset.md) ## torchvision.datasets.CocoDetection @@ -36,7 +36,7 @@ class mindspore.dataset.CocoDataset( ) ``` -更多内容详见[mindspore.dataset.CocoDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.CocoDataset.html#mindspore.dataset.CocoDataset)。 +更多内容详见[mindspore.dataset.CocoDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.CocoDataset.html#mindspore.dataset.CocoDataset)。 ## 差异对比 diff --git a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/DBpedia.md b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/DBpedia.md index 7e992d5233..8c32f6ed8c 100644 --- a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/DBpedia.md +++ b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/DBpedia.md @@ -1,6 +1,6 @@ # 比较与torchtext.datasets.DBpedia的差异 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/DBpedia.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/DBpedia.md) ## torchtext.datasets.DBpedia @@ -26,7 +26,7 @@ class mindspore.dataset.DBpediaDataset( cache=None) ``` -更多内容详见[mindspore.dataset.DBpediaDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.DBpediaDataset.html#mindspore.dataset.DBpediaDataset)。 +更多内容详见[mindspore.dataset.DBpediaDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.DBpediaDataset.html#mindspore.dataset.DBpediaDataset)。 ## 差异对比 diff --git a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/DataLoader.md b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/DataLoader.md index b9f01f267e..fc035aa4ce 100644 --- a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/DataLoader.md +++ b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/DataLoader.md @@ -1,6 +1,6 @@ # 比较与torch.utils.data.DataLoader的差异 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/DataLoader.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/DataLoader.md) ## torch.utils.data.DataLoader @@ -23,7 +23,7 @@ class mindspore.dataset.GeneratorDataset( num_shards=None, shard_id=None, python_multiprocessing=True, max_rowsize=None) ``` -更多内容详见[mindspore.dataset.GeneratorDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.GeneratorDataset.html#mindspore.dataset.GeneratorDataset)。 
+更多内容详见[mindspore.dataset.GeneratorDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.GeneratorDataset.html#mindspore.dataset.GeneratorDataset)。 ## 差异对比 diff --git a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/DistributedSampler.md b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/DistributedSampler.md index f54e81e7fa..477dafda76 100644 --- a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/DistributedSampler.md +++ b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/DistributedSampler.md @@ -1,6 +1,6 @@ # 比较与torch.utils.data.distributed.DistributedSampler的差异 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/DistributedSampler.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/DistributedSampler.md) ## torch.utils.data.distributed.DistributedSampler @@ -16,7 +16,7 @@ class torch.utils.data.distributed.DistributedSampler(dataset, num_replicas=None class mindspore.dataset.DistributedSampler(num_shards, shard_id, shuffle=True, num_samples=None, offset=-1) ``` -更多内容详见[mindspore.dataset.DistributedSampler](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.DistributedSampler.html)。 +更多内容详见[mindspore.dataset.DistributedSampler](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.DistributedSampler.html)。 ## 差异对比 diff --git a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/FrequencyMasking.md b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/FrequencyMasking.md index cf3d5f8d8e..11e69de023 100644 --- a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/FrequencyMasking.md +++ b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/FrequencyMasking.md @@ -1,6 +1,6 @@ # 比较与torchaudio.transforms.FrequencyMasking的差异 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/FrequencyMasking.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/FrequencyMasking.md) ## torchaudio.transforms.FrequencyMasking @@ -16,7 +16,7 @@ class torchaudio.transforms.FrequencyMasking(freq_mask_param: int, iid_masks: bo class mindspore.dataset.audio.FrequencyMasking(iid_masks=False, freq_mask_param=0, mask_start=0, mask_value=0.0) ``` -更多内容详见[mindspore.dataset.audio.FrequencyMasking](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_audio/mindspore.dataset.audio.FrequencyMasking.html#mindspore.dataset.audio.FrequencyMasking)。 +更多内容详见[mindspore.dataset.audio.FrequencyMasking](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_audio/mindspore.dataset.audio.FrequencyMasking.html#mindspore.dataset.audio.FrequencyMasking)。 ## 差异对比 diff --git a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/GTZAN.md b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/GTZAN.md index 3c0303dda1..ee98cd9e4e 100644 --- 
a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/GTZAN.md +++ b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/GTZAN.md @@ -1,6 +1,6 @@ # 比较与torchaudio.datasets.GTZAN的差异 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/GTZAN.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/GTZAN.md) ## torchaudio.datasets.GTZAN @@ -30,7 +30,7 @@ class mindspore.dataset.GTZANDataset( cache=None) ``` -更多内容详见[mindspore.dataset.GTZANDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.GTZANDataset.html#mindspore.dataset.GTZANDataset)。 +更多内容详见[mindspore.dataset.GTZANDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.GTZANDataset.html#mindspore.dataset.GTZANDataset)。 ## 差异对比 diff --git a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/GriffinLim.md b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/GriffinLim.md index f150f8a75a..20a68310e3 100644 --- a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/GriffinLim.md +++ b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/GriffinLim.md @@ -1,6 +1,6 @@ # 比较与torchaudio.transforms.GriffinLim的差异 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/GriffinLim.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/GriffinLim.md) ## torchaudio.transforms.GriffinLim @@ -21,7 +21,7 @@ class mindspore.dataset.audio.GriffinLim(n_fft=400, n_iter=32, win_length=None, momentum=0.99, length=None, rand_init=True) ``` -更多内容详见[mindspore.dataset.audio.GriffinLim](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_audio/mindspore.dataset.audio.GriffinLim.html#mindspore.dataset.audio.GriffinLim)。 +更多内容详见[mindspore.dataset.audio.GriffinLim](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_audio/mindspore.dataset.audio.GriffinLim.html#mindspore.dataset.audio.GriffinLim)。 ## 差异对比 diff --git a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/IMDB.md b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/IMDB.md index ca202c2ca3..2de788bd90 100644 --- a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/IMDB.md +++ b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/IMDB.md @@ -1,6 +1,6 @@ # 比较与torchtext.datasets.IMDB的差异 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/IMDB.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/IMDB.md) ## torchtext.datasets.IMDB @@ -27,7 +27,7 @@ class mindspore.dataset.IMDBDataset( cache=None) ``` 
-更多内容详见[mindspore.dataset.IMDBDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.IMDBDataset.html#mindspore.dataset.IMDBDataset)。 +更多内容详见[mindspore.dataset.IMDBDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.IMDBDataset.html#mindspore.dataset.IMDBDataset)。 ## 差异对比 diff --git a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/IWSLT2016.md b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/IWSLT2016.md index aa3b1ab6f0..e9d08c792a 100644 --- a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/IWSLT2016.md +++ b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/IWSLT2016.md @@ -1,6 +1,6 @@ # 比较与torchtext.datasets.IWSLT2016的差异 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/IWSLT2016.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/IWSLT2016.md) ## torchtext.datasets.IWSLT2016 @@ -32,7 +32,7 @@ class mindspore.dataset.IWSLT2016Dataset( cache=None) ``` -更多内容详见[mindspore.dataset.IWSLT2016Dataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.IWSLT2016Dataset.html#mindspore.dataset.IWSLT2016Dataset)。 +更多内容详见[mindspore.dataset.IWSLT2016Dataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.IWSLT2016Dataset.html#mindspore.dataset.IWSLT2016Dataset)。 ## 差异对比 diff --git a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/IWSLT2017.md b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/IWSLT2017.md index 7d6f20e8cf..4a35837b1f 100644 --- a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/IWSLT2017.md +++ b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/IWSLT2017.md @@ -1,6 +1,6 @@ # 比较与torchtext.datasets.IWSLT2017的差异 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/IWSLT2017.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/IWSLT2017.md) ## torchtext.datasets.IWSLT2017 @@ -28,7 +28,7 @@ class mindspore.dataset.IWSLT2017Dataset( cache=None) ``` -更多内容详见[mindspore.dataset.IWSLT2017Dataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.IWSLT2017Dataset.html#mindspore.dataset.IWSLT2017Dataset)。 +更多内容详见[mindspore.dataset.IWSLT2017Dataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.IWSLT2017Dataset.html#mindspore.dataset.IWSLT2017Dataset)。 ## 差异对比 diff --git a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/ImageFolder.md b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/ImageFolder.md index 586143a8e7..01e36041f0 100644 --- a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/ImageFolder.md +++ b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/ImageFolder.md @@ -1,6 +1,6 @@ # 比较与torchvision.datasets.ImageFolder的差异 
-[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/ImageFolder.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/ImageFolder.md) ## torchvision.datasets.ImageFolder @@ -33,7 +33,7 @@ class mindspore.dataset.ImageFolderDataset( decrypt=None) ``` -更多内容详见[mindspore.dataset.ImageFolderDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.ImageFolderDataset.html#mindspore.dataset.ImageFolderDataset)。 +更多内容详见[mindspore.dataset.ImageFolderDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.ImageFolderDataset.html#mindspore.dataset.ImageFolderDataset)。 ## 差异对比 diff --git a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/InverseMelScale.md b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/InverseMelScale.md index 3eb063de88..4271f1c050 100644 --- a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/InverseMelScale.md +++ b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/InverseMelScale.md @@ -1,6 +1,6 @@ # 比较与torchaudio.transforms.InverseMelScale的差异 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/InverseMelScale.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/InverseMelScale.md) ## torchaudio.transforms.InverseMelScale @@ -20,7 +20,7 @@ class mindspore.dataset.audio.InverseMelScale(n_stft, n_mels=128, sample_rate=16 norm=NormType.NONE, mel_type=MelType.HTK) ``` -更多内容详见[mindspore.dataset.audio.InverseMelScale](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_audio/mindspore.dataset.audio.InverseMelScale.html#mindspore.dataset.audio.InverseMelScale)。 +更多内容详见[mindspore.dataset.audio.InverseMelScale](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_audio/mindspore.dataset.audio.InverseMelScale.html#mindspore.dataset.audio.InverseMelScale)。 ## 差异对比 diff --git a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/LIBRITTS.md b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/LIBRITTS.md index 2f8333bcab..df1d2a5b6d 100644 --- a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/LIBRITTS.md +++ b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/LIBRITTS.md @@ -1,6 +1,6 @@ # 比较与torchaudio.datasets.LIBRITTS的差异 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/LIBRITTS.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/LIBRITTS.md) ## torchaudio.datasets.LIBRITTS @@ -29,7 +29,7 @@ class mindspore.dataset.LibriTTSDataset( cache=None) ``` 
-更多内容详见[mindspore.dataset.LibriTTSDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.LibriTTSDataset.html#mindspore.dataset.LibriTTSDataset)。 +更多内容详见[mindspore.dataset.LibriTTSDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.LibriTTSDataset.html#mindspore.dataset.LibriTTSDataset)。 ## 差异对比 diff --git a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/LJSPEECH.md b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/LJSPEECH.md index 4787d07ece..23a52f1854 100644 --- a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/LJSPEECH.md +++ b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/LJSPEECH.md @@ -1,6 +1,6 @@ # 比较与torchaudio.datasets.LJSPEECH的差异 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/LJSPEECH.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/LJSPEECH.md) ## torchaudio.datasets.LJSPEECH @@ -28,7 +28,7 @@ class mindspore.dataset.LJSpeechDataset( cache=None) ``` -更多内容详见[mindspore.dataset.LJSpeechDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.LJSpeechDataset.html#mindspore.dataset.LJSpeechDataset)。 +更多内容详见[mindspore.dataset.LJSpeechDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.LJSpeechDataset.html#mindspore.dataset.LJSpeechDataset)。 ## 差异对比 diff --git a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/Lookup.md b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/Lookup.md index c2a2c09f74..53893a8bc7 100644 --- a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/Lookup.md +++ b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/Lookup.md @@ -1,6 +1,6 @@ # 比较与torchtext.data.functional.numericalize_tokens_from_iterator的差异 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/Lookup.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/Lookup.md) ## torchtext.data.functional.numericalize_tokens_from_iterator @@ -24,7 +24,7 @@ class mindspore.dataset.text.Lookup( ) ``` -更多内容详见[mindspore.dataset.text.Lookup](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_text/mindspore.dataset.text.Lookup.html#mindspore.dataset.text.Lookup)。 +更多内容详见[mindspore.dataset.text.Lookup](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_text/mindspore.dataset.text.Lookup.html#mindspore.dataset.text.Lookup)。 ## 使用方式 diff --git a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/MNIST.md b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/MNIST.md index d9705d1961..752175e8b7 100644 --- a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/MNIST.md +++ b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/MNIST.md @@ -1,6 +1,6 @@ # 比较与torchvision.datasets.MNIST的差异 
-[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/MNIST.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/MNIST.md) ## torchvision.datasets.MNIST @@ -30,7 +30,7 @@ class mindspore.dataset.MnistDataset( cache=None) ``` -更多内容详见[mindspore.dataset.MnistDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.MnistDataset.html#mindspore.dataset.MnistDataset)。 +更多内容详见[mindspore.dataset.MnistDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.MnistDataset.html#mindspore.dataset.MnistDataset)。 ## 差异对比 diff --git a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/MelScale.md b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/MelScale.md index d4093caf00..2c623d34a7 100644 --- a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/MelScale.md +++ b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/MelScale.md @@ -1,6 +1,6 @@ # 比较与torchaudio.transforms.MelScale的差异 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/MelScale.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/MelScale.md) ## torchaudio.transforms.MelScale @@ -18,7 +18,7 @@ class mindspore.dataset.audio.MelScale(n_mels=128, sample_rate=16000, f_min=0.0, n_stft=201, norm=NormType.NONE, mel_type=MelType.HTK) ``` -更多内容详见[mindspore.dataset.audio.MelScale](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_audio/mindspore.dataset.audio.MelScale.html#mindspore.dataset.audio.MelScale)。 +更多内容详见[mindspore.dataset.audio.MelScale](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_audio/mindspore.dataset.audio.MelScale.html#mindspore.dataset.audio.MelScale)。 ## 差异对比 diff --git a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/MelSpectrogram.md b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/MelSpectrogram.md index 840f9c5e45..ed9e4bd6bb 100644 --- a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/MelSpectrogram.md +++ b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/MelSpectrogram.md @@ -1,6 +1,6 @@ # 比较与torchaudio.transforms.MelSpectrogram的差异 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/MelSpectrogram.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/MelSpectrogram.md) ## torchaudio.transforms.MelSpectrogram @@ -23,7 +23,7 @@ class mindspore.dataset.audio.MelSpectrogram(sample_rate=16000, n_fft=400, win_l center=True, pad_mode=BorderType.REFLECT, onesided=True, norm=NormType.NONE, mel_scale=MelType.HTK) ``` 
-更多内容详见[mindspore.dataset.audio.MelSpectrogram](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_audio/mindspore.dataset.audio.MelSpectrogram.html#mindspore.dataset.audio.MelSpectrogram)。 +更多内容详见[mindspore.dataset.audio.MelSpectrogram](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_audio/mindspore.dataset.audio.MelSpectrogram.html#mindspore.dataset.audio.MelSpectrogram)。 ## 差异对比 diff --git a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/Ngram.md b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/Ngram.md index b352a7f2ad..b94c6d9f36 100644 --- a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/Ngram.md +++ b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/Ngram.md @@ -1,6 +1,6 @@ # 比较与torchtext.data.utils.ngrams_iterator的差异 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/Ngram.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/Ngram.md) ## torchtext.data.utils.ngrams_iterator @@ -24,7 +24,7 @@ class mindspore.dataset.text.Ngram( ) ``` -更多内容详见[mindspore.dataset.text.Ngram](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_text/mindspore.dataset.text.Ngram.html#mindspore.dataset.text.Ngram)。 +更多内容详见[mindspore.dataset.text.Ngram](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_text/mindspore.dataset.text.Ngram.html#mindspore.dataset.text.Ngram)。 ## 使用方式 diff --git a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/Normalize.md b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/Normalize.md index 8ee90a0691..4c551ab7a4 100644 --- a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/Normalize.md +++ b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/Normalize.md @@ -1,6 +1,6 @@ # 比较与torchvision.transforms.Normalize的差异 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/Normalize.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/Normalize.md) ## torchvision.transforms.Normalize @@ -16,7 +16,7 @@ class torchvision.transforms.Normalize(mean, std, inplace=False) class mindspore.dataset.vision.Normalize(mean, std, is_hwc=True) ``` -更多内容详见[mindspore.dataset.vision.Normalize](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_vision/mindspore.dataset.vision.Normalize.html)。 +更多内容详见[mindspore.dataset.vision.Normalize](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_vision/mindspore.dataset.vision.Normalize.html)。 ## 差异对比 diff --git a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/PennTreebank.md b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/PennTreebank.md index 56c3b92d20..cbcf6c2fd4 100644 --- a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/PennTreebank.md +++ b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/PennTreebank.md @@ -1,6 +1,6 @@ # 比较与torchtext.datasets.PennTreebank的差异 
-[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/PennTreebank.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/PennTreebank.md) ## torchtext.datasets.PennTreebank @@ -26,7 +26,7 @@ class mindspore.dataset.PennTreebankDataset( cache=None) ``` -更多内容详见[mindspore.dataset.PennTreebankDataset](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.PennTreebankDataset.html#mindspore.dataset.PennTreebankDataset)。 +更多内容详见[mindspore.dataset.PennTreebankDataset](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.PennTreebankDataset.html#mindspore.dataset.PennTreebankDataset)。 ## 差异对比 diff --git a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/RandomAffine.md b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/RandomAffine.md index c29216abd3..513f3864a5 100644 --- a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/RandomAffine.md +++ b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/RandomAffine.md @@ -1,6 +1,6 @@ # 比较与torchvision.transforms.RandomAffine的差异 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/RandomAffine.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/RandomAffine.md) ## torchvision.transforms.RandomAffine @@ -16,7 +16,7 @@ class torchvision.transforms.RandomAffine(degrees, translate=None, scale=None, s class mindspore.dataset.vision.RandomAffine(degrees, translate=None, scale=None, shear=None, resample=Inter.NEAREST, fill_value=0) ``` -更多内容详见[mindspore.dataset.vision.RandomAffine](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_vision/mindspore.dataset.vision.RandomAffine.html)。 +更多内容详见[mindspore.dataset.vision.RandomAffine](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_vision/mindspore.dataset.vision.RandomAffine.html)。 ## 差异对比 diff --git a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/RandomPerspective.md b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/RandomPerspective.md index d14f73230d..dc8d1d55e1 100644 --- a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/RandomPerspective.md +++ b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/RandomPerspective.md @@ -1,6 +1,6 @@ # 比较与torchvision.transforms.RandomPerspective的差异 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/RandomPerspective.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/RandomPerspective.md) ## torchvision.transforms.RandomPerspective @@ -16,7 +16,7 @@ class 
torchvision.transforms.RandomPerspective(distortion_scale=0.5, p=0.5, inte class mindspore.dataset.vision.RandomPerspective(distortion_scale=0.5, prob=0.5, interpolation=Inter.BICUBIC) ``` -更多内容详见[mindspore.dataset.vision.RandomPerspective](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_vision/mindspore.dataset.vision.RandomPerspective.html)。 +更多内容详见[mindspore.dataset.vision.RandomPerspective](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_vision/mindspore.dataset.vision.RandomPerspective.html)。 ## 差异对比 diff --git a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/RandomResizedCrop.md b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/RandomResizedCrop.md index 8f254e13ea..7be9deea86 100644 --- a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/RandomResizedCrop.md +++ b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/RandomResizedCrop.md @@ -1,6 +1,6 @@ # 比较与torchvision.transforms.RandomResizedCrop的差异 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/RandomResizedCrop.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/RandomResizedCrop.md) ## torchvision.transforms.RandomResizedCrop @@ -16,7 +16,7 @@ class torchvision.transforms.RandomResizedCrop(size, scale=(0.08, 1.0), ratio=(0 class mindspore.dataset.vision.RandomResizedCrop(size, scale=(0.08, 1.0), ratio=(3. / 4., 4. / 3.), interpolation=Inter.BILINEAR, max_attempts=10) ``` -更多内容详见[mindspore.dataset.vision.RandomResizedCrop](https://mindspore.cn/docs/zh-CN/master/api_python/dataset_vision/mindspore.dataset.vision.RandomResizedCrop.html)。 +更多内容详见[mindspore.dataset.vision.RandomResizedCrop](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset_vision/mindspore.dataset.vision.RandomResizedCrop.html)。 ## 差异对比 diff --git a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/RandomRotation.md b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/RandomRotation.md index 06eb112d31..4eadca13e9 100644 --- a/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/RandomRotation.md +++ b/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/RandomRotation.md @@ -1,6 +1,6 @@ # 比较与torchvision.transforms.RandomRotation的差异 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/RandomRotation.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/docs/mindspore/source_zh_cn/note/api_mapping/pytorch_diff/RandomRotation.md) ## torchvision.transforms.RandomRotation @@ -16,7 +16,7 @@ class torchvision.transforms.RandomRotation(degrees, interpolation= -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_ascend_install_conda.md) 
+[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_ascend_install_conda.md) [Conda](https://docs.conda.io/en/latest/)是一个开源跨平台语言无关的包管理与环境管理系统,允许用户方便地安装不同版本的二进制软件包,以及该计算平台需要的所有库。 @@ -122,7 +122,7 @@ pip uninstall te topi hccl -y conda install mindspore -c mindspore -c conda-forge ``` -在联网状态下,安装Conda安装包时会自动下载MindSpore安装包的依赖项(依赖项详情参见[setup.py](https://gitee.com/mindspore/mindspore/blob/master/setup.py)中的required_package),其余情况需自行安装依赖。 +在联网状态下,安装Conda安装包时会自动下载MindSpore安装包的依赖项(依赖项详情参见[setup.py](https://gitee.com/mindspore/mindspore/blob/br_base/setup.py)中的required_package),其余情况需自行安装依赖。 ## 配置环境变量 diff --git a/install/mindspore_ascend_install_conda_en.md b/install/mindspore_ascend_install_conda_en.md index 3e8877f323..7a5803b509 100644 --- a/install/mindspore_ascend_install_conda_en.md +++ b/install/mindspore_ascend_install_conda_en.md @@ -15,7 +15,7 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_ascend_install_conda_en.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_ascend_install_conda_en.md) [Conda](https://docs.conda.io/en/latest/) is an open-source, cross-platform, language-agnostic package manager and environment management system. It allows users to easily install different versions of binary software packages and any required libraries appropriate for their computing platform. @@ -122,7 +122,7 @@ Ensure that you are in the Conda virtual environment and run the following comma conda install mindspore -c mindspore -c conda-forge ``` -When the network is connected, dependencies of MindSpore are automatically downloaded during the .whl package installation. (For details about the dependency, see required_package in [setup.py](https://gitee.com/mindspore/mindspore/blob/master/setup.py)). In other cases, you need to install dependencies by yourself. +When the network is connected, dependencies of MindSpore are automatically downloaded during the .whl package installation. (For details about the dependency, see required_package in [setup.py](https://gitee.com/mindspore/mindspore/blob/br_base/setup.py)). In other cases, you need to install dependencies by yourself. 
## Configuring Environment Variables diff --git a/install/mindspore_ascend_install_docker.md b/install/mindspore_ascend_install_docker.md index 4ab32cce75..e8870eed5e 100644 --- a/install/mindspore_ascend_install_docker.md +++ b/install/mindspore_ascend_install_docker.md @@ -13,7 +13,7 @@ -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_ascend_install_docker.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_ascend_install_docker.md) [Docker](https://docs.docker.com/get-docker/)是一个开源的应用容器引擎,支持将开发者的应用和依赖包打包到一个轻量级、可移植的容器中。通过使用Docker,可以实现MindSpore的快速部署,并与系统环境隔离。 diff --git a/install/mindspore_ascend_install_docker_en.md b/install/mindspore_ascend_install_docker_en.md index 987a43fcb7..b6bb497f4a 100644 --- a/install/mindspore_ascend_install_docker_en.md +++ b/install/mindspore_ascend_install_docker_en.md @@ -13,7 +13,7 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_ascend_install_docker_en.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_ascend_install_docker_en.md) [Docker](https://docs.docker.com/get-docker/) is an open source application container engine, and supports packaging developers' applications and dependency packages into a lightweight, portable container. By using Docker, MindSpore can be rapidly deployed and separated from the system environment. 
diff --git a/install/mindspore_ascend_install_pip.md b/install/mindspore_ascend_install_pip.md index db595e218c..cce0b80b8e 100644 --- a/install/mindspore_ascend_install_pip.md +++ b/install/mindspore_ascend_install_pip.md @@ -14,7 +14,7 @@ -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_ascend_install_pip.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_ascend_install_pip.md) 本文档介绍如何在Ascend环境的Linux系统上,使用pip方式快速安装MindSpore。 @@ -143,7 +143,7 @@ pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/${MS_VERSION}/Mi pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/${MS_VERSION}/MindSpore/unified/aarch64/mindspore-${MS_VERSION/-/}-cp311-cp311-linux_aarch64.whl --trusted-host ms-release.obs.cn-north-4.myhuaweicloud.com -i https://repo.huaweicloud.com/repository/pypi/simple/ ``` -在联网状态下,安装whl包时会自动下载MindSpore安装包的依赖项(依赖项详情参见[setup.py](https://gitee.com/mindspore/mindspore/blob/master/setup.py)中的required_package),其余情况需自行安装依赖。 +在联网状态下,安装whl包时会自动下载MindSpore安装包的依赖项(依赖项详情参见[setup.py](https://gitee.com/mindspore/mindspore/blob/br_base/setup.py)中的required_package),其余情况需自行安装依赖。 ## 配置环境变量 diff --git a/install/mindspore_ascend_install_pip_en.md b/install/mindspore_ascend_install_pip_en.md index f1e882fd11..4d21917ce0 100644 --- a/install/mindspore_ascend_install_pip_en.md +++ b/install/mindspore_ascend_install_pip_en.md @@ -14,7 +14,7 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_ascend_install_pip_en.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_ascend_install_pip_en.md) This document describes how to install MindSpore by pip on Linux in an Ascend environment. @@ -143,7 +143,7 @@ pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/${MS_VERSION}/Mi pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/${MS_VERSION}/MindSpore/unified/aarch64/mindspore-${MS_VERSION/-/}-cp311-cp311-linux_aarch64.whl --trusted-host ms-release.obs.cn-north-4.myhuaweicloud.com -i https://repo.huaweicloud.com/repository/pypi/simple/ ``` -When the network is connected, dependencies of MindSpore are automatically downloaded during the .whl package installation. (For details about the dependency, see required_package in [setup.py](https://gitee.com/mindspore/mindspore/blob/master/setup.py)). In other cases, you need to install dependencies by yourself. +When the network is connected, dependencies of MindSpore are automatically downloaded during the .whl package installation. (For details about the dependency, see required_package in [setup.py](https://gitee.com/mindspore/mindspore/blob/br_base/setup.py)). In other cases, you need to install dependencies by yourself. 
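The wheel URLs in the pip hunks above build the filename with the Bash substitution `${MS_VERSION/-/}`, which removes the first hyphen from the exported version string; a small sketch follows, where the example version value is an assumption.

```bash
# Assumed example value; any release tag behaves the same way.
export MS_VERSION=2.3.0-rc1
echo "${MS_VERSION}"      # 2.3.0-rc1 -> used in the URL path
echo "${MS_VERSION/-/}"   # 2.3.0rc1  -> used in the wheel filename
```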
## Configuring Environment Variables diff --git a/install/mindspore_ascend_install_source.md b/install/mindspore_ascend_install_source.md index e032e0459c..ccafbe8b38 100644 --- a/install/mindspore_ascend_install_source.md +++ b/install/mindspore_ascend_install_source.md @@ -20,7 +20,7 @@ -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_ascend_install_source.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_ascend_install_source.md) 本文档介绍如何在Ascend环境的Linux系统上,使用源码编译方式快速安装MindSpore。 @@ -225,7 +225,7 @@ pip install "numpy>=1.19.3,<=1.26.4" ## 从代码仓下载源码 ```bash -git clone https://gitee.com/mindspore/mindspore.git +git clone -b br_base https://gitee.com/mindspore/mindspore.git ``` ## 配置环境变量 @@ -265,7 +265,7 @@ bash build.sh -e ascend -S on pip install output/mindspore-*.whl -i https://repo.huaweicloud.com/repository/pypi/simple/ ``` -在联网状态下,安装whl包时会自动下载MindSpore安装包的依赖项(依赖项详情参见[setup.py](https://gitee.com/mindspore/mindspore/blob/master/setup.py)中的required_package),其余情况需自行安装依赖。 +在联网状态下,安装whl包时会自动下载MindSpore安装包的依赖项(依赖项详情参见[setup.py](https://gitee.com/mindspore/mindspore/blob/br_base/setup.py)中的required_package),其余情况需自行安装依赖。 ## 验证是否成功安装 diff --git a/install/mindspore_ascend_install_source_en.md b/install/mindspore_ascend_install_source_en.md index faa2b3f775..8a86268d9d 100644 --- a/install/mindspore_ascend_install_source_en.md +++ b/install/mindspore_ascend_install_source_en.md @@ -20,7 +20,7 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_ascend_install_source_en.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_ascend_install_source_en.md) This document describes how to install MindSpore by compiling source code on Linux in an Ascend environment. @@ -225,7 +225,7 @@ Note: The Numpy version used in the runtime environment must be no less than the ## Downloading the Source Code from the Code Repository ```bash -git clone https://gitee.com/mindspore/mindspore.git +git clone -b br_base https://gitee.com/mindspore/mindspore.git ``` ## Configuring Environment Variables @@ -265,7 +265,7 @@ Where: pip install output/mindspore-*.whl -i https://repo.huaweicloud.com/repository/pypi/simple/ ``` -When the network is connected, dependencies of MindSpore are automatically downloaded during the .whl package installation. (For details about the dependency, see required_package in [setup.py](https://gitee.com/mindspore/mindspore/blob/master/setup.py)). In other cases, you need to install dependencies by yourself. +When the network is connected, dependencies of MindSpore are automatically downloaded during the .whl package installation. (For details about the dependency, see required_package in [setup.py](https://gitee.com/mindspore/mindspore/blob/br_base/setup.py)). In other cases, you need to install dependencies by yourself. 
## Installation Verification diff --git a/install/mindspore_cpu_install_conda.md b/install/mindspore_cpu_install_conda.md index 07242db897..afa0784aa0 100644 --- a/install/mindspore_cpu_install_conda.md +++ b/install/mindspore_cpu_install_conda.md @@ -13,7 +13,7 @@ -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_cpu_install_conda.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_cpu_install_conda.md) [Conda](https://docs.conda.io/en/latest/)是一个开源跨平台语言无关的包管理与环境管理系统,允许用户方便地安装不同版本的二进制软件包,以及该计算平台需要的所有库。 @@ -90,7 +90,7 @@ conda activate mindspore_py39 conda install mindspore -c mindspore -c conda-forge -y ``` -在联网状态下,安装Conda安装包时会自动下载MindSpore安装包的依赖项(依赖项详情参见[setup.py](https://gitee.com/mindspore/mindspore/blob/master/setup.py)中的required_package),其余情况需自行安装依赖。 +在联网状态下,安装Conda安装包时会自动下载MindSpore安装包的依赖项(依赖项详情参见[setup.py](https://gitee.com/mindspore/mindspore/blob/br_base/setup.py)中的required_package),其余情况需自行安装依赖。 ## 验证是否成功安装 diff --git a/install/mindspore_cpu_install_conda_en.md b/install/mindspore_cpu_install_conda_en.md index 927e6c95fc..6a37cafda9 100644 --- a/install/mindspore_cpu_install_conda_en.md +++ b/install/mindspore_cpu_install_conda_en.md @@ -13,7 +13,7 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_cpu_install_conda_en.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_cpu_install_conda_en.md) [Conda](https://docs.conda.io/en/latest/) is an open-source, cross-platform, language-agnostic package manager and environment management system. It allows users to easily install different versions of binary software packages and any required libraries appropriate for their computing platform. @@ -90,7 +90,7 @@ Ensure that you are in the Conda virtual environment and run the following comma conda install mindspore -c mindspore -c conda-forge ``` -When the network is connected, dependency items are automatically downloaded during .whl package installation. (For details about the dependency, see required_package in [setup.py](https://gitee.com/mindspore/mindspore/blob/master/setup.py)). In other cases, you need to install dependency by yourself. +When the network is connected, dependency items are automatically downloaded during .whl package installation. (For details about the dependency, see required_package in [setup.py](https://gitee.com/mindspore/mindspore/blob/br_base/setup.py)). In other cases, you need to install dependency by yourself. 
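For the Conda route patched in these hunks, a minimal sketch of the flow the pages describe is shown below; the Python version passed to `conda create` is an assumption (the patched pages pin an exact version), while the environment name and install command come from the surrounding context.

```bash
# Sketch only: the Python version is an assumption; the patched pages pin an exact one.
conda create -n mindspore_py39 python=3.9 -y
conda activate mindspore_py39
# Install MindSpore from the mindspore channel, with conda-forge for remaining dependencies.
conda install mindspore -c mindspore -c conda-forge -y
```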
## Installation Verification diff --git a/install/mindspore_cpu_install_docker.md b/install/mindspore_cpu_install_docker.md index 09941be653..053c53c3d9 100644 --- a/install/mindspore_cpu_install_docker.md +++ b/install/mindspore_cpu_install_docker.md @@ -10,7 +10,7 @@ -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_cpu_install_docker.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_cpu_install_docker.md) [Docker](https://docs.docker.com/get-docker/)是一个开源的应用容器引擎,支持将开发者的应用和依赖包打包到一个轻量级、可移植的容器中。通过使用Docker,可以实现MindSpore的快速部署,并与系统环境隔离。 @@ -123,4 +123,4 @@ docker run -it swr.cn-south-1.myhuaweicloud.com/mindspore/mindspore-cpu:{tag} /b 进入[MindSpore安装指南页面](https://www.mindspore.cn/install),选择CPU硬件平台、Linux-x86_64操作系统和Source的安装方式,获得安装指南。运行容器后,下载MindSpore代码仓,并参考安装指南,通过源码编译方式安装MindSpore CPU版本,并进行验证。 -如果您想了解更多关于MindSpore Docker镜像的构建过程,请查看[docker repo](https://gitee.com/mindspore/mindspore/blob/master/scripts/docker/README.md#)了解详细信息。 +如果您想了解更多关于MindSpore Docker镜像的构建过程,请查看[docker repo](https://gitee.com/mindspore/mindspore/blob/br_base/scripts/docker/README.md#)了解详细信息。 diff --git a/install/mindspore_cpu_install_docker_en.md b/install/mindspore_cpu_install_docker_en.md index 262f71a289..cf275dccce 100644 --- a/install/mindspore_cpu_install_docker_en.md +++ b/install/mindspore_cpu_install_docker_en.md @@ -10,7 +10,7 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_cpu_install_docker_en.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_cpu_install_docker_en.md) [Docker](https://docs.docker.com/get-docker/) is an open source application container engine, and supports packaging developers' applications and dependency packages into a lightweight, portable container. By using Docker, MindSpore can be rapidly deployed and separated from the system environment. @@ -123,4 +123,4 @@ of which, Go to [MindSpore Installation Guide Page](https://www.mindspore.cn/install/en), and choose the CPU hardware platform, Linux-x86_64 operating system and pip installation method to get the installation guide. After running the container, download the MindSpore code repository and refer to the installation guide, install the MindSpore CPU version through source code compilation, and verify it. -If you want to know more about the MindSpore Docker image building process, please check [docker repo](https://gitee.com/mindspore/mindspore/blob/master/scripts/docker/README.md#) for details. +If you want to know more about the MindSpore Docker image building process, please check [docker repo](https://gitee.com/mindspore/mindspore/blob/br_base/scripts/docker/README.md#) for details. 
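For the Docker route referenced above, a hedged sketch of pulling and entering the CPU image follows; `{tag}` is the placeholder used by the patched page and must be replaced with a published image tag.

```bash
# Sketch only: {tag} is a placeholder for a published MindSpore CPU image tag.
docker pull swr.cn-south-1.myhuaweicloud.com/mindspore/mindspore-cpu:{tag}
docker run -it swr.cn-south-1.myhuaweicloud.com/mindspore/mindspore-cpu:{tag} /bin/bash
```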
diff --git a/install/mindspore_cpu_install_nightly.md b/install/mindspore_cpu_install_nightly.md index 5c5bf18a07..7c7fe19ee9 100644 --- a/install/mindspore_cpu_install_nightly.md +++ b/install/mindspore_cpu_install_nightly.md @@ -12,7 +12,7 @@ -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_cpu_install_nightly.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_cpu_install_nightly.md) MindSpore Nightly是包含当前最新功能与bugfix的预览版本,但是可能未经完整的测试与验证,希望体验最新功能或者问题修复的用户可以使用该版本。 @@ -114,7 +114,7 @@ pip install mindspore-dev -i https://repo.huaweicloud.com/repository/pypi/simple 其中: -- 在联网状态下,安装whl包时会自动下载mindspore安装包的依赖项(依赖项详情参见[setup.py](https://gitee.com/mindspore/mindspore/blob/master/setup.py)中的required_package),其余情况需自行安装依赖。 +- 在联网状态下,安装whl包时会自动下载mindspore安装包的依赖项(依赖项详情参见[setup.py](https://gitee.com/mindspore/mindspore/blob/br_base/setup.py)中的required_package),其余情况需自行安装依赖。 - pip会自动安装当前最新版本的MindSpore Nightly版本,如果需要安装指定版本,请参照下方升级MindSpore版本相关指导,在下载时手动指定版本。 ## 验证是否成功安装 diff --git a/install/mindspore_cpu_install_nightly_en.md b/install/mindspore_cpu_install_nightly_en.md index c2f8753eb0..a4552e7810 100644 --- a/install/mindspore_cpu_install_nightly_en.md +++ b/install/mindspore_cpu_install_nightly_en.md @@ -12,7 +12,7 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_cpu_install_nightly_en.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_cpu_install_nightly_en.md) MindSpore Nightly is a preview version which includes latest features and bugfixes, not fully supported and tested. Install MindSpore Nightly version if you wish to try out the latest features or bug fixes can use this version. @@ -114,7 +114,7 @@ pip install mindspore-dev -i https://repo.huaweicloud.com/repository/pypi/simple Of which, -- When the network is connected, dependencies are automatically downloaded during .whl package installation. (For details about the dependencies, see required_package in [setup.py](https://gitee.com/mindspore/mindspore/blob/master/setup.py)). In other cases, you need to install dependencies by yourself. +- When the network is connected, dependencies are automatically downloaded during .whl package installation. (For details about the dependencies, see required_package in [setup.py](https://gitee.com/mindspore/mindspore/blob/br_base/setup.py)). In other cases, you need to install dependencies by yourself. - pip will be installing the latest version of MindSpore Nightly automatically. If you wish to specify the version to be installed, please refer to the instruction below regarding to version update, and specify version manually. 
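The Nightly hunks above note that pip installs the latest `mindspore-dev` build unless a version is specified; a sketch of pinning is shown below, where the version string is a made-up placeholder rather than a real build number.

```bash
# The version below is a placeholder; substitute an actual mindspore-dev build number.
pip install mindspore-dev==2.3.0.dev20240101 -i https://repo.huaweicloud.com/repository/pypi/simple/
```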
## Installation Verification diff --git a/install/mindspore_cpu_install_pip.md b/install/mindspore_cpu_install_pip.md index 7f688c3c0c..f5e38c080a 100644 --- a/install/mindspore_cpu_install_pip.md +++ b/install/mindspore_cpu_install_pip.md @@ -12,7 +12,7 @@ -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_cpu_install_pip.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_cpu_install_pip.md) 本文档介绍如何在CPU环境的Linux系统上,使用pip方式快速安装MindSpore。下面以Ubuntu 18.04为例说明MindSpore安装步骤。 @@ -106,7 +106,7 @@ pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/${MS_VERSION}/Mi pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/${MS_VERSION}/MindSpore/unified/aarch64/mindspore-${MS_VERSION/-/}-cp311-cp311-linux_aarch64.whl --trusted-host ms-release.obs.cn-north-4.myhuaweicloud.com -i https://repo.huaweicloud.com/repository/pypi/simple/ ``` -在联网状态下,安装whl包时会自动下载mindspore安装包的依赖项(依赖项详情参见[setup.py](https://gitee.com/mindspore/mindspore/blob/master/setup.py)中的required_package),其余情况需自行安装依赖。 +在联网状态下,安装whl包时会自动下载mindspore安装包的依赖项(依赖项详情参见[setup.py](https://gitee.com/mindspore/mindspore/blob/br_base/setup.py)中的required_package),其余情况需自行安装依赖。 ## 验证是否成功安装 diff --git a/install/mindspore_cpu_install_pip_en.md b/install/mindspore_cpu_install_pip_en.md index bb72a2a310..2f6cee6392 100644 --- a/install/mindspore_cpu_install_pip_en.md +++ b/install/mindspore_cpu_install_pip_en.md @@ -12,7 +12,7 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_cpu_install_pip_en.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_cpu_install_pip_en.md) This document describes how to install MindSpore by pip on Linux in a CPU environment. The following takes Ubuntu 18.04 as an example to describe how to install MindSpore. @@ -106,7 +106,7 @@ pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/${MS_VERSION}/Mi pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/${MS_VERSION}/MindSpore/unified/aarch64/mindspore-${MS_VERSION/-/}-cp311-cp311-linux_aarch64.whl --trusted-host ms-release.obs.cn-north-4.myhuaweicloud.com -i https://repo.huaweicloud.com/repository/pypi/simple/ ``` -When the network is connected, dependency items are automatically downloaded during .whl package installation. (For details about the dependency, see required_package in [setup.py](https://gitee.com/mindspore/mindspore/blob/master/setup.py)). In other cases, you need to install dependency by yourself. +When the network is connected, dependency items are automatically downloaded during .whl package installation. (For details about the dependency, see required_package in [setup.py](https://gitee.com/mindspore/mindspore/blob/br_base/setup.py)). In other cases, you need to install dependency by yourself. 
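The patched pip pages follow the install step with an installation-verification section; as a minimal sketch, assuming the `mindspore.run_check()` helper available in current releases, the check can be run as shown below.

```bash
# Quick post-install sanity check (assumes mindspore.run_check() is available).
python -c "import mindspore; mindspore.run_check()"
```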
## Installation Verification diff --git a/install/mindspore_cpu_install_source.md b/install/mindspore_cpu_install_source.md index f6265da64c..97537b04fd 100644 --- a/install/mindspore_cpu_install_source.md +++ b/install/mindspore_cpu_install_source.md @@ -17,7 +17,7 @@ -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_cpu_install_source.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_cpu_install_source.md) 本文档介绍如何在CPU环境的Linux系统上,使用源码编译方式快速安装MindSpore。下面以Ubuntu 18.04为例说明MindSpore编译安装步骤。 @@ -133,7 +133,7 @@ sudo apt-get install llvm-12-dev -y ## 从代码仓下载源码 ```bash -git clone https://gitee.com/mindspore/mindspore.git +git clone -b br_base https://gitee.com/mindspore/mindspore.git ``` ## 编译MindSpore @@ -157,7 +157,7 @@ bash build.sh -e cpu -j4 -S on pip install output/mindspore-*.whl -i https://repo.huaweicloud.com/repository/pypi/simple/ ``` -在联网状态下,安装whl包时会自动下载mindspore安装包的依赖项(依赖项详情参见[setup.py](https://gitee.com/mindspore/mindspore/blob/master/setup.py)中的required_package),其余情况需自行安装依赖。 +在联网状态下,安装whl包时会自动下载mindspore安装包的依赖项(依赖项详情参见[setup.py](https://gitee.com/mindspore/mindspore/blob/br_base/setup.py)中的required_package),其余情况需自行安装依赖。 ## 验证安装是否成功 diff --git a/install/mindspore_cpu_install_source_en.md b/install/mindspore_cpu_install_source_en.md index 6aad29a8b5..2144d6b487 100644 --- a/install/mindspore_cpu_install_source_en.md +++ b/install/mindspore_cpu_install_source_en.md @@ -17,7 +17,7 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_cpu_install_source_en.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_cpu_install_source_en.md) This document describes how to install MindSpore by compiling source code on Linux system in the CPU environment. The following takes Ubuntu 18.04 as an example to describe how to compile and install MindSpore. @@ -131,7 +131,7 @@ sudo apt-get install llvm-12-dev -y ## Downloading the Source Code from the Code Repository ```bash -git clone https://gitee.com/mindspore/mindspore.git +git clone -b br_base https://gitee.com/mindspore/mindspore.git ``` ## Compiling MindSpore @@ -155,7 +155,7 @@ Where: pip install output/mindspore-*.whl -i https://repo.huaweicloud.com/repository/pypi/simple/ ``` -When the network is connected, dependencies of MindSpore are automatically downloaded during the .whl package installation. For details about dependencies, see required_package in the [setup.py](https://gitee.com/mindspore/mindspore/blob/master/setup.py). In other cases, install the dependencies by yourself. +When the network is connected, dependencies of MindSpore are automatically downloaded during the .whl package installation. For details about dependencies, see required_package in the [setup.py](https://gitee.com/mindspore/mindspore/blob/br_base/setup.py). In other cases, install the dependencies by yourself. 
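Putting the source-build hunks above together, a hedged end-to-end sketch of the CPU build is shown below; the branch, build flags, and wheel path come from the surrounding context, while the final check line is an assumption.

```bash
# End-to-end sketch of the CPU source build described above.
git clone -b br_base https://gitee.com/mindspore/mindspore.git
cd mindspore
bash build.sh -e cpu -j4 -S on   # -e selects the backend, -j limits build parallelism
pip install output/mindspore-*.whl -i https://repo.huaweicloud.com/repository/pypi/simple/
python -c "import mindspore; mindspore.run_check()"   # assumed verification helper
```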
## Installation Verification diff --git a/install/mindspore_cpu_mac_install_conda.md b/install/mindspore_cpu_mac_install_conda.md index 123b534f73..bc08fa54f4 100644 --- a/install/mindspore_cpu_mac_install_conda.md +++ b/install/mindspore_cpu_mac_install_conda.md @@ -11,7 +11,7 @@ -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_cpu_install_conda.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_cpu_install_conda.md) [Conda](https://docs.conda.io/en/latest/)是一个开源跨平台语言无关的包管理与环境管理系统,允许用户方便地安装不同版本的二进制软件包,以及该计算平台需要的所有库。 @@ -50,7 +50,7 @@ conda install mindspore -c mindspore -c conda-forge ``` -在联网状态下,安装Conda安装包时会自动下载MindSpore安装包的依赖项(依赖项详情参见[setup.py](https://gitee.com/mindspore/mindspore/blob/master/setup.py)中的required_package),其余情况需自行安装依赖。 +在联网状态下,安装Conda安装包时会自动下载MindSpore安装包的依赖项(依赖项详情参见[setup.py](https://gitee.com/mindspore/mindspore/blob/br_base/setup.py)中的required_package),其余情况需自行安装依赖。 ## 验证是否成功安装 diff --git a/install/mindspore_cpu_mac_install_conda_en.md b/install/mindspore_cpu_mac_install_conda_en.md index ef683e3c28..1575e0032d 100644 --- a/install/mindspore_cpu_mac_install_conda_en.md +++ b/install/mindspore_cpu_mac_install_conda_en.md @@ -11,7 +11,7 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_cpu_install_conda_en.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_cpu_install_conda_en.md) [Conda](https://docs.conda.io/en/latest/) is an open-source, cross-platform, language-agnostic package manager and environment management system. It allows users to easily install different versions of binary software packages and any required libraries appropriate for their computing platform. @@ -50,7 +50,7 @@ Ensure that you are in the Conda virtual environment and run the following comma conda install mindspore -c mindspore -c conda-forge ``` -When the network is connected, dependency items are automatically downloaded during .whl package installation. (For details about the dependency, see required_package in [setup.py](https://gitee.com/mindspore/mindspore/blob/master/setup.py)). In other cases, you need to install dependency by yourself. +When the network is connected, dependency items are automatically downloaded during .whl package installation. (For details about the dependency, see required_package in [setup.py](https://gitee.com/mindspore/mindspore/blob/br_base/setup.py)). In other cases, you need to install dependency by yourself. 
## Installation Verification diff --git a/install/mindspore_cpu_mac_install_nightly.md b/install/mindspore_cpu_mac_install_nightly.md index 993f561357..8c46b4508c 100644 --- a/install/mindspore_cpu_mac_install_nightly.md +++ b/install/mindspore_cpu_mac_install_nightly.md @@ -11,7 +11,7 @@ -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_cpu_mac_install_pip.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_cpu_mac_install_pip.md) MindSpore Nightly是包含当前最新功能与bugfix的预览版本,但是可能未经完整的测试与验证,希望体验最新功能或者问题修复的用户可以使用该版本。 @@ -57,7 +57,7 @@ pip install mindspore-dev -i https://repo.huaweicloud.com/repository/pypi/simple 其中: -- 在联网状态下,安装whl包时会自动下载mindspore安装包的依赖项(依赖项详情参见[setup.py](https://gitee.com/mindspore/mindspore/blob/master/setup.py)中的required_package),其余情况需自行安装依赖。 +- 在联网状态下,安装whl包时会自动下载mindspore安装包的依赖项(依赖项详情参见[setup.py](https://gitee.com/mindspore/mindspore/blob/br_base/setup.py)中的required_package),其余情况需自行安装依赖。 - pip会自动安装当前最新版本的MindSpore Nightly,如果需要安装指定版本,请参照下方升级MindSpore版本相关指导,在下载时手动指定版本。 ## 验证是否成功安装 diff --git a/install/mindspore_cpu_mac_install_nightly_en.md b/install/mindspore_cpu_mac_install_nightly_en.md index b89a59430c..f18d93906a 100644 --- a/install/mindspore_cpu_mac_install_nightly_en.md +++ b/install/mindspore_cpu_mac_install_nightly_en.md @@ -11,7 +11,7 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_cpu_mac_install_pip_en.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_cpu_mac_install_pip_en.md) MindSpore Nightly is a preview version which includes latest features and bugfixes, not fully supported and tested. Install MindSpore Nightly version if you wish to try out the latest features or bug fixes can use this version. @@ -57,7 +57,7 @@ pip install mindspore-dev -i https://repo.huaweicloud.com/repository/pypi/simple Of which, -- When the network is connected, dependencies are automatically downloaded during .whl package installation. (For details about the dependencies, see required_package in [setup.py](https://gitee.com/mindspore/mindspore/blob/master/setup.py)). In other cases, you need to install dependencies by yourself. +- When the network is connected, dependencies are automatically downloaded during .whl package installation. (For details about the dependencies, see required_package in [setup.py](https://gitee.com/mindspore/mindspore/blob/br_base/setup.py)). In other cases, you need to install dependencies by yourself. - pip will be installing the latest version of MindSpore Nightly automatically. If you wish to specify the version to be installed, please refer to the instruction below regarding to version update, and specify version manually. 
## Installation Verification diff --git a/install/mindspore_cpu_mac_install_pip.md b/install/mindspore_cpu_mac_install_pip.md index e2acdf6b06..c70afb44f2 100644 --- a/install/mindspore_cpu_mac_install_pip.md +++ b/install/mindspore_cpu_mac_install_pip.md @@ -11,7 +11,7 @@ -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_cpu_mac_install_pip.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_cpu_mac_install_pip.md) [Conda](https://docs.conda.io/en/latest/)是一个开源跨平台语言无关的包管理与环境管理系统,允许用户方便地安装不同版本的二进制软件包,以及该计算平台需要的所有库。推荐在MacOS上通过Conda使用MindSpore。 @@ -70,7 +70,7 @@ pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/${MS_VERSION}/Mi pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/${MS_VERSION}/MindSpore/cpu/aarch64/mindspore-${MS_VERSION/-/}-cp311-cp311-macosx_11_0_arm64.whl --trusted-host ms-release.obs.cn-north-4.myhuaweicloud.com -i https://repo.huaweicloud.com/repository/pypi/simple/ ``` -在联网状态下,安装whl包时会自动下载mindspore安装包的依赖项(依赖项详情参见[setup.py](https://gitee.com/mindspore/mindspore/blob/master/setup.py)中的required_package),其余情况需自行安装依赖。 +在联网状态下,安装whl包时会自动下载mindspore安装包的依赖项(依赖项详情参见[setup.py](https://gitee.com/mindspore/mindspore/blob/br_base/setup.py)中的required_package),其余情况需自行安装依赖。 ## 验证是否成功安装 diff --git a/install/mindspore_cpu_mac_install_pip_en.md b/install/mindspore_cpu_mac_install_pip_en.md index 1fc6787992..412edcd618 100644 --- a/install/mindspore_cpu_mac_install_pip_en.md +++ b/install/mindspore_cpu_mac_install_pip_en.md @@ -11,7 +11,7 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_cpu_mac_install_pip_en.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_cpu_mac_install_pip_en.md) [Conda](https://docs.conda.io/en/latest/) is an open-source, cross-platform, language-agnostic package manager and environment management system. It allows users to easily install different versions of binary software packages and any required libraries appropriate for their computing platform. @@ -70,7 +70,7 @@ pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/${MS_VERSION}/Mi pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/${MS_VERSION}/MindSpore/cpu/aarch64/mindspore-${MS_VERSION/-/}-cp311-cp311-macosx_11_0_arm64.whl --trusted-host ms-release.obs.cn-north-4.myhuaweicloud.com -i https://repo.huaweicloud.com/repository/pypi/simple/ ``` -When the network is connected, dependencies of MindSpore are automatically downloaded during the .whl package installation. For details about dependencies, see required_package in the [setup.py](https://gitee.com/mindspore/mindspore/blob/master/setup.py). In other cases, install the dependencies by yourself. +When the network is connected, dependencies of MindSpore are automatically downloaded during the .whl package installation. For details about dependencies, see required_package in the [setup.py](https://gitee.com/mindspore/mindspore/blob/br_base/setup.py). In other cases, install the dependencies by yourself. 
## Installation Verification diff --git a/install/mindspore_cpu_mac_install_source.md b/install/mindspore_cpu_mac_install_source.md index e88d1bc8f8..8fa61fdd0d 100644 --- a/install/mindspore_cpu_mac_install_source.md +++ b/install/mindspore_cpu_mac_install_source.md @@ -13,7 +13,7 @@ -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_cpu_mac_install_source.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_cpu_mac_install_source.md) [Conda](https://docs.conda.io/en/latest/)是一个开源跨平台语言无关的包管理与环境管理系统,允许用户方便地安装不同版本的二进制软件包,以及该计算平台需要的所有库。推荐在MacOS上通过Conda使用MindSpore。 @@ -63,7 +63,7 @@ ## 从代码仓下载源码 ```bash -git clone https://gitee.com/mindspore/mindspore.git +git clone -b br_base https://gitee.com/mindspore/mindspore.git ``` ## 编译MindSpore diff --git a/install/mindspore_cpu_mac_install_source_en.md b/install/mindspore_cpu_mac_install_source_en.md index 20a77591b0..b4af2eb4c1 100644 --- a/install/mindspore_cpu_mac_install_source_en.md +++ b/install/mindspore_cpu_mac_install_source_en.md @@ -13,7 +13,7 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_cpu_mac_install_source_en.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_cpu_mac_install_source_en.md) [Conda](https://docs.conda.io/en/latest/) is an open-source, cross-platform, language-agnostic package manager and environment management system. It allows users to easily install different versions of binary software packages and any required libraries appropriate for their computing platform. 
@@ -63,7 +63,7 @@ Create a Conda virtual environment based on the Python version you want to use a ## Downloading Source Code from Code Repository ```bash -git clone https://gitee.com/mindspore/mindspore.git +git clone -b br_base https://gitee.com/mindspore/mindspore.git ``` ## Compiling MindSpore diff --git a/install/mindspore_cpu_win_install_conda.md b/install/mindspore_cpu_win_install_conda.md index 063ec764c5..8c22cece12 100644 --- a/install/mindspore_cpu_win_install_conda.md +++ b/install/mindspore_cpu_win_install_conda.md @@ -11,7 +11,7 @@ -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_cpu_win_install_conda.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_cpu_win_install_conda.md) [Conda](https://docs.conda.io/en/latest/)是一个开源跨平台语言无关的包管理与环境管理系统,允许用户方便地安装不同版本的二进制软件包,以及该计算平台需要的所有库。 @@ -45,7 +45,7 @@ conda activate mindspore_py39 conda install mindspore -c mindspore -c conda-forge ``` -在联网状态下,安装Conda安装包时会自动下载MindSpore安装包的依赖项(依赖项详情参见[setup.py](https://gitee.com/mindspore/mindspore/blob/master/setup.py)中的required_package),其余情况需自行安装依赖。 +在联网状态下,安装Conda安装包时会自动下载MindSpore安装包的依赖项(依赖项详情参见[setup.py](https://gitee.com/mindspore/mindspore/blob/br_base/setup.py)中的required_package),其余情况需自行安装依赖。 ## 验证是否成功安装 diff --git a/install/mindspore_cpu_win_install_conda_en.md b/install/mindspore_cpu_win_install_conda_en.md index bc1a31ad92..45e7284063 100644 --- a/install/mindspore_cpu_win_install_conda_en.md +++ b/install/mindspore_cpu_win_install_conda_en.md @@ -11,7 +11,7 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_cpu_win_install_conda_en.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_cpu_win_install_conda_en.md) [Conda](https://docs.conda.io/en/latest/) is an open-source, cross-platform, language-agnostic package manager and environment management system. It allows users to easily install different versions of binary software packages and any required libraries appropriate for their computing platform. @@ -45,7 +45,7 @@ Ensure that you are in the Conda virtual environment and run the following comma conda install mindspore -c mindspore -c conda-forge ``` -When the network is connected, dependency items are automatically downloaded during .whl package installation. (For details about the dependency, see required_package in [setup.py](https://gitee.com/mindspore/mindspore/blob/master/setup.py)). In other cases, you need to install dependency by yourself. +When the network is connected, dependency items are automatically downloaded during .whl package installation. (For details about the dependency, see required_package in [setup.py](https://gitee.com/mindspore/mindspore/blob/br_base/setup.py)). In other cases, you need to install dependency by yourself. 
## Installation Verification diff --git a/install/mindspore_cpu_win_install_nightly.md b/install/mindspore_cpu_win_install_nightly.md index a38dd9d5b5..0b9020920f 100644 --- a/install/mindspore_cpu_win_install_nightly.md +++ b/install/mindspore_cpu_win_install_nightly.md @@ -10,7 +10,7 @@ -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_cpu_win_install_nightly.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_cpu_win_install_nightly.md) MindSpore Nightly是包含当前最新功能与bugfix的预览版本,但是可能未经完整的测试与验证,希望体验最新功能或者问题修复的用户可以使用该版本。 @@ -31,7 +31,7 @@ pip install mindspore-dev -i https://repo.huaweicloud.com/repository/pypi/simple 其中: -- 在联网状态下,安装whl包时会自动下载mindspore安装包的依赖项(依赖项详情参见[setup.py](https://gitee.com/mindspore/mindspore/blob/master/setup.py)中的required_package),其余情况需自行安装依赖。 +- 在联网状态下,安装whl包时会自动下载mindspore安装包的依赖项(依赖项详情参见[setup.py](https://gitee.com/mindspore/mindspore/blob/br_base/setup.py)中的required_package),其余情况需自行安装依赖。 - pip会自动安装当前最新版本的MindSpore Nightly,如果需要安装指定版本,请参照下方升级MindSpore版本相关指导,在下载时手动指定版本。 ## 验证是否成功安装 diff --git a/install/mindspore_cpu_win_install_nightly_en.md b/install/mindspore_cpu_win_install_nightly_en.md index c25aa7df31..fa2a564c9e 100644 --- a/install/mindspore_cpu_win_install_nightly_en.md +++ b/install/mindspore_cpu_win_install_nightly_en.md @@ -10,7 +10,7 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_cpu_win_install_nightly_en.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_cpu_win_install_nightly_en.md) MindSpore Nightly is a preview version which includes latest features and bugfixes, not fully supported and tested. Install MindSpore Nightly version if you wish to try out the latest changes on MindSpore. @@ -31,7 +31,7 @@ pip install mindspore-dev -i https://repo.huaweicloud.com/repository/pypi/simple Of which, -- When the network is connected, dependency items are automatically downloaded during .whl package installation. (For details about the dependency, see required_package in [setup.py](https://gitee.com/mindspore/mindspore/blob/master/setup.py)). In other cases, you need to install dependency by yourself. +- When the network is connected, dependency items are automatically downloaded during .whl package installation. (For details about the dependency, see required_package in [setup.py](https://gitee.com/mindspore/mindspore/blob/br_base/setup.py)). In other cases, you need to install dependency by yourself. - pip will be installing the latest version of MindSpore Nightly automatically. If you wish to specify the version to be installed, please refer to the instruction below regarding to version update, and specify version manually. 
## Installation Verification diff --git a/install/mindspore_cpu_win_install_pip.md b/install/mindspore_cpu_win_install_pip.md index 62ac888dfb..6dfebeb72d 100644 --- a/install/mindspore_cpu_win_install_pip.md +++ b/install/mindspore_cpu_win_install_pip.md @@ -10,7 +10,7 @@ -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_cpu_win_install_pip.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_cpu_win_install_pip.md) 本文档介绍如何在CPU环境的Windows系统上,使用pip方式快速安装MindSpore。 @@ -38,7 +38,7 @@ pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/%MS_VERSION%/Min pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/%MS_VERSION%/MindSpore/cpu/x86_64/mindspore-%MS_VERSION:-=%-cp311-cp311-win_amd64.whl --trusted-host ms-release.obs.cn-north-4.myhuaweicloud.com -i https://repo.huaweicloud.com/repository/pypi/simple/ ``` -在联网状态下,安装whl包时会自动下载mindspore安装包的依赖项(依赖项详情参见[setup.py](https://gitee.com/mindspore/mindspore/blob/master/setup.py)中的required_package),其余情况需自行安装依赖。 +在联网状态下,安装whl包时会自动下载mindspore安装包的依赖项(依赖项详情参见[setup.py](https://gitee.com/mindspore/mindspore/blob/br_base/setup.py)中的required_package),其余情况需自行安装依赖。 ## 验证是否成功安装 diff --git a/install/mindspore_cpu_win_install_pip_en.md b/install/mindspore_cpu_win_install_pip_en.md index b2f1bbdb71..c466b902bc 100644 --- a/install/mindspore_cpu_win_install_pip_en.md +++ b/install/mindspore_cpu_win_install_pip_en.md @@ -10,7 +10,7 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_cpu_win_install_pip_en.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_cpu_win_install_pip_en.md) This document describes how to install MindSpore by pip on Windows in a CPU environment. @@ -38,7 +38,7 @@ pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/%MS_VERSION%/Min pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/%MS_VERSION%/MindSpore/cpu/x86_64/mindspore-%MS_VERSION:-=%-cp311-cp311-win_amd64.whl --trusted-host ms-release.obs.cn-north-4.myhuaweicloud.com -i https://repo.huaweicloud.com/repository/pypi/simple/ ``` -When the network is connected, dependency items are automatically downloaded during .whl package installation. (For details about the dependency, see required_package in [setup.py](https://gitee.com/mindspore/mindspore/blob/master/setup.py)). In other cases, you need to install dependency by yourself. +When the network is connected, dependency items are automatically downloaded during .whl package installation. (For details about the dependency, see required_package in [setup.py](https://gitee.com/mindspore/mindspore/blob/br_base/setup.py)). In other cases, you need to install dependency by yourself. 
## Installation Verification diff --git a/install/mindspore_cpu_win_install_source.md b/install/mindspore_cpu_win_install_source.md index 92ffaaf98f..9b72341802 100644 --- a/install/mindspore_cpu_win_install_source.md +++ b/install/mindspore_cpu_win_install_source.md @@ -12,7 +12,7 @@ -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_cpu_win_install_source.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_cpu_win_install_source.md) 本文档介绍如何在CPU环境的Windows系统上,使用源码编译方法快速安装MindSpore。 @@ -25,13 +25,13 @@ - 确认安装Python(>=3.9.0)。可以从[Python官网](https://www.python.org/downloads/windows/)或者[华为云](https://repo.huaweicloud.com/python/)选择合适的版本进行安装。 - 确认安装[wheel 0.32.0及以上版本](https://pypi.org/project/wheel/)。 - 确认安装[PyYAML](https://pypi.org/project/pyyaml/) (>=6.0 并且 <= 6.0.2)。如果没有安装,可以使用 `pip install pyyaml` 命令安装。 -- 确认安装[MSYS2软件](https://www.msys2.org/)。详细请查看[Windows上安装MSYS2软件](https://gitee.com/mindspore/docs/blob/master/install/third_party/msys_software_install.md)。 +- 确认安装[MSYS2软件](https://www.msys2.org/)。详细请查看[Windows上安装MSYS2软件](https://gitee.com/mindspore/docs/blob/br_base/install/third_party/msys_software_install.md)。 - 确认安装[Numpy](https://pypi.org/project/numpy/) (>=1.19.3 并且 <= 1.26.4)。如果没有安装,可以使用 `pip install numpy` 命令安装。 ## 从代码仓下载源码 ```bash -git clone https://gitee.com/mindspore/mindspore.git +git clone -b br_base https://gitee.com/mindspore/mindspore.git ``` ## 编译MindSpore @@ -48,7 +48,7 @@ call build.bat ms_vs_cpu for %x in (output\mindspore*.whl) do pip install %x -i https://repo.huaweicloud.com/repository/pypi/simple/ ``` -在联网状态下,安装whl包时会自动下载mindspore安装包的依赖项(依赖项详情参见[setup.py](https://gitee.com/mindspore/mindspore/blob/master/setup.py)中的required_package),其余情况需自行安装依赖。 +在联网状态下,安装whl包时会自动下载mindspore安装包的依赖项(依赖项详情参见[setup.py](https://gitee.com/mindspore/mindspore/blob/br_base/setup.py)中的required_package),其余情况需自行安装依赖。 ## 验证是否成功安装 diff --git a/install/mindspore_cpu_win_install_source_en.md b/install/mindspore_cpu_win_install_source_en.md index 8838070125..c8aabd1da0 100644 --- a/install/mindspore_cpu_win_install_source_en.md +++ b/install/mindspore_cpu_win_install_source_en.md @@ -12,7 +12,7 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_cpu_win_install_source_en.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_cpu_win_install_source_en.md) This document describes how to install MindSpore by compiling source code on Windows system in a CPU environment. @@ -25,13 +25,13 @@ This document describes how to install MindSpore by compiling source code on Win - Ensure that you have Python(>=3.9.0) installed. If not installed, follow the links to [Python official website](https://www.python.org/downloads/windows/) or [Huawei Cloud](https://repo.huaweicloud.com/python/) to download and install Python. - Ensure that [wheel 0.32.0 and later](https://pypi.org/project/wheel/) is installed. - Ensure that [PyYAML](https://pypi.org/project/pyyaml/) (>=6.0 and <= 6.0.2) is installed. 
Use `pip install pyyaml` if it's not installed. -- Ensure that [MSYS2 software](https://www.msys2.org/) is installed. For details, please check [Installing MSYS2 Software on Windows](https://gitee.com/mindspore/docs/blob/master/install/third_party/msys_software_install_en.md). +- Ensure that [MSYS2 software](https://www.msys2.org/) is installed. For details, please check [Installing MSYS2 Software on Windows](https://gitee.com/mindspore/docs/blob/br_base/install/third_party/msys_software_install_en.md). - Ensure that [Numpy](https://pypi.org/project/numpy/) (>=1.19.3 and <= 1.26.4) is installed. Use `pip install numpy` if it's not installed. ## Downloading Source Code from Code Repository ```bash -git clone https://gitee.com/mindspore/mindspore.git +git clone -b br_base https://gitee.com/mindspore/mindspore.git ``` ## Compiling MindSpore @@ -48,7 +48,7 @@ call build.bat ms_vs_cpu for %x in (output\mindspore*.whl) do pip install %x -i https://repo.huaweicloud.com/repository/pypi/simple/ ``` -When the network is connected, dependency items are automatically downloaded during .whl package installation. (For details about the dependency, see required_package in [setup.py](https://gitee.com/mindspore/mindspore/blob/master/setup.py)). In other cases, you need to install dependency by yourself. +When the network is connected, dependency items are automatically downloaded during .whl package installation. (For details about the dependency, see required_package in [setup.py](https://gitee.com/mindspore/mindspore/blob/br_base/setup.py)). In other cases, you need to install dependency by yourself. ## Installation Verification diff --git a/install/mindspore_gpu_install_conda.md b/install/mindspore_gpu_install_conda.md index 0f08d90a2e..bcf6b4a5e7 100644 --- a/install/mindspore_gpu_install_conda.md +++ b/install/mindspore_gpu_install_conda.md @@ -16,7 +16,7 @@ -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_gpu_install_conda.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_gpu_install_conda.md) [Conda](https://docs.conda.io/en/latest/)是一个开源跨平台语言无关的包管理与环境管理系统,允许用户方便地安装不同版本的二进制软件包,以及该计算平台需要的所有库。 @@ -165,7 +165,7 @@ CUDA 11.6版本: conda install mindspore -c mindspore -c conda-forge ``` -在联网状态下,安装MindSpore时会自动下载MindSpore安装包的依赖项(依赖项详情参见[setup.py](https://gitee.com/mindspore/mindspore/blob/master/setup.py)中的required_package),其余情况需自行安装依赖。 +在联网状态下,安装MindSpore时会自动下载MindSpore安装包的依赖项(依赖项详情参见[setup.py](https://gitee.com/mindspore/mindspore/blob/br_base/setup.py)中的required_package),其余情况需自行安装依赖。 ## 验证是否成功安装 diff --git a/install/mindspore_gpu_install_conda_en.md b/install/mindspore_gpu_install_conda_en.md index fb0ae2d542..1723a31111 100644 --- a/install/mindspore_gpu_install_conda_en.md +++ b/install/mindspore_gpu_install_conda_en.md @@ -16,7 +16,7 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_gpu_install_conda_en.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_gpu_install_conda_en.md) 
[Conda](https://docs.conda.io/en/latest/) is an open-source, cross-platform, language-agnostic package manager and environment management system. It allows users to easily install different versions of binary software packages and any required libraries appropriate for their computing platform. @@ -165,7 +165,7 @@ For CUDA 11.6: conda install mindspore -c mindspore -c conda-forge ``` -When the network is connected, dependency items are automatically downloaded during MindSpore installation. (For details about the dependency, see required_package in [setup.py](https://gitee.com/mindspore/mindspore/blob/master/setup.py)). In other cases, you need to install dependency by yourself. +When the network is connected, dependency items are automatically downloaded during MindSpore installation. (For details about the dependency, see required_package in [setup.py](https://gitee.com/mindspore/mindspore/blob/br_base/setup.py)). In other cases, you need to install dependency by yourself. ## Installation Verification diff --git a/install/mindspore_gpu_install_nightly.md b/install/mindspore_gpu_install_nightly.md index b60bd40b6c..fc237bee30 100644 --- a/install/mindspore_gpu_install_nightly.md +++ b/install/mindspore_gpu_install_nightly.md @@ -15,7 +15,7 @@ -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_gpu_install_pip.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_gpu_install_pip.md) MindSpore Nightly是包含当前最新功能与bugfix的预览版本,但是可能未经完整的测试与验证,希望体验最新功能或者问题修复的用户可以使用该版本。 @@ -180,7 +180,7 @@ pip install mindspore-dev -i https://repo.huaweicloud.com/repository/pypi/simple 其中: - MindSpore Nightly支持CUDA11.1、11.6的任意版本,启动时会根据当前环境中安装的CUDA版本自动适配。 -- 在联网状态下,安装whl包时会自动下载mindspore安装包的依赖项(依赖项详情参见[setup.py](https://gitee.com/mindspore/mindspore/blob/master/setup.py)中的required_package),其余情况需自行安装依赖。 +- 在联网状态下,安装whl包时会自动下载mindspore安装包的依赖项(依赖项详情参见[setup.py](https://gitee.com/mindspore/mindspore/blob/br_base/setup.py)中的required_package),其余情况需自行安装依赖。 - pip会自动安装当前最新版本的MindSpore Nightly,如果需要安装指定版本,请参照下方升级MindSpore版本相关指导,在下载时手动指定版本。 ## 验证是否成功安装 diff --git a/install/mindspore_gpu_install_nightly_en.md b/install/mindspore_gpu_install_nightly_en.md index ac0d181358..ae41b30de8 100644 --- a/install/mindspore_gpu_install_nightly_en.md +++ b/install/mindspore_gpu_install_nightly_en.md @@ -15,7 +15,7 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_gpu_install_pip_en.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_gpu_install_pip_en.md) MindSpore Nightly is a preview version which includes latest features and bugfixes, not fully supported and tested. Install MindSpore Nightly version if you wish to try out the latest changes on MindSpore. @@ -180,7 +180,7 @@ pip install mindspore-dev -i https://repo.huaweicloud.com/repository/pypi/simple Of which, - MindSpore Nightly supports CUDA 11.1 and 11.6, it will configure automatically according to the version of CUDA installed in your environment. 
-- When the network is connected, dependency items are automatically downloaded during .whl package installation. (For details about the dependency, see required_package in [setup.py](https://gitee.com/mindspore/mindspore/blob/master/setup.py)). In other cases, you need to install dependency by yourself. +- When the network is connected, dependency items are automatically downloaded during .whl package installation. (For details about the dependency, see required_package in [setup.py](https://gitee.com/mindspore/mindspore/blob/br_base/setup.py)). In other cases, you need to install dependency by yourself. - pip will be installing the latest version of MindSpore GPU Nightly automatically. If you wish to specify the version to be installed, please refer to the instruction below regarding to version update, and specify version manually. ## Installation Verification diff --git a/install/mindspore_gpu_install_pip.md b/install/mindspore_gpu_install_pip.md index a13690feb2..2be56f6f18 100644 --- a/install/mindspore_gpu_install_pip.md +++ b/install/mindspore_gpu_install_pip.md @@ -15,7 +15,7 @@ -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_gpu_install_pip.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_gpu_install_pip.md) 本文档介绍如何在GPU环境的Linux系统上,使用pip方式快速安装MindSpore。下面以Ubuntu 18.04为例说明MindSpore安装步骤。 @@ -186,7 +186,7 @@ pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/${MS_VERSION}/Mi pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/${MS_VERSION}/MindSpore/unified/x86_64/mindspore-${MS_VERSION/-/}-cp311-cp311-linux_x86_64.whl --trusted-host ms-release.obs.cn-north-4.myhuaweicloud.com -i https://repo.huaweicloud.com/repository/pypi/simple/ ``` -在联网状态下,安装MindSpore时会自动下载MindSpore安装包的依赖项(依赖项详情参见[setup.py](https://gitee.com/mindspore/mindspore/blob/master/setup.py)中的required_package),其余情况需自行安装依赖。 +在联网状态下,安装MindSpore时会自动下载MindSpore安装包的依赖项(依赖项详情参见[setup.py](https://gitee.com/mindspore/mindspore/blob/br_base/setup.py)中的required_package),其余情况需自行安装依赖。 ## 验证是否成功安装 diff --git a/install/mindspore_gpu_install_pip_en.md b/install/mindspore_gpu_install_pip_en.md index 35cf09656d..c5a11a2b6c 100644 --- a/install/mindspore_gpu_install_pip_en.md +++ b/install/mindspore_gpu_install_pip_en.md @@ -15,7 +15,7 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_gpu_install_pip_en.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_gpu_install_pip_en.md) This document describes how to install MindSpore by pip on Linux in a GPU environment. The following takes Ubuntu 18.04 as an example to describe how to install MindSpore. 
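Before the GPU install steps that follow, it can help to confirm that the local CUDA setup matches one of the versions these pages mention (11.1 or 11.6); the commands below are common checks, not steps mandated by the patched documents.

```bash
# Optional environment checks (illustrative only).
nvcc --version   # CUDA toolkit version
nvidia-smi       # driver version and visible GPUs
```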
@@ -186,7 +186,7 @@ pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/${MS_VERSION}/Mi pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/${MS_VERSION}/MindSpore/unified/x86_64/mindspore-${MS_VERSION/-/}-cp311-cp311-linux_x86_64.whl --trusted-host ms-release.obs.cn-north-4.myhuaweicloud.com -i https://repo.huaweicloud.com/repository/pypi/simple/ ``` -When the network is connected, dependency items are automatically downloaded during MindSpore installation. (For details about the dependency, see required_package in [setup.py](https://gitee.com/mindspore/mindspore/blob/master/setup.py)). In other cases, you need to install dependency by yourself. +When the network is connected, dependency items are automatically downloaded during MindSpore installation. (For details about the dependency, see required_package in [setup.py](https://gitee.com/mindspore/mindspore/blob/br_base/setup.py)). In other cases, you need to install dependency by yourself. ## Installation Verification diff --git a/install/mindspore_gpu_install_source.md b/install/mindspore_gpu_install_source.md index 47bfa331af..5e280a3578 100644 --- a/install/mindspore_gpu_install_source.md +++ b/install/mindspore_gpu_install_source.md @@ -20,7 +20,7 @@ -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_gpu_install_source.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_gpu_install_source.md) 本文档介绍如何在GPU环境的Linux系统上,使用源码编译方式快速安装MindSpore。下面以Ubuntu 18.04为例说明MindSpore编译安装步骤。 @@ -223,7 +223,7 @@ cd - ## 从代码仓下载源码 ```bash -git clone https://gitee.com/mindspore/mindspore.git +git clone -b br_base https://gitee.com/mindspore/mindspore.git ``` ## 编译MindSpore @@ -257,7 +257,7 @@ bash build.sh -e gpu -S on pip install output/mindspore-*.whl -i https://repo.huaweicloud.com/repository/pypi/simple/ ``` -在联网状态下,安装MindSpore时会自动下载MindSpore安装包的依赖项(依赖项详情参见[setup.py](https://gitee.com/mindspore/mindspore/blob/master/setup.py)中的required_package),其余情况需自行安装依赖。 +在联网状态下,安装MindSpore时会自动下载MindSpore安装包的依赖项(依赖项详情参见[setup.py](https://gitee.com/mindspore/mindspore/blob/br_base/setup.py)中的required_package),其余情况需自行安装依赖。 ## 验证是否成功安装 diff --git a/install/mindspore_gpu_install_source_en.md b/install/mindspore_gpu_install_source_en.md index 363f546606..646a46c0e9 100644 --- a/install/mindspore_gpu_install_source_en.md +++ b/install/mindspore_gpu_install_source_en.md @@ -20,7 +20,7 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/install/mindspore_gpu_install_source_en.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/mindspore_gpu_install_source_en.md) This document describes how to install MindSpore by compiling source code on Linux in a GPU environment. The following takes Ubuntu 18.04 as an example to describe how to install MindSpore. 
@@ -223,7 +223,7 @@ cd - ## Downloading the Source Code from the Code Repository ```bash -git clone https://gitee.com/mindspore/mindspore.git +git clone -b br_base https://gitee.com/mindspore/mindspore.git ``` ## Compiling MindSpore @@ -257,7 +257,7 @@ Where: pip install output/mindspore-*.whl -i https://repo.huaweicloud.com/repository/pypi/simple/ ``` -When the network is connected, dependencies of MindSpore are automatically downloaded during the .whl package installation. (For details about the dependency, see required_package in [setup.py](https://gitee.com/mindspore/mindspore/blob/master/setup.py)). In other cases, you need to install dependencies by yourself. +When the network is connected, dependencies of MindSpore are automatically downloaded during the .whl package installation. (For details about the dependency, see required_package in [setup.py](https://gitee.com/mindspore/mindspore/blob/br_base/setup.py)). In other cases, you need to install dependencies by yourself. ## Installation Verification diff --git a/install/third_party/msys_software_install.md b/install/third_party/msys_software_install.md index c92d8ba20c..c2fb545ca6 100644 --- a/install/third_party/msys_software_install.md +++ b/install/third_party/msys_software_install.md @@ -1,6 +1,6 @@ # Windows上安装MSYS2软件 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/install/third_party/msys_software_install.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/third_party/msys_software_install.md) 本文档介绍如何在Windows系统上,安装MSYS2软件的步骤。 diff --git a/install/third_party/msys_software_install_en.md b/install/third_party/msys_software_install_en.md index 841d7e1135..d1d7a4374d 100644 --- a/install/third_party/msys_software_install_en.md +++ b/install/third_party/msys_software_install_en.md @@ -1,6 +1,6 @@ # Installing MSYS2 Software on Windows -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/install/third_party/msys_software_install_en.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/third_party/msys_software_install_en.md) This document describes the steps on how to install the MSYS2 software on a Windows system. 
diff --git a/install/third_party/third_party_cpu_install.md b/install/third_party/third_party_cpu_install.md index 8bad15e827..f775056443 100644 --- a/install/third_party/third_party_cpu_install.md +++ b/install/third_party/third_party_cpu_install.md @@ -1,6 +1,6 @@ # 源码编译方式安装MindSpore CPU版本(含第三方依赖) -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/install/third_party/third_party_cpu_install.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/install/third_party/third_party_cpu_install.md) 作者:[damon0626](https://gitee.com/damon0626) @@ -339,7 +339,7 @@ ms-sd@mssd:~$ sudo apt-get install git (1)从代码仓库下载源码 ```text -ms-sd@mssd:~$ git clone https://gitee.com/mindspore/mindspore.git +ms-sd@mssd:~$ git clone -b br_base https://gitee.com/mindspore/mindspore.git ``` (2)安装依赖(根据编译过程中报错,整理如下) diff --git a/tools/generate_html/run.py b/tools/generate_html/run.py index cebecc4d10..dcb3d29382 100644 --- a/tools/generate_html/run.py +++ b/tools/generate_html/run.py @@ -113,7 +113,7 @@ def main(version, user, pd, WGETDIR, release_url, generate_list): # 读取json文件数据 if version == "daily" or os.path.exists(os.path.join(os.path.dirname(__file__), "daily_dev.json")): flag_dev = 1 - with open(os.path.join(os.path.dirname(__file__), "daily_dev.json"), 'r+', encoding='utf-8') as f: + with open(os.path.join(os.path.dirname(__file__), "daily_dev.json"), 'r+', encoding='utf-8') as g: data = json.load(g) else: flag_dev = 0 diff --git a/tutorials/source_en/beginner/accelerate_with_static_graph.md b/tutorials/source_en/beginner/accelerate_with_static_graph.md index cecbf71c56..0257034340 100644 --- a/tutorials/source_en/beginner/accelerate_with_static_graph.md +++ b/tutorials/source_en/beginner/accelerate_with_static_graph.md @@ -1,6 +1,6 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/beginner/accelerate_with_static_graph.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/beginner/accelerate_with_static_graph.md) -[Introduction](https://www.mindspore.cn/tutorials/en/master/beginner/introduction.html) || [Quick Start](https://www.mindspore.cn/tutorials/en/master/beginner/quick_start.html) || [Tensor](https://www.mindspore.cn/tutorials/en/master/beginner/tensor.html) || [Data Loading and Processing](https://www.mindspore.cn/tutorials/en/master/beginner/dataset.html) || [Model](https://www.mindspore.cn/tutorials/en/master/beginner/model.html) || [Autograd](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/beginner/autograd.md) || [Train](https://www.mindspore.cn/tutorials/en/master/beginner/train.html) || [Save and Load](https://www.mindspore.cn/tutorials/en/master/beginner/save_load.html) || **Accelerating with Static Graphs** +[Introduction](https://www.mindspore.cn/tutorials/en/br_base/beginner/introduction.html) || [Quick Start](https://www.mindspore.cn/tutorials/en/br_base/beginner/quick_start.html) || [Tensor](https://www.mindspore.cn/tutorials/en/br_base/beginner/tensor.html) || [Data Loading and 
Processing](https://www.mindspore.cn/tutorials/en/br_base/beginner/dataset.html) || [Model](https://www.mindspore.cn/tutorials/en/br_base/beginner/model.html) || [Autograd](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/beginner/autograd.md) || [Train](https://www.mindspore.cn/tutorials/en/br_base/beginner/train.html) || [Save and Load](https://www.mindspore.cn/tutorials/en/br_base/beginner/save_load.html) || **Accelerating with Static Graphs** # Accelerating with Static Graphs @@ -68,7 +68,7 @@ print(output) ### Static Graph Mode -Compared to dynamic graphs, static graphs are characterized by separating the construction of the computational graph from the actual computation (Define and run). For more information on how the static graph model works, see [Static Graph Syntax Support](https://www.mindspore.cn/tutorials/en/master/compile/static_graph.html#overview). +Compared to dynamic graphs, static graphs are characterized by separating the construction of the computational graph from the actual computation (Define and run). For more information on how the static graph model works, see [Static Graph Syntax Support](https://www.mindspore.cn/tutorials/en/br_base/compile/static_graph.html#overview). In MindSpore, the static graph mode is also known as Graph mode. In Graph mode, based on techniques such as graph optimization and whole computational graph sinking, the compiler can globally optimize for graphs and obtain better performance, so it is more suitable for scenarios where the network is fixed and high performance is required. @@ -127,7 +127,7 @@ print(output) The MindSpore compiler is focused on the computation of Tensor data and its differential processing. Therefore operations using the MindSpore API and based on Tensor objects are more suitable for static graph compilation optimization. Other operations can be partially compiled into the graph, but the actual optimization is limited. In addition, the static graph mode compiles first and then executes, resulting in compilation time consumption. As a result, there may be no need to use static graph acceleration if the function does not need to be executed repeatedly. -For an example of using static graphs for network compilation, see [Network Build](https://www.mindspore.cn/tutorials/en/master/beginner/model.html). +For an example of using static graphs for network compilation, see [Network Build](https://www.mindspore.cn/tutorials/en/br_base/beginner/model.html). ## Static Graph Mode Startup Method @@ -353,8 +353,8 @@ print(output) ## Syntax Constraints for Static Graph -In Graph mode, Python code is not executed by the Python interpreter, but the code is compiled into a static computational graph and then the static computational graph is executed. As a result, the compiler cannot support the global Python syntax. MindSpore static graph compiler maintains a subset of common Python syntax to support neural network construction and training. For more details, see [Static Graph Syntax Support](https://www.mindspore.cn/tutorials/en/master/compile/static_graph.html). +In Graph mode, Python code is not executed by the Python interpreter, but the code is compiled into a static computational graph and then the static computational graph is executed. As a result, the compiler cannot support the global Python syntax. MindSpore static graph compiler maintains a subset of common Python syntax to support neural network construction and training. 
For more details, see [Static Graph Syntax Support](https://www.mindspore.cn/tutorials/en/br_base/compile/static_graph.html). ## Advanced Programming Techniques for Static Graphs -Using static graph advanced programming techniques can effectively improve the compilation efficiency as well as the execution efficiency, and can make the program run more stably. For details, please refer to [Advanced Programming Techniques with Static Graphs](https://www.mindspore.cn/tutorials/en/master/compile/static_graph_expert_programming.html). +Using static graph advanced programming techniques can effectively improve the compilation efficiency as well as the execution efficiency, and can make the program run more stably. For details, please refer to [Advanced Programming Techniques with Static Graphs](https://www.mindspore.cn/tutorials/en/br_base/compile/static_graph_expert_programming.html). diff --git a/tutorials/source_en/beginner/autograd.md b/tutorials/source_en/beginner/autograd.md index 1f5589fc07..9d62094119 100644 --- a/tutorials/source_en/beginner/autograd.md +++ b/tutorials/source_en/beginner/autograd.md @@ -1,12 +1,12 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/beginner/autograd.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/beginner/autograd.md) -[Introduction](https://www.mindspore.cn/tutorials/en/master/beginner/introduction.html) || [Quick Start](https://www.mindspore.cn/tutorials/en/master/beginner/quick_start.html) || [Tensor](https://www.mindspore.cn/tutorials/en/master/beginner/tensor.html) || [Data Loading and Processing](https://www.mindspore.cn/tutorials/en/master/beginner/dataset.html) || [Model](https://www.mindspore.cn/tutorials/en/master/beginner/model.html) || **Autograd** || [Train](https://www.mindspore.cn/tutorials/en/master/beginner/train.html) || [Save and Load](https://www.mindspore.cn/tutorials/en/master/beginner/save_load.html) || [Accelerating with Static Graphs](https://www.mindspore.cn/tutorials/en/master/beginner/accelerate_with_static_graph.html) +[Introduction](https://www.mindspore.cn/tutorials/en/br_base/beginner/introduction.html) || [Quick Start](https://www.mindspore.cn/tutorials/en/br_base/beginner/quick_start.html) || [Tensor](https://www.mindspore.cn/tutorials/en/br_base/beginner/tensor.html) || [Data Loading and Processing](https://www.mindspore.cn/tutorials/en/br_base/beginner/dataset.html) || [Model](https://www.mindspore.cn/tutorials/en/br_base/beginner/model.html) || **Autograd** || [Train](https://www.mindspore.cn/tutorials/en/br_base/beginner/train.html) || [Save and Load](https://www.mindspore.cn/tutorials/en/br_base/beginner/save_load.html) || [Accelerating with Static Graphs](https://www.mindspore.cn/tutorials/en/br_base/beginner/accelerate_with_static_graph.html) # Automatic Differentiation The training of the neural network mainly uses the back propagation algorithm. Model predictions (logits) and the correct labels are fed into the loss function to obtain the loss, and then the back propagation calculation is performed to obtain the gradients, which are finally updated to the model parameters. 
Automatic differentiation is able to calculate the value of the derivative of a derivable function at a point and is a generalization of the backpropagation algorithm. The main problem solved by automatic differentiation is to decompose a complex mathematical operation into a series of simple basic operations. The function shields the user from a large number of derivative details and processes, which greatly reduces the threshold of using the framework. -MindSpore uses the design philosophy of functional auto-differentiation to provide auto-differentiation interfaces [mindspore.grad](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.grad.html) and [mindspore.value_and_grad](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.value_and_grad.html) that are closer to the mathematical semantics. We introduce it below by using a simple single-level linear transform model. +MindSpore uses the design philosophy of functional auto-differentiation to provide auto-differentiation interfaces [mindspore.grad](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.grad.html) and [mindspore.value_and_grad](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.value_and_grad.html) that are closer to the mathematical semantics. We introduce it below by using a simple single-level linear transform model. ```python import numpy as np @@ -18,7 +18,7 @@ from mindspore import ops, nn, Tensor, Parameter Computing graphs are a way to represent mathematical functions in a graph-theoretic language and a unified way to represent neural network models in a deep learning framework. We will construct computing functions and neural networks based on the following computing graphs. -![compute-graph](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/beginner/images/comp-graph.png) +![compute-graph](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/beginner/images/comp-graph.png) In this model, $x$ is the input, $y$ is the correct value, and $w$ and $b$ are the parameters to be optimized. @@ -32,7 +32,7 @@ b = Parameter(Tensor(np.random.randn(3,), mindspore.float32), name='b') # bias We construct the computing function based on the computing process described by the computing graphs. -[binary_cross_entropy_with_logits](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.binary_cross_entropy_with_logits.html) is a loss function, +[binary_cross_entropy_with_logits](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.binary_cross_entropy_with_logits.html) is a loss function, which computes binary cross entropy between the logits and the label. ```python @@ -114,7 +114,7 @@ print(grads) [ 1.06568694e+00, 1.05373347e+00, 1.30146706e+00]]), Tensor(shape=[3], dtype=Float32, value= [ 1.06568694e+00, 1.05373347e+00, 1.30146706e+00])) ``` -You can see that the gradient values corresponding to $w$ and $b$ have changed. At this point, if you want to block out the effect of z on the gradient, i.e., still only find the derivative of the parameter with respect to loss, you can use the [mindspore.ops.stop_gradient](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.stop_gradient.html) interface to truncate the gradient here. We add the `function` implementation to `stop_gradient` and execute it. +You can see that the gradient values corresponding to $w$ and $b$ have changed. 
At this point, if you want to block out the effect of z on the gradient, i.e., still only find the derivative of the parameter with respect to loss, you can use the [mindspore.ops.stop_gradient](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.stop_gradient.html) interface to truncate the gradient here. We add the `function` implementation to `stop_gradient` and execute it. ```python def function_stop_gradient(x, y, w, b): diff --git a/tutorials/source_en/beginner/dataset.md b/tutorials/source_en/beginner/dataset.md index 9989fb7fc0..175d4e95f0 100644 --- a/tutorials/source_en/beginner/dataset.md +++ b/tutorials/source_en/beginner/dataset.md @@ -1,20 +1,20 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/beginner/dataset.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/beginner/dataset.md) -[Introduction](https://www.mindspore.cn/tutorials/en/master/beginner/introduction.html) || [Quick Start](https://www.mindspore.cn/tutorials/en/master/beginner/quick_start.html) || [Tensor](https://www.mindspore.cn/tutorials/en/master/beginner/tensor.html) || **Data Loading and Processing** || [Model](https://www.mindspore.cn/tutorials/en/master/beginner/model.html) || [Autograd](https://www.mindspore.cn/tutorials/en/master/beginner/autograd.html) || [Train](https://www.mindspore.cn/tutorials/en/master/beginner/train.html) || [Save and Load](https://www.mindspore.cn/tutorials/en/master/beginner/save_load.html) || [Accelerating with Static Graphs](https://www.mindspore.cn/tutorials/en/master/beginner/accelerate_with_static_graph.html) +[Introduction](https://www.mindspore.cn/tutorials/en/br_base/beginner/introduction.html) || [Quick Start](https://www.mindspore.cn/tutorials/en/br_base/beginner/quick_start.html) || [Tensor](https://www.mindspore.cn/tutorials/en/br_base/beginner/tensor.html) || **Data Loading and Processing** || [Model](https://www.mindspore.cn/tutorials/en/br_base/beginner/model.html) || [Autograd](https://www.mindspore.cn/tutorials/en/br_base/beginner/autograd.html) || [Train](https://www.mindspore.cn/tutorials/en/br_base/beginner/train.html) || [Save and Load](https://www.mindspore.cn/tutorials/en/br_base/beginner/save_load.html) || [Accelerating with Static Graphs](https://www.mindspore.cn/tutorials/en/br_base/beginner/accelerate_with_static_graph.html) # Data Loading and Processing Data is the foundation of deep learning, and high-quality data input is beneficial to the entire deep neural network. -MindSpore provides Pipeline-based [Data Engine](https://www.mindspore.cn/docs/en/master/features/data_engine.html) and achieves efficient data preprocessing through `Dataset`, `Transforms` and `Batch` operator. The pipeline nodes are: +MindSpore provides Pipeline-based [Data Engine](https://www.mindspore.cn/docs/en/br_base/features/data_engine.html) and achieves efficient data preprocessing through `Dataset`, `Transforms` and `Batch` operator. The pipeline nodes are: -1. Dataset is the start of Pipeline and is used to load raw data to memory. 
`mindspore.dataset` provides [built-in dataset interfaces](https://www.mindspore.cn/docs/en/master/api_python/mindspore.dataset.loading.html) for loading text, image, audio, etc., and provides [interfaces](https://www.mindspore.cn/docs/en/master/api_python/mindspore.dataset.loading.html#user-defined) for loading customized datasets. +1. Dataset is the start of Pipeline and is used to load raw data to memory. `mindspore.dataset` provides [built-in dataset interfaces](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.loading.html) for loading text, image, audio, etc., and provides [interfaces](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.loading.html#user-defined) for loading customized datasets. -2. Data transforms perform further transformation operations on data in memory. [mindspore.dataset.transforms](https://www.mindspore.cn/docs/en/master/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.transforms) provides general data transformation operations, [mindspore.dataset.transforms.vision](https://www.mindspore.cn/docs/en/master/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.vision) provides image data transformation operations, [mindspore.dataset.transforms.text](https://www.mindspore.cn/docs/en/master/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.text) provides text data transformation operations, and [mindspore.dataset.transforms.audio](https://www.mindspore.cn/docs/en/master/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.audio) provides audio data transformation operations. +2. Data transforms perform further transformation operations on data in memory. [mindspore.dataset.transforms](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.transforms) provides general data transformation operations, [mindspore.dataset.transforms.vision](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.vision) provides image data transformation operations, [mindspore.dataset.transforms.text](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.text) provides text data transformation operations, and [mindspore.dataset.transforms.audio](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.audio) provides audio data transformation operations. -3. The dataset batch operation is used to batch the transformed data group for the final neural network training. The batch operation is performed on the dataset object. The interface can be referred to [batch operator](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.MindDataset.html#batch); +3. The dataset batch operation is used to batch the transformed data group for the final neural network training. The batch operation is performed on the dataset object. The interface can be referred to [batch operator](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.MindDataset.html#batch); -4. Dataset iterators output the final data iteratively. The interface can be referred to [iterator](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.MindDataset.html#iterator). +4. Dataset iterators output the final data iteratively. 
The interface can be referred to [iterator](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.MindDataset.html#iterator). In addition, MindSpore's domain development library also provides a large number of preloaded datasets that can be downloaded and used with one click through the API. This tutorial will provide a detailed explanation of different dataset loading methods: custom datasets, standard format datasets, and common datasets, data transforms and batch methods. @@ -31,11 +31,11 @@ import matplotlib.pyplot as plt ## Loading a Dataset -The [mindspore.dataset](https://www.mindspore.cn/docs/en/master/api_python/mindspore.dataset.html) module provides loading APIs for custom datasets, standard format datasets, and commonly used publicly datasets. +The [mindspore.dataset](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.html) module provides loading APIs for custom datasets, standard format datasets, and commonly used publicly datasets. ### Customizing Dataset -For those datasets that MindSpore does not support yet, it is suggested to load data by constructing customized classes or customized generators. [GeneratorDataset](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.GeneratorDataset.html) can help to load dataset based on the logic inside these classes/functions. +For those datasets that MindSpore does not support yet, it is suggested to load data by constructing customized classes or customized generators. [GeneratorDataset](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.GeneratorDataset.html) can help to load dataset based on the logic inside these classes/functions. `GeneratorDataset` supports constructing customized datasets from random-accessible objects, iterable objects and Python generator, which are explained in detail below. @@ -150,7 +150,7 @@ for d in dataset: ### Standard-format Dataset -For those datasets that MindSpore does not support yet, it is suggested to convert the dataset into `MindRecord` format and load it through the [mindspore.dataset.MindDataset](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.MindDataset.html) interface. +For those datasets that MindSpore does not support yet, it is suggested to convert the dataset into `MindRecord` format and load it through the [mindspore.dataset.MindDataset](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.MindDataset.html) interface. Firstly, create a new `MindRecord` format dataset using the `MindRecord` format interface **FileWriter**, where each sample contains three fields: `file_name`, `label`, and `data`. @@ -258,7 +258,7 @@ Usually, the directly-loaded raw data cannot be directly fed into the neural net ### Built-in Transforms -`mindspore.dataset` provides built-in data transforms: [vision transforms](https://www.mindspore.cn/docs/en/master/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.vision), [nlp transforms](https://www.mindspore.cn/docs/en/master/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.text), [audio transforms](https://www.mindspore.cn/docs/en/master/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.audio). 
+`mindspore.dataset` provides built-in data transforms: [vision transforms](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.vision), [nlp transforms](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.text), [audio transforms](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.audio). For example, `Rescale`, `Normalize`, and `HWC2CHW` operations are used for **data** in the Mnist dataset, and `TypeCast` operations are used for **label**. @@ -322,7 +322,7 @@ Packing the dataset into a fixed size `batch` is a compromise method for model o Generally we set a fixed batch size to divide the continuous data into several batches. The batched data is increased by one dimension, and the size is `batch_size`. -![op-batch](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/dataset/images/op_batch.png) +![op-batch](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/dataset/images/op_batch.png) ```python data = ([[1, 2], [3, 4], [5, 6], [7, 8], [9, 10], [11, 12]], [0, 1, 0, 1, 0, 1]) @@ -340,7 +340,7 @@ for data in dataset.create_tuple_iterator(): ## Iterating a Dataset -After the dataset is loaded and processed, the data is generally acquired in an iterative manner and then fed into the neural network for training. You can use the [create_tuple_iterator](https://www.mindspore.cn/docs/en/master/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_tuple_iterator.html) or [create_dict_iterator](https://www.mindspore.cn/docs/en/master/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_dict_iterator.html) interface to create a data iterator to iteratively access data. +After the dataset is loaded and processed, the data is generally acquired in an iterative manner and then fed into the neural network for training. You can use the [create_tuple_iterator](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_tuple_iterator.html) or [create_dict_iterator](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_dict_iterator.html) interface to create a data iterator to iteratively access data. The default type of data to be accessed is `Tensor`. If `output_numpy=True` is set, the type of data to be accessed is `Numpy`. 
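As a concrete illustration of the iteration interfaces described above, the following is a small self-contained sketch; the toy `NumpySlicesDataset` columns and values are made up for the example:

```python
import numpy as np
import mindspore.dataset as ds

# A tiny two-column in-memory dataset: "data" and "label".
data = np.array([[1, 2], [3, 4], [5, 6]], dtype=np.float32)
label = np.array([0, 1, 0], dtype=np.int32)
dataset = ds.NumpySlicesDataset((data, label), column_names=["data", "label"], shuffle=False)

# create_tuple_iterator yields mindspore.Tensor objects by default.
for d, l in dataset.create_tuple_iterator():
    print(d, l)

# create_dict_iterator with output_numpy=True yields NumPy arrays keyed by column name.
for row in dataset.create_dict_iterator(output_numpy=True):
    print(row["data"], row["label"])
```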
diff --git a/tutorials/source_en/beginner/introduction.md b/tutorials/source_en/beginner/introduction.md index 06a15e52ef..15fb6a86f1 100644 --- a/tutorials/source_en/beginner/introduction.md +++ b/tutorials/source_en/beginner/introduction.md @@ -1,6 +1,6 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/beginner/introduction.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/beginner/introduction.md) -**Introduction** || [Quick Start](https://www.mindspore.cn/tutorials/en/master/beginner/quick_start.html#) || [Tensor](https://www.mindspore.cn/tutorials/en/master/beginner/tensor.html) || [Data Loading and Processing](https://www.mindspore.cn/tutorials/en/master/beginner/dataset.html) || [Model](https://www.mindspore.cn/tutorials/en/master/beginner/model.html) || [Autograd](https://www.mindspore.cn/tutorials/en/master/beginner/autograd.html) || [Train](https://www.mindspore.cn/tutorials/en/master/beginner/train.html) || [Save and Load](https://www.mindspore.cn/tutorials/en/master/beginner/save_load.html) || [Accelerating with Static Graphs](https://www.mindspore.cn/tutorials/en/master/beginner/accelerate_with_static_graph.html) +**Introduction** || [Quick Start](https://www.mindspore.cn/tutorials/en/br_base/beginner/quick_start.html#) || [Tensor](https://www.mindspore.cn/tutorials/en/br_base/beginner/tensor.html) || [Data Loading and Processing](https://www.mindspore.cn/tutorials/en/br_base/beginner/dataset.html) || [Model](https://www.mindspore.cn/tutorials/en/br_base/beginner/model.html) || [Autograd](https://www.mindspore.cn/tutorials/en/br_base/beginner/autograd.html) || [Train](https://www.mindspore.cn/tutorials/en/br_base/beginner/train.html) || [Save and Load](https://www.mindspore.cn/tutorials/en/br_base/beginner/save_load.html) || [Accelerating with Static Graphs](https://www.mindspore.cn/tutorials/en/br_base/beginner/accelerate_with_static_graph.html) # Overview @@ -16,7 +16,7 @@ The overall architecture of MindSpore is as follows: 2. Deep Learning + Scientific Computing: Provides developers with various Python interfaces required for AI model development, maximizing compatibility with developers' habits in the Python ecosystem; 3. Core: As the core of the AI framework, it builds the Tensor data structure, basic operation operators, autograd module for automatic differentiation, Parallel module for parallel computing, compile capabilities, and runtime management module. 
-![arch](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/docs/mindspore/source_en/features/images/arch_en.png) +![arch](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/docs/mindspore/source_en/features/images/arch_en.png) ### Design Philosophy @@ -30,7 +30,7 @@ Huawei Atlas AI computing solution is based on Ascend series AI processors and u The Ascend AI full stack is shown below: -![Ascend full stack](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_en/beginner/images/introduction1.png) +![Ascend full stack](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_en/beginner/images/introduction1.png) The functions of each module are described as follows: diff --git a/tutorials/source_en/beginner/model.md b/tutorials/source_en/beginner/model.md index 0f6a11d6ec..ead15c77f4 100644 --- a/tutorials/source_en/beginner/model.md +++ b/tutorials/source_en/beginner/model.md @@ -1,10 +1,10 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/beginner/model.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/beginner/model.md) -[Introduction](https://www.mindspore.cn/tutorials/en/master/beginner/introduction.html) || [Quick Start](https://www.mindspore.cn/tutorials/en/master/beginner/quick_start.html) || [Tensor](https://www.mindspore.cn/tutorials/en/master/beginner/tensor.html) || [Data Loading and Processing](https://www.mindspore.cn/tutorials/en/master/beginner/dataset.html) || **Model** || [Autograd](https://www.mindspore.cn/tutorials/en/master/beginner/autograd.html) || [Train](https://www.mindspore.cn/tutorials/en/master/beginner/train.html) || [Save and Load](https://www.mindspore.cn/tutorials/en/master/beginner/save_load.html) || [Accelerating with Static Graphs](https://www.mindspore.cn/tutorials/en/master/beginner/accelerate_with_static_graph.html) +[Introduction](https://www.mindspore.cn/tutorials/en/br_base/beginner/introduction.html) || [Quick Start](https://www.mindspore.cn/tutorials/en/br_base/beginner/quick_start.html) || [Tensor](https://www.mindspore.cn/tutorials/en/br_base/beginner/tensor.html) || [Data Loading and Processing](https://www.mindspore.cn/tutorials/en/br_base/beginner/dataset.html) || **Model** || [Autograd](https://www.mindspore.cn/tutorials/en/br_base/beginner/autograd.html) || [Train](https://www.mindspore.cn/tutorials/en/br_base/beginner/train.html) || [Save and Load](https://www.mindspore.cn/tutorials/en/br_base/beginner/save_load.html) || [Accelerating with Static Graphs](https://www.mindspore.cn/tutorials/en/br_base/beginner/accelerate_with_static_graph.html) # Building a Network -The neural network model consists of neural network layers and Tensor operations. [mindspore.nn](https://www.mindspore.cn/docs/en/master/api_python/mindspore.nn.html) provides common neural network layer implementations, and the [Cell](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.Cell.html) class in MindSpore is the base class for building all networks and is the basic unit of the network. `Cell`, a neural network model, is composed of different sub-`Cells`. 
Using such a nested structure, the neural network structure can be constructed and managed simply by using object-oriented programming thinking. +The neural network model consists of neural network layers and Tensor operations. [mindspore.nn](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.nn.html) provides common neural network layer implementations, and the [Cell](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.Cell.html) class in MindSpore is the base class for building all networks and is the basic unit of the network. `Cell`, a neural network model, is composed of different sub-`Cells`. Using such a nested structure, the neural network structure can be constructed and managed simply by using object-oriented programming thinking. In the following we will construct a neural network model for the Mnist dataset classification. @@ -17,7 +17,7 @@ from mindspore import nn, ops When define a neural network, we can inherit the `nn.Cell` class, instantiate and manage the state of the sub-Cell in the `__init__` method, and implement the Tensor operation in the `construct` method. -> `construct` means neural network (computational graph) construction. For more details, see [Accelerating with Static Graphs](https://www.mindspore.cn/tutorials/en/master/beginner/accelerate_with_static_graph.html). +> `construct` means neural network (computational graph) construction. For more details, see [Accelerating with Static Graphs](https://www.mindspore.cn/tutorials/en/br_base/beginner/accelerate_with_static_graph.html). ```python class Network(nn.Cell): @@ -101,7 +101,7 @@ print(input_image.shape) ### nn.Flatten -Initialize the [nn.Flatten](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.Flatten.html) layer and convert a 28x28 2D tensor into a contiguous array of size 784. +Initialize the [nn.Flatten](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.Flatten.html) layer and convert a 28x28 2D tensor into a contiguous array of size 784. ```python flatten = nn.Flatten() @@ -115,7 +115,7 @@ print(flat_image.shape) ### nn.Dense -[nn.Dense](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.Dense.html) is the fully connected layer, which linearly transforms the input by using weights and deviations. +[nn.Dense](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.Dense.html) is the fully connected layer, which linearly transforms the input by using weights and deviations. ```python layer1 = nn.Dense(in_channels=28*28, out_channels=20) @@ -129,7 +129,7 @@ print(hidden1.shape) ### nn.ReLU -[nn.ReLU](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.ReLU.html) layer adds a nonlinear activation function to the network, to help the neural network learn various complex features. +[nn.ReLU](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.ReLU.html) layer adds a nonlinear activation function to the network, to help the neural network learn various complex features. ```python print(f"Before ReLU: {hidden1}\n\n") @@ -168,7 +168,7 @@ After ReLU: [[0. 0.2939465 0. 0. 0. 0. ### nn.SequentialCell -[nn.SequentialCell](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.SequentialCell.html) is an ordered Cell container. The input Tensor will pass through all the Cells in the defined order, and we can use `nn.SequentialCell` to construct a neural network model quickly. 
+[nn.SequentialCell](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.SequentialCell.html) is an ordered Cell container. The input Tensor will pass through all the Cells in the defined order, and we can use `nn.SequentialCell` to construct a neural network model quickly. ```python seq_modules = nn.SequentialCell( @@ -188,7 +188,7 @@ print(logits.shape) ### nn.Softmax -Finally, the value of logits returned by the last fully-connected layer of the neural network is scaled to \[0, 1\] by using [nn.Softmax](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.Softmax.html), indicating the predicted probability of each category. The dimensional values specified by `axis` sum to 1. +Finally, the value of logits returned by the last fully-connected layer of the neural network is scaled to \[0, 1\] by using [nn.Softmax](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.Softmax.html), indicating the predicted probability of each category. The dimensional values specified by `axis` sum to 1. ```python softmax = nn.Softmax(axis=1) @@ -251,4 +251,4 @@ Size: (10,) Values : [0. 0.] ``` -For more built-in neural network layers, see [mindspore.nn API](https://www.mindspore.cn/docs/en/master/api_python/mindspore.nn.html). +For more built-in neural network layers, see [mindspore.nn API](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.nn.html). diff --git a/tutorials/source_en/beginner/quick_start.md b/tutorials/source_en/beginner/quick_start.md index 268e8a6461..064bb382ce 100644 --- a/tutorials/source_en/beginner/quick_start.md +++ b/tutorials/source_en/beginner/quick_start.md @@ -1,6 +1,6 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/beginner/quick_start.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/beginner/quick_start.md) -[Introduction](https://www.mindspore.cn/tutorials/en/master/beginner/introduction.html) || **Quick Start** || [Tensor](https://www.mindspore.cn/tutorials/en/master/beginner/tensor.html) || [Data Loading and Processing](https://www.mindspore.cn/tutorials/en/master/beginner/dataset.html) || [Model](https://www.mindspore.cn/tutorials/en/master/beginner/model.html) || [Autograd](https://www.mindspore.cn/tutorials/en/master/beginner/autograd.html) || [Train](https://www.mindspore.cn/tutorials/en/master/beginner/train.html) || [Save and load](https://www.mindspore.cn/tutorials/en/master/beginner/save_load.html) || [Accelerating with Static Graphs](https://www.mindspore.cn/tutorials/en/master/beginner/accelerate_with_static_graph.html) +[Introduction](https://www.mindspore.cn/tutorials/en/br_base/beginner/introduction.html) || **Quick Start** || [Tensor](https://www.mindspore.cn/tutorials/en/br_base/beginner/tensor.html) || [Data Loading and Processing](https://www.mindspore.cn/tutorials/en/br_base/beginner/dataset.html) || [Model](https://www.mindspore.cn/tutorials/en/br_base/beginner/model.html) || [Autograd](https://www.mindspore.cn/tutorials/en/br_base/beginner/autograd.html) || [Train](https://www.mindspore.cn/tutorials/en/br_base/beginner/train.html) || [Save and load](https://www.mindspore.cn/tutorials/en/br_base/beginner/save_load.html) || [Accelerating with Static 
Graphs](https://www.mindspore.cn/tutorials/en/br_base/beginner/accelerate_with_static_graph.html) # Quick Start @@ -15,7 +15,7 @@ from mindspore.dataset import MnistDataset ## Processing a Dataset -MindSpore provides Pipeline-based [Data Engine](https://www.mindspore.cn/docs/zh-CN/master/features/data_engine.html) and achieves efficient data preprocessing through [Data Loading and Processing](https://www.mindspore.cn/tutorials/en/master/beginner/dataset.html). In this tutorial, we use the Mnist dataset and pre-process dataset by using the data transformations provided by `mindspore.dataset`, after automatically downloaded. +MindSpore provides Pipeline-based [Data Engine](https://www.mindspore.cn/docs/zh-CN/br_base/features/data_engine.html) and achieves efficient data preprocessing through [Data Loading and Processing](https://www.mindspore.cn/tutorials/en/br_base/beginner/dataset.html). In this tutorial, we use the Mnist dataset and pre-process dataset by using the data transformations provided by `mindspore.dataset`, after automatically downloaded. > The sample code in this chapter relies on `download`, which can be installed by using the command `pip install download`. If this document is run as Notebook, you need to restart the kernel after installation to execute subsequent code. @@ -88,7 +88,7 @@ train_dataset = datapipe(train_dataset, 64) test_dataset = datapipe(test_dataset, 64) ``` -[create_tuple_iterator](https://www.mindspore.cn/docs/en/master/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_tuple_iterator.html) or [create_dict_iterator](https://www.mindspore.cn/docs/en/master/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_dict_iterator.html) could be used to iterate over the dataset, printing the shape and dtype for `image` and `label`. +[create_tuple_iterator](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_tuple_iterator.html) or [create_dict_iterator](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_dict_iterator.html) could be used to iterate over the dataset, printing the shape and dtype for `image` and `label`. ```python for image, label in test_dataset.create_tuple_iterator(): @@ -114,11 +114,11 @@ Shape of image [N, C, H, W]: (64, 1, 28, 28) Float32 Shape of label: (64,) Int32 ``` -For more detailed information, see [Data Loading and Processing](https://www.mindspore.cn/tutorials/en/master/beginner/dataset.html). +For more detailed information, see [Data Loading and Processing](https://www.mindspore.cn/tutorials/en/br_base/beginner/dataset.html). ## Building Network -`mindspore.nn` class is the base class for building all networks and is the basic unit of the network. When the user needs to customize the network, you can inherit the `nn.Cell` class and override the `__init__` method and the `construct` method. `__init__` contains the definitions of all network layers, and `construct` contains the transformation process of the data ([Tensor](https://www.mindspore.cn/tutorials/en/master/beginner/tensor.html)). +`mindspore.nn` class is the base class for building all networks and is the basic unit of the network. When the user needs to customize the network, you can inherit the `nn.Cell` class and override the `__init__` method and the `construct` method. 
`__init__` contains the definitions of all network layers, and `construct` contains the transformation process of the data ([Tensor](https://www.mindspore.cn/tutorials/en/br_base/beginner/tensor.html)). ```python # Define model @@ -156,7 +156,7 @@ Network< > ``` -For more detailed information, see [Model](https://www.mindspore.cn/tutorials/en/master/beginner/model.html). +For more detailed information, see [Model](https://www.mindspore.cn/tutorials/en/br_base/beginner/model.html). ## Training Model @@ -169,8 +169,8 @@ In model training, a complete training process (step) requires the following thr MindSpore uses a functional automatic differentiation mechanism, implemented through the steps above: 1. Define forward calculation function. -2. Obtain the gradient calculation function by function transformation, calling [value_and_grad](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.value_and_grad.html) for details. -3. Define training functions, set to training mode by calling [set_train](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.set_train) for setting of training mode, and perform forward computation, back propagation and parameter optimization. +2. Obtain the gradient calculation function by function transformation, calling [value_and_grad](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.value_and_grad.html) for details. +3. Define training functions, set to training mode by calling [set_train](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.set_train) for setting of training mode, and perform forward computation, back propagation and parameter optimization. ```python # Instantiate loss function and optimizer @@ -280,7 +280,7 @@ Test: Done! ``` -For the detailed information, see [Train](https://www.mindspore.cn/tutorials/en/master/beginner/train.html). +For the detailed information, see [Train](https://www.mindspore.cn/tutorials/en/br_base/beginner/train.html). ## Saving a Model @@ -333,4 +333,4 @@ for data, label in test_dataset: Predicted: "[3 9 6 1 6 7 4 5 2 2]", Actual: "[3 9 6 1 6 7 4 5 2 2]" ``` -For more detailed information, see [Save and Load](https://www.mindspore.cn/tutorials/en/master/beginner/save_load.html). +For more detailed information, see [Save and Load](https://www.mindspore.cn/tutorials/en/br_base/beginner/save_load.html). 
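Putting the three training steps above together, a condensed sketch might look like the following; the tiny dense model, SGD optimizer and random data are placeholders for the tutorial's actual network and Mnist pipeline:

```python
import numpy as np
import mindspore
from mindspore import nn
import mindspore.dataset as ds

# Placeholder model, loss and optimizer on a toy dataset.
model = nn.Dense(4, 3)
loss_fn = nn.CrossEntropyLoss()
optimizer = nn.SGD(model.trainable_params(), learning_rate=1e-2)

x = np.random.randn(8, 4).astype(np.float32)
y = np.random.randint(0, 3, (8,)).astype(np.int32)
train_dataset = ds.NumpySlicesDataset((x, y), column_names=["data", "label"]).batch(4)

# 1. Forward calculation function returning the loss (logits as auxiliary output).
def forward_fn(data, label):
    logits = model(data)
    return loss_fn(logits, label), logits

# 2. Function transformation: also compute gradients w.r.t. the trainable parameters.
grad_fn = mindspore.value_and_grad(forward_fn, None, optimizer.parameters, has_aux=True)

# 3. One training step: forward computation, back propagation, parameter update.
def train_step(data, label):
    (loss, _), grads = grad_fn(data, label)
    optimizer(grads)
    return loss

model.set_train()
for data, label in train_dataset.create_tuple_iterator():
    print(train_step(data, label))
```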
diff --git a/tutorials/source_en/beginner/save_load.md b/tutorials/source_en/beginner/save_load.md index 259b4e4f13..ea9ea1b5f0 100644 --- a/tutorials/source_en/beginner/save_load.md +++ b/tutorials/source_en/beginner/save_load.md @@ -1,6 +1,6 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/beginner/save_load.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/beginner/save_load.md) -[Introduction](https://www.mindspore.cn/tutorials/en/master/beginner/introduction.html) || [Quick Start](https://www.mindspore.cn/tutorials/en/master/beginner/quick_start.html) || [Tensor](https://www.mindspore.cn/tutorials/en/master/beginner/tensor.html) || [Data Loading and Processing](https://www.mindspore.cn/tutorials/en/master/beginner/dataset.html) || [Model](https://www.mindspore.cn/tutorials/en/master/beginner/model.html) || [Autograd](https://www.mindspore.cn/tutorials/en/master/beginner/autograd.html) || [Train](https://www.mindspore.cn/tutorials/en/master/beginner/train.html) || **Save and Load** || [Accelerating with Static Graphs](https://www.mindspore.cn/tutorials/en/master/beginner/accelerate_with_static_graph.html) +[Introduction](https://www.mindspore.cn/tutorials/en/br_base/beginner/introduction.html) || [Quick Start](https://www.mindspore.cn/tutorials/en/br_base/beginner/quick_start.html) || [Tensor](https://www.mindspore.cn/tutorials/en/br_base/beginner/tensor.html) || [Data Loading and Processing](https://www.mindspore.cn/tutorials/en/br_base/beginner/dataset.html) || [Model](https://www.mindspore.cn/tutorials/en/br_base/beginner/model.html) || [Autograd](https://www.mindspore.cn/tutorials/en/br_base/beginner/autograd.html) || [Train](https://www.mindspore.cn/tutorials/en/br_base/beginner/train.html) || **Save and Load** || [Accelerating with Static Graphs](https://www.mindspore.cn/tutorials/en/br_base/beginner/accelerate_with_static_graph.html) # Saving and Loading the Model @@ -27,7 +27,7 @@ def network(): ## Saving and Loading the Model Weight -Save model by using the [mindspore.save_checkpoint](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.save_checkpoint.html) interface, and specify the saving path by passing in the network: +Save model by using the [mindspore.save_checkpoint](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.save_checkpoint.html) interface, and specify the saving path by passing in the network: ```python model = network() @@ -63,7 +63,7 @@ mindspore.export(model, inputs, file_name="model", file_format="MINDIR") > MindIR saves both Checkpoint and model structure, so it needs to define the input Tensor to get the input shape. -The existing MindIR model can be easily loaded through the `load` interface and passed into [mindspore.nn.GraphCell](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.GraphCell.html) for inference. +The existing MindIR model can be easily loaded through the `load` interface and passed into [mindspore.nn.GraphCell](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.GraphCell.html) for inference. > `nn.GraphCell` only supports graph mode. 
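For reference, loading the exported MindIR and running inference through `nn.GraphCell` roughly follows the pattern below; this is a sketch that assumes the `model.mindir` file and the 1x1x28x28 input shape from the export example above:

```python
import numpy as np
import mindspore
from mindspore import nn, Tensor

# nn.GraphCell only supports graph mode.
mindspore.set_context(mode=mindspore.GRAPH_MODE)

# Load the exported MindIR graph and wrap it as a Cell for inference.
graph = mindspore.load("model.mindir")
net = nn.GraphCell(graph)

inputs = Tensor(np.ones((1, 1, 28, 28), dtype=np.float32))
outputs = net(inputs)
print(outputs.shape)
```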
@@ -82,14 +82,14 @@ print(outputs.shape) Not all Python syntax and data types are supported for MindIR export. Unsupported cases will raise errors during export. -1. MindIR export only supports **basic syntax at the STRICT level**. For detailed coverage, refer to [Static Graph Syntax Support Documentation](https://www.mindspore.cn/tutorials/en/master/compile/static_graph.html). +1. MindIR export only supports **basic syntax at the STRICT level**. For detailed coverage, refer to [Static Graph Syntax Support Documentation](https://www.mindspore.cn/tutorials/en/br_base/compile/static_graph.html). 2. Return value data types are limited to: - Python built-in types: `int`, `float`, `bool`, `str`, `tuple`, `list`. - - MindSpore framework types: [Tensor](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.Tensor.html), [Parameter](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.Parameter.html), [COOTensor](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.COOTensor.html), [CSRTensor](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.CSRTensor.html). + - MindSpore framework types: [Tensor](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.Tensor.html), [Parameter](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.Parameter.html), [COOTensor](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.COOTensor.html), [CSRTensor](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.CSRTensor.html). - For example, in the following program, the return value type is [mindspore.dtype](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.dtype.html), which is not supported. As a result, an error is reported when MindIR is exported. + For example, in the following program, the return value type is [mindspore.dtype](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.dtype.html), which is not supported. As a result, an error is reported when MindIR is exported. ```python import mindspore @@ -101,7 +101,7 @@ Not all Python syntax and data types are supported for MindIR export. Unsupporte     return x.dtype ``` -3. In `nn.Cell`'s `construct()` method, random number generators from [mindspore.mint](https://www.mindspore.cn/docs/en/master/api_python/mindspore.mint.html) (e.g., `mint.rand`, `mint.randn`, `mint.randint`, `mint.randperm`) are prohibited. Use equivalent [mindspore.ops](https://www.mindspore.cn/docs/en/master/api_python/mindspore.ops.html) interfaces instead. +3. In `nn.Cell`'s `construct()` method, random number generators from [mindspore.mint](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.mint.html) (e.g., `mint.rand`, `mint.randn`, `mint.randint`, `mint.randperm`) are prohibited. Use equivalent [mindspore.ops](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.ops.html) interfaces instead. 4. `Parameter` objects must be defined either in `nn.Cell`'s `__init__()` method or as function input arguments. Otherwise, MindIR export will fail. For instance, a globally defined `Parameter` (as shown below) triggers an unsupported error. 
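The snippet that item 4 refers to with "as shown below" lies outside this hunk; purely as an illustrative sketch (not the document's original example), the failing and supported patterns look roughly like this:

```python
import numpy as np
import mindspore
from mindspore import nn, Tensor, Parameter

# Unsupported pattern: a Parameter created at module (global) scope and captured
# by construct(). Exporting such a network with mindspore.export is expected to fail.
global_param = Parameter(Tensor(np.ones((2, 2), np.float32)), name="global_param")

class BadNet(nn.Cell):
    def construct(self, x):
        return x + global_param

# Supported pattern: define the Parameter inside __init__ instead.
class GoodNet(nn.Cell):
    def __init__(self):
        super().__init__()
        self.weight = Parameter(Tensor(np.ones((2, 2), np.float32)), name="weight")

    def construct(self, x):
        return x + self.weight

inputs = Tensor(np.ones((2, 2), np.float32))
mindspore.export(GoodNet(), inputs, file_name="good_net", file_format="MINDIR")
```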
diff --git a/tutorials/source_en/beginner/tensor.md b/tutorials/source_en/beginner/tensor.md index eebcc25910..dd8ff073ea 100644 --- a/tutorials/source_en/beginner/tensor.md +++ b/tutorials/source_en/beginner/tensor.md @@ -1,12 +1,12 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/beginner/tensor.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/beginner/tensor.md) -[Introduction](https://www.mindspore.cn/tutorials/en/master/beginner/introduction.html) || [Quick Start](https://www.mindspore.cn/tutorials/en/master/beginner/quick_start.html) || **Tensor** || [Data Loading and Processing](https://www.mindspore.cn/tutorials/en/master/beginner/dataset.html) || [Model](https://www.mindspore.cn/tutorials/en/master/beginner/model.html) || [Autograd](https://www.mindspore.cn/tutorials/en/master/beginner/autograd.html) || [Train](https://www.mindspore.cn/tutorials/en/master/beginner/train.html) || [Save and Load](https://www.mindspore.cn/tutorials/en/master/beginner/save_load.html) || [Accelerating with Static Graphs](https://www.mindspore.cn/tutorials/en/master/beginner/accelerate_with_static_graph.html) +[Introduction](https://www.mindspore.cn/tutorials/en/br_base/beginner/introduction.html) || [Quick Start](https://www.mindspore.cn/tutorials/en/br_base/beginner/quick_start.html) || **Tensor** || [Data Loading and Processing](https://www.mindspore.cn/tutorials/en/br_base/beginner/dataset.html) || [Model](https://www.mindspore.cn/tutorials/en/br_base/beginner/model.html) || [Autograd](https://www.mindspore.cn/tutorials/en/br_base/beginner/autograd.html) || [Train](https://www.mindspore.cn/tutorials/en/br_base/beginner/train.html) || [Save and Load](https://www.mindspore.cn/tutorials/en/br_base/beginner/save_load.html) || [Accelerating with Static Graphs](https://www.mindspore.cn/tutorials/en/br_base/beginner/accelerate_with_static_graph.html) # Tensor Tensor is a multilinear function that can be used to represent linear relationships between vectors, scalars, and other tensors. The basic examples of these linear relations are the inner product, the outer product, the linear map, and the Cartesian product. In the $n$ dimensional space, its coordinates have $n^{r}$ components. Each component is a function of coordinates, and these components are also linearly transformed according to certain rules when the coordinates are transformed. $r$ is called the rank or order of this tensor (not related to the rank or order of the matrix). -A tensor is a special data structure that is similar to arrays and matrices. [Tensor](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.Tensor.html) is the basic data structure in MindSpore network operations. This tutorial describes the attributes and usage of tensors. +A tensor is a special data structure that is similar to arrays and matrices. [Tensor](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.Tensor.html) is the basic data structure in MindSpore network operations. This tutorial describes the attributes and usage of tensors. ```python import numpy as np @@ -51,9 +51,9 @@ There are multiple methods for creating tensors. 
When building a tensor, you can When `init` is used to initialize a tensor, the `init`, `shape`, and `dtype` parameters can be transferred. - - `init`: supports the subclass of [initializer](https://mindspore.cn/docs/en/master/api_python/mindspore.common.initializer.html). For example, [One()](https://www.mindspore.cn/docs/en/master/api_python/mindspore.common.initializer.html#mindspore.common.initializer.One) and [Normal()](https://www.mindspore.cn/docs/en/master/api_python/mindspore.common.initializer.html#mindspore.common.initializer.Normal) below. + - `init`: supports the subclass of [initializer](https://mindspore.cn/docs/en/br_base/api_python/mindspore.common.initializer.html). For example, [One()](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.common.initializer.html#mindspore.common.initializer.One) and [Normal()](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.common.initializer.html#mindspore.common.initializer.Normal) below. - `shape`: supports `list`, `tuple`, and `int`. - - `dtype`: supports [mindspore.dtype](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.dtype.html#mindspore.dtype). + - `dtype`: supports [mindspore.dtype](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.dtype.html#mindspore.dtype). ```python from mindspore.common.initializer import One, Normal @@ -192,7 +192,7 @@ mod: [0. 1. 0.] floordiv: [4. 2. 2.] ``` -[concat](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.concat.html) connects a series of tensors in a given dimension. +[concat](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.concat.html) connects a series of tensors in a given dimension. ```python data1 = Tensor(np.array([[0, 1], [2, 3]]).astype(np.float32)) @@ -212,7 +212,7 @@ shape: (4, 2) ``` -[stack](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.stack.html) combines two tensors from another dimension. +[stack](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.stack.html) combines two tensors from another dimension. ```python data1 = Tensor(np.array([[0, 1], [2, 3]]).astype(np.float32)) @@ -239,7 +239,7 @@ Tensor and NumPy can be converted to each other. ### Tensor to NumPy -Use [Tensor.asnumpy()](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.asnumpy.html) to convert Tensor to NumPy, which is same as tensor building. +Use [Tensor.asnumpy()](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.asnumpy.html) to convert Tensor to NumPy, which is same as tensor building. 
```python t = Tensor([1., 1., 1., 1., 1.]) diff --git a/tutorials/source_en/beginner/train.md b/tutorials/source_en/beginner/train.md index 125510f7d8..d93ae59144 100644 --- a/tutorials/source_en/beginner/train.md +++ b/tutorials/source_en/beginner/train.md @@ -1,6 +1,6 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/beginner/train.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/beginner/train.md) -[Introduction](https://www.mindspore.cn/tutorials/en/master/beginner/introduction.html) || [Quick Start](https://www.mindspore.cn/tutorials/en/master/beginner/quick_start.html) || [Tensor](https://www.mindspore.cn/tutorials/en/master/beginner/tensor.html) || [Data Loading and Processing](https://www.mindspore.cn/tutorials/en/master/beginner/dataset.html) || [Model](https://www.mindspore.cn/tutorials/en/master/beginner/model.html) || [Autograd](https://www.mindspore.cn/tutorials/en/master/beginner/autograd.html) || **Train** || [Save and Load](https://www.mindspore.cn/tutorials/en/master/beginner/save_load.html) || [Accelerating with Static Graphs](https://www.mindspore.cn/tutorials/en/master/beginner/accelerate_with_static_graph.html) +[Introduction](https://www.mindspore.cn/tutorials/en/br_base/beginner/introduction.html) || [Quick Start](https://www.mindspore.cn/tutorials/en/br_base/beginner/quick_start.html) || [Tensor](https://www.mindspore.cn/tutorials/en/br_base/beginner/tensor.html) || [Data Loading and Processing](https://www.mindspore.cn/tutorials/en/br_base/beginner/dataset.html) || [Model](https://www.mindspore.cn/tutorials/en/br_base/beginner/model.html) || [Autograd](https://www.mindspore.cn/tutorials/en/br_base/beginner/autograd.html) || **Train** || [Save and Load](https://www.mindspore.cn/tutorials/en/br_base/beginner/save_load.html) || [Accelerating with Static Graphs](https://www.mindspore.cn/tutorials/en/br_base/beginner/accelerate_with_static_graph.html) # Model Training @@ -15,7 +15,7 @@ After we have the dataset and the model, we can train and evaluate the model. ## Building a Dataset -First load the previous code from [Data Loading and Processing](https://www.mindspore.cn/tutorials/en/master/beginner/dataset.html) to build a dataset. +First load the previous code from [Data Loading and Processing](https://www.mindspore.cn/tutorials/en/br_base/beginner/dataset.html) to build a dataset. ```python import mindspore @@ -59,7 +59,7 @@ Successfully downloaded / unzipped to ./ ## Defining a Neural Network Model -Load the code from [Model](https://www.mindspore.cn/tutorials/en/master/beginner/model.html) to define a neural network model. +Load the code from [Model](https://www.mindspore.cn/tutorials/en/br_base/beginner/model.html) to define a neural network model. ```python class Network(nn.Cell): @@ -108,7 +108,7 @@ learning_rate = 1e-2 The loss function is used to evaluate the error between the model's predictions (logits) and targets (targets). When training a model, a randomly initialized neural network model starts to predict the wrong results. The loss function evaluates how different the predicted results are from the targets, and the goal of model training is to reduce the error obtained by the loss function. 
-Common loss functions include [mindspore.nn.MSELoss](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.MSELoss.html) (mean squared error) for regression tasks and [mindspore.nn.NLLLoss](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.NLLLoss.html) (negative log-likelihood) for classification. [mindspore.nn.CrossEntropyLoss](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.CrossEntropyLoss.html) combines [mindspore.nn.LogSoftmax](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.LogSoftmax.html) and [mindspore.nn.NLLLoss](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.NLLLoss.html) to normalize logits and calculate prediction errors. +Common loss functions include [mindspore.nn.MSELoss](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.MSELoss.html) (mean squared error) for regression tasks and [mindspore.nn.NLLLoss](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.NLLLoss.html) (negative log-likelihood) for classification. [mindspore.nn.CrossEntropyLoss](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.CrossEntropyLoss.html) combines [mindspore.nn.LogSoftmax](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.LogSoftmax.html) and [mindspore.nn.NLLLoss](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.NLLLoss.html) to normalize logits and calculate prediction errors. ```python loss_fn = nn.CrossEntropyLoss() @@ -139,7 +139,7 @@ Once the hyperparameters, loss function and optimizer are set, we can loop the i Next, we define the `train_loop` function for training and the `test_loop` function for testing. -To use functional automatic differentiation, we need to define the forward function `forward_fn` and use [value_and_grad](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.value_and_grad.html) to obtain the differentiation function `grad_fn`. Then, we encapsulate the execution of the differentiation function and the optimizer into the `train_step` function, and then just iterate through the dataset for training. +To use functional automatic differentiation, we need to define the forward function `forward_fn` and use [value_and_grad](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.value_and_grad.html) to obtain the differentiation function `grad_fn`. Then, we encapsulate the execution of the differentiation function and the optimizer into the `train_step` function, and then just iterate through the dataset for training. ```python # Define forward function diff --git a/tutorials/source_en/compile/operators.md b/tutorials/source_en/compile/operators.md index 9dd6bb53e0..3543e92fa9 100644 --- a/tutorials/source_en/compile/operators.md +++ b/tutorials/source_en/compile/operators.md @@ -1,6 +1,6 @@ # Graph Mode Syntax - Operators -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/compile/operators.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/compile/operators.md) Arithmetic operators and assignment operators support the `Number` and `Tensor` operations, as well as the `Tensor` operations of different `dtype`. 
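A hedged illustration of the functional training step outlined in the train.md hunks above (the model, loss function, and optimizer here are toy stand-ins rather than the tutorial's exact network and dataset):

```python
import mindspore
from mindspore import nn

# Toy stand-ins; the tutorial builds a larger Network class and an MNIST dataset
model = nn.Dense(28 * 28, 10)
loss_fn = nn.CrossEntropyLoss()
optimizer = nn.SGD(model.trainable_params(), learning_rate=1e-2)

def forward_fn(data, label):
    logits = model(data)
    loss = loss_fn(logits, label)
    return loss, logits

# value_and_grad wraps forward_fn so a single call returns (loss, logits) and the gradients
grad_fn = mindspore.value_and_grad(forward_fn, None, optimizer.parameters, has_aux=True)

def train_step(data, label):
    (loss, _), grads = grad_fn(data, label)
    optimizer(grads)
    return loss
```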
diff --git a/tutorials/source_en/compile/python_builtin_functions.md b/tutorials/source_en/compile/python_builtin_functions.md index 58ced23466..3be492e445 100644 --- a/tutorials/source_en/compile/python_builtin_functions.md +++ b/tutorials/source_en/compile/python_builtin_functions.md @@ -1,6 +1,6 @@ # Graph Mode Syntax - Python Built-in Functions -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/compile/python_builtin_functions.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/compile/python_builtin_functions.md) Python built-in functions supported by the current static graph mode include: `int`, `float`, `bool`, `str`, `tuple`, `list`, `dict`, `getattr`, `hasattr`, `len`, `isinstance`, `all`, `any`, `round`, `max`, `min`, `sum`, `abs`, `map`, `zip` , `range`, `enumerate`, `super`, `pow`, `print`, `filter`, `type`. The use of built-in functions in graph mode is similar to the corresponding Python built-in functions. @@ -345,7 +345,7 @@ c: (1,) The attribute of object in graph mode may be different from that in pynative mode. It is suggested to use `default` input or call `hasattr` before using `getattr` to avoid AttributeError. -'getattr(x.asnumpy(), "shape", np.array([0, 1, 2, 3, 4]))' is a high-level usage, and more introduction can be found in the [AST Extended Syntaxes (LAX level)](https://www.mindspore.cn/tutorials/en/master/compile/static_graph.html#ast-extended-syntaxes-lax-level) chapter. +'getattr(x.asnumpy(), "shape", np.array([0, 1, 2, 3, 4]))' is a high-level usage, and more introduction can be found in the [AST Extended Syntaxes (LAX level)](https://www.mindspore.cn/tutorials/en/br_base/compile/static_graph.html#ast-extended-syntaxes-lax-level) chapter. ## hasattr @@ -395,7 +395,7 @@ b: False c: True ``` -'hasattr(Tensor(np.array([1, 2, 3, 4])).asnumpy(), "__len__")' is a high-level usage, and more introduction can be found in the [AST Extended Syntaxes (LAX level)](https://www.mindspore.cn/tutorials/en/master/compile/static_graph.html#ast-extended-syntaxes-lax-level) chapter. +'hasattr(Tensor(np.array([1, 2, 3, 4])).asnumpy(), "__len__")' is a high-level usage, and more introduction can be found in the [AST Extended Syntaxes (LAX level)](https://www.mindspore.cn/tutorials/en/br_base/compile/static_graph.html#ast-extended-syntaxes-lax-level) chapter. ## len @@ -450,7 +450,7 @@ n_len:4 w_len:4 ``` -'len(w.asnumpy())' is a high-level usage, and more introduction can be found in the [AST Extended Syntaxes (LAX level)](https://www.mindspore.cn/tutorials/en/master/compile/static_graph.html#ast-extended-syntaxes-lax-level) chapter. +'len(w.asnumpy())' is a high-level usage, and more introduction can be found in the [AST Extended Syntaxes (LAX level)](https://www.mindspore.cn/tutorials/en/br_base/compile/static_graph.html#ast-extended-syntaxes-lax-level) chapter. ## isinstance @@ -501,7 +501,7 @@ z_is_tensor:True w_is_ndarray:True ``` -'isinstance(w.asnumpy(), np.ndarray)' is a high-level usage, and more introduction can be found in the [AST Extended Syntaxes (LAX level)](https://www.mindspore.cn/tutorials/en/master/compile/static_graph.html#ast-extended-syntaxes-lax-level) chapter. 
+'isinstance(w.asnumpy(), np.ndarray)' is a high-level usage, and more introduction can be found in the [AST Extended Syntaxes (LAX level)](https://www.mindspore.cn/tutorials/en/br_base/compile/static_graph.html#ast-extended-syntaxes-lax-level) chapter. ## all @@ -559,7 +559,7 @@ h: True i: False ``` -'all(x.asnumpy())' is a high-level usage, and more introduction can be found in the [AST Extended Syntaxes (LAX level)](https://www.mindspore.cn/tutorials/en/master/compile/static_graph.html#ast-extended-syntaxes-lax-level) chapter. +'all(x.asnumpy())' is a high-level usage, and more introduction can be found in the [AST Extended Syntaxes (LAX level)](https://www.mindspore.cn/tutorials/en/br_base/compile/static_graph.html#ast-extended-syntaxes-lax-level) chapter. ## any @@ -857,7 +857,7 @@ b: 100.12 c: [1 2] ``` -'abs(Tensor([-1, 2]).asnumpy())' is a high-level usage, and more introduction can be found in the [AST Extended Syntaxes (LAX level)](https://www.mindspore.cn/tutorials/en/master/compile/static_graph.html#ast-extended-syntaxes-lax-level) chapter. +'abs(Tensor([-1, 2]).asnumpy())' is a high-level usage, and more introduction can be found in the [AST Extended Syntaxes (LAX level)](https://www.mindspore.cn/tutorials/en/br_base/compile/static_graph.html#ast-extended-syntaxes-lax-level) chapter. ## map diff --git a/tutorials/source_en/compile/statements.md b/tutorials/source_en/compile/statements.md index 9b3582d442..90e8eeae8d 100644 --- a/tutorials/source_en/compile/statements.md +++ b/tutorials/source_en/compile/statements.md @@ -1,6 +1,6 @@ # Graph Mode Syntax - Python Statements -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/compile/statements.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/compile/statements.md) ## Simple Statements @@ -644,7 +644,7 @@ Usage restrictions are the same as list comprehension, i.e., the use of multiple In graph mode, the `with` statement is supported with limitations. The `with` statement requires that the object must have two magic methods: `__enter__()` and `__exit__()`. -It is worth noting that the class used in the with statement needs to be decorated with a decorator@ms.jit_class or inherited from nn. Cell, and more on this can be found in [Calling the Custom Class](https://www.mindspore.cn/tutorials/en/master/compile/static_graph_expert_programming.html#using-jit-class). +It is worth noting that the class used in the with statement needs to be decorated with a decorator@ms.jit_class or inherited from nn. Cell, and more on this can be found in [Calling the Custom Class](https://www.mindspore.cn/tutorials/en/br_base/compile/static_graph_expert_programming.html#using-jit-class). 
For example: diff --git a/tutorials/source_en/compile/static_graph.md b/tutorials/source_en/compile/static_graph.md index a6a0738034..b3df0823b9 100644 --- a/tutorials/source_en/compile/static_graph.md +++ b/tutorials/source_en/compile/static_graph.md @@ -1,6 +1,6 @@ # Introduction to Graph Mode Programming -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/compile/static_graph.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/compile/static_graph.md) ## Overview @@ -23,10 +23,10 @@ in the Cell `__call__` method, so the actual calling process is: `model(inputs) = model.compile(inputs) + model.construct(inputs)`, where `model` is the instantiated Cell object. -Just-In-Time (JIT) compilation can be achieved using the [JIT interface](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.jit.html) . Another way is to use the Graph mode by setting +Just-In-Time (JIT) compilation can be achieved using the [JIT interface](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.jit.html) . Another way is to use the Graph mode by setting `ms.set_context(mode=ms.GRAPH_MODE)`, then write the code in the `construct` function of the `Cell` so that the code in the `construct` function will be compiled into a static computation graph. For details -about the definition of `Cell`, click [Cell API document](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.Cell.html). +about the definition of `Cell`, click [Cell API document](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.Cell.html). Due to syntax parsing restrictions, the supported data types, syntax, and related operations during graph building are not completely consistent with the Python syntax. As a result, some usage is restricted. Borrowing the traditional JIT compilation idea, considers @@ -224,7 +224,7 @@ Supporting `int`, `float`, and `bool`, but does not support `complex` numbers. When the data is a constant, the value of the data can be achieved at compile time, the forcible conversion to `Number` is supported in the network. The syntax `y = int(x)`, `y = float(x)`, and `y = bool(x)` are supported. When the data is a variable, i.e., you can get the value only at runtime. It also supports data type conversion using built-in -functions [Python Built-in Functions](https://www.mindspore.cn/tutorials/en/master/compile/python_builtin_functions.html) +functions [Python Built-in Functions](https://www.mindspore.cn/tutorials/en/br_base/compile/python_builtin_functions.html) such as int(), float() and bool(). For example: ``` python @@ -894,10 +894,10 @@ Currently, MindSpore supports the following user-defined data types: ##### Tensor -For details of `Tensor`, click [Tensor API document](https://mindspore.cn/docs/en/master/api_python/mindspore/mindspore.Tensor.html#mindspore-tensor). +For details of `Tensor`, click [Tensor API document](https://mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.Tensor.html#mindspore-tensor). Supporting creating and using Tensor. 
The ways to create a `Tensor` -include using [tensor function interface](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.tensor.html#mindspore.tensor) +include using [tensor function interface](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.tensor.html#mindspore.tensor) and using the class \'ms.Tensor\' interface. It is recommended to use the former because users can specify the required dtype. The code case is as follows. @@ -962,7 +962,7 @@ ret.shape:(3, 4, 1, 6) Currently, the attributes and APIs related to `Primitive` and its subclasses cannot be called on the network. For details about the defined `Primitive`, click [Primitive API -document](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Primitive.html#mindspore.ops.Primitive). +document](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Primitive.html#mindspore.ops.Primitive). ##### Cell @@ -978,7 +978,7 @@ cannot be called on the network unless they are called through `self` in `construct` of `Cell`. For details about the definition of `Cell`, click [Cell API -document](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.Cell.html). +document](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.Cell.html). ##### Parameter @@ -986,14 +986,14 @@ document](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.Cel be updated during network training. For details about the definition of `Parameter`, click -[Parameter API document](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.Parameter.html#mindspore.Parameter). +[Parameter API document](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.Parameter.html#mindspore.Parameter). ### Operators Arithmetic operators and assignment operators support the `Number` and `Tensor` operations, as well as the `Tensor` operations of different `dtype`. For more details, please refer to -[Operators](https://www.mindspore.cn/tutorials/en/master/compile/operators.html) +[Operators](https://www.mindspore.cn/tutorials/en/br_base/compile/operators.html) ### Primaries @@ -1090,14 +1090,14 @@ ret:[[3. 3. 3. 3.]] Currently supported Python statements include raise statement, assert statement, pass statement, return statement, break statement, continue statement, if statement, for statement, while statement, with statement, list comprehension, generator expression and function definition statement. For more details, please refer to -[Statements](https://www.mindspore.cn/tutorials/en/master/compile/statements.html) +[Statements](https://www.mindspore.cn/tutorials/en/br_base/compile/statements.html) ### Python Built-in Functions Currently supported Python built-in functions include `int`, `float`, `bool`, `str`, `list`, `tuple`, `getattr`, `hasattr`, `len`, `isinstance`, `all`, `any`, `round`, `max`, `min` , `sum`, `abs`, `partial`, `map`, `range`, `enumerate`, `super`, `pow`, `filter`. The use of built-in functions in graph mode is similar to the corresponding -Python built-in functions. For more details, please refer to [Python Built-in Functions](https://www.mindspore.cn/tutorials/en/master/compile/python_builtin_functions.html). +Python built-in functions. For more details, please refer to [Python Built-in Functions](https://www.mindspore.cn/tutorials/en/br_base/compile/python_builtin_functions.html). 
### Network Definition @@ -1477,7 +1477,7 @@ Combining view and in-place operations improves memory efficiency and computatio The execution graph in graph mode is converted from source code, and not all Python syntax can support it. The following describes some of the syntax constraints that exist under the basic syntax. More network -compilation problems can be found in [Network compilation](https://www.mindspore.cn/docs/en/master/faq/network_compilation.html). +compilation problems can be found in [Network compilation](https://www.mindspore.cn/docs/en/br_base/faq/network_compilation.html). 1. When an undefined class member is used in the `construct` function, `AttributeError` exception will be thrown. For example: @@ -1568,7 +1568,7 @@ compilation problems can be found in [Network compilation](https://www.mindspore \'dictionary\' are not supported in the basic syntax scenario, and need to be supported when the JIT syntax support level option \'jit_syntax_level\' is \'LAX\'. -5. In the basic syntax scenario, in addition to the [Python Built-in Functions](https://www.mindspore.cn/tutorials/en/master/compile/python_builtin_functions.html) +5. In the basic syntax scenario, in addition to the [Python Built-in Functions](https://www.mindspore.cn/tutorials/en/br_base/compile/python_builtin_functions.html) supported in the current graph mode, there are still some built-in functions that are not supported in graph mode. For example: basestring, bin, bytearray, callable, chr, cmp, compile, delattr, dir, divmod, eval, execfile, file, frozenset, hash, hex, id, input, issubclass, iter, locals, long, memoryview, next, object, oct, open, ord, property, raw_input, reduce, reload, repr, reverse, set, slice, @@ -1624,10 +1624,10 @@ current extension base on AST compilation. which need to be installed first and then imported, such `NumPy` and `Scipy`. It should be noted that MindSpore suites such as `mindyolo` and `mindflow` are not treated as third-party libraries. For a detailed list, please refer to the `_modules_from_mindspore` list of the - [parser](https://gitee.com/mindspore/mindspore/blob/master/mindspore/python/mindspore/_extends/parse/parser.py) file. + [parser](https://gitee.com/mindspore/mindspore/blob/br_base/mindspore/python/mindspore/_extends/parse/parser.py) file. 3. Modules specified by the environment variable `MS_JIT_IGNORE_MODULES`. In contrast, there is the environment variable `MS_JIT_MODULES`. For more details, please refer to - [Environment Variables](https://www.mindspore.cn/docs/en/master/api_python/env_var_list.html). + [Environment Variables](https://www.mindspore.cn/docs/en/br_base/api_python/env_var_list.html). - Supporting data types of third-party libraries, allowing calling and returning objects of third-party libraries. @@ -1778,7 +1778,7 @@ is overloaded: \[\'+\', \'-\', \'not\', \'==\', \'!=\', \'\<\', \'\>\', \'\<=\', \'\>=\', \'in\', \'not in\', \'y=x\[0\]\'\]. For more details, please refer to -[Operators](https://www.mindspore.cn/tutorials/en/master/compile/operators.html). +[Operators](https://www.mindspore.cn/tutorials/en/br_base/compile/operators.html). When getting unsupported input type, those operators need to use extended static graph syntax to support, and make the output consistent with the output in the pynative mode. @@ -2050,7 +2050,7 @@ perfectly support more input types, such as third-party library data types. For example, in the following example, \'x.asnumpy()\' and \'np.ndarray\' are both types supported by extensions. 
More support for built-in functions can be found in the [Python built-in -functions](https://www.mindspore.cn/tutorials/en/master/compile/python_builtin_functions.html) section. +functions](https://www.mindspore.cn/tutorials/en/br_base/compile/python_builtin_functions.html) section. ``` python import numpy as np @@ -2270,9 +2270,9 @@ to specify the corresponding Python statement type, thereby determining the type of the interpretation node and avoiding the generation of `Any` type. For example, the difference between the -[Tensor](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.Tensor.html#mindspore.Tensor) +[Tensor](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.Tensor.html#mindspore.Tensor) class and the -[tensor](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.tensor.html#mindspore.tensor) +[tensor](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.tensor.html#mindspore.tensor) interface lies in the use of the Annotation Type mechanism within the tensor interface. When the dtype of the tensor function is determined, the function uses Annotation to specify the output type, thereby avoiding the generation of Any type. The use of `Annotation Type` only requires adding a comment `# @jit.typing: () -> tensor_type[float32]` above or after the corresponding Python statement, where @@ -2386,7 +2386,7 @@ net.attr: Tensor(shape=[3], dtype=Int64, value= [2, 3, 4]) 3\. When constructing graphs based on bytecode, control flow involving variable scenarios cannot be included in the graph. For related information on variables, please refer to -[Variables Generate Scenes](https://www.mindspore.cn/tutorials/en/master/compile/static_graph.html#variables-generate-scenes) . +[Variables Generate Scenes](https://www.mindspore.cn/tutorials/en/br_base/compile/static_graph.html#variables-generate-scenes) . An example is as follows: ``` python diff --git a/tutorials/source_en/compile/static_graph_expert_programming.md b/tutorials/source_en/compile/static_graph_expert_programming.md index 5d06e65b08..4079116038 100644 --- a/tutorials/source_en/compile/static_graph_expert_programming.md +++ b/tutorials/source_en/compile/static_graph_expert_programming.md @@ -1,8 +1,8 @@ # Graph Mode - Programming Techniques -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/compile/static_graph_expert_programming.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/compile/static_graph_expert_programming.md) -This chapter introduces some commonly used advanced programming techniques for static graph optimization, which can effectively improve the compilation efficiency as well as the execution efficiency of static graphs, and make the program run more stably. For a basic introduction to static graphs compilation, see [Accelerating with Static Graphs](https://www.mindspore.cn/tutorials/en/master/beginner/accelerate_with_static_graph.html). +This chapter introduces some commonly used advanced programming techniques for static graph optimization, which can effectively improve the compilation efficiency as well as the execution efficiency of static graphs, and make the program run more stably. 
For a basic introduction to static graphs compilation, see [Accelerating with Static Graphs](https://www.mindspore.cn/tutorials/en/br_base/beginner/accelerate_with_static_graph.html). ## How to Optimize Compilation Performance @@ -186,7 +186,7 @@ As in the example above, add the `@lazy_inline` decorator to the `__init__` func Usage Scenario: Use HyperMap to replace for loop to optimize compilation performance. -`HyperMap` is a special class. Class object construction needs to be passed into the mapping function f, and calling the object needs to be passed into the n parameter sequence of f. For more usage see: [HyperMap](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.HyperMap.html). The mapping function f must be of type `MultitypeFuncGraph`, see [MultitypeFuncGraph](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.MultitypeFuncGraph.html). When using for loops to batch process list elements, network compilation performance can be optimized by `HyperMap`-equivalent semantic substitution. +`HyperMap` is a special class. Class object construction needs to be passed into the mapping function f, and calling the object needs to be passed into the n parameter sequence of f. For more usage see: [HyperMap](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.HyperMap.html). The mapping function f must be of type `MultitypeFuncGraph`, see [MultitypeFuncGraph](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.MultitypeFuncGraph.html). When using for loops to batch process list elements, network compilation performance can be optimized by `HyperMap`-equivalent semantic substitution. ### Using the Compilation Cache @@ -194,9 +194,9 @@ Usage scenario: Compilation time is reduced by using a compilation cache if no c The essence of the compilation cache is to store the compilation intermediate process file of the network model. When the network model is unchanged, the production of the compilation intermediate process file is also the same, so you can reuse the intermediate process file produced by the last programming. -By setting the environment variable [MS_COMPILER_CACHE_ENABLE](https://www.mindspore.cn/docs/en/master/api_python/env_var_list.html?highlight=MS_COMPILER_CACHE_ENABLE), which can specify whether to save and load the compile cache. +By setting the environment variable [MS_COMPILER_CACHE_ENABLE](https://www.mindspore.cn/docs/en/br_base/api_python/env_var_list.html?highlight=MS_COMPILER_CACHE_ENABLE), which can specify whether to save and load the compile cache. -By setting the environment variable [MS_COMPILER_CACHE_PATH](https://www.mindspore.cn/docs/en/master/api_python/env_var_list.html?highlight=MS_COMPILER_CACHE_PATH), you can specify the MindSpore compilation cache directory for storing cache files generated by the graph and operator compilation process. +By setting the environment variable [MS_COMPILER_CACHE_PATH](https://www.mindspore.cn/docs/en/br_base/api_python/env_var_list.html?highlight=MS_COMPILER_CACHE_PATH), you can specify the MindSpore compilation cache directory for storing cache files generated by the graph and operator compilation process. A code sample that optimizes compilation performance by enabling compilation caching is shown below: @@ -292,7 +292,7 @@ When a user defines a class in a network script, it can be written as a class in - a custom class - After defining a custom class, you can instantiate the class and call the attributes and methods of the class object. 
Please refer to [the Use of Custom Classes](https://www.mindspore.cn/tutorials/en/master/compile/static_graph.html#supporting-the-use-of-custom-classes). Compared to `Cell` class definitions, custom classes are closer to the user habits of calling Python classes. The implementation of custom classes in static graph mode is different from `Cell`, for example, when calling a function method of a custom class object, the code in its function method will not be compiled into a static computational graph but will be interpreted and executed by the Python interpreter. + After defining a custom class, you can instantiate the class and call the attributes and methods of the class object. Please refer to [the Use of Custom Classes](https://www.mindspore.cn/tutorials/en/br_base/compile/static_graph.html#supporting-the-use-of-custom-classes). Compared to `Cell` class definitions, custom classes are closer to the user habits of calling Python classes. The implementation of custom classes in static graph mode is different from `Cell`, for example, when calling a function method of a custom class object, the code in its function method will not be compiled into a static computational graph but will be interpreted and executed by the Python interpreter. - a class decorated by `@jit_class` diff --git a/tutorials/source_en/custom_program/fusion_pass.md b/tutorials/source_en/custom_program/fusion_pass.md index f4067f849f..91562d3357 100644 --- a/tutorials/source_en/custom_program/fusion_pass.md +++ b/tutorials/source_en/custom_program/fusion_pass.md @@ -1,6 +1,6 @@ # Custom Fusion -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/custom_program/fusion_pass.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/custom_program/fusion_pass.md) ## Overview diff --git a/tutorials/source_en/custom_program/hook_program.md b/tutorials/source_en/custom_program/hook_program.md index eb14064c64..5ea0872cfd 100644 --- a/tutorials/source_en/custom_program/hook_program.md +++ b/tutorials/source_en/custom_program/hook_program.md @@ -1,10 +1,10 @@ # Hook Programming -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/custom_program/hook_program.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/custom_program/hook_program.md) Debugging deep learning networks is a big task for every practitioner in the field of deep learning. Since the deep learning network hides the input and output data as well as the inverse gradient of the intermediate layer operators, only the gradient of the network input data (feature quantity and weight) is provided, resulting in the inability to accurately sense the data changes of the intermediate layer operators, which reduces the debugging efficiency. In order to facilitate users to debug the deep learning network accurately and quickly, MindSpore designs Hook function in dynamic graph mode. 
**Using Hook function can capture the input and output data of intermediate layer operators as well as the reverse gradient**. -Currently, five forms of Hook functions are provided in dynamic graph mode: [HookBackward](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.HookBackward.html) operator and register_forward_pre_hook, register_forward_hook, register_backward_pre_hook, register_backward_hook functions registered on Cell objects. +Currently, five forms of Hook functions are provided in dynamic graph mode: [HookBackward](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.HookBackward.html) operator and register_forward_pre_hook, register_forward_hook, register_backward_pre_hook, register_backward_hook functions registered on Cell objects. ## HookBackward Operator @@ -41,7 +41,7 @@ hook_fn print grad_out: (Tensor(shape=[], dtype=Float32, value= 2),) output: (Tensor(shape=[], dtype=Float32, value= 4), Tensor(shape=[], dtype=Float32, value= 4)) ``` -For more descriptions of the HookBackward operator, refer to the [API documentation](https://mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.HookBackward.html). +For more descriptions of the HookBackward operator, refer to the [API documentation](https://mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.HookBackward.html). ## register_forward_pre_hook Function in Cell Object @@ -145,7 +145,7 @@ forward inputs: (Tensor(shape=[1], dtype=Float32, value= [ 2.00000000e+00]),) To avoid running failure when scripts switch to graph mode, it is not recommended to call the `register_forward_pre_hook` function and the `remove()` function of the `handle` object in the `construct` function of the Cell object. In dynamic graph mode, if the `register_forward_pre_hook` function is called in the `construct` function of the Cell object, the Cell object will register a new Hook function every time it runs. -For more information about the `register_forward_pre_hook` function of the Cell object, refer to the [API documentation](https://mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.register_forward_pre_hook). +For more information about the `register_forward_pre_hook` function of the Cell object, refer to the [API documentation](https://mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.register_forward_pre_hook). ## register_forward_hook Function of Cell Object @@ -210,7 +210,7 @@ forward outputs: [2.] If the user returns the newly created data directly in the Hook function, instead of returning new output data that is obtained after the original output data is calculated, then the back propagation of the gradient will cut off on that Cell object, which can be seen in the use case illustration of the `register_forward_pre_hook` function. To avoid running failure when the script switches to graph mode, it is not recommended to call the `register_forward_hook` function in the `construct` function of the Cell object and the `remove()` function of the `handle` object. In dynamic graph mode, if the `register_forward_hook` function is called in the `construct` function of the Cell object, the Cell object will register a new Hook function every time it runs. -For more information about the `register_forward_hook` function of the Cell object, please refer to the [API documentation](https://mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.register_forward_hook). 
+For more information about the `register_forward_hook` function of the Cell object, please refer to the [API documentation](https://mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.register_forward_hook). ## register_backward_pre_hook Function of Cell Object @@ -275,7 +275,7 @@ print("-------------\n", output) To avoid running failure when the scripts switch to graph mode, it is not recommended to call the `register_backward_pre_hook` function and the `remove()` function of the `handle` object in the `construct` function of the Cell object. In PyNative mode, if the `register_backward_pre_hook` function is called in the `construct` function of the Cell object, the Cell object will register a new Hook function every time it runs. -For more information about the `register_backward_pre_hook` function of the Cell object, please refer to the [API documentation](https://mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.register_backward_pre_hook). +For more information about the `register_backward_pre_hook` function of the Cell object, please refer to the [API documentation](https://mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.register_backward_pre_hook). ## register_backward_hook Function of Cell Object @@ -345,7 +345,7 @@ print("-------------\n", output) To avoid running failure when the scripts switch to graph mode, it is not recommended to call the `register_backward_hook` function and the `remove()` function of the `handle` object in the `construct` function of the Cell object. In PyNative mode, if the `register_backward_hook` function is called in the `construct` function of the Cell object, the Cell object will register a new Hook function every time it runs. -For more information about the `register_backward_hook` function of the Cell object, please refer to the [API documentation](https://mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.register_backward_hook). +For more information about the `register_backward_hook` function of the Cell object, please refer to the [API documentation](https://mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.register_backward_hook). ## Using the Multiple hook Function of Cell Object diff --git a/tutorials/source_en/custom_program/op_custom.rst b/tutorials/source_en/custom_program/op_custom.rst index f237861d22..cc550d58a3 100644 --- a/tutorials/source_en/custom_program/op_custom.rst +++ b/tutorials/source_en/custom_program/op_custom.rst @@ -1,8 +1,8 @@ Custom Operators ================= -.. image:: https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg - :target: https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/custom_program/op_custom.rst +.. image:: https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg + :target: https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/custom_program/op_custom.rst :alt: View Source On Gitee .. toctree:: @@ -20,16 +20,16 @@ Custom Operators When built-in operators cannot meet requirements during network development, you can use MindSpore's custom operator functionality to integrate your operators. 
Currently, MindSpore provides two approaches for integrating custom operators: -- `Custom Primitive-Based Custom Operators `_ -- `CustomOpBuilder-Based Custom Operators `_ +- `Custom Primitive-Based Custom Operators `_ +- `CustomOpBuilder-Based Custom Operators `_ .. list-table:: :widths: 20 40 40 :header-rows: 1 * - Interface Comparison - - `Custom Primitive `_ - - `CustomOpBuilder `_ + - `Custom Primitive `_ + - `CustomOpBuilder `_ * - Supported Modes - Graph Mode and PyNative Mode - PyNative Mode diff --git a/tutorials/source_en/custom_program/operation/cpp_api_for_custom_ops.md b/tutorials/source_en/custom_program/operation/cpp_api_for_custom_ops.md index 64559b22ef..bd8a9b5fa9 100644 --- a/tutorials/source_en/custom_program/operation/cpp_api_for_custom_ops.md +++ b/tutorials/source_en/custom_program/operation/cpp_api_for_custom_ops.md @@ -1,6 +1,6 @@ # C++ API Description for Custom Operators -[![View Source File](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/custom_program/operation/cpp_api_for_custom_ops.md) +[![View Source File](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/custom_program/operation/cpp_api_for_custom_ops.md) ## Overview @@ -18,7 +18,7 @@ When developing custom operators, you can include the header files referenced by ### enum TypeId -The `TypeId` enumeration type is defined in the [type_id.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/core/include/mindapi/base/type_id.h) header file and specifies the tensor data types supported in MindSpore, including boolean, integer, floating-point, and complex types. +The `TypeId` enumeration type is defined in the [type_id.h](https://gitee.com/mindspore/mindspore/blob/br_base/mindspore/core/include/mindapi/base/type_id.h) header file and specifies the tensor data types supported in MindSpore, including boolean, integer, floating-point, and complex types. This interface is also included in the `namespace ms` namespace and can be accessed via `ms::TypeId`. @@ -51,7 +51,7 @@ kNumberTypeEnd, // End value for the Number type ### class Tensor -The `Tensor` class is defined in the [tensor.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/ms_extension/common/tensor.h) header file, representing the tensor object in MindSpore. It provides methods for operating on and querying tensor properties. +The `Tensor` class is defined in the [tensor.h](https://gitee.com/mindspore/mindspore/blob/br_base/mindspore/ccsrc/ms_extension/common/tensor.h) header file, representing the tensor object in MindSpore. It provides methods for operating on and querying tensor properties. #### Constructors @@ -302,7 +302,7 @@ The following methods are not part of the API and are used only in internal modu ### function tensor -Factory methods for constructing constant tensors, defined in the [tensor_utils.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/ms_extension/common/tensor_utils.h) header file. +Factory methods for constructing constant tensors, defined in the [tensor_utils.h](https://gitee.com/mindspore/mindspore/blob/br_base/mindspore/ccsrc/ms_extension/common/tensor_utils.h) header file. 
```cpp Tensor tensor(int64_t value, TypeId dtype = TypeId::kNumberTypeInt64) @@ -319,7 +319,7 @@ Tensor tensor(const std::vector &value, TypeId dtype = TypeId::kNumberTy ### function ones -Factory method for constructing a tensor filled with ones, defined in the [tensor_utils.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/ms_extension/common/tensor_utils.h) header file. +Factory method for constructing a tensor filled with ones, defined in the [tensor_utils.h](https://gitee.com/mindspore/mindspore/blob/br_base/mindspore/ccsrc/ms_extension/common/tensor_utils.h) header file. ```cpp Tensor ones(const ShapeVector &shape, TypeId dtype = TypeId::kNumberTypeFloat32) @@ -333,7 +333,7 @@ Tensor ones(const ShapeVector &shape, TypeId dtype = TypeId::kNumberTypeFloat32) ### function zeros -Factory method for constructing a tensor filled with zeros, defined in the [tensor_utils.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/ms_extension/common/tensor_utils.h) header file. +Factory method for constructing a tensor filled with zeros, defined in the [tensor_utils.h](https://gitee.com/mindspore/mindspore/blob/br_base/mindspore/ccsrc/ms_extension/common/tensor_utils.h) header file. ```cpp Tensor zeros(const ShapeVector &shape, TypeId dtype = TypeId::kNumberTypeFloat32) @@ -349,7 +349,7 @@ Tensor zeros(const ShapeVector &shape, TypeId dtype = TypeId::kNumberTypeFloat32 ### class PyboostRunner -The `PyboostRunner` class for PyNative processes is defined in the [pyboost_extension.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/ms_extension/pynative/pyboost_extension.h) header file. It provides methods for managing execution, memory allocation, and kernel launching. +The `PyboostRunner` class for PyNative processes is defined in the [pyboost_extension.h](https://gitee.com/mindspore/mindspore/blob/br_base/mindspore/ccsrc/ms_extension/pynative/pyboost_extension.h) header file. It provides methods for managing execution, memory allocation, and kernel launching. `PyboostRunner` is a subclass of `std::enable_shared_from_this` and requires the use of the smart pointer `std::shared_ptr` to manage its objects. @@ -470,11 +470,11 @@ The `PyboostRunner` class for PyNative processes is defined in the [pyboost_exte ### class AtbOpRunner -The `AtbOpRunner` class is a runner for executing Ascend Transformer Boost (ATB) operators, defined in the [atb_common.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/ms_extension/ascend/atb/atb_common.h) header file. +The `AtbOpRunner` class is a runner for executing Ascend Transformer Boost (ATB) operators, defined in the [atb_common.h](https://gitee.com/mindspore/mindspore/blob/br_base/mindspore/ccsrc/ms_extension/ascend/atb/atb_common.h) header file. This class inherits from `PyboostRunner` and encapsulates the process of invoking ATB operators, including initialization, running the ATB operator, managing input/output tensors, memory allocation, and kernel scheduling. -Refer to the tutorial [CustomOpBuilder Using AtbOpRunner to Integrate ATB Operators](https://www.mindspore.cn/tutorials/en/master/custom_program/operation/op_customopbuilder_atb.html) for usage methods. +Refer to the tutorial [CustomOpBuilder Using AtbOpRunner to Integrate ATB Operators](https://www.mindspore.cn/tutorials/en/br_base/custom_program/operation/op_customopbuilder_atb.html) for usage methods. 
#### Constructor @@ -502,7 +502,7 @@ Refer to the tutorial [CustomOpBuilder Using AtbOpRunner to Integrate ATB Operat ### function RunAtbOp -The interface for executing ATB operators in dynamic graphs, defined in the [atb_common.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/ms_extension/ascend/atb/atb_common.h) header file. +The interface for executing ATB operators in dynamic graphs, defined in the [atb_common.h](https://gitee.com/mindspore/mindspore/blob/br_base/mindspore/ccsrc/ms_extension/ascend/atb/atb_common.h) header file. ```cpp template @@ -520,11 +520,11 @@ void RunAtbOp(const std::string &op_name, const ParamType ¶m, const std::vec ### class AsdSipFFTOpRunner -The `AsdSipFFTOpRunner` class is a runner for executing Ascend Sip Boost (ASDSIP) operators, defined in the [asdsip_common.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/ms_extension/ascend/asdsip/asdsip_common.h) header file. +The `AsdSipFFTOpRunner` class is a runner for executing Ascend Sip Boost (ASDSIP) operators, defined in the [asdsip_common.h](https://gitee.com/mindspore/mindspore/blob/br_base/mindspore/ccsrc/ms_extension/ascend/asdsip/asdsip_common.h) header file. This class inherits from `PyboostRunner` and encapsulates the process of invoking ASDSIP FFT operators, including initialization, running the ASDSIP FFT operator, managing input/output tensor, memory allocation, and kernel scheduling. -Refer to the tutorial [CustomOpBuilder Integrates the ASDSIP FFT Operators through AsdSipFFTOpRunner](https://www.mindspore.cn/tutorials/en/master/custom_program/operation/op_customopbuilder_asdsip.html) for usage methods. +Refer to the tutorial [CustomOpBuilder Integrates the ASDSIP FFT Operators through AsdSipFFTOpRunner](https://www.mindspore.cn/tutorials/en/br_base/custom_program/operation/op_customopbuilder_asdsip.html) for usage methods. #### Constructor @@ -550,7 +550,7 @@ Refer to the tutorial [CustomOpBuilder Integrates the ASDSIP FFT Operators throu ### function RunAsdSipFFTOp -The interface for executing ASDSIP FFT operators in dynamic graphs, defined in the [asdsip_common.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/ms_extension/ascend/asdsip/asdsip_common.h) header file. +The interface for executing ASDSIP FFT operators in dynamic graphs, defined in the [asdsip_common.h](https://gitee.com/mindspore/mindspore/blob/br_base/mindspore/ccsrc/ms_extension/ascend/asdsip/asdsip_common.h) header file. 
```cpp inline void RunAsdSipFFTOp(const std::string &op_name, const FFTParam &fft_param, const ms::Tensor &input, diff --git a/tutorials/source_en/custom_program/operation/op_custom_adv.md b/tutorials/source_en/custom_program/operation/op_custom_adv.md index 80db5ffeb0..ec0aaca7e2 100644 --- a/tutorials/source_en/custom_program/operation/op_custom_adv.md +++ b/tutorials/source_en/custom_program/operation/op_custom_adv.md @@ -1,10 +1,10 @@ # Advanced Usage of Custom Operators -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/custom_program/operation/op_custom_adv.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/custom_program/operation/op_custom_adv.md) ## Registering the Operator Information -The operator information describes the supported inputs and outputs data type, the supported inputs and outputs format, attributes, and target (platform information) of the operator implementation. It is used to select and map operators by the backend. The operator information can be defined by using the [CustomRegOp](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.CustomRegOp.html#mindspore-ops-customregop) API, then you can use the [custom_info_register](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.custom_info_register.html#mindspore-ops-custom-info-register) decorator or just pass it to the `reg_info` parameter of [Custom](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Custom.html#mindspore-ops-custom) primitive to bind the information to the operator implementation. The operator information will be registered to the operator information library on the MindSpore C++ side at last. The `reg_info` parameter takes higher priority than the `custom_info_register` decorator. +The operator information describes the supported inputs and outputs data type, the supported inputs and outputs format, attributes, and target (platform information) of the operator implementation. It is used to select and map operators by the backend. The operator information can be defined by using the [CustomRegOp](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.CustomRegOp.html#mindspore-ops-customregop) API, then you can use the [custom_info_register](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.custom_info_register.html#mindspore-ops-custom-info-register) decorator or just pass it to the `reg_info` parameter of [Custom](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Custom.html#mindspore-ops-custom) primitive to bind the information to the operator implementation. The operator information will be registered to the operator information library on the MindSpore C++ side at last. The `reg_info` parameter takes higher priority than the `custom_info_register` decorator. The target value in operator information can be "Ascend", "GPU" or "CPU", which describes the operator information on a specific target. For the same operator implementation, it may have different supported data types on different targets. The operator information on a specific target will be registered only once, so you can use the target value in operator information to distinguish between them. 
@@ -86,4 +86,4 @@ The execution result is as follows: [ 2. 8. 18.] ``` -> More examples can be found in the MindSpore source code [tests/st/graph_kernel/custom](https://gitee.com/mindspore/mindspore/tree/master/tests/st/graph_kernel/custom). +> More examples can be found in the MindSpore source code [tests/st/graph_kernel/custom](https://gitee.com/mindspore/mindspore/tree/br_base/tests/st/graph_kernel/custom). diff --git a/tutorials/source_en/custom_program/operation/op_custom_aot.md b/tutorials/source_en/custom_program/operation/op_custom_aot.md index 4d88ce523e..11f807a348 100644 --- a/tutorials/source_en/custom_program/operation/op_custom_aot.md +++ b/tutorials/source_en/custom_program/operation/op_custom_aot.md @@ -1,12 +1,12 @@ # AOT-Type Custom Operators(CPU/GPU) -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/custom_program/operation/op_custom_aot.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/custom_program/operation/op_custom_aot.md) ## Overview -AOT (Ahead-Of-Time) type of custom operators employ a pre-compilation approach, which requires network developers to manually write the source code files corresponding to the operator implementation functions based on specific interfaces. These source code files need to be compiled into dynamic link libraries (DLLs) in advance. During network runtime, the framework will automatically invoke and execute the functions contained within these dynamic link libraries. AOT-type custom operators support the CUDA language for GPU platforms and the C and C++ languages for CPU platforms. For the development of custom operators specifically on the Ascend platform, please refer to [AOT-Type Custom Operators(Ascend)](https://www.mindspore.cn/tutorials/en/master/custom_program/operation/op_custom_ascendc.html). +AOT (Ahead-Of-Time) type of custom operators employ a pre-compilation approach, which requires network developers to manually write the source code files corresponding to the operator implementation functions based on specific interfaces. These source code files need to be compiled into dynamic link libraries (DLLs) in advance. During network runtime, the framework will automatically invoke and execute the functions contained within these dynamic link libraries. AOT-type custom operators support the CUDA language for GPU platforms and the C and C++ languages for CPU platforms. For the development of custom operators specifically on the Ascend platform, please refer to [AOT-Type Custom Operators(Ascend)](https://www.mindspore.cn/tutorials/en/br_base/custom_program/operation/op_custom_ascendc.html). -In this tutorial, we provide several simple use cases of AOT-type custom operators on both CPU and GPU platforms as demonstrations. For more comprehensive examples of AOT-type custom operators, please refer to the [examples](https://gitee.com/mindspore/mindspore/blob/master/tests/st/graph_kernel/custom/test_custom_aot.py) section in the MindSpore source code. +In this tutorial, we provide several simple use cases of AOT-type custom operators on both CPU and GPU platforms as demonstrations. 
For more comprehensive examples of AOT-type custom operators, please refer to the [examples](https://gitee.com/mindspore/mindspore/blob/br_base/tests/st/graph_kernel/custom/test_custom_aot.py) section in the MindSpore source code. ## The Introduction to the General Usage Features of AOT-type Custom Operators @@ -33,7 +33,7 @@ In the Python script, the format for the `func` input in `Custom` is `Path_To_Fu Operator output shape and data type inference can be realized by defining Python functions to describe the inference logic. -If the operator only supports some specific input and output data types, the operator information needs to be registered. For the creation of operator information, please refer to [Registering the Operator Information](https://www.mindspore.cn/tutorials/en/master/custom_program/operation/op_custom_adv.html#registering-the-operator-information). +If the operator only supports some specific input and output data types, the operator information needs to be registered. For the creation of operator information, please refer to [Registering the Operator Information](https://www.mindspore.cn/tutorials/en/br_base/custom_program/operation/op_custom_adv.html#registering-the-operator-information). The following examples introduce the development process of AOT-type custom operator on GPU platform and CPU platform, where the custom operator implements the function of adding two input tensors. @@ -200,7 +200,7 @@ In the rest of tutorial, we will demonstrate advanced features of AOT-type custo - Attributes and intermediate variables of AOT-type custom operators; - Dynamic shape support for AOT-type custom operators. -For the complete source code of the example, check [here](https://gitee.com/mindspore/mindspore/blob/master/tests/st/graph_kernel/custom/test_custom_aot_fused.py) in the MindSpore source code. +For the complete source code of the example, check [here](https://gitee.com/mindspore/mindspore/blob/br_base/tests/st/graph_kernel/custom/test_custom_aot_fused.py) in the MindSpore source code. ### Auto-compilation of AOT-type Custom Operators @@ -282,7 +282,7 @@ The function name `FuncName` is the name of the operator main function. The retu - ndims (int \*): Array of dimensions for input and output shapes. - shapes (int64_t \*\*): Array of shapes for inputs and outputs. - dtypes (const char \*\*): Array of data types for inputs and outputs. -- extra (AotExtra \*): Custom operator extensions with attributes. The `AotExtra` type is defined in the header file [custom_aot_extra.h](https://gitee.com/mindspore/mindspore/blob/master/tests/st/graph_kernel/custom/aot_test_files/custom_aot_extra.h) provided by MindSpore. +- extra (AotExtra \*): Custom operator extensions with attributes. The `AotExtra` type is defined in the header file [custom_aot_extra.h](https://gitee.com/mindspore/mindspore/blob/br_base/tests/st/graph_kernel/custom/aot_test_files/custom_aot_extra.h) provided by MindSpore. ### Shape Inference Function @@ -298,7 +298,7 @@ The meaning of the parameter list is as follows: - `ndims` (int \*): Array of dimensions for input shapes. - `shapes` (int64_t \*\*): Array of shapes for inputs. -- `extra` (AotExtra \*): Pointer to an extension for attribute-bearing custom operators. The `AotExtra` type is defined in the header file [custom_aot_extra.h](https://gitee.com/mindspore/mindspore/blob/master/tests/st/graph_kernel/custom/aot_test_files/custom_aot_extra.h) provided by MindSpore. +- `extra` (AotExtra \*): Pointer to an extension for attribute-bearing custom operators. 
The `AotExtra` type is defined in the header file [custom_aot_extra.h](https://gitee.com/mindspore/mindspore/blob/br_base/tests/st/graph_kernel/custom/aot_test_files/custom_aot_extra.h) provided by MindSpore. ### Type Inference Function @@ -321,7 +321,7 @@ The initialization of operator attributes is implemented through the operator re def attr(self, name=None, param_type=None, value_type=None, default_value=None, **kwargs) ``` -Please refer to the [CustomRegOp](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.CustomRegOp.html#mindspore-ops-customregop) interface documentation for the meaning of each parameter. When registering a custom operator of Aot type, we set the following four parameters: +Please refer to the [CustomRegOp](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.CustomRegOp.html#mindspore-ops-customregop) interface documentation for the meaning of each parameter. When registering a custom operator of Aot type, we set the following four parameters: - `name`: the name of the attribute of the AOT-type custom operator; - `param_type`: the parameter type of the attribute. For attributes of AOT-type custom operators, this input is fixed to be "required", which means it is a required parameter; @@ -350,7 +350,7 @@ To implement the operator, we create a source file named `kernel.cc`, which incl First, we define a data structure to store operator attributes, which inherits from `AotKernelData`. `AotKernelData` is the base class for custom operator attribute data structures. -By downloading the header file [custom_aot_extra.h](https://gitee.com/mindspore/mindspore/blob/master/tests/st/graph_kernel/custom/aot_test_files/custom_aot_extra.h) provided by MindSpore and placing it in the same directory as the source file, we can use the related interfaces by including it with `#include "custom_aot_extra.h"` at the beginning of the file. +By downloading the header file [custom_aot_extra.h](https://gitee.com/mindspore/mindspore/blob/br_base/tests/st/graph_kernel/custom/aot_test_files/custom_aot_extra.h) provided by MindSpore and placing it in the same directory as the source file, we can use the related interfaces by including it with `#include "custom_aot_extra.h"` at the beginning of the file. ```c++ #include @@ -538,7 +538,7 @@ The `ReduceDynNet` in this file includes two parts: the operator registration fu #### Operator Registration The assignment of operator attributes during initialization is implemented through the operator registration function. -For the function of custom operator registration, please refer to the relevant documentation of [CustomRegOp](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.CustomRegOp.html#mindspore-ops-customregop). +For the function of custom operator registration, please refer to the relevant documentation of [CustomRegOp](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.CustomRegOp.html#mindspore-ops-customregop). For each attribute, we create an `attr` for the operator registration file `reduce_cpu_info`, setting the attribute name and value. Each `attr` item here has four inputs: the first is the name, such as `"axis"` or `"keep_dim"`; the middle two are `"required"` and `"all"`; the last input needs to specify the input name as `value=`, and the input value is the value of the attribute, for example, `value=axis` and `value=keep_dim` here. @@ -594,7 +594,7 @@ Custom operators of the AOT-type support multiple outputs (outputs as tuples). 
T - Operator registration file: The names and data type information of multiple outputs need to be listed; - Operator computation function: It needs to identify the pointers corresponding to multiple outputs. -Below, we demonstrate the method of defining a custom operator of the AOT-type with multiple outputs using an example. For specific file usage, please refer to [here](https://gitee.com/mindspore/mindspore/blob/master/tests/st/graph_kernel/custom/test_custom_aot.py#L405). +Below, we demonstrate the method of defining a custom operator of the AOT-type with multiple outputs using an example. For specific file usage, please refer to [here](https://gitee.com/mindspore/mindspore/blob/br_base/tests/st/graph_kernel/custom/test_custom_aot.py#L405). ### Operator Inference Function @@ -695,7 +695,7 @@ void *output2 = params[3]; void *output3 = params[4]; ``` -For the complete operator computation file, please refer to [here](https://gitee.com/mindspore/mindspore/blob/master/tests/st/graph_kernel/custom/aot_test_files/add_mul_div.cu). +For the complete operator computation file, please refer to [here](https://gitee.com/mindspore/mindspore/blob/br_base/tests/st/graph_kernel/custom/aot_test_files/add_mul_div.cu). ### Operator in Scripts diff --git a/tutorials/source_en/custom_program/operation/op_custom_ascendc.md b/tutorials/source_en/custom_program/operation/op_custom_ascendc.md index d183988abf..c64134065e 100644 --- a/tutorials/source_en/custom_program/operation/op_custom_ascendc.md +++ b/tutorials/source_en/custom_program/operation/op_custom_ascendc.md @@ -1,6 +1,6 @@ # Custom Primitive AOT-Type Custom Operators(Ascend) -[![View Source File](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/custom_program/operation/op_custom_ascendc.md) +[![View Source File](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/custom_program/operation/op_custom_ascendc.md) ## Overview @@ -10,7 +10,7 @@ Custom operators of the AOT (Ahead-Of-Time) type adopt a pre-compilation approac 2. **Offline Compilation and Deployment**: After completing the operator development, perform offline compilation to ensure that the operator can run efficiently on the Ascend AI processor and deploy it. 3. **Using Custom Operators in MindSpore**: Integrate the compiled Ascend C custom operators into the MindSpore framework to enable their use in actual AI applications. -This chapter aims to help developers fully understand and master the entire lifecycle of Ascend C custom operators, from development to deployment, and to effectively utilize them in MindSpore. For AOT custom operator development for other platforms, refer to [AOT type custom operator (CPU/GPU platforms)](https://www.mindspore.cn/tutorials/en/master/custom_program/operation/op_custom_aot.html). +This chapter aims to help developers fully understand and master the entire lifecycle of Ascend C custom operators, from development to deployment, and to effectively utilize them in MindSpore. For AOT custom operator development for other platforms, refer to [AOT type custom operator (CPU/GPU platforms)](https://www.mindspore.cn/tutorials/en/br_base/custom_program/operation/op_custom_aot.html). 
## Custom Operator Development @@ -85,7 +85,7 @@ If you have already completed the compilation and deployment of the custom opera ## Using Custom Operators in MindSpore -MindSpore's custom operator interface is [ops.Custom](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Custom.html). Detailed interface instructions can be found at [ops.Custom](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Custom.html). This article focuses on how to use [ops.Custom](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Custom.html) to access Ascend C custom operators. +MindSpore's custom operator interface is [ops.Custom](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Custom.html). Detailed interface instructions can be found at [ops.Custom](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Custom.html). This article focuses on how to use [ops.Custom](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Custom.html) to access Ascend C custom operators. ### Environment Preparation @@ -99,12 +99,12 @@ ops.Custom(func, bprop=None, out_dtype=None, func_type='aot', out_shape=None, re - `func`(str): Name of the custom operator. - `out_shape`(Union[function, list, tuple]):Output shape or shape inference function. Default value: `None`. -- `out_dtype` (Union[function, [mindspore.dtype](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.dtype.html#mindspore.dtype), list, tuple]):Output type or type inference function. Default value: `None`. +- `out_dtype` (Union[function, [mindspore.dtype](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.dtype.html#mindspore.dtype), list, tuple]):Output type or type inference function. Default value: `None`. - `func_type`(str):Function type of the custom operator. For Ascend C custom operators, specify `func_type="aot"`. - `bprop`(function):Backpropagation function for the custom operator. Default value: `None`. - `reg_info`(Union[str, dict, list, tuple]):Registration information for the custom operator. Default value: `None`. Ascend C custom operators do not need to pass this parameter and can use the default value. -**Scenario Limitations**: Currently, dynamic graphs and static graphs in GE backend only support input and output of Tensor types. Static graphs in O0/O1 modes have no type restrictions. For dynamic graph scenarios with Ascend C custom operators, it is recommended to use [CustomOpBuilder-Based Custom Operators](https://www.mindspore.cn/tutorials/en/master/custom_program/operation/op_customopbuilder.html). +**Scenario Limitations**: Currently, dynamic graphs and static graphs in GE backend only support input and output of Tensor types. Static graphs in O0/O1 modes have no type restrictions. For dynamic graph scenarios with Ascend C custom operators, it is recommended to use [CustomOpBuilder-Based Custom Operators](https://www.mindspore.cn/tutorials/en/br_base/custom_program/operation/op_customopbuilder.html). ### Simple Example @@ -145,7 +145,7 @@ assert output.asnumpy().dtype == 'float32' assert output.asnumpy().shape == (1280, 1280) ``` -You can view the [custom operator test cases](https://gitee.com/mindspore/mindspore/tree/master/tests/st/graph_kernel/custom/custom_ascendc) in the MindSpore repository to obtain Ascend C custom operator test cases for more data types and usage scenarios. 
The sample project directory structure is as follows: +You can view the [custom operator test cases](https://gitee.com/mindspore/mindspore/tree/br_base/tests/st/graph_kernel/custom/custom_ascendc) in the MindSpore repository to obtain Ascend C custom operator test cases for more data types and usage scenarios. The sample project directory structure is as follows: ```text . @@ -345,7 +345,7 @@ Here, the function name `FuncName` is the operator name. For single-output, the - ndims (int \*): Array of input shape dimensions. - shapes (int64_t \*\*): Array of input shapes. -- extra (AotExtra \*): Used for extending custom operators with attributes. The `AotExtra` type is defined in the MindSpore-provided header file [custom_aot_extra.h](https://gitee.com/mindspore/mindspore/blob/master/tests/st/graph_kernel/custom/aot_test_files/custom_aot_extra.h). +- extra (AotExtra \*): Used for extending custom operators with attributes. The `AotExtra` type is defined in the MindSpore-provided header file [custom_aot_extra.h](https://gitee.com/mindspore/mindspore/blob/br_base/tests/st/graph_kernel/custom/aot_test_files/custom_aot_extra.h). **Infer Type Function Prototype** diff --git a/tutorials/source_en/custom_program/operation/op_custom_prim.rst b/tutorials/source_en/custom_program/operation/op_custom_prim.rst index 62db5b8525..a7abdee92b 100644 --- a/tutorials/source_en/custom_program/operation/op_custom_prim.rst +++ b/tutorials/source_en/custom_program/operation/op_custom_prim.rst @@ -1,11 +1,11 @@ Custom Primitive-Based Custom Operators ======================================== -.. image:: https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg - :target: https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/custom_program/operation/op_custom_prim.rst +.. image:: https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg + :target: https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/custom_program/operation/op_custom_prim.rst :alt: View Source On Gitee -When built-in operators cannot meet requirements during network development, you can call the Python API `Custom `_ primitive defined in MindSpore to quickly create different types of custom operators for use. +When built-in operators cannot meet requirements during network development, you can call the Python API `Custom `_ primitive defined in MindSpore to quickly create different types of custom operators for use. Traditional methods to add a custom operator need three steps: registering the operator primitive, implementing the operator, and registering the operator information. @@ -23,7 +23,7 @@ Compared with traditional custom operator creating methods, creating custom oper Custom operator classification and adaptation scenarios ----------------------------------------------------------- -The operator development methods supported by custom operator based on the `Custom `_ primitive include: aot, pyfunc. +The operator development methods supported by custom operator based on the `Custom `_ primitive include: aot, pyfunc. 
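For a quick sense of the lighter-weight pyfunc path, a custom operator can be defined directly from a NumPy function, as in the minimal sketch below; the function body and names are illustrative, and the tutorial's own pyfunc example follows in the example section further down:

```python
import numpy as np
import mindspore as ms
from mindspore import ops

ms.set_context(device_target="CPU")  # pyfunc-type custom operators run on the CPU backend

def sin_by_numpy(x):
    # illustrative implementation: element-wise sine computed with NumPy
    return np.sin(x)

# The output shape and dtype equal those of the input, so identity inference functions suffice.
sin_op = ops.Custom(sin_by_numpy, out_shape=lambda x: x, out_dtype=lambda x: x, func_type="pyfunc")

x = ms.Tensor(np.array([0.0, 0.5, 1.0], np.float32))
print(sin_op(x))
```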
The difference between these operator development methods are as follows: @@ -39,7 +39,7 @@ The difference between these operator development methods are as follows: - Python - `CPU` - Fast algorithm verification scenarios - * - `aot `_ + * - `aot `_ - Ascend C/CUDA/C++ - `Ascend` `GPU` `CPU` - high-performance scenarios @@ -54,11 +54,11 @@ Different custom operator defining methods use different development languages t To help you better use custom operators, we have used [the pyfunc-type custom operator](#an-example-of-custom-operators) as an example of a custom operator. In addition, we provide tutorials for other custom operators including: -- AOT-type custom op on `Ascend backend `_ and `GPU/CPU backend `_ ; -- `Advanced usage of custom operators `_ : registering the operator information and defining the backward functions for operators. +- AOT-type custom op on `Ascend backend `_ and `GPU/CPU backend `_ ; +- `Advanced usage of custom operators `_ : registering the operator information and defining the backward functions for operators. .. note:: - More examples can be found in the MindSpore source code `tests/st/graph_kernel/custom `_ . + More examples can be found in the MindSpore source code `tests/st/graph_kernel/custom `_ . An Example of Custom Operators -------------------------------- @@ -147,4 +147,4 @@ Then we have the following results as sin values of above inputs. [0. 0.841471 0.19866933 0.29552022 0.38941833] -Then we have completed the definition of a custom operator of type pyfunc. For more complete examples of pyfunc-type custom operators, see the `use cases `_ in the MindSpore source code. +Then we have completed the definition of a custom operator of type pyfunc. For more complete examples of pyfunc-type custom operators, see the `use cases `_ in the MindSpore source code. diff --git a/tutorials/source_en/custom_program/operation/op_customopbuilder.md b/tutorials/source_en/custom_program/operation/op_customopbuilder.md index 1f7bd0887e..ae59538f31 100644 --- a/tutorials/source_en/custom_program/operation/op_customopbuilder.md +++ b/tutorials/source_en/custom_program/operation/op_customopbuilder.md @@ -1,14 +1,14 @@ # CustomOpBuilder-Based Custom Operators -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/custom_program/operation/op_customopbuilder.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/custom_program/operation/op_customopbuilder.md) ## Overview In dynamic graph mode, network workflows are easier to debug, supporting operations like single-operator execution, normal functions/networks, and standalone gradient computations. -While [Custom Primitive-Based Custom Operators](https://www.mindspore.cn/tutorials/en/master/custom_program/op_custom.html) support both static and dynamic graphs, they require extensive definitions. To simplify the integration of custom operators in dynamic graphs, MindSpore has introduced a new Python API, [CustomOpBuilder](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.CustomOpBuilder.html), which not only improves usability but also enhances the execution performance of custom operators in dynamic graphs. 
+While [Custom Primitive-Based Custom Operators](https://www.mindspore.cn/tutorials/en/br_base/custom_program/op_custom.html) support both static and dynamic graphs, they require extensive definitions. To simplify the integration of custom operators in dynamic graphs, MindSpore has introduced a new Python API, [CustomOpBuilder](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.CustomOpBuilder.html), which not only improves usability but also enhances the execution performance of custom operators in dynamic graphs. -When developing operators using [C++ Interface](https://www.mindspore.cn/tutorials/en/master/custom_program/operation/cpp_api_for_custom_ops.html), users need to define the operator function body, including deriving and constructing output tensors, calling and executing device operators, and more. Once the function body is defined, the [pybind11](https://github.com/pybind/pybind11) component can be used to register C++ functions as Python module interfaces. +When developing operators using [C++ Interface](https://www.mindspore.cn/tutorials/en/br_base/custom_program/operation/cpp_api_for_custom_ops.html), users need to define the operator function body, including deriving and constructing output tensors, calling and executing device operators, and more. Once the function body is defined, the [pybind11](https://github.com/pybind/pybind11) component can be used to register C++ functions as Python module interfaces. ## Introduction to Dynamic Graph Operator Execution Process @@ -27,7 +27,7 @@ As shown in the figure, the operator execution process in MindSpore's dynamic gr ## Custom Operators Support Multi-Stage Pipeline through PyboostRunner -The dynamic graph multi-stage pipeline involves a complex invocation process with many interfaces and data structures. To simplify the integration of custom operators into dynamic graphs, MindSpore encapsulates the [PyboostRunner class](https://www.mindspore.cn/tutorials/en/master/custom_program/operation/cpp_api_for_custom_ops.html#class-pyboostrunner). +The dynamic graph multi-stage pipeline involves a complex invocation process with many interfaces and data structures. To simplify the integration of custom operators into dynamic graphs, MindSpore encapsulates the [PyboostRunner class](https://www.mindspore.cn/tutorials/en/br_base/custom_program/operation/cpp_api_for_custom_ops.html#class-pyboostrunner). Below is an example demonstrating the integration process of custom operators into a dynamic graph: @@ -214,5 +214,5 @@ Running the above script produces the following result: ## More Usage Scenarios -- [Integrating ATB Operators Using AtbOpRunner](https://www.mindspore.cn/tutorials/en/master/custom_program/operation/op_customopbuilder_atb.html): Introduces methods for quickly integrating ATB operators as custom operators. -- [Integrating ASDSIP FFT Operators Using AsdSipFFTOpRunner](https://www.mindspore.cn/tutorials/zh-CN/master/custom_program/operation/op_customopbuilder_asdsip.html): Introduces methods for quickly integrating ASDSIP FFT operators as custom operators. +- [Integrating ATB Operators Using AtbOpRunner](https://www.mindspore.cn/tutorials/en/br_base/custom_program/operation/op_customopbuilder_atb.html): Introduces methods for quickly integrating ATB operators as custom operators. 
+- [Integrating ASDSIP FFT Operators Using AsdSipFFTOpRunner](https://www.mindspore.cn/tutorials/zh-CN/br_base/custom_program/operation/op_customopbuilder_asdsip.html): Introduces methods for quickly integrating ASDSIP FFT operators as custom operators. diff --git a/tutorials/source_en/custom_program/operation/op_customopbuilder_asdsip.md b/tutorials/source_en/custom_program/operation/op_customopbuilder_asdsip.md index 2d14959aab..4dd77d5a0a 100644 --- a/tutorials/source_en/custom_program/operation/op_customopbuilder_asdsip.md +++ b/tutorials/source_en/custom_program/operation/op_customopbuilder_asdsip.md @@ -1,6 +1,6 @@ # CustomOpBuilder: Integrating ASDSIP FFT Operators Using AsdSipFFTOpRunner -[![View Source File](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/custom_program/operation/op_customopbuilder.md) +[![View Source File](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/custom_program/operation/op_customopbuilder.md) ## Overview @@ -8,11 +8,11 @@ When users need to use operators from the ASDSIP acceleration library that are not provided by MindSpore, they can quickly integrate and use them through custom operators. -In [Custom Operators Based on CustomOpBuilder](https://www.mindspore.cn/tutorials/en/master/custom_program/operation/op_customopbuilder.html), MindSpore provides the `PyboostRunner` tool to allow users to integrate custom operators in dynamic graphs. Now, for ASDSIP FFT operators, MindSpore additionally provides the `AsdSipFFTOpRunner` tool to encapsulate the ASDSIP FFT operator's workflow and the dynamic graph's multi-stage pipeline. +In [Custom Operators Based on CustomOpBuilder](https://www.mindspore.cn/tutorials/en/br_base/custom_program/operation/op_customopbuilder.html), MindSpore provides the `PyboostRunner` tool to allow users to integrate custom operators in dynamic graphs. Now, for ASDSIP FFT operators, MindSpore additionally provides the `AsdSipFFTOpRunner` tool to encapsulate the ASDSIP FFT operator's workflow and the dynamic graph's multi-stage pipeline. -When integrating ASDSIP FFT operators using the [AsdSipFFTOpRunner class](https://www.mindspore.cn/tutorials/en/master/custom_program/operation/cpp_api_for_custom_ops.html#class-asdsipfftoprunner), users only need to provide a `Param` (used as the key for caching `Operation`) and call the `Init` interface for initialization (constructing `Operation`), followed by the `Run` interface to execute the ASDSIP FFT operator. Additionally, users can directly call the [RunAsdSipFFTOp](https://www.mindspore.cn/tutorials/en/master/custom_program/operation/cpp_api_for_custom_ops.html#function-launchasdsipfft) function for one-click execution (the function internally includes calls to both `Init` and `Run` interfaces). +When integrating ASDSIP FFT operators using the [AsdSipFFTOpRunner class](https://www.mindspore.cn/tutorials/en/br_base/custom_program/operation/cpp_api_for_custom_ops.html#class-asdsipfftoprunner), users only need to provide a `Param` (used as the key for caching `Operation`) and call the `Init` interface for initialization (constructing `Operation`), followed by the `Run` interface to execute the ASDSIP FFT operator. 
Additionally, users can directly call the [RunAsdSipFFTOp](https://www.mindspore.cn/tutorials/en/br_base/custom_program/operation/cpp_api_for_custom_ops.html#function-launchasdsipfft) function for one-click execution (the function internally includes calls to both `Init` and `Run` interfaces). -This guide uses `FftC2C` as an example to demonstrate the ASDSIP FFT operator integration process. The complete code can be found in the [code repository](https://gitee.com/mindspore/mindspore/blob/master/tests/st/graph_kernel/custom/jit_test_files/asdsip_fftc2c.cpp). +This guide uses `FftC2C` as an example to demonstrate the ASDSIP FFT operator integration process. The complete code can be found in the [code repository](https://gitee.com/mindspore/mindspore/blob/br_base/tests/st/graph_kernel/custom/jit_test_files/asdsip_fftc2c.cpp). ## Installing the ASDSIP Acceleration Library diff --git a/tutorials/source_en/custom_program/operation/op_customopbuilder_atb.md b/tutorials/source_en/custom_program/operation/op_customopbuilder_atb.md index 2026436a49..daecb3a633 100644 --- a/tutorials/source_en/custom_program/operation/op_customopbuilder_atb.md +++ b/tutorials/source_en/custom_program/operation/op_customopbuilder_atb.md @@ -1,6 +1,6 @@ # CustomOpBuilder: Integrating ATB Operators Using AtbOpRunner -[![View Source File](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/custom_program/operation/op_customopbuilder_atb.md) +[![View Source File](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/custom_program/operation/op_customopbuilder_atb.md) ## Overview @@ -8,13 +8,13 @@ When users need to use operators from the ATB acceleration library that are not provided by MindSpore, they can quickly integrate and use them through custom operators. -In [Custom Operators Based on CustomOpBuilder](https://www.mindspore.cn/tutorials/en/master/custom_program/operation/op_customopbuilder.html), MindSpore provides the `PyboostRunner` tool to allow users to integrate custom operators in dynamic graphs. Now, for ATB operators, MindSpore additionally provides the `AtbOpRunner` tool to encapsulate the ATB operator's workflow and the dynamic graph's multi-stage pipeline. +In [Custom Operators Based on CustomOpBuilder](https://www.mindspore.cn/tutorials/en/br_base/custom_program/operation/op_customopbuilder.html), MindSpore provides the `PyboostRunner` tool to allow users to integrate custom operators in dynamic graphs. Now, for ATB operators, MindSpore additionally provides the `AtbOpRunner` tool to encapsulate the ATB operator's workflow and the dynamic graph's multi-stage pipeline. In the complete [ATB operator workflow](https://www.hiascend.com/document/detail/zh/canncommercial/81RC1/developmentguide/acce/ascendtb/ascendtb_0037.html), users need to execute steps such as constructing `Param`, creating `Operation` and `Context`, setting `variantPack` (operator input-output tensors), calling `Setup`, calling `Execute`, and destroying `Context` and `Operation`. However, for a single operator, its `Operation` only depends on operator attributes (`Param`), and its `Context` only depends on the stream, both of which can be reused. 
Therefore, MindSpore provides a cache to store these data structures, avoiding unnecessary time consumption caused by repeated creation and destruction. -When integrating ATB operators using the [AtbOpRunner class](https://www.mindspore.cn/tutorials/en/master/custom_program/operation/cpp_api_for_custom_ops.html#class-atboprunner), users only need to provide a corresponding hash function for `Param` (used as the key for caching `Operation`) and call the `Init` interface for initialization (constructing `Operation`), followed by the `Run` interface to execute the ATB operator. Additionally, users can directly call the [RunAtbOp](https://www.mindspore.cn/tutorials/en/master/custom_program/operation/cpp_api_for_custom_ops.html#function-runatbop) function for one-click execution (the function internally includes calls to both `Init` and `Run` interfaces). +When integrating ATB operators using the [AtbOpRunner class](https://www.mindspore.cn/tutorials/en/br_base/custom_program/operation/cpp_api_for_custom_ops.html#class-atboprunner), users only need to provide a corresponding hash function for `Param` (used as the key for caching `Operation`) and call the `Init` interface for initialization (constructing `Operation`), followed by the `Run` interface to execute the ATB operator. Additionally, users can directly call the [RunAtbOp](https://www.mindspore.cn/tutorials/en/br_base/custom_program/operation/cpp_api_for_custom_ops.html#function-runatbop) function for one-click execution (the function internally includes calls to both `Init` and `Run` interfaces). -This guide uses `SwiGLU` as an example to demonstrate the ATB operator integration process. The complete code can be found in the [code repository](https://gitee.com/mindspore/mindspore/blob/master/tests/st/graph_kernel/custom/jit_test_files/atb_swiglu.cpp). +This guide uses `SwiGLU` as an example to demonstrate the ATB operator integration process. The complete code can be found in the [code repository](https://gitee.com/mindspore/mindspore/blob/br_base/tests/st/graph_kernel/custom/jit_test_files/atb_swiglu.cpp). ## Installing the ATB Acceleration Library diff --git a/tutorials/source_en/cv/fcn8s.md b/tutorials/source_en/cv/fcn8s.md index dcb98c22dd..03953b4598 100644 --- a/tutorials/source_en/cv/fcn8s.md +++ b/tutorials/source_en/cv/fcn8s.md @@ -1,4 +1,4 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/cv/fcn8s.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/cv/fcn8s.md) # FCN for Image Semantic Segmentation @@ -6,7 +6,7 @@ Fully convolutional network (FCN) is a framework for image semantic segmentation FCN is the first end-to-end network for pixel-level prediction. -![fcn-1](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/fcn_1.png) +![fcn-1](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/fcn_1.png) ## Semantic Segmentation @@ -16,7 +16,7 @@ Semantic segmentation is an important part of image understanding in image proce A purpose of semantic segmentation is to classify each pixel in an image. 
Different from a common classification task that outputs only a class, the semantic segmentation task outputs an image whose size is the same as that of the input, and each pixel of the output image corresponds to a class of each pixel of the input image. In the image field, semantics refers to the content of an image. The following figure shows some semantic segmentation instances. -![fcn-2](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/fcn_2.png) +![fcn-2](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/fcn_2.png) ## Model Introduction @@ -28,19 +28,19 @@ The fully convolutional neural network mainly uses the following three technolog VGG-16 is used as the FCN backbone. The input of VGG-16 is a 224 x 224 RGB image, and the output is 1000 prediction values. VGG-16 accepts only fixed-size input, discards spatial coordinates, and generates non-spatial output. There are three fully-connected layers in total in the VGG-16, and the fully-connected layers may also be considered as convolutions covering an entire region. Converting a fully-connected layer into a convolutional layer can change a network output from a one-dimensional non-spatial output to a two-dimensional matrix, and generate a heatmap mapped to an input image by using the output. - ![fcn-3](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/fcn_3.png) + ![fcn-3](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/fcn_3.png) 2. Upsampling A convolution operation and a pooling operation in a convolution process reduce the size of a feature map. To obtain dense image prediction of the size of an original image, an upsampling operation needs to be performed on the obtained feature map. The parameters of bilinear interpolation are used to initialize the parameters of upsampling inverse convolution, and then the nonlinear upsampling is learned through backpropagation. Upsampling is performed in the network for end-to-end learning through backpropagation of pixel loss. - ![fcn-4](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/fcn_4.png) + ![fcn-4](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/fcn_4.png) 3. Skip layer Upsampling is performed on the feature map of the last layer to obtain the segmentation of the original image size. The segmentation is a prediction with a step of 32 pixels, which is called FCN-32s. Because the feature map at the last layer is too small and too many details are lost, the skips structure is used to combine the prediction at the last layer with the prediction at the shallower layer. Then, the prediction result can obtain more local details. The 2x upsampling is performed on the prediction (FCN-32s) of the bottom layer (stride 32) to obtain an image of the original size, and the image is fused (added) with the prediction performed from the pool 4 layer (stride 16). This part of network is called FCN-16s. Then, the 2x upsampling is performed on this part of prediction again and fused with the prediction obtained from the pool 3 layer. This part of network is called FCN-8s. The skips structure combines deep global information with shallow local information. 
- ![fcn-5](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/fcn_5.png) + ![fcn-5](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/fcn_5.png) ## Network Features @@ -206,7 +206,7 @@ The following figure shows the FCN process. 6. FCN-16s deconvolutes the output of conv 7 to double the size of the original image to 1/16 of the original image, fuses the output with the feature map output by pool 4, and then expands the output to the original size through deconvolution. 7. FCN-8s deconvolutes the output of conv 7 to increase the size by four times, deconvolutes the feature map output by pool 4 to increase the size by two times, and takes out the feature map output by pool 3. After the three are fused, the size is increased to the original size through deconvolution. -![fcn-6](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/fcn_6.png) +![fcn-6](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/fcn_6.png) Use the following code to build an FCN-8s network. diff --git a/tutorials/source_en/cv/resnet50.md b/tutorials/source_en/cv/resnet50.md index fd73202ef2..8e1f4b8f02 100644 --- a/tutorials/source_en/cv/resnet50.md +++ b/tutorials/source_en/cv/resnet50.md @@ -1,4 +1,4 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/cv/resnet50.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/cv/resnet50.md) # ResNet-50 for Image Classification @@ -8,11 +8,11 @@ Image classification is the most basic computer vision application and belongs t ResNet-50 was proposed by He Kaiming of Microsoft Research in 2015 and won the championship in the 2015 ILSVRC. Before ResNet was proposed, a convolutional neural network was obtained by stacking a series of convolutional layers and pooling layers. However, when the network was stacked to a specific depth, a degradation problem occurred. The following figures show the training error and test error of a 56-layer network and a 20-layer network on the CIFAR-10 dataset. The data in the figures shows that the training error and test error of the 56-layer network are greater than those of the 20-layer network. As the network depth increases, the errors do not decrease as expected. -![resnet-1](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/resnet_1.png) +![resnet-1](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/resnet_1.png) In ResNet, a residual network is proposed to alleviate the degradation problem, and a relatively deep network (with more than 1,000 layers) can be built by using ResNet. The following figure shows the training error and test error of ResNet on the CIFAR-10 dataset. In the figure, the dotted lines indicate the training errors, and the solid lines indicate the test errors. As shown in the figure, a deeper ResNet indicates a smaller training error and a smaller test error. 
-![resnet-4](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/resnet_4.png) +![resnet-4](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/resnet_4.png) > For more details about ResNet, see [*Deep Residual Learning for Image Recognition*](https://arxiv.org/pdf/1512.03385.pdf). @@ -47,7 +47,7 @@ datasets-cifar10-bin/cifar-10-batches-bin ``` -Then, the [mindspore.dataset.Cifar10Dataset](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.Cifar10Dataset.html) interface is used to load the dataset and perform the associated image transforms. +Then, the [mindspore.dataset.Cifar10Dataset](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.Cifar10Dataset.html) interface is used to load the dataset and perform the associated image transforms. ```python import mindspore as ms @@ -171,7 +171,7 @@ The residual network is a main highlight of ResNet, with which the degradation p The following figure shows the structure of a residual network. The residual network consists of two parts: main body and a shortcut (see the arc in the figure). The main body is obtained by stacking a series of convolution operations. The shortcut is directly from input to output. $F(x)+x$ is obtained by adding the feature matrix $F(x)$ output by the main body to the feature matrix $x$ output by the shortcut. After the ReLU activation function is used, the final output of the residual network is obtained. -![residual](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/resnet_3.png) +![residual](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/resnet_3.png) There are two residual network structures. One is the building block, which is applicable to shallow ResNet, such as ResNet-18 and ResNet-34. The other is the bottleneck, which is applicable to deep ResNet, such as ResNet-50, ResNet-101, and ResNet-152. @@ -184,7 +184,7 @@ The following figure shows the structure of the building block. The main body ha Finally, the feature matrix output by the main body is added to the feature matrix output by the shortcut. After the ReLU activation function is used, the final output of the building block is obtained. -![building-block-5](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/resnet_5.png) +![building-block-5](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/resnet_5.png) When adding the feature matrix output by the main body to that output by the shortcut, ensure that the shape of the feature matrix output by the main body is the same as that of the feature matrix output by the shortcut. If the shapes are different, for example, when the number of output channels is twice that of input channels, the number of convolution kernels used by the shortcut for convolution operations is the same as that of the output channels and the size is $1\times1$. If the size of the output image is half of that of the input image, `stride` in the convolution operation of the shortcut must be set to 2, and `stride` in the first-layer convolution operation of the main body must also be set to 2. @@ -247,7 +247,7 @@ The following figure shows the bottleneck structure. 
With the same input, the bo Finally, the feature matrix output by the main body is added to that output by the shortcut. After the ReLU activation function is used, the final output of the bottleneck is obtained. -![building-block-6](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/resnet_6.png) +![building-block-6](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/resnet_6.png) When adding the feature matrix output by the main body to that output by the shortcut, ensure that the shape of the feature matrix output by the main body is the same as that of the feature matrix output by the shortcut. If the shapes are different, for example, when the number of output channels is twice that of input channels, the number of convolution kernels used by the shortcut for convolution operations is the same as that of the output channels and the size is $1\times1$. If the size of the output image is half of that of the input image, `stride` in the convolution operation of the shortcut must be set to 2, and `stride` in the second-layer convolution operation of the main body must also be set to 2. @@ -301,7 +301,7 @@ class ResidualBlock(nn.Cell): The following figure shows the structure of ResNet. Take the input color image $224\times224$ as an example. 64 conv1 whose size is $7\times7$ and whose stride is 2 are used. The output image size at this layer is $112\times112$, and the number of the output channels is 64. Then, a maximum downsampling pooling layer with a size of $3\times3$ is used. The output image size at this layer is $56\times56$, and the number of output channels is 64. Four residual network blocks (conv2_x, conv3_x, conv4_x, and conv5_x) are stacked. In this case, the size of the output image is $7\times7$, and the number of the output channels is 2048. Finally, the classification probability is obtained through an average pooling layer, a fully-connected layer, and softmax. -![resnet-layer](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/resnet_2.png) +![resnet-layer](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/resnet_2.png) For each residual network block, conv2_x in ResNet-50 is used as an example. The residual network block is formed by stacking three bottleneck structures, and each bottleneck structure has 64 input channels and 256 output channels. diff --git a/tutorials/source_en/cv/ssd.md b/tutorials/source_en/cv/ssd.md index 082667c434..60403d1bbb 100644 --- a/tutorials/source_en/cv/ssd.md +++ b/tutorials/source_en/cv/ssd.md @@ -1,4 +1,4 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/cv/ssd.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/cv/ssd.md) # SSD for Object Detection @@ -17,13 +17,13 @@ Mainstream SSD object detection algorithms are classified into the following typ SSD is the one-stage object detection algorithm. Feature extraction is performed by using a convolutional neural network, and different feature layers are used for detection output. 
Therefore, the SSD is a multi-scale detection method. At the feature layer to be detected, a 3 $\times$ 3 convolution is directly used to transform the channel. SSD uses the anchor policy, and anchors with different length-width ratios are preset. Each output feature layer predicts a plurality of detection boxes (4 or 6) based on the anchor. A multi-scale detection method is used. The shallow layer is used to detect small objects, and the deep layer is used to detect large objects. The following figure shows the SSD framework. -![SSD-1](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/SSD_1.png) +![SSD-1](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/SSD_1.png) ### Model Structure The SSD uses VGG-16 as a basic model, and then adds a convolutional layer based on VGG-16 to obtain more feature maps for detection. The following figure shows the SSD network structure. The upper part is the SSD model, and the lower part is the YOLO model. It can be seen that the SSD uses a multi-scale feature map for detection. -![SSD-2](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/SSD_2.jpg) +![SSD-2](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/SSD_2.jpg)
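To make the multi-scale design concrete, the sketch below tallies the default boxes produced by the six detection feature maps; the 38 x 38 size of the first map and the per-location box counts follow the standard SSD300 configuration and are stated here as assumptions rather than values taken from this patch:

```python
# Illustrative tally of SSD300 default boxes across the six detection feature maps.
feature_map_sizes = [38, 19, 10, 5, 3, 1]   # assumed spatial sizes of the detection maps
boxes_per_cell = [4, 6, 6, 6, 4, 4]         # PriorBoxes predicted at each feature-map location
total_boxes = sum(s * s * b for s, b in zip(feature_map_sizes, boxes_per_cell))
print(total_boxes)  # 8732 default boxes in total
```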
Comparison of two one-stage object detection algorithms:
@@ -328,7 +328,7 @@ def create_ssd_dataset(mindrecord_file, batch_size=32, device_num=1, rank=0, The SSD network structure consists of the following parts: -![SSD-3](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/SSD_3.jpg) +![SSD-3](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/SSD_3.jpg) - VGG16 Base Layer @@ -342,7 +342,7 @@ The SSD network structure consists of the following parts: ### Backbone Layer -![SSD-4](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_en/cv/images/SSD_4.png) +![SSD-4](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_en/cv/images/SSD_4.png) After being preprocessed, the size of the input image is fixed at 300 x 300. The image passes through the backbone first. In this case, the first 13 convolutional layers of the VGG-16 network are used. Then, the fully-connected layers fc6 and fc7 of VGG-16 are respectively converted into 3 $\times$ 3 convolutional layer block 6 and 1 $\times$ 1 convolutional layer block 7, and features are further extracted. In block 6, a dilated convolution with 6 dilations is used, and padding of the dilated convolution is also 6. This is to increase a receptive field and keep the parameter quantity and the feature map size unchanged. @@ -350,7 +350,7 @@ After being preprocessed, the size of the input image is fixed at 300 x 300. The On the basis of VGG-16, the SSD further adds four deep convolutional layers to extract higher-layer semantic information: -![SSD-5](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/SSD_5.png) +![SSD-5](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/SSD_5.png) Blocks 8 to 11 are used to extract higher semantic information. The number of channels in block 8 is 512, and the number of channels in block 9, block 10, and block 11 is 256. From block 7 to block 11, sizes of the five convolutional output feature maps are 19 x 19, 10 x 10, 5 x 5, 3 x 3, and 1 x 1 in sequence. To reduce the number of parameters, 1 x 1 convolution is used to reduce the number of channels to half of the number of output channels at this layer, and then 3 x 3 convolution is used for feature extraction. @@ -360,25 +360,25 @@ The SSD uses the PriorBox to generate regions. The PriorBox with a fixed width a PriorBox generation rule: The SSD uses six feature layers to detect objects. At different feature layers, the scale of the PriorBox is different. The scale of the lowest layer is 0.1, and the scale of the highest layer is 0.95. The calculation formulas for other layers are as follows: -![SSD-6](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/SSD_6.jpg) +![SSD-6](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/SSD_6.jpg) If the scale of a feature layer is fixed, PriorBox with different aspect ratios is set. 
The length and width of PriorBox are calculated as follows: -![SSD-7](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/SSD_7.jpg) +![SSD-7](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/SSD_7.jpg) When ratio is 1, a PriorBox (length-width ratio=1) of a specific scale is calculated based on the feature layer and the next feature layer. The calculation formula is as follows: -![SSD-8](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/SSD_8.jpg) +![SSD-8](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/SSD_8.jpg) PriorBox is generated for each point at each feature layer based on the preceding rules. (cx,cy) is determined by the current center point. Therefore, a large number of dense PriorBoxes are generated at each feature layer, as shown in the following figure. -![SSD-9](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/SSD_9.png) +![SSD-9](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/SSD_9.png) The SSD uses feature maps obtained by using six convolutional layers: the fourth, the seventh, the eighth, the ninth, the tenth, and the eleventh layers. Sizes of the six feature maps become smaller, and receptive fields corresponding to the six feature maps become larger. Each point on the six feature maps corresponds to 4, 6, 6, 6, 4, and 4 PriorBoxes respectively. Coordinates of a point in a feature map may be obtained based on a downsampling rate in the original image. Four or six PriorBoxes of different sizes are generated by using the coordinates as a center. Then, a prediction amount of a class and a location corresponding to each PriorBox is predicted by using a feature of the feature map. For example, the size of the feature map obtained by the eighth convolutional layer is 10 x 10 x 512, each point corresponds to six PriorBoxes, and there are 600 PriorBoxes in total. The MultiBox class is defined to generate multiple prediction boxes. ### Detection Layer -![SSD-10](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_en/cv/images/SSD_10.jpg) +![SSD-10](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_en/cv/images/SSD_10.jpg) The SSD model has six prediction feature maps in total. For a prediction feature map whose size is m\*n and channel is p, it is assumed that each pixel of the prediction feature map generates k anchors, and each anchor corresponds to c classes and four regression offsets. A convolution operation is performed on the prediction feature map by using (4+c)k convolution kernels whose sizes are 3x3 and channel is p, to obtain an output feature map whose sizes are m\*n and channel is (4+c)m\*k. It contains the regression offset and probability scores of each anchor generated on the prediction feature map. Therefore, for a prediction feature map whose size is m\*n, a total of (4+c)k\*m\*n results are generated. The number of output channels of the cls branch is k\*class_num, and the number of output channels of the loc branch is k\*4. 
@@ -574,7 +574,7 @@ class SSD300Vgg16(nn.Cell): The objective function of the SSD algorithm is divided into two parts: a confidence loss (conf) between the matched prior boxes and the target categories, and the corresponding location loss (loc): -![SSD-11](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/SSD_11.jpg) +![SSD-11](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/SSD_11.jpg) In the preceding information:
N indicates the number of positive samples (matched prior boxes).
@@ -587,13 +587,13 @@ g indicates the location parameter of the ground truth.
Smooth L1 Loss is used for all positive samples. The location information is encoded. -![SSD-12](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/SSD_12.jpg) +![SSD-12](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/SSD_12.jpg) ### Confidence Loss Function The confidence loss is the softmax loss on multi-class confidence (c). -![SSD-13](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_en/cv/images/SSD_13.jpg) +![SSD-13](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_en/cv/images/SSD_13.jpg) ```python def class_loss(logits, label): @@ -813,7 +813,7 @@ Notes: 3. If the IoUs of multiple ground truths and a prior box are all greater than the threshold, the prior box is matched only with the largest IoU. -![SSD-14](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_en/cv/images/SSD_14.jpg) +![SSD-14](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_en/cv/images/SSD_14.jpg) As shown in the preceding figure, the basic idea of matching prior boxes and ground truth boxes during training is as follows: Each prior box is regressed to the ground truth box. The control of this process requires the help of the loss layer. The loss layer calculates the error between the actual value and the prediction value to guide the learning direction. @@ -1072,13 +1072,13 @@ Customize the eval_net() class to evaluate the trained model and invoke the SsdI - Average precision (AP): - ![SSD-15](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/SSD_15.jpg) + ![SSD-15](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/SSD_15.jpg) The AP is the ratio of the correct prediction result of positive samples to the sum of the prediction result of the positive samples and the incorrect prediction result, and mainly reflects an error rate of a prediction result. - Average recall (AR): - ![SSD-16](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/SSD_16.jpg) + ![SSD-16](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/SSD_16.jpg) The AR is the ratio of the correct prediction result of positive samples to the sum of the correct prediction result of positive samples and the incorrect prediction result of positive samples. The AR mainly reflects the missing detection rate in the prediction result. 
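As a minimal NumPy sketch of the two loss terms described above (shapes are simplified, hard negative mining is omitted, and only positive prior boxes contribute to the location term), assuming already-encoded location targets:

```python
import numpy as np

def smooth_l1(x):
    # Smooth L1: 0.5 * x^2 when |x| < 1, |x| - 0.5 otherwise.
    abs_x = np.abs(x)
    return np.where(abs_x < 1, 0.5 * x ** 2, abs_x - 0.5)

def ssd_loss_sketch(loc_pred, loc_target, logits, labels, pos_mask):
    # Location loss: Smooth L1 over the encoded offsets of positive prior boxes only.
    loc_loss = (smooth_l1(loc_pred - loc_target).sum(axis=-1) * pos_mask).sum()
    # Confidence loss: softmax cross-entropy over the multi-class confidences.
    probs = np.exp(logits - logits.max(axis=-1, keepdims=True))
    probs /= probs.sum(axis=-1, keepdims=True)
    conf_loss = -np.log(probs[np.arange(len(labels)), labels] + 1e-12).sum()
    n_pos = max(pos_mask.sum(), 1)  # N, the number of matched prior boxes
    return (loc_loss + conf_loss) / n_pos
```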
diff --git a/tutorials/source_en/cv/transfer_learning.md b/tutorials/source_en/cv/transfer_learning.md index 0797b25f5b..8ebd121f67 100644 --- a/tutorials/source_en/cv/transfer_learning.md +++ b/tutorials/source_en/cv/transfer_learning.md @@ -1,4 +1,4 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/cv/transfer_learning.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/cv/transfer_learning.md) # ResNet50 Transfer Learning @@ -124,7 +124,7 @@ step_size_val = dataset_val.get_dataset_size() ### Dataset Visualization -The training dataset loaded from the [mindspore.dataset.ImageFolderDataset](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.ImageFolderDataset.html) interface returns a dictionary, and the user can create a data iterator by using the [create_dict_iterator](https://www.mindspore.cn/docs/en/master/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_dict_iterator.html) interface to iteratively access the dataset by using `next`. In this chapter, `batch_size` is set to 18, so use `next` to get 18 images and label data at a time. +The training dataset loaded from the [mindspore.dataset.ImageFolderDataset](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.ImageFolderDataset.html) interface returns a dictionary, and the user can create a data iterator by using the [create_dict_iterator](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_dict_iterator.html) interface to iteratively access the dataset by using `next`. In this chapter, `batch_size` is set to 18, so use `next` to get 18 images and label data at a time. ```python data = next(dataset_train.create_dict_iterator()) diff --git a/tutorials/source_en/cv/vit.md b/tutorials/source_en/cv/vit.md index 3dcf3e6e82..e34306f0f1 100644 --- a/tutorials/source_en/cv/vit.md +++ b/tutorials/source_en/cv/vit.md @@ -1,4 +1,4 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/cv/vit.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/cv/vit.md) # Vision Transformer Image Classification @@ -14,7 +14,7 @@ ViT is the convergence result of two fields: natural language processing and com The main structure of the ViT model is based on the Encoder part of the Transformer model (part of the structure order has been adjusted, e.g., the location of Normalization is different from that of the standard Transformer). 
Its structure diagram [1] is as follows: -![vit-architecture](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/vit_architecture.png) +![vit-architecture](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/vit_architecture.png) ### Model Features @@ -96,11 +96,11 @@ The following is a detailed dissection of the internal structure of the ViT mode The Transformer model originated from a 2017 article [2]. The encoder-decoder type structure based on the Attention mechanism proposed in this article has been a great success in the field of natural language processing. The model structure is shown in the following figure: -![transformer-architecture](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/transformer_architecture.png) +![transformer-architecture](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/transformer_architecture.png) Its main structure is composed of several Encoder and Decoder modules, where the detailed structure of Encoder and Decoder is shown in the following figure [2]: -![encoder-decoder](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/encoder_decoder.png) +![encoder-decoder](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/encoder_decoder.png) Encoder and Decoder consist of many structures, such as Multi-Head Attention layer, Feed Forward layer, Normalization layer, and even Residual Connection ("Add" in the figure). However, one of the most important structures is the Multi-Head Attention structure, which is based on the Self-Attention mechanism and is a parallel composition of multiple Self-Attentions. @@ -123,7 +123,7 @@ In the Self-Attention: \tag{1} $$ - ![self-attention1](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/self_attention_1.png) + ![self-attention1](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/self_attention_1.png) 2. The self-attentiveness of the self-attentive mechanism is mainly reflected by the fact that its Q, K, and V all originate from itself, that is, the process is extracting the connections and features of the input vectors of different orders, which are finally expressed by the connection closeness between the vectors of different orders (the result of the product of Q and K after Softmax). **After obtaining Q, K, V, we need to obtain the inter-vector weights, that is, to point multiple Q and K and divide by the square root of the dimension, and Softmax the results of all vectors. 
By the operation in equation (2), we obtain the relation weights between vectors.** @@ -136,11 +136,11 @@ In the Self-Attention: \tag{2} $$ - ![self-attention3](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/self_attention_3.png) + ![self-attention3](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/self_attention_3.png) $$ Softmax: \hat a_{1,i} = exp(a_{1,i}) / \sum_j exp(a_{1,j}),\hspace{1em} j = 1,2,3 \ldots \tag{3}$$ - ![self-attention2](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/self_attention_2.png) + ![self-attention2](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/self_attention_2.png) 3. The final output is obtained by weight sum of the mapped vector V with Q, K after Softmax, and the process can be understood as a global self-attentive representation. **Each set of Q, K, and V ends up with a V output, which is the final result obtained by Self-Attention, and is the result of the current vector after combining its associated weights with other vectors.** @@ -151,7 +151,7 @@ In the Self-Attention: The following diagram provides an overall grasp of the entire Self-Attention process. -![self-attention](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/self_attention_process.png) +![self-attention](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/self_attention_process.png) The multi-head attention mechanism is to split the vector originally processed by self-Attention into multiple Heads for processing, which can also be reflected in the code, which is one aspect of the attention structure that allows parallel acceleration. @@ -159,7 +159,7 @@ To summarize, the multi-head attention mechanism maps the same query, key and va Therefore, for the same input vector, multiple attention mechanisms can process it simultaneously, i.e., using parallel computing to speed up the processing process and analyzing and utilizing the vector features during the processing. The following figure shows the multi-headed attention mechanism, whose parallelism capability is mainly reflected by the fact that $a_1$ and $a_2$ are obtained by partitioning the same vector in the following figure. -![multi-head-attention](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/multi_head_attention.png) +![multi-head-attention](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/multi_head_attention.png) The following Multi-Head Attention code, combined with the explanation above, clearly shows the process. @@ -253,7 +253,7 @@ class ResidualCell(nn.Cell): Next, Self-Attention is used to construct the TransformerEncoder part in the ViT model, similar to constructing the encoder part of a Transformer, as shown in the following figure [1]: -![vit-encoder](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/vit_encoder.png) +![vit-encoder](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/vit_encoder.png) 1. 
The basic structure in the ViT model is different from that of the standard Transformer, mainly in that the position of Normalization is placed before Self-Attention and Feed Forward, while other structures such as Residual Connection, Feed Forward, and Normalization are designed as the structure in the Transformer. @@ -442,7 +442,7 @@ class ViT(nn.Cell): The overall flow diagram is shown below: -![data-process](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_en/cv/images/data_process.png) +![data-process](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_en/cv/images/data_process.png) ## Model Training and Inference @@ -558,7 +558,7 @@ Train epoch time: 95270.282 ms, per step time: 762.162 ms ### Model Validation -The model validation process mainly applies interfaces such as [ImageFolderDataset](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.ImageFolderDataset.html), CrossEntropySmooth and Model. +The model validation process mainly applies interfaces such as [ImageFolderDataset](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.ImageFolderDataset.html), CrossEntropySmooth and Model. ImageFolderDataset is mainly used to read datasets. @@ -805,7 +805,7 @@ for i, image in enumerate(dataset_infer.create_dict_iterator(output_numpy=True)) After the inference process is completed, the inference result of the picture can be found under the inference folder, and it can be seen that the prediction result is Doberman, which is the same as the expected result and verifies the accuracy of the model. -![infer-result](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/infer_result.jpg) +![infer-result](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/infer_result.jpg) ## Summary diff --git a/tutorials/source_en/dataset/augment.md b/tutorials/source_en/dataset/augment.md index 8b3e3fd892..46a3907724 100644 --- a/tutorials/source_en/dataset/augment.md +++ b/tutorials/source_en/dataset/augment.md @@ -1,6 +1,6 @@ # Auto Augmentation -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/dataset/augment.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/dataset/augment.md) MindSpore not only allows you to customize data augmentation, but also provides an auto augmentation method to automatically perform data augmentation on images based on specific policies. @@ -350,7 +350,7 @@ Users can use the `RandomSelectSubpolicy` interface of the `mindspore.dataset.vi > For a better demonstration of the effect, only 5 images are loaded here, and no `shuffle` operation is performed when reading, nor `Normalize` and `HWC2CHW` operations are performed when automatic data augmentation is performed. 
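The full example sits outside this hunk; the sketch below only illustrates how `RandomSelectSubpolicy` is typically wired into a map operation. The chosen transforms and the `dataset` variable are hypothetical stand-ins, not the ones used to produce the figure that follows:

```python
import mindspore.dataset.vision as vision

# Each inner list is one sub-policy: a sequence of (transform, probability) pairs.
# RandomSelectSubpolicy randomly picks one sub-policy per sample and applies it in order.
policy = [
    [(vision.RandomRotation((45, 45)), 0.5), (vision.RandomVerticalFlip(), 1.0)],
    [(vision.RandomColorAdjust(brightness=(0.5, 1.5)), 0.8), (vision.RandomHorizontalFlip(), 0.5)],
]
# `dataset` is assumed to be an already-loaded dataset whose "image" column holds decoded images.
dataset = dataset.map(operations=vision.RandomSelectSubpolicy(policy), input_columns=["image"])
```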
-![augment](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_en/dataset/images/auto_augmentation.png) +![augment](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_en/dataset/images/auto_augmentation.png) The running result can be seen that the augmentation effect of each image in the batch, the vertical direction represents 5 images of 1 batch, and the horizontal direction represents 5 batches. diff --git a/tutorials/source_en/dataset/cache.md b/tutorials/source_en/dataset/cache.md index 9ed6d7c3ce..edaf061e72 100644 --- a/tutorials/source_en/dataset/cache.md +++ b/tutorials/source_en/dataset/cache.md @@ -1,6 +1,6 @@ # Single-Node Data Cache -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/dataset/cache.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/dataset/cache.md) Data cache refers to caching a dataset in local memory to speed up the reading of the dataset, and is suitable for situations that require multiple accesses to a remote dataset or multiple reads of a dataset from disk. @@ -367,7 +367,7 @@ During the single-node multi-device distributed training, the cache operation al done ``` - > Complete sample code: [cache.sh](https://gitee.com/mindspore/docs/blob/master/docs/sample_code/cache/cache.sh). + > Complete sample code: [cache.sh](https://gitee.com/mindspore/docs/blob/br_base/docs/sample_code/cache/cache.sh). 4. Create and apply a cache instance. @@ -410,7 +410,7 @@ During the single-node multi-device distributed training, the cache operation al print("Got {} samples on device {}".format(num_iter, args_opt.device)) ``` - > Complete sample code: [my_training_script.py](https://gitee.com/mindspore/docs/blob/master/docs/sample_code/cache/my_training_script.py) + > Complete sample code: [my_training_script.py](https://gitee.com/mindspore/docs/blob/br_base/docs/sample_code/cache/my_training_script.py) 5. Execute the training script. @@ -481,7 +481,7 @@ For complete sample code, refer to ModelZoo's [MobileNetV2](https://gitee.com/mi } ``` - > Complete sample code: [cache_util.sh](https://gitee.com/mindspore/docs/blob/master/docs/sample_code/cache/cache_util.sh). + > Complete sample code: [cache_util.sh](https://gitee.com/mindspore/docs/blob/br_base/docs/sample_code/cache/cache_util.sh). 2. 
In the Shell script `run_train_nfs_cache.sh` that starts NFS dataset training, turn on the cache server and generate a cache session saved in the Shell variable `CACHE_SESSION_ID`: diff --git a/tutorials/source_en/dataset/dataset_autotune.md b/tutorials/source_en/dataset/dataset_autotune.md index 96c18039f4..d0dd1b8898 100644 --- a/tutorials/source_en/dataset/dataset_autotune.md +++ b/tutorials/source_en/dataset/dataset_autotune.md @@ -1,6 +1,6 @@ # Dataset AutoTune for Dataset Pipeline -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/dataset/dataset_autotune.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/dataset/dataset_autotune.md) ## Overview @@ -53,7 +53,7 @@ print("tuning interval:", ds.config.get_autotune_interval()) ## Constraints - Both Dataset Profiling and Dataset AutoTune cannot be enabled concurrently, since Profiling's additional processing interferes with Dataset AutoTune's optimization processing. If both of them are enabled at the same time, a warning message will prompt the user to check whether there is a mistake. Do ensure Profiling is disabled when using Dataset AutoTune. -- [Offload for Dataset](https://www.mindspore.cn/tutorials/en/master/dataset/dataset_offload.html) and Dataset AutoTune can be enabled simultaneously. If any dataset node has been offloaded for hardware acceleration, the more efficient dataset pipeline configuration file will not be stored and a warning will be logged, because the dataset pipeline that is actually running is not the predefined one. +- [Offload for Dataset](https://www.mindspore.cn/tutorials/en/br_base/dataset/dataset_offload.html) and Dataset AutoTune can be enabled simultaneously. If any dataset node has been offloaded for hardware acceleration, the more efficient dataset pipeline configuration file will not be stored and a warning will be logged, because the dataset pipeline that is actually running is not the predefined one. - If the Dataset pipeline consists of a node that does not support deserialization (e.g. user-defined Python functions, GeneratorDataset), any attempt to deserialize the saved and improved dataset pipeline configuration file will report an error. In this case, it is recommended to manually modify the dataset pipeline script based on the contents of the tuning configuration file to achieve the purpose of a more efficient dataset pipeline. - In the distributed training scenario, `set_enable_autotune()` must be called after cluster communication has been initialized (mindspore.communication.management.init()), otherwise AutoTune can only detect device with id 0 and create only one tuned file (the number of expected tuned files equal to the number of devices). See the following example: @@ -246,7 +246,7 @@ The `new_dataset` is the tuned dataset object containing operations from Cifar t Before starting the next training process, the user can update the dataset loading code according to recommended improvements from Dataset AutoTune for a more efficient dataset pipeline. This allows the dataset pipeline to be run at an improved speed from the beginning of the training process. -By the way, MindSpore also provides APIs to set the global value of num_parallel_workers and prefetch_size. 
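The example referenced above lies outside this hunk; the following is only a rough sketch of the required call ordering, assuming the `mindspore.communication.management.init` and `mindspore.dataset.config.set_enable_autotune` entry points behave as described in the text:

```python
import mindspore.dataset as ds
from mindspore.communication.management import init

# Initialize cluster communication first (requires a properly configured distributed
# environment), so that AutoTune can detect every device rather than only device 0.
init()
ds.config.set_enable_autotune(True)
```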
Please refer to [mindspore.dataset.config](https://www.mindspore.cn/docs/en/master/api_python/mindspore.dataset.loading.html#config). +By the way, MindSpore also provides APIs to set the global value of num_parallel_workers and prefetch_size. Please refer to [mindspore.dataset.config](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.loading.html#config). -- [mindspore.dataset.config.set_num_parallel_workers](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.config.set_num_parallel_workers.html#mindspore.dataset.config.set_num_parallel_workers) -- [mindspore.dataset.config.set_prefetch_size](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.config.set_prefetch_size.html#mindspore.dataset.config.set_prefetch_size) +- [mindspore.dataset.config.set_num_parallel_workers](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.config.set_num_parallel_workers.html#mindspore.dataset.config.set_num_parallel_workers) +- [mindspore.dataset.config.set_prefetch_size](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.config.set_prefetch_size.html#mindspore.dataset.config.set_prefetch_size) diff --git a/tutorials/source_en/dataset/dataset_offload.md b/tutorials/source_en/dataset/dataset_offload.md index ac32534f3e..0b4fa71cbf 100644 --- a/tutorials/source_en/dataset/dataset_offload.md +++ b/tutorials/source_en/dataset/dataset_offload.md @@ -1,6 +1,6 @@ # Enabling Heterogeneous Acceleration for Data -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/dataset/dataset_offload.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/dataset/dataset_offload.md) ## Overview diff --git a/tutorials/source_en/dataset/eager.md b/tutorials/source_en/dataset/eager.md index 9d371698c6..c7105010b6 100644 --- a/tutorials/source_en/dataset/eager.md +++ b/tutorials/source_en/dataset/eager.md @@ -1,10 +1,10 @@ # Data Operation/Data transformation -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/dataset/eager.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/dataset/eager.md) ## Data Operation -`mindspore.dataset` provides a series of dataset operations. 
Users can use these dataset operations, such as [.shuffle](https://www.mindspore.cn/docs/en/master/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.shuffle.html#mindspore.dataset.Dataset.shuffle) / [.filter](https://www.mindspore.cn/docs/en/master/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.filter.html#mindspore.dataset.Dataset.filter) / [.skip](https://www.mindspore.cn/docs/en/master/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.skip.html#mindspore.dataset.Dataset.skip) / [.take](https://www.mindspore.cn/docs/en/master/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.take.html#mindspore.dataset.Dataset.take) / [.batch](https://www.mindspore.cn/docs/en/master/api_python/dataset/dataset_method/batch/mindspore.dataset.Dataset.batch.html#mindspore.dataset.Dataset.batch) / … to further shuffle, filter, skip, and batch combine datasets. +`mindspore.dataset` provides a series of dataset operations. Users can use these dataset operations, such as [.shuffle](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.shuffle.html#mindspore.dataset.Dataset.shuffle) / [.filter](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.filter.html#mindspore.dataset.Dataset.filter) / [.skip](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.skip.html#mindspore.dataset.Dataset.skip) / [.take](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.take.html#mindspore.dataset.Dataset.take) / [.batch](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/dataset_method/batch/mindspore.dataset.Dataset.batch.html#mindspore.dataset.Dataset.batch) / … to further shuffle, filter, skip, and batch combine datasets. Common data transformation operations include: @@ -229,13 +229,13 @@ In the Eager mode, transforms is executed in the form of a functional call. The MindSpore currently supports executing various Transforms in the Eager mode, as shown below. For more details, please refer to the API documentation. -- [vision module](https://mindspore.cn/docs/en/master/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.vision), data transform implemented based on OpenCV/Pillow. +- [vision module](https://mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.vision), data transform implemented based on OpenCV/Pillow. -- [text module](https://mindspore.cn/docs/en/master/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.text), data transform implemented based on Jieba, ICU4C, etc. +- [text module](https://mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.text), data transform implemented based on Jieba, ICU4C, etc. -- [audio module](https://mindspore.cn/docs/en/master/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.audio), data transform implemented based on C++, etc. +- [audio module](https://mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.audio), data transform implemented based on C++, etc. -- [transforms module](https://www.mindspore.cn/docs/en/master/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.transforms), general-purpose data transform implemented based on C++/Python/NumPy. 
+- [transforms module](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.transforms), general-purpose data transform implemented based on C++/Python/NumPy. The following sample code downloads the image data to the specified location. With the Eager mode, you only need to treat Transform itself as an executable function. @@ -263,7 +263,7 @@ Successfully downloaded file to ./banana.jpg This example will use Transform in the `mindspore.dataset.vision` module to transform a given image. -The Eager mode of the Vision Transform supports `numpy.array` or `PIL.Image` type data as input parameters. For more examples, please refer to: [Illustration Of Vision Transforms](https://www.mindspore.cn/docs/en/master/api_python/samples/dataset/vision_gallery.html) +The Eager mode of the Vision Transform supports `numpy.array` or `PIL.Image` type data as input parameters. For more examples, please refer to: [Illustration Of Vision Transforms](https://www.mindspore.cn/docs/en/br_base/api_python/samples/dataset/vision_gallery.html) ```python import numpy as np @@ -312,7 +312,7 @@ Image.type: , Image.shape: (360, 360) This example will transform the given text by using the Transforms in the `text` module. -Eager mode of Text Transforms supports `numpy.array` type data as input parameters. For more examples, please refer to: [Illustration Of Text Transforms](https://www.mindspore.cn/docs/en/master/api_python/samples/dataset/text_gallery.html) +Eager mode of Text Transforms supports `numpy.array` type data as input parameters. For more examples, please refer to: [Illustration Of Text Transforms](https://www.mindspore.cn/docs/en/br_base/api_python/samples/dataset/text_gallery.html) ```python import mindspore.dataset.text.transforms as text @@ -340,7 +340,7 @@ ToNumber result: [123456], type: int32 This example will transform the given audio by using the Transforms in the `audio` module. -Eager mode of Audio Transforms supports `numpy.array` type data as input parameters. For more examples, please refer to: [Illustration Of Audio Transforms](https://www.mindspore.cn/docs/en/master/api_python/samples/dataset/audio_gallery.html) +Eager mode of Audio Transforms supports `numpy.array` type data as input parameters. 
For more examples, please refer to: [Illustration Of Audio Transforms](https://www.mindspore.cn/docs/en/br_base/api_python/samples/dataset/audio_gallery.html) ```python import numpy as np diff --git a/tutorials/source_en/dataset/optimize.ipynb b/tutorials/source_en/dataset/optimize.ipynb index 124eaa5c04..5282bc3003 100644 --- a/tutorials/source_en/dataset/optimize.ipynb +++ b/tutorials/source_en/dataset/optimize.ipynb @@ -7,7 +7,7 @@ "source": [ "# Optimizing the Data Processing\n", "\n", - "[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/dataset/optimize.ipynb)" + "[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/dataset/optimize.ipynb)" ] }, { @@ -23,7 +23,7 @@ "id": "4ff7dd16", "metadata": {}, "source": [ - "![pipeline](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_en/dataset/images/pipeline.png)" + "![pipeline](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_en/dataset/images/pipeline.png)" ] }, { @@ -111,7 +111,7 @@ "id": "05a69c00", "metadata": {}, "source": [ - "![data-loading-performance-scheme](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_en/dataset/images/data_loading_performance_scheme.png)" + "![data-loading-performance-scheme](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_en/dataset/images/data_loading_performance_scheme.png)" ] }, { @@ -121,9 +121,9 @@ "source": [ "Suggestions on data loading performance optimization are as follows:\n", "\n", - "- For commonly used datasets that have already provided loading interfaces, it is preferred to use the dataset loading interface provided by MindSpore to load, which can obtain better loading performance. For details, see [Built-in Dataset Loading Interfaces](https://www.mindspore.cn/docs/en/master/api_python/mindspore.dataset.html). If the performance cannot meet the requirements, use the multi-thread concurrency solution, i.e., adjust the parameter `num_parallel_workers`(default: 8) of the dataset interface to achieve a better performance.\n", - "- For a dataset format that is not supported, it is recommended to convert the dataset to the MindRecord data format before loading it using the `MindDataset` class (Please refer to the [API](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.MindDataset.html) for detailed use). For detailed contents, please refer to [Converting Dataset to MindRecord](https://www.mindspore.cn/tutorials/en/master/dataset/record.html). If the performance cannot meet the requirements, use the multi-thread concurrency solution, i.e., adjust the parameter `num_parallel_workers`(default: 8) of the dataset interface to achieve a better performance.\n", - "- For dataset formats that are not supported, the user-defined `GeneratorDataset` class is preferred for implementing fast algorithm verification (Please refer to the [API](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.GeneratorDataset.html) for detailed use). If the performance cannot meet the requirements, the multi-process / multi-thread concurrency solution can be used. 
For example:\n", + "- For commonly used datasets that have already provided loading interfaces, it is preferred to use the dataset loading interface provided by MindSpore to load, which can obtain better loading performance. For details, see [Built-in Dataset Loading Interfaces](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.html). If the performance cannot meet the requirements, use the multi-thread concurrency solution, i.e., adjust the parameter `num_parallel_workers`(default: 8) of the dataset interface to achieve a better performance.\n", + "- For a dataset format that is not supported, it is recommended to convert the dataset to the MindRecord data format before loading it using the `MindDataset` class (Please refer to the [API](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.MindDataset.html) for detailed use). For detailed contents, please refer to [Converting Dataset to MindRecord](https://www.mindspore.cn/tutorials/en/br_base/dataset/record.html). If the performance cannot meet the requirements, use the multi-thread concurrency solution, i.e., adjust the parameter `num_parallel_workers`(default: 8) of the dataset interface to achieve a better performance.\n", + "- For dataset formats that are not supported, the user-defined `GeneratorDataset` class is preferred for implementing fast algorithm verification (Please refer to the [API](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.GeneratorDataset.html) for detailed use). If the performance cannot meet the requirements, the multi-process / multi-thread concurrency solution can be used. For example:\n", "\n", " 1. Increase the parameter `num_parallel_workers`(default: 8) of the dataset interface to improve concurrency;\n", " 2. Set the parameter `python_multiprocessing`(default: True) of the dataset interface to True / False to enable multi-process / multi-thread concurrency mode. The multi-process mode achieves better performance in CPU-bound tasks, and multi-thread mode is better in IO bound tasks;\n", @@ -131,7 +131,7 @@ " **Note**: If you set `python_multiprocessing=True`(default: True) and `num_parallel_workers>1`(default: 1), which indicates that the multi-process mode is started for data load acceleration. At this time, as the dataset iterates, the memory consumption of the subprocess will gradually increase, mainly because the subprocess of the user-defined dataset obtains the member variables from the main process in the Copy On Write way. Example: If you define a dataset with `__init__` function which contains a large number of member variable data (for example, a very large file name list is loaded during the dataset construction) and uses the multi-process mode, which may cause the problem of OOM (the estimated total memory usage is: `(num_parallel_workers+1) * size of the parent process` ). The simplest solution is to replace python objects (such as list/dict/int/float/string) with non referenced data types (such as Pandas, Numpy or PyArrow objects) for member variables, or load less meta data in member variables, or configure `python_multiprocessing=False` using multi-threading mode.\n", " 3. 
If there is `Using shared memory queue, but rowsize is larger than allocated memory ...` log prompt, then increase the parameter `max_rowsize` of dataset interface or set it to `None` to improve the efficiency of data transfer between processes.\n", "\n", - "Based on the preceding suggestions of data loading performance optimization, this experience uses the built-in dataset load operation `Cifar10Dataset` class (Please refer to the [API](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.Cifar10Dataset.html) for detailed use), the `MindDataset` class after data conversion, and uses the `GeneratorDataset` class to load data. The sample code is displayed as follows:\n", + "Based on the preceding suggestions of data loading performance optimization, this experience uses the built-in dataset load operation `Cifar10Dataset` class (Please refer to the [API](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.Cifar10Dataset.html) for detailed use), the `MindDataset` class after data conversion, and uses the `GeneratorDataset` class to load data. The sample code is displayed as follows:\n", "\n", "1. Use the `Cifar10Dataset` class of built-in dataset operation to load the CIFAR-10 dataset in binary format. The multi-thread optimization solution is used for data loading. Four threads are enabled to concurrently complete the task. Finally, a dictionary iterator is created for the data and a data record is read through the iterator." ] @@ -302,8 +302,8 @@ "source": [ "## Optimizing the Shuffle Performance\n", "\n", - "The shuffle operation is used to shuffle ordered datasets or repeated datasets. MindSpore provides the `shuffle` function for users which is based on memory cache. A larger value of `buffer_size` indicates a higher shuffling degree, consuming more computing resources and more time. This API allows users to shuffle the data at any time during the entire pipeline process. For the detailed contents, refer to [shuffle processing](https://www.mindspore.cn/docs/en/master/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.shuffle.html#mindspore-dataset-dataset-shuffle).\n", - "Because it's based on memory cache, the performance of this method is not as good as that of setting the parameter `shuffle=True`(default: True) of dataset interface to shuffle data directly. For details, see [Built-in Dataset Loading Interfaces](https://www.mindspore.cn/docs/en/master/api_python/mindspore.dataset.html).\n", + "The shuffle operation is used to shuffle ordered datasets or repeated datasets. MindSpore provides the `shuffle` function for users which is based on memory cache. A larger value of `buffer_size` indicates a higher shuffling degree, consuming more computing resources and more time. This API allows users to shuffle the data at any time during the entire pipeline process. For the detailed contents, refer to [shuffle processing](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.shuffle.html#mindspore-dataset-dataset-shuffle).\n", + "Because it's based on memory cache, the performance of this method is not as good as that of setting the parameter `shuffle=True`(default: True) of dataset interface to shuffle data directly. 
For details, see [Built-in Dataset Loading Interfaces](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.html).\n", "\n", "Shuffle optimization suggestion:" ] @@ -313,7 +313,7 @@ "id": "7c126623", "metadata": {}, "source": [ - "![shuffle-performance-scheme](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_en/dataset/images/shuffle_performance_scheme.png)" + "![shuffle-performance-scheme](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_en/dataset/images/shuffle_performance_scheme.png)" ] }, { @@ -455,9 +455,9 @@ "\n", "During the training, especially when the dataset is small, users can use data augmentation to preprocess images to enrich the dataset. MindSpore provides multiple data augmentation methods, including:\n", "\n", - "- Vision data augmentation operations, mainly implemented in C++. Please refer: [vision augmentations](https://mindspore.cn/docs/en/master/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.vision)\n", - "- Nlp data augmentation operations, mainly implemented in C++. Please refer: [nlp augmentations](https://mindspore.cn/docs/en/master/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.text).\n", - "- Audio data augmentation operations, mainly implemented in C++. Please refer: [audio augmentations](https://mindspore.cn/docs/en/master/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.audio).\n", + "- Vision data augmentation operations, mainly implemented in C++. Please refer: [vision augmentations](https://mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.vision)\n", + "- Nlp data augmentation operations, mainly implemented in C++. Please refer: [nlp augmentations](https://mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.text).\n", + "- Audio data augmentation operations, mainly implemented in C++. Please refer: [audio augmentations](https://mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.audio).\n", "- Also users can define Python functions as needed to perform data augmentation.\n", "\n", "The performance varies according to the underlying implementation (C++ or Python) methods. This is shown below:\n", @@ -475,7 +475,7 @@ "id": "2729405a", "metadata": {}, "source": [ - "![data-enhancement-performance-scheme](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_en/dataset/images/data_enhancement_performance_scheme.png)" + "![data-enhancement-performance-scheme](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_en/dataset/images/data_enhancement_performance_scheme.png)" ] }, { @@ -491,11 +491,11 @@ " 2. use fusion operator\n", " If the cpu usage is too high (For example: single-machine multi-card training), better performance can be achieved by using the fusion operation (aggregating the functions of two or more operations into one operation) to reduce the CPU consumption. You can make it effective by configuring the environment variable `export OPTIMIZE=true`. 
Examples of fusion are as follows:\n", "\n", - " ![operation-fusion](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_en/dataset/images/operation_fusion.png)\n", + " ![operation-fusion](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_en/dataset/images/operation_fusion.png)\n", " 3. use compose operator\n", " If the cpu usage is too high (For example: single-machine multi-card training), receive multiple augmentation operations through one map operation (these operations will be applied in order) to reduce CPU contention and achieve better performance. Examples are as follows:\n", "\n", - " ![compose](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_en/dataset/images/compose.png)" + " ![compose](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_en/dataset/images/compose.png)" ] }, { @@ -629,7 +629,7 @@ "\n", " The data loading process involves frequent disk operations, and the performance of disk reading and writing directly affects the speed of data loading. Solid State Drive (SSD) is recommended for storing large datasets when the dataset is large. SSDs generally have higher read and write speeds than ordinary disks, reducing the impact of I/O operations on data processing performance.\n", "\n", - " In general, the loaded data will be cached into the operating system's page cache, which reduces the overhead of subsequent reads to a certain extent and accelerates the data loading speed of subsequent Epochs. Users can also manually cache the augmented data through the [single-node caching](https://www.mindspore.cn/tutorials/en/master/dataset/cache.html) technology provided by MindSpore, avoiding duplicate data loading and data augmentation.\n", + " In general, the loaded data will be cached into the operating system's page cache, which reduces the overhead of subsequent reads to a certain extent and accelerates the data loading speed of subsequent Epochs. Users can also manually cache the augmented data through the [single-node caching](https://www.mindspore.cn/tutorials/en/br_base/dataset/cache.html) technology provided by MindSpore, avoiding duplicate data loading and data augmentation.\n", "\n", "2. NUMA architecture\n", "\n", @@ -708,7 +708,7 @@ "source": [ "## Dataset AutoTune for Dataset Pipeline\n", "\n", - "MindSpore provides a tool named Dataset AutoTune for optimizing dataset.The Dataset AutoTune can automatically tune Dataset pipelines to improve performance. The detailed usage please refer to [Dataset AutoTune for Dataset Pipeline](https://www.mindspore.cn/tutorials/en/master/dataset/dataset_autotune.html)." + "MindSpore provides a tool named Dataset AutoTune for optimizing dataset.The Dataset AutoTune can automatically tune Dataset pipelines to improve performance. The detailed usage please refer to [Dataset AutoTune for Dataset Pipeline](https://www.mindspore.cn/tutorials/en/br_base/dataset/dataset_autotune.html)." ] }, { @@ -718,7 +718,7 @@ "source": [ "## Enabling Heterogeneous Acceleration for Data\n", "\n", - "MindSpore provides a computing load balancing technology which can distribute the MindSpore Tensor computing to different heterogeneous hardware. On one hand, it balances the computing overhead between different hardware, on the other hand, it uses the advantages of heterogeneous hardware to accelerate the computing. 
For the detailed usage, please refer to [Enabling Heterogeneous Acceleration for Data](https://www.mindspore.cn/tutorials/en/master/dataset/dataset_offload.html)." + "MindSpore provides a computing load balancing technology which can distribute the MindSpore Tensor computing to different heterogeneous hardware. On one hand, it balances the computing overhead between different hardware, on the other hand, it uses the advantages of heterogeneous hardware to accelerate the computing. For the detailed usage, please refer to [Enabling Heterogeneous Acceleration for Data](https://www.mindspore.cn/tutorials/en/br_base/dataset/dataset_offload.html)." ] } ], diff --git a/tutorials/source_en/dataset/overview.md b/tutorials/source_en/dataset/overview.md index c0e100a01b..6bb50493f7 100644 --- a/tutorials/source_en/dataset/overview.md +++ b/tutorials/source_en/dataset/overview.md @@ -1,6 +1,6 @@ # Data Processing Overview -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/dataset/overview.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/dataset/overview.md) MindSpore Dataset provides two types of data processing capabilities: pipeline mode and lightweight mode. @@ -12,19 +12,19 @@ MindSpore Dataset provides two types of data processing capabilities: pipeline m Dataset pipeline defined by an API is used. After a training process is run, the dataset cyclically loads data from the dataset, processes data, and batch data, and then iterators for training. -![MindSpore Dataset Pipeline](https://www.mindspore.cn/docs/en/master/_images/dataset_pipeline_en.png) +![MindSpore Dataset Pipeline](https://www.mindspore.cn/docs/en/br_base/_images/dataset_pipeline_en.png) As shown in the above figure, the mindspore dataset module makes it easy for users to define data preprocessing pipelines and transform samples in the dataset in the most efficient (multi-process / multi-thread) manner. The specific steps are as follows: -- Dataset loading: Users can easily load supported datasets using the Dataset class([Standard-format Dataset](https://www.mindspore.cn/docs/en/master/api_python/mindspore.dataset.loading.html#standard-format), [Vision Dataset](https://www.mindspore.cn/docs/en/master/api_python/mindspore.dataset.loading.html#vision), [NLP Dataset](https://www.mindspore.cn/docs/en/master/api_python/mindspore.dataset.loading.html#text), [Audio Dataset](https://www.mindspore.cn/docs/en/master/api_python/mindspore.dataset.loading.html#audio)), or load Python layer customized datasets through UDF Loader + [GeneratorDataset](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.GeneratorDataset.html#mindspore.dataset.GeneratorDataset). 
At the same time, the loading class method can accept a variety of parameters such as sampler, data slicing, and data shuffle; +- Dataset loading: Users can easily load supported datasets using the Dataset class([Standard-format Dataset](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.loading.html#standard-format), [Vision Dataset](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.loading.html#vision), [NLP Dataset](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.loading.html#text), [Audio Dataset](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.loading.html#audio)), or load Python layer customized datasets through UDF Loader + [GeneratorDataset](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.GeneratorDataset.html#mindspore.dataset.GeneratorDataset). At the same time, the loading class method can accept a variety of parameters such as sampler, data slicing, and data shuffle; -- Dataset operation: The user uses the dataset object method [.shuffle](https://www.mindspore.cn/docs/en/master/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.shuffle.html#mindspore.dataset.Dataset.shuffle) / [.filter](https://www.mindspore.cn/docs/en/master/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.filter.html#mindspore.dataset.Dataset.filter) / [.skip](https://www.mindspore.cn/docs/en/master/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.skip.html#mindspore.dataset.Dataset.skip) / [.split](https://www.mindspore.cn/docs/en/master/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.split.html#mindspore.dataset.Dataset.split) / [.take](https://www.mindspore.cn/docs/en/master/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.take.html#mindspore.dataset.Dataset.take) / … to further shuffle, filter, skip, and obtain the maximum number of samples of datasets; +- Dataset operation: The user uses the dataset object method [.shuffle](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.shuffle.html#mindspore.dataset.Dataset.shuffle) / [.filter](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.filter.html#mindspore.dataset.Dataset.filter) / [.skip](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.skip.html#mindspore.dataset.Dataset.skip) / [.split](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.split.html#mindspore.dataset.Dataset.split) / [.take](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.take.html#mindspore.dataset.Dataset.take) / … to further shuffle, filter, skip, and obtain the maximum number of samples of datasets; -- Dataset sample transform operation: The user can add data transform operations ([vision transform](https://www.mindspore.cn/docs/en/master/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.vision), [nlp transform](https://www.mindspore.cn/docs/en/master/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.text), [audio transform](https://www.mindspore.cn/docs/en/master/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.audio)) to the map operation to perform transforms. 
During data preprocessing, multiple map operations can be defined to perform different transform operations to different fields. The data transform operation can also be a user-defined transform pyfunc (Python function); +- Dataset sample transform operation: The user can add data transform operations ([vision transform](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.vision), [nlp transform](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.text), [audio transform](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.audio)) to the map operation to perform transforms. During data preprocessing, multiple map operations can be defined to perform different transform operations to different fields. The data transform operation can also be a user-defined transform pyfunc (Python function); -- Batch: After the transforms of the samples, the user can use the [.batch](https://www.mindspore.cn/docs/en/master/api_python/dataset/dataset_method/batch/mindspore.dataset.Dataset.batch.html#mindspore.dataset.Dataset.batch) operation to organize multiple samples into batches, or use self-defined batch logic with the parameter per_batch_map applied; +- Batch: After the transforms of the samples, the user can use the [.batch](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/dataset_method/batch/mindspore.dataset.Dataset.batch.html#mindspore.dataset.Dataset.batch) operation to organize multiple samples into batches, or use self-defined batch logic with the parameter per_batch_map applied; -- Iterator: Finally, the user can use the dataset object method [.create_dict_iterator](https://www.mindspore.cn/docs/en/master/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_dict_iterator.html#mindspore.dataset.Dataset.create_dict_iterator) or [.create_tuple_iterator](https://www.mindspore.cn/docs/en/master/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_tuple_iterator.html#mindspore.dataset.Dataset.create_tuple_iterator) to create an iterator, which can output the preprocessed data cyclically. +- Iterator: Finally, the user can use the dataset object method [.create_dict_iterator](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_dict_iterator.html#mindspore.dataset.Dataset.create_dict_iterator) or [.create_tuple_iterator](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_tuple_iterator.html#mindspore.dataset.Dataset.create_tuple_iterator) to create an iterator, which can output the preprocessed data cyclically. ### Dataset Loading @@ -36,11 +36,11 @@ The dataset loading class is used to load training datasets from local disks, OB | Dataset API Category | API List | Description | |---|---|---| -| Standard-format Datasets | [MindDataset](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.MindDataset.html#mindspore.dataset.MindDataset), [TFRecordDataset](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.TFRecordDataset.html#mindspore.dataset.TFRecordDataset), [CSVDataset](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.CSVDataset.html#mindspore.dataset.CSVDataset), etc. | MindDataset depends on the MindRecord format. 
For details, see [Format Conversion](https://www.mindspore.cn/tutorials/en/master/dataset/record.html) | -| Customized Datasets | [GeneratorDataset](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.GeneratorDataset.html#mindspore.dataset.GeneratorDataset), [RandomDataset](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.RandomDataset.html#mindspore.dataset.RandomDataset), etc. | GeneratorDataset loads user-defined DataLoaders. For details, see [Custom DataSets](https://www.mindspore.cn/tutorials/en/master/beginner/dataset.html#customizing-dataset) | -| Common Datasets | [ImageFolderDataset](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.ImageFolderDataset.html#mindspore.dataset.ImageFolderDataset), [Cifar10Dataset](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.Cifar10Dataset.html#mindspore.dataset.Cifar10Dataset), [IWSLT2017Dataset](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.IWSLT2017Dataset.html#mindspore.dataset.IWSLT2017Dataset), [LJSpeechDataset](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.LJSpeechDataset.html#mindspore.dataset.LJSpeechDataset), etc. | Used for commonly used open source datasets | +| Standard-format Datasets | [MindDataset](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.MindDataset.html#mindspore.dataset.MindDataset), [TFRecordDataset](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.TFRecordDataset.html#mindspore.dataset.TFRecordDataset), [CSVDataset](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.CSVDataset.html#mindspore.dataset.CSVDataset), etc. | MindDataset depends on the MindRecord format. For details, see [Format Conversion](https://www.mindspore.cn/tutorials/en/br_base/dataset/record.html) | +| Customized Datasets | [GeneratorDataset](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.GeneratorDataset.html#mindspore.dataset.GeneratorDataset), [RandomDataset](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.RandomDataset.html#mindspore.dataset.RandomDataset), etc. | GeneratorDataset loads user-defined DataLoaders. For details, see [Custom DataSets](https://www.mindspore.cn/tutorials/en/br_base/beginner/dataset.html#customizing-dataset) | +| Common Datasets | [ImageFolderDataset](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.ImageFolderDataset.html#mindspore.dataset.ImageFolderDataset), [Cifar10Dataset](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.Cifar10Dataset.html#mindspore.dataset.Cifar10Dataset), [IWSLT2017Dataset](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.IWSLT2017Dataset.html#mindspore.dataset.IWSLT2017Dataset), [LJSpeechDataset](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.LJSpeechDataset.html#mindspore.dataset.LJSpeechDataset), etc. | Used for commonly used open source datasets | -You can configure different parameters for loading [datasets](https://www.mindspore.cn/docs/en/master/api_python/mindspore.dataset.loading.html#vision) to achieve different loading effects. Common parameters are as follows: +You can configure different parameters for loading [datasets](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.loading.html#vision) to achieve different loading effects. 
Common parameters are as follows: - `columns_list`: filters specified columns from the dataset. The parameter applies only to some dataset interfaces. The default value is None, indicating that all data columns are loaded. @@ -52,19 +52,19 @@ You can configure different parameters for loading [datasets](https://www.mindsp - `num_shards` and `shard_id`: specifies whether to shard a dataset. The default value is None, indicating that the dataset is not sharded. - - For more sampling logic, see [Data Sampling](https://www.mindspore.cn/tutorials/en/master/dataset/sampler.html). + - For more sampling logic, see [Data Sampling](https://www.mindspore.cn/tutorials/en/br_base/dataset/sampler.html). #### Dataset Combination -Dataset combination can combine multiple datasets in series/parallel mode to form a new dataset object, see [Data Operation](https://www.mindspore.cn/tutorials/en/master/dataset/eager.html#data-operation). +Dataset combination can combine multiple datasets in series/parallel mode to form a new dataset object, see [Data Operation](https://www.mindspore.cn/tutorials/en/br_base/dataset/eager.html#data-operation). #### Dataset Segmentation -The dataset is divided into a training dataset and a validation dataset, which are used in a training process and a validation process, respectively, see [Data Operation](https://www.mindspore.cn/tutorials/en/master/dataset/eager.html#data-operation). +The dataset is divided into a training dataset and a validation dataset, which are used in a training process and a validation process, respectively, see [Data Operation](https://www.mindspore.cn/tutorials/en/br_base/dataset/eager.html#data-operation). #### Dataset Saving -Re-save the dataset to the MindRecord data format, see [Data Operation](https://www.mindspore.cn/tutorials/en/master/dataset/eager.html#data-operation). +Re-save the dataset to the MindRecord data format, see [Data Operation](https://www.mindspore.cn/tutorials/en/br_base/dataset/eager.html#data-operation). ### Data Transforms @@ -84,50 +84,50 @@ The following describes how to use the `.map(...)`. - Use the data transform operation provided by Dataset in `.map(...)` - Dataset provides a rich list of built-in [data transform operations](https://www.mindspore.cn/docs/en/master/api_python/mindspore.dataset.transforms.html#) that can be used directly in `.map(...)`. For details, see the [Map Transform Operation](https://www.mindspore.cn/tutorials/en/master/beginner/dataset.html#built-in-transforms). + Dataset provides a rich list of built-in [data transform operations](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.transforms.html#) that can be used directly in `.map(...)`. For details, see the [Map Transform Operation](https://www.mindspore.cn/tutorials/en/br_base/beginner/dataset.html#built-in-transforms). - Use custom data transform operations in `.map(...)` - Dataset also supports user-defined data transform operations. You only need to pass user-defined functions to `.map(...)` to return. For details, see [Customizing Map Transform Operations](https://www.mindspore.cn/tutorials/en/master/beginner/dataset.html#user-defined-transforms). + Dataset also supports user-defined data transform operations. You only need to pass user-defined functions to `.map(...)` to return. For details, see [Customizing Map Transform Operations](https://www.mindspore.cn/tutorials/en/br_base/beginner/dataset.html#user-defined-transforms). 
- Return the Dict data structure in `.map(...)` - The dataset also supports the return of the Dict data structure in the user-defined data transform operation, which makes the defined data transform more flexible. For details, see [Custom Map Transform Operation Processing Dictionary Object](https://www.mindspore.cn/tutorials/en/master/dataset/python_objects.html#processing-dict-with-map-operation). + The dataset also supports the return of the Dict data structure in the user-defined data transform operation, which makes the defined data transform more flexible. For details, see [Custom Map Transform Operation Processing Dictionary Object](https://www.mindspore.cn/tutorials/en/br_base/dataset/python_objects.html#processing-dict-with-map-operation). #### Automatic Augmentation -In addition to the preceding common data transform, the dataset also provides an automatic data transform mode, which can automatically perform data transform processing on an image based on a specific policy. For details, see [Automatic Augmentation](https://www.mindspore.cn/tutorials/en/master/dataset/augment.html). +In addition to the preceding common data transform, the dataset also provides an automatic data transform mode, which can automatically perform data transform processing on an image based on a specific policy. For details, see [Automatic Augmentation](https://www.mindspore.cn/tutorials/en/br_base/dataset/augment.html). ### Data Batch Dataset provides the `.batch(...)` operation, which can easily organize samples after data transform into batches. There are two methods: -1. The default `.batch(...)` operation organizes batch_size samples into data whose shape is (batch_size, ...). For details, see the [Batch Operation](https://www.mindspore.cn/tutorials/en/master/beginner/dataset.html#batch-dataset). +1. The default `.batch(...)` operation organizes batch_size samples into data whose shape is (batch_size, ...). For details, see the [Batch Operation](https://www.mindspore.cn/tutorials/en/br_base/beginner/dataset.html#batch-dataset). -2. The customized `.batch(..., per_batch_map, ...)` operation allows users to organize multiple [np.ndarray, nd.ndarray, ...] data records in batches based on the customized logic. For details, see [Customizing Batch Operation](https://www.mindspore.cn/tutorials/en/master/dataset/python_objects.html#processing-dict-with-batch-operation). +2. The customized `.batch(..., per_batch_map, ...)` operation allows users to organize multiple [np.ndarray, nd.ndarray, ...] data records in batches based on the customized logic. For details, see [Customizing Batch Operation](https://www.mindspore.cn/tutorials/en/br_base/dataset/python_objects.html#processing-dict-with-batch-operation). ### Dataset Iterator -After defining the dataset loading `(xxDataset) -> data processing (.map) -> data batch (.batch)` dataset pipeline, you can use the iterator method `.create_dict_iterator(...)` / `.create_tuple_iterator(...)` to output data. For details, see [Dataset Iteration](https://www.mindspore.cn/tutorials/en/master/beginner/dataset.html#iterating-a-dataset). +After defining the dataset loading `(xxDataset) -> data processing (.map) -> data batch (.batch)` dataset pipeline, you can use the iterator method `.create_dict_iterator(...)` / `.create_tuple_iterator(...)` to output data. For details, see [Dataset Iteration](https://www.mindspore.cn/tutorials/en/br_base/beginner/dataset.html#iterating-a-dataset). 
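To make the end-to-end flow above concrete, the following minimal sketch strings the same stages together (illustrative only: the generator, column names, shapes, and batch size are invented for this example and are not taken from the tutorials referenced above):

```python
import numpy as np
import mindspore.dataset as ds
import mindspore.dataset.vision as vision

def image_generator():
    """Yield (image, label) pairs; shapes and values are placeholders."""
    for _ in range(8):
        yield (np.random.randint(0, 255, (32, 32, 3), dtype=np.uint8),
               np.array(0, dtype=np.int32))

# 1. Dataset loading: wrap a user-defined generator with GeneratorDataset
dataset = ds.GeneratorDataset(image_generator, column_names=["image", "label"])

# 2. Sample transforms: one map with a built-in vision transform,
#    another map with a user-defined pyfunc
dataset = dataset.map(operations=vision.Resize((64, 64)), input_columns=["image"])
dataset = dataset.map(operations=lambda img: (img / 255.0).astype(np.float32),
                      input_columns=["image"])

# 3. Batch: organize samples into batches of 4
dataset = dataset.batch(4)

# 4. Iterator: consume the preprocessed data
for item in dataset.create_dict_iterator(output_numpy=True):
    print(item["image"].shape, item["label"].shape)
```

Each stage corresponds to one of the sections above; swapping the generator for one of the built-in loading classes, or passing `per_batch_map` to `.batch(...)`, follows the same pattern.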
### Performance Optimization #### Data Processing Performance Optimization -If the performance of the data processing pipeline is insufficient, you can further optimize the performance by referring to [Data Processing Performance Optimization](https://www.mindspore.cn/tutorials/en/master/dataset/optimize.html) to meet end-to-end training performance requirements. +If the performance of the data processing pipeline is insufficient, you can further optimize the performance by referring to [Data Processing Performance Optimization](https://www.mindspore.cn/tutorials/en/br_base/dataset/optimize.html) to meet end-to-end training performance requirements. #### Single-node Data Cache -In addition, in the inference scenario, to achieve ultimate performance, you can use the [Single-node Data Cache](https://www.mindspore.cn/tutorials/en/master/dataset/cache.html) to cache datasets in the local memory to accelerate dataset reading and preprocessing. +In addition, in the inference scenario, to achieve ultimate performance, you can use the [Single-node Data Cache](https://www.mindspore.cn/tutorials/en/br_base/dataset/cache.html) to cache datasets in the local memory to accelerate dataset reading and preprocessing. ## Lightweight Mode You can directly use the data transform operation to process a piece of data. The return value is the data transform result. -Data transform operations ([vision transform](https://www.mindspore.cn/docs/en/master/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.vision), [nlp transform](https://www.mindspore.cn/docs/en/master/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.text), [audio transform](https://www.mindspore.cn/docs/en/master/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.audio)) can be used directly like calling a common function. Common usage is: first initialize the data transformation object, then call the data transformation operation method, pass in the data to be processed, and finally get the result of the process. For more examples, see [Lightweight Data Transformation](https://www.mindspore.cn/tutorials/en/master/dataset/eager.html#lightweight-data-transformation). +Data transform operations ([vision transform](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.vision), [nlp transform](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.text), [audio transform](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.transforms.html#module-mindspore.dataset.audio)) can be used directly like calling a common function. Common usage is: first initialize the data transformation object, then call the data transformation operation method, pass in the data to be processed, and finally get the result of the process. For more examples, see [Lightweight Data Transformation](https://www.mindspore.cn/tutorials/en/br_base/dataset/eager.html#lightweight-data-transformation). ## Other Feature ### Supporting Python Objects in Dataset Pipeline -Dataset pipeline accepts any Python type as input for some operations(such as user-defined dataset `GeneratorDataset`, user-defined `map` augmentation operation, `batch(per_batch_map=...)`. See [Supporting Python Objects in Dataset Pipeline](https://www.mindspore.cn/tutorials/en/master/dataset/python_objects.html). 
+Dataset pipeline accepts any Python type as input for some operations(such as user-defined dataset `GeneratorDataset`, user-defined `map` augmentation operation, `batch(per_batch_map=...)`. See [Supporting Python Objects in Dataset Pipeline](https://www.mindspore.cn/tutorials/en/br_base/dataset/python_objects.html). diff --git a/tutorials/source_en/dataset/python_objects.md b/tutorials/source_en/dataset/python_objects.md index 4647b500b2..ba4267e455 100644 --- a/tutorials/source_en/dataset/python_objects.md +++ b/tutorials/source_en/dataset/python_objects.md @@ -1,6 +1,6 @@ # Supporting Python Objects in Dataset Pipeline -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/dataset/python_objects.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/dataset/python_objects.md) Dataset pipeline accepts any Python type as input for some operations(such as user-defined dataset `GeneratorDataset`, user-defined `map` augmentation operation, `batch(per_batch_map=...)`). diff --git a/tutorials/source_en/dataset/record.ipynb b/tutorials/source_en/dataset/record.ipynb index 12047260fc..4c3ccbc36b 100644 --- a/tutorials/source_en/dataset/record.ipynb +++ b/tutorials/source_en/dataset/record.ipynb @@ -7,7 +7,7 @@ "source": [ "# MindRecord Format Conversion\n", "\n", - "[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/dataset/record.ipynb)\n" + "[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/dataset/record.ipynb)\n" ] }, { @@ -15,9 +15,9 @@ "id": "7fbc6b7a", "metadata": {}, "source": [ - "In MindSpore, the dataset used to train the network model can be converted into MindSpore-specific data format (MindSpore Record), making it easier to save and load data. The goal is to normalize the user's dataset and further enable the reading of the data through the [mindspore.dataset.MindDataset](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.MindDataset.html) interface and use it during the training process.\n", + "In MindSpore, the dataset used to train the network model can be converted into MindSpore-specific data format (MindSpore Record), making it easier to save and load data. 
The goal is to normalize the user's dataset and further enable the reading of the data through the [mindspore.dataset.MindDataset](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.MindDataset.html) interface and use it during the training process.\n", "\n", - "![conversion](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_en/dataset/images/data_conversion_concept.png)\n", + "![conversion](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_en/dataset/images/data_conversion_concept.png)\n", "\n", "In addition, the performance of MindSpore in some scenarios is optimized, and using the MindSpore Record data format can reduce disk IO and network IO overhead, which results in a better user experience.\n", "\n", @@ -38,7 +38,7 @@ "id": "1c420641", "metadata": {}, "source": [ - "![mindrecord](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_en/dataset/images/mindrecord.png)" + "![mindrecord](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_en/dataset/images/mindrecord.png)" ] }, { @@ -282,11 +282,11 @@ "\n", "MindSpore provides a tool class for converting commonly used datasets, capable of converting commonly used datasets to the MindSpore Record file format.\n", "\n", - "> For more detailed descriptions of dataset transformations, refer to [API Documentation](https://www.mindspore.cn/docs/en/master/api_python/mindspore.mindrecord.html).\n", + "> For more detailed descriptions of dataset transformations, refer to [API Documentation](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.mindrecord.html).\n", "\n", "### Dumping the CIFAR-10 Dataset\n", "\n", - "Users can convert CIFAR-10 raw data to MindSpore Record and read it using the `MindDataset` interface via the [mindspore.dataset.Dataset.save](https://www.mindspore.cn/docs/en/master/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.save.html) method.\n", + "Users can convert CIFAR-10 raw data to MindSpore Record and read it using the `MindDataset` interface via the [mindspore.dataset.Dataset.save](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.save.html) method.\n", "\n", "1. Download the [CIFAR-10 Dataset](https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz) and use `Cifar10Dataset` to load." ] diff --git a/tutorials/source_en/dataset/sampler.md b/tutorials/source_en/dataset/sampler.md index 190c451c61..d761e87315 100644 --- a/tutorials/source_en/dataset/sampler.md +++ b/tutorials/source_en/dataset/sampler.md @@ -1,6 +1,6 @@ # Data Loading and Sampling -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/dataset/sampler.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/dataset/sampler.md) ## Data Loading @@ -8,7 +8,7 @@ Data is the foundation of training. The `mindspore.dataset` module provides APIs ### Customizing Dataset -MindSpore supports loading data by constructing customized classes or customized generators. 
[mindspore.dataset.GeneratorDataset](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.GeneratorDataset.html) can help to load dataset based on the logic inside these classes/functions. +MindSpore supports loading data by constructing customized classes or customized generators. [mindspore.dataset.GeneratorDataset](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.GeneratorDataset.html) can help to load dataset based on the logic inside these classes/functions. `GeneratorDataset` supports constructing customized datasets from random-accessible objects, iterable objects and Python generator, which are explained in detail below. @@ -148,7 +148,7 @@ for d in dataset: MindSpore also supports parsing and reading open source classic datasets such as MNIST, CIFAR-10, CLUE, LJSpeech, etc. -Take the MNIST dataset as an example. For more other datasets, please refer to [Open Source](https://www.mindspore.cn/docs/en/master/api_python/mindspore.dataset.loading.html#open-source). +Take the MNIST dataset as an example. For more other datasets, please refer to [Open Source](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.loading.html#open-source). ```python # Download data from open datasets @@ -184,13 +184,13 @@ plt.show() To meet training requirements and solve problems such as too large datasets or uneven distribution of sample categories, MindSpore provides multiple samplers for different purposes to help users sample datasets. Users only need to import the sampler object when loading the dataset to implement data sampling. -MindSpore provides multiple samplers, such as [mindspore.dataset.RandomSampler](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.RandomSampler.html), [mindspore.dataset.WeightedRandomSampler](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.WeightedRandomSampler.html), and [mindspore.dataset.SubsetRandomSampler](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.SubsetRandomSampler.html). In addition, users can customize sampler classes as required. +MindSpore provides multiple samplers, such as [mindspore.dataset.RandomSampler](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.RandomSampler.html), [mindspore.dataset.WeightedRandomSampler](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.WeightedRandomSampler.html), and [mindspore.dataset.SubsetRandomSampler](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.SubsetRandomSampler.html). In addition, users can customize sampler classes as required. -> For details about how to use the sampler, see [Sampler API](https://www.mindspore.cn/docs/en/master/api_python/mindspore.dataset.loading.html#sampler-1). +> For details about how to use the sampler, see [Sampler API](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.loading.html#sampler-1). The following uses the CIFAR-10 dataset as an example to describe how to use several common MindSpore samplers. -![cifar10](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/dataset/images/cifar10.jpg) +![cifar10](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/dataset/images/cifar10.jpg) > The sample code in this section relies on `matplotlib`, which can be installed with the command `pip install matplotlib`. 
If the code is run in Notebook, you need to restart the kernel after the installation to execute the subsequent code. diff --git a/tutorials/source_en/debug/dryrun.md b/tutorials/source_en/debug/dryrun.md index aedf213c62..18a38834cc 100644 --- a/tutorials/source_en/debug/dryrun.md +++ b/tutorials/source_en/debug/dryrun.md @@ -1,6 +1,6 @@ # DryRun -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/debug/dryrun.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/debug/dryrun.md) ## Overview @@ -17,7 +17,7 @@ The MindSpore framework provides a DryRun mechanism that mocks all device-side i Users can set the simulation level by enabling the environment variable `export MS_SIMULATION_LEVEL=0/1/2/3` according to their needs. > - This feature is for simulation execution and cannot obtain the correct output information of operators. In scenarios involving dynamic shapes in static graphs, there may be cases where the input shape of an operator depends on the output shape of the previous operator, making this feature unsuitable for such situations. -> - In dynamic graph scenarios, the [mock interface](https://www.mindspore.cn/docs/en/master/api_python/mindspore.utils.html#mindspore.utils.dryrun.mock) needs to be used to manually adapt the script. +> - In dynamic graph scenarios, the [mock interface](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.utils.html#mindspore.utils.dryrun.mock) needs to be used to manually adapt the script. #### MS_SIMULATION_LEVEL=0 @@ -57,7 +57,7 @@ As shown in the figure, this graph displays the workspace size of the IndexSelec #### MS_SIMULATION_LEVEL=3 -Adds runtime statistics of computing operators on top of `2`, occupying CPU resources and computing resources corresponding to the number of cards to be simulated. In addition to memory analysis, it includes the execution of computing operators on the current card. Users can analyze the time consumption of computing operators in conjunction with [MindSpore Profiler](https://www.mindspore.cn/tutorials/en/master/debug/profiler.html). +Adds runtime statistics of computing operators on top of `2`, occupying CPU resources and computing resources corresponding to the number of cards to be simulated. In addition to memory analysis, it includes the execution of computing operators on the current card. Users can analyze the time consumption of computing operators in conjunction with [MindSpore Profiler](https://www.mindspore.cn/tutorials/en/br_base/debug/profiler.html). 
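As a rough illustration (a sketch under assumptions, not part of the tutorial above), the simulation level can also be prepared from Python before MindSpore is initialized; the rank-related variables below are assumptions about a typical single-process multi-card simulation rather than something this document prescribes:

```python
import os

# Assumption: the variable takes effect only if set before MindSpore initializes;
# it is normally exported in the launch shell instead, e.g. `export MS_SIMULATION_LEVEL=3`.
os.environ["MS_SIMULATION_LEVEL"] = "3"   # 0/1/2/3, see the level descriptions above
os.environ["RANK_SIZE"] = "8"             # assumption: number of cards to simulate
os.environ["RANK_ID"] = "0"               # assumption: the card this process stands in for

import mindspore as ms
ms.set_context(mode=ms.GRAPH_MODE)
# ...build and run the network as usual; device-side execution is mocked, and
# operator time consumption can then be analyzed with MindSpore Profiler.
```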
After enabling profiling, the `trace_view.json` file can be found, as shown below: diff --git a/tutorials/source_en/debug/dump.md b/tutorials/source_en/debug/dump.md index cabe663f95..4695f3cdb7 100644 --- a/tutorials/source_en/debug/dump.md +++ b/tutorials/source_en/debug/dump.md @@ -1,6 +1,6 @@ # Using Dump in the Graph Mode -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/debug/dump.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/debug/dump.md) To analyze the training process, MindSpore provides the dump function to store the input and output data of operators during the training process. @@ -22,7 +22,7 @@ In different backends, the Dump features supported by MindSpore are not entirely - [Dump in Ascend GE backend](#dump-in-ascend-ge-backend) - [Dump in CPU/GPU Backend](#dump-in-cpugpu-backend) -> - The differences between Ascend ms_backend and GE backend can be found in [the parameter jit](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.jit.html#mindspore.jit). +> - The differences between Ascend ms_backend and GE backend can be found in [the parameter jit](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.jit.html#mindspore.jit). > > - Dumping constant data is only supported in CPU/GPU backend, while not supported in Ascend ms_backend/GE backend. > @@ -131,14 +131,14 @@ MindSpore supports different Dump functionalities under various modes, as shown - `common_dump_settings`: - `op_debug_mode`: This attribute is used for operator overflow or operator exception debugging. 0: save all operators or specified operators; 3: only save overflow operators; 4: only save input of the exception operator. Set it to 0 when the data is dumped. If it is not set to 0, only the data of the overflow operator or exception operator will be dumped. Default: 0. - - `dump_mode`: 0: all operator data in the network dumped out; 1: the operator data specified in Dump `"kernels"`; 2: dump target and its contents using [mindspore.set_dump](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.set_dump.html). Specified data dump is supported only when "dump_mode" is set to `0`. + - `dump_mode`: 0: all operator data in the network dumped out; 1: the operator data specified in Dump `"kernels"`; 2: dump target and its contents using [mindspore.set_dump](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.set_dump.html). Specified data dump is supported only when "dump_mode" is set to `0`. - `path`: The absolute path to Dump saved data. - `net_name`: The customized net name: "ResNet50". - `iteration`: Specify the iterations of data required to be dumped, type is string. Use "|" to separate the step data of different intervals to be saved. For example, "0 | 5-8 | 100-120" represents dumping the data of the 1st, 6th to 9th, and 101st to 121st steps. If iteration is set to "all", data of every iteration will be dumped. Specified iteration dump is supported only when "op_debug_mode" is set to `0` or `3`, and not supported when "op_debug_mode" is set to `4`. - `saved_data`: Specify what data is to be dumped, type is string.
Use "tensor" to indicate complete tensor data dumped, use "statistic" to dump tensor statistics, use "full" to dump both tensor data and statistics. Default setting is "tensor". Statistic dump is only supported when "op_debug_mode" is set to `0`. - `input_output`: 0: dump input and output of kernel, 1: dump input of kernel, 2: dump output of kernel. When `op_debug_mode` is set to 3, `input_output` can only be set to save both the operator's inputs and outputs. Only input of kernel can be saved when "op_debug_mode" is set to `4`. - `kernels`: This item can be configured in three formats: - 1. List of operator names. Turn on the IR save switch by setting the environment variable `MS_DEV_SAVE_GRAPHS` to 2 and execute the network to obtain the operator name from the generated `trace_code_graph_{graph_id}` IR file. For details, please refer to [Saving IR](https://www.mindspore.cn/tutorials/en/master/debug/error_analysis/mindir.html#saving-ir). + 1. List of operator names. Turn on the IR save switch by setting the environment variable `MS_DEV_SAVE_GRAPHS` to 2 and execute the network to obtain the operator name from the generated `trace_code_graph_{graph_id}` IR file. For details, please refer to [Saving IR](https://www.mindspore.cn/tutorials/en/br_base/debug/error_analysis/mindir.html#saving-ir). Note that whether the environment variable `MS_DEV_SAVE_GRAPHS` is set to 2 may affect the IDs of the same operator, so when dumping specified operators, keep this setting unchanged after obtaining the operator name. Alternatively, you can obtain the operator names from the file `ms_output_trace_code_graph_{graph_id}.ir` saved by Dump. Refer to [Ascend ms_backend Dump Data Object Directory](#introduction-to-data-object-directory-and-data-file). 2. You can also specify an operator type. When there is no operator scope information or operator id information in the string, the background considers it as an operator type, such as "conv". The matching rule of operator type is: when the operator name contains an operator type string, the matching is considered successful (case insensitive). For example, "conv" can match operators "Conv2D-op1234" and "Conv3D-op1221". 3. Regular expressions are supported. When the string conforms to the format of "name-regex(xxx)", it would be considered a regular expression. For example, "name-regex(Default/.+)" can match all operators with names starting with "Default/". @@ -291,11 +291,11 @@ ms_execution_order_graph_{graph_id}.csv ### Data Analysis Sample -In order to better demonstrate the process of using dump to save and analyze data, we provide a [complete sample script](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/dump); you only need to execute `bash dump_sync_dump.sh` for Ascend ms_backend dump. +In order to better demonstrate the process of using dump to save and analyze data, we provide a [complete sample script](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/dump); you only need to execute `bash dump_sync_dump.sh` for Ascend ms_backend dump. After the graph corresponding to the script is saved to the disk through the Dump function, the final execution graph file `ms_output_trace_code_graph_{graph_id}.ir` will be generated. This file saves the stack information of each operator in the corresponding graph, and records the generation script corresponding to the operator.
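For orientation only, a configuration assembled from just the fields described above might look like the following sketch; every value and path is a placeholder, the real sample script may add backend-specific sections, and the file takes effect by pointing the `MINDSPORE_DUMP_CONFIG` environment variable at it before training starts:

```python
import json
import os

# Placeholder values based on the field descriptions above
# (a sketch, not the sample's actual configuration).
dump_config = {
    "common_dump_settings": {
        "op_debug_mode": 0,               # 0: save all or specified operators
        "dump_mode": 0,                   # 0: dump every operator in the network
        "path": "/tmp/ms_dump",           # absolute output path (placeholder)
        "net_name": "ResNet50",
        "iteration": "0|5-8",             # steps to dump
        "saved_data": "tensor",           # "tensor" / "statistic" / "full"
        "input_output": 0,                # 0: dump both inputs and outputs of each kernel
        "kernels": ["Default/Conv-op123"] # only consulted when dump_mode is 1 (hypothetical name)
    }
}

with open("/tmp/dump_config.json", "w") as f:
    json.dump(dump_config, f, indent=4)

# Point MindSpore at the config before launching training,
# e.g. before running `bash dump_sync_dump.sh` from the sample above.
os.environ["MINDSPORE_DUMP_CONFIG"] = "/tmp/dump_config.json"
```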
-Take [AlexNet script](https://gitee.com/mindspore/docs/blob/master/docs/sample_code/dump/train_alexnet.py) as an example: +Take [AlexNet script](https://gitee.com/mindspore/docs/blob/br_base/docs/sample_code/dump/train_alexnet.py) as an example: ```python ... @@ -479,14 +479,14 @@ For detailed configuration descriptions, please refer to the [Introduction to co - `common_dump_settings`: - `op_debug_mode`: This attribute is used for operator overflow or operator exception debugging. 0 is the only supported mode in CPU/GPU Dump backend, which means saving all operators or specified operators; - - `dump_mode`: 0: all operator data in the network dumped out; 1: the operator data specified in Dump `"kernels"`; 2: dump target and its contents using [mindspore.set_dump](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.set_dump.html). Specified data dump is supported only when "dump_mode' is set to `0`. + - `dump_mode`: 0: all operator data in the network dumped out; 1: the operator data specified in Dump `"kernels"`; 2: dump target and its contents using [mindspore.set_dump](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.set_dump.html). Specified data dump is supported only when "dump_mode' is set to `0`. - `path`: The absolute path to Dump saved data. - `net_name`: The customized net name: "ResNet50". - `iteration`: Specify the iterations of data required to be dumped, type is string. Use "|" to separate the step data of different intervals to be saved. For example, "0 | 5-8 | 100-120" represents dump the data of the 1st, 6th to 9th, and 101st to 121st steps. If iteration is set to "all", data of every iteration will be dumped. Specified iteration dump is supported only when "op_debug_mode" is set to `0` or `3`, not supported when when "op_debug_mode" is set to `4`. - `saved_data`: Specify what data is to be dumped, type is string. Use "tensor" to indicate complete tensor data Dumped, use "statistic" to dump tensor statistics, use "full" to dump both tensor data and statistics. Using "statistic" or "full" on CPU will result in exception. Default setting is "tensor". Statistic dump is only supported when "op_debug_mode" is set to `0`. - `input_output`: 0: dump input and output of kernel, 1: dump input of kernel, 2: dump output of kernel. Only input of kernel can be saved when "op_debug_mode" is set to `4`. - `kernels`: This item can be configured in three formats: - 1. List of operator names. Turn on the IR save switch by setting the environment variable `MS_DEV_SAVE_GRAPHS` to 2 and execute the network to obtain the operator name from the generated `trace_code_graph_{graph_id}`IR file. For details, please refer to [Saving IR](https://www.mindspore.cn/tutorials/en/master/debug/error_analysis/mindir.html#saving-ir). + 1. List of operator names. Turn on the IR save switch by setting the environment variable `MS_DEV_SAVE_GRAPHS` to 2 and execute the network to obtain the operator name from the generated `trace_code_graph_{graph_id}`IR file. For details, please refer to [Saving IR](https://www.mindspore.cn/tutorials/en/br_base/debug/error_analysis/mindir.html#saving-ir). Note that whether setting the environment variable `MS_DEV_SAVE_GRAPHS` to 2 may cause the different IDs of the same operator, so when dump specified operators, keep this setting unchanged after obtaining the operator name. Or you can obtain the operator names from the file `ms_output_trace_code_graph_{graph_id}.ir` saved by Dump. 
Refer to [Ascend ms_backend Dump Data Object Directory](#introduction-to-data-object-directory-and-data-file). 2. You can also specify an operator type. When there is no operator scope information or operator id information in the string, the background considers it as an operator type, such as "conv". The matching rule of operator type is: when the operator name contains an operator type string, the matching is considered successful (case insensitive). For example, "conv" can match operators "Conv2D-op1234" and "Conv3D-op1221". 3. Regular expressions are supported. When the string conforms to the format of "name-regex(xxx)", it would be considered a regular expression. For example, "name-regex(Default/.+)" can match all operators with names starting with "Default/". @@ -638,11 +638,11 @@ This file stores the list of iterations in which the graph was executed. After t ### Data Analysis Sample -In order to better demonstrate the process of using dump to save and analyze data, we provide a [complete sample script](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/dump); you only need to execute `bash dump_sync_dump.sh` for CPU/GPU dump. +In order to better demonstrate the process of using dump to save and analyze data, we provide a [complete sample script](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/dump); you only need to execute `bash dump_sync_dump.sh` for CPU/GPU dump. After the graph corresponding to the script is saved to the disk through the Dump function, the final execution graph file `ms_output_trace_code_graph_{graph_id}.ir` will be generated. This file saves the stack information of each operator in the corresponding graph, and records the generation script corresponding to the operator. -Take [AlexNet script](https://gitee.com/mindspore/docs/blob/master/docs/sample_code/dump/train_alexnet.py) as an example: +Take [AlexNet script](https://gitee.com/mindspore/docs/blob/br_base/docs/sample_code/dump/train_alexnet.py) as an example: ```python ... @@ -782,6 +782,6 @@ Generate the numpy.array data. - Dump only supports saving data with type of bool, int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64, float, float16, float32, float64, bfloat16, double, complex64 and complex128. - Complex64 and complex128 only support saving as npy files, not as statistics information. - The Print operator has an input parameter with type of string, which is not a data type supported by Dump. Therefore, when the Print operator is included in the script, there will be an error log, which will not affect the saving of data of other types. -- When Ascend GE dump is enabled, sink size can only be set to 1. Users can use [Model.train()](https://www.mindspore.cn/docs/en/master/api_python/train/mindspore.train.Model.html#mindspore.train.Model.train) or [data_sink()](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.data_sink.html) to set up sink size. +- When Ascend GE dump is enabled, sink size can only be set to 1. Users can use [Model.train()](https://www.mindspore.cn/docs/en/br_base/api_python/train/mindspore.train.Model.html#mindspore.train.Model.train) or [data_sink()](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.data_sink.html) to set up sink size.
- When Ascend GE dump is enabled, if **statistical value dumping** is performed in scenarios with a large amount of data (such as when the network itself is of a large scale or multiple steps are dumped consecutively), it may cause the host-side memory to become full, leading to a failure in data flow synchronization. It is recommended to replace it with the new version of [**statistical value dumping**](https://gitee.com/ascend/mstt/blob/master/debug/accuracy_tools/msprobe/docs/06.data_dump_MindSpore.md#51-%E9%9D%99%E6%80%81%E5%9B%BE%E5%9C%BA%E6%99%AF). -- By default, Dump ignores invalid operator outputs, such as the outputs of the Send/Print operator or the third reserved output of the FlashAttentionScore operator. If you need to retain these invalid outputs, you can set the environment variable `MINDSPORE_DUMP_IGNORE_USELESS_OUTPUT` to `0`. For details, please refer to [Environment Variables - Dump Debugging](https://www.mindspore.cn/docs/en/master/api_python/env_var_list.html#dump-debugging). +- By default, Dump ignores invalid operator outputs, such as the outputs of the Send/Print operator or the third reserved output of the FlashAttentionScore operator. If you need to retain these invalid outputs, you can set the environment variable `MINDSPORE_DUMP_IGNORE_USELESS_OUTPUT` to `0`. For details, please refer to [Environment Variables - Dump Debugging](https://www.mindspore.cn/docs/en/br_base/api_python/env_var_list.html#dump-debugging). diff --git a/tutorials/source_en/debug/error_analysis.rst b/tutorials/source_en/debug/error_analysis.rst index 59d5b9556d..d58cf279b2 100644 --- a/tutorials/source_en/debug/error_analysis.rst +++ b/tutorials/source_en/debug/error_analysis.rst @@ -1,8 +1,8 @@ Error Reporting Analysis ========================= -.. image:: https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg - :target: https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/debug/error_analysis.rst +.. image:: https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg + :target: https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/debug/error_analysis.rst :alt: View Source On Gitee .. toctree:: @@ -53,7 +53,7 @@ Understanding the meaning of error description information plays an important ro MindSpore error messages are processed by using Python Traceback processing, including Python stack information, error types and error descriptions, error messages related to network developers, and error messages related to framework developers. As shown in the following figure: -.. image:: https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/debug/images/graph_errmsg.png +.. image:: https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/debug/images/graph_errmsg.png - Python stack information: @@ -83,7 +83,7 @@ The general process of MindSpore network training is data loading and processing 3) Analyze the location where the error is reported based on the Python call stack and the error information. In dynamic graph mode, it is easier to determine the location of the code error. In the static graph mode, you need to analyze the location of the error report according to the error message "The Traceback of Net Construct Code" part of the error message. 4) Based on possible error problem scenarios and types, hypothesize the possible causes of the error problem. 
-Please refer to error analysis for details on how to perform `error analysis `_ based on different scenarios. +Please refer to error analysis for details on how to perform `error analysis `_ based on different scenarios. Error Search ^^^^^^^^^^^^^ @@ -94,7 +94,7 @@ Based on the error message and the location of the error code, combined with the MindSpore provides FAQ for common error reporting issues, including data processing, compilation execution, distributed parallelism and other scenarios. Based on the problem scenarios derived from the error analysis, you can search for problems by using the error description information. - The search address is as follows: \ `FAQ `__\ . + The search address is as follows: \ `FAQ `__\ . - Error reporting case @@ -135,7 +135,7 @@ Strategy Selection Dynamic graph mode uses asynchronous execution by default in order to improve the efficiency of dynamic graph execution, and error information is displayed at the last stage of execution. In Figure 3, you can see that the asynchronous execution method of error reporting will have alarm messages that interfere with the error reporting analysis. - MindSpore provides a way to switch to synchronous execution by setting \ ``set_context(mode=mindspore.PYNATIVE_MODE, pynative_synchronize=True)``. If an operator execution error occurs, the task terminates directly and displays the current error message. For details, see \ `PyNative Synchronous Execution `__\ . + MindSpore provides a way to switch to synchronous execution by setting \ ``set_context(mode=mindspore.PYNATIVE_MODE, pynative_synchronize=True)``. If an operator execution error occurs, the task terminates directly and displays the current error message. For details, see \ `PyNative Synchronous Execution `__\ . - Dichotomy Strategy @@ -166,7 +166,7 @@ Debugging Verification 1. ops.print\_ - In static graph mode, MindSpore provides the `ops.print_ `_ interface to print Tensor information or string information in the computational graph. + In static graph mode, MindSpore provides the `ops.print_ `_ interface to print Tensor information or string information in the computational graph.
| | +---------------------------+------------------------------------------+-------------------------------------------------------------------------------------------------------------+-------------------------------------------------------+ | | print\_ interface | The print\_ interface prints out the Tensor or | `print\_ interface introduction `_ | | | | | | +---------------------------+------------------------------------------+-------------------------------------------------------------------------------------------------------------+-------------------------------------------------------+ | | Intermediate file saving | Used to save the intermediate files generated | `Reading IR `_ | +---------------------------+------------------------------------------+-------------------------------------------------------------------------------------------------------------+-------------------------------------------------------+ | | Data Dump | When training the network, if the training result deviates from the expectation, | `Dump function debugging `_ | +| | | the operator input and output data are saved for debugging by the Dump function. | tutorials/en/br_base/debug/dump.html>`_ | +---------------------------+------------------------------------------+-------------------------------------------------------------------------------------------------------------+-------------------------------------------------------+ | Execution control | Callback | Users can use callback functions to perform specific actions | | | | | at specific times or to observe network information | | @@ -227,7 +227,7 @@ Network execution debugging is the corresponding debugging capability provided b | | | dynamically adjust parameters, terminate training tasks early. | | +---------------------------+------------------------------------------+-------------------------------------------------------------------------------------------------------------+-------------------------------------------------------+ | | Hook | The Hook function in pynative mode captures the input and output data | `Hook function `_ | | | | HookBackward operator and register_forward_pre_hook, register_forward_hook, | | | | | and register_backward_hook functions | | @@ -235,7 +235,7 @@ Network execution debugging is the corresponding debugging capability provided b +---------------------------+------------------------------------------+-------------------------------------------------------------------------------------------------------------+-------------------------------------------------------+ | | Synchronous execution | In dynamic graph mode, operators are executed asynchronously | `Synchronized execution of dynamic graph `_ | | | | to control whether the arithmetic is executed asynchronously on the device. | | +---------------------------+------------------------------------------+-------------------------------------------------------------------------------------------------------------+-------------------------------------------------------+ @@ -249,7 +249,7 @@ MindSpore provides framework developers with rich debugging tools. 
Debugging fea | Function classification | Main debugging functions | Description of use | Detailed introduction | +=========================+==========================+===============================================================+=======================================================+ | Process records | Logs | used to record information at each stage of the framework | `Log-related environment variables and configurations | -| | | implementation to provide information for understanding | `_ | | | | | | +-------------------------+--------------------------+---------------------------------------------------------------+-------------------------------------------------------+ diff --git a/tutorials/source_en/debug/error_analysis/cann_error_cases.md b/tutorials/source_en/debug/error_analysis/cann_error_cases.md index ac0914d350..a3939d93be 100644 --- a/tutorials/source_en/debug/error_analysis/cann_error_cases.md +++ b/tutorials/source_en/debug/error_analysis/cann_error_cases.md @@ -1,6 +1,6 @@ # CANN Common Error Analysis -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/debug/error_analysis/cann_error_cases.md)   +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/debug/error_analysis/cann_error_cases.md)   This article focuses on the handling of common CANN errors by users. When encountering CANN errors, MindSpore logs may not be sufficient to analyze the related errors. You can print CANN logs to better analyze the errors by setting the following two environment variables: diff --git a/tutorials/source_en/debug/error_analysis/error_scenario_analysis.md b/tutorials/source_en/debug/error_analysis/error_scenario_analysis.md index b06bd8caa0..168b0dd990 100644 --- a/tutorials/source_en/debug/error_analysis/error_scenario_analysis.md +++ b/tutorials/source_en/debug/error_analysis/error_scenario_analysis.md @@ -1,6 +1,6 @@ # Error Analysis -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/debug/error_analysis/error_scenario_analysis.md)   +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/debug/error_analysis/error_scenario_analysis.md)   As mentioned before, error analysis refers to analyzing and inferring possible error causes based on the obtained network and framework information (such as error messages and network code). @@ -10,7 +10,7 @@ During error analysis, the first step is to identify the scenario where the erro When an error is reported during data processing, check whether C++ error messages are contained as shown in Figure 1. Typically, the name of the data processing operation using the C++ language is the same as that using Python. Therefore, you can determine the data processing operation that reports the error based on the error message and locate the error in the Python code. 
-![minddata-errmsg](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/debug/error_analysis/images/minddata_errmsg.png) +![minddata-errmsg](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/debug/error_analysis/images/minddata_errmsg.png) *Figure 1* @@ -20,9 +20,9 @@ Data loading and processing has three phases: data preparation, data loading, an | Error Type| Error Description| Case Analysis| |-------------|---------|---| -| Data preparation error| The dataset is faulty, involving a path or MindRecord file problem.| [Data Preparation Error Case](https://www.mindspore.cn/tutorials/en/master/debug/error_analysis/minddata_debug.html#data-preparation)| -| Data loading error| Incorrect resource configuration, customized loading method, or iterator usage in the data loading phase.| [Data Loading Error Case](https://www.mindspore.cn/tutorials/en/master/debug/error_analysis/minddata_debug.html#data-loading)| -| Data augmentation error| Unmatched data format/size, high resource usage, or multi-thread suspension.| [Data Augmentation Error Case](https://www.mindspore.cn/tutorials/en/master/debug/error_analysis/minddata_debug.html#data-augmentation)| +| Data preparation error| The dataset is faulty, involving a path or MindRecord file problem.| [Data Preparation Error Case](https://www.mindspore.cn/tutorials/en/br_base/debug/error_analysis/minddata_debug.html#data-preparation)| +| Data loading error| Incorrect resource configuration, customized loading method, or iterator usage in the data loading phase.| [Data Loading Error Case](https://www.mindspore.cn/tutorials/en/br_base/debug/error_analysis/minddata_debug.html#data-loading)| +| Data augmentation error| Unmatched data format/size, high resource usage, or multi-thread suspension.| [Data Augmentation Error Case](https://www.mindspore.cn/tutorials/en/br_base/debug/error_analysis/minddata_debug.html#data-augmentation)| ## Network Construction and Training Error Analysis @@ -32,11 +32,11 @@ The following table lists common network construction and training errors. 
| Error Type | Error Description| Case Analysis| | - | - | - | -| Incorrect context configuration| An error occurs when the system configures the context.| [Incorrect Context Configuration Analysis](https://www.mindspore.cn/tutorials/en/master/debug/error_analysis/mindrt_debug.html#incorrect-context-configuration)| -| Syntax error | Python syntax errors and MindSpore static graph syntax errors, such as unsupported control flow syntax and tensor slicing errors.| [Syntax Errors Analysis](https://www.mindspore.cn/tutorials/en/master/debug/error_analysis/mindrt_debug.html#syntax-errors)| -| Operator build error | The operator parameter value, type, or shape does not meet the requirements, or the operator function is restricted.| [Operator Build Errors Analysis](https://www.mindspore.cn/tutorials/en/master/debug/error_analysis/mindrt_debug.html#operator-build-errors)| -| Operator execution error | Input data exceptions, operator implementation errors, function restrictions, resource restrictions, etc.| [Operator Execution Errors Analysis](https://www.mindspore.cn/tutorials/en/master/debug/error_analysis/mindrt_debug.html#operator-execution-errors)| -| Insufficient resources | The device memory is insufficient, the number of function call stacks exceeds the threshold, and the number of flow resources exceeds the threshold.| [Insufficient Resources Analysis](https://www.mindspore.cn/tutorials/en/master/debug/error_analysis/mindrt_debug.html#insufficient-resources)| +| Incorrect context configuration| An error occurs when the system configures the context.| [Incorrect Context Configuration Analysis](https://www.mindspore.cn/tutorials/en/br_base/debug/error_analysis/mindrt_debug.html#incorrect-context-configuration)| +| Syntax error | Python syntax errors and MindSpore static graph syntax errors, such as unsupported control flow syntax and tensor slicing errors.| [Syntax Errors Analysis](https://www.mindspore.cn/tutorials/en/br_base/debug/error_analysis/mindrt_debug.html#syntax-errors)| +| Operator build error | The operator parameter value, type, or shape does not meet the requirements, or the operator function is restricted.| [Operator Build Errors Analysis](https://www.mindspore.cn/tutorials/en/br_base/debug/error_analysis/mindrt_debug.html#operator-build-errors)| +| Operator execution error | Input data exceptions, operator implementation errors, function restrictions, resource restrictions, etc.| [Operator Execution Errors Analysis](https://www.mindspore.cn/tutorials/en/br_base/debug/error_analysis/mindrt_debug.html#operator-execution-errors)| +| Insufficient resources | The device memory is insufficient, the number of function call stacks exceeds the threshold, and the number of flow resources exceeds the threshold.| [Insufficient Resources Analysis](https://www.mindspore.cn/tutorials/en/br_base/debug/error_analysis/mindrt_debug.html#insufficient-resources)| ### Error Analysis of the Dynamic Graph Mode @@ -44,7 +44,7 @@ In dynamic graph mode, the program is executed line by line according to the cod Generally, the error message may contain `WARNING` logs. During error analysis, analyze the error message following Traceback first. 
-![pynative-errmsg](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/debug/error_analysis/images/pynative_errmsg.png) +![pynative-errmsg](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/debug/error_analysis/images/pynative_errmsg.png) *Figure 2* @@ -52,13 +52,13 @@ In dynamic graph mode, common network construction and training errors are found - Determine the object where the error is reported based on the error description, for example, the operator API name. - Locate the code line where the error is reported based on the Python call stack information. -- Analyze the code input data and calculation logic at the position where the error occurs, and find the error cause based on the description and specifications of the error object in the [MindSpore API](https://www.mindspore.cn/docs/en/master/api_python/mindspore.html). +- Analyze the code input data and calculation logic at the position where the error occurs, and find the error cause based on the description and specifications of the error object in the [MindSpore API](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.html). ### Error Analysis of the Static Graph Mode In static graph mode, MindSpore builds the network structure into a computational graph, and then performs the computation operations involved in the graph. Therefore, errors reported in static graph mode include computational graph build errors and computational graph execution errors. Figure 3 shows the error message reported during computational graph build. When an error occurs, the `analyze_failed.ir` file is automatically saved to help analyze the location of the error code. -![graph-errmsg](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/debug/images/graph_errmsg.png) +![graph-errmsg](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/debug/images/graph_errmsg.png) *Figure 3* @@ -69,9 +69,9 @@ Check whether the error is caused by graph build or graph execution based on the - If the error is reported during computational graph build, analyze the cause and location of the failure based on the error description and the `analyze_failed.ir` file automatically saved when the error occurs. - If the error is reported during computational graph execution, the error may be caused by insufficient resources or improper operator execution. You need to further distinguish the error based on the error message. If the error is reported during operator execution, locate the operator, use the dump function to save the input data of the operator, and analyze the cause of the error based on the input data. -For details about how to analyze and infer the failure cause, see the analysis methods described in [`analyze_failed.ir`](https://www.mindspore.cn/tutorials/en/master/debug/error_analysis/mindir.html#how-to-derive-the-cause-of-the-failure-based-on-the-analyze-fail-ir-file-analysis-graph). +For details about how to analyze and infer the failure cause, see the analysis methods described in [`analyze_failed.ir`](https://www.mindspore.cn/tutorials/en/br_base/debug/error_analysis/mindir.html#how-to-derive-the-cause-of-the-failure-based-on-the-analyze-fail-ir-file-analysis-graph). -For details about how to use Dump to save the operator input data, see [Dump Function Debugging](https://www.mindspore.cn/tutorials/en/master/debug/dump.html). 
+For details about how to use Dump to save the operator input data, see [Dump Function Debugging](https://www.mindspore.cn/tutorials/en/br_base/debug/dump.html). ## Distributed Parallel Error Analysis @@ -114,7 +114,7 @@ class MyStridedSlice(nn.Cell): Error cause: -The piece of code performs the slice operation on dimension 0. However, the configured policy (2,1) indicates that the slice operation is performed on both dimension 0 and dimension 1 of the input tensor. According to the description of operator slicing in the [MindSpore API](https://www.mindspore.cn/docs/en/master/api_python/operator_list_parallel.html), +The piece of code performs the slice operation on dimension 0. However, the configured policy (2,1) indicates that the slice operation is performed on both dimension 0 and dimension 1 of the input tensor. According to the description of operator slicing in the [MindSpore API](https://www.mindspore.cn/docs/en/br_base/api_python/operator_list_parallel.html), > only the mask whose value is all 0s is supported. All dimensions that are sliced must be extracted together. The input dimensions whose stride is not set to 1 cannot be sliced. @@ -274,11 +274,11 @@ Among the 4-bit error codes, 0000~8999 are user-class errors and 9000~9999 are i | Common Error Types | Error Description | Case Analysis | | - | - | - | -| AICORE Operator Compilation Problem | AICORE Operator Error During Compilation | [AICORE Operator Compilation Problem](https://www.mindspore.cn/tutorials/en/master/debug/error_analysis/cann_error_cases.html#aicore-operator-compilation-problem)| -| AICORE Operator Execution Problem | AICORE Operator Error During Execution| [AICORE Operator Execution Problem](https://mindspore.cn/tutorials/en/master/debug/error_analysis/cann_error_cases.html#aicore-operator-execution-problem) | -| AICPU Operator Execution Problem | AICPU Operator Error During Execution | [AICPU Operator Execution Problem](https://mindspore.cn/tutorials/en/master/debug/error_analysis/cann_error_cases.html#aicpu-operator-execution-problem) | -| runtime FAQ | Including input data exceptions, operator implementation errors, functional limitations, resource limitations, etc. | [runtime FAQ](https://mindspore.cn/tutorials/en/master/debug/error_analysis/cann_error_cases.html#runtime-faq) | -| HCCL & HCCP FAQ | Common communication problems during multi-machine multi-card training, including socket build timeout, notify wait timeout, ranktable configuration error, etc. 
| [HCCL & HCCP FAQ](https://mindspore.cn/tutorials/en/master/debug/error_analysis/cann_error_cases.html#hccl-hccp-faq) | -| profiling FAQ | Errors when running profiling for performance tuning | [profiling FAQ](https://mindspore.cn/tutorials/en/master/debug/error_analysis/cann_error_cases.html#profiling-faq) | +| AICORE Operator Compilation Problem | AICORE Operator Error During Compilation | [AICORE Operator Compilation Problem](https://www.mindspore.cn/tutorials/en/br_base/debug/error_analysis/cann_error_cases.html#aicore-operator-compilation-problem)| +| AICORE Operator Execution Problem | AICORE Operator Error During Execution| [AICORE Operator Execution Problem](https://mindspore.cn/tutorials/en/br_base/debug/error_analysis/cann_error_cases.html#aicore-operator-execution-problem) | +| AICPU Operator Execution Problem | AICPU Operator Error During Execution | [AICPU Operator Execution Problem](https://mindspore.cn/tutorials/en/br_base/debug/error_analysis/cann_error_cases.html#aicpu-operator-execution-problem) | +| runtime FAQ | Including input data exceptions, operator implementation errors, functional limitations, resource limitations, etc. | [runtime FAQ](https://mindspore.cn/tutorials/en/br_base/debug/error_analysis/cann_error_cases.html#runtime-faq) | +| HCCL & HCCP FAQ | Common communication problems during multi-machine multi-card training, including socket build timeout, notify wait timeout, ranktable configuration error, etc. | [HCCL & HCCP FAQ](https://mindspore.cn/tutorials/en/br_base/debug/error_analysis/cann_error_cases.html#hccl-hccp-faq) | +| profiling FAQ | Errors when running profiling for performance tuning | [profiling FAQ](https://mindspore.cn/tutorials/en/br_base/debug/error_analysis/cann_error_cases.html#profiling-faq) | For more information about CANN errors, refer to the [Ascend CANN Developer Documentation](https://www.hiascend.com/document/moreVersion/zh/CANNCommunityEdition/) to check the troubleshooting section of the corresponding CANN version. diff --git a/tutorials/source_en/debug/error_analysis/minddata_debug.md b/tutorials/source_en/debug/error_analysis/minddata_debug.md index ae841e0109..9cdcd8f1bd 100644 --- a/tutorials/source_en/debug/error_analysis/minddata_debug.md +++ b/tutorials/source_en/debug/error_analysis/minddata_debug.md @@ -1,6 +1,6 @@ # Data Processing Debugging Methods and Common Errors Analysis -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/debug/error_analysis/minddata_debug.md)   +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/debug/error_analysis/minddata_debug.md)   ## Data Processing Debugging Methods @@ -219,7 +219,7 @@ mindspore/ccsrc/minddata/dataset/kernels/image/crop_op.cc(33). According to the printed information, you can see that `Crop` processed the first sample and reported an error. The shape of the first sample, (32, 32, 3), was transformed by `RandomResize` to (3, 16, 3), but the shape output by `Crop` was not printed before the error was reported. So the error is caused by a shape that `Crop` cannot process.
Further, according to the Dataset Pipeline Error Message, the input sample has a height of only 3, but is expected to be cropped to a region with a height of 8, hence the error is reported. -Checking the [API description](https://www.mindspore.cn/docs/en/master/api_python/dataset_vision/mindspore.dataset.vision.Crop.html) of `Crop`, `Crop` requires the input sample to be in shape `<H, W>` or `<H, W, C>`, so `Crop` treats (3, 48, 48) as `<H, W, C>`, and naturally it can't crop out the region with H=8, W=8 when H=3, W=48, C=48. +Checking the [API description](https://www.mindspore.cn/docs/en/br_base/api_python/dataset_vision/mindspore.dataset.vision.Crop.html) of `Crop`, `Crop` requires the input sample to be in shape `<H, W>` or `<H, W, C>`, so `Crop` treats (3, 48, 48) as `<H, W, C>`, and naturally it can't crop out the region with H=8, W=8 when H=3, W=48, C=48. To quickly fix this, we just need to change the parameter size of `RandomResize` from (3, 16) to (16, 16), and run it again to find that the use case passes. @@ -242,13 +242,13 @@ data (8, 8, 48) #### Way Two: Debugging Map Operation Through Data Pipeline Debugging Mode -We can also turn on the dataset pipeline debug mode by calling the [set_debug_mode](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.config.set_debug_mode.html). +We can also turn on the dataset pipeline debug mode by calling the [set_debug_mode](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.config.set_debug_mode.html). When debug mode is enabled, the random seed is set to 1 if it is not already set, so that executing the dataset pipeline in debug mode can yield deterministic results. The process is as follows: 1. Print the shape and type of the input and output data for each transform op in the `map` operator. -2. Enable the dataset pipeline debug mode and use either a predefined debug hook provided by MindData or a user-defined debug hook. A user-defined hook must be a class inherited from [DebugHook](https://mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.debug.DebugHook.html). +2. Enable the dataset pipeline debug mode and use either a predefined debug hook provided by MindData or a user-defined debug hook. A user-defined hook must be a class inherited from [DebugHook](https://mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.debug.DebugHook.html). The following is a modification of the `Way One` use case, using the predefined debug hooks provided by MindData. @@ -303,7 +303,7 @@ E mindspore/ccsrc/minddata/dataset/kernels/image/crop_op.cc(33). ``` Based on the printed information, we can clearly see that `Crop` is getting an error when processing the input shape of
diff --git a/tutorials/source_en/debug/error_analysis/mindir.md b/tutorials/source_en/debug/error_analysis/mindir.md index e501a81a73..3e59eb15c6 100644 --- a/tutorials/source_en/debug/error_analysis/mindir.md +++ b/tutorials/source_en/debug/error_analysis/mindir.md @@ -1,6 +1,6 @@ # IR File Analysis -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/debug/error_analysis/mindir.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/debug/error_analysis/mindir.md) ## Overview diff --git a/tutorials/source_en/debug/error_analysis/mindrt_debug.md b/tutorials/source_en/debug/error_analysis/mindrt_debug.md index 75681a1526..80dfec9c6b 100644 --- a/tutorials/source_en/debug/error_analysis/mindrt_debug.md +++ b/tutorials/source_en/debug/error_analysis/mindrt_debug.md @@ -1,6 +1,6 @@ # Network Construction and Training Error Analysis -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/debug/error_analysis/mindrt_debug.md)   +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/debug/error_analysis/mindrt_debug.md)   The following lists the common network construction and training errors in static graph mode. @@ -18,7 +18,7 @@ For details, visit the following website: [MindSpore Configuration Error - 'set_context' Configuration Error](https://www.hiascend.com/developer/blog/details/0229106885219029083) -For details about the context configuration, see ['set_context'](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.set_context.html). +For details about the context configuration, see ['set_context'](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.set_context.html). ## Syntax Errors @@ -62,7 +62,7 @@ According to the error message, the dimension shapes of the return values of dif For details, visit the following website: -[MindSpore Syntax Error - Type (Shape) Join Failed](https://www.mindspore.cn/docs/en/master/faq/network_compilation.html) +[MindSpore Syntax Error - Type (Shape) Join Failed](https://www.mindspore.cn/docs/en/br_base/faq/network_compilation.html) The number of loops of the for and while statements may exceed the permitted range. As a result, the function call stack exceeds the threshold. 
The error message is displayed as follows: diff --git a/tutorials/source_en/debug/profiler.md index 47013bf20b..235ba65306 100644 --- a/tutorials/source_en/debug/profiler.md +++ b/tutorials/source_en/debug/profiler.md @@ -1,6 +1,6 @@ # Ascend Performance Tuning -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/debug/profiler.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/debug/profiler.md) ## Overview @@ -10,7 +10,7 @@ This tutorial introduces how to use MindSpore Profiler for performance tuning on 1. Prepare the training script; -2. Call the performance debugging interface in the training script, such as [mindspore.profiler.profile](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.profiler.profile.html) and [mindspore.profiler.DynamicProfilerMonitor](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.profiler.DynamicProfilerMonitor.html) interfaces; +2. Call the performance debugging interface in the training script, such as [mindspore.profiler.profile](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.profiler.profile.html) and [mindspore.profiler.DynamicProfilerMonitor](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.profiler.DynamicProfilerMonitor.html) interfaces; 3. Run the training script; @@ -22,7 +22,7 @@ There are five ways to collect training performance data, and the following desc ### Method 1: mindspore.Profiler Interface Enabling -Add the MindSpore Profiler-related interfaces in the training script. Users can refer to [MindSpore Profiler parameter details](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.Profiler.html) and [_ExperimentalConfig Parameter Details](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.profiler._ExperimentalConfig.html) to configure parameters such as profiler_level according to their data requirements. +Add the MindSpore Profiler-related interfaces in the training script. Users can refer to [MindSpore Profiler parameter details](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.Profiler.html) and [_ExperimentalConfig Parameter Details](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.profiler._ExperimentalConfig.html) to configure parameters such as profiler_level according to their data requirements. The interface supports two collection modes: CallBack mode and custom for loop mode, and supports both Graph and PyNative modes. @@ -67,20 +67,20 @@ with mindspore.profiler.profile(activities=[ProfilerActivity.CPU, ProfilerActivi ``` - schedule: After schedule is enabled, kernel_details.csv in the data saved to disk contains a column of Step ID information. According to the schedule configuration, skip_first skips 0 steps, wait is 0 steps, and warmup is 0 steps. Based on the active value being 1, data collection starts from step 0 and continues for 1 step. Therefore, the Step ID is 0, indicating that the 0th step is being collected. -- on_trace_ready: The disk loading path of profiler is specified through the tensorboard_trace_handler parameter of on_trace_ready. tensorboard_trace_handler will parse the performance data by default.
If the user does not configure tensorboard_trace_handler, the data will be written to the '/data' folder in the same-level directory of the current script by default. The performance data can be parsed through the off-line parsing function. The off-line parsing function can be referred to [Method 4: Off-line Parsing](https://www.mindspore.cn/tutorials/en/master/debug/profiler.html#method-4-off-line-parsing). +- on_trace_ready: The disk loading path of profiler is specified through the tensorboard_trace_handler parameter of on_trace_ready. tensorboard_trace_handler will parse the performance data by default. If the user does not configure tensorboard_trace_handler, the data will be written to the '/data' folder in the same-level directory of the current script by default. The performance data can be parsed through the off-line parsing function. The off-line parsing function can be referred to [Method 4: Off-line Parsing](https://www.mindspore.cn/tutorials/en/br_base/debug/profiler.html#method-4-off-line-parsing). -For the complete case, refer to [custom for loop collection complete code example](https://gitee.com/mindspore/docs/blob/master/docs/sample_code/profiler/for_loop_profiler.py). +For the complete case, refer to [custom for loop collection complete code example](https://gitee.com/mindspore/docs/blob/br_base/docs/sample_code/profiler/for_loop_profiler.py). **The principle of configuring schedule parameters is as follows:** -As illustrated in the following figure, schedule has 5 configurable parameters: skip_first, wait, warmup, active, and repeat. Among them, skip_first indicates skipping the first skip_first steps; wait represents the waiting phase, skipping wait steps; warmup represents the warm-up phase, skipping warmup steps; active indicates collecting active steps; repeat indicates the number of repetitions. One repeat includes wait+warmup+active steps. After all steps in a repeat are executed, the callback function configured via on_trace_ready will be executed to parse performance data. For detailed descriptions of each parameter, please refer to the [schedule API](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.profiler.schedule.html). +As illustrated in the following figure, schedule has 5 configurable parameters: skip_first, wait, warmup, active, and repeat. Among them, skip_first indicates skipping the first skip_first steps; wait represents the waiting phase, skipping wait steps; warmup represents the warm-up phase, skipping warmup steps; active indicates collecting active steps; repeat indicates the number of repetitions. One repeat includes wait+warmup+active steps. After all steps in a repeat are executed, the callback function configured via on_trace_ready will be executed to parse performance data. For detailed descriptions of each parameter, please refer to the [schedule API](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.profiler.schedule.html). ![schedule.png](../../source_zh_cn/debug/images/schedule.png) For example: If there are 100 steps (0-99) in model training and the schedule is configured as `schedule(skip_first=10, wait=10, warmup=5, active=5, repeat=2)` . Profiler will first skip the first 10 steps (0-9). Starting from step 10, the first repeat will wait for 10 steps (10-19), warm up for 5 steps (20-24), and finally collect performance data for 5 steps (25-29). The second repeat will again wait for 10 steps (30-39), warm up for 5 steps (40-44), and finally collect performance data for 5 steps (45-49). 
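As a rough illustration of wiring this schedule into the custom for loop mode, the following minimal sketch mirrors the 100-step example above; the import path, the `ProfilerActivity.NPU` activity, the argument passed to `tensorboard_trace_handler`, and the `train_one_step` function are assumptions made for illustration rather than part of this tutorial.

```python
import mindspore
from mindspore.profiler import ProfilerActivity, schedule, tensorboard_trace_handler

def train_one_step():
    """Hypothetical stand-in for one step of the real training loop."""
    pass

# Skip 10 steps, then run two repeats of wait=10 -> warmup=5 -> active=5;
# after each repeat the on_trace_ready callback parses the collected data.
with mindspore.profiler.profile(
        activities=[ProfilerActivity.CPU, ProfilerActivity.NPU],
        schedule=schedule(skip_first=10, wait=10, warmup=5, active=5, repeat=2),
        on_trace_ready=tensorboard_trace_handler("./profiler_data")) as prof:
    for _ in range(100):      # steps 0-99, matching the example above
        train_one_step()
        prof.step()           # advance the schedule once per training step
```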
> - In single-card scenarios, profiler generates multiple performance data files in the same directory based on the repeat count. Each repeat corresponds to a folder containing performance data collected from all active steps in that repeat. In multi-card scenarios, each card generates performance data independently, and the data from each card is divided into multiple parts based on the repeat count. When repeat is configured to 0, the specific number of repetitions is determined by the total number of steps, continuously repeating the wait-warmup-active cycle until all steps are completed. -> - The schedule needs to be used with [mindspore.profiler.profile.step](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.profiler.profile.html#mindspore.profiler.profile.step) interface. If you only configure schedule without using mindspore.profiler.profile.step interface to collect data, all collected data will belong to step 0. Therefore, performance data files will only be generated when step 0 corresponds to active (wait, warmup, skip_first are all set to 0). +> - The schedule needs to be used with [mindspore.profiler.profile.step](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.profiler.profile.html#mindspore.profiler.profile.step) interface. If you only configure schedule without using mindspore.profiler.profile.step interface to collect data, all collected data will belong to step 0. Therefore, performance data files will only be generated when step 0 corresponds to active (wait, warmup, skip_first are all set to 0). #### CallBack Mode Collection Example @@ -112,7 +112,7 @@ class StopAtStep(mindspore.Callback): self.profiler.stop() ``` -For the complete case, refer to [CallBack mode collection complete code example](https://gitee.com/mindspore/docs/blob/master/docs/sample_code/profiler/call_back_profiler.py). +For the complete case, refer to [CallBack mode collection complete code example](https://gitee.com/mindspore/docs/blob/br_base/docs/sample_code/profiler/call_back_profiler.py). ### Method 2: Dynamic Profiler Enabling @@ -146,7 +146,7 @@ JSON configuration example as follows: } ``` -1. Users need to configure the above JSON configuration file before instantiating DynamicProfilerMonitor, and save the configuration files in cfg_path. See [DynamicProfilerMonitor parameter details](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.profiler.DynamicProfilerMonitor.html) for details, and save the configuration file to cfg_path; +1. Users need to configure the above JSON configuration file before instantiating DynamicProfilerMonitor, and save the configuration files in cfg_path. See [DynamicProfilerMonitor parameter details](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.profiler.DynamicProfilerMonitor.html) for details, and save the configuration file to cfg_path; 2. Call the step interface of DynamicProfilerMonitor after the model training to collect data; 3. If users want to change the collection and analysis tasks during training, they can modify the JSON configuration file, such as changing the start_step in the above JSON configuration to 8, stop_step to 10, save it, and DynamicProfilerMonitor will automatically identify that the configuration file has changed to the new collection and analysis tasks. 
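A minimal sketch of the three steps above might look as follows, assuming `DynamicProfilerMonitor` is imported from `mindspore.profiler`, the JSON configuration has been saved under `./profiler_cfg`, and `train_one_step` stands in for the real training step:

```python
from mindspore.profiler import DynamicProfilerMonitor

def train_one_step():
    """Hypothetical stand-in for one step of the real training loop."""
    pass

# Step 1: the JSON configuration file described above has been written to cfg_path beforehand.
profiler = DynamicProfilerMonitor(cfg_path="./profiler_cfg")

STEP_NUM = 15
for _ in range(STEP_NUM):
    train_one_step()
    # Step 2: step() lets the monitor collect the steps configured in the JSON file.
    profiler.step()
# Step 3: editing the JSON file while training runs (e.g. start_step=8, stop_step=10)
# is detected automatically and starts a new collection task.
```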
@@ -168,11 +168,11 @@ for _ in range(STEP_NUM): At this point, the results include two folders: rank0_start2_stop5 and rank0_start8_stop10, representing the collection of steps 2-5 and 8-10 respectively. -For the complete case, refer to [dynamic profiler enabling method case](https://gitee.com/mindspore/docs/blob/master/docs/sample_code/profiler/dynamic_profiler.py). +For the complete case, refer to [dynamic profiler enabling method case](https://gitee.com/mindspore/docs/blob/br_base/docs/sample_code/profiler/dynamic_profiler.py). ### Method 3: Environment Variable Enabling -Users can use the environment variable enabling method to enable Profiler most simply. Currently, only single-card scenarios are supported. This method only needs to configure the parameters to the environment variables, and the performance data will be automatically collected during the model training. schedule, on_trace_ready, and experimental_config parameters are not supported in this mode, and other parameters can be used. See [environment variable enabling method parameter details](https://www.mindspore.cn/docs/en/master/api_python/env_var_list.html) for details. +Users can use the environment variable enabling method to enable Profiler most simply. Currently, only single-card scenarios are supported. This method only needs to configure the parameters to the environment variables, and the performance data will be automatically collected during the model training. schedule, on_trace_ready, and experimental_config parameters are not supported in this mode, and other parameters can be used. See [environment variable enabling method parameter details](https://www.mindspore.cn/docs/en/br_base/api_python/env_var_list.html) for details. > If environment variables are enabled, set device_id using environment variables before executing the script. Do not use set_context to set device_id in the script. @@ -193,7 +193,7 @@ After loading the environment variable, start the training script directly to co ### Method 4: Off-line Parsing -If users want to analyze the collected performance data, you can use [mindspore.profiler.profiler.analyse](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.profiler.profiler.analyse.html) interface for offline analysis. For details about the analyse interface, please refer to [offline parse analyse interface parameters](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.profiler.profiler.analyse.html). +If users want to analyze the collected performance data, you can use [mindspore.profiler.profiler.analyse](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.profiler.profiler.analyse.html) interface for offline analysis. For details about the analyse interface, please refer to [offline parse analyse interface parameters](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.profiler.profiler.analyse.html). The offline analysis sample is shown below: @@ -209,7 +209,7 @@ To address the traditional profiler process being time-consuming and dealing wit ![mstx_profiler.png](../../source_zh_cn/debug/images/mstx_profiler.png) -When using the lightweight marking feature, ensure that the mstx in the [_ExperimentalConfig](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.profiler._ExperimentalConfig.html) interface is set to True. Also note that lightweight marking data is only valid during the profiler's data collection interval. 
For details about the mstx interface, please refer to [mstx API](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.profiler.mstx.html). +When using the lightweight marking feature, ensure that the mstx in the [_ExperimentalConfig](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.profiler._ExperimentalConfig.html) interface is set to True. Also note that lightweight marking data is only valid during the profiler's data collection interval. For details about the mstx interface, please refer to [mstx API](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.profiler.mstx.html). The lightweight marking sample is shown below: @@ -222,7 +222,7 @@ mstx.mark("start") mstx.range_end(range_id) ``` -For the complete case, refer to [mstx lightweight marking method case](https://gitee.com/mindspore/docs/blob/master/docs/sample_code/profiler/mstx_profiler.py). +For the complete case, refer to [mstx lightweight marking method case](https://gitee.com/mindspore/docs/blob/br_base/docs/sample_code/profiler/mstx_profiler.py). ## Performance Data diff --git a/tutorials/source_en/debug/pynative.md b/tutorials/source_en/debug/pynative.md index 7afc21f36e..f5856738b5 100644 --- a/tutorials/source_en/debug/pynative.md +++ b/tutorials/source_en/debug/pynative.md @@ -1,6 +1,6 @@ # Dynamic Graph Debugging -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/debug/pynative.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/debug/pynative.md) ## Overview @@ -82,7 +82,7 @@ In the debugging process, it is often necessary to view the log to locate the pr - 3-ERROR, indicates that there is an error in the execution of the program, the error log is output, and the program may not be terminated. - 4-CRITICAL, indicates that the program execution is abnormal and will be terminated. -See [environment variables](https://www.mindspore.cn/docs/en/master/api_python/env_var_list.html#log) for detailed logging controls. +See [environment variables](https://www.mindspore.cn/docs/en/br_base/api_python/env_var_list.html#log) for detailed logging controls. ### Common PDB Debugging Commands @@ -125,7 +125,7 @@ When you need to see if the backpropagation accuracy is accurate under a dynamic print(output) ``` - Detailed API usage instructions can be [referenced](https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.register_hook.html#mindspore.Tensor.register_hook). + Detailed API usage instructions can be [referenced](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/Tensor/mindspore.Tensor.register_hook.html#mindspore.Tensor.register_hook). - Viewing the gradient during execution can be done with `mindspore.ops.HookBackward`, for example: @@ -151,7 +151,7 @@ When you need to see if the backpropagation accuracy is accurate under a dynamic print(output) ``` - Detailed API usage instructions can be [referenced](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.HookBackward.html). + Detailed API usage instructions can be [referenced](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.HookBackward.html). 
- Viewing the gradient of a particular Cell can be done with `mindspore.nn.Cell.register_backward_hook`, for example: @@ -180,7 +180,7 @@ When you need to see if the backpropagation accuracy is accurate under a dynamic print(output) ``` - Detailed API usage instructions can be [referenced](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.register_backward_hook). + Detailed API usage instructions can be [referenced](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.register_backward_hook). ## More Practical Examples diff --git a/tutorials/source_en/debug/sdc.md b/tutorials/source_en/debug/sdc.md index 45fd8379c2..7e76ebacba 100644 --- a/tutorials/source_en/debug/sdc.md +++ b/tutorials/source_en/debug/sdc.md @@ -1,6 +1,6 @@ # Feature Value Detection -[![View Source File](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/debug/sdc.md) +[![View Source File](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/debug/sdc.md) ## Overview @@ -46,7 +46,7 @@ The environment variable `NPU_ASD_UPPER_THRESH` controls the absolute numerical The environment variable `NPU_ASD_SIGMA_THRESH` controls the relative numerical threshold of detection, in the same format as the above, where the first element controls the first-level threshold of numerical changes, and the second element controls the second-level threshold of numerical changes; by default, `NPU_ASD_SIGMA_THRESH=100000,5000`. -For details of above environment variables, see [Environment Variables](https://www.mindspore.cn/docs/en/master/api_python/env_var_list.html). +For details of above environment variables, see [Environment Variables](https://www.mindspore.cn/docs/en/br_base/api_python/env_var_list.html). ## Use Cases diff --git a/tutorials/source_en/generative/cyclegan.md b/tutorials/source_en/generative/cyclegan.md index 0d21267448..210984bc5b 100644 --- a/tutorials/source_en/generative/cyclegan.md +++ b/tutorials/source_en/generative/cyclegan.md @@ -1,4 +1,4 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/generative/cyclegan.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/generative/cyclegan.md) # CycleGAN for Image Style Migration @@ -16,13 +16,13 @@ An important application field of this model is domain adaptation, which can be Essentially, a CycleGAN consists of two mirror-symmetric GANs. The following figure shows the CycleGAN structure. (The figure comes from the original paper.) -![CycleGAN](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/generative/images/CycleGAN.png) +![CycleGAN](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/generative/images/CycleGAN.png) For ease of understanding, apples and oranges are used as examples. 
In the preceding figure, $X$ indicates apples, $Y$ indicates oranges, $G$ indicates an apple-to-orange style generator, $F$ indicates an orange-to-apple style generator, and $D_{X}$ and $D_{Y}$ are corresponding discriminators. For details about the structures of the generators and discriminators, see the following code. The model can finally output weights of the two models, and separately migrate styles of the two images to each other to generate new images. An important part of this model is loss functions, in which the cycle consistency loss is the most important function. The following figure shows the process of calculating the cycle loss. (The figure comes from the original paper.) -![Cycle Consistency Loss](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/generative/images/CycleGAN_1.png) +![Cycle Consistency Loss](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/generative/images/CycleGAN_1.png) In the preceding figure, the apple image $x$ passes through the generator $G$ to obtain the pseudo orange $\hat{Y}$, and then sends the pseudo orange $\hat{Y}$ result to the generator $F$ to generate the apple-style result $\hat{x}$. Finally, the generated apple-style result $\hat{x}$ and the original apple image $x$ are used to calculate the cycle consistency loss, and vice versa. Cycle loss captures the intuition that if we translate from one domain to the other and back again we should arrive at where we started. For details about the training process, see the following code. @@ -46,7 +46,7 @@ download(url, ".", kind="zip", replace=True) ### Loading a Dataset -Use the [MindDataset](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.MindDataset.html) API of MindSpore to read and parse the dataset. +Use the [MindDataset](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.MindDataset.html) API of MindSpore to read and parse the dataset. 
```python from mindspore.dataset import MindDataset @@ -102,7 +102,7 @@ The model structure of generators in this case is the same as that of the ResNet The structure of the generators is as follows: -![CycleGAN Generator](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/generative/images/CycleGAN_2.jpg) +![CycleGAN Generator](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/generative/images/CycleGAN_2.jpg) For details about the model structure, see the following code: diff --git a/tutorials/source_en/generative/dcgan.md b/tutorials/source_en/generative/dcgan.md index 8c970fd868..6d2259b74e 100644 --- a/tutorials/source_en/generative/dcgan.md +++ b/tutorials/source_en/generative/dcgan.md @@ -1,4 +1,4 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/generative/dcgan.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/generative/dcgan.md) # Generating Cartoon Head Portrait via DCGAN @@ -6,7 +6,7 @@ In the following tutorial, we will use sample code to show how to set up the net ## GAN Basic Principle -For this part of the principle, refer to [GAN image generation](https://www.mindspore.cn/tutorials/en/master/generative/gan.html#model-introduction). +For this part of the principle, refer to [GAN image generation](https://www.mindspore.cn/tutorials/en/br_base/generative/gan.html#model-introduction). ## DCGAN Principle @@ -99,7 +99,7 @@ def create_dataset_imagenet(dataset_path): dataset = create_dataset_imagenet('./faces') ``` -Use the [create_dict_iterator](https://www.mindspore.cn/docs/en/master/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_dict_iterator.html) function to convert data into a dictionary iterator, and then use the `matplotlib` module to visualize some training data. +Use the [create_dict_iterator](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_dict_iterator.html) function to convert data into a dictionary iterator, and then use the `matplotlib` module to visualize some training data. ```python import matplotlib.pyplot as plt @@ -129,7 +129,7 @@ Generator `G` maps the implicit vector `z` to the data space. Because the data i The following shows the image generated by DCGAN: -![dcgangenerator](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/dcgan.png) +![dcgangenerator](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/dcgan.png) > Image source: [Unsupervised Representation Learning With Deep Convolutional Generative Adversarial Networks](https://arxiv.org/pdf/1511.06434.pdf). @@ -215,7 +215,7 @@ discriminator = Discriminator() ### Loss Function -When `D` and `G` are defined, the binary cross-entropy loss function [BCELoss](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.BCELoss.html) defined in MindSpore will be used. 
+When `D` and `G` are defined, the binary cross-entropy loss function [BCELoss](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.BCELoss.html) defined in MindSpore will be used. ```python # Define loss function @@ -378,7 +378,7 @@ def showGif(image_list): showGif(image_list) ``` -![dcgan](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/dcgan.gif) +![dcgan](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/dcgan.gif) From the image above, we can see that the image quality gets better as the number of training epochs increases. If we increase the number of training epochs so that `num_epochs` reaches above 50, the generated anime avatar images become more similar to those in the dataset. Below, we generate images by loading the generator network model parameter file with the following code: diff --git a/tutorials/source_en/generative/diffusion.md index 9bda8e79ac..275217d158 100644 --- a/tutorials/source_en/generative/diffusion.md +++ b/tutorials/source_en/generative/diffusion.md @@ -1,4 +1,4 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/generative/diffusion.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/generative/diffusion.md) # Diffusion Model @@ -12,7 +12,7 @@ Actually, the idea of diffusion-based generative models was already introduced b The method described in this document is implemented with the MindSpore AI framework and refers to Phil Wang's [Denoising Diffusion Probabilistic Model, in PyTorch](https://github.com/lucidrains/denoising-diffusion-pytorch) (which is itself based on a [TensorFlow](https://github.com/hojonathanho/diffusion) implementation). -![Image-1](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/generative/images/diffusion_1.png) +![Image-1](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/generative/images/diffusion_1.png) We adopt the discrete-time (latent variable model) formulation in the experiment. In addition, you can refer to [other perspectives](https://twitter.com/sedielem/status/1530894256168222722?s=20&t=mfv4afx1GcNQU5fZklpACw) on diffusion models. @@ -51,7 +51,7 @@ Processing images using a diffusion model consists of 2 processes. - A reverse denoising diffusion process $p_\theta$ that learns to gradually denoise pure noise through a neural network to generate an actual image. -![Image-2](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/generative/images/diffusion_2.png) +![Image-2](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/generative/images/diffusion_2.png) Both the forward and reverse processes indexed by $t$ occur within a limited number of time steps $T$ (the DDPM authors use $T = 1000$). We start with $t=0$, sample the real image $\mathbf{x}_0$ from the data distribution.
A cat image from ImageNet is used to show the forward diffusion process, which samples some noise from a Gaussian distribution at each time step $t$ and adds the noise to the image of the previous time step. Assuming a sufficiently large $T$ and a well-behaved schedule for adding noise at each time step, you will end up with what is called an [Isotropic Gaussian Distribution](https://math.stackexchange.com/questions/1991961/gaussian-distribution-is-isotropic) at $t = T$ via this gradual process. @@ -138,7 +138,7 @@ $\mathbf{x}_0$ is the initial (real and undamaged) image here, $\mathbf{\epsilon The training algorithm is shown as follows: -![Image-3](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/generative/images/diffusion_3.png) +![Image-3](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/generative/images/diffusion_3.png) In other words: @@ -160,7 +160,7 @@ What we always use here is very similar to the [autoencoder](https://en.wikipedi As for model architecture, the DDPM authors chose U-Net, which was introduced by [Ronneberger et al., 2015](https://arxiv.org/abs/1505.04597) and achieved state-of-the-art results in medical image segmentation at the time. Like any autoencoder, this network consists of a bottleneck in the middle, ensuring that the network learns only the most important information. Importantly, it introduces residual connections between the encoder and decoder, greatly improving gradient flows (which is inspired by [He et al., 2015](https://arxiv.org/abs/1512.03385)). -![Image-4](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/generative/images/diffusion_4.jpg) +![Image-4](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/generative/images/diffusion_4.jpg) We can see that the U-Net model downsamples the input (that is, makes the input smaller in terms of spatial resolution), and then performs upsampling. @@ -818,7 +818,7 @@ dict_keys(['image']) Since we will sample from the model during training (to track progress), we define the following code. Sampling is summarized in this document as Algorithm 2. -![Image-5](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/generative/images/diffusion_5.png) +![Image-5](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/generative/images/diffusion_5.png) Generating a new image from a diffusion model is achieved by reversing the diffusion process: starting with $T$, we sample pure noise from the Gaussian distribution, and then use our neural network to gradually denoise (using the conditional probability it learns), until we finally end up at the time step $t = 0$. As shown above, we can derive a slightly less denoised image $\mathbf{x}_{t-1}$ by plugging in the reparametrization of the mean, using our noise predictor. Note that the variance is known in advance.
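For reference, the per-step denoising update described here can be written out explicitly; the following is the standard DDPM formulation (Algorithm 2 in Ho et al., 2020), with $\alpha_t = 1 - \beta_t$ and $\bar{\alpha}_t = \prod_{s=1}^{t} \alpha_s$ in the usual notation:

$$\mathbf{x}_{t-1} = \frac{1}{\sqrt{\alpha_t}}\left(\mathbf{x}_t - \frac{1-\alpha_t}{\sqrt{1-\bar{\alpha}_t}}\,\boldsymbol{\epsilon}_\theta(\mathbf{x}_t, t)\right) + \sigma_t \mathbf{z},\qquad \mathbf{z} \sim \mathcal{N}(\mathbf{0}, \mathbf{I}) \text{ for } t > 1,\ \ \mathbf{z} = \mathbf{0} \text{ for } t = 1$$

Here $\boldsymbol{\epsilon}_\theta$ is the trained noise predictor and $\sigma_t$ is the variance that is known in advance, as noted above.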
diff --git a/tutorials/source_en/generative/gan.md b/tutorials/source_en/generative/gan.md index 2bf4db404b..ed9d0f8df1 100644 --- a/tutorials/source_en/generative/gan.md +++ b/tutorials/source_en/generative/gan.md @@ -1,4 +1,4 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/generative/gan.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/generative/gan.md) # GAN for Image Generation @@ -34,7 +34,7 @@ Theoretically, it reaches the nash equilibrium when $p_{G}(x;\theta) = p_{data}( 3. The generator generates data that is closer to the real data distribution through optimization. 4. The data generated by the generator reaches the same distribution as the real data. In this case, the output of the discriminator is 1/2. -![gan](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/gan_image.png) +![gan](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/gan_image.png) In the preceding figure, the blue dotted line indicates the discriminator, the black dotted line indicates the real data distribution, the green solid line indicates the false data distribution generated by the generator, $z$ indicates the implicit vector, and $x$ indicates the generated fake image $G(z)$. The image comes from [Generative Adversarial Nets](https://papers.nips.cc/paper/5423-generative-adversarial-nets.pdf). For details about the training method, see the original paper. @@ -82,7 +82,7 @@ Successfully downloaded / unzipped to . ### Data Loading -Use MindSpore's own [MnistDataset](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.MnistDataset.html) API to read and parse the source files of the MNIST dataset to build the dataset. Then, pre-process the data. +Use MindSpore's own [MnistDataset](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.MnistDataset.html) API to read and parse the source files of the MNIST dataset to build the dataset. Then, pre-process the data. ```python import numpy as np @@ -200,7 +200,7 @@ net_g.update_parameters_name('generator') ### Discriminator -As described above, `Discriminator` is a binary network model, and outputs the probability that the image is determined as a real image. It is processed through a series of [Dense](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.Dense.html) and [LeakyReLU](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.LeakyReLU.html) layers. Finally, the [Sigmoid](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.Sigmoid.html) activation function is used to return the data within the range of [0, 1] to obtain the final probability. After instantiating the discriminator, you need to change the parameter name. Otherwise, an error is reported in static graph mode. +As described above, `Discriminator` is a binary network model, and outputs the probability that the image is determined as a real image. 
It is processed through a series of [Dense](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.Dense.html) and [LeakyReLU](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.LeakyReLU.html) layers. Finally, the [Sigmoid](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.Sigmoid.html) activation function is used to return the data within the range of [0, 1] to obtain the final probability. After instantiating the discriminator, you need to change the parameter name. Otherwise, an error is reported in static graph mode. ```python # Discriminator @@ -228,7 +228,7 @@ net_d.update_parameters_name('discriminator') ### Loss Function and Optimizer -After `Generator` and `Discriminator` are defined, the binary cross-entropy loss function [BCELoss](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.BCELoss.html) in MindSpore is used as the loss function. Both the generator and discriminator use the [Adam](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.Adam.html) optimizer. However, you need to build two optimizers with different names to update the parameters of the two models. For details, see the following code. Note that the parameter names of the optimizer also need to be changed. +After `Generator` and `Discriminator` are defined, the binary cross-entropy loss function [BCELoss](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.BCELoss.html) in MindSpore is used as the loss function. Both the generator and discriminator use the [Adam](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.Adam.html) optimizer. However, you need to build two optimizers with different names to update the parameters of the two models. For details, see the following code. Note that the parameter names of the optimizer also need to be changed. ```python lr = 0.0002 # Learning rate @@ -417,7 +417,7 @@ ani.save('train_test.gif', writer='pillow', fps=1) ``` -![Dynamic test image during training](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/generative/images/train_test.gif) +![Dynamic test image during training](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/generative/images/train_test.gif) As shown in the preceding figure, the image quality becomes better as the number of training epochs increases. If the value of `epoch` is greater than 100, the generated handwritten digit image is similar to that in the dataset. Now, let's load the generator network model parameter file to generate an image. The code is as follows: diff --git a/tutorials/source_en/generative/pix2pix.md b/tutorials/source_en/generative/pix2pix.md index a294005b0b..c5feed0fa5 100644 --- a/tutorials/source_en/generative/pix2pix.md +++ b/tutorials/source_en/generative/pix2pix.md @@ -1,4 +1,4 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/generative/pix2pix.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/generative/pix2pix.md) # Pix2Pix for Image Translation @@ -27,7 +27,7 @@ The formula is a loss function of cGAN. 
`D` tries to correctly classify real ima $$arg\min_{G}\max_{D}L_{cGAN}(G,D)$$ -![pix2pix1](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/generative/images/pix2pix_1.png) +![pix2pix1](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/generative/images/pix2pix_1.png) To compare the differences between cGAN and GAN, the objectives of GAN can be expressed as follows: @@ -88,7 +88,7 @@ After the data is processed, you can set up the network. The generator, discrimi U-Net is a fully convolutional structure proposed by the pattern recognition and image processing team of University of Freiburg in Germany. It is divided into two parts. The left part is the compression path formed by convolution and downsampling operations, and the right part is the expansion path formed by convolution and upsampling. The input of each expanded network block is formed by combining the features sampled at the upper layer and the features of the compression path part. The network model is a U-shaped structure and therefore is called U-Net. Compared with the common network where the sampling is reduced to a low dimension and then increased to the original resolution, the U-Net adds skip-connection. The corresponding feature maps and the decoded feature maps of the same size are combined by channel. It is used to reserve pixel-level details at different resolutions. -![pix2pix2](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/generative/images/pix2pix_2.png) +![pix2pix2](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/generative/images/pix2pix_2.png) #### Defining the U-Net Skip Connection Block @@ -416,7 +416,7 @@ ms per step:289.41 epoch:100/100 step:24/25 Dloss:0.4199 Gloss:9.2418 ## Inference -Obtain the CKPT file after the preceding training process is complete, import the weight parameters in the CKPT file to the model by using [load_checkpoint](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.load_checkpoint.html) and [load_param_into_net](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.load_param_into_net.html), obtain data for inference, and demonstrate the inference effect. (Only 100 epochs are performed during the training process.) +Obtain the CKPT file after the preceding training process is complete, import the weight parameters in the CKPT file to the model by using [load_checkpoint](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.load_checkpoint.html) and [load_param_into_net](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.load_param_into_net.html), obtain data for inference, and demonstrate the inference effect. (Only 100 epochs are performed during the training process.) 
```python from mindspore import load_checkpoint, load_param_into_net @@ -442,7 +442,7 @@ plt.show() The inference effect of each dataset is as follows: -![pix2pix3](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/generative/images/pix2pix_3.png) +![pix2pix3](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/generative/images/pix2pix_3.png) ## Reference diff --git a/tutorials/source_en/model_infer/lite_infer/overview.md b/tutorials/source_en/model_infer/lite_infer/overview.md index e22d804d74..5f3fb946fa 100644 --- a/tutorials/source_en/model_infer/lite_infer/overview.md +++ b/tutorials/source_en/model_infer/lite_infer/overview.md @@ -1,5 +1,5 @@ # Lite Inference Overview -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/model_infer/lite_infer/overview.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/model_infer/lite_infer/overview.md) MindSpore Lite is a lightweight inference engine focused on efficient inference deployment solutions for offline models and high performance inference for end-to-end devices. For more information, please refer to [Lite documentation](https://www.mindspore.cn/lite/docs/en/master/index.html). \ No newline at end of file diff --git a/tutorials/source_en/model_migration/model_migration.md b/tutorials/source_en/model_migration/model_migration.md index e2bf3a4d99..9bb9482766 100644 --- a/tutorials/source_en/model_migration/model_migration.md +++ b/tutorials/source_en/model_migration/model_migration.md @@ -1,6 +1,6 @@ # Model Migration -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/model_migration/model_migration.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/model_migration/model_migration.md) This chapter mainly gives a brief introduction to the dataset, model, and training and inference processes necessary for model migration scenarios to be built on MindSpore. It also shows the differences between MindSpore and PyTorch in terms of dataset packing, model building, and training process code. @@ -18,11 +18,11 @@ Third parties that compute on the CPU like Numpy, OpenCV, as well as Python oper ## Dataset Packing -MindSpore provides a variety of typical open source datasets for parsing and reading, such as MNIST, CIFAR-10, CLUE, LJSpeech, etc. For details, please refer to [mindspore.dataset](https://www.mindspore.cn/docs/en/master/api_python/mindspore.dataset.html). +MindSpore provides a variety of typical open source datasets for parsing and reading, such as MNIST, CIFAR-10, CLUE, LJSpeech, etc. For details, please refer to [mindspore.dataset](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.dataset.html). 
### Customized Data Loading GeneratorDataset -In migration scenarios, the most common way to load data is [GeneratorDataset](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.GeneratorDataset.html#mindspore.dataset.GeneratorDataset), which can be directly docked to the MindSpore model for training and inference by simply packing the Python iterator. +In migration scenarios, the most common way to load data is [GeneratorDataset](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.GeneratorDataset.html#mindspore.dataset.GeneratorDataset), which can be directly docked to the MindSpore model for training and inference by simply packing the Python iterator. ```python import numpy as np @@ -56,17 +56,17 @@ GeneratorDataset needs to contain at least: - source: a Python iterator; - column_names: the name of each output of the iterator\_\_getitem\_\_ method. -For more use methods, refer to [GeneratorDataset](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.GeneratorDataset.html#mindspore.dataset.GeneratorDataset). +For more use methods, refer to [GeneratorDataset](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.GeneratorDataset.html#mindspore.dataset.GeneratorDataset). dataset.batch takes consecutive batch_size entries in the dataset and combines them into a single batch, which needs to contain at least: - batch_size: Specifies the data entries contained in each batch of data. -For more use methods, refer to [Dataset.batch](https://www.mindspore.cn/docs/en/master/api_python/dataset/dataset_method/batch/mindspore.dataset.Dataset.batch.html). +For more use methods, refer to [Dataset.batch](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/dataset_method/batch/mindspore.dataset.Dataset.batch.html). ### Differences with PyTorch Dataset Construction -![generatordataset_dataloader.png](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/model_migration/images/generatordataset_dataloader.png) +![generatordataset_dataloader.png](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/model_migration/images/generatordataset_dataloader.png) The main differences between MindSpore's GeneratorDataset and PyTorch's DataLoader are: @@ -74,7 +74,7 @@ The main differences between MindSpore's GeneratorDataset and PyTorch's DataLoad - PyTorch's data augmentation inputs are of type Tensor, MindSpore's data augmentation inputs are of type numpy, and data processing cannot be done with MindSpore's mint, ops, and nn operators; - PyTorch's batch operation is a property of the DataLoader, MindSpore's batch operation is a separate method. -For more details, refer to [Differences with torch.utils.data.DataLoader](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_diff/DataLoader.html). +For more details, refer to [Differences with torch.utils.data.DataLoader](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_diff/DataLoader.html). ## Model Construction @@ -134,13 +134,13 @@ for i in net.get_parameters(): -MindSpore and PyTorch build models in pretty much the same way, and the differences in the use of operators can be found in the [API Differences document](https://www.mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html). 
+MindSpore and PyTorch build models in pretty much the same way, and the differences in the use of operators can be found in the [API Differences document](https://www.mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html). #### Model Saving and Loading PyTorch provides `state_dict()` for parameter state viewing and saving, and `load_state_dict` for model parameter loading. -MindSpore can use [save_checkpoint](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.save_checkpoint.html) and [load_checkpoint](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.load_checkpoint.html). +MindSpore can use [save_checkpoint](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.save_checkpoint.html) and [load_checkpoint](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.load_checkpoint.html). @@ -180,7 +180,7 @@ ms_model.load_state_dict(param_dict) ### Optimizer -Comparison of similarities and differences between optimizers supported by both PyTorch and MindSpore, see [API mapping table](https://mindspore.cn/docs/en/master/note/api_mapping/pytorch_api_mapping.html#torch-optim). +Comparison of similarities and differences between optimizers supported by both PyTorch and MindSpore, see [API mapping table](https://mindspore.cn/docs/en/br_base/note/api_mapping/pytorch_api_mapping.html#torch-optim). #### Implementation and Usage Differences of Optimizers @@ -190,7 +190,7 @@ When using the optimizer in MindSpore, simply compute the gradients directly and If the learning rate needs to be dynamically adjusted during training, PyTorch provides the `LRScheduler` class for learning rate management. When using dynamic learning rates, pass the `optimizer` instance into the `LRScheduler` subclass and perform the learning rate modification by calling `scheduler.step()` in a loop and synchronizing the modification to the optimizer. -MindSpore provides both `Cell` and `list` methods for dynamically modifying the learning rate. When used, the corresponding dynamic learning rate object is passed directly into the optimizer, and the update of the learning rate is executed automatically in the optimizer, please refer to [Dynamic Learning Rate](https://www.mindspore.cn/docs/en/master/api_python/mindspore.nn.html#dynamic-learning-rate). +MindSpore provides both `Cell` and `list` methods for dynamically modifying the learning rate. When used, the corresponding dynamic learning rate object is passed directly into the optimizer, and the update of the learning rate is executed automatically in the optimizer, please refer to [Dynamic Learning Rate](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.nn.html#dynamic-learning-rate).
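As a rough illustration of the `list` and `Cell` styles described above, the sketch below builds both kinds of dynamic learning rate and hands them straight to an optimizer. It is only a minimal example: the placeholder `nn.Dense` network and the decay numbers are made up for demonstration and are not part of the migration sample.

```python
from mindspore import nn

# Placeholder network; any Cell with trainable parameters would do.
net = nn.Dense(10, 1)

# "list" style: precompute a per-step learning-rate schedule and pass the list in.
lr_list = nn.cosine_decay_lr(min_lr=1e-5, max_lr=1e-3, total_step=1000,
                             step_per_epoch=100, decay_epoch=10)
opt_from_list = nn.Adam(net.trainable_params(), learning_rate=lr_list)

# "Cell" style: pass a dynamic learning-rate Cell; the optimizer queries it each step,
# so no scheduler.step() call is needed in the training loop.
lr_cell = nn.ExponentialDecayLR(learning_rate=1e-3, decay_rate=0.9, decay_steps=100)
opt_from_cell = nn.Adam(net.trainable_params(), learning_rate=lr_cell)
```

Either optimizer can then be used in the training step exactly as a fixed-learning-rate optimizer would be.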
diff --git a/tutorials/source_en/nlp/sentiment_analysis.md b/tutorials/source_en/nlp/sentiment_analysis.md index 7b046196e6..52cbfdb6f9 100644 --- a/tutorials/source_en/nlp/sentiment_analysis.md +++ b/tutorials/source_en/nlp/sentiment_analysis.md @@ -1,4 +1,4 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/nlp/sentiment_analysis.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/nlp/sentiment_analysis.md) # Sentiment Classification Implemented by RNN @@ -157,7 +157,7 @@ len(imdb_train) 25000 ``` -After the IMDB dataset is loaded to the memory and built as an iteration object, you can use the [Generatordataset](https://www.mindspore.cn/docs/en/master/api_python/dataset/mindspore.dataset.GeneratorDataset.html) API provided by `mindspore.dataset` to load the dataset iteration object and then perform data processing. The following encapsulates a function to load train and test using `GeneratorDataset`, and set `column_name` of the text and label in the dataset to `text` and `label`, respectively. +After the IMDB dataset is loaded to the memory and built as an iteration object, you can use the [Generatordataset](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/mindspore.dataset.GeneratorDataset.html) API provided by `mindspore.dataset` to load the dataset iteration object and then perform data processing. The following encapsulates a function to load train and test using `GeneratorDataset`, and set `column_name` of the text and label in the dataset to `text` and `label`, respectively. ```python import mindspore.dataset as ds @@ -268,9 +268,9 @@ Word segmentation is performed on the IMDB dataset loaded by the loader, but the - Use the Vocab to convert all tokens to index IDs. - The length of the text sequence is unified. If the length is insufficient, `` is used to supplement the length. If the length exceeds the limit, the excess part is truncated. -Here, the API provided in `mindspore.dataset` is used for preprocessing. The APIs used here are designed for MindSpore high-performance data engines. The operations corresponding to each API are considered as a part of the data pipeline. For details, see [MindSpore Data Engine](https://www.mindspore.cn/docs/en/master/features/data_engine.html). +Here, the API provided in `mindspore.dataset` is used for preprocessing. The APIs used here are designed for MindSpore high-performance data engines. The operations corresponding to each API are considered as a part of the data pipeline. For details, see [MindSpore Data Engine](https://www.mindspore.cn/docs/en/br_base/features/data_engine.html). -For the table query operation from a token to an index ID, use the `text.Lookup` API to load the built vocabulary and specify `unknown_token`. The [PadEnd](https://www.mindspore.cn/docs/en/master/api_python/dataset_transforms/mindspore.dataset.transforms.PadEnd.html) API is used to unify the length of the text sequence. This API defines the maximum length and padding value (`pad_value`). In this example, the maximum length is 500, and the padding value corresponds to the index ID of `` in the vocabulary. 
+For the table query operation from a token to an index ID, use the `text.Lookup` API to load the built vocabulary and specify `unknown_token`. The [PadEnd](https://www.mindspore.cn/docs/en/br_base/api_python/dataset_transforms/mindspore.dataset.transforms.PadEnd.html) API is used to unify the length of the text sequence. This API defines the maximum length and padding value (`pad_value`). In this example, the maximum length is 500, and the padding value corresponds to the index ID of `` in the vocabulary. > In addition to pre-processing the `text` data in the dataset, the `label` data needs to be converted to the float32 format to meet the subsequent model training requirements. @@ -282,7 +282,7 @@ pad_op = ds.transforms.PadEnd([500], pad_value=vocab.tokens_to_ids('')) type_cast_op = ds.transforms.TypeCast(ms.float32) ``` -After the preprocessing is complete, you need to add data to the dataset processing pipeline and use the [map](https://www.mindspore.cn/docs/en/master/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.map.html#mindspore.dataset.Dataset.map) API to add operations to the specified column. +After the preprocessing is complete, you need to add data to the dataset processing pipeline and use the [map](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.map.html#mindspore.dataset.Dataset.map) API to add operations to the specified column. ```python imdb_train = imdb_train.map(operations=[lookup_op, pad_op], input_columns=['text']) @@ -298,7 +298,7 @@ The IMDB dataset does not contain the validation set. Therefore, you need to man imdb_train, imdb_valid = imdb_train.split([0.7, 0.3]) ``` -Finally, specify the batch size of the dataset by using the [batch](https://www.mindspore.cn/docs/en/master/api_python/dataset/dataset_method/batch/mindspore.dataset.Dataset.batch.html) API and determine whether to discard the remaining data that cannot be exactly divided by the batch size. +Finally, specify the batch size of the dataset by using the [batch](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/dataset_method/batch/mindspore.dataset.Dataset.batch.html) API and determine whether to discard the remaining data that cannot be exactly divided by the batch size. > Call the `map`, `split`, and `batch` APIs of the dataset to add corresponding operations to the dataset processing pipeline. The return value is of the new dataset type. Currently, only the pipeline operation is defined. During execution, the data processing pipeline is executed to obtain the processed data and send the data to the model for training. @@ -333,17 +333,17 @@ Here, the processed GloVe word vector matrix is used. `embedding_table` of `nn.E RNN is a type of neural network that uses sequence data as an input, performs recursion in the evolution direction of a sequence, and connects all nodes (circulating units) in a chain. The following figure shows the general RNN structure. -![RNN-0](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/nlp/images/0-RNN-0.png) +![RNN-0](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/nlp/images/0-RNN-0.png) > The left part of the figure shows an RNN Cell cycle, and the right part shows the RNN chain connection. Actually, there is only one Cell parameter regardless of a single RNN Cell or an RNN network, and the parameter is updated in continuous cyclic calculation. 
The recurrent feature of the RNN matches the sequence feature (a sentence is a sequence composed of words) of the natural language text. Therefore, the RNN is widely used in the research of natural language processing. The following figure shows the disassembled RNN structure. -![RNN](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/nlp/images/0-RNN.png) +![RNN](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/nlp/images/0-RNN.png) A structure of a single RNN Cell is simple, causing the gradient vanishing problem. Specifically, when a sequence in the RNN is relatively long, information of a sequence header is basically lost at a tail of the sequence. To solve this problem, the LSTM(Long short-term memory) is proposed. The gating mechanism is used to control the retention and discarding of information flows in each cycle. The following figure shows the disassembled LSTM structure. -![LSTM](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/nlp/images/0-LSTM.png) +![LSTM](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/nlp/images/0-LSTM.png) In this section, the LSTM variant instead of the classic RNN is used for feature extraction to avoid the gradient vanishing problem and obtain a better model effect. The formula corresponding to `nn.LSTM` in MindSpore is as follows: @@ -389,7 +389,7 @@ class RNN(nn.Cell): ### Loss Function and Optimizer -After the model body is built, instantiate the network based on the specified parameters, select the loss function and optimizer. For a feature of the sentimental classification problem in this section, that is, a binary classification problem for predicting positive or negative, [nn.BCEWithLogitsLoss](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.BCEWithLogitsLoss.html) (binary cross entropy loss function) is selected. +After the model body is built, instantiate the network based on the specified parameters, select the loss function and optimizer. For a feature of the sentimental classification problem in this section, that is, a binary classification problem for predicting positive or negative, [nn.BCEWithLogitsLoss](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.BCEWithLogitsLoss.html) (binary cross entropy loss function) is selected. ```python hidden_size = 256 @@ -532,7 +532,7 @@ You can see that the loss decreases gradually in each epoch and the accuracy of After model training is complete, you need to test or deploy the model. In this case, you need to load the saved optimal model (that is, checkpoint) for subsequent tests. The checkpoint loading and network weight loading APIs provided by MindSpore are used to load the saved model checkpoint to the memory and load the checkpoint to the model. -> The [load_param_into_net](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.load_param_into_net.html) API returns the weight name that does not match the checkpoint in the model. If the weight name matches the checkpoint, an empty list is returned. +> The [load_param_into_net](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.load_param_into_net.html) API returns the weight name that does not match the checkpoint in the model. If the weight name matches the checkpoint, an empty list is returned. 
```python param_dict = ms.load_checkpoint(ckpt_file_name) diff --git a/tutorials/source_en/nlp/sequence_labeling.md b/tutorials/source_en/nlp/sequence_labeling.md index 5460de0b59..acff1c4d79 100644 --- a/tutorials/source_en/nlp/sequence_labeling.md +++ b/tutorials/source_en/nlp/sequence_labeling.md @@ -1,4 +1,4 @@ -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/nlp/sequence_labeling.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/nlp/sequence_labeling.md) # LSTM+CRF Sequence Labeling diff --git a/tutorials/source_en/orange_pi/dev_start.md b/tutorials/source_en/orange_pi/dev_start.md index 9d88c7676c..febb0801c8 100644 --- a/tutorials/source_en/orange_pi/dev_start.md +++ b/tutorials/source_en/orange_pi/dev_start.md @@ -1,6 +1,6 @@ # Quick Start -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/orange_pi/dev_start.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/orange_pi/dev_start.md) Since developers may perform custom model and case development in OrangePi AIpro (hereinafter: OrangePi Development Board), this chapter illustrates the development considerations in the OrangePi Development Board through a handwritten digit recognition case based on MindSpore. @@ -14,15 +14,15 @@ After obtaining the OrangePi AIpro development board, developers first need to c ### Image Burning -To run this case, it is necessary to burn the Ubuntu image on the OrangePi AIpro official website. Please refer to [Image Burning](https://www.mindspore.cn/tutorials/en/master/orange_pi/environment_setup.html#1-image-burning-taking-windows-as-an-example). +To run this case, it is necessary to burn the Ubuntu image on the OrangePi AIpro official website. Please refer to [Image Burning](https://www.mindspore.cn/tutorials/en/br_base/orange_pi/environment_setup.html#1-image-burning-taking-windows-as-an-example). ### CANN Upgrading -Please refer to [CANN Upgrading](https://www.mindspore.cn/tutorials/en/master/orange_pi/environment_setup.html#3-cann-upgrading). +Please refer to [CANN Upgrading](https://www.mindspore.cn/tutorials/en/br_base/orange_pi/environment_setup.html#3-cann-upgrading). ### MindSpore Upgrading -Please refer to [MindSpore Upgrading](https://www.mindspore.cn/tutorials/en/master/orange_pi/environment_setup.html#4-mindspore-upgrading). +Please refer to [MindSpore Upgrading](https://www.mindspore.cn/tutorials/en/br_base/orange_pi/environment_setup.html#4-mindspore-upgrading). ```python import mindspore @@ -35,7 +35,7 @@ from mindspore.dataset import MnistDataset ## Preparing and Loading Dataset -MindSpore provides a Pipeline-based [data engine](https://www.mindspore.cn/docs/en/master/features/data_engine.html) to realize efficient data preprocessing through [data loading and processing](https://www.mindspore.cn/tutorials/en/master/beginner/dataset.html) to realize efficient data preprocessing. 
In this case, we use the Mnist dataset, which is automatically downloaded and then preprocessed using the data transforms provided by `mindspore.dataset`. +MindSpore provides a Pipeline-based [data engine](https://www.mindspore.cn/docs/en/br_base/features/data_engine.html) to realize efficient data preprocessing through [data loading and processing](https://www.mindspore.cn/tutorials/en/br_base/beginner/dataset.html) to realize efficient data preprocessing. In this case, we use the Mnist dataset, which is automatically downloaded and then preprocessed using the data transforms provided by `mindspore.dataset`. ```python # install download @@ -113,7 +113,7 @@ train_dataset = datapipe(train_dataset, 64) test_dataset = datapipe(test_dataset, 64) ``` -The dataset can be accessed iteratively using [create_tuple_iterator](https://www.mindspore.cn/docs/en/master/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_tuple_iterator.html) or [create_dict_iterator](https://www.mindspore.cn/docs/en/master/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_dict_iterator.html) to see the shape and datatype of the data and labels. +The dataset can be accessed iteratively using [create_tuple_iterator](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_tuple_iterator.html) or [create_dict_iterator](https://www.mindspore.cn/docs/en/br_base/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_dict_iterator.html) to see the shape and datatype of the data and labels. ```python for image, label in test_dataset.create_tuple_iterator(): @@ -185,8 +185,8 @@ In model training, a complete training process (STEP) requires the realization o MindSpore uses a functional automatic differentiation mechanism, so for the above steps need to be implemented: 1. Define the forward computation function. -2. Use [value_and_grad](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.value_and_grad.html) to obtain the gradient computation function by functional transformation. -3. Define the training function and use [set_train](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.set_train) to set to training mode, perform forward computation, backpropagation and parameter optimization. +2. Use [value_and_grad](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.value_and_grad.html) to obtain the gradient computation function by functional transformation. +3. Define the training function and use [set_train](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.set_train) to set to training mode, perform forward computation, backpropagation and parameter optimization. 
```python # Instantiate loss function and optimizer diff --git a/tutorials/source_en/orange_pi/environment_setup.md b/tutorials/source_en/orange_pi/environment_setup.md index dd0a7c69ba..adc4022da3 100644 --- a/tutorials/source_en/orange_pi/environment_setup.md +++ b/tutorials/source_en/orange_pi/environment_setup.md @@ -1,6 +1,6 @@ # Environment Setup Guide -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/orange_pi/environment_setup.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/orange_pi/environment_setup.md) This section describes how to burn an image on OrangePi AIpro, customize the installation of CANN and MindSpore, and configure the runtime environment. @@ -12,7 +12,7 @@ Image burning can be performed in any operating system. Here we will take Window Step 1 Insert the Micro SD card into the card reader and the card reader into the PC. -![environment-setup-1-1](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/orange_pi/images/environment_setup_1-1.jpg) +![environment-setup-1-1](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/orange_pi/images/environment_setup_1-1.jpg) ### 1.2 Downloading the Ubuntu image @@ -22,11 +22,11 @@ Step 1 Click [here](http://www.orangepi.cn/html/hardWare/computerAndMicrocontrol Step 2 Click the arrow icon in the picture to jump to the Baidu Wangpan download page. -![environment-setup-1-2](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/orange_pi/images/environment_setup_1-2.png) +![environment-setup-1-2](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/orange_pi/images/environment_setup_1-2.png) Step 3 Select the desktop version to download, it is recommended to download the 0318 version of the environment. -![environment-setup-1-3](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/orange_pi/images/environment_setup_1-3.png) +![environment-setup-1-3](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/orange_pi/images/environment_setup_1-3.png) Step 4 Alternative download method. @@ -42,13 +42,13 @@ There are two card-making tools balenaEtcher, Rufus, and you can choose any one Click [here](https://etcher.balena.io/) to jump to the official website, and click the green download button to jump to where the software is downloaded. - ![environment-setup-1-4](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/orange_pi/images/environment_setup_1-4.png) + ![environment-setup-1-4](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/orange_pi/images/environment_setup_1-4.png) Step 2 Select to download the Portable version. The Portable version does not require installation, so double-click it to open it and use it. 
- ![environment-setup-1-5](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/orange_pi/images/environment_setup_1-5.png) + ![environment-setup-1-5](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/orange_pi/images/environment_setup_1-5.png) Step 3 Alternative download method. @@ -56,9 +56,9 @@ There are two card-making tools balenaEtcher, Rufus, and you can choose any one Step 4 Open balenaEtcher. - ![environment-setup-1-6](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/orange_pi/images/environment_setup_1-6.png) + ![environment-setup-1-6](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/orange_pi/images/environment_setup_1-6.png) - ![environment-setup-1-7](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/orange_pi/images/environment_setup_1-7.png) + ![environment-setup-1-7](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/orange_pi/images/environment_setup_1-7.png) - Rufus: @@ -80,19 +80,19 @@ Here we introduce balenaEtcher, Rufus to burn the image, you can burn according 3. Click Start Burning, as shown below: - ![environment-setup-1-8](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/orange_pi/images/environment_setup_1-8.png) + ![environment-setup-1-8](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/orange_pi/images/environment_setup_1-8.png) It takes about 20 minutes to burn and verify, so please be patient: - ![environment-setup-1-9](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/orange_pi/images/environment_setup_1-9.png) + ![environment-setup-1-9](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/orange_pi/images/environment_setup_1-9.png) - ![environment-setup-1-10](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/orange_pi/images/environment_setup_1-10.png) + ![environment-setup-1-10](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/orange_pi/images/environment_setup_1-10.png) Step 2 Burning is complete. After the completion of burning, balenaEtcher is shown in the following figure, if the green indicator icon shows that the image is burned successfully, at this time you can exit balenaEtcher, pull out the TF card and insert it into the TF card slot on the development board to use: - ![environment-setup-1-11](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/orange_pi/images/environment_setup_1-11.png) + ![environment-setup-1-11](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/orange_pi/images/environment_setup_1-11.png) - Rufus burns images: @@ -100,13 +100,13 @@ Here we introduce balenaEtcher, Rufus to burn the image, you can burn according Insert the sd card into the card reader, insert the card reader into the computer, select the image and sd card, click "Start". 
- ![environment-setup-1-12](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/orange_pi/images/environment_setup_1-12.png) + ![environment-setup-1-12](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/orange_pi/images/environment_setup_1-12.png) Step 2 Burning is complete. Pull out the card reader directly after the wait is over. - ![environment-setup-1-13](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/orange_pi/images/environment_setup_1-13.png) + ![environment-setup-1-13](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/orange_pi/images/environment_setup_1-13.png) ## 2. Development Board Startup and Network Connection @@ -158,7 +158,7 @@ If the current CANN version does not meet the development requirements, the CANN Step 1 Use the `CTRL+ALT+T` or click on the icon with `$_` at the bottom of the page to open the terminal and switch to the root user. -![environment-setup-1-14](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/orange_pi/images/environment_setup_1-14.png) +![environment-setup-1-14](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/orange_pi/images/environment_setup_1-14.png) Switch the root user, root user password: Mind@123. @@ -213,7 +213,7 @@ Step 6 Execute the following command to upgrade the software. Type Y when this prompt pops up during installation, then press Enter to continue the installation. This process takes about 10-15 minutes, please be patient. -![environment-setup-1-16](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/orange_pi/images/environment_setup_1-16.png) +![environment-setup-1-16](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/orange_pi/images/environment_setup_1-16.png) After the upgrade is completed, if the following message is displayed, the software upgrade is successful: @@ -351,4 +351,4 @@ The result of multiplication calculation is correct, MindSpore has been installe ## Next Suggestion -At this point, the OrangePi AIpro development board environment has been set up, and you can experience [online model inference based on MindSpore development on the development board](https://www.mindspore.cn/tutorials/en/master/orange_pi/model_infer.html). +At this point, the OrangePi AIpro development board environment has been set up, and you can experience [online model inference based on MindSpore development on the development board](https://www.mindspore.cn/tutorials/en/br_base/orange_pi/model_infer.html). 
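The final verification step mentioned above (checking the install with a small multiplication) can be reproduced in a few lines. This is only a sketch of the idea, assuming MindSpore has already been installed on the board as described; the exact printout depends on the installed version and backend.

```python
import mindspore
from mindspore import Tensor, ops

# Built-in sanity check: prints the installed version and runs a small multiplication on the device.
mindspore.run_check()

# The same idea done by hand: multiply two small tensors and inspect the result.
x = Tensor([1.0, 2.0, 3.0])
y = Tensor([4.0, 5.0, 6.0])
print(ops.mul(x, y))   # expected: [ 4. 10. 18.]
```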
diff --git a/tutorials/source_en/orange_pi/model_infer.md b/tutorials/source_en/orange_pi/model_infer.md index 093c66a688..99f9d426b0 100644 --- a/tutorials/source_en/orange_pi/model_infer.md +++ b/tutorials/source_en/orange_pi/model_infer.md @@ -1,6 +1,6 @@ # Model Online Inference -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/orange_pi/model_infer.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/orange_pi/model_infer.md) This section describes how to download the Ascend MindSpore online inference case on the OrangePi AIpro (hereafter: OrangePi development board) and launch the Jupyter Lab interface to perform inference. @@ -58,15 +58,15 @@ Step 1 Launch the Jupyter Lab interface. After executing the script, the following printout will appear in the terminal, in which there will be a link to the URL for logging into Jupyter Lab. -![model-infer1](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/orange_pi/images/model_infer1.png) +![model-infer1](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/orange_pi/images/model_infer1.png) Then open the browser. -![model-infer2](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/orange_pi/images/model_infer2.png) +![model-infer2](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/orange_pi/images/model_infer2.png) Then enter the URL link you see above in your browser to log into the Jupyter Lab software. -![model-infer3](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/orange_pi/images/model_infer3.png) +![model-infer3](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/orange_pi/images/model_infer3.png) Step 2 In the Jupyter Lab interface, double-click the case directory shown in the figure below, take “04-FCN” as an example here, you can enter the case directory. The operation process of other cases is similar, just select the corresponding case directory and .ipynb file. @@ -76,7 +76,7 @@ Step 3 In this directory there are all the resources to run the sample, where mi ![model-infer5](../../source_zh_cn/orange_pi/images/model_infer5.png) -The beginning of the file describes the information of hardware resources (Orange Pi development board) and the versions of CANN and MindSpore required for running the sample. Please note to check the environment. For details on environment checking and setup, refer to [Environment Setup Guide](https://www.mindspore.cn/tutorials/en/master/orange_pi/environment_setup.html). +The beginning of the file describes the information of hardware resources (Orange Pi development board) and the versions of CANN and MindSpore required for running the sample. Please note to check the environment. For details on environment checking and setup, refer to [Environment Setup Guide](https://www.mindspore.cn/tutorials/en/br_base/orange_pi/environment_setup.html). Step 4 Click the ⏩ button to run the sample. 
In the pop-up dialog box, click the "Restart" button, then the sample begins to run. @@ -90,4 +90,4 @@ After the inference execution is completed, it is necessary to navigate to `KERN ## Next Suggestion -For specific case development based on MindSpore, please refer to [Quick Start](https://www.mindspore.cn/tutorials/en/master/orange_pi/dev_start.html) +For specific case development based on MindSpore, please refer to [Quick Start](https://www.mindspore.cn/tutorials/en/br_base/orange_pi/dev_start.html) diff --git a/tutorials/source_en/orange_pi/overview.md b/tutorials/source_en/orange_pi/overview.md index b7fb69f78a..af8fbf3c3d 100644 --- a/tutorials/source_en/orange_pi/overview.md +++ b/tutorials/source_en/orange_pi/overview.md @@ -1,6 +1,6 @@ # OrangePi AIpro Development -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/orange_pi/overview.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/orange_pi/overview.md) [OrangePi AIpro](http://www.orangepi.org/) adopts the route of Ascend AI technology, specifically 4-core 64-bit processor and AI processor, integrated graph processor. @@ -12,9 +12,9 @@ At present, the system image of OrangePi AIpro development board has been realiz | :----- |:----- |:----- | | Pre-learning | Before developing based on the MindSpore + OrangePi AIpro development board, it is necessary to understand and master the content | [MindSpore](https://www.mindspore.cn/en)
[Linux](https://www.runoob.com/linux/linux-tutorial.html)
[Jupyter](https://jupyter.org/documentation) | | Image acquisition | OrangePi AIpro Official Website - Official Mirror | [8T](http://www.orangepi.cn/html/hardWare/computerAndMicrocontrollers/service-and-support/Orange-Pi-AIpro.html)
[20T](http://www.orangepi.cn/html/hardWare/computerAndMicrocontrollers/details/Orange-Pi-AIpro(20T).html) | -| Environment Setup | How to build a custom environment based on OrangePi AIpro, including version checks and updates for CANN, MindSpore, kits, etc. (Taking the 8-12 TOPS 16G development board as an example, the operation method for the 20 TOPS development board is the same) | [Environment Setup Guide](https://www.mindspore.cn/tutorials/en/master/orange_pi/environment_setup.html) | -| Online Inference | How to initiate model inference in OrangePi AIpro | [Model Online Inference](https://www.mindspore.cn/tutorials/en/master/orange_pi/model_infer.html) | -| Quick Start | Case study of handwritten digit recognition based on MindSpore, explaining the development considerations in the OrangePi AIpro development board | [Quick Start](https://www.mindspore.cn/tutorials/en/master/orange_pi/dev_start.html) | +| Environment Setup | How to build a custom environment based on OrangePi AIpro, including version checks and updates for CANN, MindSpore, kits, etc. (Taking the 8-12 TOPS 16G development board as an example, the operation method for the 20 TOPS development board is the same) | [Environment Setup Guide](https://www.mindspore.cn/tutorials/en/br_base/orange_pi/environment_setup.html) | +| Online Inference | How to initiate model inference in OrangePi AIpro | [Model Online Inference](https://www.mindspore.cn/tutorials/en/br_base/orange_pi/model_infer.html) | +| Quick Start | Case study of handwritten digit recognition based on MindSpore, explaining the development considerations in the OrangePi AIpro development board | [Quick Start](https://www.mindspore.cn/tutorials/en/br_base/orange_pi/dev_start.html) | | Premium courses | MindSpore + Ascend Development Board:Combining Software and Hard to Play DeepSeek Development Practice Course | [Course link](https://www.hiascend.com/developer/courses/detail/1925362775376744449) | | Case sharing | Ascend Development Board Zone - Case Sharing| [Ascend Development Board Zone](https://www.hiascend.com/developer/devboard) | | MindSpore + OrangePi Code repository | orange-pi-mindspore code repository | [GitHub link](https://github.com/mindspore-courses/orange-pi-mindspore) | diff --git a/tutorials/source_en/parallel/comm_fusion.md b/tutorials/source_en/parallel/comm_fusion.md index ae41ee4d5d..c6f51482d3 100644 --- a/tutorials/source_en/parallel/comm_fusion.md +++ b/tutorials/source_en/parallel/comm_fusion.md @@ -1,12 +1,12 @@ # Distributed Training Communication Fusion -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/parallel/comm_fusion.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/parallel/comm_fusion.md) ## Overview In distributed parallel training scenarios to train large-scale parameter models (e.g., GPT-3, Pangu-$\alpha$), data transmission of cross-device or even cross-node is a bottleneck that limits scalability as well as operator power utilization [1]. 
Communication fusion is an important method to improve network resource utilization and accelerate data transmission efficiency by encapsulating the communication operator of the same source and destination nodes for simultaneous execution to avoid the extra overhead caused by multiple single operator executions. -MindSpore supports the fusion of three common communication operators ([AllReduce](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.AllReduce.html), [AllGather](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.AllGather.html), and [ReduceScatter](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ReduceScatter.html)) in distributed training, and provides a simple and easy-to-use interface for user configuration. The communication fusion plays an important role in the long and steady training mission support. +MindSpore supports the fusion of three common communication operators ([AllReduce](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.AllReduce.html), [AllGather](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.AllGather.html), and [ReduceScatter](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ReduceScatter.html)) in distributed training, and provides a simple and easy-to-use interface for user configuration. The communication fusion plays an important role in the long and steady training mission support. ### Basic Principle @@ -18,7 +18,7 @@ The whole process of distributed training can be roughly divided into two proces As shown in the figure below, each node backs up the complete neural network model and uses the local dataset partition to train a mini-batch for forward and backward computation. The gradient obtained from the backward computation is synchronized across the nodes, and the training of the next mini-batch continues after synchronization, and so on, until the accuracy/loss reaches a threshold, or a certain number of epochs are trained. It can be seen that computation and communication alternate in the distributed training process. Work has been done on how to do pipelining of interdependent computation and transmission to reduce the percentage of cross-node data synchronization in the overall training duration [5][6], which will not be repeated here. -![image](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/parallel/images/data_parallel.png) +![image](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/parallel/images/data_parallel.png) #### The Necessity of Communication Fusion @@ -50,17 +50,17 @@ MindSpore provides two interfaces to enable communication fusion, each of which net.comm_fusion(config=config) ``` - In auto-parallel or semi-auto-parallel scenario, the user can utilize the [comm_fusion](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.Parameter.html#mindspore.Parameter.comm_fusion) parameter provided by this interface to set the parallel strategy when configuring the parallel strategy via `set_auto_parallel_context`, with inputs in the format {"communication_type": {"mode":str, "config": None int or list}}. For details, see `comm_fusion` in [Parallel Configuration](https://www.mindspore.cn/docs/en/r2.5.0/api_python/mindspore/mindspore.set_auto_parallel_context.html). This configuration method is preferred in this scenario. 
+ In auto-parallel or semi-auto-parallel scenario, the user can utilize the [comm_fusion](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.Parameter.html#mindspore.Parameter.comm_fusion) parameter provided by this interface to set the parallel strategy when configuring the parallel strategy via `set_auto_parallel_context`, with inputs in the format {"communication_type": {"mode":str, "config": None int or list}}. For details, see `comm_fusion` in [Parallel Configuration](https://www.mindspore.cn/docs/en/r2.5.0/api_python/mindspore/mindspore.set_auto_parallel_context.html). This configuration method is preferred in this scenario. 2. Use the interface provided by `Cell` - Regardless of the parallel mode scenarios, the user can set the index for the parameters in a layer of the model through the [Cell.set_comm_fusion](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.set_comm_fusion) interface, and MindSpore will fuse the communication operators corresponding to parameters of the same index. + Regardless of the parallel mode scenarios, the user can set the index for the parameters in a layer of the model through the [Cell.set_comm_fusion](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.set_comm_fusion) interface, and MindSpore will fuse the communication operators corresponding to parameters of the same index. ## Operation Practice ### Sample Code Description -> You can download the full sample code here: [distributed_comm_fusion](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/distributed_comm_fusion). +> You can download the full sample code here: [distributed_comm_fusion](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/distributed_comm_fusion). The directory structure is as follows: diff --git a/tutorials/source_en/parallel/data_parallel.md b/tutorials/source_en/parallel/data_parallel.md index cd2b08db96..db69667fc3 100644 --- a/tutorials/source_en/parallel/data_parallel.md +++ b/tutorials/source_en/parallel/data_parallel.md @@ -1,6 +1,6 @@ # Data Parallel -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/parallel/data_parallel.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/parallel/data_parallel.md) ## Overview @@ -10,7 +10,7 @@ The following is an illustration of data parallel operation using the Ascend sin ## Sample Code Description -> You can download the full sample code here: [distributed_data_parallel](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/distributed_data_parallel). +> You can download the full sample code here: [distributed_data_parallel](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/distributed_data_parallel). 
The directory structure is as follows: @@ -53,7 +53,7 @@ rank_size = get_group_size() dataset = ds.MnistDataset(dataset_path, num_shards=rank_size, shard_id=rank_id) ``` -Unlike single-card, the `num_shards` and `shard_id` parameters need to be passed in the dataset interface, corresponding to the number of cards and the logical serial number, respectively, and it is recommended to obtain them through the following interfaces of the [mindspore.communication](https://www.mindspore.cn/docs/en/master/api_python/mindspore.communication.html) module: +Unlike single-card, the `num_shards` and `shard_id` parameters need to be passed in the dataset interface, corresponding to the number of cards and the logical serial number, respectively, and it is recommended to obtain them through the following interfaces of the [mindspore.communication](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore.communication.html) module: - `get_rank`: Obtain the ID of the current device in the cluster. - `get_group_size`: Obtain the number of clusters. @@ -115,7 +115,7 @@ net = Network() ## Training Network -In this step, we need to define the loss function, the optimizer, and the training process. The difference with single-card model is that the data parallel mode also requires the addition of the [mindspore.nn.DistributedGradReducer()](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.DistributedGradReducer.html) interface to aggregate the gradients of all cards. The first parameter of the network is the network parameter to be updated: +In this step, we need to define the loss function, the optimizer, and the training process. The difference with single-card model is that the data parallel mode also requires the addition of the [mindspore.nn.DistributedGradReducer()](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.DistributedGradReducer.html) interface to aggregate the gradients of all cards. The first parameter of the network is the network parameter to be updated: ```python from mindspore import nn @@ -143,7 +143,7 @@ for epoch in range(10): i += 1 ``` -> This can also be trained using [Model.train](https://www.mindspore.cn/docs/en/master/api_python/train/mindspore.train.Model.html#mindspore.train.Model.train). +> This can also be trained using [Model.train](https://www.mindspore.cn/docs/en/br_base/api_python/train/mindspore.train.Model.html#mindspore.train.Model.train). ## Running Single-machine Eight-card Script @@ -175,4 +175,4 @@ epoch: 0 step: 150, loss is 2.2822685 ... ``` -Other startup methods such as `mpirun` and `rank table` startup can be found in [startup methods](https://www.mindspore.cn/tutorials/en/master/parallel/startup_method.html). +Other startup methods such as `mpirun` and `rank table` startup can be found in [startup methods](https://www.mindspore.cn/tutorials/en/br_base/parallel/startup_method.html). 
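Pulling together the pieces above (sharded dataset loading, `value_and_grad`, and `DistributedGradReducer`), a condensed sketch of a data-parallel training step might look as follows. The tiny network, the `dataset_path` placeholder, and the hyperparameters are illustrative only, and the parallel-context configuration from the full sample is omitted here.

```python
import mindspore as ms
import mindspore.dataset as ds
from mindspore import nn
from mindspore.communication import init, get_rank, get_group_size

init()                                              # set up collective communication
rank_id, rank_size = get_rank(), get_group_size()

dataset_path = "MNIST_Data/train"                   # placeholder path
# Each card reads only its own shard of the dataset.
dataset = ds.MnistDataset(dataset_path, num_shards=rank_size, shard_id=rank_id)

net = nn.SequentialCell(nn.Flatten(), nn.Dense(28 * 28, 10))
loss_fn = nn.CrossEntropyLoss()
optimizer = nn.SGD(net.trainable_params(), 1e-2)

def forward_fn(data, label):
    logits = net(data)
    return loss_fn(logits, label)

grad_fn = ms.value_and_grad(forward_fn, None, optimizer.parameters)
# All-reduces the gradients across cards before the parameter update.
grad_reducer = nn.DistributedGradReducer(optimizer.parameters)

def train_step(data, label):
    loss, grads = grad_fn(data, label)
    grads = grad_reducer(grads)
    optimizer(grads)
    return loss
```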
diff --git a/tutorials/source_en/parallel/dataset_slice.md b/tutorials/source_en/parallel/dataset_slice.md index cecf49f750..84e2f995c4 100644 --- a/tutorials/source_en/parallel/dataset_slice.md +++ b/tutorials/source_en/parallel/dataset_slice.md @@ -1,6 +1,6 @@ # Dataset Slicing -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/parallel/dataset_slice.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/parallel/dataset_slice.md) ## Overview @@ -10,9 +10,9 @@ When performing distributed training, taking image data as an example, when the ### Related Interfaces -1. `mindspore.dataset.vision.SlicePatches(num_height=1, num_width=1)`: Slices the Tensor into multiple blocks horizontally and vertically. Suitable for scenarios where the Tensor has a large height and width. `num_height` is the number of slices in vertical direction and `num_width` is the number of slices in horizontal direction. More parameters can be found in [SlicePatches](https://www.mindspore.cn/docs/en/master/api_python/dataset_vision/mindspore.dataset.vision.SlicePatches.html). +1. `mindspore.dataset.vision.SlicePatches(num_height=1, num_width=1)`: Slices the Tensor into multiple blocks horizontally and vertically. Suitable for scenarios where the Tensor has a large height and width. `num_height` is the number of slices in vertical direction and `num_width` is the number of slices in horizontal direction. More parameters can be found in [SlicePatches](https://www.mindspore.cn/docs/en/br_base/api_python/dataset_vision/mindspore.dataset.vision.SlicePatches.html). -2. `dataset_strategy(config=((1, 1, 1, 8), (8,)))`: indicates dataset slicing strategy. For more details, refer to [AutoParallel Parallel Configuration](https://www.mindspore.cn/docs/en/master/api_python/parallel/mindspore.parallel.auto_parallel.AutoParallel.html). The `dataset_strategy` interface has the following limitations: +2. `dataset_strategy(config=((1, 1, 1, 8), (8,)))`: indicates dataset slicing strategy. For more details, refer to [AutoParallel Parallel Configuration](https://www.mindspore.cn/docs/en/br_base/api_python/parallel/mindspore.parallel.auto_parallel.AutoParallel.html). The `dataset_strategy` interface has the following limitations: - Each input is allowed to be sliced in at most one dimension. If `dataset_strategy(config=((1, 1, 1, 8), (8,)))` or `config=((1, 1, 1, 8), (1,))` is supported, each input is sliced in just one dimension at most, but not `config=((1, 1, 4, 2), (1,))`, whose first input is sliced into two dimensions. @@ -22,7 +22,7 @@ When performing distributed training, taking image data as an example, when the ### Sample Code Description -> Download the full sample code here: [dataset_slice](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/dataset_slice). +> Download the full sample code here: [dataset_slice](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/dataset_slice). 
The directory structure is as follows: @@ -85,7 +85,7 @@ data_set = create_dataset(32) ### Network Definition -The network definition here is consistent with the single-card model and the initialization of network parameters and optimizer parameters is deferred through the [no_init_parameters](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.utils.no_init_parameters.html) interface: +The network definition here is consistent with the single-card model and the initialization of network parameters and optimizer parameters is deferred through the [no_init_parameters](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.utils.no_init_parameters.html) interface: ```python from mindspore import nn diff --git a/tutorials/source_en/parallel/distributed_case.rst b/tutorials/source_en/parallel/distributed_case.rst index b055fbd86c..efa8af6753 100644 --- a/tutorials/source_en/parallel/distributed_case.rst +++ b/tutorials/source_en/parallel/distributed_case.rst @@ -1,8 +1,8 @@ Distributed High-Level Configuration Case ========================================== -.. image:: https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg - :target: https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/parallel/distributed_case.rst +.. image:: https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg + :target: https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/parallel/distributed_case.rst :alt: View Source On Gitee .. toctree:: diff --git a/tutorials/source_en/parallel/distributed_gradient_accumulation.md b/tutorials/source_en/parallel/distributed_gradient_accumulation.md index 91cff64b85..84a7030895 100644 --- a/tutorials/source_en/parallel/distributed_gradient_accumulation.md +++ b/tutorials/source_en/parallel/distributed_gradient_accumulation.md @@ -1,6 +1,6 @@ # Gradient Accumulation -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/parallel/distributed_gradient_accumulation.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/parallel/distributed_gradient_accumulation.md) ## Overview @@ -22,7 +22,7 @@ The core idea of gradient accumulation is to add the gradients of multiple Micro ### Related Interfaces -[mindspore.parallel.GradAccumulation(network, micro_size)](https://www.mindspore.cn/docs/en/master/api_python/parallel/mindspore.parallel.nn.GradAccumulation.html): Wrap the network with a finer-grained MicroBatch. `micro_size` is the size of the MicroBatch. +[mindspore.parallel.GradAccumulation(network, micro_size)](https://www.mindspore.cn/docs/en/br_base/api_python/parallel/mindspore.parallel.nn.GradAccumulation.html): Wrap the network with a finer-grained MicroBatch. `micro_size` is the size of the MicroBatch. > - Under grad accumulation situation, suggests to use lazy_inline decorator to reduce compile time, and only support to set the lazy_inline decorator to the outermost cell. 
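A minimal sketch of the wrapping order implied above: the network and loss are first combined by `nn.WithLossCell`, and the resulting LossCell is then wrapped by `GradAccumulation`. The placeholder network, loss, and `micro_size` value are illustrative only, and the `mindspore.parallel.nn` import path is assumed from the linked API page:

```python
from mindspore import nn
from mindspore.parallel.nn import GradAccumulation  # import path assumed from the API page

net = nn.Dense(128, 10)                              # placeholder network
loss_fn = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean")

# Wrap network + loss first, then accumulate gradients over 4 MicroBatches.
loss_net = nn.WithLossCell(net, loss_fn)
grad_accu_net = GradAccumulation(loss_net, micro_size=4)
```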
@@ -32,7 +32,7 @@ The following is an illustration of the gradient accumulation operation using As ### Example Code Description -> Download the complete example code: [distributed_gradient_accumulation](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/distributed_gradient_accumulation). +> Download the complete example code: [distributed_gradient_accumulation](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/distributed_gradient_accumulation). The directory structure is as follows: @@ -60,7 +60,7 @@ init() ### Dataset Loading and Network Definition -Here the dataset loading and network definition is consistent with the single card model, with the initialization of network parameters and optimizer parameters deferred through the [no_init_parameters](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.utils.no_init_parameters.html) interface. The code is as follows: +Here the dataset loading and network definition is consistent with the single card model, with the initialization of network parameters and optimizer parameters deferred through the [no_init_parameters](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.utils.no_init_parameters.html) interface. The code is as follows: ```python import os @@ -109,9 +109,9 @@ with no_init_parameters(): ### Training the Network -In this step, we need to define the loss function and the training process. Parallel mode is set to semi-automatic parallel mode and optimizer parallel via the top-level [AutoParallel](https://www.mindspore.cn/docs/en/master/api_python/parallel/mindspore.parallel.auto_parallel.AutoParallel.html) interface, and both interfaces are called to configure gradient accumulation: +In this step, we need to define the loss function and the training process. Parallel mode is set to semi-automatic parallel mode and optimizer parallel via the top-level [AutoParallel](https://www.mindspore.cn/docs/en/br_base/api_python/parallel/mindspore.parallel.auto_parallel.AutoParallel.html) interface, and both interfaces are called to configure gradient accumulation: -- First the LossCell needs to be defined. In this case the [nn.WithLossCell](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.WithLossCell.html) interface is called to wrap the network and loss functions. +- First the LossCell needs to be defined. In this case the [nn.WithLossCell](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.WithLossCell.html) interface is called to wrap the network and loss functions. - It is then necessary to wrap a layer of `GradAccumulation` around the LossCell and specify a MicroBatch size of 4. Refer to the relevant interfaces in the overview of this chapter for more details. 
```python diff --git a/tutorials/source_en/parallel/dynamic_cluster.md b/tutorials/source_en/parallel/dynamic_cluster.md index 31c7a418dd..118277f9d7 100644 --- a/tutorials/source_en/parallel/dynamic_cluster.md +++ b/tutorials/source_en/parallel/dynamic_cluster.md @@ -1,6 +1,6 @@ # Dynamic Cluster Startup -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/parallel/dynamic_cluster.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/parallel/dynamic_cluster.md) ## Overview @@ -157,7 +157,7 @@ The relevant environment variables: Dynamic cluster startup scripts are consistent across hardware platforms. The following is an example of how to write a startup script for Ascend: -> You can download the full sample code here: [startup_method](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/startup_method). +> You can download the full sample code here: [startup_method](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/startup_method). The directory structure is as follows: @@ -260,7 +260,7 @@ for epoch in range(10): #### Single-Machine Multi-Card -The content of the single-machine multi-card startup script [run_dynamic_cluster.sh](https://gitee.com/mindspore/docs/blob/master/docs/sample_code/startup_method/run_dynamic_cluster.sh) is as follows. Taking the single-machine 8-card as an example: +The content of the single-machine multi-card startup script [run_dynamic_cluster.sh](https://gitee.com/mindspore/docs/blob/br_base/docs/sample_code/startup_method/run_dynamic_cluster.sh) is as follows. Taking the single-machine 8-card as an example: ```bash EXEC_PATH=$(pwd) @@ -317,7 +317,7 @@ epoch: 0, step: 30, loss is 1.0437132 The startup script needs to be split in the multi-machine training scenario. The following is an example of performing 2-machine 8-card training, with each machine executing the startup 4 Worker: -The script [run_dynamic_cluster_1.sh](https://gitee.com/mindspore/docs/blob/master/docs/sample_code/startup_method/run_dynamic_cluster_1.sh) starts 1 `Scheduler` process and 4 `Worker` processes on node 1: +The script [run_dynamic_cluster_1.sh](https://gitee.com/mindspore/docs/blob/br_base/docs/sample_code/startup_method/run_dynamic_cluster_1.sh) starts 1 `Scheduler` process and 4 `Worker` processes on node 1: ```bash EXEC_PATH=$(pwd) @@ -352,7 +352,7 @@ export MS_ROLE=MS_SCHED # Set the startup process to the M python ./net.py > device/scheduler.log 2>&1 & # Start training script ``` -The script [run_dynamic_cluster_2.sh](https://gitee.com/mindspore/docs/blob/master/docs/sample_code/startup_method/run_dynamic_cluster_2.sh) starts `Worker5` to `Worker8` on node 2 (without executing Scheduler): +The script [run_dynamic_cluster_2.sh](https://gitee.com/mindspore/docs/blob/br_base/docs/sample_code/startup_method/run_dynamic_cluster_2.sh) starts `Worker5` to `Worker8` on node 2 (without executing Scheduler): ```bash EXEC_PATH=$(pwd) @@ -380,7 +380,7 @@ do done ``` -> In a multi-machine task, you need to set a different hostname for each host node, otherwise you will get an error reporting `device id` out of bounds. 
Refer to [FAQ](https://www.mindspore.cn/docs/en/master/faq/distributed_parallel.html#q-when-starting-distributed-framework-using-dynamic-cluster-or-msrun-in-multi-machine-scenario,-an-error-is-reported-that-device-id-is-out-of-range-how-can-we-solve-it?). +> In a multi-machine task, you need to set a different hostname for each host node, otherwise you will get an error reporting `device id` out of bounds. Refer to [FAQ](https://www.mindspore.cn/docs/en/br_base/faq/distributed_parallel.html#q-when-starting-distributed-framework-using-dynamic-cluster-or-msrun-in-multi-machine-scenario,-an-error-is-reported-that-device-id-is-out-of-range-how-can-we-solve-it?). > > In a multi-machine task, `MS_WORKER_NUM` should be the total number of Worker nodes in the cluster. > @@ -430,4 +430,4 @@ The `config.json` configuration file specified by `config_file_path` needs to ad - `cipher_list`: Cipher suite (list of supported SSL encrypted types) - `cert_expire_warning_time_in_day`: The warning time of certificate expiration. -The secret key in the p12 file is stored in cipher text, and the password needs to be passed in when starting. Please refer to the Python API [mindspore.set_ps_context](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.set_ps_context.html#mindspore.set_ps_context) for the `client_password` and `server_password` fields. +The secret key in the p12 file is stored in cipher text, and the password needs to be passed in when starting. Please refer to the Python API [mindspore.set_ps_context](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.set_ps_context.html#mindspore.set_ps_context) for the `client_password` and `server_password` fields. diff --git a/tutorials/source_en/parallel/high_dimension_tensor_parallel.md b/tutorials/source_en/parallel/high_dimension_tensor_parallel.md index 9887ad351a..0364c60851 100644 --- a/tutorials/source_en/parallel/high_dimension_tensor_parallel.md +++ b/tutorials/source_en/parallel/high_dimension_tensor_parallel.md @@ -1,6 +1,6 @@ # High Dimension Tensor Parallel -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/parallel/high_dimension_tensor_parallel.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/parallel/high_dimension_tensor_parallel.md) ## Overview @@ -18,7 +18,7 @@ Usage Scenario: In semi-automatic mode, when there is tensor parallelism in the In 1D tensor parallelism, the full data of activation bsh is stored on each card, and slices are made on only one dimension of weights he and eh. After the first matrix product of the weights of the activation and column slicing, a second matrix product is performed with the weights of the second row slicing, and the resulting `partial sums` are computed after one AllReduce communication between all cards to compute the final correct result. 
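For reference, a semi-automatic-parallel sketch of the 1D scheme just described: the first weight is sliced on its column dimension and the second on its row dimension across 4 cards, and the framework inserts the AllReduce that sums the resulting partial results. Shapes and strategy values here are illustrative assumptions, and the surrounding `AutoParallel` setup is omitted:

```python
import mindspore.nn as nn
import mindspore.ops as ops
from mindspore import Parameter
from mindspore.common.initializer import initializer

class OneDTensorParallelFFN(nn.Cell):
    """Two chained MatMuls with 1D (column, then row) weight slicing."""
    def __init__(self, hidden=1024, ffn=4096):
        super().__init__()
        self.w1 = Parameter(initializer("normal", (hidden, ffn)), name="w1")
        self.w2 = Parameter(initializer("normal", (ffn, hidden)), name="w2")
        # Activation kept whole, w1 sliced on its column dimension over 4 cards.
        self.matmul1 = ops.MatMul().shard(((1, 1), (1, 4)))
        # w2 sliced on its row dimension; the partial sums are AllReduced afterwards.
        self.matmul2 = ops.MatMul().shard(((1, 4), (4, 1)))

    def construct(self, x):
        return self.matmul2(self.matmul1(x, self.w1), self.w2)
```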
-![image](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/parallel/images/high_dimension_tensor_parallel_image_0.png) +![image](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/parallel/images/high_dimension_tensor_parallel_image_0.png) *Figure: 1D tensor computing communication behavior (4 cards in parallel)* @@ -26,7 +26,7 @@ In 1D tensor parallelism, the full data of activation bsh is stored on each card The 2D tensor parallelism slices both the activation bsh and the weight he by two communication groups, x and y. The weights are sliced in both dimensions. As an example in the following figure, Rank0-Rank2 are `communication group x` and Rank0-Rank1 are `communication group y`. After activating the AllGather that passes through the first communication group y and matrix product with the weights, the obtained part and the ReduceScatter that passes between the first communication group x, the correct result of the first MatMul is computed. The second MatMul communication computes the communication behavior similar to the first one, which is not shown in the following figure. -![image](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/parallel/images/high_dimension_tensor_parallel_image_1.png) +![image](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/parallel/images/high_dimension_tensor_parallel_image_1.png) *Figure : 2D tensor parallel computing communication behavior (as an example of a MatMul computation under 4-card parallelism)* @@ -34,7 +34,7 @@ The 2D tensor parallelism slices both the activation bsh and the weight he by tw 3D tensor parallelism further splits the total cardinality into x, y, and z communication groups for finer-grained slicing. Relative to 2D tensor parallelism, 3D tensor parallelism shifts a portion of the AllGather communication to weight he. This operation reduces the total communication introduced when the relative weight of the shape of the activated bsh is large. As shown in the 8-card parallel case in the following figure, the overall process is: activation in communication group y for AllGather, weights in communication group z for AllGather -> matrix product, the resulting partial sum -> ReduceScatter in communication group x to get the final result. The last 4 cards communication calculation is similar to the first 4 cards, the second MatMul communication calculation communication is similar to the first MatMul, none of the following figures are shown. -![image](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/parallel/images/high_dimension_tensor_parallel_image_2.png) +![image](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/parallel/images/high_dimension_tensor_parallel_image_2.png) *Figure : 3D tensor parallel computing communication behavior (as an example of a MatMul computation in the first 5 cards under 8-card parallelism)* @@ -51,8 +51,8 @@ A comprehensive comparison of the theoretical computation, storage, and communic ### Related Interfaces -1. 
[mindspore.ops.MatMul().add_prim_attr("enable_nd_tp", True)](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.MatMul.html): To turn on the 2D/3D communication/computation mode using AllGather, MatMul and ReduceScatter, you must configure MatMul's shard slice using Layout. -2. [mindspore.ops.BatchMatMul().add_prim_attr("enable_nd_tp", True)](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.BatchMatMul.html): To turn on the 2D/3D communication/computation mode using AllGather, MatMul and ReduceScatter, you must configure MatMul's shard slice using Layout. +1. [mindspore.ops.MatMul().add_prim_attr("enable_nd_tp", True)](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.MatMul.html): To turn on the 2D/3D communication/computation mode using AllGather, MatMul and ReduceScatter, you must configure MatMul's shard slice using Layout. +2. [mindspore.ops.BatchMatMul().add_prim_attr("enable_nd_tp", True)](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.BatchMatMul.html): To turn on the 2D/3D communication/computation mode using AllGather, MatMul and ReduceScatter, you must configure MatMul's shard slice using Layout. With the above switch turned on, shard slicing determines whether 2D or 3D parallel mode is used depending on the in_strategy: @@ -69,7 +69,7 @@ The following is an illustration of 2D tensor parallel operation in an Ascend st ### Sample Code Description -> Download the full sample code: [high_dimension_tensor_parallel](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/high_dimension_tensor_parallel). +> Download the full sample code: [high_dimension_tensor_parallel](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/high_dimension_tensor_parallel). The directory structure is as follows: @@ -97,7 +97,7 @@ init() ### Constructing the Network and Computing -The operator definition needs to call the add_prim_attr method to specify the MatMul operator to open the high-dimensional TP, and specify the Matmul operator slice method via Layout. Initialization of network parameters is deferred by the [no_init_parameters](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.utils.no_init_parameters.html) interface and parallel mode is set to semi-automatic parallel mode by wrapping `net` via `AutoParallel`. The code is as follows: +The operator definition needs to call the add_prim_attr method to specify the MatMul operator to open the high-dimensional TP, and specify the Matmul operator slice method via Layout. Initialization of network parameters is deferred by the [no_init_parameters](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.utils.no_init_parameters.html) interface and parallel mode is set to semi-automatic parallel mode by wrapping `net` via `AutoParallel`. 
The code is as follows: ```python # sample code diff --git a/tutorials/source_en/parallel/host_device_training.md b/tutorials/source_en/parallel/host_device_training.md index 1f34264cb7..230ec62e7e 100644 --- a/tutorials/source_en/parallel/host_device_training.md +++ b/tutorials/source_en/parallel/host_device_training.md @@ -1,6 +1,6 @@ # Host&Device Heterogeneous -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/parallel/host_device_training.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/parallel/host_device_training.md) ## Overview @@ -12,21 +12,21 @@ In MindSpore, users can easily implement hybrid training by configuring trainabl Pipeline parallel and operator-level parallel are suitable for the model to have a large number of operators, and the parameters are more evenly distributed among the operators. What if the number of operators in the model is small, and the parameters are concentrated in only a few operators? Wide & Deep is an example of this, as shown in the image below. The Embedding table in Wide & Deep can be trained as a parameter of hundreds of GIGabytes or even a few terabytes. If it is executed on an accelerator (device), the number of accelerators required is huge, and the training cost is expensive. On the other hand, if you use accelerator computing, the training acceleration obtained is limited, and it will also trigger cross-server traffic, and the end-to-end training efficiency will not be very high. -![image](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/parallel/images/host_device_image_0_zh.png) +![image](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/parallel/images/host_device_image_0_zh.png) *Figure: Part of the structure of the Wide & Deep model* A careful analysis of the special structure of the Wide & Deep model can be obtained: although the Embedding table has a huge amount of parameters, it participates in very little computation, and the Embedding table and its corresponding operator, the EmbeddingLookup operator, can be placed on the Host side, by using the CPU for calculation, and the rest of the operators are placed on the accelerator side. This can take advantage of the large amount of memory on the Host side and the fast computing of the accelerator side, while taking advantage of the high bandwidth of the Host to accelerator of the same server. The following diagram shows how Wide & Deep heterogeneous slicing works: -![](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/parallel/images/host_device_image_1_zh.png) +![](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/parallel/images/host_device_image_1_zh.png) *Figure: Wide & Deep Heterogeneous Approach* ### Related Interfaces -1. [mindspore.ops.Primitive.set_device()](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Primitive.html#mindspore.ops.Primitive.set_device): Set Primitive to execute the backend. +1. 
[mindspore.ops.Primitive.set_device()](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Primitive.html#mindspore.ops.Primitive.set_device): Set Primitive to execute the backend. -2. [mindspore.nn.Optimizer.target](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.Optimizer.html#mindspore.nn.Optimizer.target): This attribute specifies whether the parameter should be updated on the host or on the device. The input type is str and can only be "CPU" or "Ascend". +2. [mindspore.nn.Optimizer.target](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.Optimizer.html#mindspore.nn.Optimizer.target): This attribute specifies whether the parameter should be updated on the host or on the device. The input type is str and can only be "CPU" or "Ascend". ## Operator Practices @@ -34,7 +34,7 @@ The following is an illustration of Host&Device heterogeneous operation using As ### Sample Code Description -> Download the complete example code: [host_device](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/host_device). +> Download the complete example code: [host_device](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/host_device). The directory structure is as follows: @@ -50,7 +50,7 @@ The directory structure is as follows: ### Configuring a Distributed Environment -First, the parallel mode is specified as [data parallel](https://www.mindspore.cn/tutorials/en/master/parallel/data_parallel.html) mode through the context interface, and the communication is initialized through init. +First, the parallel mode is specified as [data parallel](https://www.mindspore.cn/tutorials/en/br_base/parallel/data_parallel.html) mode through the context interface, and the communication is initialized through init. 
```python import mindspore as ms @@ -93,7 +93,7 @@ data_set = create_dataset(32) ### Defining the Network -The network definition differs from a single-card network in that the [ops.Add()](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Add.html) operator is configured to run on the host side with the following code: +The network definition differs from a single-card network in that the [ops.Add()](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Add.html) operator is configured to run on the host side with the following code: ```python import mindspore as ms @@ -144,7 +144,7 @@ net.layer3.add.set_device("CPU") ### Training the Network -The loss function, optimizer, and training process are consistent with data parallelism, and [mindspore.nn.DistributedGradReducer()](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.DistributedGradReducer.html) interface is used to aggregate the gradients across all cards with the following code: +The loss function, optimizer, and training process are consistent with data parallelism, and [mindspore.nn.DistributedGradReducer()](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.DistributedGradReducer.html) interface is used to aggregate the gradients across all cards with the following code: ```python from mindspore import nn diff --git a/tutorials/source_en/parallel/mpirun.md b/tutorials/source_en/parallel/mpirun.md index b6506dc0d0..0a99c0d09a 100644 --- a/tutorials/source_en/parallel/mpirun.md +++ b/tutorials/source_en/parallel/mpirun.md @@ -1,6 +1,6 @@ # mpirun Startup -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/parallel/mpirun.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/parallel/mpirun.md) ## Overview @@ -32,7 +32,7 @@ Related commands: The `mpirun` startup script is consistent across Ascend and GPU hardware platforms. Below is a demonstration of how to write a startup script using Ascend as an example: -> You can download the full sample code here: [startup_method](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/startup_method). +> You can download the full sample code here: [startup_method](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/startup_method). The directory structure is as follows: diff --git a/tutorials/source_en/parallel/msrun_launcher.md b/tutorials/source_en/parallel/msrun_launcher.md index dc305e8889..35526e7df6 100644 --- a/tutorials/source_en/parallel/msrun_launcher.md +++ b/tutorials/source_en/parallel/msrun_launcher.md @@ -1,10 +1,10 @@ # msrun Launching -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/parallel/msrun_launcher.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/parallel/msrun_launcher.md) ## Overview -`msrun` is an encapsulation of the [Dynamic Cluster](https://www.mindspore.cn/tutorials/en/master/parallel/dynamic_cluster.html) startup method. 
Users can use `msrun` to pull multi-process distributed tasks across nodes with a single command line instruction. Users can use `msrun` to pull up multi-process distributed tasks on each node with a single command line command, and there is no need to manually set [dynamic networking environment variables](https://www.mindspore.cn/tutorials/en/master/parallel/dynamic_cluster.html). `msrun` supports both `Ascend`, `GPU` and `CPU` backends. As with the `Dynamic Cluster` startup, `msrun` has no dependencies on third-party libraries and configuration files.
+`msrun` is an encapsulation of the [Dynamic Cluster](https://www.mindspore.cn/tutorials/en/br_base/parallel/dynamic_cluster.html) startup method. Users can use `msrun` to pull up multi-process distributed tasks on each node of the cluster with a single command line instruction, and there is no need to manually set [dynamic networking environment variables](https://www.mindspore.cn/tutorials/en/br_base/parallel/dynamic_cluster.html). `msrun` supports the `Ascend`, `GPU` and `CPU` backends. As with the `Dynamic Cluster` startup, `msrun` has no dependencies on third-party libraries or configuration files.
> - `msrun` is available after the user installs MindSpore, and the command `msrun --help` can be used to view the supported parameters.
> - `msrun` supports `graph mode` as well as `PyNative mode`.
@@ -87,7 +87,7 @@ A parameters list of command line:
Set simulated compilation level. Integer Default: -1. Disable simulated compilation.
-If this parameter is set, msrun starts only the processes for simulated compilation and does not execute operators. This feature is commonly used to debug large-scale distributed training parallel strategies, and to detect memory and strategy issues in advance. The settings for the simulated compilation level can be found in the document: DryRun.
+If this parameter is set, msrun starts only the processes for simulated compilation and does not execute operators. This feature is commonly used to debug large-scale distributed training parallel strategies, and to detect memory and strategy issues in advance. The settings for the simulated compilation level can be found in the document: DryRun.
--sim_rank_id

@@ -186,13 +186,13 @@ The following table shows the environment variables can be used in user scripts,
-msrun is used as an encapsulation of the Dynamic Cluster startup method, and all user-configurable environment variables can be found in [dynamic networking environment variables](https://www.mindspore.cn/tutorials/en/master/parallel/dynamic_cluster.html). +msrun is used as an encapsulation of the Dynamic Cluster startup method, and all user-configurable environment variables can be found in [dynamic networking environment variables](https://www.mindspore.cn/tutorials/en/br_base/parallel/dynamic_cluster.html). ## Launching Distributed Tasks The startup script is consistent across hardware platforms. The following is an example of how to write a startup script for Ascend: -> You can download the full sample code here: [startup_method](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/startup_method). +> You can download the full sample code here: [startup_method](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/startup_method). The directory structure is as follows: @@ -299,7 +299,7 @@ for epoch in range(10): The following is an example of performing a single-machine 8-card training session: -The script [msrun_single.sh](https://gitee.com/mindspore/docs/blob/master/docs/sample_code/startup_method/msrun_single.sh) uses the msrun command to pull up 1 `Scheduler` process as well as 8 `Worker` processes on the current node (no need to set `master_addr`, defaults to `127.0.0.1`; no need to set `node_rank` for single-machine): +The script [msrun_single.sh](https://gitee.com/mindspore/docs/blob/br_base/docs/sample_code/startup_method/msrun_single.sh) uses the msrun command to pull up 1 `Scheduler` process as well as 8 `Worker` processes on the current node (no need to set `master_addr`, defaults to `127.0.0.1`; no need to set `node_rank` for single-machine): ```bash EXEC_PATH=$(pwd) @@ -338,7 +338,7 @@ epoch: 0, step: 30, loss is 1.0437132 The following is an example of executing 2-machine, 8-card training, with each machine executing the startup of 4 Workers: -The script [msrun_1.sh](https://gitee.com/mindspore/docs/blob/master/docs/sample_code/startup_method/msrun_1.sh) is executed on node 1 and uses the msrun command to pull up 1 `Scheduler` process and 4 `Worker` processes, configures `master_addr` as the IP address of node 1 (msrun automatically detects that the current node ip matches the `master_addr` and pulls up the `Scheduler` process). Set the current node to node 0 with `node_rank`: +The script [msrun_1.sh](https://gitee.com/mindspore/docs/blob/br_base/docs/sample_code/startup_method/msrun_1.sh) is executed on node 1 and uses the msrun command to pull up 1 `Scheduler` process and 4 `Worker` processes, configures `master_addr` as the IP address of node 1 (msrun automatically detects that the current node ip matches the `master_addr` and pulls up the `Scheduler` process). Set the current node to node 0 with `node_rank`: ```bash EXEC_PATH=$(pwd) @@ -357,7 +357,7 @@ echo "start training" msrun --worker_num=8 --local_worker_num=4 --master_addr= --master_port=8118 --node_rank=0 --log_dir=msrun_log --join=True --cluster_time_out=300 net.py ``` -The script [msrun_2.sh](https://gitee.com/mindspore/docs/blob/master/docs/sample_code/startup_method/msrun_2.sh) is executed on node 2 and uses the msrun command to pull up 4 `Worker` processes, configures `master_addr` as the IP address of node 1. 
Set the current node to node 0 with `node_rank`: +The script [msrun_2.sh](https://gitee.com/mindspore/docs/blob/br_base/docs/sample_code/startup_method/msrun_2.sh) is executed on node 2 and uses the msrun command to pull up 4 `Worker` processes, configures `master_addr` as the IP address of node 1. Set the current node to node 0 with `node_rank`: ```bash EXEC_PATH=$(pwd) @@ -453,9 +453,9 @@ if get_rank() == 7: ms.set_seed(1) ``` -> The [mindspore.communication.get_rank()](https://www.mindspore.cn/docs/en/master/api_python/communication/mindspore.communication.get_rank.html) interface needs to be called after the [mindspore.communication.init()](https://www.mindspore.cn/docs/en/master/api_python/communication/mindspore.communication.init.html) interface has completed its distributed initialization to get the rank information properly, otherwise `get_rank()` returns 0 by default. +> The [mindspore.communication.get_rank()](https://www.mindspore.cn/docs/en/br_base/api_python/communication/mindspore.communication.get_rank.html) interface needs to be called after the [mindspore.communication.init()](https://www.mindspore.cn/docs/en/br_base/api_python/communication/mindspore.communication.init.html) interface has completed its distributed initialization to get the rank information properly, otherwise `get_rank()` returns 0 by default. -After a breakpoint operation on a rank, it will cause the execution of that rank process to stop at the breakpoint and wait for subsequent interactions, while other unbroken rank processes will continue to run, which may lead to inconsistent running speed, so you can use the [mindspore.communication.comm_func.barrier()](https://www.mindspore.cn/docs/en/master/api_python/communication/mindspore.communication.comm_func.barrier.html) operator and the [mindspore.runtime.synchronize()](https://www.mindspore.cn/docs/en/master/api_python/runtime/mindspore.runtime.synchronize.html) to synchronize the running of all ranks, ensuring that other ranks block and wait, and that the stops of other ranks are released once the debugging rank continues to run. For example, in a standalone 8-card task, only rank 7 is broken and all other ranks are blocked: +After a breakpoint operation on a rank, it will cause the execution of that rank process to stop at the breakpoint and wait for subsequent interactions, while other unbroken rank processes will continue to run, which may lead to inconsistent running speed, so you can use the [mindspore.communication.comm_func.barrier()](https://www.mindspore.cn/docs/en/br_base/api_python/communication/mindspore.communication.comm_func.barrier.html) operator and the [mindspore.runtime.synchronize()](https://www.mindspore.cn/docs/en/br_base/api_python/runtime/mindspore.runtime.synchronize.html) to synchronize the running of all ranks, ensuring that other ranks block and wait, and that the stops of other ranks are released once the debugging rank continues to run. 
For example, in a standalone 8-card task, only rank 7 is broken and all other ranks are blocked: ```python import pdb diff --git a/tutorials/source_en/parallel/multiple_copy.md b/tutorials/source_en/parallel/multiple_copy.md index 09236e288c..59db598b32 100644 --- a/tutorials/source_en/parallel/multiple_copy.md +++ b/tutorials/source_en/parallel/multiple_copy.md @@ -1,6 +1,6 @@ # Multi-copy Parallel -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/parallel/multiple_copy.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/parallel/multiple_copy.md) ## Overview @@ -12,11 +12,11 @@ Usage Scenario: When there is model parallel in semi-automatic mode as well as i The data of input model is sliced according to the batch size dimension, thus modifying the existing single-copy form into a multi-copy form, so that when the underlying layer is communicating, the other copy carries out the computational operation without waiting, which ensures that the computation and communication times of multi-copy complement each other and improve the model performance. At the same time, splitting the data into a multi-copy form also reduces the number of parameter of the operator inputs and reduces the computation time of a single operator, which is helpful in improving the model performance. -![Multi-copy parallel](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/docs/mindspore/source_zh_cn/features/images/multi_copy.png) +![Multi-copy parallel](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/docs/mindspore/source_zh_cn/features/images/multi_copy.png) ### Related Interfaces -- [mindspore.parallel.nn.MicroBatchInterleaved(cell_network, interleave_num=2)](https://www.mindspore.cn/docs/en/master/api_python/parallel/mindspore.parallel.nn.MicroBatchInterleaved.html): This function serves to split the input into `interleave_num` parts in the first dimension (dimension 0), and then performs the computation of the wrapped cell. +- [mindspore.parallel.nn.MicroBatchInterleaved(cell_network, interleave_num=2)](https://www.mindspore.cn/docs/en/br_base/api_python/parallel/mindspore.parallel.nn.MicroBatchInterleaved.html): This function serves to split the input into `interleave_num` parts in the first dimension (dimension 0), and then performs the computation of the wrapped cell. ## Operator Practice @@ -24,7 +24,7 @@ The following is an illustration of multi-copy parallel operation using an Ascen ### Example Code Description -> Download the complete example code: [multiple_copy](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/multiple_copy). +> Download the complete example code: [multiple_copy](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/multiple_copy). The directory structure is as follows: @@ -53,7 +53,7 @@ init() ### Dataset Loading and Network Definition Here the dataset loading and network definition is consistent with the single-card model. -Defer initialization of network parameters and optimizer parameters via the [no_init_parameters](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.utils.no_init_parameters.html) interface. 
+Defer initialization of network parameters and optimizer parameters via the [no_init_parameters](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.utils.no_init_parameters.html) interface. ```python import os @@ -103,8 +103,8 @@ with no_init_parameters(): In this step, we need to define the loss function and the training process, and in this section two interfaces need to be called to configure the gradient accumulation: -- First the LossCell needs to be defined. In this case the [nn.WithLossCell](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.WithLossCell.html) interface is called to wrap the network and loss functions. -- It is then necessary to wrap a layer of [mindspore.parallel.nn.MicroBatchInterleaved](https://www.mindspore.cn/docs/en/master/api_python/parallel/mindspore.parallel.nn.MicroBatchInterleaved.html) around the LossCell and specify interleave_num size of 2. Refer to the relevant interfaces in the overview of this chapter for more details. +- First the LossCell needs to be defined. In this case the [nn.WithLossCell](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.WithLossCell.html) interface is called to wrap the network and loss functions. +- It is then necessary to wrap a layer of [mindspore.parallel.nn.MicroBatchInterleaved](https://www.mindspore.cn/docs/en/br_base/api_python/parallel/mindspore.parallel.nn.MicroBatchInterleaved.html) around the LossCell and specify interleave_num size of 2. Refer to the relevant interfaces in the overview of this chapter for more details. Finally, the `AutoParallel` wraps `net` and sets the parallel mode to semi-automatic parallel mode. diff --git a/tutorials/source_en/parallel/multiple_mixed.md b/tutorials/source_en/parallel/multiple_mixed.md index abd5ae93f5..3f6238fb27 100644 --- a/tutorials/source_en/parallel/multiple_mixed.md +++ b/tutorials/source_en/parallel/multiple_mixed.md @@ -1,6 +1,6 @@ # Multi-dimensional Hybrid Parallel Case Based on Double Recursive Search -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/parallel/multiple_mixed.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/parallel/multiple_mixed.md) ## Overview @@ -12,7 +12,7 @@ The following is a multi-dimensional hybrid parallel case based on double recurs ### Example Code Description -> Download the complete example code: [multiple_mix](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/multiple_mix). +> Download the complete example code: [multiple_mix](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/multiple_mix). The directory structure is as follows: @@ -107,7 +107,7 @@ data_set = create_dataset(32) ### Training the Network -This part is consistent with the pipeline parallel training code. Two additional interfaces need to be called based on the stand-alone training code: [nn.WithLossCell](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.WithLossCell.html) for wrapping the network and loss function, and [parallel.nn.Pipeline](https://www.mindspore.cn/docs/en/master/api_python/parallel/mindspore.parallel.nn.Pipeline.html) for wrapping the LossCell and configuring the MicroBatch size. Specify the run mode, run device, run card number, etc. 
through the [Autoparallel](https://www.mindspore.cn/docs/en/master/api_python/parallel/mindspore.parallel.auto_parallel.AutoParallel.html) interface. Unlike single-card scripts, parallel scripts also need to specify the parallel mode `parallel_mode` as double recursive strategy search mode `recursive_programming` for auto-slicing of the data parallel and model parallel. `stages` is the number of stages in pipeline parallel, and optimizer parallel is enabled by `hsdp`. The code is as follows: +This part is consistent with the pipeline parallel training code. Two additional interfaces need to be called based on the stand-alone training code: [nn.WithLossCell](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.WithLossCell.html) for wrapping the network and loss function, and [parallel.nn.Pipeline](https://www.mindspore.cn/docs/en/br_base/api_python/parallel/mindspore.parallel.nn.Pipeline.html) for wrapping the LossCell and configuring the MicroBatch size. Specify the run mode, run device, run card number, etc. through the [Autoparallel](https://www.mindspore.cn/docs/en/br_base/api_python/parallel/mindspore.parallel.auto_parallel.AutoParallel.html) interface. Unlike single-card scripts, parallel scripts also need to specify the parallel mode `parallel_mode` as double recursive strategy search mode `recursive_programming` for auto-slicing of the data parallel and model parallel. `stages` is the number of stages in pipeline parallel, and optimizer parallel is enabled by `hsdp`. The code is as follows: ```python import mindspore as ms diff --git a/tutorials/source_en/parallel/operator_parallel.md b/tutorials/source_en/parallel/operator_parallel.md index af1133689e..ab58653dd0 100644 --- a/tutorials/source_en/parallel/operator_parallel.md +++ b/tutorials/source_en/parallel/operator_parallel.md @@ -1,6 +1,6 @@ # Operator-level Parallelism -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/parallel/operator_parallel.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/parallel/operator_parallel.md) ## Overview @@ -16,7 +16,7 @@ The illustration of the ops operator parallel operation is based on the Ascend s #### Sample Code Description -> Download the complete sample code here: [distributed_operator_parallel](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/distributed_operator_parallel). +> Download the complete sample code here: [distributed_operator_parallel](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/distributed_operator_parallel). The directory structure is as follows: @@ -107,7 +107,7 @@ The `ops.MatMul()` and `ops.ReLU()` operators for the above networks are configu #### Training Network Definition -In this step, we need to define the loss function, the optimizer, and the training process. Note that due to the huge number of parameters of the large model, the graphics memory will be far from sufficient if parameter initialization is performed when defining the network on a single card. 
Therefore, delayed initialization is required when defining the network in conjunction with the [mindspore.nn.utils.no_init_parameters](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.utils.no_init_parameters.html) interface to delay parameter initialization until the parallel multicard phase. Here both network and optimizer definitions need to be delayed initialized. +In this step, we need to define the loss function, the optimizer, and the training process. Note that due to the huge number of parameters of the large model, the graphics memory will be far from sufficient if parameter initialization is performed when defining the network on a single card. Therefore, delayed initialization is required when defining the network in conjunction with the [mindspore.nn.utils.no_init_parameters](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.utils.no_init_parameters.html) interface to delay parameter initialization until the parallel multicard phase. Here both network and optimizer definitions need to be delayed initialized. ```python from mindspore.nn.utils import no_init_parameters @@ -186,7 +186,7 @@ epoch: 0 step: 50, loss is 1.8051043 ... ``` -Other startup methods such as `mpirun` and `rank table` startup can be found in [startup methods](https://www.mindspore.cn/tutorials/en/master/parallel/startup_method.html). +Other startup methods such as `mpirun` and `rank table` startup can be found in [startup methods](https://www.mindspore.cn/tutorials/en/br_base/parallel/startup_method.html). ### mint Operator Parallel Practice @@ -194,7 +194,7 @@ The illustration of the mint operator parallel operation is based on the Ascend #### Sample Code Description -> Download the complete sample code here: [distributed_operator_parallel](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/distributed_operator_parallel). +> Download the complete sample code here: [distributed_operator_parallel](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/distributed_operator_parallel). The directory structure is as follows: @@ -250,7 +250,7 @@ data_set = create_dataset(32) #### Defining the Network -In the current mint operator parallel mode, the network needs to be defined with mint operators. Since the mint operators, as a functional interface, does not directly expose its operator type (Primitive), it is impossible to directly configure the slicing strategy for the operator. Instead, users need to manually configure the slicing strategy for mint operators by using [mindspore.parallel.shard](https://www.mindspore.cn/docs/en/master/api_python/parallel/mindspore.parallel.shard.html) interface based on a single-card network, e.g., the network structure after configuring the strategy is: +In the current mint operator parallel mode, the network needs to be defined with mint operators. Since the mint operators, as a functional interface, does not directly expose its operator type (Primitive), it is impossible to directly configure the slicing strategy for the operator. Instead, users need to manually configure the slicing strategy for mint operators by using [mindspore.parallel.shard](https://www.mindspore.cn/docs/en/br_base/api_python/parallel/mindspore.parallel.shard.html) interface based on a single-card network, e.g., the network structure after configuring the strategy is: ```python import mindspore as ms @@ -338,7 +338,7 @@ epoch: 0 step: 50, forward_sum is 0.96655 ... 
``` -Other startup methods such as `mpirun` and `rank table` startup can be found in [startup methods](https://www.mindspore.cn/tutorials/en/master/parallel/startup_method.html). +Other startup methods such as `mpirun` and `rank table` startup can be found in [startup methods](https://www.mindspore.cn/tutorials/en/br_base/parallel/startup_method.html). ## Higher-Order Operator-Level Parallel Practice @@ -348,7 +348,7 @@ An illustration of higher-order ops operator parallel operations follows, using #### Sample Code Description -> Download the complete sample code here: [distributed_operator_parallel](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/distributed_operator_parallel). +> Download the complete sample code here: [distributed_operator_parallel](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/distributed_operator_parallel). The directory structure is as follows: @@ -462,7 +462,7 @@ epoch: 0 step: 50, loss is 1.8051043 ... ``` -Other startup methods such as `mpirun` and `rank table` startup can be found in [startup methods](https://www.mindspore.cn/tutorials/en/master/parallel/startup_method.html). +Other startup methods such as `mpirun` and `rank table` startup can be found in [startup methods](https://www.mindspore.cn/tutorials/en/br_base/parallel/startup_method.html). ### Higher-Order mint Operator Parallel Practice @@ -470,7 +470,7 @@ An illustration of higher-order mint operator parallel operations follows, using #### Sample Code Description -> Download the complete sample code here: [distributed_operator_parallel](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/distributed_operator_parallel). +> Download the complete sample code here: [distributed_operator_parallel](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/distributed_operator_parallel). The directory structure is as follows: @@ -604,4 +604,4 @@ epoch: 0 step: 50, forward_sum is 0.96655 ... ``` -Other startup methods such as `mpirun` and `rank table` startup can be found in [startup methods](https://www.mindspore.cn/tutorials/en/master/parallel/startup_method.html). +Other startup methods such as `mpirun` and `rank table` startup can be found in [startup methods](https://www.mindspore.cn/tutorials/en/br_base/parallel/startup_method.html). diff --git a/tutorials/source_en/parallel/optimize_technique.rst b/tutorials/source_en/parallel/optimize_technique.rst index 00836a8b97..9170b249c3 100644 --- a/tutorials/source_en/parallel/optimize_technique.rst +++ b/tutorials/source_en/parallel/optimize_technique.rst @@ -1,8 +1,8 @@ Optimization Techniques ======================== -.. image:: https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg - :target: https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/parallel/optimize_technique.rst +.. image:: https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg + :target: https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/parallel/optimize_technique.rst :alt: View Source On Gitee .. toctree:: @@ -23,18 +23,18 @@ Considering that in actual parallel training, there may be requirements for trai - Parallel strategy optimization: parallel strategy optimization mainly includes the selection of parallel strategy, sharding technique under operator-level parallel, and multi-copy technique. 
- - `Strategy Selection `_: Depending on the model size and data volume size, different parallel strategies can be selected to improve training efficiency and resource utilization. - - `Sharding Techniques `_: The sharding technique refers to the reduction of tensor rearranging to improve training efficiency by manually configuring the sharding strategy for certain key operators. - - `Multiply Copy `_: Multi-copy refers to splitting a training batch into multiple ones in an iterative step to concurrently communicate and compute the model in parallel and improve resource utilization. - - `High Dimension Tensor Parallel `_: High dimension tensor parallelism refers to multi-dimensional slicing of activation and weight tensor for MatMul computation in model parallelism, which reduces the communication amount and improves the training efficiency by optimizing the slicing strategy. + - `Strategy Selection `_: Depending on the model size and data volume size, different parallel strategies can be selected to improve training efficiency and resource utilization. + - `Sharding Techniques `_: The sharding technique refers to the reduction of tensor rearranging to improve training efficiency by manually configuring the sharding strategy for certain key operators. + - `Multiply Copy `_: Multi-copy refers to splitting a training batch into multiple ones in an iterative step to concurrently communicate and compute the model in parallel and improve resource utilization. + - `High Dimension Tensor Parallel `_: High dimension tensor parallelism refers to multi-dimensional slicing of activation and weight tensor for MatMul computation in model parallelism, which reduces the communication amount and improves the training efficiency by optimizing the slicing strategy. - Memory optimization: memory optimization includes gradient accumulation, recompute, dataset sharding, Host&Device heterogeneity and heterogeneous storage, with the main goal of saving memory space. - - `Gradient Accumulation `_: Gradient Accumulation updates the parameters of a neural network by computing gradients on multiple MicroBatches and summing them up, then applying this accumulated gradient at once. In this way a small number of devices can also train large Batches, effectively minimizing memory spikes. - - `Recompute `_: Recomputation is a time-for-space technique that saves memory space by not saving the results of certain forward operator calculations, and when calculating the reverse operator, the forward results need to be used before recomputing the forward operator. - - `Dataset Sharding `_: When a dataset is too large individually or even cannot be loaded onto a single device, the data can be sliced for distributed training. Slicing the dataset with model parallel is an effective way to reduce the graphics memory usage. - - `Host&Device Heterogeneous `_: When the number of parameters exceeds the upper limit of Device memory, you can put some operators with large memory usage and small computation on the Host side, which can simultaneously utilize the characteristics of large memory on the Host side and fast computation on the Device side, and improve the utilization rate of the device. + - `Gradient Accumulation `_: Gradient Accumulation updates the parameters of a neural network by computing gradients on multiple MicroBatches and summing them up, then applying this accumulated gradient at once. In this way a small number of devices can also train large Batches, effectively minimizing memory spikes. 
+ - `Recompute <https://www.mindspore.cn/tutorials/en/br_base/parallel/recompute.html>`_: Recomputation is a time-for-space technique that saves memory space by not saving the results of certain forward operator calculations, and when calculating the reverse operator, the forward results need to be used before recomputing the forward operator. + - `Dataset Sharding <https://www.mindspore.cn/tutorials/en/br_base/parallel/dataset_slice.html>`_: When a dataset is too large individually or even cannot be loaded onto a single device, the data can be sliced for distributed training. Slicing the dataset with model parallel is an effective way to reduce the graphics memory usage. + - `Host&Device Heterogeneous <https://www.mindspore.cn/tutorials/en/br_base/parallel/host_device_training.html>`_: When the number of parameters exceeds the upper limit of Device memory, you can put some operators with large memory usage and small computation on the Host side, which can simultaneously utilize the characteristics of large memory on the Host side and fast computation on the Device side, and improve the utilization rate of the device. - Communication optimization: communication optimization includes communication fusion and communication subgraph extraction and multiplexing, and the main goal is to reduce communication delay and improve performance. - - `Communication Fusion <https://www.mindspore.cn/tutorials/en/master/parallel/comm_fusion.html>`_: Communication fusion can merge the communication operators of the same source and target nodes into a single communication process, avoiding the extra overhead caused by multiple communications. + - `Communication Fusion <https://www.mindspore.cn/tutorials/en/br_base/parallel/comm_fusion.html>`_: Communication fusion can merge the communication operators of the same source and target nodes into a single communication process, avoiding the extra overhead caused by multiple communications. diff --git a/tutorials/source_en/parallel/optimizer_parallel.md index 798ddd41dd..f19509e0d0 100644 --- a/tutorials/source_en/parallel/optimizer_parallel.md +++ b/tutorials/source_en/parallel/optimizer_parallel.md @@ -1,6 +1,6 @@ # Optimizer Parallel -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/parallel/optimizer_parallel.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/parallel/optimizer_parallel.md) ## Overview @@ -10,7 +10,7 @@ The following is an illustration of optimizer parallel operation using an Ascend ## Sample Code Description -> Download the full sample code: [distributed_optimizer_parallel](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/distributed_optimizer_parallel). +> Download the full sample code: [distributed_optimizer_parallel](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/distributed_optimizer_parallel). The directory structure is as follows: @@ -97,7 +97,7 @@ net.layer2.set_comm_fusion(1) net.layer3.set_comm_fusion(2) ``` -> Here communication fusion is configured for different layers in order to reduce the communication cost. Details can be found in [Communication Operator Fusion](https://www.mindspore.cn/tutorials/en/master/parallel/comm_fusion.html). +> Here communication fusion is configured for different layers in order to reduce the communication cost. Details can be found in [Communication Operator Fusion](https://www.mindspore.cn/tutorials/en/br_base/parallel/comm_fusion.html). ## Training Network Definition @@ -186,4 +186,4 @@ epoch: 0, step: 100, loss is 0.6854114 ...
``` -Other startup methods such as `mpirun` and `rank table` startup can be found in [startup methods](https://www.mindspore.cn/tutorials/en/master/parallel/startup_method.html). +Other startup methods such as `mpirun` and `rank table` startup can be found in [startup methods](https://www.mindspore.cn/tutorials/en/br_base/parallel/startup_method.html). diff --git a/tutorials/source_en/parallel/overview.md b/tutorials/source_en/parallel/overview.md index 85909d6d33..b7835d8a39 100644 --- a/tutorials/source_en/parallel/overview.md +++ b/tutorials/source_en/parallel/overview.md @@ -1,6 +1,6 @@ # Distributed Parallelism Overview -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/parallel/overview.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/parallel/overview.md) In deep learning, as the size of the dataset and the number of parameters grows, the time and hardware resources required for training increase and eventually become a bottleneck that constrains training. Distributed parallel training, which reduces the need for hardware such as memory and computational performance, is an important optimization for performing training. In addition, distributed parallelism is important for large model training and inference, which provides powerful computational capabilities and performance advantages for handling large-scale data and complex models. @@ -15,13 +15,13 @@ MindSpore currently supports four startup methods: - **mpirun**: Launched via OpenMPI, a multi-process communication library with Ascend/GPU support. - **rank table**: After configuring the rank_table table, Ascend is supported by scripts that start processes corresponding to the number of cards. -For details, refer to [Distributed Parallel Startup Approach](https://www.mindspore.cn/tutorials/en/master/parallel/startup_method.html). +For details, refer to [Distributed Parallel Startup Approach](https://www.mindspore.cn/tutorials/en/br_base/parallel/startup_method.html). ## Data Parallel Data parallel is the most commonly used parallel training approach for accelerating model training and handling large-scale datasets. In data parallel mode, the training data is divided into multiple copies and then each copy is assigned to a different compute node, such as multiple cards or multiple devices. Each node processes its own subset of data independently and uses the same model for forward and backward propagation, and ultimately performs model parameter updates after synchronizing the gradients of all nodes. -For details, refer to [Data Parallel](https://www.mindspore.cn/tutorials/en/master/parallel/data_parallel.html). +For details, refer to [Data Parallel](https://www.mindspore.cn/tutorials/en/br_base/parallel/data_parallel.html). ## Operator-level Parallel @@ -29,19 +29,19 @@ With the development of deep learning, network models are becoming larger and la MindSpore provides two levels of granularity: operator-level parallelism and higher-order operator-level parallelism. Operator-level parallelism describes the tensor dimensionality distribution through a simple slicing strategy, which meets the requirements of most scenarios. 
Higher-order operator parallelism supports complex slicing scenarios through open device scheduling descriptions. -For details, refer to [Operator-level Parallel](https://www.mindspore.cn/tutorials/en/master/parallel/operator_parallel.html). +For details, refer to [Operator-level Parallel](https://www.mindspore.cn/tutorials/en/br_base/parallel/operator_parallel.html). ## Optimizer Parallel When performing data parallel training, the parameter update part of the model is computed redundantly across cards. Optimizer parallelism can effectively reduce memory consumption and improve network performance on large-scale networks (e.g., Bert, GPT) by spreading the computation of the optimizer to the cards of the data parallel dimension. -For details, refer to [Optimizer Parallel](https://www.mindspore.cn/tutorials/en/master/parallel/optimizer_parallel.html). +For details, refer to [Optimizer Parallel](https://www.mindspore.cn/tutorials/en/br_base/parallel/optimizer_parallel.html). ## Pipeline Parallel In recent years, the scale of neural networks has increased exponentially. Limited by the memory on a single device, the number of devices used for training large models is also increasing. Due to the low communication bandwidth between servers, the performance of the conventional hybrid parallelism (data parallel + model parallel) is poor. Therefore, pipeline parallelism needs to be introduced. Pipeline parallel can divide a model in space based on stage. Each stage needs to execute only a part of the network, which greatly reduces memory overheads, shrinks the communication domain, and shortens the communication time. MindSpore can automatically convert a standalone model to the pipeline parallel mode based on user configurations. -For details, refer to [Pipeline Parallel](https://www.mindspore.cn/tutorials/en/master/parallel/pipeline_parallel.html). +For details, refer to [Pipeline Parallel](https://www.mindspore.cn/tutorials/en/br_base/parallel/pipeline_parallel.html). ## Parallel Optimization Strategies @@ -49,20 +49,20 @@ If there is a requirement for performance, throughput, or scale, or if you don't - **Parallel Strategy Optimization**: - - **Strategy Selection**: Depending on the model size and data volume size, different parallel strategies can be selected by referring to [Strategy Selection](https://www.mindspore.cn/tutorials/en/master/parallel/strategy_select.html) to improve training efficiency and resource utilization. - - **Sharding Techniques**: Slicing techniques are also key to efficient parallel computing. In the [Sharding Techniques](https://www.mindspore.cn/tutorials/en/master/parallel/split_technique.html) tutorial, you can learn how to apply various slicing techniques to improve efficiency through concrete examples. - - **Multiply Copy Parallel**: Under the existing single-copy model, certain underlying operators cannot be computed simultaneously while communicating, leading to wasted resources. Multiple copy parallel slices the data into multiple copies according to the Batch Size dimension, which can make one copy communicate while the other copy performs the computation operation, which improves the resource utilization. For details, please refer to the [Multiple Copy Parallel](https://www.mindspore.cn/tutorials/en/master/parallel/multiple_copy.html) tutorial. 
- - **High Dimension Tensor Parallel**: High-dimensional tensor parallelism refers to multi-dimensional slicing of activation and weight tensor in MatMul computation for model parallelism, which reduces the communication volume and improves the training efficiency by optimizing the slicing strategy. For details, please refer to the [High Dimension Tensor Parallel](https://www.mindspore.cn/tutorials/en/master/parallel/high_dimension_tensor_parallel.html) tutorial. + - **Strategy Selection**: Depending on the model size and data volume size, different parallel strategies can be selected by referring to [Strategy Selection](https://www.mindspore.cn/tutorials/en/br_base/parallel/strategy_select.html) to improve training efficiency and resource utilization. + - **Sharding Techniques**: Slicing techniques are also key to efficient parallel computing. In the [Sharding Techniques](https://www.mindspore.cn/tutorials/en/br_base/parallel/split_technique.html) tutorial, you can learn how to apply various slicing techniques to improve efficiency through concrete examples. + - **Multiply Copy Parallel**: Under the existing single-copy model, certain underlying operators cannot be computed simultaneously while communicating, leading to wasted resources. Multiple copy parallel slices the data into multiple copies according to the Batch Size dimension, which can make one copy communicate while the other copy performs the computation operation, which improves the resource utilization. For details, please refer to the [Multiple Copy Parallel](https://www.mindspore.cn/tutorials/en/br_base/parallel/multiple_copy.html) tutorial. + - **High Dimension Tensor Parallel**: High-dimensional tensor parallelism refers to multi-dimensional slicing of activation and weight tensor in MatMul computation for model parallelism, which reduces the communication volume and improves the training efficiency by optimizing the slicing strategy. For details, please refer to the [High Dimension Tensor Parallel](https://www.mindspore.cn/tutorials/en/br_base/parallel/high_dimension_tensor_parallel.html) tutorial. - **Memory Optimization**: - - **Gradient Accumulation**: Gradient Accumulation updates the parameters of a neural network by computing gradients on multiple MicroBatches and summing them up, then applying this accumulated gradient at once. In this way a small number of devices can also train large Batches, effectively minimizing memory spikes. For details, please refer to the [Gradient Accumulation](https://www.mindspore.cn/tutorials/en/master/parallel/distributed_gradient_accumulation.html) tutorial. - - **Recompute**: Recomputation is a time-for-space technique that saves memory space by not saving the results of certain forward operator calculations, and when calculating the reverse operator, the forward results need to be used before recomputing the forward operator. For details, please refer to the [Recompute](https://www.mindspore.cn/tutorials/en/master/parallel/recompute.html) tutorial. - - **Dataset Sharding**: When a dataset is too large individually, the data can be sliced for distributed training. Slicing the dataset with model parallel is an effective way to reduce the graphics memory usage. For details, please refer to the [Dataset Sharding](https://www.mindspore.cn/tutorials/en/master/parallel/dataset_slice.html) tutorial. 
- - **Host&Device Heterogeneous**: When the number of parameters exceeds the upper limit of Device memory, you can put some operators with large memory usage and small computation on the Host side, which can simultaneously utilize the characteristics of large memory on the Host side and fast computation on the Device side, and improve the utilization rate of the device. For details, please refer to the [Host&Device Heterogeneous](https://www.mindspore.cn/tutorials/en/master/parallel/host_device_training.html) tutorial. + - **Gradient Accumulation**: Gradient Accumulation updates the parameters of a neural network by computing gradients on multiple MicroBatches and summing them up, then applying this accumulated gradient at once. In this way a small number of devices can also train large Batches, effectively minimizing memory spikes. For details, please refer to the [Gradient Accumulation](https://www.mindspore.cn/tutorials/en/br_base/parallel/distributed_gradient_accumulation.html) tutorial. + - **Recompute**: Recomputation is a time-for-space technique that saves memory space by not saving the results of certain forward operator calculations, and when calculating the reverse operator, the forward results need to be used before recomputing the forward operator. For details, please refer to the [Recompute](https://www.mindspore.cn/tutorials/en/br_base/parallel/recompute.html) tutorial. + - **Dataset Sharding**: When a dataset is too large individually, the data can be sliced for distributed training. Slicing the dataset with model parallel is an effective way to reduce the graphics memory usage. For details, please refer to the [Dataset Sharding](https://www.mindspore.cn/tutorials/en/br_base/parallel/dataset_slice.html) tutorial. + - **Host&Device Heterogeneous**: When the number of parameters exceeds the upper limit of Device memory, you can put some operators with large memory usage and small computation on the Host side, which can simultaneously utilize the characteristics of large memory on the Host side and fast computation on the Device side, and improve the utilization rate of the device. For details, please refer to the [Host&Device Heterogeneous](https://www.mindspore.cn/tutorials/en/br_base/parallel/host_device_training.html) tutorial. - **Communication Optimization**: - - **Communication Fusion**: Communication fusion can merge the communication operators of the same source and target nodes into a single communication process, avoiding the extra overhead caused by multiple communications. For details, please refer to the [Communication Fusion](https://www.mindspore.cn/tutorials/en/master/parallel/comm_fusion.html). + - **Communication Fusion**: Communication fusion can merge the communication operators of the same source and target nodes into a single communication process, avoiding the extra overhead caused by multiple communications. For details, please refer to the [Communication Fusion](https://www.mindspore.cn/tutorials/en/br_base/parallel/comm_fusion.html). ## Distributed High-Level Configuration Examples -- **Multi-dimensional Hybrid Parallel Case Based on Double Recursive Search**: Multi-dimensional hybrid parallel based on double recursive search means that the user can configure optimization methods such as recomputation, optimizer parallel, pipeline parallel. Based on the user configurations, the operator-level strategy is automatically searched by the double recursive strategy search algorithm, which generates the optimal parallel strategy. 
For details, please refer to the [Multi-dimensional Hybrid Parallel Case Based on Double Recursive Search](https://www.mindspore.cn/tutorials/en/master/parallel/multiple_mixed.html). +- **Multi-dimensional Hybrid Parallel Case Based on Double Recursive Search**: Multi-dimensional hybrid parallel based on double recursive search means that the user can configure optimization methods such as recomputation, optimizer parallel, pipeline parallel. Based on the user configurations, the operator-level strategy is automatically searched by the double recursive strategy search algorithm, which generates the optimal parallel strategy. For details, please refer to the [Multi-dimensional Hybrid Parallel Case Based on Double Recursive Search](https://www.mindspore.cn/tutorials/en/br_base/parallel/multiple_mixed.html). diff --git a/tutorials/source_en/parallel/pipeline_parallel.md b/tutorials/source_en/parallel/pipeline_parallel.md index f6f1ee36f3..5523bed062 100644 --- a/tutorials/source_en/parallel/pipeline_parallel.md +++ b/tutorials/source_en/parallel/pipeline_parallel.md @@ -1,6 +1,6 @@ # Pipeline Parallel -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/parallel/pipeline_parallel.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/parallel/pipeline_parallel.md) ## Overview @@ -12,7 +12,7 @@ The following is an illustration of pipeline parallel operation using Ascend or ### Sample Code Description -> Download the complete sample code: [distributed_pipeline_parallel](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/distributed_pipeline_parallel). +> Download the complete sample code: [distributed_pipeline_parallel](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/distributed_pipeline_parallel). The directory structure is as follows: @@ -122,11 +122,11 @@ class Network(nn.Cell): ### Training Network Definition -In this step, we need to define the loss function, the optimizer, and the training process. It should be noted that the definitions of both the network and the optimizer here require deferred initialization. Besides, the interface [mindspore.parallel.nn.PipelineGradReducer](https://www.mindspore.cn/docs/en/master/api_python/parallel/mindspore.parallel.nn.PipelineGradReducer.html) is needed to handle gradient of pipeline parallelism, the first parameter of this interface is the network parameter to be updated, and the second one is whether to use optimizer parallelism. +In this step, we need to define the loss function, the optimizer, and the training process. It should be noted that the definitions of both the network and the optimizer here require deferred initialization. Besides, the interface [mindspore.parallel.nn.PipelineGradReducer](https://www.mindspore.cn/docs/en/br_base/api_python/parallel/mindspore.parallel.nn.PipelineGradReducer.html) is needed to handle gradient of pipeline parallelism, the first parameter of this interface is the network parameter to be updated, and the second one is whether to use optimizer parallelism. Unlike the single-card model, two interfaces need to be called in this section to configure the pipeline parallel: -- First define the LossCell. 
In this case the [mindspore.nn.WithLossCell](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.WithLossCell.html) interface is called to encapsulate the network and loss functions. +- First define the LossCell. In this case the [mindspore.nn.WithLossCell](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.WithLossCell.html) interface is called to encapsulate the network and loss functions. - Finally, wrap the LossCell with `Pipeline`, and specify the size of MicroBatch. Configure the `pipeline_stage` for each `Cell` containing training parameters via `stage_config`. ```python @@ -248,7 +248,7 @@ Tensor(shape=[8, 512], dtype=Float32, value= [ 4.89746094e-01 3.56689453e-01 -4.90966797e-01 ... -3.30078125e-e01 -2.38525391e-01 7.33398438e-01]]) ``` -Other startup methods such as dynamic cluster and `rank table` startup can be found in [startup methods](https://www.mindspore.cn/tutorials/en/master/parallel/startup_method.html). +Other startup methods such as dynamic cluster and `rank table` startup can be found in [startup methods](https://www.mindspore.cn/tutorials/en/br_base/parallel/startup_method.html). ## Inference Operation Practices @@ -256,7 +256,7 @@ The following is an illustration of pipeline parallel inference operation using ### Sample Code Description -> Download the complete sample code: [distributed_pipeline_parallel](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/distributed_pipeline_parallel). +> Download the complete sample code: [distributed_pipeline_parallel](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/distributed_pipeline_parallel). The directory structure is as follows: @@ -353,7 +353,7 @@ In the previous step, the parameter `embed` is shared by `self.word_embedding` a We need to further set up the parallelism-related configuration by wrapping the network again with `AutoParallel`, specifying the parallelism mode `semi-auto` as semi-automatic parallelism, in addition to turning on pipeline parallelism, configuring `pipeline`, and specifying the total number of stages by configuring the number of `stages`. If `device_target` is not set here, it will be automatically specified as the backend hardware device corresponding to the MindSpore package (default is Ascend). `output_broadcast=True` indicates that the result of the last stage will be broadcast to the remaining stages when pipelined parallel inference is performed, which can be used in autoregressive inference scenarios. -Before inference, executing `parallel_net.compile()` and [mindspore.parallel.sync_pipeline_shared_parameters(parallel_net)](https://www.mindspore.cn/docs/en/master/api_python/parallel/mindspore.parallel.sync_pipeline_shared_parameters.html), the framework will synchronize the shared parameter between stages automatically. +Before inference, executing `parallel_net.compile()` and [mindspore.parallel.sync_pipeline_shared_parameters(parallel_net)](https://www.mindspore.cn/docs/en/br_base/api_python/parallel/mindspore.parallel.sync_pipeline_shared_parameters.html), the framework will synchronize the shared parameter between stages automatically. 
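The call order described in that paragraph can be sketched as follows. This is a minimal illustration only: `build_pipeline_net()` is a hypothetical stand-in for the `AutoParallel`-wrapped two-stage inference network constructed in the sample code, and the snippet assumes the distributed job has already been launched.

```python
from mindspore.parallel import sync_pipeline_shared_parameters

# Hypothetical helper standing in for the sample code that builds the
# AutoParallel-wrapped, 2-stage pipeline inference network.
parallel_net = build_pipeline_net()

parallel_net.compile()                         # compile first, so each stage knows which parameters it shares
sync_pipeline_shared_parameters(parallel_net)  # then synchronize the embedding shared between stages
```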
```python diff --git a/tutorials/source_en/parallel/rank_table.md b/tutorials/source_en/parallel/rank_table.md index afaaa14721..7c6112c632 100644 --- a/tutorials/source_en/parallel/rank_table.md +++ b/tutorials/source_en/parallel/rank_table.md @@ -1,6 +1,6 @@ # rank table Startup -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/parallel/rank_table.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/parallel/rank_table.md) ## Overview @@ -37,7 +37,7 @@ The parameter items that need to be modified according to the actual training en ## Operation Practice -> You can download the full sample code here: [startup_method](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/startup_method). +> You can download the full sample code here: [startup_method](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/startup_method). The directory structure is as follows: diff --git a/tutorials/source_en/parallel/recompute.md b/tutorials/source_en/parallel/recompute.md index 6606147f21..4d7d4de2cb 100644 --- a/tutorials/source_en/parallel/recompute.md +++ b/tutorials/source_en/parallel/recompute.md @@ -1,6 +1,6 @@ # Recomputation -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/parallel/recompute.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/parallel/recompute.md) ## Overview @@ -14,7 +14,7 @@ In order to reduce memory peaks, the recompute technique can not save the comput The recompute function is implemented as a forward operator that is recomputed according to the user's specified needs, copies the same operator, outputs it to the reverse operator, and deletes the continuous edge relationship between the original forward operator and the reverse operator. In addition, we need to ensure that the copied operator only begins to be evaluated when the corresponding inverse part is computed, so we need to insert control dependencies to ensure the order in which the operators are executed. As shown in the following figure: -![](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/parallel/images/recompute_image_0_zh.png) +![](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/parallel/images/recompute_image_0_zh.png) *Figure: Forward and reverse diagram before and after the recompute function is enabled* @@ -22,17 +22,17 @@ For user convenience, MindSpore currently provides not only a recompute interfac Taking the GPT-3 model as an example, the policy is set to recalculate the cell corresponding to the each layer, and then the output operator of the layer is set to non-recompute. 
The effect of recompute on the 72-layer GPT-3 network is shown in the following figure: -![](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/parallel/images/recompute_image_1_zh.png) +![](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/parallel/images/recompute_image_1_zh.png) *Figure: Comparison of GPT-3 memory usage before and after recalculation function is enabled* ### Related Interfaces -1. `mindspore.nn.Cell.recompute()`: Call the [recompute interface](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.recompute) of `Cell`. After calling this interface, when computing the reverse part, all the operators inside the Cell and all the operators inside the sub-Cells are recomputed, except for the output operator of that Cell. Both PyNative mode and Graph mode are supported. +1. `mindspore.nn.Cell.recompute()`: Call the [recompute interface](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.recompute) of `Cell`. After calling this interface, when computing the reverse part, all the operators inside the Cell and all the operators inside the sub-Cells are recomputed, except for the output operator of that Cell. Both PyNative mode and Graph mode are supported. -2. `mindspore.ops.Primitive.recompute()`: Call the [recompute interface](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Primitive.html#mindspore.ops.Primitive.recompute) of `Primitive`. After calling this interface, the operator is recomputed when computing the reverse part. Only Graph mode is supported. +2. `mindspore.ops.Primitive.recompute()`: Call the [recompute interface](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Primitive.html#mindspore.ops.Primitive.recompute) of `Primitive`. After calling this interface, the operator is recomputed when computing the reverse part. Only Graph mode is supported. -3. `mindspore.recompute()`: Call the [recompute interface](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.recompute.html#mindspore.recompute). After calling this function interface, the block will be recomputed. Only PyNative mode is supported. +3. `mindspore.recompute()`: Call the [recompute interface](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.recompute.html#mindspore.recompute). After calling this function interface, the block will be recomputed. Only PyNative mode is supported. ## Operation Practice @@ -40,7 +40,7 @@ The following is an illustration of the recomputation operation using an Ascend ### Sample Code Description -> Download the complete sample code: [recompute](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/recompute). +> Download the complete sample code: [recompute](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/recompute). The directory structure is as follows: @@ -132,7 +132,7 @@ class Grad(Cell): ### Executing the Network -In this step, we need to define the network inputs, defer initialization of the network parameters and optimizer parameters through the [no_init_parameters](https://www.mindspore.cn/docs/en/master/api_python/nn/mindspore.nn.utils.no_init_parameters.html) interface, and then call `Grad` in order to obtain the derivatives. 
Set the parallel mode to semi-automatic parallel mode through the top-level `AutoParallel` interface, with the following code: +In this step, we need to define the network inputs, defer initialization of the network parameters and optimizer parameters through the [no_init_parameters](https://www.mindspore.cn/docs/en/br_base/api_python/nn/mindspore.nn.utils.no_init_parameters.html) interface, and then call `Grad` in order to obtain the derivatives. Set the parallel mode to semi-automatic parallel mode through the top-level `AutoParallel` interface, with the following code: ```python import numpy as np diff --git a/tutorials/source_en/parallel/split_technique.md b/tutorials/source_en/parallel/split_technique.md index 62229a3fa0..65a4a50490 100644 --- a/tutorials/source_en/parallel/split_technique.md +++ b/tutorials/source_en/parallel/split_technique.md @@ -1,6 +1,6 @@ # Sharding Techniques -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/parallel/split_technique.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/parallel/split_technique.md) ## Overview @@ -10,23 +10,23 @@ For a new model using `Sharding Propagation` to configure the parallelization st The sharding strategy for parameter weights is very important, especially for large models, as the memory consumption caused by parameter weights accounts for a large portion of the total memory consumption for model training. Therefore, operators involving weights usually need to explicitly configure the sharding strategy. In the two examples below, the Gather and MatMul operators involving weights are configured with sharding strategy, while the other operators are not. These correspond the data-parallel VocabEmbedding layer and hybrid-parallel FeedForward Layer in [MindSpore TransFormers](https://gitee.com/mindspore/mindformers/blob/master/mindformers/modules/transformer/transformer.py), respectively. -![sp_case1](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_en/parallel/images/sp_case1.png "Configuring Operators Involving Weights") +![sp_case1](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_en/parallel/images/sp_case1.png "Configuring Operators Involving Weights") ### Configuring Dimension-changing/Axis-changing Operators -The operators of deep learning frameworks can be broadly categorized into two types: operators that are semantically simple and dimension-preserving and operators that change the dimension of the input tensor. For dimension-preserving operators, the strategy propagation algorithm can propagate the sharding strategy more easily. However, for dimension-changing operators, explicitly configuring the sharding strategy is the only way to better express the user initial thoughts and avoid the strategy propagation algorithm from deriving the sharding strategy that is not expected by the user. 
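As a rough sketch of what such a configuration looks like in code (not the MindSpore Transformers implementation itself; the layer shape and the `dp`/`mp` values below are illustrative and assume a matching device layout), only the weight-consuming MatMul is given an explicit strategy, while the dimension-preserving activation operator is left to strategy propagation:

```python
import numpy as np
import mindspore as ms
from mindspore import nn, ops, Parameter

class FeedForwardSketch(nn.Cell):
    """Toy feed-forward block: only the weight-consuming MatMul gets an explicit strategy."""
    def __init__(self, in_dim=32, out_dim=64, dp=2, mp=4):
        super().__init__()
        self.weight = Parameter(ms.Tensor(np.random.randn(in_dim, out_dim).astype(np.float32)))
        self.matmul = ops.MatMul()
        # Slice the activation along the batch axis (dp) and the weight along the
        # output axis (mp); 2 and 4 are illustrative values for an 8-card layout.
        self.matmul.shard(((dp, 1), (1, mp)))
        self.gelu = ops.GeLU()  # dimension-preserving, left to strategy propagation

    def construct(self, x):
        return self.gelu(self.matmul(x, self.weight))
```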
Common dimension-changing and axis-changing operators are: [ReduceMean](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ReduceMean.html), [ReduceSum](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.ReduceSum.html), [Transpose](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Transpose.html), [StridedSlice](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.StridedSlice.html), [MatMul](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.MatMul.html), and [BatchMatMul](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.BatchMatMul.html). In the example below, ReduceMean and MatMul are dimension-changing operators that are configured with sharding strategy. +The operators of deep learning frameworks can be broadly categorized into two types: operators that are semantically simple and dimension-preserving and operators that change the dimension of the input tensor. For dimension-preserving operators, the strategy propagation algorithm can propagate the sharding strategy more easily. However, for dimension-changing operators, explicitly configuring the sharding strategy is the only way to better express the user initial thoughts and avoid the strategy propagation algorithm from deriving the sharding strategy that is not expected by the user. Common dimension-changing and axis-changing operators are: [ReduceMean](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ReduceMean.html), [ReduceSum](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.ReduceSum.html), [Transpose](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Transpose.html), [StridedSlice](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.StridedSlice.html), [MatMul](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.MatMul.html), and [BatchMatMul](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.BatchMatMul.html). In the example below, ReduceMean and MatMul are dimension-changing operators that are configured with sharding strategy. -![sp_case2](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_en/parallel/images/sp_case2.png "Configuring Dimension-changing Operators") +![sp_case2](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_en/parallel/images/sp_case2.png "Configuring Dimension-changing Operators") ### Configuring Boundary Operators that Change in Parallel Strategy For ResNet-like models, different parts of the model have different preferred parallel: the first half uses data parallel, and the second half uses model parallel for optimal iterative performance. For Llama-like large models, when vocab_size is too large, model parallel slicing may be chosen for memory considerations; when sequence_length is too large, the strategy of sequence parallelism may also be chosen. The above strategies belong to those carefully configured by the user based on the model and hardware information. Sharding Propagation is a plain algorithm to find the least cost of rearrangement, and it does not find the carefully configured strategies automatically, so for the operator strategies carefully tuned by the user, it is necessary to configure them exclusively. 
In the example below, the first MatMul is configured with a strategy for data parallel, which will propagate the strategy for data parallel forward to the first half of the model, while the second MatMul is configured with a strategy for model parallel, which will propagate the strategy for model parallel backward to the second half of the model. -![sp_case3](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_en/parallel/images/sp_case3.png "Configuring Boundary Operators that Change in Parallel Method") +![sp_case3](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_en/parallel/images/sp_case3.png "Configuring Boundary Operators that Change in Parallel Method") ### Configuring Fusion Operators -Large fusion operators, such as [FlashAttentionScore](https://www.mindspore.cn/lite/api/en/master/generate/classmindspore_ops_FlashAttentionScore.html#exhale-class-classmindspore-ops-flashattentionscore), [rms_norm](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.rms_norm.html), are also operators that require the user to manually configure the strategy. The input and output logic of the fusion operator is relatively complex, and the propagated strategy without reordering is not necessarily the strategy expected by the user. These operators also require explicit configuration of the operator-level strategy. +Large fusion operators, such as [FlashAttentionScore](https://www.mindspore.cn/lite/api/en/master/generate/classmindspore_ops_FlashAttentionScore.html#exhale-class-classmindspore-ops-flashattentionscore), [rms_norm](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.rms_norm.html), are also operators that require the user to manually configure the strategy. The input and output logic of the fusion operator is relatively complex, and the propagated strategy without reordering is not necessarily the strategy expected by the user. These operators also require explicit configuration of the operator-level strategy. Users working with strategy propagation need to have some understanding not only of its propagation algorithm itself, but also of the parallelism of the model to be trained. If there exists a certain operator whose parallelization strategy determined by the strategy propagation algorithm does not meet the user's expectations, that can always be solved by configuring an additional operator parallelization strategy. In practice, for a new model, it does take several attempts to obtain an overall parallel configuration with better performance. diff --git a/tutorials/source_en/parallel/startup_method.rst b/tutorials/source_en/parallel/startup_method.rst index 87475a4560..b52171b020 100644 --- a/tutorials/source_en/parallel/startup_method.rst +++ b/tutorials/source_en/parallel/startup_method.rst @@ -1,8 +1,8 @@ Distributed Parallel Startup Methods ==================================== -.. image:: https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg - :target: https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/parallel/startup_method.rst +.. image:: https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg + :target: https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/parallel/startup_method.rst :alt: View Source On Gitee .. 
toctree:: @@ -19,10 +19,10 @@ Startup Method Currently GPU, Ascend and CPU support multiple startup methods respectively, four of which are \ ``msrun``, dynamic cluster, \ ``mpirun`` and \ ``rank table``: -- `msrun `_: `msrun` is the capsulation of Dynamic cluster. It allows user to launch distributed jobs using one single command in each node. It could be used after MindSpore is installed. This method does not rely on third-party libraries and configuration files, has disaster recovery function, good security, and supports three hardware platforms. It is recommended that users prioritize the use of this startup method. -- `Dynamic cluster `_: dynamic cluster requires user to spawn multiple processes and export environment variables. It's the implementation of `msrun`. Use this method when running `Parameter Server` training mode. For other distributed jobs, `msrun` is recommended. -- `mpirun `_: this method relies on the open source library OpenMPI, and startup command is simple. Multi-machine need to ensure two-by-two password-free login. It is recommended for users who have experience in using OpenMPI to use this startup method. -- `rank table `_: this method requires the Ascend hardware platform and does not rely on third-party library. After manually configuring the rank_table file, you can start the parallel program via a script, and the script is consistent across multiple machines for easy batch deployment. +- `msrun `_: `msrun` is the capsulation of Dynamic cluster. It allows user to launch distributed jobs using one single command in each node. It could be used after MindSpore is installed. This method does not rely on third-party libraries and configuration files, has disaster recovery function, good security, and supports three hardware platforms. It is recommended that users prioritize the use of this startup method. +- `Dynamic cluster `_: dynamic cluster requires user to spawn multiple processes and export environment variables. It's the implementation of `msrun`. Use this method when running `Parameter Server` training mode. For other distributed jobs, `msrun` is recommended. +- `mpirun `_: this method relies on the open source library OpenMPI, and startup command is simple. Multi-machine need to ensure two-by-two password-free login. It is recommended for users who have experience in using OpenMPI to use this startup method. +- `rank table `_: this method requires the Ascend hardware platform and does not rely on third-party library. After manually configuring the rank_table file, you can start the parallel program via a script, and the script is consistent across multiple machines for easy batch deployment. 
The hardware support for the four startup methods is shown in the table below: diff --git a/tutorials/source_en/parallel/strategy_select.md b/tutorials/source_en/parallel/strategy_select.md index 805a0d4a59..15958df309 100644 --- a/tutorials/source_en/parallel/strategy_select.md +++ b/tutorials/source_en/parallel/strategy_select.md @@ -1,18 +1,18 @@ # Strategy Selection -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/parallel/strategy_select.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/parallel/strategy_select.md) ## Overview In distributed model training, for different model sizes and data volume sizes, different parallel strategies can be chosen to improve training efficiency and resource utilization. The following are the explanation and application of different parallel strategies: -1. [Data Parallel](https://www.mindspore.cn/tutorials/en/master/parallel/data_parallel.html): Data parallel is the process of distributing different training samples to different devices (e.g., Ascend or GPUs) during the training process, with each device computing the gradient of its assigned sample. The parameters of the model are then updated by averaging or accumulating the gradients. Data parallel is suitable for situations where the amount of data is large and the number of model parameters is small enough to be loaded on a single device. Data parallel can speed up the training process by fully utilizing the computing power of multiple devices. +1. [Data Parallel](https://www.mindspore.cn/tutorials/en/br_base/parallel/data_parallel.html): Data parallel is the process of distributing different training samples to different devices (e.g., Ascend or GPUs) during the training process, with each device computing the gradient of its assigned sample. The parameters of the model are then updated by averaging or accumulating the gradients. Data parallel is suitable for situations where the amount of data is large and the number of model parameters is small enough to be loaded on a single device. Data parallel can speed up the training process by fully utilizing the computing power of multiple devices. -2. [Operator-level Parallel](https://www.mindspore.cn/tutorials/en/master/parallel/operator_parallel.html): Operator-level parallel means that the input tensor and model parameters are sliced into multiple devices for computation on an operator basis, with each device being responsible for computing a part of the model to improve overall speed. Operator-level parallel is subdivided into semi-automatic parallel mode, which requires manual configuration of the sharding strategy, and automatic parallel mode that requires little or even no configuration of the sharding strategy. Operator-level parallel is suitable for cases where the model architecture is large and cannot be fully loaded into a single device memory. +2. [Operator-level Parallel](https://www.mindspore.cn/tutorials/en/br_base/parallel/operator_parallel.html): Operator-level parallel means that the input tensor and model parameters are sliced into multiple devices for computation on an operator basis, with each device being responsible for computing a part of the model to improve overall speed. 
Operator-level parallel is subdivided into semi-automatic parallel mode, which requires manual configuration of the sharding strategy, and automatic parallel mode that requires little or even no configuration of the sharding strategy. Operator-level parallel is suitable for cases where the model architecture is large and cannot be fully loaded into a single device memory. -3. [Optimizer Parallel](https://www.mindspore.cn/tutorials/en/master/parallel/optimizer_parallel.html): Optimizer parallel can effectively reduce memory consumption and improve network performance on large-scale networks (e.g., LLAMA, DeepSeek) by spreading the optimizer computation over cards with data parallel dimensions, and is recommended to be turned on for parallel training. +3. [Optimizer Parallel](https://www.mindspore.cn/tutorials/en/br_base/parallel/optimizer_parallel.html): Optimizer parallel can effectively reduce memory consumption and improve network performance on large-scale networks (e.g., LLAMA, DeepSeek) by spreading the optimizer computation over cards with data parallel dimensions, and is recommended to be turned on for parallel training. -4. [Pipeline Parallel](https://www.mindspore.cn/tutorials/en/master/parallel/pipeline_parallel.html): Pipeline parallel divides the entire training process into multiple phases, with computations in each phase performed on a different device. Data flows between stages, similar to an assembly line. This strategy is applicable when the network model is large and cannot be loaded by a single card, and when the network can be more evenly divided into multiple phases of computation with longer computation times for each phase, thus maximizing overlapping computation and communication. +4. [Pipeline Parallel](https://www.mindspore.cn/tutorials/en/br_base/parallel/pipeline_parallel.html): Pipeline parallel divides the entire training process into multiple phases, with computations in each phase performed on a different device. Data flows between stages, similar to an assembly line. This strategy is applicable when the network model is large and cannot be loaded by a single card, and when the network can be more evenly divided into multiple phases of computation with longer computation times for each phase, thus maximizing overlapping computation and communication. The selection of an appropriate parallel strategy depends on the specific training task and resource allocation. 
Typically, the selection can be based on the following guidelines: diff --git a/tutorials/source_en/train_availability/fault_recover.md b/tutorials/source_en/train_availability/fault_recover.md index a9e546c0b4..1bd3208a90 100644 --- a/tutorials/source_en/train_availability/fault_recover.md +++ b/tutorials/source_en/train_availability/fault_recover.md @@ -1,6 +1,6 @@ # Fault Recovery -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/train_availability/fault_recover.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/train_availability/fault_recover.md) ## Overview @@ -104,7 +104,7 @@ model = Model(net, loss_fn=loss, optimizer=optim) # Model encapsulation ### Configuring CheckpointConfig -[mindspore.train.CheckpointConfig](https://www.mindspore.cn/docs/en/master/api_python/train/mindspore.train.CheckpointConfig.html) supports configuration based on the number of iterations, with the following main parameters: +[mindspore.train.CheckpointConfig](https://www.mindspore.cn/docs/en/br_base/api_python/train/mindspore.train.CheckpointConfig.html) supports configuration based on the number of iterations, with the following main parameters: - `save_checkpoint_steps`: indicates how many steps to save a Checkpoint file. The default value is 1. - `keep_checkpoint_max`: indicates the maximum number of Checkpoint files to be saved. The default value is 5. @@ -166,7 +166,7 @@ ckpt_file = ckpt_path + "/" + ckptnames[-1] ### Loading Checkpoint File -Use the [load_checkpoint](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.load_checkpoint.html) and [load_param_into_net](https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.load_param_into_net.html) methods to load the latest saved Checkpoint file. +Use the [load_checkpoint](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.load_checkpoint.html) and [load_param_into_net](https://www.mindspore.cn/docs/en/br_base/api_python/mindspore/mindspore.load_param_into_net.html) methods to load the latest saved Checkpoint file. - The `load_checkpoint` method will load the network parameters from the Checkpoint file into the dictionary param_dict. - The `load_param_into_net` method will load the parameters from the dictionary param_dict into the network or optimizer, and the parameters in the network after loading are the ones saved in the Checkpoint file. 
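To make the save/restore flow above concrete, here is a minimal sketch using the interfaces named in this tutorial; the stand-in network, prefix, directory, and step counts are illustrative rather than the tutorial's actual values.

```python
import os
import mindspore as ms
from mindspore import nn
from mindspore.train import CheckpointConfig, ModelCheckpoint

net = nn.Dense(10, 2)  # stand-in network; the tutorial trains its own model

# Saving: one checkpoint every 50 steps, keeping at most the 3 newest files.
config = CheckpointConfig(save_checkpoint_steps=50, keep_checkpoint_max=3)
ckpt_cb = ModelCheckpoint(prefix="net", directory="./checkpoints", config=config)
# model.train(num_epochs, dataset, callbacks=[ckpt_cb])

# Recovery: locate the most recently written checkpoint and load it back into the network.
ckpt_dir = "./checkpoints"
if os.path.isdir(ckpt_dir):
    ckpts = sorted(
        (f for f in os.listdir(ckpt_dir) if f.endswith(".ckpt")),
        key=lambda f: os.path.getmtime(os.path.join(ckpt_dir, f)),
    )
    if ckpts:
        param_dict = ms.load_checkpoint(os.path.join(ckpt_dir, ckpts[-1]))
        ms.load_param_into_net(net, param_dict)
```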
diff --git a/tutorials/source_en/train_availability/graceful_exit.md b/tutorials/source_en/train_availability/graceful_exit.md index 4536dd81ee..7fb0de9520 100644 --- a/tutorials/source_en/train_availability/graceful_exit.md +++ b/tutorials/source_en/train_availability/graceful_exit.md @@ -1,12 +1,12 @@ # Training Process Exit Gracefully -[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_en/train_availability/graceful_exit.md) +[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_en/train_availability/graceful_exit.md) ## Overview When there are suboptimal devices in the training cluster, saving checkpoint and exiting the cluster training process before the failure occurs can effectively prevent the loss of weight data when the cluster is damaged. This also avoids issues such as training data rollback and loading checkpoint rollback when training recovery, effectively preventing the waste of training resources. -> This document describes how to use the process graceful exit. In order to illustrate the specific usage, the example of detecting the exit configuration message at the first training step and terminating the training process early is used. You can get the full sample code here: [process_graceful_exit](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/graceful_exit/) . +> This document describes how to use the process graceful exit. In order to illustrate the specific usage, the example of detecting the exit configuration message at the first training step and terminating the training process early is used. You can get the full sample code here: [process_graceful_exit](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/graceful_exit/) . `graceful_exit.py` is the training code, `train.sh` is the `msrun` startup script, and `graceful_exit.json` is the graceful exit config json file. @@ -159,7 +159,7 @@ config_json = r"./graceful_exit.json" cb = OnRequestExit(file_name="LeNet", config_file=config_json) ``` -When configuring the `OnRequestExit` callback function, you can configure saving mindir, saving checkpoint, and other configuration parameters as required. For more details, please refer to the documentation [OnRequestExit](https://www.mindspore.cn/docs/en/master/api_python/train/mindspore.train.OnRequestExit.html). +When configuring the `OnRequestExit` callback function, you can configure saving mindir, saving checkpoint, and other configuration parameters as required. For more details, please refer to the documentation [OnRequestExit](https://www.mindspore.cn/docs/en/br_base/api_python/train/mindspore.train.OnRequestExit.html). 
```python def graceful_exit_case(): diff --git a/tutorials/source_zh_cn/beginner/accelerate_with_static_graph.ipynb b/tutorials/source_zh_cn/beginner/accelerate_with_static_graph.ipynb index 4f80abe32f..5d1ceb2da0 100644 --- a/tutorials/source_zh_cn/beginner/accelerate_with_static_graph.ipynb +++ b/tutorials/source_zh_cn/beginner/accelerate_with_static_graph.ipynb @@ -5,9 +5,9 @@ "id": "69a92ef2", "metadata": {}, "source": [ - "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/beginner/mindspore_accelerate_with_static_graph.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/beginner/mindspore_accelerate_with_static_graph.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/beginner/accelerate_with_static_graph.ipynb)\n", + "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/beginner/mindspore_accelerate_with_static_graph.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/beginner/mindspore_accelerate_with_static_graph.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/beginner/accelerate_with_static_graph.ipynb)\n", "\n", - "[基本介绍](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/introduction.html) || [快速入门](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/quick_start.html) || [张量 Tensor](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/tensor.html) || [数据加载与处理](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/dataset.html) || [网络构建](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/model.html) || [函数式自动微分](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/beginner/autograd.ipynb) || [模型训练](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/train.html) || [保存与加载](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/save_load.html) || **Graph Mode加速** ||\n", + "[基本介绍](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/introduction.html) || [快速入门](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/quick_start.html) || [张量 Tensor](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/tensor.html) || [数据加载与处理](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/dataset.html) || [网络构建](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/model.html) || [函数式自动微分](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/beginner/autograd.ipynb) || [模型训练](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/train.html) || [保存与加载](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/save_load.html) || **Graph Mode加速** ||\n", "\n", "# Graph Mode加速\n", "\n", @@ -92,7 +92,7 @@ "source": [ "### 
静态图模式\n", "\n", - "相较于动态图而言,静态图的特点是将计算图的构建和实际计算分开(Define and run)。有关静态图模式的运行原理,可以参考[静态图语法支持](https://www.mindspore.cn/tutorials/zh-CN/master/compile/static_graph.html#概述)。\n", + "相较于动态图而言,静态图的特点是将计算图的构建和实际计算分开(Define and run)。有关静态图模式的运行原理,可以参考[静态图语法支持](https://www.mindspore.cn/tutorials/zh-CN/br_base/compile/static_graph.html#概述)。\n", "\n", "在MindSpore中,静态图模式又被称为Graph模式。在Graph模式下,基于图优化、计算图整图下沉等技术,编译器可以针对图进行全局的优化,获得较好的性能,因此比较适合网络固定且需要高性能的场景。\n", "\n", @@ -167,7 +167,7 @@ "\n", "MindSpore编译器重点面向Tensor数据的计算以及其微分处理。因此,使用MindSpore API以及基于Tensor对象的操作,更适合使用静态图编译优化。其他操作虽然可以部分入图编译,但实际优化作用有限。另外,静态图模式采用先编译后执行,存在编译耗时。如果函数无需反复执行,那么Graph Mode加速也可能没有价值。\n", "\n", - "有关使用静态图来进行网络编译的示例,请参考[网络构建](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/model.html)。\n", + "有关使用静态图来进行网络编译的示例,请参考[网络构建](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/model.html)。\n", "\n", "## 静态图模式开启方式\n", "\n", @@ -457,11 +457,11 @@ "source": [ "## 静态图的语法约束\n", "\n", - "在Graph模式下,Python代码并不会由Python解释器去执行,而是先编译成静态计算图,再执行该静态计算图。因此,编译器无法支持全量的Python语法。MindSpore的静态图编译器支持Python常用语法子集,以支持神经网络的构建及训练。详情可参考[静态图语法支持](https://www.mindspore.cn/tutorials/zh-CN/master/compile/static_graph.html)。\n", + "在Graph模式下,Python代码并不会由Python解释器去执行,而是先编译成静态计算图,再执行该静态计算图。因此,编译器无法支持全量的Python语法。MindSpore的静态图编译器支持Python常用语法子集,以支持神经网络的构建及训练。详情可参考[静态图语法支持](https://www.mindspore.cn/tutorials/zh-CN/br_base/compile/static_graph.html)。\n", "\n", "## 静态图高级编程技巧\n", "\n", - "使用静态图高级编程技巧,可以有效地提高编译和执行效率,使程序运行更加稳定。详情可参考[静态图高级编程技巧](https://www.mindspore.cn/tutorials/zh-CN/master/compile/static_graph_expert_programming.html)。" + "使用静态图高级编程技巧,可以有效地提高编译和执行效率,使程序运行更加稳定。详情可参考[静态图高级编程技巧](https://www.mindspore.cn/tutorials/zh-CN/br_base/compile/static_graph_expert_programming.html)。" ] } ], diff --git a/tutorials/source_zh_cn/beginner/autograd.ipynb b/tutorials/source_zh_cn/beginner/autograd.ipynb index d64ef996d0..d89e85e15e 100644 --- a/tutorials/source_zh_cn/beginner/autograd.ipynb +++ b/tutorials/source_zh_cn/beginner/autograd.ipynb @@ -4,9 +4,9 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/beginner/mindspore_autograd.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/beginner/mindspore_autograd.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/beginner/autograd.ipynb)\n", + "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/beginner/mindspore_autograd.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/beginner/mindspore_autograd.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/beginner/autograd.ipynb)\n", 
"\n", - "[基本介绍](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/introduction.html) || [快速入门](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/quick_start.html) || [张量 Tensor](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/tensor.html) || [数据加载与处理](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/dataset.html) || [网络构建](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/model.html) || **函数式自动微分** || [模型训练](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/train.html) || [保存与加载](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/save_load.html) || [Graph Mode加速](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/accelerate_with_static_graph.html) ||" + "[基本介绍](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/introduction.html) || [快速入门](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/quick_start.html) || [张量 Tensor](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/tensor.html) || [数据加载与处理](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/dataset.html) || [网络构建](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/model.html) || **函数式自动微分** || [模型训练](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/train.html) || [保存与加载](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/save_load.html) || [Graph Mode加速](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/accelerate_with_static_graph.html) ||" ] }, { @@ -17,7 +17,7 @@ "\n", "神经网络的训练主要使用反向传播算法,模型预测值(logits)与正确标签(label)送入损失函数(loss function)获得loss,然后进行反向传播计算,求得梯度(gradients),最终更新至模型参数(parameters)。自动微分能够计算可导函数在某点处的导数值,是反向传播算法的一般化。自动微分主要解决的问题是将一个复杂的数学运算分解为一系列简单的基本运算,该功能对用户屏蔽了大量的求导细节和过程,大大降低了框架的使用门槛。\n", "\n", - "MindSpore使用函数式自动微分的设计理念,提供更接近于数学语义的自动微分接口[mindspore.grad](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.grad.html)和[mindspore.value_and_grad](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.value_and_grad.html)。下面我们使用一个简单的单层线性变换模型进行介绍。" + "MindSpore使用函数式自动微分的设计理念,提供更接近于数学语义的自动微分接口[mindspore.grad](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.grad.html)和[mindspore.value_and_grad](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.value_and_grad.html)。下面我们使用一个简单的单层线性变换模型进行介绍。" ] }, { @@ -39,7 +39,7 @@ "\n", "计算图是用图论语言表示数学函数的一种方式,也是深度学习框架表达神经网络模型的统一方法。我们将根据下面的计算图构造计算函数和神经网络。\n", "\n", - "![compute-graph](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/beginner/images/comp-graph.png)" + "![compute-graph](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/beginner/images/comp-graph.png)" ] }, { @@ -67,7 +67,7 @@ "metadata": {}, "source": [ "我们根据计算图描述的计算过程,构造计算函数。\n", - "其中,[binary_cross_entropy_with_logits](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.binary_cross_entropy_with_logits.html) 是一个损失函数,计算预测值和目标值之间的二值交叉熵损失。" + "其中,[binary_cross_entropy_with_logits](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.binary_cross_entropy_with_logits.html) 是一个损失函数,计算预测值和目标值之间的二值交叉熵损失。" ] }, { @@ -225,7 +225,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "可以看到求得$w$、$b$对应的梯度值发生了变化。此时如果想要屏蔽掉z对梯度的影响,即仍只求参数对loss的导数,可以使用[mindspore.ops.stop_gradient](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.stop_gradient.html)接口,将梯度在此处截断。我们将`function`实现加入`stop_gradient`,并执行。" + 
"可以看到求得$w$、$b$对应的梯度值发生了变化。此时如果想要屏蔽掉z对梯度的影响,即仍只求参数对loss的导数,可以使用[mindspore.ops.stop_gradient](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.stop_gradient.html)接口,将梯度在此处截断。我们将`function`实现加入`stop_gradient`,并执行。" ] }, { diff --git a/tutorials/source_zh_cn/beginner/dataset.ipynb b/tutorials/source_zh_cn/beginner/dataset.ipynb index d338b21210..6f723d1b82 100644 --- a/tutorials/source_zh_cn/beginner/dataset.ipynb +++ b/tutorials/source_zh_cn/beginner/dataset.ipynb @@ -5,9 +5,9 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/beginner/mindspore_dataset.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/beginner/mindspore_dataset.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/beginner/dataset.ipynb)\n", + "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/beginner/mindspore_dataset.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/beginner/mindspore_dataset.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/beginner/dataset.ipynb)\n", "\n", - "[基本介绍](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/introduction.html) || [快速入门](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/quick_start.html) || [张量 Tensor](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/tensor.html) || **数据加载与处理** || [网络构建](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/model.html) || [函数式自动微分](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/autograd.html) || [模型训练](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/train.html) || [保存与加载](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/save_load.html) || [Graph Mode加速](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/accelerate_with_static_graph.html) ||" + "[基本介绍](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/introduction.html) || [快速入门](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/quick_start.html) || [张量 Tensor](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/tensor.html) || **数据加载与处理** || [网络构建](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/model.html) || [函数式自动微分](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/autograd.html) || [模型训练](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/train.html) || [保存与加载](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/save_load.html) || [Graph Mode加速](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/accelerate_with_static_graph.html) ||" ] }, { @@ -19,15 +19,15 @@ "\n", 
"数据是深度学习的基础,高质量的数据输入将在整个深度神经网络中起到积极作用。\n", "\n", - "MindSpore提供基于Pipeline的[数据引擎](https://www.mindspore.cn/docs/zh-CN/master/features/data_engine.html),通过 `数据集(Dataset)`、`数据变换(Transforms)`和`数据批处理(Batch)`,可以实现高效的数据预处理。其中:\n", + "MindSpore提供基于Pipeline的[数据引擎](https://www.mindspore.cn/docs/zh-CN/br_base/features/data_engine.html),通过 `数据集(Dataset)`、`数据变换(Transforms)`和`数据批处理(Batch)`,可以实现高效的数据预处理。其中:\n", "\n", - "1. 数据集(Dataset)是Pipeline的起始,用于从存储中加载原始数据至内存。`mindspore.dataset`提供了内置的图像、文本、音频等[数据集加载接口](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.dataset.loading.html#),同时支持[自定义数据集加载接口](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.dataset.loading.html#%E8%87%AA%E5%AE%9A%E4%B9%89%E6%95%B0%E6%8D%AE%E9%9B%86%E5%8A%A0%E8%BD%BD-1);\n", + "1. 数据集(Dataset)是Pipeline的起始,用于从存储中加载原始数据至内存。`mindspore.dataset`提供了内置的图像、文本、音频等[数据集加载接口](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.dataset.loading.html#),同时支持[自定义数据集加载接口](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.dataset.loading.html#%E8%87%AA%E5%AE%9A%E4%B9%89%E6%95%B0%E6%8D%AE%E9%9B%86%E5%8A%A0%E8%BD%BD-1);\n", "\n", - "2. 数据变换(Transforms)对内存中的数据做进一步的变换操作,`mindspore.dataset.transforms`提供[通用的数据变换操作](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.dataset.transforms.html#%E9%80%9A%E7%94%A8)、`mindspore.dataset.transforms.vision`提供[图像数据变换操作](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.dataset.transforms.html#%E8%A7%86%E8%A7%89)、`mindspore.dataset.transforms.text`提供[文本数据变换操作](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.dataset.transforms.html#%E6%96%87%E6%9C%AC)、`mindspore.dataset.transforms.audio`提供[音频数据变换操作](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.dataset.transforms.html#%E9%9F%B3%E9%A2%91);\n", + "2. 数据变换(Transforms)对内存中的数据做进一步的变换操作,`mindspore.dataset.transforms`提供[通用的数据变换操作](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.dataset.transforms.html#%E9%80%9A%E7%94%A8)、`mindspore.dataset.transforms.vision`提供[图像数据变换操作](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.dataset.transforms.html#%E8%A7%86%E8%A7%89)、`mindspore.dataset.transforms.text`提供[文本数据变换操作](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.dataset.transforms.html#%E6%96%87%E6%9C%AC)、`mindspore.dataset.transforms.audio`提供[音频数据变换操作](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.dataset.transforms.html#%E9%9F%B3%E9%A2%91);\n", "\n", - "3. 数据批处理(Batch)完成对变换后的数据组批处理(Batch),用于最终的神经网络训练,Batch操作是针对一个数据集对象,其接口可参考:[batch操作](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.MindDataset.html#batch%E6%89%B9%E6%93%8D%E4%BD%9C);\n", + "3. 数据批处理(Batch)完成对变换后的数据组批处理(Batch),用于最终的神经网络训练,Batch操作是针对一个数据集对象,其接口可参考:[batch操作](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.MindDataset.html#batch%E6%89%B9%E6%93%8D%E4%BD%9C);\n", "\n", - "4. 数据集迭代器是将最后的数据通过迭代的方式输出,迭代器也是针对一个数据集对象,其接口可参考:[迭代器](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.MindDataset.html#%E8%BF%AD%E4%BB%A3%E5%99%A8)。\n", + "4. 
数据集迭代器是将最后的数据通过迭代的方式输出,迭代器也是针对一个数据集对象,其接口可参考:[迭代器](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.MindDataset.html#%E8%BF%AD%E4%BB%A3%E5%99%A8)。\n", "\n", "此外MindSpore的领域开发库也提供了大量的预加载数据集,可以使用API一键下载使用。本教程将分别对不同的数据集(Dataset)加载方式:自定义数据集、标准格式数据集和常见数据集,数据变换(Transforms)和数据batch方法进行详细阐述。" ] @@ -55,11 +55,11 @@ "source": [ "## 数据集加载\n", "\n", - "[mindspore.dataset](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.dataset.html)模块提供了自定义数据集、标准格式数据集和一些常用的公开数据集的加载API。\n", + "[mindspore.dataset](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.dataset.html)模块提供了自定义数据集、标准格式数据集和一些常用的公开数据集的加载API。\n", "\n", "### 自定义数据集\n", "\n", - "对于MindSpore暂不支持直接加载的数据集,可以构造自定义数据加载类或自定义数据集生成函数的方式来生成数据集,然后通过[GeneratorDataset](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.GeneratorDataset.html)接口实现自定义方式的数据集加载。\n", + "对于MindSpore暂不支持直接加载的数据集,可以构造自定义数据加载类或自定义数据集生成函数的方式来生成数据集,然后通过[GeneratorDataset](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.GeneratorDataset.html)接口实现自定义方式的数据集加载。\n", "\n", "`GeneratorDataset`支持通过可随机访问数据集对象、可迭代数据集对象和生成器(generator)构造自定义数据集,下面分别对其进行介绍。\n", "\n", @@ -248,14 +248,14 @@ "source": [ "### 标准格式数据集\n", "\n", - "对于MindSpore暂不支持直接加载的数据集,可以将数据集转换成**MindRecord格式**数据集,然后通过[mindspore.dataset.MindDataset](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.MindDataset.html)接口实现数据集加载。" + "对于MindSpore暂不支持直接加载的数据集,可以将数据集转换成**MindRecord格式**数据集,然后通过[mindspore.dataset.MindDataset](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.MindDataset.html)接口实现数据集加载。" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "首先通过**MindRecord格式**接口[FileWriter](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.mindrecord.html#mindspore.mindrecord.FileWriter)创建一个新的**MindRecord格式**数据集,其中每个样本包含`file_name`、`label`和`data`三个字段。" + "首先通过**MindRecord格式**接口[FileWriter](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.mindrecord.html#mindspore.mindrecord.FileWriter)创建一个新的**MindRecord格式**数据集,其中每个样本包含`file_name`、`label`和`data`三个字段。" ] }, { @@ -475,7 +475,7 @@ "\n", "### 内置数据变换操作\n", "\n", - "`mindspore.dataset`提供的内置数据变换:[vision数据变换](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.dataset.transforms.html#%E8%A7%86%E8%A7%89) , [nlp数据变换](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.dataset.transforms.html#%E6%96%87%E6%9C%AC) , [audio数据变换](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.dataset.transforms.html#%E9%9F%B3%E9%A2%91)。\n", + "`mindspore.dataset`提供的内置数据变换:[vision数据变换](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.dataset.transforms.html#%E8%A7%86%E8%A7%89) , [nlp数据变换](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.dataset.transforms.html#%E6%96%87%E6%9C%AC) , [audio数据变换](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.dataset.transforms.html#%E9%9F%B3%E9%A2%91)。\n", "\n", "下面举例对**Mnist**数据集中**data**使用 `Rescale`、`Normalize`和`HWC2CHW`操作,对**label**使用`TypeCast`操作。\n", "\n", @@ -583,7 +583,7 @@ "\n", "一般我们会设置一个固定的batch size,将连续的数据分为若干批(batch)。batch后的数据增加一维,大小为`batch_size`。\n", "\n", - "![op-batch](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/dataset/images/op_batch.png)" + "![op-batch](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/dataset/images/op_batch.png)" ] }, { @@ -623,7 +623,7 @@ 
"source": [ "## 数据集迭代器\n", "\n", - "数据集Pipeline定义完成后,一般以迭代方式获取数据,然后送入神经网络中进行训练。我们可以用[create_tuple_iterator](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_tuple_iterator.html)或[create_dict_iterator](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_dict_iterator.html)接口创建数据迭代器,并迭代访问数据。\n", + "数据集Pipeline定义完成后,一般以迭代方式获取数据,然后送入神经网络中进行训练。我们可以用[create_tuple_iterator](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_tuple_iterator.html)或[create_dict_iterator](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_dict_iterator.html)接口创建数据迭代器,并迭代访问数据。\n", "\n", "访问的数据类型默认为`Tensor`;若设置`output_numpy=True`,访问的数据类型为`Numpy`。" ] diff --git a/tutorials/source_zh_cn/beginner/introduction.ipynb b/tutorials/source_zh_cn/beginner/introduction.ipynb index d506a22f37..be6230ae6b 100644 --- a/tutorials/source_zh_cn/beginner/introduction.ipynb +++ b/tutorials/source_zh_cn/beginner/introduction.ipynb @@ -5,9 +5,9 @@ "id": "c55e51c5-4069-4134-8f68-7ea9a45f0038", "metadata": {}, "source": [ - "[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/beginner/introduction.ipynb)\n", + "[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/beginner/introduction.ipynb)\n", "\n", - "**基本介绍** || [快速入门](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/quick_start.html#) || [张量 Tensor](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/tensor.html) || [数据加载与处理](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/dataset.html) || [网络构建](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/model.html) || [函数式自动微分](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/autograd.html) || [模型训练](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/train.html) || [保存与加载](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/save_load.html) || [Graph Mode加速](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/accelerate_with_static_graph.html) ||" + "**基本介绍** || [快速入门](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/quick_start.html#) || [张量 Tensor](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/tensor.html) || [数据加载与处理](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/dataset.html) || [网络构建](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/model.html) || [函数式自动微分](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/autograd.html) || [模型训练](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/train.html) || [保存与加载](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/save_load.html) || [Graph Mode加速](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/accelerate_with_static_graph.html) ||" ] }, { @@ -31,7 +31,7 @@ "2. 深度学习+科学计算:为开发者提供AI模型开发所需各类Python接口,最大化保持开发者在Python生态开发的使用习惯;\n", "3. 
核心:作为AI框架的核心,构建Tensor数据结构、基础运算算子Operator、自动求导autograd模块、并行计算Parallel模块、编译compile能力以及runtime运行时管理模块。\n", "\n", - "![arch](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/docs/mindspore/source_zh_cn/features/images/arch_zh.png)\n", + "![arch](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/docs/mindspore/source_zh_cn/features/images/arch_zh.png)\n", "\n", "### 设计理念\n", "\n", @@ -45,7 +45,7 @@ "\n", "昇腾AI全栈如下图所示:\n", "\n", - "![昇腾全栈](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/beginner/images/introduction1.png)\n", + "![昇腾全栈](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/beginner/images/introduction1.png)\n", "\n", "下面简单介绍每个模块的作用:\n", "\n", diff --git a/tutorials/source_zh_cn/beginner/model.ipynb b/tutorials/source_zh_cn/beginner/model.ipynb index bc559fbeb1..506f920685 100644 --- a/tutorials/source_zh_cn/beginner/model.ipynb +++ b/tutorials/source_zh_cn/beginner/model.ipynb @@ -4,9 +4,9 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/beginner/mindspore_model.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/beginner/mindspore_model.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/beginner/model.ipynb)\n", + "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/beginner/mindspore_model.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/beginner/mindspore_model.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/beginner/model.ipynb)\n", "\n", - "[基本介绍](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/introduction.html) || [快速入门](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/quick_start.html) || [张量 Tensor](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/tensor.html) || [数据加载与处理](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/dataset.html) || **网络构建** || [函数式自动微分](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/autograd.html) || [模型训练](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/train.html) || [保存与加载](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/save_load.html) || [Graph Mode加速](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/accelerate_with_static_graph.html) ||" + "[基本介绍](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/introduction.html) || [快速入门](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/quick_start.html) || [张量 
Tensor](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/tensor.html) || [数据加载与处理](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/dataset.html) || **网络构建** || [函数式自动微分](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/autograd.html) || [模型训练](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/train.html) || [保存与加载](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/save_load.html) || [Graph Mode加速](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/accelerate_with_static_graph.html) ||" ] }, { @@ -20,7 +20,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "神经网络模型由神经网络层和Tensor操作构成,[mindspore.nn](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.nn.html)提供了常见神经网络层的实现。在MindSpore中,[Cell](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.Cell.html)类是构建所有网络的基类,也是网络的基本单元。一个神经网络模型可表示为一个`Cell`,由不同的子`Cell`构成。使用这样的嵌套结构,可以简单地使用面向对象编程的思维,对神经网络结构进行构建和管理。\n", + "神经网络模型由神经网络层和Tensor操作构成,[mindspore.nn](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.nn.html)提供了常见神经网络层的实现。在MindSpore中,[Cell](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.Cell.html)类是构建所有网络的基类,也是网络的基本单元。一个神经网络模型可表示为一个`Cell`,由不同的子`Cell`构成。使用这样的嵌套结构,可以简单地使用面向对象编程的思维,对神经网络结构进行构建和管理。\n", "\n", "下面我们将构建一个用于Mnist数据集分类的神经网络模型。" ] @@ -47,7 +47,7 @@ "\n", "当我们定义神经网络时,可以继承`nn.Cell`类,在`__init__`方法中进行子`Cell`的实例化和状态管理,在`construct`方法中实现Tensor操作。\n", "\n", - "> `construct`意为神经网络(计算图)构建,相关内容详见[Graph Mode加速](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/accelerate_with_static_graph.html)。" + "> `construct`意为神经网络(计算图)构建,相关内容详见[Graph Mode加速](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/accelerate_with_static_graph.html)。" ] }, { @@ -227,7 +227,7 @@ "source": [ "### nn.Flatten\n", "\n", - "实例化[nn.Flatten](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.Flatten.html)层,将28x28的2D张量转换为784大小的连续数组。" + "实例化[nn.Flatten](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.Flatten.html)层,将28x28的2D张量转换为784大小的连续数组。" ] }, { @@ -264,7 +264,7 @@ "source": [ "### nn.Dense\n", "\n", - "[nn.Dense](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.Dense.html)为全连接层,其使用权重和偏差对输入进行线性变换。" + "[nn.Dense](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.Dense.html)为全连接层,其使用权重和偏差对输入进行线性变换。" ] }, { @@ -299,7 +299,7 @@ "source": [ "### nn.ReLU\n", "\n", - "[nn.ReLU](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.ReLU.html)层为网络加入非线性激活函数,帮助神经网络学习各种复杂的特征。" + "[nn.ReLU](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.ReLU.html)层为网络加入非线性激活函数,帮助神经网络学习各种复杂的特征。" ] }, { @@ -360,7 +360,7 @@ "source": [ "### nn.SequentialCell\n", "\n", - "[nn.SequentialCell](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.SequentialCell.html)是一个有序的Cell容器。输入Tensor将按照定义的顺序通过所有Cell。我们可以使用`nn.SequentialCell`来快速组合构造一个神经网络模型。" + "[nn.SequentialCell](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.SequentialCell.html)是一个有序的Cell容器。输入Tensor将按照定义的顺序通过所有Cell。我们可以使用`nn.SequentialCell`来快速组合构造一个神经网络模型。" ] }, { @@ -401,7 +401,7 @@ "source": [ "### nn.Softmax\n", "\n", - "最后使用[nn.Softmax](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.Softmax.html)将神经网络最后一个全连接层返回的logits的值缩放到\\[0, 1\\]区间,表示每个类别的预测概率。`axis`指定的维度数值和为1。" + "最后使用[nn.Softmax](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.Softmax.html)将神经网络最后一个全连接层返回的logits的值缩放到\\[0, 
1\\]区间,表示每个类别的预测概率。`axis`指定的维度数值和为1。" ] }, { @@ -499,7 +499,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "更多内置神经网络层详见[mindspore.nn API](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.nn.html)。" + "更多内置神经网络层详见[mindspore.nn API](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.nn.html)。" ] } ], diff --git a/tutorials/source_zh_cn/beginner/quick_start.ipynb b/tutorials/source_zh_cn/beginner/quick_start.ipynb index d213d253b8..35cc8c9b0a 100644 --- a/tutorials/source_zh_cn/beginner/quick_start.ipynb +++ b/tutorials/source_zh_cn/beginner/quick_start.ipynb @@ -4,9 +4,9 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/beginner/mindspore_quick_start.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/beginner/mindspore_quick_start.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/beginner/quick_start.ipynb)\n", + "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/beginner/mindspore_quick_start.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/beginner/mindspore_quick_start.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/beginner/quick_start.ipynb)\n", "\n", - "[基本介绍](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/introduction.html) || **快速入门** || [张量 Tensor](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/tensor.html) || [数据加载与处理](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/dataset.html) || [网络构建](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/model.html) || [函数式自动微分](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/autograd.html) || [模型训练](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/train.html) || [保存与加载](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/save_load.html) || [Graph Mode加速](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/accelerate_with_static_graph.html) ||" + "[基本介绍](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/introduction.html) || **快速入门** || [张量 Tensor](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/tensor.html) || [数据加载与处理](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/dataset.html) || [网络构建](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/model.html) || [函数式自动微分](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/autograd.html) || [模型训练](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/train.html) || [保存与加载](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/save_load.html) || [Graph 
Mode加速](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/accelerate_with_static_graph.html) ||" ] }, { @@ -36,7 +36,7 @@ "source": [ "## 处理数据集\n", "\n", - "MindSpore提供基于Pipeline的[数据引擎](https://www.mindspore.cn/docs/zh-CN/master/features/data_engine.html),通过[数据集(Dataset)](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/dataset.html)实现高效的数据预处理。在本教程中,我们使用Mnist数据集,自动下载完成后,使用`mindspore.dataset`提供的数据变换进行预处理。\n", + "MindSpore提供基于Pipeline的[数据引擎](https://www.mindspore.cn/docs/zh-CN/br_base/features/data_engine.html),通过[数据集(Dataset)](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/dataset.html)实现高效的数据预处理。在本教程中,我们使用Mnist数据集,自动下载完成后,使用`mindspore.dataset`提供的数据变换进行预处理。\n", "\n", "> 本章节中的示例代码依赖`download`,可使用命令`pip install download`安装。如本文档以Notebook运行时,完成安装后需要重启kernel才能执行后续代码。" ] @@ -163,7 +163,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "可使用[create_tuple_iterator](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_tuple_iterator.html) 或[create_dict_iterator](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_dict_iterator.html)对数据集进行迭代访问,查看数据和标签的shape和datatype。" + "可使用[create_tuple_iterator](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_tuple_iterator.html) 或[create_dict_iterator](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_dict_iterator.html)对数据集进行迭代访问,查看数据和标签的shape和datatype。" ] }, { @@ -212,7 +212,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "更多细节详见[数据加载与处理](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/dataset.html)。" + "更多细节详见[数据加载与处理](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/dataset.html)。" ] }, { @@ -221,7 +221,7 @@ "source": [ "## 网络构建\n", "\n", - "`mindspore.nn`类是构建所有网络的基类,也是网络的基本单元。当用户需要自定义网络时,可以继承`nn.Cell`类,并重写`__init__`方法和`construct`方法。`__init__`包含所有网络层的定义,`construct`包含数据([Tensor](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/tensor.html))的变换过程。" + "`mindspore.nn`类是构建所有网络的基类,也是网络的基本单元。当用户需要自定义网络时,可以继承`nn.Cell`类,并重写`__init__`方法和`construct`方法。`__init__`包含所有网络层的定义,`construct`包含数据([Tensor](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/tensor.html))的变换过程。" ] }, { @@ -273,7 +273,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "更多细节详见[网络构建](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/model.html)。" + "更多细节详见[网络构建](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/model.html)。" ] }, { @@ -301,8 +301,8 @@ "MindSpore使用函数式自动微分机制,因此针对上述步骤需要实现:\n", "\n", "1. 定义正向计算函数。\n", - "2. 使用[value_and_grad](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.value_and_grad.html)通过函数变换获得梯度计算函数。\n", - "3. 定义训练函数,使用[set_train](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.set_train)设置为训练模式,执行正向计算、反向传播和参数优化。" + "2. 使用[value_and_grad](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.value_and_grad.html)通过函数变换获得梯度计算函数。\n", + "3. 
定义训练函数,使用[set_train](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.set_train)设置为训练模式,执行正向计算、反向传播和参数优化。" ] }, { @@ -446,7 +446,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "更多细节详见[模型训练](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/train.html)。" + "更多细节详见[模型训练](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/train.html)。" ] }, { @@ -556,7 +556,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "更多细节详见[保存与加载](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/save_load.html)。" + "更多细节详见[保存与加载](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/save_load.html)。" ] } ], diff --git a/tutorials/source_zh_cn/beginner/save_load.ipynb b/tutorials/source_zh_cn/beginner/save_load.ipynb index d31bb8eea8..2a765b8079 100644 --- a/tutorials/source_zh_cn/beginner/save_load.ipynb +++ b/tutorials/source_zh_cn/beginner/save_load.ipynb @@ -4,11 +4,11 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/beginner/mindspore_save_load.ipynb) \n", - "[![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/beginner/mindspore_save_load.py) \n", - "[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/beginner/save_load.ipynb)\n", + "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/beginner/mindspore_save_load.ipynb) \n", + "[![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/beginner/mindspore_save_load.py) \n", + "[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/beginner/save_load.ipynb)\n", "\n", - "[基本介绍](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/introduction.html) || [快速入门](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/quick_start.html) || [张量 Tensor](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/tensor.html) || [数据加载与处理](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/dataset.html) || [网络构建](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/model.html) || [函数式自动微分](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/autograd.html) || [模型训练](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/train.html) || **保存与加载** || [Graph Mode加速](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/accelerate_with_static_graph.html) ||" + "[基本介绍](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/introduction.html) || [快速入门](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/quick_start.html) || [张量 Tensor](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/tensor.html) || 
[数据加载与处理](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/dataset.html) || [网络构建](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/model.html) || [函数式自动微分](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/autograd.html) || [模型训练](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/train.html) || **保存与加载** || [Graph Mode加速](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/accelerate_with_static_graph.html) ||" ] }, { @@ -63,7 +63,7 @@ "source": [ "## 保存和加载模型权重\n", "\n", - "保存模型使用[mindspore.save_checkpoint](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.save_checkpoint.html)接口,传入网络和指定的保存路径:" + "保存模型使用[mindspore.save_checkpoint](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.save_checkpoint.html)接口,传入网络和指定的保存路径:" ] }, { @@ -162,7 +162,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "已有的MindIR模型可以方便地通过`load`接口加载,传入[mindspore.nn.GraphCell](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.GraphCell.html)即可进行推理。\n", + "已有的MindIR模型可以方便地通过`load`接口加载,传入[mindspore.nn.GraphCell](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.GraphCell.html)即可进行推理。\n", "\n", "> `nn.GraphCell`仅支持图模式。" ] @@ -198,14 +198,14 @@ "\n", "并不是所有的 Python 语法和数据类型都支持 MindIR 导出,若不在支持范围内,导出时会报错。\n", "\n", - "1. MindIR导出仅支持**STRICT级别的基础语法**,详细的支持范围,可参考[静态图语法支持](https://www.mindspore.cn/tutorials/zh-CN/master/compile/static_graph.html)。\n", + "1. MindIR导出仅支持**STRICT级别的基础语法**,详细的支持范围,可参考[静态图语法支持](https://www.mindspore.cn/tutorials/zh-CN/br_base/compile/static_graph.html)。\n", "\n", "2. 返回值的数据类型只支持:\n", "\n", " - Python 内置类型:`int`、`float`、`bool`、`str`、`tuple`、`list`。\n", - " - MindSpore 框架内置类型:[Tensor](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.Tensor.html)、[Parameter](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.Parameter.html)、[COOTensor](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.COOTensor.html)、[CSRTensor](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.CSRTensor.html)。\n", + " - MindSpore 框架内置类型:[Tensor](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.Tensor.html)、[Parameter](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.Parameter.html)、[COOTensor](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.COOTensor.html)、[CSRTensor](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.CSRTensor.html)。\n", "\n", - " 例如下面的程序,返回值类型是 [mindspore.dtype](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.dtype.html),不在支持范围内,MindIR 导出的时候就会报错。\n", + " 例如下面的程序,返回值类型是 [mindspore.dtype](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.dtype.html),不在支持范围内,MindIR 导出的时候就会报错。\n", "\n", " ```python\n", " import mindspore\n", @@ -217,7 +217,7 @@ "     return x.dtype\n", " ```\n", "\n", - "3. `nn.Cell`的`construct()`方法中,不支持使用 [mindspore.mint](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.mint.html) 包下的随机数生成接口,如`mint.rand`、`mint.randn`、`mint.randint`、`mint.randperm`。(建议改为使用 [mindspore.ops](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.ops.html) 包下的随机数生成接口)。\n", + "3. 
`nn.Cell`的`construct()`方法中,不支持使用 [mindspore.mint](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.mint.html) 包下的随机数生成接口,如`mint.rand`、`mint.randn`、`mint.randint`、`mint.randperm`。(建议改为使用 [mindspore.ops](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.ops.html) 包下的随机数生成接口)。\n", "\n", "4. `Parameter`对象只能定义在`nn.Cell`的`__init__()`方法中或者作为函数的输入参数,否则 MindIR 不支持导出该`Parameter`。例如下面的程序,有一个`Parameter`是全局变量,导出时会报错不支持。\n", "\n", diff --git a/tutorials/source_zh_cn/beginner/tensor.ipynb b/tutorials/source_zh_cn/beginner/tensor.ipynb index af801d8418..8831a18b8d 100644 --- a/tutorials/source_zh_cn/beginner/tensor.ipynb +++ b/tutorials/source_zh_cn/beginner/tensor.ipynb @@ -4,10 +4,10 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/beginner/mindspore_tensor.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/beginner/mindspore_tensor.py)\n", - " [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/beginner/tensor.ipynb)\n", + "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/beginner/mindspore_tensor.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/beginner/mindspore_tensor.py)\n", + " [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/beginner/tensor.ipynb)\n", "\n", - "[基本介绍](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/introduction.html) || [快速入门](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/quick_start.html) || **张量 Tensor** || [数据加载与处理](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/dataset.html) || [网络构建](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/model.html) || [函数式自动微分](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/autograd.html) || [模型训练](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/train.html) || [保存与加载](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/save_load.html) || [Graph Mode加速](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/accelerate_with_static_graph.html) ||" + "[基本介绍](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/introduction.html) || [快速入门](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/quick_start.html) || **张量 Tensor** || [数据加载与处理](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/dataset.html) || [网络构建](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/model.html) || [函数式自动微分](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/autograd.html) || [模型训练](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/train.html) || 
[保存与加载](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/save_load.html) || [Graph Mode加速](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/accelerate_with_static_graph.html) ||" ] }, { @@ -20,7 +20,7 @@ "\n", "张量(Tensor)是一个多线性函数,可用于表示矢量、标量和其他张量之间的线性关系。常见的线性关系包括内积、外积、线性映射以及笛卡儿积等。张量的坐标在 $n$ 维空间内,有  $n^{r}$ 个分量,每个分量都是坐标的函数。在坐标变换时,这些分量也依照某些规则作线性变换。$r$ 称为该张量的秩或阶(与矩阵的秩和阶无关)。\n", "\n", - "张量是一种特殊的数据结构,与数组和矩阵非常相似。张量([Tensor](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.Tensor.html))是MindSpore网络运算中的基本数据结构,本教程主要介绍张量的属性及用法。" + "张量是一种特殊的数据结构,与数组和矩阵非常相似。张量([Tensor](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.Tensor.html))是MindSpore网络运算中的基本数据结构,本教程主要介绍张量的属性及用法。" ] }, { @@ -103,11 +103,11 @@ "\n", " 当使用`init`初始化器对张量进行初始化时,支持传入的参数有`init`、`shape`、`dtype`。\n", "\n", - " - `init`: 支持传入[initializer](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.common.initializer.html)的子类。如:下方示例中的 [One()](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.common.initializer.html#mindspore.common.initializer.One) 和 [Normal()](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.common.initializer.html#mindspore.common.initializer.Normal)。\n", + " - `init`: 支持传入[initializer](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.common.initializer.html)的子类。如:下方示例中的 [One()](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.common.initializer.html#mindspore.common.initializer.One) 和 [Normal()](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.common.initializer.html#mindspore.common.initializer.Normal)。\n", "\n", " - `shape`: 支持传入 `list`、`tuple`、 `int`。\n", "\n", - " - `dtype`: 支持传入[mindspore.dtype](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.dtype.html#mindspore.dtype)。" + " - `dtype`: 支持传入[mindspore.dtype](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.dtype.html#mindspore.dtype)。" ] }, { @@ -317,7 +317,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "[concat](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.concat.html)将给定维度上的一系列张量连接起来。" + "[concat](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.concat.html)将给定维度上的一系列张量连接起来。" ] }, { @@ -351,7 +351,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "[stack](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.stack.html)则是从另一个维度上将两个张量合并起来。" + "[stack](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.stack.html)则是从另一个维度上将两个张量合并起来。" ] }, { @@ -392,7 +392,7 @@ "\n", "### Tensor转换为NumPy\n", "\n", - "可以使用 [Tensor.asnumpy()](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.asnumpy.html) 将Tensor变量转换为NumPy变量。" + "可以使用 [Tensor.asnumpy()](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.asnumpy.html) 将Tensor变量转换为NumPy变量。" ] }, { diff --git a/tutorials/source_zh_cn/beginner/train.ipynb b/tutorials/source_zh_cn/beginner/train.ipynb index 414a7284ed..9361e7adeb 100644 --- a/tutorials/source_zh_cn/beginner/train.ipynb +++ b/tutorials/source_zh_cn/beginner/train.ipynb @@ -8,9 +8,9 @@ } }, "source": [ - "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/beginner/mindspore_train.ipynb) 
[![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/beginner/mindspore_train.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/beginner/train.ipynb)\n", + "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/beginner/mindspore_train.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/beginner/mindspore_train.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/beginner/train.ipynb)\n", "\n", - "[基本介绍](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/introduction.html) || [快速入门](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/quick_start.html) || [张量 Tensor](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/tensor.html) || [数据加载与处理](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/dataset.html) || [网络构建](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/model.html) || [函数式自动微分](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/autograd.html) || **模型训练** || [保存与加载](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/save_load.html) || [Graph Mode加速](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/accelerate_with_static_graph.html) ||" + "[基本介绍](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/introduction.html) || [快速入门](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/quick_start.html) || [张量 Tensor](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/tensor.html) || [数据加载与处理](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/dataset.html) || [网络构建](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/model.html) || [函数式自动微分](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/autograd.html) || **模型训练** || [保存与加载](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/save_load.html) || [Graph Mode加速](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/accelerate_with_static_graph.html) ||" ] }, { @@ -43,7 +43,7 @@ "source": [ "## 构建数据集\n", "\n", - "首先从[数据加载与处理](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/dataset.html)加载代码,构建数据集。" + "首先从[数据加载与处理](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/dataset.html)加载代码,构建数据集。" ] }, { @@ -109,7 +109,7 @@ "source": [ "## 定义神经网络模型\n", "\n", - "从[网络构建](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/model.html)中加载代码,构建一个神经网络模型。" + "从[网络构建](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/model.html)中加载代码,构建一个神经网络模型。" ] }, { @@ -194,7 +194,7 @@ "\n", "损失函数(loss function)用于评估模型的预测值(logits)和目标值(targets)之间的误差。训练模型时,随机初始化的神经网络模型开始时会预测出错误的结果。损失函数会评估预测结果与目标值的相异程度,模型训练的目标即为降低损失函数求得的误差。\n", "\n", - 
"常见的损失函数包括用于回归任务的[mindspore.nn.MSELoss](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.MSELoss.html)(均方误差)和用于分类的[mindspore.nn.NLLLoss](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.NLLLoss.html)(负对数似然)等。[mindspore.nn.CrossEntropyLoss](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.CrossEntropyLoss.html) 结合了[mindspore.nn.LogSoftmax](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.LogSoftmax.html)和[mindspore.nn.NLLLoss](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.NLLLoss.html),可以对logits 进行归一化并计算预测误差。" + "常见的损失函数包括用于回归任务的[mindspore.nn.MSELoss](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.MSELoss.html)(均方误差)和用于分类的[mindspore.nn.NLLLoss](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.NLLLoss.html)(负对数似然)等。[mindspore.nn.CrossEntropyLoss](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.CrossEntropyLoss.html) 结合了[mindspore.nn.LogSoftmax](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.LogSoftmax.html)和[mindspore.nn.NLLLoss](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.NLLLoss.html),可以对logits 进行归一化并计算预测误差。" ] }, { @@ -279,7 +279,7 @@ } }, "source": [ - "使用函数式自动微分,需先定义正向函数`forward_fn`,再使用[value_and_grad](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.value_and_grad.html)获取微分函数`grad_fn`。然后,我们将微分函数和优化器的执行封装为`train_step`函数,最后循环迭代数据集进行训练。" + "使用函数式自动微分,需先定义正向函数`forward_fn`,再使用[value_and_grad](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.value_and_grad.html)获取微分函数`grad_fn`。然后,我们将微分函数和优化器的执行封装为`train_step`函数,最后循环迭代数据集进行训练。" ] }, { diff --git a/tutorials/source_zh_cn/compile/operators.md b/tutorials/source_zh_cn/compile/operators.md index c6baaf4b87..25e2ae31b9 100644 --- a/tutorials/source_zh_cn/compile/operators.md +++ b/tutorials/source_zh_cn/compile/operators.md @@ -1,6 +1,6 @@ # 图模式语法-运算符 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/compile/operators.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/compile/operators.md) 算术运算符和赋值运算符支持`Number`和`Tensor`运算,也支持不同`dtype`的`Tensor`运算。 diff --git a/tutorials/source_zh_cn/compile/python_builtin_functions.ipynb b/tutorials/source_zh_cn/compile/python_builtin_functions.ipynb index 69a7d28e82..b219254c71 100644 --- a/tutorials/source_zh_cn/compile/python_builtin_functions.ipynb +++ b/tutorials/source_zh_cn/compile/python_builtin_functions.ipynb @@ -7,7 +7,7 @@ "source": [ "# 图模式语法-python内置函数\n", "\n", - "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/compile/mindspore_python_builtin_functions.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/compile/mindspore_python_builtin_functions.py) 
[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/compile/python_builtin_functions.ipynb)\n", + "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/compile/mindspore_python_builtin_functions.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/compile/mindspore_python_builtin_functions.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/compile/python_builtin_functions.ipynb)\n", "\n", "当前静态图模式支持的Python内置函数包括:`int`、`float`、`bool`、`str`、`tuple`、`list`、`dict`、`getattr`、`hasattr`、`len`、`isinstance`、`all`、`any`、`round`、`max`、`min`、`sum`、`abs`、`map`、`zip`、`range`、`enumerate`、`super`、`pow`、`print`、`filter`、`type`。图模式下内置函数的使用方法与对应的Python内置函数类似。\n", "\n", @@ -476,7 +476,7 @@ "source": [ "在静态图模式下,对象的属性可能会和动态图模式下有区别。建议使用`default`输入,或者在使用`getattr`前先使用`hasattr`进行校验。\n", "\n", - "其中`getattr(x.asnumpy(), \"shape\", np.array([0, 1, 2, 3, 4]))`属于高阶用法,更多介绍可见[AST扩展语法(LAX级别)](https://www.mindspore.cn/tutorials/zh-CN/master/compile/static_graph.html#ast%E6%89%A9%E5%B1%95%E8%AF%AD%E6%B3%95lax%E7%BA%A7%E5%88%AB)章节。\n", + "其中`getattr(x.asnumpy(), \"shape\", np.array([0, 1, 2, 3, 4]))`属于高阶用法,更多介绍可见[AST扩展语法(LAX级别)](https://www.mindspore.cn/tutorials/zh-CN/br_base/compile/static_graph.html#ast%E6%89%A9%E5%B1%95%E8%AF%AD%E6%B3%95lax%E7%BA%A7%E5%88%AB)章节。\n", "\n", "## hasattr\n", "\n", @@ -540,7 +540,7 @@ "id": "d250cfe9", "metadata": {}, "source": [ - "其中`hasattr(Tensor(np.array([1, 2, 3, 4])).asnumpy(), \"__len__\")`属于高阶用法,更多介绍可见[AST扩展语法(LAX级别)](https://www.mindspore.cn/tutorials/zh-CN/master/compile/static_graph.html#ast%E6%89%A9%E5%B1%95%E8%AF%AD%E6%B3%95lax%E7%BA%A7%E5%88%AB)章节。\n", + "其中`hasattr(Tensor(np.array([1, 2, 3, 4])).asnumpy(), \"__len__\")`属于高阶用法,更多介绍可见[AST扩展语法(LAX级别)](https://www.mindspore.cn/tutorials/zh-CN/br_base/compile/static_graph.html#ast%E6%89%A9%E5%B1%95%E8%AF%AD%E6%B3%95lax%E7%BA%A7%E5%88%AB)章节。\n", "\n", "## len\n", "\n", @@ -611,7 +611,7 @@ "id": "e7cfebfe", "metadata": {}, "source": [ - "其中`len(w.asnumpy())`属于高阶用法,更多介绍可见[AST扩展语法(LAX级别)](https://www.mindspore.cn/tutorials/zh-CN/master/compile/static_graph.html#ast%E6%89%A9%E5%B1%95%E8%AF%AD%E6%B3%95lax%E7%BA%A7%E5%88%AB)章节。\n", + "其中`len(w.asnumpy())`属于高阶用法,更多介绍可见[AST扩展语法(LAX级别)](https://www.mindspore.cn/tutorials/zh-CN/br_base/compile/static_graph.html#ast%E6%89%A9%E5%B1%95%E8%AF%AD%E6%B3%95lax%E7%BA%A7%E5%88%AB)章节。\n", "\n", "## isinstance\n", "\n", @@ -676,7 +676,7 @@ "id": "abbdad59", "metadata": {}, "source": [ - "其中`isinstance(w.asnumpy(), np.ndarray)`属于高阶用法,更多介绍可见[AST扩展语法(LAX级别)](https://www.mindspore.cn/tutorials/zh-CN/master/compile/static_graph.html#ast%E6%89%A9%E5%B1%95%E8%AF%AD%E6%B3%95lax%E7%BA%A7%E5%88%AB)章节。\n", + "其中`isinstance(w.asnumpy(), np.ndarray)`属于高阶用法,更多介绍可见[AST扩展语法(LAX级别)](https://www.mindspore.cn/tutorials/zh-CN/br_base/compile/static_graph.html#ast%E6%89%A9%E5%B1%95%E8%AF%AD%E6%B3%95lax%E7%BA%A7%E5%88%AB)章节。\n", "\n", "## all\n", "\n", @@ -750,7 +750,7 @@ "id": "c584d3aa", 
"metadata": {}, "source": [ - "其中`all(x.asnumpy())`属于高阶用法,更多介绍可见[AST扩展语法(LAX级别)](https://www.mindspore.cn/tutorials/zh-CN/master/compile/static_graph.html#ast%E6%89%A9%E5%B1%95%E8%AF%AD%E6%B3%95lax%E7%BA%A7%E5%88%AB)章节。\n", + "其中`all(x.asnumpy())`属于高阶用法,更多介绍可见[AST扩展语法(LAX级别)](https://www.mindspore.cn/tutorials/zh-CN/br_base/compile/static_graph.html#ast%E6%89%A9%E5%B1%95%E8%AF%AD%E6%B3%95lax%E7%BA%A7%E5%88%AB)章节。\n", "\n", "## any\n", "\n", @@ -1140,7 +1140,7 @@ "id": "bd2880eb", "metadata": {}, "source": [ - "其中`abs(Tensor([-1, 2]).asnumpy())`属于高阶用法,更多介绍可见[AST扩展语法(LAX级别)](https://www.mindspore.cn/tutorials/zh-CN/master/compile/static_graph.html#ast%E6%89%A9%E5%B1%95%E8%AF%AD%E6%B3%95lax%E7%BA%A7%E5%88%AB)章节。\n", + "其中`abs(Tensor([-1, 2]).asnumpy())`属于高阶用法,更多介绍可见[AST扩展语法(LAX级别)](https://www.mindspore.cn/tutorials/zh-CN/br_base/compile/static_graph.html#ast%E6%89%A9%E5%B1%95%E8%AF%AD%E6%B3%95lax%E7%BA%A7%E5%88%AB)章节。\n", "\n", "## map\n", "\n", diff --git a/tutorials/source_zh_cn/compile/statements.ipynb b/tutorials/source_zh_cn/compile/statements.ipynb index ee1f0b6a21..77c93e389c 100644 --- a/tutorials/source_zh_cn/compile/statements.ipynb +++ b/tutorials/source_zh_cn/compile/statements.ipynb @@ -7,7 +7,7 @@ "source": [ "# 图模式语法-python语句\n", "\n", - "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/compile/mindspore_statements.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/compile/mindspore_statements.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/compile/statements.ipynb)\n", + "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/compile/mindspore_statements.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/compile/mindspore_statements.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/compile/statements.ipynb)\n", "\n", "## 简单语句\n", "\n", @@ -997,7 +997,7 @@ "\n", "在图模式下,有限制地支持`with`语句。`with`语句要求对象必须有两个魔术方法:`__enter__()`和`__exit__()`。\n", "\n", - "值得注意的是,with语句中使用的类需要有装饰器@ms.jit_class修饰或者继承于nn.Cell,更多介绍可见[使用jit_class](https://www.mindspore.cn/tutorials/zh-CN/master/compile/static_graph_expert_programming.html#使用jit-class)。\n", + "值得注意的是,with语句中使用的类需要有装饰器@ms.jit_class修饰或者继承于nn.Cell,更多介绍可见[使用jit_class](https://www.mindspore.cn/tutorials/zh-CN/br_base/compile/static_graph_expert_programming.html#使用jit-class)。\n", "\n", "示例如下:" ] diff --git a/tutorials/source_zh_cn/compile/static_graph.md b/tutorials/source_zh_cn/compile/static_graph.md index f2200e4aba..8cd764b92c 100644 --- a/tutorials/source_zh_cn/compile/static_graph.md +++ b/tutorials/source_zh_cn/compile/static_graph.md @@ -1,6 +1,6 @@ 
# 图模式编程介绍 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/compile/static_graph.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/compile/static_graph.md) ## 概述 @@ -19,9 +19,9 @@ MindSpore的静态图执行过程实际包含两步,对应静态图的Define `model(inputs) = model.compile(inputs) + model.construct(inputs)`,其中`model`为实例化的Cell对象。 -即时编译可以使用 [JIT接口](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.jit.html) +即时编译可以使用 [JIT接口](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.jit.html) ,或者通过设置`ms.set_context(mode=ms.GRAPH_MODE)`进入Graph模式,并在`Cell`类的`construct`函数中编写执行代码,此时`construct`函数的代码将会被编译成静态计算图。`Cell`定义详见[Cell -API文档](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.Cell.html)。 +API文档](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.Cell.html)。 由于语法解析的限制,当前在编译构图时,支持的数据类型、语法以及相关操作并没有完全与Python语法保持一致,部分使用受限。借鉴传统JIT编译的思路,从图模式的角度考虑动静图的统一,扩展图模式的语法能力,使得静态图提供接近动态图的语法使用体验,从而实现动静统一。为了便于用户选择是否扩展静态图语法,提供了JIT语法支持级别选项`jit_syntax_level`,其值必须在\[STRICT,LAX\]范围内,选择`STRICT`则认为使用基础语法,不扩展静态图语法。默认值为`LAX`。全部级别都支持所有后端。 @@ -191,7 +191,7 @@ API文档](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn 支持在网络里定义`Number`,即支持语法:`y = 1`、`y = 1.2`、`y = True`。 当数据为常量时,编译期可以获取到数值,在网络中可以支持强制类型转换`Number`的语法:`y = int(x)`、`y = float(x)`、`y = bool(x)`。 -当数据为变量时,即需要在执行期才可以获取到数值,也支持使用int()、float()、bool()等内置函数[Python内置函数](https://www.mindspore.cn/tutorials/zh-CN/master/compile/python_builtin_functions.html)进行数据类型转换。例如: +当数据为变量时,即需要在执行期才可以获取到数值,也支持使用int()、float()、bool()等内置函数[Python内置函数](https://www.mindspore.cn/tutorials/zh-CN/br_base/compile/python_builtin_functions.html)进行数据类型转换。例如: ``` python import mindspore @@ -826,9 +826,9 @@ res: ('H', 'Spore', 'Hello!MindSpore', 'MindSporeMindSpore', True, 'My name is M ##### Tensor Tensor的属性与接口详见[Tensor -API文档](https://mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.Tensor.html#mindspore-tensor)。 +API文档](https://mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.Tensor.html#mindspore-tensor)。 -支持在静态图模式下创建和使用Tensor。创建方式有使用[tensor函数接口](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.tensor.html#mindspore.tensor)和使用`Tensor`类接口。推荐使用tensor函数接口,用户可以使用指定所需要的dtype类型。代码用例如下。 +支持在静态图模式下创建和使用Tensor。创建方式有使用[tensor函数接口](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.tensor.html#mindspore.tensor)和使用`Tensor`类接口。推荐使用tensor函数接口,用户可以使用指定所需要的dtype类型。代码用例如下。 ``` python import mindspore @@ -890,7 +890,7 @@ ret.shape:(3, 4, 1, 6) 当前不支持在网络调用`Primitive`及其子类相关属性和接口。 当前已定义的`Primitive`详见[Primitive -API文档](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Primitive.html#mindspore.ops.Primitive)。 +API文档](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Primitive.html#mindspore.ops.Primitive)。 ##### Cell @@ -901,18 +901,18 @@ API文档](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.o 当前不支持在网络调用`Cell`及其子类相关属性和接口,除非是在`Cell`自己的`construct`中通过`self`调用。 `Cell`定义详见[Cell -API文档](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.Cell.html)。 +API文档](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.Cell.html)。 ##### Parameter `Parameter`是变量张量,代表在训练网络时,需要被更新的参数。 
`Parameter`的定义和使用详见[Parameter -API文档](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.Parameter.html#mindspore.Parameter)。 +API文档](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.Parameter.html#mindspore.Parameter)。 ### 运算符 -算术运算符和赋值运算符支持`Number`和`Tensor`运算,也支持不同`dtype`的`Tensor`运算。详见[运算符](https://www.mindspore.cn/tutorials/zh-CN/master/compile/operators.html)。 +算术运算符和赋值运算符支持`Number`和`Tensor`运算,也支持不同`dtype`的`Tensor`运算。详见[运算符](https://www.mindspore.cn/tutorials/zh-CN/br_base/compile/operators.html)。 ### 原型 @@ -1005,11 +1005,11 @@ ret:[[3. 3. 3. 3.]] ### 语句 -当前静态图模式支持部分Python语句,包括raise语句、assert语句、pass语句、return语句、break语句、continue语句、if语句、for语句、while语句、with语句、列表生成式、生成器表达式、函数定义语句等,详见[Python语句](https://www.mindspore.cn/tutorials/zh-CN/master/compile/statements.html)。 +当前静态图模式支持部分Python语句,包括raise语句、assert语句、pass语句、return语句、break语句、continue语句、if语句、for语句、while语句、with语句、列表生成式、生成器表达式、函数定义语句等,详见[Python语句](https://www.mindspore.cn/tutorials/zh-CN/br_base/compile/statements.html)。 ### Python内置函数 -当前静态图模式支持部分Python内置函数,其使用方法与对应的Python内置函数类似,详见[Python内置函数](https://www.mindspore.cn/tutorials/zh-CN/master/compile/python_builtin_functions.html)。 +当前静态图模式支持部分Python内置函数,其使用方法与对应的Python内置函数类似,详见[Python内置函数](https://www.mindspore.cn/tutorials/zh-CN/br_base/compile/python_builtin_functions.html)。 ### 网络定义 @@ -1260,7 +1260,7 @@ in-place操作是指直接修改输入张量的内容,而不创建新的张量 - Tensor索引场景 - 开启`MS_DEV_TENSOR_INDEX_BOOST`使能后,将使用view算子和in-place算子实现Tensor索引功能,提升索引操作的执行效率,具体描述请参考[环境变量](https://www.mindspore.cn/docs/zh-CN/master/api_python/env_var_list.html#%E5%9B%BE%E7%BC%96%E8%AF%91%E6%89%A7%E8%A1%8C)。 + 开启`MS_DEV_TENSOR_INDEX_BOOST`使能后,将使用view算子和in-place算子实现Tensor索引功能,提升索引操作的执行效率,具体描述请参考[环境变量](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/env_var_list.html#%E5%9B%BE%E7%BC%96%E8%AF%91%E6%89%A7%E8%A1%8C)。 支持`Ascend`设备,使用`mindspore.jit`进行编译时,`jit_level=00`和`01`均支持。 @@ -1385,7 +1385,7 @@ in-place操作是指直接修改输入张量的内容,而不创建新的张量 ## 基础语法的语法约束 -图模式下的执行图是从源码转换而来,并不是所有的Python语法都能支持。下面介绍在基础语法下存在的一些语法约束。更多网络编译问题可见[网络编译](https://www.mindspore.cn/docs/zh-CN/master/faq/network_compilation.html)。 +图模式下的执行图是从源码转换而来,并不是所有的Python语法都能支持。下面介绍在基础语法下存在的一些语法约束。更多网络编译问题可见[网络编译](https://www.mindspore.cn/docs/zh-CN/br_base/faq/network_compilation.html)。 1. 当`construct`函数里,使用未定义的类成员时,将抛出`AttributeError`异常。示例如下: @@ -1468,7 +1468,7 @@ in-place操作是指直接修改输入张量的内容,而不创建新的张量 4. 对标Python内置数据类型,除去当前图模式下支持的[Python内置数据类型](#python内置数据类型),复数`complex`和集合`set`类型是不支持的。列表`list`和字典`dictionary`的一些高阶用法在基础语法场景下是不支持的,需要在JIT语法支持级别选项`jit_syntax_level`为`LAX`时才支持。 -5. 对标Python的内置函数,在基础语法场景下,除去当前图模式下支持的[Python内置函数](https://www.mindspore.cn/tutorials/zh-CN/master/compile/python_builtin_functions.html),仍存在部分内置函数在图模式下是不支持的,例如:basestring、bin、bytearray、callable、chr、cmp、compile、 +5. 对标Python的内置函数,在基础语法场景下,除去当前图模式下支持的[Python内置函数](https://www.mindspore.cn/tutorials/zh-CN/br_base/compile/python_builtin_functions.html),仍存在部分内置函数在图模式下是不支持的,例如:basestring、bin、bytearray、callable、chr、cmp、compile、 delattr、dir、divmod、eval、execfile、file、frozenset、hash、hex、id、input、issubclass、iter、locals、long、memoryview、next、object、oct、open、ord、property、raw_input、reduce、reload、repr、reverse、set、slice、sorted、unichr、unicode、vars、xrange、\_\_import\_\_。 6. Python提供了很多第三方库,通常需要通过import语句调用。在图模式下JIT语法支持级别为STRICT时,不能直接使用第三方库。如果需要在图模式下使用第三方库的数据类型或者调用第三方库的方法,需要在JIT语法支持级别选项`jit_syntax_level`为`LAX`时才支持。 @@ -1512,9 +1512,9 @@ in-place操作是指直接修改输入张量的内容,而不创建新的张量 - 第三方库 1. Python内置模块和Python标准库。例如`os`、`sys`、`math`、`time`等模块。 - 2. 
第三方代码库。路径在Python安装目录的`site-packages`目录下,需要先安装后导入,例如`NumPy`、`SciPy`等。需要注意的是,`mindyolo`、`mindflow`等MindSpore套件不被视作第三方库,具体列表可以参考[parser](https://gitee.com/mindspore/mindspore/blob/master/mindspore/python/mindspore/_extends/parse/parser.py)文件的 + 2. 第三方代码库。路径在Python安装目录的`site-packages`目录下,需要先安装后导入,例如`NumPy`、`SciPy`等。需要注意的是,`mindyolo`、`mindflow`等MindSpore套件不被视作第三方库,具体列表可以参考[parser](https://gitee.com/mindspore/mindspore/blob/br_base/mindspore/python/mindspore/_extends/parse/parser.py)文件的 `_modules_from_mindspore` 列表。 - 3. 通过环境变量`MS_JIT_IGNORE_MODULES`指定的模块。与之相对的有环境变量`MS_JIT_MODULES`,具体使用方法请参考[环境变量](https://www.mindspore.cn/docs/zh-CN/master/api_python/env_var_list.html)。 + 3. 通过环境变量`MS_JIT_IGNORE_MODULES`指定的模块。与之相对的有环境变量`MS_JIT_MODULES`,具体使用方法请参考[环境变量](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/env_var_list.html)。 - 支持第三方库的数据类型,允许调用和返回第三方库的对象。 @@ -1660,7 +1660,7 @@ assert out == 100 \'\*\',\'/\',\'//\',\'%\',\'\*\*\',\'\<\<\',\'\>\>\',\'&\',\'\|\',\'\^\', \'not\', \'==\', \'!=\', \'\<\', \'\>\', \'\<=\', \'\>=\', \'in\', \'not in\', -\'y=x\[0\]\'\]。图模式重载的运算符详见[运算符](https://www.mindspore.cn/tutorials/zh-CN/master/compile/operators.html)。列表中的运算符在输入图模式中不支持的输入类型时将使用扩展静态图语法支持,并使输出结果与动态图模式下的输出结果一致。 +\'y=x\[0\]\'\]。图模式重载的运算符详见[运算符](https://www.mindspore.cn/tutorials/zh-CN/br_base/compile/operators.html)。列表中的运算符在输入图模式中不支持的输入类型时将使用扩展静态图语法支持,并使输出结果与动态图模式下的输出结果一致。 代码用例如下。 @@ -1924,7 +1924,7 @@ x: 扩展内置函数的支持范围。Python内置函数完善支持更多输入类型,例如第三方库数据类型。 -例如下面的例子,`x.asnumpy()`和`np.ndarray`均是扩展支持的类型。更多内置函数的支持情况可见[Python内置函数](https://www.mindspore.cn/tutorials/zh-CN/master/compile/python_builtin_functions.html)章节。 +例如下面的例子,`x.asnumpy()`和`np.ndarray`均是扩展支持的类型。更多内置函数的支持情况可见[Python内置函数](https://www.mindspore.cn/tutorials/zh-CN/br_base/compile/python_builtin_functions.html)章节。 ``` python import numpy as np @@ -2140,7 +2140,7 @@ assert out == 2 对于运行时的扩展支持的语法,会产生一些无法被类型推导出的节点,比如动态创建Tensor等。这种类型称为`Any`类型。因为该类型无法在编译时推导出正确的类型,所以这种`Any`将会以一种默认最大精度`float64`进行运算,防止其精度丢失。为了能更好地优化相关性能,需要减少`Any`类型数据的产生。当用户可以明确知道当前通过扩展支持的语句会产生具体类型的时候,我们推荐使用`Annotation @jit.typing:`的方式进行指定对应Python语句类型,从而确定解释节点的类型避免`Any`类型的生成。 -例如,[Tensor](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.Tensor.html#mindspore.Tensor)类和[tensor](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.tensor.html#mindspore.tensor)接口的区别就在于在`tensor`接口内部运用了Annotation +例如,[Tensor](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.Tensor.html#mindspore.Tensor)类和[tensor](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.tensor.html#mindspore.tensor)接口的区别就在于在`tensor`接口内部运用了Annotation Type机制。当`tensor`函数的`dtype`确定时,函数内部会利用`Annotation`指定输出类型从而避免`Any`类型的产生。`Annotation Type`的使用只需要在对应Python语句上面或者后面加上注释 `# @jit.typing: () -> tensor_type[float32]` 即可,其中 `->` 后面的 `tensor_type[float32]` 指示了被注释的语句输出类型。 @@ -2234,7 +2234,7 @@ ret: Tensor(shape=[3], dtype=Int64, value= [2, 3, 4]) net.attr: Tensor(shape=[3], dtype=Int64, value= [2, 3, 4]) ``` -3. 基于字节码构图时,变量场景的控制流无法入图。有关变量的相关介绍请见[变量产生场景](https://www.mindspore.cn/tutorials/zh-CN/master/compile/static_graph.html#%E5%8F%98%E9%87%8F%E4%BA%A7%E7%94%9F%E5%9C%BA%E6%99%AF)。示例如下: +3. 
基于字节码构图时,变量场景的控制流无法入图。有关变量的相关介绍请见[变量产生场景](https://www.mindspore.cn/tutorials/zh-CN/br_base/compile/static_graph.html#%E5%8F%98%E9%87%8F%E4%BA%A7%E7%94%9F%E5%9C%BA%E6%99%AF)。示例如下: ``` python import mindspore diff --git a/tutorials/source_zh_cn/compile/static_graph_expert_programming.ipynb b/tutorials/source_zh_cn/compile/static_graph_expert_programming.ipynb index b6ea677d43..7d84c7c5a1 100644 --- a/tutorials/source_zh_cn/compile/static_graph_expert_programming.ipynb +++ b/tutorials/source_zh_cn/compile/static_graph_expert_programming.ipynb @@ -7,11 +7,11 @@ "source": [ "# 图模式-编程技巧\n", "\n", - "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/compile/mindspore_static_graph_expert_programming.ipynb) \n", - "[![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/compile/mindspore_static_graph_expert_programming.py) \n", - "[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/compile/static_graph_expert_programming.ipynb)\n", + "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/compile/mindspore_static_graph_expert_programming.ipynb) \n", + "[![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/compile/mindspore_static_graph_expert_programming.py) \n", + "[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/compile/static_graph_expert_programming.ipynb)\n", "\n", - "本章介绍常用的静态图优化的高级编程技巧。这些技巧能够有效地提高静态图的编译效率、执行效率,并提升程序的稳定性。有关静态图编译的基础介绍,请见[Graph Mode加速](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/accelerate_with_static_graph.html)。\n", + "本章介绍常用的静态图优化的高级编程技巧。这些技巧能够有效地提高静态图的编译效率、执行效率,并提升程序的稳定性。有关静态图编译的基础介绍,请见[Graph Mode加速](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/accelerate_with_static_graph.html)。\n", "\n", "## 如何优化编译性能\n", "\n", @@ -248,7 +248,7 @@ "\n", "使用场景:使用HyperMap替换for循环来优化编译性能。\n", "\n", - "`HyperMap`是一个特殊的类,构造对象时需要传入映射函数f,调用对象时需要传入f的n个参数序列。更多使用方法见:[HyperMap](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.HyperMap.html)。映射函数f必须是`MultitypeFuncGraph`类型, 可参考[MultitypeFuncGraph](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.MultitypeFuncGraph.html)。在使用for循环批量处理列表元素时,可以通过`HyperMap`等价替换来优化网络编译性能。" + "`HyperMap`是一个特殊的类,构造对象时需要传入映射函数f,调用对象时需要传入f的n个参数序列。更多使用方法见:[HyperMap](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.HyperMap.html)。映射函数f必须是`MultitypeFuncGraph`类型, 可参考[MultitypeFuncGraph](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.MultitypeFuncGraph.html)。在使用for循环批量处理列表元素时,可以通过`HyperMap`等价替换来优化网络编译性能。" ] }, { @@ -262,9 +262,9 @@ "\n", 
"编译缓存本质上是存储网络模型的编译中间过程文件。当网络模型不变时,生产的编译中间过程文件也是一样的,因此可以复用上一次生成的中间过程文件。\n", "\n", - "通过设置环境变量[MS_COMPILER_CACHE_ENABLE](https://www.mindspore.cn/docs/zh-CN/master/api_python/env_var_list.html?highlight=MS_COMPILER_CACHE_ENABLE),可以指定是否保存和加载编译缓存。\n", + "通过设置环境变量[MS_COMPILER_CACHE_ENABLE](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/env_var_list.html?highlight=MS_COMPILER_CACHE_ENABLE),可以指定是否保存和加载编译缓存。\n", "\n", - "通过设置环境变量[MS_COMPILER_CACHE_PATH](https://www.mindspore.cn/docs/zh-CN/master/api_python/env_var_list.html?highlight=MS_COMPILER_CACHE_PATH),可以指定MindSpore编译缓存目录,用于存储图和算子编译过程生成的缓存文件。\n", + "通过设置环境变量[MS_COMPILER_CACHE_PATH](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/env_var_list.html?highlight=MS_COMPILER_CACHE_PATH),可以指定MindSpore编译缓存目录,用于存储图和算子编译过程生成的缓存文件。\n", "\n", "以下为通过使能编译缓存来优化编译性能的代码示例:" ] @@ -418,7 +418,7 @@ "\n", "- 自定义类\n", "\n", - " 定义自定义类后,可以对类进行实例化、调用类对象的属性和方法,请参考[自定义类的使用](https://www.mindspore.cn/tutorials/zh-CN/master/compile/static_graph.html#支持自定义类的使用)。相比于`Cell`的类定义,自定义类更贴近用户调用Python类的使用习惯。自定义类在静态图模式下的实现方式与`Cell`不同,例如,调用自定义类对象的函数方法时,其函数方法中的代码不会被编译成静态计算图,而是通过Python解释器进行解释执行。\n", + " 定义自定义类后,可以对类进行实例化、调用类对象的属性和方法,请参考[自定义类的使用](https://www.mindspore.cn/tutorials/zh-CN/br_base/compile/static_graph.html#支持自定义类的使用)。相比于`Cell`的类定义,自定义类更贴近用户调用Python类的使用习惯。自定义类在静态图模式下的实现方式与`Cell`不同,例如,调用自定义类对象的函数方法时,其函数方法中的代码不会被编译成静态计算图,而是通过Python解释器进行解释执行。\n", "\n", "- `@jit_class`修饰的类\n", "\n", diff --git a/tutorials/source_zh_cn/custom_program/fusion_pass.md b/tutorials/source_zh_cn/custom_program/fusion_pass.md index 8d6ea1ac7c..baf0521ebd 100644 --- a/tutorials/source_zh_cn/custom_program/fusion_pass.md +++ b/tutorials/source_zh_cn/custom_program/fusion_pass.md @@ -1,6 +1,6 @@ # 自定义融合 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/custom_program/fusion_pass.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/custom_program/fusion_pass.md) ## 概述 diff --git a/tutorials/source_zh_cn/custom_program/hook_program.ipynb b/tutorials/source_zh_cn/custom_program/hook_program.ipynb index f2d3bcaf8f..1a33d6d104 100644 --- a/tutorials/source_zh_cn/custom_program/hook_program.ipynb +++ b/tutorials/source_zh_cn/custom_program/hook_program.ipynb @@ -6,7 +6,7 @@ "source": [ "# Hook编程\n", "\n", - "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/custom_program/mindspore_hook_program.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/custom_program/mindspore_hook_program.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/custom_program/hook_program.ipynb)" + 
"[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/custom_program/mindspore_hook_program.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/custom_program/mindspore_hook_program.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/custom_program/hook_program.ipynb)" ] }, { @@ -15,7 +15,7 @@ "source": [ "调试深度学习网络是每一个深度学习领域的从业者需要面对且投入精力较大的工作。由于深度学习网络隐藏了中间层算子的输入、输出数据以及反向梯度,只提供网络输入数据(特征量、权重)的梯度,导致无法准确地感知中间层算子的数据变化,从而降低了调试效率。为了方便用户准确、快速地对深度学习网络进行调试,MindSpore在动态图模式下设计了Hook功能,**使用Hook功能可以捕获中间层算子的输入、输出数据以及反向梯度**。\n", "\n", - "目前,动态图模式下提供了五种形式的Hook功能,分别是:[HookBackward](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.HookBackward.html)算子和在Cell对象上进行注册的register_forward_pre_hook、register_forward_hook、register_backward_pre_hook、register_backward_hook功能。\n", + "目前,动态图模式下提供了五种形式的Hook功能,分别是:[HookBackward](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.HookBackward.html)算子和在Cell对象上进行注册的register_forward_pre_hook、register_forward_hook、register_backward_pre_hook、register_backward_hook功能。\n", "\n", "## HookBackward算子\n", "\n", @@ -71,7 +71,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "更多HookBackward算子的说明可以参考[API文档](https://mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.HookBackward.html)。\n", + "更多HookBackward算子的说明可以参考[API文档](https://mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.HookBackward.html)。\n", "\n", "## Cell对象的register_forward_pre_hook功能\n", "\n", @@ -223,7 +223,7 @@ "source": [ "为了避免脚本在切换到图模式时运行失败,不建议在Cell对象的 `construct` 函数中调用 `register_forward_pre_hook` 函数和 `handle` 对象的 `remove()` 函数。在动态图模式下,如果在Cell对象的 `construct` 函数中调用 `register_forward_pre_hook` 函数,那么Cell对象每次运行都将注册一个新的Hook函数。\n", "\n", - "更多关于Cell对象的 `register_forward_pre_hook` 功能的说明可以参考[API文档](https://mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.register_forward_pre_hook)。\n", + "更多关于Cell对象的 `register_forward_pre_hook` 功能的说明可以参考[API文档](https://mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.register_forward_pre_hook)。\n", "\n", "## Cell对象的register_forward_hook功能\n", "\n", @@ -317,7 +317,7 @@ "用户如果在Hook函数中直接返回新创建的数据,而不是将原始的输出数据经过计算后,将得到的新输出数据返回,那么梯度的反向传播将会在该Cell对象上截止。该现象可以参考`register_forward_pre_hook`函数的用例说明。\n", "为了避免脚本在切换到图模式时运行失败,不建议在Cell对象的`construct`函数中调用`register_forward_hook`函数和`handle`对象的`remove()`函数。在动态图模式下,如果在Cell对象的`construct`函数中调用`register_forward_hook`函数,那么Cell对象每次运行都将注册一个新的Hook函数。\n", "\n", - "更多关于Cell对象的`register_forward_hook`功能的说明可以参考[API文档](https://mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.register_forward_hook)。\n", + "更多关于Cell对象的`register_forward_hook`功能的说明可以参考[API文档](https://mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.register_forward_hook)。\n", "\n", "## Cell对象的register_backward_pre_hook功能\n", "\n", @@ -410,7 +410,7 @@ "source": [ "为了避免脚本在切换到图模式时运行失败,不建议在Cell对象的 `construct` 函数中调用 `register_backward_pre_hook` 函数和 `handle` 对象的 `remove()` 函数。在PyNative模式下,如果在Cell对象的 `construct` 
函数中调用 `register_backward_pre_hook` 函数,那么Cell对象每次运行都将注册一个新的Hook函数。\n", "\n", - "更多关于Cell对象的`register_backward_pre_hook`功能的说明可以参考[API文档](https://mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.register_backward_pre_hook)。\n", + "更多关于Cell对象的`register_backward_pre_hook`功能的说明可以参考[API文档](https://mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.register_backward_pre_hook)。\n", "\n", "## Cell对象的register_backward_hook功能\n", "\n", @@ -514,7 +514,7 @@ "source": [ "为了避免脚本在切换到图模式时运行失败,不建议在Cell对象的 `construct` 函数中调用 `register_backward_hook` 函数和 `handle` 对象的 `remove()` 函数。在PyNative模式下,如果在Cell对象的 `construct` 函数中调用 `register_backward_hook` 函数,那么Cell对象每次运行都将注册一个新的Hook函数。\n", "\n", - "更多关于Cell对象的 `register_backward_hook` 功能的说明可以参考[API文档](https://mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.register_backward_hook)。\n", + "更多关于Cell对象的 `register_backward_hook` 功能的说明可以参考[API文档](https://mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.register_backward_hook)。\n", "\n", "## Cell对象使用多个hook功能\n", "\n", diff --git a/tutorials/source_zh_cn/custom_program/op_custom.rst b/tutorials/source_zh_cn/custom_program/op_custom.rst index f30ac98b4c..dd34dddad2 100644 --- a/tutorials/source_zh_cn/custom_program/op_custom.rst +++ b/tutorials/source_zh_cn/custom_program/op_custom.rst @@ -1,8 +1,8 @@ 自定义算子 ============ -.. image:: https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg - :target: https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/custom_program/op_custom.rst +.. image:: https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg + :target: https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/custom_program/op_custom.rst :alt: 查看源文件 .. toctree:: @@ -18,7 +18,7 @@ operation/op_customopbuilder_atb operation/op_customopbuilder_asdsip -当开发网络遇到内置算子不足以满足需求时,你可以利用MindSpore的自定义算子功能接入你的算子。当前MindSpore提供了两种方式接入自定义算子,分别是 `基于Custom原语接入 `_ 和 `基于CustomOpBuilder接入 `_ 。 +当开发网络遇到内置算子不足以满足需求时,你可以利用MindSpore的自定义算子功能接入你的算子。当前MindSpore提供了两种方式接入自定义算子,分别是 `基于Custom原语接入 `_ 和 `基于CustomOpBuilder接入 `_ 。 .. 
list-table:: @@ -26,8 +26,8 @@ :header-rows: 1 * - 接口比较 - - `Custom原语 `_ - - `CustomOpBuilder `_ + - `Custom原语 `_ + - `CustomOpBuilder `_ * - 支持模式 - 静态图(Graph Mode)和动态图(PyNative Mode) - 动态图(PyNative Mode) diff --git a/tutorials/source_zh_cn/custom_program/operation/cpp_api_for_custom_ops.md b/tutorials/source_zh_cn/custom_program/operation/cpp_api_for_custom_ops.md index 4c77055275..930d1405af 100644 --- a/tutorials/source_zh_cn/custom_program/operation/cpp_api_for_custom_ops.md +++ b/tutorials/source_zh_cn/custom_program/operation/cpp_api_for_custom_ops.md @@ -1,6 +1,6 @@ # 自定义算子的C++接口说明 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/custom_program/operation/cpp_api_for_custom_ops.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/custom_program/operation/cpp_api_for_custom_ops.md) ## 概述 @@ -18,7 +18,7 @@ MindSpore自定义算子的C++接口分为两类: ### enum TypeId -`TypeId` 枚举类型定义在[type_id.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/core/include/mindapi/base/type_id.h)头文件中,定义了 MindSpore 中支持的张量数据类型,包括布尔值、整数类型、浮点数类型、复数类型等。 +`TypeId` 枚举类型定义在[type_id.h](https://gitee.com/mindspore/mindspore/blob/br_base/mindspore/core/include/mindapi/base/type_id.h)头文件中,定义了 MindSpore 中支持的张量数据类型,包括布尔值、整数类型、浮点数类型、复数类型等。 此接口也被包含在 `namespace ms`中,通过`ms::TypeId`也可以访问。 @@ -51,7 +51,7 @@ kNumberTypeEnd, // Number 类型结束值 ### class Tensor -张量类定义在[tensor.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/ms_extension/common/tensor.h)头文件中,表示 MindSpore 的张量对象,提供操作和查询张量属性的方法。 +张量类定义在[tensor.h](https://gitee.com/mindspore/mindspore/blob/br_base/mindspore/ccsrc/ms_extension/common/tensor.h)头文件中,表示 MindSpore 的张量对象,提供操作和查询张量属性的方法。 #### 构造函数 @@ -302,7 +302,7 @@ kNumberTypeEnd, // Number 类型结束值 ### function tensor -构造常量张量的工厂方法,定义在[tensor_utils.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/ms_extension/common/tensor_utils.h)头文件中。 +构造常量张量的工厂方法,定义在[tensor_utils.h](https://gitee.com/mindspore/mindspore/blob/br_base/mindspore/ccsrc/ms_extension/common/tensor_utils.h)头文件中。 ```cpp Tensor tensor(int64_t value, TypeId dtype = TypeId::kNumberTypeInt64) @@ -319,7 +319,7 @@ Tensor tensor(const std::vector &value, TypeId dtype = TypeId::kNumberTy ### function ones -构造全1张量的工厂方法,定义在[tensor_utils.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/ms_extension/common/tensor_utils.h)头文件中。 +构造全1张量的工厂方法,定义在[tensor_utils.h](https://gitee.com/mindspore/mindspore/blob/br_base/mindspore/ccsrc/ms_extension/common/tensor_utils.h)头文件中。 ```cpp Tensor ones(const ShapeVector &shape, TypeId dtype = TypeId::kNumberTypeFloat32) @@ -333,7 +333,7 @@ Tensor ones(const ShapeVector &shape, TypeId dtype = TypeId::kNumberTypeFloat32) ### function zeros -构造全0张量的工厂方法,定义在[tensor_utils.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/ms_extension/common/tensor_utils.h)头文件中。 +构造全0张量的工厂方法,定义在[tensor_utils.h](https://gitee.com/mindspore/mindspore/blob/br_base/mindspore/ccsrc/ms_extension/common/tensor_utils.h)头文件中。 ```cpp Tensor zeros(const ShapeVector &shape, TypeId dtype = TypeId::kNumberTypeFloat32) @@ -349,7 +349,7 @@ Tensor zeros(const ShapeVector &shape, TypeId dtype = TypeId::kNumberTypeFloat32 ### class PyboostRunner -PyNative 
流程的运行器类,定义在[pyboost_extension.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/ms_extension/pynative/pyboost_extension.h)头文件中,为管理执行、内存分配和内核启动提供方法。 +PyNative 流程的运行器类,定义在[pyboost_extension.h](https://gitee.com/mindspore/mindspore/blob/br_base/mindspore/ccsrc/ms_extension/pynative/pyboost_extension.h)头文件中,为管理执行、内存分配和内核启动提供方法。 `PyboostRunner` 是 `std::enable_shared_from_this` 的子类,需要使用智能指针 `std::shared_ptr` 管理其对象。 @@ -470,11 +470,11 @@ PyNative 流程的运行器类,定义在[pyboost_extension.h](https://gitee.co ### class AtbOpRunner -用于执行 Ascend Transformer Boost (ATB) 算子的运行器类,定义在[atb_common.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/ms_extension/ascend/atb/atb_common.h)头文件中。 +用于执行 Ascend Transformer Boost (ATB) 算子的运行器类,定义在[atb_common.h](https://gitee.com/mindspore/mindspore/blob/br_base/mindspore/ccsrc/ms_extension/ascend/atb/atb_common.h)头文件中。 此类继承自 `PyboostRunner`,并封装了 ATB 算子的调用流程,包括初始化和运行 ATB 算子、管理输入输出 Tensor、内存分配及内核调度。 -可以查看教程 [CustomOpBuilder通过AtbOpRunner接入ATB算子](https://www.mindspore.cn/tutorials/zh-CN/master/custom_program/operation/op_customopbuilder_atb.html) 获取使用方法。 +可以查看教程 [CustomOpBuilder通过AtbOpRunner接入ATB算子](https://www.mindspore.cn/tutorials/zh-CN/br_base/custom_program/operation/op_customopbuilder_atb.html) 获取使用方法。 #### 构造函数 @@ -502,7 +502,7 @@ PyNative 流程的运行器类,定义在[pyboost_extension.h](https://gitee.co ### function RunAtbOp -动态图执行ATB算子的接口,定义在[atb_common.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/ms_extension/ascend/atb/atb_common.h)头文件中。 +动态图执行ATB算子的接口,定义在[atb_common.h](https://gitee.com/mindspore/mindspore/blob/br_base/mindspore/ccsrc/ms_extension/ascend/atb/atb_common.h)头文件中。 ```cpp template @@ -520,11 +520,11 @@ void RunAtbOp(const std::string &op_name, const ParamType ¶m, const std::vec ### class AsdSipFFTOpRunner -用于执行 Ascend Sip Boost (ASDSIP) 算子的运行器类,定义在[asdsip_common.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/ms_extension/ascend/asdsip/asdsip_common.h)头文件中。 +用于执行 Ascend Sip Boost (ASDSIP) 算子的运行器类,定义在[asdsip_common.h](https://gitee.com/mindspore/mindspore/blob/br_base/mindspore/ccsrc/ms_extension/ascend/asdsip/asdsip_common.h)头文件中。 此类继承自 `PyboostRunner`,并封装了 ASDSIP FFT 算子的调用流程,包括初始化和运行 ASDSIP FFT 算子、管理输入输出 Tensor、内存分配及内核调度。 -可以查看教程 [CustomOpBuilder通过AsdSipFFTOpRunner接入ASDSIP FFT算子](https://www.mindspore.cn/tutorials/zh-CN/master/custom_program/operation/op_customopbuilder_asdsip.html) 获取使用方法。 +可以查看教程 [CustomOpBuilder通过AsdSipFFTOpRunner接入ASDSIP FFT算子](https://www.mindspore.cn/tutorials/zh-CN/br_base/custom_program/operation/op_customopbuilder_asdsip.html) 获取使用方法。 #### 构造函数 @@ -550,7 +550,7 @@ void RunAtbOp(const std::string &op_name, const ParamType ¶m, const std::vec ### function RunAsdSipFFTOp -动态图执行ASDSIP FFT算子的接口,定义在[asdsip_common.h](https://gitee.com/mindspore/mindspore/blob/master/mindspore/ccsrc/ms_extension/ascend/asdsip/asdsip_common.h)头文件中。 +动态图执行ASDSIP FFT算子的接口,定义在[asdsip_common.h](https://gitee.com/mindspore/mindspore/blob/br_base/mindspore/ccsrc/ms_extension/ascend/asdsip/asdsip_common.h)头文件中。 ```cpp inline void RunAsdSipFFTOp(const std::string &op_name, const FFTParam &fft_param, const ms::Tensor &input, diff --git a/tutorials/source_zh_cn/custom_program/operation/op_custom_adv.ipynb b/tutorials/source_zh_cn/custom_program/operation/op_custom_adv.ipynb index 2a6b911dcb..7dcd6e9e57 100644 --- a/tutorials/source_zh_cn/custom_program/operation/op_custom_adv.ipynb +++ b/tutorials/source_zh_cn/custom_program/operation/op_custom_adv.ipynb @@ -7,11 +7,11 @@ "source": [ "# 
Custom原语自定义算子高级用法\n", "\n", - "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/custom_program/operation/mindspore_op_custom_adv.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/custom_program/operation/mindspore_op_custom_adv.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/custom_program/operation/op_custom_adv.ipynb)\n", + "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/custom_program/operation/mindspore_op_custom_adv.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/custom_program/operation/mindspore_op_custom_adv.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/custom_program/operation/op_custom_adv.ipynb)\n", "\n", "## 算子信息注册\n", "\n", - "算子信息主要描述了算子实现函数所支持的输入输出类型、输入输出数据格式、属性和target(平台信息),它是后端做算子选择和映射时的依据。它通过[CustomRegOp](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.CustomRegOp.html#mindspore-ops-customregop)接口定义,通过[custom_info_register](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.custom_info_register.html#mindspore-ops-custom-info-register)装饰器或者[Custom](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Custom.html#mindspore-ops-custom)原语构造函数中的`reg_info`参数,实现算子信息与算子实现函数的绑定,并最终注册到MindSpore C++侧的算子信息库。`reg_info`参数优先级高于`custom_info_register`装饰器。\n", + "算子信息主要描述了算子实现函数所支持的输入输出类型、输入输出数据格式、属性和target(平台信息),它是后端做算子选择和映射时的依据。它通过[CustomRegOp](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.CustomRegOp.html#mindspore-ops-customregop)接口定义,通过[custom_info_register](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.custom_info_register.html#mindspore-ops-custom-info-register)装饰器或者[Custom](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Custom.html#mindspore-ops-custom)原语构造函数中的`reg_info`参数,实现算子信息与算子实现函数的绑定,并最终注册到MindSpore C++侧的算子信息库。`reg_info`参数优先级高于`custom_info_register`装饰器。\n", "\n", "算子信息中的target的值可以为\"Ascend\"或\"GPU\"或\"CPU\",描述的是算子实现函数在当前target上所支持的输入输出类型、输入输出数据格式和属性等信息。对于同一个算子实现函数,其在不同target上支持的输入输出类型可能不一致,但算子信息在同一target下只会被注册一次,所以可以通过target进行区分。\n", "\n", @@ -117,7 +117,7 @@ "id": "d4c1592f", "metadata": {}, "source": [ - "> 更多示例可参考MindSpore源码中[tests/st/graph_kernel/custom](https://gitee.com/mindspore/mindspore/tree/master/tests/st/graph_kernel/custom)下的用例。" + "> 更多示例可参考MindSpore源码中[tests/st/graph_kernel/custom](https://gitee.com/mindspore/mindspore/tree/br_base/tests/st/graph_kernel/custom)下的用例。" ] } ], diff --git a/tutorials/source_zh_cn/custom_program/operation/op_custom_aot.md 
b/tutorials/source_zh_cn/custom_program/operation/op_custom_aot.md index 5c47b07e5a..fb3be67209 100644 --- a/tutorials/source_zh_cn/custom_program/operation/op_custom_aot.md +++ b/tutorials/source_zh_cn/custom_program/operation/op_custom_aot.md @@ -1,12 +1,12 @@ # Custom原语AOT类型自定义算子(CPU/GPU平台) -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/custom_program/operation/op_custom_aot.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/custom_program/operation/op_custom_aot.md) ## 概述 -AOT类型的自定义算子采用预编译的方式,要求网络开发者基于特定接口,手写算子实现函数对应的源码文件,并提前将源码文件编译为动态链接库,然后在网络运行时框架会自动调用执行动态链接库中的函数。AOT类型的自定义算子支持GPU平台的CUDA语言,和CPU平台的C和C++语言。对于Ascend平台的自定义算子开发,参考[AOT类型自定义算子(Ascend平台)](https://www.mindspore.cn/tutorials/zh-CN/master/custom_program/operation/op_custom_ascendc.html)。 +AOT类型的自定义算子采用预编译的方式,要求网络开发者基于特定接口,手写算子实现函数对应的源码文件,并提前将源码文件编译为动态链接库,然后在网络运行时框架会自动调用执行动态链接库中的函数。AOT类型的自定义算子支持GPU平台的CUDA语言,和CPU平台的C和C++语言。对于Ascend平台的自定义算子开发,参考[AOT类型自定义算子(Ascend平台)](https://www.mindspore.cn/tutorials/zh-CN/br_base/custom_program/operation/op_custom_ascendc.html)。 -这篇教程中,我们提供几个简单的AOT类型自定义算子在CPU和GPU平台的用例作为展示。对于更多完整的AOT类型自定义算子的例子,参见MindSpore源码中的[用例](https://gitee.com/mindspore/mindspore/blob/master/tests/st/graph_kernel/custom/test_custom_aot.py)。 +这篇教程中,我们提供几个简单的AOT类型自定义算子在CPU和GPU平台的用例作为展示。对于更多完整的AOT类型自定义算子的例子,参见MindSpore源码中的[用例](https://gitee.com/mindspore/mindspore/blob/br_base/tests/st/graph_kernel/custom/test_custom_aot.py)。 ## AOT类型自定义算子基础用法特性简介 @@ -33,7 +33,7 @@ extern "C" int CustomFunc(int nparam, void **params, int *ndims, int64_t **shape 算子输出shape和数据类型推理可以通过定义Python函数实现,描述算子输出shape和数据类型的推导逻辑。 -若自定义算子只支持特定的输入输出数据类型,则需要定义算子信息,算子信息生成方式请参考[算子信息注册](https://www.mindspore.cn/tutorials/zh-CN/master/custom_program/operation/op_custom_adv.html#算子信息注册)。 +若自定义算子只支持特定的输入输出数据类型,则需要定义算子信息,算子信息生成方式请参考[算子信息注册](https://www.mindspore.cn/tutorials/zh-CN/br_base/custom_program/operation/op_custom_adv.html#算子信息注册)。 下面通过例子介绍GPU平台和CPU平台上AOT类型的自定义算子开发流程,其中自定义算子实现两个输入张量相加的功能。 @@ -200,7 +200,7 @@ python test_custom_aot.py - AOT类型自定义算子的属性和中间变量; - AOT类型自定义算子的动态shape支持。 -对于下面用例的完整代码,请查阅[这里](https://gitee.com/mindspore/mindspore/blob/master/tests/st/graph_kernel/custom/test_custom_aot_fused.py)。 +对于下面用例的完整代码,请查阅[这里](https://gitee.com/mindspore/mindspore/blob/br_base/tests/st/graph_kernel/custom/test_custom_aot_fused.py)。 ### AOT类型自定义算子的自动编译 @@ -271,7 +271,7 @@ extern "C" int FuncNameInit(int *ndims, int64_t **shapes, const char **dtypes, A - ndims (int \*): 输入输出shape维度数组。 - shapes (int64_t \*\*): 输入输出shape数组。 - dtypes (const char \*\*): 输入输出数据类型数组。 -- extra (AotExtra \*): 用于带属性的自定义算子扩展。其中`AotExtra`类型定义在MindSpore提供的头文件[custom_aot_extra.h](https://gitee.com/mindspore/mindspore/blob/master/tests/st/graph_kernel/custom/aot_test_files/custom_aot_extra.h)。 +- extra (AotExtra \*): 用于带属性的自定义算子扩展。其中`AotExtra`类型定义在MindSpore提供的头文件[custom_aot_extra.h](https://gitee.com/mindspore/mindspore/blob/br_base/tests/st/graph_kernel/custom/aot_test_files/custom_aot_extra.h)。 ### Shape推导函数 @@ -285,7 +285,7 @@ extern "C" std::vector FuncNameInferShape(int *ndims, int64_t **shapes, - ndims (int \*): 输入shape维度数组。 - shapes (int64_t \*\*): 输入shape数组。 -- extra (AotExtra \*): 
用于带属性的自定义算子扩展。其中`AotExtra`类型定义在MindSpore提供的头文件[custom_aot_extra.h](https://gitee.com/mindspore/mindspore/blob/master/tests/st/graph_kernel/custom/aot_test_files/custom_aot_extra.h)。 +- extra (AotExtra \*): 用于带属性的自定义算子扩展。其中`AotExtra`类型定义在MindSpore提供的头文件[custom_aot_extra.h](https://gitee.com/mindspore/mindspore/blob/br_base/tests/st/graph_kernel/custom/aot_test_files/custom_aot_extra.h)。 ### type推导函数 @@ -308,7 +308,7 @@ extern "C" TypeId FuncNameInferType(std::vector type_ids, AotExtra *extr def attr(self, name=None, param_type=None, value_type=None, default_value=None, **kwargs) ``` -其参数含义参见[CustomRegOp](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.CustomRegOp.html#mindspore-ops-customregop)相关接口文档。其中,在AOT类型自定义算子注册时,我们注册时需要注意一下四个参数: +其参数含义参见[CustomRegOp](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.CustomRegOp.html#mindspore-ops-customregop)相关接口文档。其中,在AOT类型自定义算子注册时,我们注册时需要注意一下四个参数: - name: AOT类型自定义算子的属性的名称; - param_type: 属性的参数类型。对于AOT类型自定义算子的属性,这个输入固定为”required“,即必选参数; @@ -335,7 +335,7 @@ output = ReduceSum(tmp, axis, keep_dims) #### 算子属性类 -首先我们定义一个数据结构贮存算子属性,该数据接口继承自`AotKernelData`。`AotKernelData`是自定义算子属性数据结构的统一基类,通过下载MindSpore提供的头文件[custom_aot_extra.h](https://gitee.com/mindspore/mindspore/blob/master/tests/st/graph_kernel/custom/aot_test_files/custom_aot_extra.h)放在源文件同一目录下并在文件前`#include "custom_aot_extra.h"`便可以使用相关接口。 +首先我们定义一个数据结构贮存算子属性,该数据接口继承自`AotKernelData`。`AotKernelData`是自定义算子属性数据结构的统一基类,通过下载MindSpore提供的头文件[custom_aot_extra.h](https://gitee.com/mindspore/mindspore/blob/br_base/tests/st/graph_kernel/custom/aot_test_files/custom_aot_extra.h)放在源文件同一目录下并在文件前`#include "custom_aot_extra.h"`便可以使用相关接口。 ```c++ #include @@ -520,7 +520,7 @@ class ReduceDynNet(Cell): #### 算子注册 -算子属性在初始化时的赋值,通过算子注册文件实现。关于自定义算子注册的函数,参见[CustomRegOp](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.CustomRegOp.html#mindspore-ops-customregop)相关文档。对于每一个属性,我们为算子注册文件`reduce_cpu_info`创建一个`attr`,设置属性名和属性的值。 +算子属性在初始化时的赋值,通过算子注册文件实现。关于自定义算子注册的函数,参见[CustomRegOp](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.CustomRegOp.html#mindspore-ops-customregop)相关文档。对于每一个属性,我们为算子注册文件`reduce_cpu_info`创建一个`attr`,设置属性名和属性的值。 这里每一个`attr`项有四个输入:第一个为名字,如`"axis"`或`"keep_dim"`;中间两个为`"required"`和`"all"`;最后一个输入需要指定输入名为`value=`,输入的值为属性的值,例如这里`value=axis`和`value=keep_dim`。这里我们从网络的输入确定这两个参数的值,这两个值应该和上面初始化函数和shape推导函数中使用的`extra->Attr`模板接口的类型匹配。 @@ -574,7 +574,7 @@ AOT类型的自定义算子支持多输出(输出为tuple)的情况。多输 - 算子注册文件:需要列出多个输出的名字和数据类型信息; - 算子计算函数:需要识别多个输出对应的指针。 -下面我们用一个例子来展现多输出AOT类型自定义算子的定义方法,具体的文件用例参见[这里](https://gitee.com/mindspore/mindspore/blob/master/tests/st/graph_kernel/custom/test_custom_aot.py#L405)。 +下面我们用一个例子来展现多输出AOT类型自定义算子的定义方法,具体的文件用例参见[这里](https://gitee.com/mindspore/mindspore/blob/br_base/tests/st/graph_kernel/custom/test_custom_aot.py#L405)。 ### 算子推导文件 @@ -675,7 +675,7 @@ void *output2 = params[3]; void *output3 = params[4]; ``` -完整的算子计算文件参见[这里](https://gitee.com/mindspore/mindspore/blob/master/tests/st/graph_kernel/custom/aot_test_files/add_mul_div.cu)。 +完整的算子计算文件参见[这里](https://gitee.com/mindspore/mindspore/blob/br_base/tests/st/graph_kernel/custom/aot_test_files/add_mul_div.cu)。 ### 算子使用文件 diff --git a/tutorials/source_zh_cn/custom_program/operation/op_custom_ascendc.md b/tutorials/source_zh_cn/custom_program/operation/op_custom_ascendc.md index a1d6adb4be..469c9cf2cb 100644 --- a/tutorials/source_zh_cn/custom_program/operation/op_custom_ascendc.md +++ b/tutorials/source_zh_cn/custom_program/operation/op_custom_ascendc.md @@ -1,6 +1,6 @@ # 
Custom原语AOT类型自定义算子(Ascend平台) -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/custom_program/operation/op_custom_ascendc.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/custom_program/operation/op_custom_ascendc.md) ## 概述 @@ -10,7 +10,7 @@ AOT类型的自定义算子采用预编译的方式,要求网络开发者基 2. **离线编译与部署**:完成算子开发后,进行离线编译,确保算子可以在Ascend AI处理器上高效运行,并进行部署。 3. **MindSpore使用自定义算子**:将编译后的Ascend C自定义算子集成到MindSpore框架中,实现在实际AI应用中的使用。 -本章内容旨在帮助开发者全面了解并掌握Ascend C自定义算子的整个生命周期,从开发到部署,再到在MindSpore中的有效利用。对于其他平台的AOT自定义算子开发,参考[AOT类型自定义算子(CPU/GPU平台)](https://www.mindspore.cn/tutorials/zh-CN/master/custom_program/operation/op_custom_aot.html)。 +本章内容旨在帮助开发者全面了解并掌握Ascend C自定义算子的整个生命周期,从开发到部署,再到在MindSpore中的有效利用。对于其他平台的AOT自定义算子开发,参考[AOT类型自定义算子(CPU/GPU平台)](https://www.mindspore.cn/tutorials/zh-CN/br_base/custom_program/operation/op_custom_aot.html)。 ## 自定义算子开发 @@ -85,9 +85,9 @@ AOT类型的自定义算子采用预编译的方式,要求网络开发者基 ## MindSpore使用自定义算子 -MindSpore自定义算子接口为[ops.Custom](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Custom.html), -详细的接口说明可以参看[ops.Custom](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Custom.html) -,本文侧重说明如何使用[ops.Custom](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Custom.html) +MindSpore自定义算子接口为[ops.Custom](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Custom.html), +详细的接口说明可以参看[ops.Custom](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Custom.html) +,本文侧重说明如何使用[ops.Custom](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Custom.html) 原语接入Ascend C自定义算子。 ### 环境准备 @@ -103,7 +103,7 @@ ops.Custom(func, bprop=None, out_dtype=None, func_type='aot', out_shape=None, re - `func`(str): 自定义算子名字。 - `out_shape`(Union[function, list, tuple]): 输出shape或输出shape的推导函数。默认值: `None`。 - `out_dtype` (Union[ - function, [mindspore.dtype](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.dtype.html#mindspore.dtype) + function, [mindspore.dtype](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.dtype.html#mindspore.dtype) ,list, tuple]) : 输出type或输出type的推导函数。默认值: `None`。 - `func_type`(str): 自定义算子的函数类型, Ascend C自定义算子指定`func_type="aot"`。 @@ -111,7 +111,7 @@ ops.Custom(func, bprop=None, out_dtype=None, func_type='aot', out_shape=None, re - `reg_info`(Union[str, dict, list, tuple]): 自定义算子的算子注册信息。默认值: `None`。Ascend C自定义算子无需传入该参数,使用默认值。 **场景限制**: 当前动态图和静态图GE后端只支持输入输出为Tensor类型,静态图O0/O1模式无限制类型。Ascend -C自定义算子动态图场景推荐使用[基于CustomOpBuilder的自定义算子](https://www.mindspore.cn/tutorials/zh-CN/master/custom_program/operation/op_customopbuilder.html)。 +C自定义算子动态图场景推荐使用[基于CustomOpBuilder的自定义算子](https://www.mindspore.cn/tutorials/zh-CN/br_base/custom_program/operation/op_customopbuilder.html)。 ### 简单示例 @@ -157,7 +157,7 @@ assert output.asnumpy().dtype == 'float32' assert output.asnumpy().shape == (1280, 1280) ``` -您可以查看MindSpore仓中的 [自定义算子测试用例](https://gitee.com/mindspore/mindspore/tree/master/tests/st/graph_kernel/custom/custom_ascendc) +您可以查看MindSpore仓中的 [自定义算子测试用例](https://gitee.com/mindspore/mindspore/tree/br_base/tests/st/graph_kernel/custom/custom_ascendc) 获取更多数据类型和使用场景的Ascend C自定义算子用例。 样例工程的目录结构如下: @@ -361,7 +361,7 @@ extern "C" std::vector> FuncNameInferShape(int *ndims, int6 - ndims 
(int \*): 输入shape维度数组。 - shapes (int64_t \*\*): 输入shape数组。 - extra (AotExtra \*): 用于带属性的自定义算子扩展。其中`AotExtra` - 类型定义在MindSpore提供的头文件[custom_aot_extra.h](https://gitee.com/mindspore/mindspore/blob/master/tests/st/graph_kernel/custom/aot_test_files/custom_aot_extra.h)。 + 类型定义在MindSpore提供的头文件[custom_aot_extra.h](https://gitee.com/mindspore/mindspore/blob/br_base/tests/st/graph_kernel/custom/aot_test_files/custom_aot_extra.h)。 **Infer Type函数原型** diff --git a/tutorials/source_zh_cn/custom_program/operation/op_custom_prim.ipynb b/tutorials/source_zh_cn/custom_program/operation/op_custom_prim.ipynb index 0ce525b3f5..f569af8d94 100644 --- a/tutorials/source_zh_cn/custom_program/operation/op_custom_prim.ipynb +++ b/tutorials/source_zh_cn/custom_program/operation/op_custom_prim.ipynb @@ -7,9 +7,9 @@ "source": [ "# 基于Custom原语的自定义算子\n", "\n", - "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/custom_program/operation/mindspore_op_custom_prim.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/custom_program/operation/mindspore_op_custom_prim.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/custom_program/operation/op_custom_prim.ipynb)\n", + "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/custom_program/operation/mindspore_op_custom_prim.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/custom_program/operation/mindspore_op_custom_prim.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/custom_program/operation/op_custom_prim.ipynb)\n", "\n", - "当开发网络遇到内置算子不足以满足需求时,你可以利用MindSpore的Python API中的[Custom](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Custom.html#mindspore-ops-custom)原语方便快捷地进行不同类型自定义算子的定义和使用。\n", + "当开发网络遇到内置算子不足以满足需求时,你可以利用MindSpore的Python API中的[Custom](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Custom.html#mindspore-ops-custom)原语方便快捷地进行不同类型自定义算子的定义和使用。\n", "\n", "传统的添加一个自定义算子的方式,需要完成算子原语注册、算子实现、算子信息注册三部分工作。\n", "\n", @@ -26,12 +26,12 @@ "\n", "## 自定义算子分类及适应场景\n", "\n", - "基于[Custom](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Custom.html#mindspore-ops-custom)原语的自定义算子支持的算子开发方式包括:pyfunc、aot。不同的算子开发方式适应的场景如下:\n", + "基于[Custom](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Custom.html#mindspore-ops-custom)原语的自定义算子支持的算子开发方式包括:pyfunc、aot。不同的算子开发方式适应的场景如下:\n", "\n", "| 算子开发方式 | 开发语言 | 支持平台 | 推荐场景 |\n", "|:-------|:------------------ |:------ |:------------------------|\n", "| [pyfunc](#自定义算子用例) | Python | `CPU` | 快速算法验证的场景 |\n", - "| 
[aot](https://www.mindspore.cn/tutorials/zh-CN/master/custom_program/operation/op_custom_aot.html) | Ascend C/CUDA/C++ | `Ascend` `GPU` `CPU` | 需要高性能算子的场景 |\n", + "| [aot](https://www.mindspore.cn/tutorials/zh-CN/br_base/custom_program/operation/op_custom_aot.html) | Ascend C/CUDA/C++ | `Ascend` `GPU` `CPU` | 需要高性能算子的场景 |\n", "\n", "不同的开发方式使用不同的开发语言实现算子计算逻辑,但是自定义算子的开发流程是一致的,包括算子实现、shape推导、数据类型推理和算子信息注册(可选)。网络开发者可以根据需要选用不同的自定义算子开发方式。在开发者进行自定义算子开发的时候,可以参考如下方式选择对应类型:\n", "\n", @@ -42,10 +42,10 @@ "\n", "为了帮助大家更好地使用自定义算子,我们以[pyfunc类型自定义算子](#自定义算子用例)中作为自定义算子的范例展示。此外,我们提供了其他自定义算子的教程包括:\n", "\n", - "- aot类型自定义算子:[Ascend平台](https://www.mindspore.cn/tutorials/zh-CN/master/custom_program/operation/op_custom_ascendc.html)和[GPU/CPU平台](https://www.mindspore.cn/tutorials/zh-CN/master/custom_program/operation/op_custom_aot.html);\n", - "- [自定义算子进阶用法](https://www.mindspore.cn/tutorials/zh-CN/master/custom_program/operation/op_custom_adv.html):算子注册和反向算子。\n", + "- aot类型自定义算子:[Ascend平台](https://www.mindspore.cn/tutorials/zh-CN/br_base/custom_program/operation/op_custom_ascendc.html)和[GPU/CPU平台](https://www.mindspore.cn/tutorials/zh-CN/br_base/custom_program/operation/op_custom_aot.html);\n", + "- [自定义算子进阶用法](https://www.mindspore.cn/tutorials/zh-CN/br_base/custom_program/operation/op_custom_adv.html):算子注册和反向算子。\n", "\n", - "> 更多示例可参考MindSpore源码中[tests/st/graph_kernel/custom](https://gitee.com/mindspore/mindspore/tree/master/tests/st/graph_kernel/custom)下的用例。\n", + "> 更多示例可参考MindSpore源码中[tests/st/graph_kernel/custom](https://gitee.com/mindspore/mindspore/tree/br_base/tests/st/graph_kernel/custom)下的用例。\n", "\n", "## 自定义算子用例\n", "\n", @@ -186,7 +186,7 @@ "id": "91389287", "metadata": {}, "source": [ - "如此我们完成一个pyfunc类型自定义算子的定义。对于更多完整的pyfunc类型自定义算子的例子,参见MindSpore源码中的[用例](https://gitee.com/mindspore/mindspore/blob/master/tests/st/graph_kernel/custom/test_custom_pyfunc.py)。\n" + "如此我们完成一个pyfunc类型自定义算子的定义。对于更多完整的pyfunc类型自定义算子的例子,参见MindSpore源码中的[用例](https://gitee.com/mindspore/mindspore/blob/br_base/tests/st/graph_kernel/custom/test_custom_pyfunc.py)。\n" ] } ], diff --git a/tutorials/source_zh_cn/custom_program/operation/op_customopbuilder.md b/tutorials/source_zh_cn/custom_program/operation/op_customopbuilder.md index 1a41e5ffa7..44852345fc 100644 --- a/tutorials/source_zh_cn/custom_program/operation/op_customopbuilder.md +++ b/tutorials/source_zh_cn/custom_program/operation/op_customopbuilder.md @@ -1,14 +1,14 @@ # 基于CustomOpBuilder的自定义算子 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/custom_program/operation/op_customopbuilder.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/custom_program/operation/op_customopbuilder.md) ## 概述 动态图模式下,网络流程更容易调试,可以支持执行单算子、普通函数和网络,以及单独求梯度等操作。 -[基于Custom原语的自定义算子](https://www.mindspore.cn/tutorials/zh-CN/master/custom_program/operation/op_custom_prim.html)虽然可以同时支持静态图和动态图,但是需要定义的内容较多。因此MindSpore针对动态图的自定义算子接入方式做了优化,提供了新的Python API [CustomOpBuilder](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.CustomOpBuilder.html) ,在方便用户使用的同时,还能提升动态图自定义算子的执行性能。 +[基于Custom原语的自定义算子](https://www.mindspore.cn/tutorials/zh-CN/br_base/custom_program/operation/op_custom_prim.html)虽然可以同时支持静态图和动态图,但是需要定义的内容较多。因此MindSpore针对动态图的自定义算子接入方式做了优化,提供了新的Python API 
[CustomOpBuilder](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.CustomOpBuilder.html) ,在方便用户使用的同时,还能提升动态图自定义算子的执行性能。 -用户基于[C++接口](https://www.mindspore.cn/tutorials/zh-CN/master/custom_program/operation/cpp_api_for_custom_ops.html)开发算子,需要定义算子函数体,包括推导并构造输出Tensor,调用执行device算子等功能。定义好算子函数体后,通过[pybind11](https://github.com/pybind/pybind11)组件即可将C++函数注册成为Python模块接口。 +用户基于[C++接口](https://www.mindspore.cn/tutorials/zh-CN/br_base/custom_program/operation/cpp_api_for_custom_ops.html)开发算子,需要定义算子函数体,包括推导并构造输出Tensor,调用执行device算子等功能。定义好算子函数体后,通过[pybind11](https://github.com/pybind/pybind11)组件即可将C++函数注册成为Python模块接口。 ## 动态图算子执行流程介绍 @@ -27,7 +27,7 @@ MindSpore以Python作为前端,用C++实现后端,每个算子执行时需 ## 自定义算子通过PyboostRunner支持多级流水 -动态图多级流水的调用流程较复杂,涉及的接口和数据结构较多,为了方便用户在动态图接入自定义算子,MindSpore封装了[PyboostRunner类](https://www.mindspore.cn/tutorials/zh-CN/master/custom_program/operation/cpp_api_for_custom_ops.html#class-pyboostrunner)。 +动态图多级流水的调用流程较复杂,涉及的接口和数据结构较多,为了方便用户在动态图接入自定义算子,MindSpore封装了[PyboostRunner类](https://www.mindspore.cn/tutorials/zh-CN/br_base/custom_program/operation/cpp_api_for_custom_ops.html#class-pyboostrunner)。 下面以一个例子演示动态图自定义算子的接入流程: @@ -214,5 +214,5 @@ print(out) ## 更多场景示例 -- [通过AtbOpRunner接入ATB算子](https://www.mindspore.cn/tutorials/zh-CN/master/custom_program/operation/op_customopbuilder_atb.html):介绍通过自定义算子快速对接ATB算子的方法。 -- [通过AsdSipFFTOpRunner接入ASDSIP FFT算子](https://www.mindspore.cn/tutorials/zh-CN/master/custom_program/operation/op_customopbuilder_asdsip.html):介绍通过自定义算子快速对接ASDSIP FFT算子的方法。 +- [通过AtbOpRunner接入ATB算子](https://www.mindspore.cn/tutorials/zh-CN/br_base/custom_program/operation/op_customopbuilder_atb.html):介绍通过自定义算子快速对接ATB算子的方法。 +- [通过AsdSipFFTOpRunner接入ASDSIP FFT算子](https://www.mindspore.cn/tutorials/zh-CN/br_base/custom_program/operation/op_customopbuilder_asdsip.html):介绍通过自定义算子快速对接ASDSIP FFT算子的方法。 diff --git a/tutorials/source_zh_cn/custom_program/operation/op_customopbuilder_asdsip.md b/tutorials/source_zh_cn/custom_program/operation/op_customopbuilder_asdsip.md index 781f456634..d07665cf7f 100644 --- a/tutorials/source_zh_cn/custom_program/operation/op_customopbuilder_asdsip.md +++ b/tutorials/source_zh_cn/custom_program/operation/op_customopbuilder_asdsip.md @@ -1,6 +1,6 @@ # CustomOpBuilder通过AsdSipFFTOpRunner接入ASDSIP FFT算子 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/custom_program/operation/op_customopbuilder_asdsip.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/custom_program/operation/op_customopbuilder_asdsip.md) ## 概述 @@ -8,11 +8,11 @@ 当用户需要使用ASDSIP加速库的FFT类算子,而MindSpore未提供相应算子接口时,用户可以使用自定义算子的方法快速接入使用。 -在 [基于CustomOpBuilder的自定义算子](https://www.mindspore.cn/tutorials/zh-CN/master/custom_program/operation/op_customopbuilder.html) 中,MindSpore提供了 `PyboostRunner` 方便用户在动态图接入自定义算子。现在针对ASDSIP算子,MindSpore又额外提供了一套`AsdSipFFTOpRunner`用于把ASDSIP FFT算子的调用流程和动态图多级流水封装到一起。 +在 [基于CustomOpBuilder的自定义算子](https://www.mindspore.cn/tutorials/zh-CN/br_base/custom_program/operation/op_customopbuilder.html) 中,MindSpore提供了 `PyboostRunner` 方便用户在动态图接入自定义算子。现在针对ASDSIP算子,MindSpore又额外提供了一套`AsdSipFFTOpRunner`用于把ASDSIP FFT算子的调用流程和动态图多级流水封装到一起。 -用户基于 
[AsdSipFFTOpRunner类](https://www.mindspore.cn/tutorials/zh-CN/master/custom_program/operation/cpp_api_for_custom_ops.html#class-asdsipfftoprunner) 对接ASDSIP FFT算子时,仅需要提供`Param`,并调用`Init`接口初始化(即构造`Operation`),再调用`Run`接口即可执行ASDSIP算子。还可以直接调用 [RunAsdSipFFTOp](https://www.mindspore.cn/tutorials/zh-CN/master/custom_program/operation/cpp_api_for_custom_ops.html#function-launchasdsipfft)函数一键执行(函数内包含了`Init`和`Run`接口的调用)。 +用户基于 [AsdSipFFTOpRunner类](https://www.mindspore.cn/tutorials/zh-CN/br_base/custom_program/operation/cpp_api_for_custom_ops.html#class-asdsipfftoprunner) 对接ASDSIP FFT算子时,仅需要提供`Param`,并调用`Init`接口初始化(即构造`Operation`),再调用`Run`接口即可执行ASDSIP算子。还可以直接调用 [RunAsdSipFFTOp](https://www.mindspore.cn/tutorials/zh-CN/br_base/custom_program/operation/cpp_api_for_custom_ops.html#function-launchasdsipfft)函数一键执行(函数内包含了`Init`和`Run`接口的调用)。 -本指南以一个`FftC2C`为例,展示ASDSIP FFT算子的接入流程。完整代码请参阅[代码仓库](https://gitee.com/mindspore/mindspore/blob/master/tests/st/graph_kernel/custom/jit_test_files/asdsip_fftc2c.cpp)。 +本指南以一个`FftC2C`为例,展示ASDSIP FFT算子的接入流程。完整代码请参阅[代码仓库](https://gitee.com/mindspore/mindspore/blob/br_base/tests/st/graph_kernel/custom/jit_test_files/asdsip_fftc2c.cpp)。 ## 安装ASDSIP加速库 diff --git a/tutorials/source_zh_cn/custom_program/operation/op_customopbuilder_atb.md b/tutorials/source_zh_cn/custom_program/operation/op_customopbuilder_atb.md index 7b0bdb3277..bdf4a361ad 100644 --- a/tutorials/source_zh_cn/custom_program/operation/op_customopbuilder_atb.md +++ b/tutorials/source_zh_cn/custom_program/operation/op_customopbuilder_atb.md @@ -1,6 +1,6 @@ # CustomOpBuilder通过AtbOpRunner接入ATB算子 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/custom_program/operation/op_customopbuilder_atb.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/custom_program/operation/op_customopbuilder_atb.md) ## 概述 @@ -8,13 +8,13 @@ 当用户需要使用ATB加速库的算子,而MindSpore未提供相应算子接口时,用户可以使用自定义算子的方法快速接入使用。 -在 [基于CustomOpBuilder的自定义算子](https://www.mindspore.cn/tutorials/zh-CN/master/custom_program/operation/op_customopbuilder.html) 中,MindSpore提供了 `PyboostRunner` 方便用户在动态图接入自定义算子。现在针对ATB算子,MindSpore又额外提供了一套`AtbOpRunner`用于把ATB算子的调用流程和动态图多级流水封装到一起。 +在 [基于CustomOpBuilder的自定义算子](https://www.mindspore.cn/tutorials/zh-CN/br_base/custom_program/operation/op_customopbuilder.html) 中,MindSpore提供了 `PyboostRunner` 方便用户在动态图接入自定义算子。现在针对ATB算子,MindSpore又额外提供了一套`AtbOpRunner`用于把ATB算子的调用流程和动态图多级流水封装到一起。 在完整的[ATB算子的调用流程](https://www.hiascend.com/document/detail/zh/canncommercial/81RC1/developmentguide/acce/ascendtb/ascendtb_0037.html)中,用户需要执行 构造`Param`、创建`Operation`和`Context`、设置`variantPack`(算子输入输出张量)、调用`Setup`、调用`Execute`、销毁`Context`和`Operation` 等流程。但是对于一个算子来说,其`Operation`仅依赖于算子属性(`Param`),其`Context`仅依赖于流(stream),且都是可以复用的,因此MindSpore提供了一个缓存,将这些数据结构放在缓存中,避免多次创建和销毁带来不必要的时间消耗。 -用户基于 [AtbOpRunner类](https://www.mindspore.cn/tutorials/zh-CN/master/custom_program/operation/cpp_api_for_custom_ops.html#class-atboprunner) 对接ATB算子时,仅需要提供相应`Param`的哈希函数(作为缓存`Operation`的键值),并调用`Init`接口初始化(即构造`Operation`),再调用`Run`接口即可执行ATB算子。还可以直接调用 [RunAtbOp](https://www.mindspore.cn/tutorials/zh-CN/master/custom_program/operation/cpp_api_for_custom_ops.html#function-runatbop)函数一键执行(函数内包含了`Init`和`Run`接口的调用)。 +用户基于 
[AtbOpRunner类](https://www.mindspore.cn/tutorials/zh-CN/br_base/custom_program/operation/cpp_api_for_custom_ops.html#class-atboprunner) 对接ATB算子时,仅需要提供相应`Param`的哈希函数(作为缓存`Operation`的键值),并调用`Init`接口初始化(即构造`Operation`),再调用`Run`接口即可执行ATB算子。还可以直接调用 [RunAtbOp](https://www.mindspore.cn/tutorials/zh-CN/br_base/custom_program/operation/cpp_api_for_custom_ops.html#function-runatbop)函数一键执行(函数内包含了`Init`和`Run`接口的调用)。 -本指南以一个`SwiGLU`为例,展示ATB算子的接入流程。完整代码请参阅[代码仓库](https://gitee.com/mindspore/mindspore/blob/master/tests/st/graph_kernel/custom/jit_test_files/atb_swiglu.cpp)。 +本指南以一个`SwiGLU`为例,展示ATB算子的接入流程。完整代码请参阅[代码仓库](https://gitee.com/mindspore/mindspore/blob/br_base/tests/st/graph_kernel/custom/jit_test_files/atb_swiglu.cpp)。 ## 安装ATB加速库 diff --git a/tutorials/source_zh_cn/cv/fcn8s.ipynb b/tutorials/source_zh_cn/cv/fcn8s.ipynb index c0bd74405b..9151c2778a 100644 --- a/tutorials/source_zh_cn/cv/fcn8s.ipynb +++ b/tutorials/source_zh_cn/cv/fcn8s.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/cv/mindspore_fcn8s.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/cv/mindspore_fcn8s.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/cv/fcn8s.ipynb)\n", + "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/cv/mindspore_fcn8s.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/cv/mindspore_fcn8s.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/cv/fcn8s.ipynb)\n", "\n", "# FCN图像语义分割\n", "\n", @@ -12,7 +12,7 @@ "\n", "FCN是首个端到端(end to end)进行像素级(pixel level)预测的全卷积网络。\n", "\n", - "![fcn-1](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/fcn_1.png)\n", + "![fcn-1](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/fcn_1.png)\n", "\n", "## 语义分割\n", "\n", @@ -22,7 +22,7 @@ "\n", "语义分割的目的是对图像中每个像素点进行分类。与普通的分类任务只输出某个类别不同,语义分割任务输出与输入大小相同的图像,输出图像的每个像素对应了输入图像每个像素的类别。语义在图像领域指的是图像的内容,对图片意思的理解,下图是一些语义分割的实例:\n", "\n", - "![fcn-2](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/fcn_2.png)\n", + "![fcn-2](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/fcn_2.png)\n", "\n", "## 模型简介\n", "\n", @@ -34,19 +34,19 @@ "\n", " 使用VGG-16作为FCN的backbone。VGG-16的输入为224*224的RGB图像,输出为1000个预测值。VGG-16只能接受固定大小的输入,丢弃了空间坐标,产生非空间输出。VGG-16中共有三个全连接层,全连接层也可视为带有覆盖整个区域的卷积。将全连接层转换为卷积层能使网络输出由一维非空间输出变为二维矩阵,利用输出能生成输入图片映射的heatmap。\n", 
"\n", - " ![fcn-3](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/fcn_3.png)\n", + " ![fcn-3](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/fcn_3.png)\n", "\n", "2. 上采样(Upsample)\n", "\n", " 在卷积过程的卷积操作和池化操作会使得特征图的尺寸变小,为得到原图的大小的稠密图像预测,需要对得到的特征图进行上采样操作。使用双线性插值的参数来初始化上采样逆卷积的参数,后通过反向传播来学习非线性上采样。在网络中执行上采样,以通过像素损失的反向传播进行端到端的学习。\n", "\n", - " ![fcn-4](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/fcn_4.png)\n", + " ![fcn-4](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/fcn_4.png)\n", "\n", "3. 跳跃结构(Skip Layer)\n", "\n", " 利用上采样技巧对最后一层的特征图进行上采样得到原图大小的分割是步长为32像素的预测,称之为FCN-32s。由于最后一层的特征图太小,损失过多细节,采用skips结构将更具有全局信息的最后一层预测和更浅层的预测结合,使预测结果获取更多的局部细节。将底层(stride 32)的预测(FCN-32s)进行2倍的上采样得到原尺寸的图像,并与从pool4层(stride 16)进行的预测融合起来(相加),这一部分的网络被称为FCN-16s。随后将这一部分的预测再进行一次2倍的上采样并与从pool3层得到的预测融合起来,这一部分的网络被称为FCN-8s。 Skips结构将深层的全局信息与浅层的局部信息相结合。\n", "\n", - " ![fcn-5](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/fcn_5.png)\n", + " ![fcn-5](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/fcn_5.png)\n", "\n", "## 网络特点\n", "\n", @@ -265,7 +265,7 @@ "6. FCN-16s是将conv7的输出进行反卷积,使其尺寸扩大两倍至原图的1/16,并将其与pool4输出的特征图进行融合,后通过反卷积扩大到原始尺寸。\n", "7. FCN-8s是将conv7的输出进行反卷积扩大4倍,将pool4输出的特征图反卷积扩大2倍,并将pool3输出特征图拿出,三者融合后通反卷积扩大到原始尺寸。\n", "\n", - "![fcn-6](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/fcn_6.png)\n", + "![fcn-6](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/fcn_6.png)\n", "\n", "使用以下代码构建FCN-8s网络。" ] diff --git a/tutorials/source_zh_cn/cv/resnet50.ipynb b/tutorials/source_zh_cn/cv/resnet50.ipynb index 7343424eab..9909bae874 100644 --- a/tutorials/source_zh_cn/cv/resnet50.ipynb +++ b/tutorials/source_zh_cn/cv/resnet50.ipynb @@ -5,7 +5,7 @@ "id": "fa7e3e52", "metadata": {}, "source": [ - "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/cv/mindspore_resnet50.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/cv/mindspore_resnet50.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/cv/resnet50.ipynb)\n", + "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/cv/mindspore_resnet50.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/cv/mindspore_resnet50.py) 
[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/cv/resnet50.ipynb)\n", "\n", "# ResNet50图像分类\n", "\n", @@ -15,11 +15,11 @@ "\n", "ResNet50网络是2015年由微软实验室的何恺明提出,获得ILSVRC2015图像分类竞赛第一名。在ResNet网络提出之前,传统的卷积神经网络都是将一系列的卷积层和池化层堆叠得到的,但当网络堆叠到一定深度时,就会出现退化问题。下图是在CIFAR-10数据集上使用56层网络与20层网络训练误差和测试误差图,由图中数据可以看出,56层网络比20层网络训练误差和测试误差更大,随着网络的加深,其误差并没有如预想的一样减小。\n", "\n", - "![resnet-1](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/resnet_1.png)\n", + "![resnet-1](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/resnet_1.png)\n", "\n", "ResNet网络提出了残差网络结构(Residual Network)来减轻退化问题,使用ResNet网络可以实现搭建较深的网络结构(突破1000层)。论文中使用ResNet网络在CIFAR-10数据集上的训练误差与测试误差图如下图所示,图中虚线表示训练误差,实线表示测试误差。由图中数据可以看出,ResNet网络层数越深,其训练误差和测试误差越小。\n", "\n", - "![resnet-4](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/resnet_4.png)\n", + "![resnet-4](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/resnet_4.png)\n", "\n", "> 了解ResNet网络更多详细内容,参见[ResNet论文](https://arxiv.org/pdf/1512.03385.pdf)。" ] @@ -79,7 +79,7 @@ "\n", "```\n", "\n", - "然后,使用[mindspore.dataset.Cifar10Dataset](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.Cifar10Dataset.html)接口来加载数据集,并进行相关图像增强操作。" + "然后,使用[mindspore.dataset.Cifar10Dataset](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.Cifar10Dataset.html)接口来加载数据集,并进行相关图像增强操作。" ] }, { @@ -239,7 +239,7 @@ "\n", "残差网络结构图如下图所示,残差网络由两个分支构成:一个主分支,一个shortcuts(图中弧线表示)。主分支通过堆叠一系列的卷积操作得到,shortcuts从输入直接到输出,主分支输出的特征矩阵$F(x)$加上shortcuts输出的特征矩阵$x$得到$F(x)+x$,通过Relu激活函数后即为残差网络最后的输出。\n", "\n", - "![residual](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/resnet_3.png)\n", + "![residual](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/resnet_3.png)\n", "\n", "残差网络结构主要有两种,一种是Building Block,适用于较浅的ResNet网络,如ResNet18和ResNet34;另一种是Bottleneck,适用于层数较深的ResNet网络,如ResNet50、ResNet101和ResNet152。\n", "\n", @@ -252,7 +252,7 @@ "\n", "最后将主分支输出的特征矩阵与shortcuts输出的特征矩阵相加,通过Relu激活函数即为Building Block最后的输出。\n", "\n", - "![building-block-5](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/resnet_5.png)\n", + "![building-block-5](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/resnet_5.png)\n", "\n", "主分支与shortcuts输出的特征矩阵相加时,需要保证主分支与shortcuts输出的特征矩阵shape相同。如果主分支与shortcuts输出的特征矩阵shape不相同,如输出channel是输入channel的一倍时,shortcuts上需要使用数量与输出channel相等,大小为$1\\times1$的卷积核进行卷积操作;若输出的图像较输入图像缩小一倍,则要设置shortcuts中卷积操作中的`stride`为2,主分支第一层卷积操作的`stride`也需设置为2。\n", "\n", @@ -327,7 +327,7 @@ "\n", "最后将主分支输出的特征矩阵与shortcuts输出的特征矩阵相加,通过Relu激活函数即为Bottleneck最后的输出。\n", "\n", - "![building-block-6](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/resnet_6.png)\n", + "![building-block-6](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/resnet_6.png)\n", "\n", 
"主分支与shortcuts输出的特征矩阵相加时,需要保证主分支与shortcuts输出的特征矩阵shape相同。如果主分支与shortcuts输出的特征矩阵shape不相同,如输出channel是输入channel的一倍时,shortcuts上需要使用数量与输出channel相等,大小为$1\\times1$的卷积核进行卷积操作;若输出的图像较输入图像缩小一倍,则要设置shortcuts中卷积操作中的`stride`为2,主分支第二层卷积操作的`stride`也需设置为2。\n", "\n", @@ -393,7 +393,7 @@ "\n", "ResNet网络层结构如下图所示,以输入彩色图像$224\\times224$为例,首先通过数量64,卷积核大小为$7\\times7$,stride为2的卷积层conv1,该层输出图片大小为$112\\times112$,输出channel为64;然后通过一个$3\\times3$的最大下采样池化层,该层输出图片大小为$56\\times56$,输出channel为64;再堆叠4个残差网络块(conv2_x、conv3_x、conv4_x和conv5_x),此时输出图片大小为$7\\times7$,输出channel为2048;最后通过一个平均池化层、全连接层和softmax,得到分类概率。\n", "\n", - "![resnet-layer](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/resnet_2.png)\n", + "![resnet-layer](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/resnet_2.png)\n", "\n", "对于每个残差网络块,以ResNet50网络中的conv2_x为例,其由3个Bottleneck结构堆叠而成,每个Bottleneck输入的channel为64,输出channel为256。\n", "\n", diff --git a/tutorials/source_zh_cn/cv/ssd.ipynb b/tutorials/source_zh_cn/cv/ssd.ipynb index 4cf8d6c8c0..b49a2528e8 100644 --- a/tutorials/source_zh_cn/cv/ssd.ipynb +++ b/tutorials/source_zh_cn/cv/ssd.ipynb @@ -9,7 +9,7 @@ } }, "source": [ - "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/cv/mindspore_ssd.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/cv/mindspore_ssd.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/cv/ssd.ipynb)\n", + "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/cv/mindspore_ssd.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/cv/mindspore_ssd.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/cv/ssd.ipynb)\n", "\n", "# SSD目标检测\n", "\n" @@ -39,7 +39,7 @@ "\n", "SSD是单阶段的目标检测算法,通过卷积神经网络进行特征提取,取不同的特征层进行检测输出,所以SSD是一种多尺度的检测方法。在需要检测的特征层,直接使用一个3 $\\times$ 3卷积,进行通道的变换。SSD采用了anchor的策略,预设不同长宽比例的anchor,每一个输出特征层基于anchor预测多个检测框(4或者6)。采用了多尺度检测方法,浅层用于检测小目标,深层用于检测大目标。SSD的框架如下图:\n", "\n", - "![SSD-1](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/SSD_1.png)\n" + "![SSD-1](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/SSD_1.png)\n" ] }, { @@ -55,7 +55,7 @@ "\n", "SSD采用VGG16作为基础模型,然后在VGG16的基础上新增了卷积层来获得更多的特征图以用于检测。SSD的网络结构如图所示。上面是SSD模型,下面是YOLO模型,可以明显看到SSD利用了多尺度的特征图做检测。\n", "\n", - "![SSD-2](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/SSD_2.jpg)\n", + 
"![SSD-2](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/SSD_2.jpg)\n", "
\n", "\n", "两种单阶段目标检测算法的比较:
\n", @@ -474,7 +474,7 @@ "\n", "SSD的网络结构主要分为以下几个部分:\n", "\n", - "![SSD-3](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/SSD_3.jpg)\n", + "![SSD-3](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/SSD_3.jpg)\n", "\n", "- VGG16 Base Layer\n", "\n", @@ -488,7 +488,7 @@ "\n", "### Backbone Layer\n", "\n", - "![SSD-4](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/SSD_4.png)\n", + "![SSD-4](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/SSD_4.png)\n", "\n", "输入图像经过预处理后大小固定为300×300,首先经过backbone,本案例中使用的是VGG16网络的前13个卷积层,然后分别将VGG16的全连接层fc6和fc7转换成3 $\\times$ 3卷积层block6和1 $\\times$ 1卷积层block7,进一步提取特征。 在block6中,使用了空洞数为6的空洞卷积,其padding也为6,这样做同样也是为了增加感受野的同时保持参数量与特征图尺寸的不变。\n", "\n", @@ -496,7 +496,7 @@ "\n", "在VGG16的基础上,SSD进一步增加了4个深度卷积层,用于提取更高层的语义信息:\n", "\n", - "![SSD-5](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/SSD_5.png)\n", + "![SSD-5](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/SSD_5.png)\n", "\n", "block8-11,用于更高语义信息的提取。block8的通道数为512,而block9、block10与block11的通道数都为256。从block7到block11,这5个卷积后输出特征图的尺寸依次为19×19、10×10、5×5、3×3和1×1。为了降低参数量,使用了1×1卷积先降低通道数为该层输出通道数的一半,再利用3×3卷积进行特征提取。\n", "\n", @@ -506,25 +506,25 @@ "PriorBox生成规则:\n", "SSD由6个特征层来检测目标,在不同特征层上,PriorBox的尺寸scale大小是不一样的,最低层的scale=0.1,最高层的scale=0.95,其他层的计算公式如下:\n", "\n", - "![SSD-6](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/SSD_6.jpg)\n", + "![SSD-6](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/SSD_6.jpg)\n", "\n", "在某个特征层上其scale一定,那么会设置不同长宽比ratio的PriorBox,其长和宽的计算公式如下:\n", "\n", - "![SSD-7](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/SSD_7.jpg)\n", + "![SSD-7](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/SSD_7.jpg)\n", "\n", "在ratio=1的时候,还会根据该特征层和下一个特征层计算一个特定scale的PriorBox(长宽比ratio=1),计算公式如下:\n", "\n", - "![SSD-8](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/SSD_8.jpg)\n", + "![SSD-8](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/SSD_8.jpg)\n", "\n", "每个特征层的每个点都会以上述规则生成PriorBox,(cx,cy)由当前点的中心点来确定,由此每个特征层都生成大量密集的PriorBox,如下图:\n", "\n", - "![SSD-9](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/SSD_9.png)\n", + "![SSD-9](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/SSD_9.png)\n", "\n", "SSD使用了第4、7、8、9、10和11这6个卷积层得到的特征图,这6个特征图尺寸越来越小,而其对应的感受野越来越大。6个特征图上的每一个点分别对应4、6、6、6、4、4个PriorBox。某个特征图上的一个点根据下采样率可以得到在原图的坐标,以该坐标为中心生成4个或6个不同大小的PriorBox,然后利用特征图的特征去预测每一个PriorBox对应类别与位置的预测量。例如:第8个卷积层得到的特征图大小为10×10×512,每个点对应6个PriorBox,一共有600个PriorBox。定义MultiBox类,生成多个预测框。\n", "\n", "### Detection Layer\n", "\n", - "![SSD-10](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/SSD_10.jpg)\n", + 
"![SSD-10](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/SSD_10.jpg)\n", "\n", "SSD模型一共有6个预测特征图,对于其中一个尺寸为m\\*n,通道为p的预测特征图,假设其每个像素点会产生k个anchor,每个anchor会对应c个类别和4个回归偏移量,使用(4+c)k个尺寸为3x3,通道为p的卷积核对该预测特征图进行卷积操作,得到尺寸为m\\*n,通道为(4+c)m\\*k的输出特征图,它包含了预测特征图上所产生的每个anchor的回归偏移量和各类别概率分数。所以对于尺寸为m\\*n的预测特征图,总共会产生(4+c)k\\*m\\*n个结果。cls分支的输出通道数为k\\*class_num,loc分支的输出通道数为k\\*4。" ] @@ -750,7 +750,7 @@ "\n", "SSD算法的目标函数分为两部分:计算相应的预选框与目标类别的置信度误差(confidence loss, conf)以及相应的位置误差(locatization loss, loc):\n", "\n", - "![SSD-11](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/SSD_11.jpg)\n", + "![SSD-11](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/SSD_11.jpg)\n", "\n", "其中:
\n", "N 是先验框的正样本数量;
\n", @@ -763,13 +763,13 @@ "\n", "针对所有的正样本,采用 Smooth L1 Loss, 位置信息都是 encode 之后的位置信息。\n", "\n", - "![SSD-12](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/SSD_12.jpg)\n", + "![SSD-12](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/SSD_12.jpg)\n", "\n", "### 对于置信度损失函数\n", "\n", "置信度损失是多类置信度(c)上的softmax损失。\n", "\n", - "![SSD-13](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/SSD_13.jpg)" + "![SSD-13](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/SSD_13.jpg)" ] }, { @@ -1029,7 +1029,7 @@ "\n", "3. 如果多个gt和某一个prior的IOU均大于阈值,那么prior只与IOU最大的那个进行匹配。\n", "\n", - "![SSD-14](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/SSD_14.jpg)\n", + "![SSD-14](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/SSD_14.jpg)\n", "\n", "如上图所示,训练过程中的 prior boxes 和 ground truth boxes 的匹配,基本思路是:让每一个 prior box 回归并且到 ground truth box,这个过程的调控我们需要损失层的帮助,他会计算真实值和预测值之间的误差,从而指导学习的走向。\n", "\n", @@ -1330,13 +1330,13 @@ "\n", "- 精确率(Average Precision,AP):\n", "\n", - " ![SSD-15](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/SSD_15.jpg)\n", + " ![SSD-15](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/SSD_15.jpg)\n", "\n", " 精确率是将正样本预测正确的结果与正样本预测的结果和预测错误的结果的和的比值,主要反映出预测结果错误率。\n", "\n", "- 召回率(Average Recall,AR):\n", "\n", - " ![SSD-16](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/SSD_16.jpg)\n", + " ![SSD-16](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/SSD_16.jpg)\n", "\n", " 召回率是正样本预测正确的结果与正样本预测正确的结果和正样本预测错误的和的比值,主要反映出来的是预测结果中的漏检率。\n", "\n", diff --git a/tutorials/source_zh_cn/cv/transfer_learning.ipynb b/tutorials/source_zh_cn/cv/transfer_learning.ipynb index 39abee702c..b2d4d1c6c2 100644 --- a/tutorials/source_zh_cn/cv/transfer_learning.ipynb +++ b/tutorials/source_zh_cn/cv/transfer_learning.ipynb @@ -5,7 +5,7 @@ "id": "21d983ad", "metadata": {}, "source": [ - "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/cv/mindspore_transfer_learning.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/cv/mindspore_transfer_learning.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/cv/transfer_learning.ipynb)\n", + "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/cv/mindspore_transfer_learning.ipynb) 
[![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/cv/mindspore_transfer_learning.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/cv/transfer_learning.ipynb)\n", "\n", "# ResNet50迁移学习\n", "\n", @@ -83,7 +83,7 @@ "source": [ "## 加载数据集\n", "\n", - "狼狗数据集提取自ImageNet分类数据集,使用[mindspore.dataset.ImageFolderDataset](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.ImageFolderDataset.html)接口来加载数据集,并进行相关图像增强操作。 \n", + "狼狗数据集提取自ImageNet分类数据集,使用[mindspore.dataset.ImageFolderDataset](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.ImageFolderDataset.html)接口来加载数据集,并进行相关图像增强操作。 \n", "\n", "首先执行过程定义一些输入:" ] @@ -177,7 +177,7 @@ "source": [ "### 数据集可视化\n", "\n", - "从`mindspore.dataset.ImageFolderDataset`接口中加载的训练数据集返回值为字典,用户可通过 [create_dict_iterator](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_dict_iterator.html) 接口创建数据迭代器,使用 `next` 迭代访问数据集。本章中 `batch_size` 设为18,所以使用 `next` 一次可获取18个图像及标签数据。" + "从`mindspore.dataset.ImageFolderDataset`接口中加载的训练数据集返回值为字典,用户可通过 [create_dict_iterator](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_dict_iterator.html) 接口创建数据迭代器,使用 `next` 迭代访问数据集。本章中 `batch_size` 设为18,所以使用 `next` 一次可获取18个图像及标签数据。" ] }, { diff --git a/tutorials/source_zh_cn/cv/vit.ipynb b/tutorials/source_zh_cn/cv/vit.ipynb index 9d7495fe3a..572583de36 100644 --- a/tutorials/source_zh_cn/cv/vit.ipynb +++ b/tutorials/source_zh_cn/cv/vit.ipynb @@ -9,7 +9,7 @@ } }, "source": [ - "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/cv/mindspore_vit.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/cv/mindspore_vit.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/cv/vit.ipynb)\n", + "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/cv/mindspore_vit.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/cv/mindspore_vit.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/cv/vit.ipynb)\n", "\n", "# Vision Transformer图像分类\n", "\n", @@ -35,7 +35,7 @@ "\n", "ViT模型的主体结构是基于Transformer模型的Encoder部分(部分结构顺序有调整,如:Normalization的位置与标准Transformer不同),其结构图[1]如下:\n", "\n", - 
"![vit-architecture](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/vit_architecture.png)\n", + "![vit-architecture](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/vit_architecture.png)\n", "\n", "### 模型特点\n", "\n", @@ -161,11 +161,11 @@ "\n", "Transformer模型源于2017年的一篇文章[2]。在这篇文章中提出的基于Attention机制的编码器-解码器型结构在自然语言处理领域获得了巨大的成功。模型结构如下图所示:\n", "\n", - "![transformer-architecture](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/transformer_architecture.png)\n", + "![transformer-architecture](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/transformer_architecture.png)\n", "\n", "其主要结构为多个Encoder和Decoder模块所组成,其中Encoder和Decoder的详细结构如下图[2]所示:\n", "\n", - "![encoder-decoder](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/encoder_decoder.png)\n", + "![encoder-decoder](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/encoder_decoder.png)\n", "\n", "Encoder与Decoder由许多结构组成,如:多头注意力(Multi-Head Attention)层,Feed Forward层,Normaliztion层,甚至残差连接(Residual Connection,图中的“Add”)。不过,其中最重要的结构是多头注意力(Multi-Head Attention)结构,该结构基于自注意力(Self-Attention)机制,是多个Self-Attention的并行组成。\n", "\n", @@ -198,7 +198,7 @@ "\\tag{1}\n", "$$\n", "\n", - "![self-attention1](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/self_attention_1.png)\n", + "![self-attention1](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/self_attention_1.png)\n", "\n", "2. 自注意力机制的自注意主要体现在它的Q,K,V都来源于其自身,也就是该过程是在提取输入的不同顺序的向量的联系与特征,最终通过不同顺序向量之间的联系紧密性(Q与K乘积经过Softmax的结果)来表现出来。**Q,K,V得到后就需要获取向量间权重,需要对Q和K进行点乘并除以维度的平方根,对所有向量的结果进行Softmax处理,通过公式(2)的操作,我们获得了向量之间的关系权重。**\n", "\n", @@ -211,11 +211,11 @@ "\\tag{2}\n", "$$\n", "\n", - "![self-attention3](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/self_attention_3.png)\n", + "![self-attention3](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/self_attention_3.png)\n", "\n", "$$ Softmax: \\hat a_{1,i} = exp(a_{1,i}) / \\sum_j exp(a_{1,j}),\\hspace{1em} j = 1,2,3 \\ldots \\tag{3}$$\n", "\n", - "![self-attention2](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/self_attention_2.png)\n", + "![self-attention2](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/self_attention_2.png)\n", "\n", "3. 
其最终输出则是通过V这个映射后的向量与Q,K经过Softmax结果进行weight sum获得,这个过程可以理解为在全局上进行自注意表示。**每一组Q,K,V最后都有一个V输出,这是Self-Attention得到的最终结果,是当前向量在结合了它与其他向量关联权重后得到的结果。**\n", "\n", @@ -226,7 +226,7 @@ "\n", "通过下图可以整体把握Self-Attention的全部过程。\n", "\n", - "![self-attention](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/self_attention_process.png)\n", + "![self-attention](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/self_attention_process.png)\n", "\n", "多头注意力机制就是将原本self-Attention处理的向量分割为多个Head进行处理,这一点也可以从代码中体现,这也是attention结构可以进行并行加速的一个方面。\n", "\n", @@ -234,7 +234,7 @@ "\n", "所以,对于同一个输入向量,多个注意力机制可以同时对其进行处理,即利用并行计算加速处理过程,又在处理的时候更充分的分析和利用了向量特征。下图展示了多头注意力机制,其并行能力的主要体现在下图中的$a_1$和$a_2$是同一个向量进行分割获得的。\n", "\n", - "![multi-head-attention](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/multi_head_attention.png)\n", + "![multi-head-attention](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/multi_head_attention.png)\n", "\n", "以下是Multi-Head Attention代码,结合上文的解释,代码清晰的展现了这一过程。" ] @@ -368,7 +368,7 @@ "source": [ "接下来就利用Self-Attention来构建ViT模型中的TransformerEncoder部分,类似于构建了一个Transformer的编码器部分,如下图[1]所示:\n", "\n", - "![vit-encoder](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/vit_encoder.png)\n", + "![vit-encoder](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/vit_encoder.png)\n", "\n", "1. ViT模型中的基础结构与标准Transformer有所不同,主要在于Normalization的位置是放在Self-Attention和Feed Forward之前,其他结构如Residual Connection,Feed Forward,Normalization都如Transformer中所设计。\n", "\n", @@ -617,7 +617,7 @@ "source": [ "整体流程图如下所示:\n", "\n", - "![data-process](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/data_process.png)" + "![data-process](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/data_process.png)" ] }, { @@ -767,7 +767,7 @@ "source": [ "### 模型验证\n", "\n", - "模型验证过程主要应用了[ImageFolderDataset](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.ImageFolderDataset.html),CrossEntropySmooth和Model等接口。\n", + "模型验证过程主要应用了[ImageFolderDataset](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.ImageFolderDataset.html),CrossEntropySmooth和Model等接口。\n", "\n", "ImageFolderDataset主要用于读取数据集。\n", "\n", @@ -1092,7 +1092,7 @@ "source": [ "推理过程完成后,在推理文件夹下可以找到图片的推理结果,可以看出预测结果是Doberman,与期望结果相同,验证了模型的准确性。\n", "\n", - "![infer-result](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/infer_result.jpg)" + "![infer-result](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/infer_result.jpg)" ] }, { diff --git a/tutorials/source_zh_cn/dataset/augment.ipynb b/tutorials/source_zh_cn/dataset/augment.ipynb index 78ba7192be..91b377b08c 100644 --- a/tutorials/source_zh_cn/dataset/augment.ipynb +++ b/tutorials/source_zh_cn/dataset/augment.ipynb @@ -6,7 +6,7 @@ "source": [ "# 自动数据增强\n", "\n", - 
"[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/dataset/mindspore_augment.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/dataset/mindspore_augment.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/dataset/augment.ipynb)" + "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/dataset/mindspore_augment.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/dataset/mindspore_augment.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/dataset/augment.ipynb)" ] }, { @@ -469,7 +469,7 @@ "source": [ "> 为了更好地演示效果,此处只加载5张图片,且读取时不进行`shuffle`操作,自动数据增强时也不进行`Normalize`和`HWC2CHW`操作。\n", "\n", - "![augment](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/dataset/images/auto_augmentation.png)\n", + "![augment](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/dataset/images/auto_augmentation.png)\n", "\n", "运行结果可以看到,batch中每张图像的增强效果,垂直方向表示1个batch的5张图像,水平方向表示5个batch。\n", "\n", diff --git a/tutorials/source_zh_cn/dataset/cache.ipynb b/tutorials/source_zh_cn/dataset/cache.ipynb index 23c7efb4bc..d8f13f0748 100644 --- a/tutorials/source_zh_cn/dataset/cache.ipynb +++ b/tutorials/source_zh_cn/dataset/cache.ipynb @@ -7,7 +7,7 @@ "source": [ "# 单节点数据缓存\n", "\n", - "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/dataset/mindspore_cache.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/dataset/mindspore_cache.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/dataset/cache.ipynb)" + "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/dataset/mindspore_cache.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/dataset/mindspore_cache.py) 
[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/dataset/cache.ipynb)" ] }, { @@ -33,12 +33,12 @@ "\n", " 用户可以在数据集加载操作中使用缓存。首先把加载完成的数据存到缓存服务器中,后续若需相同数据则可直接从缓存中读取,避免从磁盘中重复加载。\n", "\n", - " ![cache on leaf pipeline](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/dataset/images/cache_dataset.png)\n", + " ![cache on leaf pipeline](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/dataset/images/cache_dataset.png)\n", "- 缓存经过数据增强处理后的数据\n", "\n", " 用户也可在`map`操作中使用缓存。预先缓存数据增强(如图像裁剪、缩放等)处理后的数据,避免数据增强操作重复进行,减少不必要的计算量。\n", "\n", - " ![cache on map pipeline](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/dataset/images/cache_processed_data.png)" + " ![cache on map pipeline](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/dataset/images/cache_processed_data.png)" ] }, { @@ -668,7 +668,7 @@ "done\n", "```\n", "\n", - "> 直接获取完整样例代码:[cache.sh](https://gitee.com/mindspore/docs/blob/master/docs/sample_code/cache/cache.sh)" + "> 直接获取完整样例代码:[cache.sh](https://gitee.com/mindspore/docs/blob/br_base/docs/sample_code/cache/cache.sh)" ] }, { @@ -735,7 +735,7 @@ " print(\"Got {} samples on device {}\".format(num_iter, args_opt.device))\n", "```\n", "\n", - "> 直接获取完整样例代码:[my_training_script.py](https://gitee.com/mindspore/docs/blob/master/docs/sample_code/cache/my_training_script.py)" + "> 直接获取完整样例代码:[my_training_script.py](https://gitee.com/mindspore/docs/blob/br_base/docs/sample_code/cache/my_training_script.py)" ] }, { @@ -862,7 +862,7 @@ " }\n", " ```\n", "\n", - " > 直接获取完整样例代码:[cache_util.sh](https://gitee.com/mindspore/docs/blob/master/docs/sample_code/cache/cache_util.sh)\n", + " > 直接获取完整样例代码:[cache_util.sh](https://gitee.com/mindspore/docs/blob/br_base/docs/sample_code/cache/cache_util.sh)\n", "\n", "2. 
在启动NFS数据集训练的Shell脚本`run_train_nfs_cache.sh`中,开启缓存服务器,并生成一个缓存会话,保存在Shell变量`CACHE_SESSION_ID`中:\n", "\n", diff --git a/tutorials/source_zh_cn/dataset/dataset_autotune.md b/tutorials/source_zh_cn/dataset/dataset_autotune.md index 3fba31d850..44f7fb4cc8 100644 --- a/tutorials/source_zh_cn/dataset/dataset_autotune.md +++ b/tutorials/source_zh_cn/dataset/dataset_autotune.md @@ -1,6 +1,6 @@ # 自动数据加速 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/dataset/dataset_autotune.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/dataset/dataset_autotune.md) ## 概述 @@ -53,7 +53,7 @@ print("tuning interval:", ds.config.get_autotune_interval()) ## 约束 - Profiling性能分析和自动数据加速无法同时开启,因为Profilling的其他处理会干扰自动数据加速进程。如果同时开启这两个功能,则会有一条警告信息提示用户检查是否为误操作。因此在使用Dataset AutoTune时,用户需要确保关闭Profiling功能。 -- 如果同时启动了[数据异构加速](https://www.mindspore.cn/tutorials/zh-CN/master/dataset/dataset_offload.html)和自动数据加速,当有数据节点通过AutoTune进行异构硬件加速时,自动数据加速将不能保存数据管道配置并以警告日志提醒,因为此时实际运行的数据管道并不是预先定义的数据管道。 +- 如果同时启动了[数据异构加速](https://www.mindspore.cn/tutorials/zh-CN/br_base/dataset/dataset_offload.html)和自动数据加速,当有数据节点通过AutoTune进行异构硬件加速时,自动数据加速将不能保存数据管道配置并以警告日志提醒,因为此时实际运行的数据管道并不是预先定义的数据管道。 - 如果数据处理管道包含不支持反序列化的节点(如用户自定义Python函数、GeneratorDataset),则使用保存的优化配置文件进行反序列化时将产生错误。此时推荐用户根据调优配置文件的内容手动修改数据管道的配置已达到加速的目的。 - 在分布式多卡训练启动自动数据加速时,`set_enable_autotune()` 需要在集群初始化完成后才能执行(mindspore.communication.management.init()),否则自动数据加速只会识别到ID为0的设备,且只会生成单个调优文件(预期生成文件数量应与设备数量相等),见以下样例: @@ -247,7 +247,7 @@ new_dataset = ds.deserialize("/path/to/autotune_out_0.json") 在进行下一次训练之前,用户可以根据自动数据加速模块输出的推荐配置,对数据集加载部分的代码进行调整,以便在下一次训练的开始时就可以在较优性能水平下运行数据处理管道。 -另外,MindSpore也提供了相关的API用于全局调整数据处理管道操作的并行度与内部队列深度,请参考[mindspore.dataset.config](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.dataset.loading.html#%E9%85%8D%E7%BD%AE)。 +另外,MindSpore也提供了相关的API用于全局调整数据处理管道操作的并行度与内部队列深度,请参考[mindspore.dataset.config](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.dataset.loading.html#%E9%85%8D%E7%BD%AE)。 -- [mindspore.dataset.config.set_num_parallel_workers](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.config.set_num_parallel_workers.html#mindspore.dataset.config.set_num_parallel_workers) -- [mindspore.dataset.config.set_prefetch_size](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.config.set_prefetch_size.html#mindspore.dataset.config.set_prefetch_size) +- [mindspore.dataset.config.set_num_parallel_workers](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.config.set_num_parallel_workers.html#mindspore.dataset.config.set_num_parallel_workers) +- [mindspore.dataset.config.set_prefetch_size](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.config.set_prefetch_size.html#mindspore.dataset.config.set_prefetch_size) diff --git a/tutorials/source_zh_cn/dataset/dataset_offload.md b/tutorials/source_zh_cn/dataset/dataset_offload.md index 61548724cd..0768edd498 100644 --- a/tutorials/source_zh_cn/dataset/dataset_offload.md +++ b/tutorials/source_zh_cn/dataset/dataset_offload.md @@ -1,6 +1,6 @@ # 数据准备异构加速 
-[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/dataset/dataset_offload.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/dataset/dataset_offload.md) ## 概述 diff --git a/tutorials/source_zh_cn/dataset/eager.ipynb b/tutorials/source_zh_cn/dataset/eager.ipynb index 360f5fbcb5..e34f59d5d9 100644 --- a/tutorials/source_zh_cn/dataset/eager.ipynb +++ b/tutorials/source_zh_cn/dataset/eager.ipynb @@ -7,7 +7,7 @@ "source": [ "# 数据操作/数据变换\n", "\n", - "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/dataset/mindspore_eager.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/dataset/mindspore_eager.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/dataset/eager.ipynb)\n" + "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/dataset/mindspore_eager.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/dataset/mindspore_eager.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/dataset/eager.ipynb)\n" ] }, { @@ -17,7 +17,7 @@ "source": [ "## 数据操作\n", "\n", - "`mindspore.dataset` 提供了一系列数据集操作,用户可通过这些操作(如 [.shuffle](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.shuffle.html#mindspore.dataset.Dataset.shuffle) 、 [.filter](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.filter.html#mindspore.dataset.Dataset.filter) 、 [.skip](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.skip.html#mindspore.dataset.Dataset.skip) 、 [.take](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.take.html#mindspore.dataset.Dataset.take) 、 [.batch](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/dataset_method/batch/mindspore.dataset.Dataset.batch.html#mindspore.dataset.Dataset.batch) 等)实现数据集的混洗、过滤、跳过、批处理组合等功能。\n", + "`mindspore.dataset` 提供了一系列数据集操作,用户可通过这些操作(如 [.shuffle](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.shuffle.html#mindspore.dataset.Dataset.shuffle) 、 
[.filter](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.filter.html#mindspore.dataset.Dataset.filter) 、 [.skip](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.skip.html#mindspore.dataset.Dataset.skip) 、 [.take](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.take.html#mindspore.dataset.Dataset.take) 、 [.batch](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/dataset_method/batch/mindspore.dataset.Dataset.batch.html#mindspore.dataset.Dataset.batch) 等)实现数据集的混洗、过滤、跳过、批处理组合等功能。\n", "\n", "常用数据变换操作包括:\n", "\n", @@ -331,17 +331,17 @@ "\n", "在Eager模式下,Transforms以函数式调用的方式执行,代码更为简洁,且能立即获得运行结果。推荐在小型数据变换实验、模型推理等轻量化场景中使用。\n", "\n", - "![eagermode1](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/dataset/images/eager_mode.jpeg)\n", + "![eagermode1](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/dataset/images/eager_mode.jpeg)\n", "\n", "MindSpore目前支持在Eager模式执行各种Transform,具体如下所示,更多数据变换接口参见API文档。\n", "\n", - "- [vision模块](https://mindspore.cn/docs/zh-CN/master/api_python/mindspore.dataset.transforms.html#视觉),基于OpenCV/Pillow实现的数据变换。\n", + "- [vision模块](https://mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.dataset.transforms.html#视觉),基于OpenCV/Pillow实现的数据变换。\n", "\n", - "- [text模块](https://mindspore.cn/docs/zh-CN/master/api_python/mindspore.dataset.transforms.html#文本),基于Jieba/ICU4C等库实现的数据变换。\n", + "- [text模块](https://mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.dataset.transforms.html#文本),基于Jieba/ICU4C等库实现的数据变换。\n", "\n", - "- [audio模块](https://mindspore.cn/docs/zh-CN/master/api_python/mindspore.dataset.transforms.html#音频),基于C++实现的数据变换。\n", + "- [audio模块](https://mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.dataset.transforms.html#音频),基于C++实现的数据变换。\n", "\n", - "- [transforms模块](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.dataset.transforms.html#通用),基于C++/Python/NumPy实现的通用数据变换。\n", + "- [transforms模块](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.dataset.transforms.html#通用),基于C++/Python/NumPy实现的通用数据变换。\n", "\n", "下面简要介绍各Transforms模块的Eager模式使用方法。使用Eager模式,只需要将Transform当成可执行函数调用即可。" ] @@ -398,7 +398,7 @@ "\n", "此示例将使用`mindspore.dataset.vision`模块中的Transform,对给定图像进行变换。\n", "\n", - "Vision Transform的Eager模式支持`numpy.array`或`PIL.Image`类型数据作为入参。更多示例请参考:[样例库](https://www.mindspore.cn/docs/zh-CN/master/api_python/samples/dataset/vision_gallery.html)" + "Vision Transform的Eager模式支持`numpy.array`或`PIL.Image`类型数据作为入参。更多示例请参考:[样例库](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/samples/dataset/vision_gallery.html)" ] }, { @@ -470,7 +470,7 @@ "\n", "此示例将使用`text`模块中的Transforms,对给定文本进行变换。\n", "\n", - "Text Transforms的Eager模式支持`numpy.array`类型数据作为入参。更多示例请参考:[样例库](https://www.mindspore.cn/docs/zh-CN/master/api_python/samples/dataset/text_gallery.html)" + "Text Transforms的Eager模式支持`numpy.array`类型数据作为入参。更多示例请参考:[样例库](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/samples/dataset/text_gallery.html)" ] }, { @@ -513,7 +513,7 @@ "\n", "此示例将使用`audio`模块中的Transforms,对给定音频进行变换。\n", "\n", - "Audio Transforms的Eager模式支持`numpy.array`类型数据作为入参。更多示例请参考:[样例库](https://www.mindspore.cn/docs/zh-CN/master/api_python/samples/dataset/audio_gallery.html)" + "Audio 
Transforms的Eager模式支持`numpy.array`类型数据作为入参。更多示例请参考:[样例库](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/samples/dataset/audio_gallery.html)" ] }, { diff --git a/tutorials/source_zh_cn/dataset/optimize.ipynb b/tutorials/source_zh_cn/dataset/optimize.ipynb index 5d74e1ecf0..cba837f392 100644 --- a/tutorials/source_zh_cn/dataset/optimize.ipynb +++ b/tutorials/source_zh_cn/dataset/optimize.ipynb @@ -6,7 +6,7 @@ "source": [ "# 数据处理性能优化\n", "\n", - "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/dataset/mindspore_optimize.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/dataset/mindspore_optimize.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/dataset/optimize.ipynb)" + "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/dataset/mindspore_optimize.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/dataset/mindspore_optimize.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/dataset/optimize.ipynb)" ] }, { @@ -20,7 +20,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "![pipeline](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/dataset/images/pipeline.png)" + "![pipeline](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/dataset/images/pipeline.png)" ] }, { @@ -123,7 +123,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "![data-loading-performance-scheme](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/dataset/images/data_loading_performance_scheme.png)" + "![data-loading-performance-scheme](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/dataset/images/data_loading_performance_scheme.png)" ] }, { @@ -132,11 +132,11 @@ "source": [ "数据加载性能优化建议如下:\n", "\n", - "- 对于已经提供加载接口的常用数据集,优先使用MindSpore提供的数据集加载接口,可以获得较好的加载性能。具体内容请参考框架提供的[数据集加载接口](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.dataset.html)。如果性能仍无法满足需求,可通过增大数据集接口参数`num_parallel_workers`(默认值:8)的值来提升性能。\n", + "- 对于已经提供加载接口的常用数据集,优先使用MindSpore提供的数据集加载接口,可以获得较好的加载性能。具体内容请参考框架提供的[数据集加载接口](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.dataset.html)。如果性能仍无法满足需求,可通过增大数据集接口参数`num_parallel_workers`(默认值:8)的值来提升性能。\n", "\n", - "- 
对于不支持的数据集格式,建议先将数据集转换为MindRecord数据格式,再使用`MindDataset`类进行加载(详细使用方法参考[API](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.MindDataset.html))。具体内容请参考[将数据集转换为MindSpore数据格式](https://www.mindspore.cn/tutorials/zh-CN/master/dataset/record.html)。如果性能仍无法满足需求,可通过增大`num_parallel_workers`(默认值:8)的值来提升性能。\n", + "- 对于不支持的数据集格式,建议先将数据集转换为MindRecord数据格式,再使用`MindDataset`类进行加载(详细使用方法参考[API](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.MindDataset.html))。具体内容请参考[将数据集转换为MindSpore数据格式](https://www.mindspore.cn/tutorials/zh-CN/br_base/dataset/record.html)。如果性能仍无法满足需求,可通过增大`num_parallel_workers`(默认值:8)的值来提升性能。\n", "\n", - "- 对于不支持的数据集格式,在算法快速验证场景下,优选使用用户自定义的`GeneratorDataset`类实现(详细使用方法参考[API](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.GeneratorDataset.html))。如果性能仍无法满足需求,则可采取多进程/多线程并发方案,即:\n", + "- 对于不支持的数据集格式,在算法快速验证场景下,优选使用用户自定义的`GeneratorDataset`类实现(详细使用方法参考[API](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.GeneratorDataset.html))。如果性能仍无法满足需求,则可采取多进程/多线程并发方案,即:\n", "\n", " 1. 增大数据集接口参数`num_parallel_workers`(默认值:1)的值,以提升并发度;\n", "\n", @@ -151,7 +151,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "基于以上的数据加载性能优化建议,本次体验分别使用框架提供的数据集加载操作`Cifar10Dataset`类(详细使用方法参考[API](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.Cifar10Dataset.html))、数据转换后使用`MindDataset`类、使用`GeneratorDataset`类进行数据加载,代码演示如下:\n", + "基于以上的数据加载性能优化建议,本次体验分别使用框架提供的数据集加载操作`Cifar10Dataset`类(详细使用方法参考[API](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.Cifar10Dataset.html))、数据转换后使用`MindDataset`类、使用`GeneratorDataset`类进行数据加载,代码演示如下:\n", "\n", "1. 使用数据集加载操作`Cifar10Dataset`类加载CIFAR-10数据集,这里使用的是CIFAR-10二进制格式的数据集,加载数据时采取多线程优化方案,开启了4个线程并发完成任务,最后对数据创建了字典迭代器,并通过迭代器读取了一条数据记录。" ] @@ -324,9 +324,9 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "shuffle操作用于对有序数据集或经过repeat的数据集进行混洗。MindSpore提供了`shuffle`函数,它是基于内存缓存实现的。其中设定的`buffer_size`参数越大,混洗程度越大,但会增加内存和时间消耗。该接口支持在pipeline的任意阶段对数据进行混洗,具体内容请参考[shuffle处理](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.shuffle.html#mindspore-dataset-dataset-shuffle)。\n", + "shuffle操作用于对有序数据集或经过repeat的数据集进行混洗。MindSpore提供了`shuffle`函数,它是基于内存缓存实现的。其中设定的`buffer_size`参数越大,混洗程度越大,但会增加内存和时间消耗。该接口支持在pipeline的任意阶段对数据进行混洗,具体内容请参考[shuffle处理](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.shuffle.html#mindspore-dataset-dataset-shuffle)。\n", "\n", - "但由于其基于内存缓存方式实现,性能不如直接在[数据集加载操作](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.dataset.html)中设置`shuffle=True`(默认值)参数进行混洗。" + "但由于其基于内存缓存方式实现,性能不如直接在[数据集加载操作](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.dataset.html)中设置`shuffle=True`(默认值)参数进行混洗。" ] }, { @@ -340,7 +340,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "![shuffle-performance-scheme](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/dataset/images/shuffle_performance_scheme.png)" + "![shuffle-performance-scheme](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/dataset/images/shuffle_performance_scheme.png)" ] }, { @@ -495,11 +495,11 @@ "source": [ "在训练任务中,尤其是当数据集比较小的时候,用户可以使用数据增强的方法来预处理图片,达到丰富数据集的目的。MindSpore为用户提供了多种数据增强操作,其中包括:\n", "\n", - "- 
Vision类数据增强操作,主要基于C++实现,见[Vision数据增强](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.dataset.transforms.html#%E8%A7%86%E8%A7%89)。\n", + "- Vision类数据增强操作,主要基于C++实现,见[Vision数据增强](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.dataset.transforms.html#%E8%A7%86%E8%A7%89)。\n", "\n", - "- NLP类数据增强操作,主要基于C++实现,见[NLP数据增强](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.dataset.transforms.html#%E6%96%87%E6%9C%AC)。\n", + "- NLP类数据增强操作,主要基于C++实现,见[NLP数据增强](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.dataset.transforms.html#%E6%96%87%E6%9C%AC)。\n", "\n", - "- Audio类数据增强操作,主要基于C++实现,见[Audio数据增强](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.dataset.transforms.html#%E9%9F%B3%E9%A2%91)。\n", + "- Audio类数据增强操作,主要基于C++实现,见[Audio数据增强](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.dataset.transforms.html#%E9%9F%B3%E9%A2%91)。\n", "\n", "- 用户可根据需求,自定义Python数据增强函数(Python实现)。\n", "\n", @@ -527,7 +527,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "![data-enhancement-performance-scheme](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/dataset/images/data_enhancement_performance_scheme.png)" + "![data-enhancement-performance-scheme](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/dataset/images/data_enhancement_performance_scheme.png)" ] }, { @@ -545,13 +545,13 @@ " 2. 融合算子优化\n", "\n", " 当CPU占用率比较高时(如:单机多卡训练),使用融合操作(将多个操作聚合为一个)降低CPU占用,提升性能。可以通过设置环境变量`export OPTIMIZE=true`来使其生效。融合示例如下:\n", - " ![operation-fusion](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/dataset/images/operation_fusion.png)\n", + " ![operation-fusion](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/dataset/images/operation_fusion.png)\n", "\n", " 3. Compose优化\n", "\n", " 当CPU占用率比较高时(如:单机多卡训练),通过一个map操作接收多个增强操作(会按照顺序应用这些操作),降低CPU竞争,提升性能。示例如下:\n", "\n", - " ![compose](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/dataset/images/compose.png)" + " ![compose](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/dataset/images/compose.png)" ] }, { @@ -698,7 +698,7 @@ "\n", " 数据加载过程涉及频繁的磁盘操作,磁盘读写性能直接影响数据加载速度。当数据集较大时,推荐使用固态硬盘存储,固态硬盘的读写速度普遍高于普通磁盘,能够减少I/O操作对数据处理性能的影响。\n", "\n", - " 通常,加载后的数据将会被缓存到操作系统的页面缓存中,在一定程度上降低了后续读取的开销,加快后续Epoch的数据加载速度。用户也可以通过MindSpore提供的[单节点缓存技术](https://www.mindspore.cn/tutorials/zh-CN/master/dataset/cache.html),手动缓存加载增强后的数据,避免重复加载和增强。\n", + " 通常,加载后的数据将会被缓存到操作系统的页面缓存中,在一定程度上降低了后续读取的开销,加快后续Epoch的数据加载速度。用户也可以通过MindSpore提供的[单节点缓存技术](https://www.mindspore.cn/tutorials/zh-CN/br_base/dataset/cache.html),手动缓存加载增强后的数据,避免重复加载和增强。\n", "\n", "2. 
NUMA架构\n", "\n", @@ -778,7 +778,7 @@ "source": [ "## 自动数据加速\n", "\n", - "MindSpore提供了一种自动数据调优的工具——Dataset AutoTune,可在训练过程中根据环境资源自动调整数据处理管道的并行度,最大化利用系统资源与,加速数据处理。详细用法请参考[自动数据加速](https://www.mindspore.cn/tutorials/zh-CN/master/dataset/dataset_autotune.html)。" + "MindSpore提供了一种自动数据调优的工具——Dataset AutoTune,可在训练过程中根据环境资源自动调整数据处理管道的并行度,最大化利用系统资源与,加速数据处理。详细用法请参考[自动数据加速](https://www.mindspore.cn/tutorials/zh-CN/br_base/dataset/dataset_autotune.html)。" ] }, { @@ -787,7 +787,7 @@ "source": [ "## 数据异构加速\n", "\n", - "MindSpore提供运算负载均衡技术,可将Tensor运算分配到不同的异构硬件上,既均衡各硬件的运算开销,又能利用异构硬件的优势加速运算。详细用法请参考[数据异构加速](https://www.mindspore.cn/tutorials/zh-CN/master/dataset/dataset_offload.html)。" + "MindSpore提供运算负载均衡技术,可将Tensor运算分配到不同的异构硬件上,既均衡各硬件的运算开销,又能利用异构硬件的优势加速运算。详细用法请参考[数据异构加速](https://www.mindspore.cn/tutorials/zh-CN/br_base/dataset/dataset_offload.html)。" ] } ], diff --git a/tutorials/source_zh_cn/dataset/overview.ipynb b/tutorials/source_zh_cn/dataset/overview.ipynb index 8189b51823..bd2b501a02 100644 --- a/tutorials/source_zh_cn/dataset/overview.ipynb +++ b/tutorials/source_zh_cn/dataset/overview.ipynb @@ -13,7 +13,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/dataset/mindspore_overview.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/dataset/mindspore_overview.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/dataset/overview.ipynb)" + "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/dataset/mindspore_overview.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/dataset/mindspore_overview.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/dataset/overview.ipynb)" ] }, { @@ -37,19 +37,19 @@ "\n", "用户通过API定义的Dataset流水线,运行训练进程后Dataset会从数据集中循环加载数据 -> 处理 -> Batch -> 迭代器,最终用于训练。\n", "\n", - "![MindSpore Dataset Pipeline](https://www.mindspore.cn/docs/zh-CN/master/_images/dataset_pipeline.png)\n", + "![MindSpore Dataset Pipeline](https://www.mindspore.cn/docs/zh-CN/br_base/_images/dataset_pipeline.png)\n", "\n", "如上图所示,MindSpore Dataset模块使得用户很简便地定义数据预处理Pipeline,并以最高效(多进程/多线程)的方式处理数据集中样本,具体的步骤参考如下:\n", "\n", - "- 数据集加载(Dataset):用户可以方便地使用Dataset类 
([标准格式数据集加载](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.dataset.loading.html#%E6%A0%87%E5%87%86%E6%A0%BC%E5%BC%8F%E6%95%B0%E6%8D%AE%E9%9B%86%E5%8A%A0%E8%BD%BD)、[视觉数据集](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.dataset.loading.html#%E8%A7%86%E8%A7%89%E6%95%B0%E6%8D%AE%E9%9B%86)、[文本数据集](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.dataset.loading.html#%E6%96%87%E6%9C%AC%E6%95%B0%E6%8D%AE%E9%9B%86)、[音频数据集](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.dataset.loading.html#%E9%9F%B3%E9%A2%91%E6%95%B0%E6%8D%AE%E9%9B%86)) 来加载已支持的数据集,或者通过 UDF Loader + [GeneratorDataset 自定义数据集](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.GeneratorDataset.html#mindspore.dataset.GeneratorDataset) 实现Python层自定义数据集的加载。加载类方法可以使用多种Sampler、数据分片、数据shuffle等功能;\n", + "- 数据集加载(Dataset):用户可以方便地使用Dataset类 ([标准格式数据集加载](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.dataset.loading.html#%E6%A0%87%E5%87%86%E6%A0%BC%E5%BC%8F%E6%95%B0%E6%8D%AE%E9%9B%86%E5%8A%A0%E8%BD%BD)、[视觉数据集](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.dataset.loading.html#%E8%A7%86%E8%A7%89%E6%95%B0%E6%8D%AE%E9%9B%86)、[文本数据集](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.dataset.loading.html#%E6%96%87%E6%9C%AC%E6%95%B0%E6%8D%AE%E9%9B%86)、[音频数据集](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.dataset.loading.html#%E9%9F%B3%E9%A2%91%E6%95%B0%E6%8D%AE%E9%9B%86)) 来加载已支持的数据集,或者通过 UDF Loader + [GeneratorDataset 自定义数据集](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.GeneratorDataset.html#mindspore.dataset.GeneratorDataset) 实现Python层自定义数据集的加载。加载类方法可以使用多种Sampler、数据分片、数据shuffle等功能;\n", "\n", - "- 数据集操作(filter/ skip):用户通过数据集对象方法 [.shuffle](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.shuffle.html#mindspore.dataset.Dataset.shuffle)、[.filter](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.filter.html#mindspore.dataset.Dataset.filter)、[.skip](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.skip.html#mindspore.dataset.Dataset.skip)、[.split](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.split.html#mindspore.dataset.Dataset.split)、[.take](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.take.html#mindspore.dataset.Dataset.take)等来实现数据集的进一步混洗、过滤、跳过、最多获取条数等操作;\n", + "- 数据集操作(filter/ skip):用户通过数据集对象方法 
[.shuffle](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.shuffle.html#mindspore.dataset.Dataset.shuffle)、[.filter](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.filter.html#mindspore.dataset.Dataset.filter)、[.skip](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.skip.html#mindspore.dataset.Dataset.skip)、[.split](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.split.html#mindspore.dataset.Dataset.split)、[.take](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.take.html#mindspore.dataset.Dataset.take)等来实现数据集的进一步混洗、过滤、跳过、最多获取条数等操作;\n", "\n", - "- 数据集样本变换操作(map):用户可以将数据变换操作 ([vision数据变换](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.dataset.transforms.html#%E8%A7%86%E8%A7%89) , [nlp数据变换](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.dataset.transforms.html#%E6%96%87%E6%9C%AC) , [audio数据变换](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.dataset.transforms.html#%E9%9F%B3%E9%A2%91) ) 添加到map操作中执行,数据预处理过程中可以定义多个map操作,用于执行不同变换操作。数据变换操作也可以是用户自定义变换的 PyFunc ;\n", + "- 数据集样本变换操作(map):用户可以将数据变换操作 ([vision数据变换](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.dataset.transforms.html#%E8%A7%86%E8%A7%89) , [nlp数据变换](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.dataset.transforms.html#%E6%96%87%E6%9C%AC) , [audio数据变换](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.dataset.transforms.html#%E9%9F%B3%E9%A2%91) ) 添加到map操作中执行,数据预处理过程中可以定义多个map操作,用于执行不同变换操作。数据变换操作也可以是用户自定义变换的 PyFunc ;\n", "\n", - "- 批(batch):用户在样本完成变换后,使用 [.batch](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/dataset_method/batch/mindspore.dataset.Dataset.batch.html#mindspore.dataset.Dataset.batch) 操作将多个样本组织成batch,也可以通过batch的参数 per_batch_map 来自定义batch逻辑;\n", + "- 批(batch):用户在样本完成变换后,使用 [.batch](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/dataset_method/batch/mindspore.dataset.Dataset.batch.html#mindspore.dataset.Dataset.batch) 操作将多个样本组织成batch,也可以通过batch的参数 per_batch_map 来自定义batch逻辑;\n", "\n", - "- 迭代器(create_dict_iterator):最后用户通过数据集对象方法 [.create_dict_iterator](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_dict_iterator.html#mindspore.dataset.Dataset.create_dict_iterator)、[.create_tuple_iterator](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_tuple_iterator.html#mindspore.dataset.Dataset.create_tuple_iterator) 来创建迭代器将预处理完成的数据循环输出。" + "- 迭代器(create_dict_iterator):最后用户通过数据集对象方法 [.create_dict_iterator](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_dict_iterator.html#mindspore.dataset.Dataset.create_dict_iterator)、[.create_tuple_iterator](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_tuple_iterator.html#mindspore.dataset.Dataset.create_tuple_iterator) 来创建迭代器将预处理完成的数据循环输出。" ] }, { @@ -67,11 +67,11 @@ "\n", "| 数据集接口分类 | API列表 | 说明 |\n", "|------------------------|----------------------------------------------------------|--------------------------------------------------------------|\n", - "| 标准格式数据集 
| [MindDataset](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.MindDataset.html#mindspore.dataset.MindDataset) 、 [TFRecordDataset](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.TFRecordDataset.html#mindspore.dataset.TFRecordDataset) 、 [CSVDataset](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.CSVDataset.html#mindspore.dataset.CSVDataset) 等 | 其中 MindDataset 依赖 MindSpore 数据格式, 详见: [格式转换](https://www.mindspore.cn/tutorials/zh-CN/master/dataset/record.html) |\n", - "| 自定义数据集 | [GeneratorDataset](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.GeneratorDataset.html#mindspore.dataset.GeneratorDataset) 、 [RandomDataset](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.RandomDataset.html#mindspore.dataset.RandomDataset) 等 | 其中 GeneratorDataset 负责加载用户自定义DataLoader, 详见: [自定义数据集](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/dataset.html#%E8%87%AA%E5%AE%9A%E4%B9%89%E6%95%B0%E6%8D%AE%E9%9B%86) |\n", - "| 常用数据集 | [ImageFolderDataset](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.ImageFolderDataset.html#mindspore.dataset.ImageFolderDataset) 、 [Cifar10Dataset](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.Cifar10Dataset.html#mindspore.dataset.Cifar10Dataset) 、 [IWSLT2017Dataset](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.IWSLT2017Dataset.html#mindspore.dataset.IWSLT2017Dataset) 、 [LJSpeechDataset](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.LJSpeechDataset.html#mindspore.dataset.LJSpeechDataset) 等 | 用于常用的开源数据集 |\n", + "| 标准格式数据集 | [MindDataset](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.MindDataset.html#mindspore.dataset.MindDataset) 、 [TFRecordDataset](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.TFRecordDataset.html#mindspore.dataset.TFRecordDataset) 、 [CSVDataset](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.CSVDataset.html#mindspore.dataset.CSVDataset) 等 | 其中 MindDataset 依赖 MindSpore 数据格式, 详见: [格式转换](https://www.mindspore.cn/tutorials/zh-CN/br_base/dataset/record.html) |\n", + "| 自定义数据集 | [GeneratorDataset](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.GeneratorDataset.html#mindspore.dataset.GeneratorDataset) 、 [RandomDataset](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.RandomDataset.html#mindspore.dataset.RandomDataset) 等 | 其中 GeneratorDataset 负责加载用户自定义DataLoader, 详见: [自定义数据集](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/dataset.html#%E8%87%AA%E5%AE%9A%E4%B9%89%E6%95%B0%E6%8D%AE%E9%9B%86) |\n", + "| 常用数据集 | [ImageFolderDataset](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.ImageFolderDataset.html#mindspore.dataset.ImageFolderDataset) 、 [Cifar10Dataset](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.Cifar10Dataset.html#mindspore.dataset.Cifar10Dataset) 、 [IWSLT2017Dataset](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.IWSLT2017Dataset.html#mindspore.dataset.IWSLT2017Dataset) 、 [LJSpeechDataset](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.LJSpeechDataset.html#mindspore.dataset.LJSpeechDataset) 等 | 用于常用的开源数据集 |\n", "\n", - 
"以上数据集加载([示例](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/dataset.html#%E6%95%B0%E6%8D%AE%E9%9B%86%E5%8A%A0%E8%BD%BD))中,可以配置不同的参数,以实现不同的加载效果,常用参数举例如下:\n", + "以上数据集加载([示例](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/dataset.html#%E6%95%B0%E6%8D%AE%E9%9B%86%E5%8A%A0%E8%BD%BD))中,可以配置不同的参数,以实现不同的加载效果,常用参数举例如下:\n", "\n", "- `columns_list`:过滤数据集中指定的列,仅针对部分数据集接口。默认值:None,加载所有数据列。\n", "\n", @@ -83,11 +83,11 @@ "\n", " - `num_shards` 和 `shard_id`:对数据集进行分片。默认值:None,不分片。\n", "\n", - " - 其他更多的采样逻辑可以参考:[数据采样](https://www.mindspore.cn/tutorials/zh-CN/master/dataset/sampler.html)。\n", + " - 其他更多的采样逻辑可以参考:[数据采样](https://www.mindspore.cn/tutorials/zh-CN/br_base/dataset/sampler.html)。\n", "\n", "#### 数据集组合\n", "\n", - "数据集组合可以将多个数据集以串联/并朕的方式组合起来,形成一个全新的dataset对象,详见[数据操作](https://www.mindspore.cn/tutorials/zh-CN/master/dataset/eager.html#数据操作)。\n", + "数据集组合可以将多个数据集以串联/并朕的方式组合起来,形成一个全新的dataset对象,详见[数据操作](https://www.mindspore.cn/tutorials/zh-CN/br_base/dataset/eager.html#数据操作)。\n", "\n" ] }, @@ -102,7 +102,7 @@ "source": [ "#### 数据集切分\n", "\n", - "将数据集切分成训练数据集和验证数据集,分别用于训练过程和验证过程,详见[数据操作](https://www.mindspore.cn/tutorials/zh-CN/master/dataset/eager.html#数据操作)。" + "将数据集切分成训练数据集和验证数据集,分别用于训练过程和验证过程,详见[数据操作](https://www.mindspore.cn/tutorials/zh-CN/br_base/dataset/eager.html#数据操作)。" ] }, { @@ -116,7 +116,7 @@ "source": [ "#### 数据集保存\n", "\n", - "将数据集重新保存到MindRecord数据格式,详见[数据操作](https://www.mindspore.cn/tutorials/zh-CN/master/dataset/eager.html#数据操作)。" + "将数据集重新保存到MindRecord数据格式,详见[数据操作](https://www.mindspore.cn/tutorials/zh-CN/br_base/dataset/eager.html#数据操作)。" ] }, { @@ -146,41 +146,41 @@ "\n", "- 在 `.map(...)` 中使用Dataset提供的数据变换操作\n", "\n", - " Dataset提供了丰富的[数据变换操作](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.dataset.transforms.html#),这些数据变换操作可以直接放在 `.map(...)` 中使用。具体使用方法参考 [map变换操作](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/dataset.html#%E5%86%85%E7%BD%AE%E6%95%B0%E6%8D%AE%E5%8F%98%E6%8D%A2%E6%93%8D%E4%BD%9C)。\n", + " Dataset提供了丰富的[数据变换操作](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.dataset.transforms.html#),这些数据变换操作可以直接放在 `.map(...)` 中使用。具体使用方法参考 [map变换操作](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/dataset.html#%E5%86%85%E7%BD%AE%E6%95%B0%E6%8D%AE%E5%8F%98%E6%8D%A2%E6%93%8D%E4%BD%9C)。\n", "\n", "- 在 `.map(...)` 中使用自定义数据变换操作\n", "\n", - " Dataset也支持用户自定义的数据变换操作,仅需将用户自定义函数传递给 `.map(...)` 退可。具体使用方法参考:[自定义map变换操作](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/dataset.html#%E8%87%AA%E5%AE%9A%E4%B9%89%E6%95%B0%E6%8D%AE%E5%8F%98%E6%8D%A2%E6%93%8D%E4%BD%9C)。\n", + " Dataset也支持用户自定义的数据变换操作,仅需将用户自定义函数传递给 `.map(...)` 退可。具体使用方法参考:[自定义map变换操作](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/dataset.html#%E8%87%AA%E5%AE%9A%E4%B9%89%E6%95%B0%E6%8D%AE%E5%8F%98%E6%8D%A2%E6%93%8D%E4%BD%9C)。\n", "\n", "- 在 `.map(...)` 中返回Dict数据结构数据\n", "\n", - " Dataset也支持在用户自定义的数据变换操作中返回Dict数据结构,使得定义的数据变换更加灵活。具体使用方法参考:[自定义map变换操作处理字典对象](https://www.mindspore.cn/tutorials/zh-CN/master/dataset/python_objects.html#%E8%87%AA%E5%AE%9A%E4%B9%89map%E5%A2%9E%E5%BC%BA%E6%93%8D%E4%BD%9C%E5%A4%84%E7%90%86%E5%AD%97%E5%85%B8%E5%AF%B9%E8%B1%A1)。\n", + " Dataset也支持在用户自定义的数据变换操作中返回Dict数据结构,使得定义的数据变换更加灵活。具体使用方法参考:[自定义map变换操作处理字典对象](https://www.mindspore.cn/tutorials/zh-CN/br_base/dataset/python_objects.html#%E8%87%AA%E5%AE%9A%E4%B9%89map%E5%A2%9E%E5%BC%BA%E6%93%8D%E4%BD%9C%E5%A4%84%E7%90%86%E5%AD%97%E5%85%B8%E5%AF%B9%E8%B1%A1)。\n", "\n", "#### 自动数据增强\n", "\n", - "除了以上的普通数据变换,Dataset 
还提供了一种自动数据变换方式,可以基于特定策略自动对图像进行数据变换处理。详细说明见:[自动数据增强](https://www.mindspore.cn/tutorials/zh-CN/master/dataset/augment.html)。\n", + "除了以上的普通数据变换,Dataset 还提供了一种自动数据变换方式,可以基于特定策略自动对图像进行数据变换处理。详细说明见:[自动数据增强](https://www.mindspore.cn/tutorials/zh-CN/br_base/dataset/augment.html)。\n", "\n", "### 数据batch\n", "\n", "Dataset提供 `.batch(...)` 操作,可以很方便的将数据变换操作后的样本组织成batch。有两种使用方式:\n", "\n", - "1. 默认 `.batch(...)` 操作,将batch_size个样本组织成shape为 (batch_size, ...)的数据,详细用法请参考 [batch操作](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/dataset.html#%E6%95%B0%E6%8D%AEbatch);\n", + "1. 默认 `.batch(...)` 操作,将batch_size个样本组织成shape为 (batch_size, ...)的数据,详细用法请参考 [batch操作](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/dataset.html#%E6%95%B0%E6%8D%AEbatch);\n", "\n", - "2. 自定义 `.batch(..., per_batch_map, ...)` 操作,支持用户将 [np.ndarray, nd.ndarray, ...] 多条数据按照自定义逻辑组织batch,详细用法请参考 [自定义batch操作](https://www.mindspore.cn/tutorials/zh-CN/master/dataset/python_objects.html#batch%E6%93%8D%E4%BD%9C%E5%A4%84%E7%90%86%E5%AD%97%E5%85%B8%E5%AF%B9%E8%B1%A1)。\n", + "2. 自定义 `.batch(..., per_batch_map, ...)` 操作,支持用户将 [np.ndarray, nd.ndarray, ...] 多条数据按照自定义逻辑组织batch,详细用法请参考 [自定义batch操作](https://www.mindspore.cn/tutorials/zh-CN/br_base/dataset/python_objects.html#batch%E6%93%8D%E4%BD%9C%E5%A4%84%E7%90%86%E5%AD%97%E5%85%B8%E5%AF%B9%E8%B1%A1)。\n", "\n", "### 数据集迭代器\n", "\n", - "用户在定义完成 `数据集加载(xxDataset)-> 数据处理(.map)-> 数据batch(.batch)` Dataset流水线(Pipeline)后,可以通过创建迭代器方法 `.create_dict_iterator(...)` / `.create_tuple_iterator(...)` 循环将数据输出。具体的使用方法参考:[数据集迭代器](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/dataset.html#%E6%95%B0%E6%8D%AE%E9%9B%86%E8%BF%AD%E4%BB%A3%E5%99%A8)。\n", + "用户在定义完成 `数据集加载(xxDataset)-> 数据处理(.map)-> 数据batch(.batch)` Dataset流水线(Pipeline)后,可以通过创建迭代器方法 `.create_dict_iterator(...)` / `.create_tuple_iterator(...)` 循环将数据输出。具体的使用方法参考:[数据集迭代器](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/dataset.html#%E6%95%B0%E6%8D%AE%E9%9B%86%E8%BF%AD%E4%BB%A3%E5%99%A8)。\n", "\n", "### 性能优化\n", "\n", "#### 数据处理性能优化\n", "\n", - "针对数据处理Pipeline性能不足的场景,可以参考[数据处理性能优化](https://www.mindspore.cn/tutorials/zh-CN/master/dataset/optimize.html)来进一步优化性能,以满足训练端到端性能要求。\n", + "针对数据处理Pipeline性能不足的场景,可以参考[数据处理性能优化](https://www.mindspore.cn/tutorials/zh-CN/br_base/dataset/optimize.html)来进一步优化性能,以满足训练端到端性能要求。\n", "\n", "#### 单节点数据缓存\n", "\n", - "另外,对于推理场景,为了追求极致的性能,可以使用 [单节点数据缓存](https://www.mindspore.cn/tutorials/zh-CN/master/dataset/cache.html) 将数据集缓存于本地内存中,以加速数据集的读取和预处理。" + "另外,对于推理场景,为了追求极致的性能,可以使用 [单节点数据缓存](https://www.mindspore.cn/tutorials/zh-CN/br_base/dataset/cache.html) 将数据集缓存于本地内存中,以加速数据集的读取和预处理。" ] }, { @@ -192,7 +192,7 @@ "\n", "用户可以直接使用数据变换操作处理一条数据,返回值即是数据变换的结果。\n", "\n", - "数据变换操作([vision数据变换](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.dataset.transforms.html#%E8%A7%86%E8%A7%89) ,[nlp数据变换](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.dataset.transforms.html#%E6%96%87%E6%9C%AC) ,[audio数据变换](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.dataset.transforms.html#%E9%9F%B3%E9%A2%91))可以像调用普通函数一样直接来使用。常见用法是:先初始化数据变换对象,然后调用数据变换操作方法,传入需要处理的数据,最后得到处理的结果。示例详见[轻量化数据变换](https://www.mindspore.cn/tutorials/zh-CN/master/dataset/eager.html#轻量化数据变换)。" + "数据变换操作([vision数据变换](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.dataset.transforms.html#%E8%A7%86%E8%A7%89) ,[nlp数据变换](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.dataset.transforms.html#%E6%96%87%E6%9C%AC) 
,[audio数据变换](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.dataset.transforms.html#%E9%9F%B3%E9%A2%91))可以像调用普通函数一样直接来使用。常见用法是:先初始化数据变换对象,然后调用数据变换操作方法,传入需要处理的数据,最后得到处理的结果。示例详见[轻量化数据变换](https://www.mindspore.cn/tutorials/zh-CN/br_base/dataset/eager.html#轻量化数据变换)。" ] }, { @@ -204,7 +204,7 @@ "\n", "### 数据处理管道支持Python对象\n", "\n", - "数据处理管道中的特定操作(如自定义数据集GeneratorDataset、自定义map增强操作、自定义batch(per_batch_map=...))支持任意Python类型对象作为输入。详见[数据处理管道支持Python对象](https://www.mindspore.cn/tutorials/zh-CN/master/dataset/python_objects.html)。" + "数据处理管道中的特定操作(如自定义数据集GeneratorDataset、自定义map增强操作、自定义batch(per_batch_map=...))支持任意Python类型对象作为输入。详见[数据处理管道支持Python对象](https://www.mindspore.cn/tutorials/zh-CN/br_base/dataset/python_objects.html)。" ] } ], diff --git a/tutorials/source_zh_cn/dataset/python_objects.ipynb b/tutorials/source_zh_cn/dataset/python_objects.ipynb index d016d361ec..de301d6d55 100644 --- a/tutorials/source_zh_cn/dataset/python_objects.ipynb +++ b/tutorials/source_zh_cn/dataset/python_objects.ipynb @@ -7,7 +7,7 @@ "source": [ "# 数据处理管道支持Python对象\n", "\n", - "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/dataset/mindspore_python_objects.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/dataset/mindspore_python_objects.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/dataset/python_objects.ipynb)\n", + "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/dataset/mindspore_python_objects.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/dataset/mindspore_python_objects.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/dataset/python_objects.ipynb)\n", "\n", "数据处理管道中的特定操作(如自定义数据集`GeneratorDataset`、自定义`map`增强操作、自定义`batch(per_batch_map=...)`)支持任意Python类型对象作为输入。\n", "\n", diff --git a/tutorials/source_zh_cn/dataset/record.ipynb b/tutorials/source_zh_cn/dataset/record.ipynb index 359424027a..4491e9fb03 100644 --- a/tutorials/source_zh_cn/dataset/record.ipynb +++ b/tutorials/source_zh_cn/dataset/record.ipynb @@ -6,13 +6,13 @@ "source": [ "# MindRecord格式转换\n", "\n", - "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/dataset/mindspore_record.ipynb) \n", - "[![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/dataset/mindspore_record.py) \n", - 
"[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/dataset/record.ipynb)\n", + "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/dataset/mindspore_record.ipynb) \n", + "[![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/dataset/mindspore_record.py) \n", + "[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/dataset/record.ipynb)\n", "\n", - "MindSpore可以将用于训练网络模型的数据集转换为特定的数据格式(MindSpore Record),便于数据的保存和加载。其目标是归一化用户数据集,并通过[mindspore.dataset.MindDataset](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.MindDataset.html)接口实现数据的读取,用于训练过程。\n", + "MindSpore可以将用于训练网络模型的数据集转换为特定的数据格式(MindSpore Record),便于数据的保存和加载。其目标是归一化用户数据集,并通过[mindspore.dataset.MindDataset](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.MindDataset.html)接口实现数据的读取,用于训练过程。\n", "\n", - "![conversion](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/dataset/images/data_conversion_concept.png)\n", + "![conversion](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/dataset/images/data_conversion_concept.png)\n", "\n", "此外,MindSpore还针对部分数据场景进行了性能优化。使用MindSpore Record数据格式可以减少磁盘IO和网络IO开销,从而获得更好的使用体验。\n", "\n", @@ -27,7 +27,7 @@ "\n", "如下图所示,MindSpore Record文件由数据文件和索引文件组成。\n", "\n", - "![MindSpore Record](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/dataset/images/mindrecord.png)\n", + "![MindSpore Record](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/dataset/images/mindrecord.png)\n", "\n", "其中,数据文件包含文件头、标量数据页和块数据页,用于存储用户归一化后的训练数据。具体用途如下:\n", "\n", @@ -269,11 +269,11 @@ "\n", "MindSpore提供常用数据集的转换工具类,能够将常用的数据集转换为MindSpore Record文件格式。\n", "\n", - "> 更多数据集转换的详细说明参考[API文档](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.mindrecord.html)。\n", + "> 更多数据集转换的详细说明参考[API文档](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.mindrecord.html)。\n", "\n", "### 转存CIFAR-10数据集\n", "\n", - "用户可以通过[mindspore.dataset.Dataset.save](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.save.html)方法,将CIFAR-10原始数据转换为MindSpore Record,并使用`MindDataset`接口读取。\n", + "用户可以通过[mindspore.dataset.Dataset.save](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.save.html)方法,将CIFAR-10原始数据转换为MindSpore Record,并使用`MindDataset`接口读取。\n", "\n", "1. 
下载[CIFAR-10数据集](https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz),并使用`Cifar10Dataset`加载。" ] diff --git a/tutorials/source_zh_cn/dataset/sampler.ipynb b/tutorials/source_zh_cn/dataset/sampler.ipynb index 60efd8ad0c..a10c3a86aa 100644 --- a/tutorials/source_zh_cn/dataset/sampler.ipynb +++ b/tutorials/source_zh_cn/dataset/sampler.ipynb @@ -13,9 +13,9 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/dataset/mindspore_sampler.ipynb) \n", - "[![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/dataset/mindspore_sampler.py) \n", - "[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/dataset/sampler.ipynb)" + "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/dataset/mindspore_sampler.ipynb) \n", + "[![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/dataset/mindspore_sampler.py) \n", + "[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/dataset/sampler.ipynb)" ] }, { @@ -29,7 +29,7 @@ "\n", "### 自定义数据集\n", "\n", - "MindSpore可以通过自定义数据加载类或自定义数据集生成函数的方式来生成数据集。然后通过[mindspore.dataset.GeneratorDataset](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.GeneratorDataset.html)接口实现自定义方式的数据集加载。\n", + "MindSpore可以通过自定义数据加载类或自定义数据集生成函数的方式来生成数据集。然后通过[mindspore.dataset.GeneratorDataset](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.GeneratorDataset.html)接口实现自定义方式的数据集加载。\n", "\n", "`GeneratorDataset` 支持通过可随机访问数据集对象、可迭代数据集对象和生成器(generator)构造自定义数据集,下面分别进行介绍。\n", "\n", @@ -179,7 +179,7 @@ "\n", "MindSpore也支持开源经典数据集的解析和读取,如MNIST、CIFAR-10、CLUE、LJSpeech等。\n", "\n", - "以MNIST数据集作为样例,更多其他数据集请参考 [开源数据集](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.dataset.loading.html#%E5%BC%80%E6%BA%90%E6%95%B0%E6%8D%AE%E9%9B%86%E5%8A%A0%E8%BD%BD) 。" + "以MNIST数据集作为样例,更多其他数据集请参考 [开源数据集](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.dataset.loading.html#%E5%BC%80%E6%BA%90%E6%95%B0%E6%8D%AE%E9%9B%86%E5%8A%A0%E8%BD%BD) 。" ] }, { @@ -249,13 +249,13 @@ "\n", "为满足训练需求,解决如数据集过大、样本类别分布不均等问题,MindSpore提供了多种不同用途的采样器(Sampler),帮助用户对数据集进行不同形式的采样。用户只需在加载数据集时传入采样器对象,即可实现数据采样。\n", "\n", - 
"MindSpore目前提供了如[mindspore.dataset.RandomSampler](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.RandomSampler.html)、[mindspore.dataset.WeightedRandomSampler](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.WeightedRandomSampler.html)、[mindspore.dataset.SubsetRandomSampler](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.SubsetRandomSampler.html)等多种采样器。此外,用户也可以根据需要实现自定义采样器类。\n", + "MindSpore目前提供了如[mindspore.dataset.RandomSampler](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.RandomSampler.html)、[mindspore.dataset.WeightedRandomSampler](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.WeightedRandomSampler.html)、[mindspore.dataset.SubsetRandomSampler](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.SubsetRandomSampler.html)等多种采样器。此外,用户也可以根据需要实现自定义采样器类。\n", "\n", - "> 更多采样器的使用方法参见[采样器API文档](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.dataset.loading.html#%E9%87%87%E6%A0%B7%E5%99%A8-1)。\n", + "> 更多采样器的使用方法参见[采样器API文档](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.dataset.loading.html#%E9%87%87%E6%A0%B7%E5%99%A8-1)。\n", "\n", "下面主要以CIFAR-10数据集为例,介绍几种常用MindSpore采样器的使用方法。\n", "\n", - "![cifar10](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/dataset/images/cifar10.jpg)\n", + "![cifar10](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/dataset/images/cifar10.jpg)\n", "\n", "> 本章节中的示例代码依赖`matplotlib`,可使用命令`pip install matplotlib`安装。如本文档以Notebook运行时,完成安装后需要重启kernel才能执行后续代码。" ] diff --git a/tutorials/source_zh_cn/debug/dryrun.md b/tutorials/source_zh_cn/debug/dryrun.md index cac90ebf9b..250373d0cc 100644 --- a/tutorials/source_zh_cn/debug/dryrun.md +++ b/tutorials/source_zh_cn/debug/dryrun.md @@ -1,6 +1,6 @@ # DryRun -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/debug/dryrun.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/debug/dryrun.md) ## 概述 @@ -17,7 +17,7 @@ MindSpore框架提供了DryRun机制,模拟(mock)所有的device侧接口 用户可以根据自己的需求,通过使能环境变量 `export MS_SIMULATION_LEVEL=0/1/2/3`,设置模拟运行的级别。 > - 该特性为模拟执行,无法获取算子正确的输出信息,静态图涉及动态shape的场景下,存在算子的输入shape依赖上一个算子的输出shape的情况,因此不适用该特性。 -> - 动态图场景需要采用[mock接口](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.utils.html#mindspore.utils.dryrun.mock)自行适配脚本。 +> - 动态图场景需要采用[mock接口](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.utils.html#mindspore.utils.dryrun.mock)自行适配脚本。 #### MS_SIMULATION_LEVEL=0 @@ -57,7 +57,7 @@ Actual peak memory usage (with fragments): 26244M #### MS_SIMULATION_LEVEL=3 -在`2`的基础上增加了计算算子的运行统计,占用CPU资源以及与需要模拟的卡数相对应的计算资源。在显存分析的基础上,增加了当前卡的计算算子执行,用户可以结合[MindSpore Profiler](https://www.mindspore.cn/tutorials/zh-CN/master/debug/profiler.html)分析计算算子耗时。 +在`2`的基础上增加了计算算子的运行统计,占用CPU资源以及与需要模拟的卡数相对应的计算资源。在显存分析的基础上,增加了当前卡的计算算子执行,用户可以结合[MindSpore Profiler](https://www.mindspore.cn/tutorials/zh-CN/br_base/debug/profiler.html)分析计算算子耗时。 开启profiling后,可以找到`trace_view.json`文件,如下图所示: diff --git a/tutorials/source_zh_cn/debug/dump.md b/tutorials/source_zh_cn/debug/dump.md index 444a145e26..6a058dae99 100644 --- 
a/tutorials/source_zh_cn/debug/dump.md +++ b/tutorials/source_zh_cn/debug/dump.md @@ -1,6 +1,6 @@ # Dump功能调试 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/debug/dump.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/debug/dump.md) 为了对训练过程进行分析,MindSpore提供了Dump功能,用于保存训练过程中算子的输入和输出数据。 @@ -22,7 +22,7 @@ MindSpore在不同后端下支持的Dump功能不完全相同,需要的配置 - [Ascend下GE后端Dump](#ascend下ge后端dump) - [CPU/GPU后端Dump](#cpugpu后端dump) -> - Ascend下ms_backend/GE后端的区别请见[jit接口](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.jit.html#mindspore.jit)。 +> - Ascend下ms_backend/GE后端的区别请见[jit接口](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.jit.html#mindspore.jit)。 > > - CPU/GPU后端支持dump常量数据,Ascend ms_backend/GE后端不支持Dump常量数据。 > @@ -131,14 +131,14 @@ MindSpore在不同后端下支持的Dump功能如下表所示: - `common_dump_settings`: - `op_debug_mode`:该属性用于算子溢出或算子异常调试,设置成0,表示保存所有算子或指定算子;设置成3,表示只保存溢出算子;设置成4,表示只保存异常算子的输入。在Dump数据的时候请设置成0,若设置成其他值,则只会Dump溢出算子或异常算子的数据。默认值:0。 - - `dump_mode`:设置成0,表示Dump出该网络中的所有算子数据;设置成1,表示Dump`"kernels"`里面指定的算子数据或算子类型数据;设置成2,表示使用[mindspore.set_dump](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.set_dump.html) Dump指定对象。仅在op_debug_mode设置为0时支持指定算子dump。 + - `dump_mode`:设置成0,表示Dump出该网络中的所有算子数据;设置成1,表示Dump`"kernels"`里面指定的算子数据或算子类型数据;设置成2,表示使用[mindspore.set_dump](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.set_dump.html) Dump指定对象。仅在op_debug_mode设置为0时支持指定算子dump。 - `path`:Dump保存数据的绝对路径。 - `net_name`:自定义的网络名称,例如:"ResNet50"。 - `iteration`:指定需要Dump数据的迭代。类型为str,用“|”分离要保存的不同区间的step的数据。如"0|5-8|100-120"表示Dump第1个,第6个到第9个,第101个到第121个step的数据。指定“all”,表示Dump所有迭代的数据。仅在op_debug_mode设置为0或3时支持保存指定迭代,op_debug_mode设置为4时不支持指定迭代。 - `saved_data`: 指定Dump的数据。类型为str,取值成"tensor",表示Dump出完整张量数据;取值成"statistic",表示只Dump张量的统计信息;取值"full"代表两种都要。默认取值为"tensor"。保存统计信息仅在op_debug_mode设置为0时生效。 - `input_output`:设置成0,表示Dump出算子的输入和算子的输出;设置成1,表示Dump出算子的输入;设置成2,表示Dump出算子的输出。在op_debug_mode设置为3时,只能设置`input_output`为同时保存算子输入和算子输出。在op_debug_mode设置为4时,只能保存算子输入。 - `kernels`:该项可以配置三种格式: - 1. 算子的名称列表。通过设置环境变量`MS_DEV_SAVE_GRAPHS`的值为2开启IR保存开关并执行用例,从生成的IR文件`trace_code_graph_{graph_id}`中获取算子名称。详细说明可以参照教程:[如何保存IR](https://www.mindspore.cn/tutorials/zh-CN/master/debug/error_analysis/mindir.html#如何保存ir)。 + 1. 算子的名称列表。通过设置环境变量`MS_DEV_SAVE_GRAPHS`的值为2开启IR保存开关并执行用例,从生成的IR文件`trace_code_graph_{graph_id}`中获取算子名称。详细说明可以参照教程:[如何保存IR](https://www.mindspore.cn/tutorials/zh-CN/br_base/debug/error_analysis/mindir.html#如何保存ir)。 需要注意的是,是否设置环境变量`MS_DEV_SAVE_GRAPHS`的值为2可能会导致同一个算子的id不同,所以在Dump指定算子时要在获取算子名称之后保持这一项设置不变。或者也可以在Dump保存的`ms_output_trace_code_graph_{graph_id}.ir`文件中获取算子名称,参考[Ascend ms_backend后端下Dump数据对象目录](#数据对象目录和数据文件介绍)。 2. 还可以指定算子类型。当字符串中不带算子scope信息和算子id信息时,后台则认为其为算子类型,例如:"conv"。算子类型的匹配规则为:当发现算子名中包含算子类型字符串时,则认为匹配成功(不区分大小写),例如:"conv" 可以匹配算子 "Conv2D-op1234"、"Conv3D-op1221"。 3. 
算子名称的正则表达式。当字符串符合"name-regex(xxx)"格式时,后台则会将其作为正则表达式。例如,"name-regex(Default/.+)"可匹配算子名称以"Default/"开头的所有算子。 @@ -291,11 +291,11 @@ ms_execution_order_graph_{graph_id}.csv ### 数据分析样例 -为了更好地展示使用Dump来保存数据并分析数据的流程,我们提供了一套[完整样例脚本](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/dump) ,只需要执行 `bash run_sync_dump.sh`。 +为了更好地展示使用Dump来保存数据并分析数据的流程,我们提供了一套[完整样例脚本](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/dump) ,只需要执行 `bash run_sync_dump.sh`。 在通过Dump功能将脚本对应的图保存到磁盘上后,会产生最终执行图文件`ms_output_trace_code_graph_{graph_id}.ir`。该文件中保存了对应的图中每个算子的堆栈信息,记录了算子对应的生成脚本。 -以[AlexNet脚本](https://gitee.com/mindspore/docs/blob/master/docs/sample_code/dump/train_alexnet.py)为例: +以[AlexNet脚本](https://gitee.com/mindspore/docs/blob/br_base/docs/sample_code/dump/train_alexnet.py)为例: ```python ... @@ -479,14 +479,14 @@ Ascend下GE后端Dump已迁移到msprobe工具,更多详情请查看[《msprob - `common_dump_settings`: - `op_debug_mode`:该属性用于算子溢出或算子异常调试,CPU/GPU Dump只支持设置成0,表示保存所有算子或指定算子。 - - `dump_mode`:设置成0,表示Dump出该网络中的所有算子数据;设置成1,表示Dump`"kernels"`里面指定的算子数据或算子类型数据;设置成2,表示使用[mindspore.set_dump](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.set_dump.html) Dump指定对象。仅在op_debug_mode设置为0时支持指定算子dump。 + - `dump_mode`:设置成0,表示Dump出该网络中的所有算子数据;设置成1,表示Dump`"kernels"`里面指定的算子数据或算子类型数据;设置成2,表示使用[mindspore.set_dump](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.set_dump.html) Dump指定对象。仅在op_debug_mode设置为0时支持指定算子dump。 - `path`:Dump保存数据的绝对路径。 - `net_name`:自定义的网络名称,例如:"ResNet50"。 - `iteration`:指定需要Dump数据的迭代。类型为str,用“|”分离要保存的不同区间的step的数据。如"0|5-8|100-120"表示Dump第1个,第6个到第9个,第101个到第121个step的数据。指定“all”,表示Dump所有迭代的数据。仅在op_debug_mode设置为0或3时支持保存指定迭代,op_debug_mode设置为4时不支持指定迭代。 - `saved_data`: 指定Dump的数据。类型为str,取值成"tensor",表示Dump出完整张量数据;取值成"statistic",表示只Dump张量的统计信息;取值"full"代表两种都要。统计信息现只支持GPU场景,CPU场景若选"statistic"或"full"便会错误退出。默认取值为"tensor"。保存统计信息仅支持op_debug_mode设置为0的场景。 - `input_output`:设置成0,表示Dump出算子的输入和算子的输出;设置成1,表示Dump出算子的输入;设置成2,表示Dump出算子的输出。在op_debug_mode设置为4时,只能保存算子输入。 - `kernels`:该项可以配置三种格式: - 1. 算子的名称列表。通过设置环境变量`MS_DEV_SAVE_GRAPHS`的值为2开启IR保存开关并执行用例,从生成的IR文件`trace_code_graph_{graph_id}`中获取算子名称。详细说明可以参照教程:[如何保存IR](https://www.mindspore.cn/tutorials/zh-CN/master/debug/error_analysis/mindir.html#如何保存ir)。 + 1. 算子的名称列表。通过设置环境变量`MS_DEV_SAVE_GRAPHS`的值为2开启IR保存开关并执行用例,从生成的IR文件`trace_code_graph_{graph_id}`中获取算子名称。详细说明可以参照教程:[如何保存IR](https://www.mindspore.cn/tutorials/zh-CN/br_base/debug/error_analysis/mindir.html#如何保存ir)。 需要注意的是,是否设置环境变量`MS_DEV_SAVE_GRAPHS`的值为2可能会导致同一个算子的id不同,所以在Dump指定算子时要在获取算子名称之后保持这一项设置不变。或者也可以在Dump保存的`ms_output_trace_code_graph_{graph_id}.ir`文件中获取算子名称,参考[CPU/GPU后端下Dump数据对象目录](#数据对象目录和数据文件介绍-1)。 2. 还可以指定算子类型。当字符串中不带算子scope信息和算子id信息时,后台则认为其为算子类型,例如:"conv"。算子类型的匹配规则为:当发现算子名中包含算子类型字符串时,则认为匹配成功(不区分大小写),例如:"conv" 可以匹配算子 "Conv2D-op1234"、"Conv3D-op1221"。 3. 
算子名称的正则表达式。当字符串符合"name-regex(xxx)"格式时,后台则会将其作为正则表达式。例如,"name-regex(Default/.+)"可匹配算子名称以"Default/"开头的所有算子。 @@ -638,11 +638,11 @@ ms_global_execution_order_graph_{graph_id}.csv ### 数据分析样例 -为了更好地展示使用Dump来保存数据并分析数据的流程,我们提供了一套[完整样例脚本](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/dump) ,CPU/GPU后端下Dump只需要执行 `bash run_sync_dump.sh`。 +为了更好地展示使用Dump来保存数据并分析数据的流程,我们提供了一套[完整样例脚本](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/dump) ,CPU/GPU后端下Dump只需要执行 `bash run_sync_dump.sh`。 在通过Dump功能将脚本对应的图保存到磁盘上后,会产生最终执行图文件`ms_output_trace_code_graph_{graph_id}.ir`。该文件中保存了对应的图中每个算子的堆栈信息,记录了算子对应的生成脚本。 -以[AlexNet脚本](https://gitee.com/mindspore/docs/blob/master/docs/sample_code/dump/train_alexnet.py)为例: +以[AlexNet脚本](https://gitee.com/mindspore/docs/blob/br_base/docs/sample_code/dump/train_alexnet.py)为例: ```python ... @@ -782,6 +782,6 @@ numpy.load("Conv2D.Conv2D-op12.0.0.1623124369613540.output.0.DefaultFormat.npy") - Dump仅支持bool、int、int8、in16、int32、int64、uint、uint8、uint16、uint32、uint64、float、float16、float32、float64、bfloat16、double、complex64、complex128类型数据的保存。 - complex64和complex128仅支持保存为npy文件,不支持保存为统计值信息。 - Print算子内部有一个输入参数为string类型,string类型不属于Dump支持的数据类型,所以在脚本中包含Print算子时,会有错误日志,这不会影响其他类型数据的保存。 -- 使能Ascend GE后端下Dump时,sink size只能设置为1。用户通常可以使用[Model.train()](https://www.mindspore.cn/docs/zh-CN/master/api_python/train/mindspore.train.Model.html#mindspore.train.Model.train)或[data_sink()](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.data_sink.html)接口配置sink size。 +- 使能Ascend GE后端下Dump时,sink size只能设置为1。用户通常可以使用[Model.train()](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/train/mindspore.train.Model.html#mindspore.train.Model.train)或[data_sink()](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.data_sink.html)接口配置sink size。 - 使能Ascend GE后端下Dump时,**统计值dump**如果是大数据量dump场景(如网络本身规模庞大,连续dump多个step等),可能会导致host侧内存被占满,导致数据流同步失败,建议使用新版[**统计值dump**](https://gitee.com/ascend/mstt/blob/master/debug/accuracy_tools/msprobe/docs/06.data_dump_MindSpore.md#51-%E9%9D%99%E6%80%81%E5%9B%BE%E5%9C%BA%E6%99%AF)替代。 -- 默认情况下,Dump会忽略算子的无效输出,比如Send/Print算子的输出、FlashAttentionScore算子的第三个预留输出等。如果需要保留这些无效输出,可以将环境变量`MINDSPORE_DUMP_IGNORE_USELESS_OUTPUT`设置为`0`。详情请参阅[环境变量-Dump调试](https://www.mindspore.cn/docs/zh-CN/master/api_python/env_var_list.html#dump%E8%B0%83%E8%AF%95)。 +- 默认情况下,Dump会忽略算子的无效输出,比如Send/Print算子的输出、FlashAttentionScore算子的第三个预留输出等。如果需要保留这些无效输出,可以将环境变量`MINDSPORE_DUMP_IGNORE_USELESS_OUTPUT`设置为`0`。详情请参阅[环境变量-Dump调试](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/env_var_list.html#dump%E8%B0%83%E8%AF%95)。 diff --git a/tutorials/source_zh_cn/debug/error_analysis.rst b/tutorials/source_zh_cn/debug/error_analysis.rst index 5564386a95..91213a44cd 100644 --- a/tutorials/source_zh_cn/debug/error_analysis.rst +++ b/tutorials/source_zh_cn/debug/error_analysis.rst @@ -1,8 +1,8 @@ 报错分析 ======== -.. image:: https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg - :target: https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/debug/error_analysis.rst +.. image:: https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg + :target: https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/debug/error_analysis.rst :alt: 查看源文件 .. 
toctree:: @@ -89,7 +89,7 @@ MindSpore网络训练的一般过程是数据加载与处理,网络构建与 3) 根据Python调用栈以及报错信息,分析发生报错的位置。在动态图模式下,代码报错位置较容易判断。在静态图模式下,需要结合报错信息中“The Traceback of Net Construct Code”部分内容,分析报错位置。 4) 基于可能的报错问题场景以及类型,假设导致报错问题的可能原因。 -具体如何根据不同场景进行错误分析,请参考\ `错误分析 `__\ 。 +具体如何根据不同场景进行错误分析,请参考\ `错误分析 `__\ 。 错误搜索 ^^^^^^^^ @@ -100,7 +100,7 @@ MindSpore网络训练的一般过程是数据加载与处理,网络构建与 MindSpore提供常见报错问题FAQ,包括数据处理、编译执行、分布式并行等场景。可根据错误分析中得出的问题场景,使用报错描述信息进行问题搜索。 - 搜索地址:\ `FAQ `__\ 。 + 搜索地址:\ `FAQ `__\ 。 - 报错案例 @@ -143,7 +143,7 @@ MindSpore网络训练的一般过程是数据加载与处理,网络构建与 动态图模式为提高动态图执行效率,默认使用异步执行方式,错误信息在执行的最后阶段显示。在图3中可以看到异步执行方式报错信息会有告警信息,对报错分析造成干扰。 MindSpore提供切换同步执行的方法,通过设置\ ``set_context(mode=mindspore.PYNATIVE_MODE, pynative_synchronize=True)`` - 切换到同步方式执行,如果算子执行错误时,任务直接终止并显示当前错误信息。具体内容可参考\ `PyNative同步执行 `__\ 。 + 切换到同步方式执行,如果算子执行错误时,任务直接终止并显示当前错误信息。具体内容可参考\ `PyNative同步执行 `__\ 。 - 二分法策略 @@ -174,7 +174,7 @@ MindSpore网络训练的一般过程是数据加载与处理,网络构建与 1. ops.print\_接口 - 静态图模式下,MindSpore提供 `ops.print_ `_ 接口,用于打印计算图中Tensor信息或字符串信息。默认打印在屏幕上,也可以保存在文件中。 + 静态图模式下,MindSpore提供 `ops.print_ `_ 接口,用于打印计算图中Tensor信息或字符串信息。默认打印在屏幕上,也可以保存在文件中。 2. 调试器 @@ -222,7 +222,7 @@ MindSpore网络训练的一般过程是数据加载与处理,网络构建与 +----------+----------------+----------------------------------------------+-----------------------------------+ | | print\_接口 | print\_接口可以将用户输入的Tensor或\ | `print\_接口功能介绍 `_ | | | | | | @@ -230,12 +230,12 @@ MindSpore网络训练的一般过程是数据加载与处理,网络构建与 +----------+----------------+----------------------------------------------+-----------------------------------+ | | 中间文件保存 | 用于保存图编译过程中生成的中间文件,我们称为\| `查看中间文件 `_ | +----------+----------------+----------------------------------------------+-----------------------------------+ | | 数据Dump | 训练网络时,若训练结果和预期有偏差,通过Du\ | `Dump功能调试 `_ | +----------+----------------+----------------------------------------------+-----------------------------------+ | 执行控制 | Callback | 用户可以使用回调函数在特定时期执行特定动作或\| | @@ -247,7 +247,7 @@ MindSpore网络训练的一般过程是数据加载与处理,网络构建与 +----------+----------------+----------------------------------------------+-----------------------------------+ | | Hook | 在pynative模式使用Hook功能可以捕\ | `Hook功能 `_ | | | | e_hook、register_forwar\ | | @@ -256,7 +256,7 @@ MindSpore网络训练的一般过程是数据加载与处理,网络构建与 +----------+----------------+----------------------------------------------+-----------------------------------+ | | 同步执行 | 在动态图模式下,为了提升性能,算子在devi\ | `动态图同步执行 `_ | +----------+----------------+----------------------------------------------+-----------------------------------+ @@ -273,7 +273,7 @@ MindSpore为框架开发者提供了丰富的调试手段,调试功能涵盖 | | | 的信息,为了解框架执行过程\ | `_ | | | | | | | | | | | diff --git a/tutorials/source_zh_cn/debug/error_analysis/cann_error_cases.md b/tutorials/source_zh_cn/debug/error_analysis/cann_error_cases.md index 7086f23a3b..6a29b4b5c5 100644 --- a/tutorials/source_zh_cn/debug/error_analysis/cann_error_cases.md +++ b/tutorials/source_zh_cn/debug/error_analysis/cann_error_cases.md @@ -1,6 +1,6 @@ # CANN常见错误分析 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/debug/error_analysis/cann_error_cases.md)   +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/debug/error_analysis/cann_error_cases.md)   本文主要介绍用户常见的CANN错误处理方法。在遇到CANN错误时,MindSpore的日志可能不足以分析相关错误,可以通过设置以下两个环境变量来打印CANN的日志以更好地分析错误: diff --git 
a/tutorials/source_zh_cn/debug/error_analysis/error_scenario_analysis.md b/tutorials/source_zh_cn/debug/error_analysis/error_scenario_analysis.md index 98f105b9e4..561e78ffe8 100644 --- a/tutorials/source_zh_cn/debug/error_analysis/error_scenario_analysis.md +++ b/tutorials/source_zh_cn/debug/error_analysis/error_scenario_analysis.md @@ -1,6 +1,6 @@ # 错误分析 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/debug/error_analysis/error_scenario_analysis.md)   +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/debug/error_analysis/error_scenario_analysis.md)   如前文所述,错误分析是指基于获取到的网络、框架各种信息(例如:错误信息、网络代码等信息)进行错误原因分析,推断错误的可能原因。 @@ -20,9 +20,9 @@ | 常见错误类型 | 错误说明 | 案例分析 | |-------------|---------|---| -| 数据准备错误 | 数据集本身问题,包括数据集路径问题以及MindRecord 文件问题 | [数据准备错误案例](https://www.mindspore.cn/tutorials/zh-CN/master/debug/error_analysis/minddata_debug.html#数据准备) | -| 数据加载错误 | 数据加载阶段的资源配置错误、自定义加载方法错误以及迭代器使用错误等 | [数据加载错误案例](https://www.mindspore.cn/tutorials/zh-CN/master/debug/error_analysis/minddata_debug.html#数据加载) | -| 数据增强错误 | 数据格式不匹配、数据尺寸不匹配、资源占用问题、多线程卡死 | [数据增强错误案例](https://www.mindspore.cn/tutorials/zh-CN/master/debug/error_analysis/minddata_debug.html#数据增强) | +| 数据准备错误 | 数据集本身问题,包括数据集路径问题以及MindRecord 文件问题 | [数据准备错误案例](https://www.mindspore.cn/tutorials/zh-CN/br_base/debug/error_analysis/minddata_debug.html#数据准备) | +| 数据加载错误 | 数据加载阶段的资源配置错误、自定义加载方法错误以及迭代器使用错误等 | [数据加载错误案例](https://www.mindspore.cn/tutorials/zh-CN/br_base/debug/error_analysis/minddata_debug.html#数据加载) | +| 数据增强错误 | 数据格式不匹配、数据尺寸不匹配、资源占用问题、多线程卡死 | [数据增强错误案例](https://www.mindspore.cn/tutorials/zh-CN/br_base/debug/error_analysis/minddata_debug.html#数据增强) | ## 网络构建与训练错误分析 @@ -32,11 +32,11 @@ | 常见错误类型 | 错误说明 | 案例分析 | | - | - | - | -| context配置问题 | 系统进行上下文配置时的错误 | [context配置问题分析](https://www.mindspore.cn/tutorials/zh-CN/master/debug/error_analysis/mindrt_debug.html#context%E9%85%8D%E7%BD%AE%E9%97%AE%E9%A2%98)| -| 语法错误 | 包括Python语法错误和MindSpore静态图语法错误,例如控制流语法不支持、Tensor切片错误等 | [语法错误分析](https://mindspore.cn/tutorials/zh-CN/master/debug/error_analysis/mindrt_debug.html#语法问题) | -| 算子编译错误 | 包括算子参数值/类型/shape不满足要求、算子功能限制等 | [算子编译错误分析](https://mindspore.cn/tutorials/zh-CN/master/debug/error_analysis/mindrt_debug.html#算子编译错误) | -| 算子执行错误 | 包括输入数据异常、算子实现错误、功能限制、资源限制等 | [算子执行错误分析](https://mindspore.cn/tutorials/zh-CN/master/debug/error_analysis/mindrt_debug.html#算子执行错误) | -| 资源不足 | 包括设备内存不足、函数调用栈超限、流资源超限等 | [资源不足分析](https://mindspore.cn/tutorials/zh-CN/master/debug/error_analysis/mindrt_debug.html#资源不足) | +| context配置问题 | 系统进行上下文配置时的错误 | [context配置问题分析](https://www.mindspore.cn/tutorials/zh-CN/br_base/debug/error_analysis/mindrt_debug.html#context%E9%85%8D%E7%BD%AE%E9%97%AE%E9%A2%98)| +| 语法错误 | 包括Python语法错误和MindSpore静态图语法错误,例如控制流语法不支持、Tensor切片错误等 | [语法错误分析](https://mindspore.cn/tutorials/zh-CN/br_base/debug/error_analysis/mindrt_debug.html#语法问题) | +| 算子编译错误 | 包括算子参数值/类型/shape不满足要求、算子功能限制等 | [算子编译错误分析](https://mindspore.cn/tutorials/zh-CN/br_base/debug/error_analysis/mindrt_debug.html#算子编译错误) | +| 算子执行错误 | 包括输入数据异常、算子实现错误、功能限制、资源限制等 | [算子执行错误分析](https://mindspore.cn/tutorials/zh-CN/br_base/debug/error_analysis/mindrt_debug.html#算子执行错误) | +| 资源不足 | 包括设备内存不足、函数调用栈超限、流资源超限等 | [资源不足分析](https://mindspore.cn/tutorials/zh-CN/br_base/debug/error_analysis/mindrt_debug.html#资源不足) | 
### 动态图模式错误分析 @@ -52,7 +52,7 @@ - 根据报错描述内容,确认报错的对象,比如对应的算子API名称; - 根据Python调用栈信息,找到报错的代码行位置; -- 分析报错位置的代码输入数据和计算逻辑,结合[MindSpore API文档](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.html)中对应的报错对象的说明和规格限制,分析出现报错问题的原因。 +- 分析报错位置的代码输入数据和计算逻辑,结合[MindSpore API文档](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.html)中对应的报错对象的说明和规格限制,分析出现报错问题的原因。 ### 静态图模式错误分析 @@ -69,9 +69,9 @@ - 如果是计算图编译报错,根据报错描述和发生报错时自动保存的`analyze_failed.ir`文件,分析计算图推导失败的原因和位置; - 如果是计算图执行报错,可能是资源不足导致的执行报错,也可能是算子的执行报错,需要根据报错信息进行区分。如果是算子执行报错,首先确认是哪个算子,然后使用Dump功能保存算子的输入数据,通过输入数据分析算子报错的原因; -分析计算图推导失败的原因可以参考[`analyze_failed.ir`分析方法](https://www.mindspore.cn/tutorials/zh-CN/master/debug/error_analysis/mindir.html#如何根据analyze-failir文件分析图推导失败的原因)。 +分析计算图推导失败的原因可以参考[`analyze_failed.ir`分析方法](https://www.mindspore.cn/tutorials/zh-CN/br_base/debug/error_analysis/mindir.html#如何根据analyze-failir文件分析图推导失败的原因)。 -使用Dump保存算子输入数据可以参考[Dump功能调试](https://www.mindspore.cn/tutorials/zh-CN/master/debug/dump.html)。 +使用Dump保存算子输入数据可以参考[Dump功能调试](https://www.mindspore.cn/tutorials/zh-CN/br_base/debug/dump.html)。 ## 分布式并行错误分析 @@ -114,7 +114,7 @@ class MyStridedSlice(nn.Cell): 错误原因: -这段代码在第零维度进行了取切片操作。但是配置的策略(2,1)表示分别对输入Tensor的第零维度和第一维度进行取切片操作。根据目前[MindSpore API文档](https://www.mindspore.cn/docs/zh-CN/master/api_python/operator_list_parallel.html)中对算子切分的说明, +这段代码在第零维度进行了取切片操作。但是配置的策略(2,1)表示分别对输入Tensor的第零维度和第一维度进行取切片操作。根据目前[MindSpore API文档](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/operator_list_parallel.html)中对算子切分的说明, > 仅支持值为全0的mask;需要切分的维度必须全部提取;输入在strides不为1对应的维度不支持切分 @@ -274,11 +274,11 @@ EK0001: Path [/ms_test/csj/csj/user_scene/profiler_chinese_中文/resnet/scripts | 常见错误类型 | 错误说明 | 案例分析 | | - | - | - | -| AICORE算子编译问题 | AICORE算子编译时的错误 | [AICORE算子编译问题分析](https://www.mindspore.cn/tutorials/zh-CN/master/debug/error_analysis/cann_error_cases.html#aicore算子编译问题)| -| AICORE算子执行问题 | AICORE算子执行时的错误 | [AICORE算子执行问题分析](https://mindspore.cn/tutorials/zh-CN/master/debug/error_analysis/cann_error_cases.html#aicore算子执行问题) | -| AICPU算子执行问题 | AICPU算子执行时的错误 | [AICPU算子执行问题分析](https://mindspore.cn/tutorials/zh-CN/master/debug/error_analysis/cann_error_cases.html#aicpu算子执行问题) | -| runtime常见问题 | 包括输入数据异常、算子实现错误、功能限制、资源限制等 | [runtime常见问题分析](https://mindspore.cn/tutorials/zh-CN/master/debug/error_analysis/cann_error_cases.html#runtime常见问题) | -| HCCL&HCCP常见问题 | 多机多卡训练时的通信常见问题,包括socket建链超时、notify wait超时、ranktable配置错误等 | [HCCL&HCCP常见问题](https://mindspore.cn/tutorials/zh-CN/master/debug/error_analysis/cann_error_cases.html#hcclhccp常见问题) | -| profiling常见问题 | 性能调优运行profiling时的错误 | [profiling常见问题分析](https://mindspore.cn/tutorials/zh-CN/master/debug/error_analysis/cann_error_cases.html#profiling常见问题) | +| AICORE算子编译问题 | AICORE算子编译时的错误 | [AICORE算子编译问题分析](https://www.mindspore.cn/tutorials/zh-CN/br_base/debug/error_analysis/cann_error_cases.html#aicore算子编译问题)| +| AICORE算子执行问题 | AICORE算子执行时的错误 | [AICORE算子执行问题分析](https://mindspore.cn/tutorials/zh-CN/br_base/debug/error_analysis/cann_error_cases.html#aicore算子执行问题) | +| AICPU算子执行问题 | AICPU算子执行时的错误 | [AICPU算子执行问题分析](https://mindspore.cn/tutorials/zh-CN/br_base/debug/error_analysis/cann_error_cases.html#aicpu算子执行问题) | +| runtime常见问题 | 包括输入数据异常、算子实现错误、功能限制、资源限制等 | [runtime常见问题分析](https://mindspore.cn/tutorials/zh-CN/br_base/debug/error_analysis/cann_error_cases.html#runtime常见问题) | +| HCCL&HCCP常见问题 | 多机多卡训练时的通信常见问题,包括socket建链超时、notify wait超时、ranktable配置错误等 | [HCCL&HCCP常见问题](https://mindspore.cn/tutorials/zh-CN/br_base/debug/error_analysis/cann_error_cases.html#hcclhccp常见问题) | +| profiling常见问题 | 
性能调优运行profiling时的错误 | [profiling常见问题分析](https://mindspore.cn/tutorials/zh-CN/br_base/debug/error_analysis/cann_error_cases.html#profiling常见问题) | 更多有关CANN错误的信息可前往[昇腾CANN开发者文档](https://www.hiascend.com/document/moreVersion/zh/CANNCommunityEdition/),查询对应CANN版本的故障处理章节。 diff --git a/tutorials/source_zh_cn/debug/error_analysis/minddata_debug.md b/tutorials/source_zh_cn/debug/error_analysis/minddata_debug.md index 91799e3a01..5b0772ceb0 100644 --- a/tutorials/source_zh_cn/debug/error_analysis/minddata_debug.md +++ b/tutorials/source_zh_cn/debug/error_analysis/minddata_debug.md @@ -1,6 +1,6 @@ # 数据处理调试方法与常见问题分析 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/debug/error_analysis/minddata_debug.md)   +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/debug/error_analysis/minddata_debug.md)   ## 数据处理调试方法 @@ -217,7 +217,7 @@ mindspore/ccsrc/minddata/dataset/kernels/image/crop_op.cc(33). 根据打印的信息可以看到 `Crop` 处理第一个样本时报错,第一个样本的shape(32, 32, 3),被 `RandomResize` 变换为(3, 16, 3),但是没有打印 `Crop` 变换后的shape就报错了。因此正是此时的shape不能被 `Crop` 处理导致错误发生。进一步根据Dataset Pipeline Error Message的提示,输入样本的高只有3,但是期望裁剪出高维8的区域,所以报错。 -查看 `Crop` 的 [API说明](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset_vision/mindspore.dataset.vision.Crop.html#mindspore.dataset.vision.Crop) ,`Crop` 要求输入样本的shape为 ,所以 `Crop` 会把(3, 16, 3)当成,当H=3, W=16,C=3时自然裁剪不出H=8, W=8的区域。 +查看 `Crop` 的 [API说明](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset_vision/mindspore.dataset.vision.Crop.html#mindspore.dataset.vision.Crop) ,`Crop` 要求输入样本的shape为 ,所以 `Crop` 会把(3, 16, 3)当成,当H=3, W=16,C=3时自然裁剪不出H=8, W=8的区域。 为了快速修复此问题,我们只需要把 `RandomResize` 的参数size由原来的(3, 16)改为(16, 16),再次执行就会发现用例通过。 @@ -240,13 +240,13 @@ data (8, 8, 48) #### 方式二:通过数据管道调试模式调试map操作 -我们还可以调用 [set_debug_mode](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.config.set_debug_mode.html) 方法开启数据集管道调试模式来进行调试。 +我们还可以调用 [set_debug_mode](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.config.set_debug_mode.html) 方法开启数据集管道调试模式来进行调试。 当启用调试模式时,如果随机种子没有被设置,则会将随机种子设置为1,以便在调试模式下执行数据集管道可以获得确定性的结果。 流程如下: 1. 在 `map` 算子中打印每个变换op的输入输出数据的形状和类型。 -2. 启用数据集管道调试模式,并使用MindData提供的预定义调试钩子或者用户定义的调试钩子,它必须定义继承自 [DebugHook](https://mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.debug.DebugHook.html) 类。 +2. 启用数据集管道调试模式,并使用MindData提供的预定义调试钩子或者用户定义的调试钩子,它必须定义继承自 [DebugHook](https://mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.debug.DebugHook.html) 类。 以下是在 `方式一` 的用例上做修改,使用MindData提供的预定义调试钩子。 @@ -300,7 +300,7 @@ E ------------------------------------------------------------------ E mindspore/ccsrc/minddata/dataset/kernels/image/crop_op.cc(33). 
``` -根据打印的信息我们就能很清楚的知道 `Crop` 在处理输入shape为(3, 16, 3)的时候出现了报错,同样查看 `Crop` 的 [API说明](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset_vision/mindspore.dataset.vision.Crop.html#mindspore.dataset.vision.Crop)。我们只需要把 `RandomResize` 的参数size由原来的(3, 16)改为(16, 16),再次执行就会发现用例通过。 +根据打印的信息我们就能很清楚的知道 `Crop` 在处理输入shape为(3, 16, 3)的时候出现了报错,同样查看 `Crop` 的 [API说明](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset_vision/mindspore.dataset.vision.Crop.html#mindspore.dataset.vision.Crop)。我们只需要把 `RandomResize` 的参数size由原来的(3, 16)改为(16, 16),再次执行就会发现用例通过。 ```text [Dataset debugger] Print the [INPUT] of the operation [RandomResize]. diff --git a/tutorials/source_zh_cn/debug/error_analysis/mindir.md b/tutorials/source_zh_cn/debug/error_analysis/mindir.md index 85c6b8593c..558776c458 100644 --- a/tutorials/source_zh_cn/debug/error_analysis/mindir.md +++ b/tutorials/source_zh_cn/debug/error_analysis/mindir.md @@ -1,6 +1,6 @@ # IR文件分析 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/debug/error_analysis/mindir.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/debug/error_analysis/mindir.md) ## 概述 diff --git a/tutorials/source_zh_cn/debug/error_analysis/mindrt_debug.md b/tutorials/source_zh_cn/debug/error_analysis/mindrt_debug.md index 495c4766bc..dba308955f 100644 --- a/tutorials/source_zh_cn/debug/error_analysis/mindrt_debug.md +++ b/tutorials/source_zh_cn/debug/error_analysis/mindrt_debug.md @@ -1,6 +1,6 @@ # 网络构建与训练常见错误分析 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/debug/error_analysis/mindrt_debug.md)   +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/debug/error_analysis/mindrt_debug.md)   静态图模式下,网络构建与训练过程的常见的报错类型如下所示: @@ -18,7 +18,7 @@ ValueError: For 'set_context', package type mindspore-gpu support 'device_target [MindSpore 配置问题 - 'set_context'配置报错](https://www.hiascend.com/developer/blog/details/0229106885219029083)。 -关于context配置的详细使用说明请参考['set_context'](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.set_context.html)。 +关于context配置的详细使用说明请参考['set_context'](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.set_context.html)。 ## 语法问题 @@ -62,7 +62,7 @@ Shape Join Failed: shape1 = (2, 3, 4, 5), shape2 = (). 
参考实例链接: -[MindSpore 语法问题 - Type(Shape) Join Failed](https://www.mindspore.cn/docs/zh-CN/master/faq/network_compilation.html?highlight=type%20join%20failed) +[MindSpore 语法问题 - Type(Shape) Join Failed](https://www.mindspore.cn/docs/zh-CN/br_base/faq/network_compilation.html?highlight=type%20join%20failed) for语句以及while语句可能存在循环次数过大,导致函数调用栈超限的问题。报错信息如下所示: diff --git a/tutorials/source_zh_cn/debug/profiler.md b/tutorials/source_zh_cn/debug/profiler.md index 30f147a729..cddb7fbe4c 100644 --- a/tutorials/source_zh_cn/debug/profiler.md +++ b/tutorials/source_zh_cn/debug/profiler.md @@ -1,6 +1,6 @@ # Ascend性能调优 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/debug/profiler.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/debug/profiler.md) ## 概述 @@ -10,7 +10,7 @@ 1. 准备训练脚本; -2. 在训练脚本中调用性能调试接口,如[mindspore.profiler.profile](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.profiler.profile.html)以及[mindspore.profiler.DynamicProfilerMonitor](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.profiler.DynamicProfilerMonitor.html)接口; +2. 在训练脚本中调用性能调试接口,如[mindspore.profiler.profile](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.profiler.profile.html)以及[mindspore.profiler.DynamicProfilerMonitor](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.profiler.DynamicProfilerMonitor.html)接口; 3. 运行训练脚本; @@ -22,7 +22,7 @@ ### 方式一:mindspore.profiler.profile接口使能 -在训练脚本中添加MindSpore profile相关接口,用户可以参考[MindSpore profile参数详解](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.profiler.profile.html)和[_ExperimentalConfig可扩展参数详解](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.profiler._ExperimentalConfig.html),针对自己的数据需求配置采集性能数据的级别等参数。 +在训练脚本中添加MindSpore profile相关接口,用户可以参考[MindSpore profile参数详解](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.profiler.profile.html)和[_ExperimentalConfig可扩展参数详解](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.profiler._ExperimentalConfig.html),针对自己的数据需求配置采集性能数据的级别等参数。 该接口支持两种采集方式:自定义for循环方式和CallBack方式,且在Graph和PyNative两种模式下都支持。 @@ -67,22 +67,22 @@ with mindspore.profiler.profile(activities=[ProfilerActivity.CPU, ProfilerActivi ``` - schedule:使能后,落盘数据中kernel_details.csv中包含了Step ID一列信息。根据样例中schedule的配置,skip_first跳过0个step,wait等待0个step,warmup预热0个step。根据active为1,则从第0个step开始采集,采集1个step。因此Step ID为0,表示采集的是第0个step。 -- on_trace_ready:profiler的落盘路径是通过on_trace_ready的tensorboard_trace_handler参数指定的,tensorboard_trace_handler会默认解析性能数据,用户如果没有配置tensorboard_trace_handler,数据会默认落盘到当前脚本同级目录的'/data'文件夹下,可以通过离线解析功能解析性能数据,离线解析功能可参考[方式四:离线解析](https://www.mindspore.cn/tutorials/zh-CN/master/debug/profiler.html#%E6%96%B9%E5%BC%8F%E5%9B%9B-%E7%A6%BB%E7%BA%BF%E8%A7%A3%E6%9E%90)。 +- on_trace_ready:profiler的落盘路径是通过on_trace_ready的tensorboard_trace_handler参数指定的,tensorboard_trace_handler会默认解析性能数据,用户如果没有配置tensorboard_trace_handler,数据会默认落盘到当前脚本同级目录的'/data'文件夹下,可以通过离线解析功能解析性能数据,离线解析功能可参考[方式四:离线解析](https://www.mindspore.cn/tutorials/zh-CN/br_base/debug/profiler.html#%E6%96%B9%E5%BC%8F%E5%9B%9B-%E7%A6%BB%E7%BA%BF%E8%A7%A3%E6%9E%90)。 
-完整案例参考[自定义for循环采集完整代码样例](https://gitee.com/mindspore/docs/blob/master/docs/sample_code/profiler/for_loop_profiler.py)。 +完整案例参考[自定义for循环采集完整代码样例](https://gitee.com/mindspore/docs/blob/br_base/docs/sample_code/profiler/for_loop_profiler.py)。 **schedule参数配置原理如下:** 如下图,schedule中有5个参数可以配置,分别为:skip_first、wait、warmup、active、repeat。其中skip_first表示跳过前skip_first个step;wait表示等待阶段, 跳过wait个step;warmup表示预热阶段,跳过warmup个step;active表示采集active个step;repeat表示重复执行次数。其中1个repeat包括wait+warmup+active个step。 -一个repeat内所有step执行完之后,会执行on_trace_ready配置的回调函数解析性能数据。各个参数的详细介绍请参考[schedule API文档](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.profiler.schedule.html)。 +一个repeat内所有step执行完之后,会执行on_trace_ready配置的回调函数解析性能数据。各个参数的详细介绍请参考[schedule API文档](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.profiler.schedule.html)。 ![schedule.png](./images/schedule.png) 例如:模型训练共100个step(0-99),此时配置schedule为 `schedule(skip_first=10, wait=10, warmup=5, active=5, repeat=2)` 。那么profiler将会先跳过前10个step(0-9)。从step 10开始,第1个repeat将等待10个step(10-19),预热5个step(20-24),最终采集5个step(25-29)的性能数据。第2个repeat重复等待10个step(30-39),预热5个step(40-44),最终采集5个step(45-49)的性能数据。 > - 在单卡场景下,profiler根据repeat次数在同一目录下生成多份性能数据。每个repeat对应一个文件夹,包含该repeat中所有active step采集到的性能数据。在多卡场景下,每张卡会独立生成性能数据,每张卡的数据都会根据repeat次数分成多份。当repeat配置为0时,表示重复执行的具体次数由总step数确定,不断重复wait-warmup-active直到所有step执行完毕。 -> - schedule需要配合[mindspore.profiler.profile.step](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.profiler.profile.html#mindspore.profiler.profile.step)接口使用,如果配置了schedule而没有调用mindspore.profiler.profile.step接口进行数据采集,则profiler数据采集区间的所有数据都属于第0个step,因此只有在第0个step对应active(wait、warmup、skip_first都配置为0)时,才会生成性能数据文件。 +> - schedule需要配合[mindspore.profiler.profile.step](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.profiler.profile.html#mindspore.profiler.profile.step)接口使用,如果配置了schedule而没有调用mindspore.profiler.profile.step接口进行数据采集,则profiler数据采集区间的所有数据都属于第0个step,因此只有在第0个step对应active(wait、warmup、skip_first都配置为0)时,才会生成性能数据文件。 #### CallBack方式采集样例 @@ -114,7 +114,7 @@ class StopAtStep(mindspore.Callback): self.profiler.stop() ``` -完整案例请参考[CallBack方式采集完整代码样例](https://gitee.com/mindspore/docs/blob/master/docs/sample_code/profiler/call_back_profiler.py)。 +完整案例请参考[CallBack方式采集完整代码样例](https://gitee.com/mindspore/docs/blob/br_base/docs/sample_code/profiler/call_back_profiler.py)。 ### 方式二:动态profiler使能 @@ -148,7 +148,7 @@ JSON配置样例如下: } ``` -1. 用户需要在实例化DynamicProfilerMonitor前,配置如上的JSON文件,并将配置文件保存在cfg_path中。详细参数介绍请参考[DynamicProfilerMonitor参数详解](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.profiler.DynamicProfilerMonitor.html); +1. 用户需要在实例化DynamicProfilerMonitor前,配置如上的JSON文件,并将配置文件保存在cfg_path中。详细参数介绍请参考[DynamicProfilerMonitor参数详解](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.profiler.DynamicProfilerMonitor.html); 2. 在模型训练后调用DynamicProfilerMonitor的step接口采集数据; 3. 
用户如果想在训练中变更采集、解析任务,可以修改JSON配置文件。如变更上述JSON配置中的start_step为8,stop_step为10。保存后,DynamicProfilerMonitor会自动识别出配置文件,变更成新的采集、解析任务。 @@ -170,11 +170,11 @@ for _ in range(STEP_NUM): 此时生成的结果文件包含两个文件夹:rank0_start2_stop5以及rank0_start8_stop10,分别代表采集的step为2-5和8-10。 -完整案例请参考[动态Profiler使能方式案例](https://gitee.com/mindspore/docs/blob/master/docs/sample_code/profiler/dynamic_profiler.py)。 +完整案例请参考[动态Profiler使能方式案例](https://gitee.com/mindspore/docs/blob/br_base/docs/sample_code/profiler/dynamic_profiler.py)。 ### 方式三:环境变量使能 -用户如果想最简单地使能Profiler,可以使用环境变量使能方式,目前只支持单卡场景。该方式只需将参数配置到环境变量中,在模型训练中会自动采集性能数据。该方式暂不支持schedule、on_trace_ready、experimental_config参数,其他参数都可以使用。详细配置项介绍请参考[环境变量使能方式参数详解](https://www.mindspore.cn/docs/zh-CN/master/api_python/env_var_list.html)。 +用户如果想最简单地使能Profiler,可以使用环境变量使能方式,目前只支持单卡场景。该方式只需将参数配置到环境变量中,在模型训练中会自动采集性能数据。该方式暂不支持schedule、on_trace_ready、experimental_config参数,其他参数都可以使用。详细配置项介绍请参考[环境变量使能方式参数详解](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/env_var_list.html)。 > 使用环境变量使能方式,请在脚本开始执行之前通过环境变量设置好device_id。禁止在脚本中通过set_context函数设置device_id。 @@ -195,7 +195,7 @@ export MS_PROFILER_OPTIONS=' ### 方式四:离线解析 -用户如果想重新解析已经采集的性能数据,可以使用[mindspore.profiler.profiler.analyse](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.profiler.profiler.analyse.html)接口进行离线解析。analyse接口详细介绍请参考[离线解析analyse接口参数详解](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.profiler.profiler.analyse.html)。 +用户如果想重新解析已经采集的性能数据,可以使用[mindspore.profiler.profiler.analyse](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.profiler.profiler.analyse.html)接口进行离线解析。analyse接口详细介绍请参考[离线解析analyse接口参数详解](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.profiler.profiler.analyse.html)。 离线解析样例如下: @@ -211,7 +211,7 @@ analyse("./profiler_data_path") # './profiler_data_path'为离线解析数据路 ![mstx_profiler.png](./images/mstx_profiler.png) -使用轻量化打点功能时需要保证[_ExperimentalConfig](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.profiler._ExperimentalConfig.html)接口中的mstx配置为True,同时注意轻量化打点数据只在profiler的数据采集区间生效。mstx接口详细介绍请参考[mstx API文档](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.profiler.mstx.html)。 +使用轻量化打点功能时需要保证[_ExperimentalConfig](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.profiler._ExperimentalConfig.html)接口中的mstx配置为True,同时注意轻量化打点数据只在profiler的数据采集区间生效。mstx接口详细介绍请参考[mstx API文档](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.profiler.mstx.html)。 轻量化打点样例如下: @@ -224,7 +224,7 @@ mstx.mark("start") mstx.range_end(range_id) ``` -完整案例请参考[mstx轻量化打点方式案例](https://gitee.com/mindspore/docs/blob/master/docs/sample_code/profiler/mstx_profiler.py)。 +完整案例请参考[mstx轻量化打点方式案例](https://gitee.com/mindspore/docs/blob/br_base/docs/sample_code/profiler/mstx_profiler.py)。 ## 性能数据 diff --git a/tutorials/source_zh_cn/debug/pynative.md b/tutorials/source_zh_cn/debug/pynative.md index 60c44ffb54..5219e0894e 100644 --- a/tutorials/source_zh_cn/debug/pynative.md +++ b/tutorials/source_zh_cn/debug/pynative.md @@ -1,6 +1,6 @@ # 动态图调试 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/debug/pynative.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/debug/pynative.md) ## 概述 @@ -82,7 +82,7 @@ 
def some_function(): - 3-ERROR,表示程序执行出现报错,输出错误日志,程序可能不会终止 - 4-CRITICAL,表示程序执行出现异常,将会终止执行程序 -详细的日志控制方法见[环境变量](https://www.mindspore.cn/docs/zh-CN/master/api_python/env_var_list.html#日志)。 +详细的日志控制方法见[环境变量](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/env_var_list.html#日志)。 ### 常见PDB调试命令 @@ -125,7 +125,7 @@ def some_function(): print(output) ``` - 详细API使用说明可以[参考](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.register_hook.html#mindspore.Tensor.register_hook)。 + 详细API使用说明可以[参考](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/Tensor/mindspore.Tensor.register_hook.html#mindspore.Tensor.register_hook)。 - 可以通过`mindspore.ops.HookBackward`查看执行过程中的梯度,例如: @@ -151,7 +151,7 @@ def some_function(): print(output) ``` - 详细API使用说明可以[参考](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.HookBackward.html)。 + 详细API使用说明可以[参考](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.HookBackward.html)。 - 可以通过`mindspore.nn.Cell.register_backward_hook`查看某个Cell的梯度,例如: @@ -180,7 +180,7 @@ def some_function(): print(output) ``` - 详细API使用说明可以[参考](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.register_backward_hook)。 + 详细API使用说明可以[参考](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.register_backward_hook)。 ## 更多实际案例 diff --git a/tutorials/source_zh_cn/debug/sdc.md b/tutorials/source_zh_cn/debug/sdc.md index 6f644702f0..7b631a8d81 100644 --- a/tutorials/source_zh_cn/debug/sdc.md +++ b/tutorials/source_zh_cn/debug/sdc.md @@ -1,6 +1,6 @@ # 特征值检测 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/debug/sdc.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/debug/sdc.md) ## 概述 @@ -46,7 +46,7 @@ MindSpore框架2.4版本提供了网络模型的特征值检测方案,该方 环境变量`NPU_ASD_SIGMA_THRESH`控制检测的相对数值阈值,格式与上者相同,其中第一个元素控制数值跳变一级阈值,第二个元素控制数值跳变二级阈值;默认情况下,`NPU_ASD_SIGMA_THRESH=100000,5000`。 -上述环境变量的详细说明参见[环境变量](https://www.mindspore.cn/docs/zh-CN/master/api_python/env_var_list.html)。 +上述环境变量的详细说明参见[环境变量](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/env_var_list.html)。 ## 使用用例 diff --git a/tutorials/source_zh_cn/generative/cyclegan.ipynb b/tutorials/source_zh_cn/generative/cyclegan.ipynb index 9d4a9a5d34..e9dd2eb1a0 100644 --- a/tutorials/source_zh_cn/generative/cyclegan.ipynb +++ b/tutorials/source_zh_cn/generative/cyclegan.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/generative/mindspore_cyclegan.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/generative/mindspore_cyclegan.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/generative/cyclegan.ipynb)\n", + 
"[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/generative/mindspore_cyclegan.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/generative/mindspore_cyclegan.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/generative/cyclegan.ipynb)\n", "\n", "# CycleGAN图像风格迁移互换\n", "\n", @@ -32,13 +32,13 @@ "\n", "CycleGAN 网络本质上是由两个镜像对称的 GAN 网络组成,其结构如下图所示(图片来源于原论文):\n", "\n", - "![CycleGAN](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/generative/images/CycleGAN.png)\n", + "![CycleGAN](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/generative/images/CycleGAN.png)\n", "\n", "为了方便理解,这里以苹果和橘子为例介绍。上图中 $X$ 可以理解为苹果,$Y$ 为橘子;$G$ 为将苹果生成橘子风格的生成器,$F$ 为将橘子生成的苹果风格的生成器,$D_{X}$ 和 $D_{Y}$ 为其相应判别器,具体生成器和判别器的结构可见下文代码。模型最终能够输出两个模型的权重,分别将两种图像的风格进行彼此迁移,生成新的图像。\n", "\n", "该模型一个很重要的部分就是损失函数,在所有损失里面循环一致损失(Cycle Consistency Loss)是最重要的。循环损失的计算过程如下图所示(图片来源于原论文):\n", "\n", - "![Cycle Consistency Loss](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/generative/images/CycleGAN_1.png)\n", + "![Cycle Consistency Loss](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/generative/images/CycleGAN_1.png)\n", "\n", "图中苹果图片 $x$ 经过生成器 $G$ 得到伪橘子 $\\hat{Y}$,然后将伪橘子 $\\hat{Y}$ 结果送进生成器 $F$ 又产生苹果风格的结果 $\\hat{x}$,最后将生成的苹果风格结果 $\\hat{x}$ 与原苹果图片 $x$ 一起计算出循环一致损失,反之亦然。循环损失捕捉了这样的直觉,即如果我们从一个域转换到另一个域,然后再转换回来,我们应该到达我们开始的地方。详细的训练过程见下文代码。\n" ] @@ -77,7 +77,7 @@ "source": [ "### 数据集加载\n", "\n", - "使用 MindSpore 的 [MindDataset](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.MindDataset.html) 接口读取和解析数据集。\n" + "使用 MindSpore 的 [MindDataset](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.MindDataset.html) 接口读取和解析数据集。\n" ] }, { @@ -178,7 +178,7 @@ "\n", "生成器的结构如下所示:\n", "\n", - "![CycleGAN Generator](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/generative/images/CycleGAN_2.jpg)\n", + "![CycleGAN Generator](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/generative/images/CycleGAN_2.jpg)\n", "\n", "具体的模型结构请参照下文代码:\n" ] diff --git a/tutorials/source_zh_cn/generative/dcgan.ipynb b/tutorials/source_zh_cn/generative/dcgan.ipynb index fda4946048..2b6071266e 100644 --- a/tutorials/source_zh_cn/generative/dcgan.ipynb +++ b/tutorials/source_zh_cn/generative/dcgan.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/generative/mindspore_dcgan.ipynb) 
[![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/generative/mindspore_dcgan.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/generative/dcgan.ipynb)\n", + "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/generative/mindspore_dcgan.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/generative/mindspore_dcgan.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/generative/dcgan.ipynb)\n", "\n", "# DCGAN生成漫画头像\n", "\n", @@ -17,7 +17,7 @@ "source": [ "## GAN基础原理\n", "\n", - "这部分原理介绍参考[GAN图像生成](https://www.mindspore.cn/tutorials/zh-CN/master/generative/gan.html#模型简介)。\n", + "这部分原理介绍参考[GAN图像生成](https://www.mindspore.cn/tutorials/zh-CN/br_base/generative/gan.html#模型简介)。\n", "\n", "## DCGAN原理\n", "\n", @@ -146,7 +146,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "通过[create_dict_iterator](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_dict_iterator.html)函数将数据转换成字典迭代器,然后使用`matplotlib`模块可视化部分训练数据。" + "通过[create_dict_iterator](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_dict_iterator.html)函数将数据转换成字典迭代器,然后使用`matplotlib`模块可视化部分训练数据。" ] }, { @@ -197,7 +197,7 @@ "\n", "DCGAN论文生成图像如下所示:\n", "\n", - "![dcgangenerator](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/dcgan.png)\n", + "![dcgangenerator](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/dcgan.png)\n", "\n", "> 图片来源:[Unsupervised Representation Learning With Deep Convolutional Generative Adversarial Networks](https://arxiv.org/pdf/1511.06434.pdf).\n", "\n", @@ -303,7 +303,7 @@ "\n", "### 损失函数\n", "\n", - "当定义了`D`和`G`后,接下来将使用MindSpore中定义的二进制交叉熵损失函数[BCELoss](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.BCELoss.html)。" + "当定义了`D`和`G`后,接下来将使用MindSpore中定义的二进制交叉熵损失函数[BCELoss](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.BCELoss.html)。" ] }, { @@ -541,7 +541,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "![dcgan](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/dcgan.gif)\n", + "![dcgan](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/dcgan.gif)\n", "\n", "从上面的图像可以看出,随着训练次数的增多,图像质量也越来越好。如果增大训练周期数,当`num_epochs`达到50以上时,生成的动漫头像图片与数据集中的较为相似,下面我们通过加载生成器网络模型参数文件来生成图像,代码如下:" ] diff --git a/tutorials/source_zh_cn/generative/diffusion.ipynb b/tutorials/source_zh_cn/generative/diffusion.ipynb index e1bf50e5b0..0d4389e3a8 100644 --- 
a/tutorials/source_zh_cn/generative/diffusion.ipynb +++ b/tutorials/source_zh_cn/generative/diffusion.ipynb @@ -8,7 +8,7 @@ } }, "source": [ - "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/generative/mindspore_diffusion.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/generative/mindspore_diffusion.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/generative/diffusion.ipynb)\n", + "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/generative/mindspore_diffusion.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/generative/mindspore_diffusion.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/generative/diffusion.ipynb)\n", "\n", "# Diffusion扩散模型\n", "\n" @@ -37,7 +37,7 @@ "\n", "本文是在Phil Wang[基于PyTorch框架的复现](https://github.com/lucidrains/denoising-diffusion-pytorch)的基础上(而它本身又是基于[TensorFlow实现](https://github.com/hojonathanho/diffusion)),迁移到MindSpore AI框架上实现的。\n", "\n", - "![Image-1](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/generative/images/diffusion_1.png)\n", + "![Image-1](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/generative/images/diffusion_1.png)\n", "\n", "实验中我们采用离散时间(潜在变量模型)的观点,另外,读者也可以查看有关于扩散模型的其他[几个观点](https://twitter.com/sedielem/status/1530894256168222722?s=20&t=mfv4afx1GcNQU5fZklpACw)!" 
] @@ -108,7 +108,7 @@ "\n", "- 一个学习的反向去噪的扩散过程 $p_\\theta$ :通过训练神经网络从纯噪声开始逐渐对图像去噪,直到最终得到一个实际的图像\n", "\n", - "![Image-2](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/generative/images/diffusion_2.png)\n", + "![Image-2](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/generative/images/diffusion_2.png)\n", "\n", "由 $t$ 索引的正向和反向过程都发生在某些有限时间步长 $T$(DDPM作者使用 $T=1000$)内。从$t=0$开始,在数据分布中采样真实图像 $\\mathbf{x}_0$(本文使用一张来自ImageNet的猫图像形象的展示了diffusion正向添加噪声的过程),正向过程在每个时间步长 $t$ 都从高斯分布中采样一些噪声,再添加到上一个时刻的图像中。假定给定一个足够大的 $T$ 和一个在每个时间步长添加噪声的良好时间表,您最终会在 $t=T$ 通过渐进的过程得到所谓的[各向同性的高斯分布](https://math.stackexchange.com/questions/1991961/gaussian-distribution-is-isotropic)。" ] @@ -241,7 +241,7 @@ "\n", "训练算法现在如下所示:\n", "\n", - "![Image-3](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/generative/images/diffusion_3.png)\n", + "![Image-3](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/generative/images/diffusion_3.png)\n", "\n", "换句话说:\n", "\n", @@ -263,7 +263,7 @@ "\n", "在模型结构方面,DDPM的作者选择了U-Net,出自([Ronneberger et al.,2015](https://arxiv.org/abs/1505.04597))(当时,它在医学图像分割方面取得了最先进的结果)。这个网络就像任何自动编码器一样,在中间由一个bottleneck组成,确保网络只学习最重要的信息。重要的是,它在编码器和解码器之间引入了残差连接,极大地改善了梯度流(灵感来自于([He et al., 2015](https://arxiv.org/abs/1512.03385)))。\n", "\n", - "![Image-4](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/generative/images/diffusion_4.jpg)\n", + "![Image-4](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/generative/images/diffusion_4.jpg)\n", "\n", "可以看出,U-Net模型首先对输入进行下采样(即,在空间分辨率方面使输入更小),之后执行上采样。" ] @@ -1389,7 +1389,7 @@ "\n", "由于我们将在训练期间从模型中采样(以便跟踪进度),我们定义了下面的代码。采样在本文中总结为算法2:\n", "\n", - "![Image-5](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/generative/images/diffusion_5.png)\n", + "![Image-5](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/generative/images/diffusion_5.png)\n", "\n", "从扩散模型生成新图像是通过反转扩散过程来实现的:我们从$T$开始,我们从高斯分布中采样纯噪声,然后使用我们的神经网络逐渐去噪(使用它所学习的条件概率),直到我们最终在时间步$t = 0$结束。如上图所示,我们可以通过使用我们的噪声预测器插入平均值的重新参数化,导出一个降噪程度较低的图像\n", "$\\mathbf{x}_{t-1 }$。请注意,方差是提前知道的。\n", diff --git a/tutorials/source_zh_cn/generative/gan.ipynb b/tutorials/source_zh_cn/generative/gan.ipynb index 3f4cd5b7ef..33eae53fbd 100644 --- a/tutorials/source_zh_cn/generative/gan.ipynb +++ b/tutorials/source_zh_cn/generative/gan.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/generative/mindspore_gan.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/generative/mindspore_gan.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/generative/gan.ipynb)\n", + 
"[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/generative/mindspore_gan.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/generative/mindspore_gan.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/generative/gan.ipynb)\n", "\n", "# GAN图像生成\n" ] @@ -45,7 +45,7 @@ "3. 生成器通过优化,生成出更加贴近真实数据分布的数据。\n", "4. 生成器所生成的数据和真实数据达到相同的分布,此时判别器的输出为1/2。\n", "\n", - "![gan](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/cv/images/gan_image.png)\n", + "![gan](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/cv/images/gan_image.png)\n", "\n", "在上图中,蓝色虚线表示判别器,黑色虚线表示真实数据分布,绿色实线表示生成器生成的虚假数据分布,$z$ 表示隐码,$x$ 表示生成的虚假图像 $G(z)$。该图片来源于[Generative Adversarial Nets](https://papers.nips.cc/paper/5423-generative-adversarial-nets.pdf)。详细的训练方法介绍见原论文。\n", "\n", @@ -112,7 +112,7 @@ "source": [ "### 数据加载\n", "\n", - "使用MindSpore自己的[MnistDataset](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.MnistDataset.html)接口,读取和解析MNIST数据集的源文件构建数据集。然后对数据进行一些前处理。" + "使用MindSpore自己的[MnistDataset](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.MnistDataset.html)接口,读取和解析MNIST数据集的源文件构建数据集。然后对数据进行一些前处理。" ] }, { @@ -310,7 +310,7 @@ "source": [ "### 判别器\n", "\n", - "如前所述,判别器 `Discriminator` 是一个二分类网络模型,输出判定该图像为真实图的概率。主要通过一系列的 [Dense](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.Dense.html) 层和 [LeakyReLU](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.LeakyReLU.html) 层对其进行处理,最后通过 [Sigmoid](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.Sigmoid.html) 激活函数,使其返回 [0, 1] 的数据范围内,得到最终概率。注意实例化判别器之后需要修改参数的名称,不然静态图模式下会报错。" + "如前所述,判别器 `Discriminator` 是一个二分类网络模型,输出判定该图像为真实图的概率。主要通过一系列的 [Dense](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.Dense.html) 层和 [LeakyReLU](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.LeakyReLU.html) 层对其进行处理,最后通过 [Sigmoid](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.Sigmoid.html) 激活函数,使其返回 [0, 1] 的数据范围内,得到最终概率。注意实例化判别器之后需要修改参数的名称,不然静态图模式下会报错。" ] }, { @@ -353,7 +353,7 @@ "source": [ "### 损失函数和优化器\n", "\n", - "定义了 `Generator` 和 `Discriminator` 后,损失函数使用MindSpore中二进制交叉熵损失函数[BCELoss](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.BCELoss.html) ;这里生成器和判别器都是使用[Adam](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.Adam.html)优化器,但是需要构建两个不同名称的优化器,分别用于更新两个模型的参数,详情见下文代码。注意优化器的参数名称也需要修改。" + "定义了 `Generator` 和 `Discriminator` 后,损失函数使用MindSpore中二进制交叉熵损失函数[BCELoss](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.BCELoss.html) ;这里生成器和判别器都是使用[Adam](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.Adam.html)优化器,但是需要构建两个不同名称的优化器,分别用于更新两个模型的参数,详情见下文代码。注意优化器的参数名称也需要修改。" ] }, { @@ -630,7 +630,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - 
"![训练过程测试动态图](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/generative/images/train_test.gif)\n", + "![训练过程测试动态图](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/generative/images/train_test.gif)\n", "\n", "从上面的图像可以看出,随着训练次数的增多,图像质量也越来越好。如果增大训练周期数,当 `epoch` 达到100以上时,生成的手写数字图片与数据集中的较为相似。下面我们通过加载生成器网络模型参数文件来生成图像,代码如下:" ] diff --git a/tutorials/source_zh_cn/generative/pix2pix.ipynb b/tutorials/source_zh_cn/generative/pix2pix.ipynb index 23bbf2e9ec..bd3480cbf0 100644 --- a/tutorials/source_zh_cn/generative/pix2pix.ipynb +++ b/tutorials/source_zh_cn/generative/pix2pix.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/generative/mindspore_pix2pix.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/generative/mindspore_pix2pix.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/generative/pix2pix.ipynb)\n", + "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/generative/mindspore_pix2pix.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/generative/mindspore_pix2pix.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/generative/pix2pix.ipynb)\n", "\n", "# Pix2Pix实现图像转换\n", "\n", @@ -33,7 +33,7 @@ "\n", "$$arg\\min_{G}\\max_{D}L_{cGAN}(G,D)$$\n", "\n", - "![pix2pix1](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/generative/images/pix2pix_1.png)\n", + "![pix2pix1](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/generative/images/pix2pix_1.png)\n", "\n", "为了对比cGAN和GAN的不同,我们将GAN的目标也进行了说明:\n", "\n", @@ -137,7 +137,7 @@ "\n", "U-Net是德国Freiburg大学模式识别和图像处理组提出的一种全卷积结构。它分为两个部分,其中左侧是由卷积和降采样操作组成的压缩路径,右侧是由卷积和上采样组成的扩张路径,扩张的每个网络块的输入由上一层上采样的特征和压缩路径部分的特征拼接而成。网络模型整体是一个U形的结构,因此被叫做U-Net。和常见的先降采样到低维度,再升采样到原始分辨率的编解码结构的网络相比,U-Net的区别是加入skip-connection,对应的feature maps和decode之后的同样大小的feature maps按通道拼一起,用来保留不同分辨率下像素级的细节信息。\n", "\n", - "![pix2pix2](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/generative/images/pix2pix_2.png)" + "![pix2pix2](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/generative/images/pix2pix_2.png)" ] }, { @@ -529,7 +529,7 @@ "source": [ "## 推理\n", "\n", - 
"获取上述训练过程完成后的ckpt文件,通过[load_checkpoint](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.load_checkpoint.html)和[load_param_into_net](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.load_param_into_net.html)将ckpt中的权重参数导入到模型中,获取数据进行推理并对推理的效果图进行演示(由于时间问题,训练过程只进行了100个epoch)。" + "获取上述训练过程完成后的ckpt文件,通过[load_checkpoint](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.load_checkpoint.html)和[load_param_into_net](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.load_param_into_net.html)将ckpt中的权重参数导入到模型中,获取数据进行推理并对推理的效果图进行演示(由于时间问题,训练过程只进行了100个epoch)。" ] }, { @@ -582,7 +582,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "![pix2pix3](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/generative/images/pix2pix_3.png)" + "![pix2pix3](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/generative/images/pix2pix_3.png)" ] }, { diff --git a/tutorials/source_zh_cn/model_infer/introduction.md b/tutorials/source_zh_cn/model_infer/introduction.md index 5d765395ac..9a2fa2d45c 100644 --- a/tutorials/source_zh_cn/model_infer/introduction.md +++ b/tutorials/source_zh_cn/model_infer/introduction.md @@ -1,6 +1,6 @@ # MindSpore推理概述 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/model_infer/introduction.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/model_infer/introduction.md) ## 特性背景 diff --git a/tutorials/source_zh_cn/model_infer/lite_infer/overview.md b/tutorials/source_zh_cn/model_infer/lite_infer/overview.md index 53566e6c75..e157928888 100644 --- a/tutorials/source_zh_cn/model_infer/lite_infer/overview.md +++ b/tutorials/source_zh_cn/model_infer/lite_infer/overview.md @@ -1,5 +1,5 @@ # Lite推理概述 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/model_infer/lite_infer/overview.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/model_infer/lite_infer/overview.md) MindSpore Lite是专注于离线模型的高效推理部署方案和端上设备的高性能推理的轻量化推理引擎。详情可参考[Lite文档](https://www.mindspore.cn/lite/docs/zh-CN/master/index.html)。 diff --git a/tutorials/source_zh_cn/model_infer/ms_infer/ms_infer_model_infer.rst b/tutorials/source_zh_cn/model_infer/ms_infer/ms_infer_model_infer.rst index cca47ae04f..ef33391d6f 100644 --- a/tutorials/source_zh_cn/model_infer/ms_infer/ms_infer_model_infer.rst +++ b/tutorials/source_zh_cn/model_infer/ms_infer/ms_infer_model_infer.rst @@ -1,8 +1,8 @@ MindSpore大语言模型带框架推理 ============================= -.. image:: https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg - :target: https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/parallel/optimize_technique.rst +.. 
image:: https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg + :target: https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/parallel/optimize_technique.rst :alt: 查看源文件 .. toctree:: @@ -378,7 +378,7 @@ MindSpore大语言模型带框架推理主要依赖MindSpore开源软件,用 可以看到,将模型推理的token id翻译后,即是一句可以被正常人理解的语句,实际验证过程中,由于do_sample的随机性,每次推理会有一定的差异,但是结果的逻辑基本都是可以被理解的。 - 完整端到端样例可以参考 `infer.py `_ 。 + 完整端到端样例可以参考 `infer.py `_ 。 模型并行 ~~~~~~~~ diff --git a/tutorials/source_zh_cn/model_infer/ms_infer/ms_infer_model_serving_infer.md b/tutorials/source_zh_cn/model_infer/ms_infer/ms_infer_model_serving_infer.md index 41e18f0b3f..b6af396b2c 100644 --- a/tutorials/source_zh_cn/model_infer/ms_infer/ms_infer_model_serving_infer.md +++ b/tutorials/source_zh_cn/model_infer/ms_infer/ms_infer_model_serving_infer.md @@ -1,7 +1,7 @@ # 服务化模型推理 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/model_infer/ms_infer/ms_infer_model_serving_infer.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/model_infer/ms_infer/ms_infer_model_serving_infer.md) ## 特性背景 diff --git a/tutorials/source_zh_cn/model_infer/ms_infer/ms_infer_network_develop.md b/tutorials/source_zh_cn/model_infer/ms_infer/ms_infer_network_develop.md index a6803e42f4..62c1076b00 100644 --- a/tutorials/source_zh_cn/model_infer/ms_infer/ms_infer_network_develop.md +++ b/tutorials/source_zh_cn/model_infer/ms_infer/ms_infer_network_develop.md @@ -1,6 +1,6 @@ # 从零构建大语言模型推理网络 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/model_infer/ms_infer/ms_infer_network_develop.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/model_infer/ms_infer/ms_infer_network_develop.md) ## 模型开发模式 @@ -26,7 +26,7 @@ MindSpore推荐用户先用动态图模式进行模型开发,然后根据需 - **RmsNorm & Linear**:输出线性归一层,在Transformer结构计算完后,将结果归一成和模型词表一样的维度,最终输出成每个token的概率分布返回。 -使用MindSpore大语言模型推理构建网络,可以根据MindSpore提供的算子自己拼装。下面以Qwen2模型为例,简单描述构建模型的过程,完整端到端样例可以参考[qwen2.py](https://gitee.com/mindspore/docs/blob/master/docs/sample_code/infer_code/qwen2/qwen2.py)。 +使用MindSpore大语言模型推理构建网络,可以根据MindSpore提供的算子自己拼装。下面以Qwen2模型为例,简单描述构建模型的过程,完整端到端样例可以参考[qwen2.py](https://gitee.com/mindspore/docs/blob/br_base/docs/sample_code/infer_code/qwen2/qwen2.py)。 ### 基础公共网络层 diff --git a/tutorials/source_zh_cn/model_infer/ms_infer/ms_infer_parallel_infer.md b/tutorials/source_zh_cn/model_infer/ms_infer/ms_infer_parallel_infer.md index b307b2faa6..b378dc2fc6 100644 --- a/tutorials/source_zh_cn/model_infer/ms_infer/ms_infer_parallel_infer.md +++ b/tutorials/source_zh_cn/model_infer/ms_infer/ms_infer_parallel_infer.md @@ -1,6 +1,6 @@ # 构建可并行的大语言模型网络 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/model_infer/ms_infer/ms_infer_parallel_infer.md) 
+[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/model_infer/ms_infer/ms_infer_parallel_infer.md) 随着模型规模的不断扩展,大语言模型所需的计算资源,特别是显存需求,呈指数级增长。以Qwen2-72B为例,在半精度(FP16)下,这些参数本身就需要约144GB的显存。 @@ -475,7 +475,7 @@ Linear层作为切分主要的网络层,其核心是MatMul矩阵计算,因 return hidden_state ``` -具体端到端的大语言模型代码工程可以参考[model_dev.py](https://gitee.com/mindspore/docs/blob/master/docs/sample_code/infer_code/model_dev.py)脚本,通过运行如下命令进行验证: +具体端到端的大语言模型代码工程可以参考[model_dev.py](https://gitee.com/mindspore/docs/blob/br_base/docs/sample_code/infer_code/model_dev.py)脚本,通过运行如下命令进行验证: ```shell msrun --worker_num 2 --local_worker_num 2 --master_port 8124 --log_dir msrun_log --join True --cluster_time_out 300 model_dev.py diff --git a/tutorials/source_zh_cn/model_infer/ms_infer/ms_infer_quantization.md b/tutorials/source_zh_cn/model_infer/ms_infer/ms_infer_quantization.md index 6997d8ce6d..13ffbe84aa 100644 --- a/tutorials/source_zh_cn/model_infer/ms_infer/ms_infer_quantization.md +++ b/tutorials/source_zh_cn/model_infer/ms_infer/ms_infer_quantization.md @@ -1,6 +1,6 @@ # 模型量化 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/model_infer/ms_infer/ms_infer_quantization.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/model_infer/ms_infer/ms_infer_quantization.md) ## 概述 @@ -90,7 +90,7 @@ ptq.convert(net) ms.save_checkpoint(net.parameters_dict(), './simplenet_ptq.ckpt') ``` -1. 使用[nn.Cell](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.Cell.html)定义网络,训练模型后得到模型的浮点权重,在推理过程中,加载该模型的浮点权重。上述例子对该过程进行了简化,直接创建网络,使用初始浮点权重进行量化。 +1. 使用[nn.Cell](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.Cell.html)定义网络,训练模型后得到模型的浮点权重,在推理过程中,加载该模型的浮点权重。上述例子对该过程进行了简化,直接创建网络,使用初始浮点权重进行量化。 2. 使用PTQConfig配置mode为量化模式,后端为Ascend,对权重进行8bit量化。详细说明可参考[PTQConfig的配置说明](#ptqconfig的配置说明)。 3. 使用apply接口将网络转换为伪量化网络,根据`PTQConfig`中的配置统计量化对象的信息。 4. 
使用convert接口对上一步的伪量化网络进行真实量化,得到量化后的网络。 diff --git a/tutorials/source_zh_cn/model_migration/model_migration.md b/tutorials/source_zh_cn/model_migration/model_migration.md index 9647d0f2b3..a9ec79b7f7 100644 --- a/tutorials/source_zh_cn/model_migration/model_migration.md +++ b/tutorials/source_zh_cn/model_migration/model_migration.md @@ -1,6 +1,6 @@ # 模型迁移 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/model_migration/model_migration.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/model_migration/model_migration.md) 本章节主要对模型迁移场景所必须的数据集、模型和训练、推理流程等在MindSpore上构建方法做简单的介绍。同时展示了MindSpore和PyTorch在数据集包装、模型构建、训练流程代码上的差别。 @@ -18,11 +18,11 @@ ## 数据集包装 -MindSpore提供了多种典型开源数据集的解析读取,如MNIST、CIFAR-10、CLUE、LJSpeech等,详情可参考[mindspore.dataset](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.dataset.html)。 +MindSpore提供了多种典型开源数据集的解析读取,如MNIST、CIFAR-10、CLUE、LJSpeech等,详情可参考[mindspore.dataset](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.dataset.html)。 ### 自定义数据加载 GeneratorDataset -在迁移场景,最常用的数据加载方式是[GeneratorDataset](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.GeneratorDataset.html#mindspore.dataset.GeneratorDataset),只需对Python迭代器做简单包装,就可以直接对接MindSpore模型进行训练、推理。 +在迁移场景,最常用的数据加载方式是[GeneratorDataset](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.GeneratorDataset.html#mindspore.dataset.GeneratorDataset),只需对Python迭代器做简单包装,就可以直接对接MindSpore模型进行训练、推理。 ```python import numpy as np @@ -56,13 +56,13 @@ GeneratorDataset至少需要包含: - source:一个Python迭代器; - column_names:迭代器\_\_getitem\_\_方法每个输出的名字。 -更多使用方法参考[GeneratorDataset](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.GeneratorDataset.html#mindspore.dataset.GeneratorDataset)。 +更多使用方法参考[GeneratorDataset](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.GeneratorDataset.html#mindspore.dataset.GeneratorDataset)。 dataset.batch将数据集中连续batch_size条数据,组合为一个批数据,至少需要包含: - batch_size:指定每个批处理数据包含的数据条目。 -更多使用方法参考[Dataset.batch](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/dataset_method/batch/mindspore.dataset.Dataset.batch.html)。 +更多使用方法参考[Dataset.batch](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/dataset_method/batch/mindspore.dataset.Dataset.batch.html)。 ### 与PyTorch数据集构建差别 @@ -74,7 +74,7 @@ MindSpore的GeneratorDataset与PyTorch的DataLoader的主要差别有: - PyTorch的数据增强输入的对象是Tensor类型,MindSpore的数据增强输入的对象是numpy类型,且数据处理不能用MindSpore的mint、ops和nn算子; - PyTorch的batch操作是DataLoader的属性,MindSpore的batch操作是独立的方法。 -详细可参考[与torch.utils.data.DataLoader的差异](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_diff/DataLoader.html)。 +详细可参考[与torch.utils.data.DataLoader的差异](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_diff/DataLoader.html)。 ## 模型构建 @@ -134,13 +134,13 @@ for i in net.get_parameters(): -MindSpore和PyTorch构建模型的方法差不多,使用算子的差别可以参考[API差异文档](https://www.mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html)。 +MindSpore和PyTorch构建模型的方法差不多,使用算子的差别可以参考[API差异文档](https://www.mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html)。 #### 模型保存和加载 PyTorch提供了 `state_dict()` 用于参数状态的查看及保存,`load_state_dict` 用于模型参数的加载。 -MindSpore可以使用 
[save_checkpoint](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.save_checkpoint.html) 与[load_checkpoint](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.load_checkpoint.html) 。 +MindSpore可以使用 [save_checkpoint](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.save_checkpoint.html) 与[load_checkpoint](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.load_checkpoint.html) 。 @@ -180,7 +180,7 @@ ms_model.load_state_dict(param_dict) ### 优化器 -PyTorch和MindSpore同时支持的优化器异同比较,详见[API映射表](https://mindspore.cn/docs/zh-CN/master/note/api_mapping/pytorch_api_mapping.html#torch-optim)。 +PyTorch和MindSpore同时支持的优化器异同比较,详见[API映射表](https://mindspore.cn/docs/zh-CN/br_base/note/api_mapping/pytorch_api_mapping.html#torch-optim)。 #### 优化器的执行和使用差异 @@ -190,7 +190,7 @@ PyTorch单步执行优化器时,一般需要手动执行 `zero_grad()` 方法 如果在训练过程中需要动态调整学习率,PyTorch提供了 `LRScheduler` 类用于对学习率管理。使用动态学习率时,将 `optimizer` 实例传入 `LRScheduler` 子类中,通过循环调用 `scheduler.step()` 执行学习率修改,并将修改同步至优化器中。 -MindSpore提供了`Cell`和`list`两种动态修改学习率的方法。使用时对应动态学习率对象直接传入优化器,学习率的更新在优化器中自动执行,具体请参考[动态学习率](https://mindspore.cn/docs/zh-CN/master/api_python/mindspore.nn.html#%E5%8A%A8%E6%80%81%E5%AD%A6%E4%B9%A0%E7%8E%87)。 +MindSpore提供了`Cell`和`list`两种动态修改学习率的方法。使用时对应动态学习率对象直接传入优化器,学习率的更新在优化器中自动执行,具体请参考[动态学习率](https://mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.nn.html#%E5%8A%A8%E6%80%81%E5%AD%A6%E4%B9%A0%E7%8E%87)。
diff --git a/tutorials/source_zh_cn/nlp/sentiment_analysis.ipynb b/tutorials/source_zh_cn/nlp/sentiment_analysis.ipynb index ad3f2af12a..39d7484c7a 100644 --- a/tutorials/source_zh_cn/nlp/sentiment_analysis.ipynb +++ b/tutorials/source_zh_cn/nlp/sentiment_analysis.ipynb @@ -6,7 +6,7 @@ "id": "ace41c03-dfa3-4cb6-88bc-bcaa72cfdc85", "metadata": {}, "source": [ - "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/nlp/mindspore_sentiment_analysis.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/nlp/mindspore_sentiment_analysis.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/nlp/sentiment_analysis.ipynb)\n", + "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/nlp/mindspore_sentiment_analysis.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/nlp/mindspore_sentiment_analysis.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/nlp/sentiment_analysis.ipynb)\n", "\n", "# RNN实现情感分类\n" ] @@ -264,7 +264,7 @@ "id": "f6a05899-85f4-4e84-803a-e6afb3e784a7", "metadata": {}, "source": [ - "将IMDB数据集加载至内存并构造为迭代对象后,可以使用`mindspore.dataset`提供的[Generatordataset](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/mindspore.dataset.GeneratorDataset.html)接口加载数据集迭代对象,并进行下一步的数据处理,下面封装一个函数将train和test分别使用`Generatordataset`进行加载,并指定数据集中文本和标签的`column_name`分别为`text`和`label`:" + "将IMDB数据集加载至内存并构造为迭代对象后,可以使用`mindspore.dataset`提供的[Generatordataset](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/mindspore.dataset.GeneratorDataset.html)接口加载数据集迭代对象,并进行下一步的数据处理,下面封装一个函数将train和test分别使用`Generatordataset`进行加载,并指定数据集中文本和标签的`column_name`分别为`text`和`label`:" ] }, { @@ -463,8 +463,8 @@ "- 通过Vocab将所有的Token处理为index id。\n", "- 将文本序列统一长度,不足的使用``补齐,超出的进行截断。\n", "\n", - "这里我们使用`mindspore.dataset`中提供的接口进行预处理操作。这里使用到的接口均为MindSpore的高性能数据引擎设计,每个接口对应操作视作数据流水线的一部分,详情请参考[MindSpore数据引擎](https://www.mindspore.cn/docs/zh-CN/master/features/data_engine.html)。\n", - "首先针对token到index id的查表操作,使用`text.Lookup`接口,将前文构造的词表加载,并指定`unknown_token`。其次为文本序列统一长度操作,使用[PadEnd](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset_transforms/mindspore.dataset.transforms.PadEnd.html)接口,此接口定义最大长度和补齐值(`pad_value`),这里我们取最大长度为500,填充值对应词表中``的index id。\n", + "这里我们使用`mindspore.dataset`中提供的接口进行预处理操作。这里使用到的接口均为MindSpore的高性能数据引擎设计,每个接口对应操作视作数据流水线的一部分,详情请参考[MindSpore数据引擎](https://www.mindspore.cn/docs/zh-CN/br_base/features/data_engine.html)。\n", + "首先针对token到index 
id的查表操作,使用`text.Lookup`接口,将前文构造的词表加载,并指定`unknown_token`。其次为文本序列统一长度操作,使用[PadEnd](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset_transforms/mindspore.dataset.transforms.PadEnd.html)接口,此接口定义最大长度和补齐值(`pad_value`),这里我们取最大长度为500,填充值对应词表中``的index id。\n", "\n", "> 除了对数据集中`text`进行预处理外,由于后续模型训练的需要,要将`label`数据转为float32格式。" ] @@ -489,7 +489,7 @@ "id": "35e08575-85cf-4c50-8654-52bae0c3b413", "metadata": {}, "source": [ - "完成预处理操作后,需将其加入到数据集处理流水线中,使用[map](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.map.html#mindspore.dataset.Dataset.map)接口对指定的column添加操作。" + "完成预处理操作后,需将其加入到数据集处理流水线中,使用[map](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/dataset_method/operation/mindspore.dataset.Dataset.map.html#mindspore.dataset.Dataset.map)接口对指定的column添加操作。" ] }, { @@ -531,7 +531,7 @@ "id": "ec8a81cf-2376-4840-9bda-84338897f8af", "metadata": {}, "source": [ - "最后指定数据集的batch大小,通过[batch](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/dataset_method/batch/mindspore.dataset.Dataset.batch.html)接口指定,并设置是否丢弃无法被batch size整除的剩余数据。\n", + "最后指定数据集的batch大小,通过[batch](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/dataset_method/batch/mindspore.dataset.Dataset.batch.html)接口指定,并设置是否丢弃无法被batch size整除的剩余数据。\n", "\n", "> 调用数据集的`map`、`split`、`batch`为数据集处理流水线增加对应操作,返回值为新的Dataset类型。现在仅定义流水线操作,在执行时开始执行数据处理流水线,获取最终处理好的数据并送入模型进行训练。" ] @@ -586,17 +586,17 @@ "\n", "循环神经网络(Recurrent Neural Network, RNN)是一类以序列(sequence)数据为输入,在序列的演进方向进行递归(recursion)且所有节点(循环单元)按链式连接的神经网络。下图为RNN的一般结构:\n", "\n", - "![RNN-0](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/nlp/images/0-RNN-0.png)\n", + "![RNN-0](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/nlp/images/0-RNN-0.png)\n", "\n", "> 图示左侧为一个RNN Cell循环,右侧为RNN的链式连接平铺。实际上不管是单个RNN Cell还是一个RNN网络,都只有一个Cell的参数,在不断进行循环计算中更新。\n", "\n", "由于RNN的循环特性,和自然语言文本的序列特性(句子是由单词组成的序列)十分匹配,因此被大量应用于自然语言处理研究中。下图为RNN的结构拆解:\n", "\n", - "![RNN](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/nlp/images/0-RNN.png)\n", + "![RNN](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/nlp/images/0-RNN.png)\n", "\n", "RNN单个Cell的结构简单,因此也造成了梯度消失(Gradient Vanishing)问题,具体表现为RNN网络在序列较长时,在序列尾部已经基本丢失了序列首部的信息。为了克服这一问题,LSTM(Long short-term memory)被提出,通过门控机制(Gating Mechanism)来控制信息流在每个循环步中的留存和丢弃。下图为LSTM的结构拆解:\n", "\n", - "![LSTM](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/tutorials/source_zh_cn/nlp/images/0-LSTM.png)\n", + "![LSTM](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/tutorials/source_zh_cn/nlp/images/0-LSTM.png)\n", "\n", "本节我们选择LSTM变种而不是经典的RNN做特征提取,来规避梯度消失问题,并获得更好的模型效果。下面来看MindSpore中`nn.LSTM`对应的公式:\n", "\n", @@ -655,7 +655,7 @@ "source": [ "### 损失函数与优化器\n", "\n", - "完成模型主体构建后,首先根据指定的参数实例化网络;然后选择损失函数和优化器。针对本节情感分类问题的特性,即预测Positive或Negative的二分类问题,我们选择[nn.BCEWithLogitsLoss](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.BCEWithLogitsLoss.html)(二分类交叉熵损失函数)。" + "完成模型主体构建后,首先根据指定的参数实例化网络;然后选择损失函数和优化器。针对本节情感分类问题的特性,即预测Positive或Negative的二分类问题,我们选择[nn.BCEWithLogitsLoss](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.BCEWithLogitsLoss.html)(二分类交叉熵损失函数)。" ] }, { @@ -874,7 +874,7 @@ "\n", 
"模型训练完成后,一般需要对模型进行测试或部署上线,此时需要加载已保存的最优模型(即checkpoint),供后续测试使用。这里我们直接使用MindSpore提供的Checkpoint加载和网络权重加载接口:1.将保存的模型Checkpoint加载到内存中,2.将Checkpoint加载至模型。\n", "\n", - "> [load_param_into_net](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.load_param_into_net.html)接口会返回模型中没有和Checkpoint匹配的权重名,正确匹配时返回空列表。" + "> [load_param_into_net](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.load_param_into_net.html)接口会返回模型中没有和Checkpoint匹配的权重名,正确匹配时返回空列表。" ] }, { diff --git a/tutorials/source_zh_cn/nlp/sequence_labeling.ipynb b/tutorials/source_zh_cn/nlp/sequence_labeling.ipynb index 180316b77c..7393856e5c 100644 --- a/tutorials/source_zh_cn/nlp/sequence_labeling.ipynb +++ b/tutorials/source_zh_cn/nlp/sequence_labeling.ipynb @@ -5,7 +5,7 @@ "id": "66014f9c-60b8-4cb4-b5c0-3f387aaf01af", "metadata": {}, "source": [ - "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/nlp/mindspore_sequence_labeling.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/nlp/mindspore_sequence_labeling.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/nlp/sequence_labeling.ipynb)\n", + "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/nlp/mindspore_sequence_labeling.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/nlp/mindspore_sequence_labeling.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/nlp/sequence_labeling.ipynb)\n", "\n", "# LSTM+CRF序列标注\n", "\n", diff --git a/tutorials/source_zh_cn/orange_pi/dev_start.ipynb b/tutorials/source_zh_cn/orange_pi/dev_start.ipynb index fc9718a65a..7b7c1a0a9f 100644 --- a/tutorials/source_zh_cn/orange_pi/dev_start.ipynb +++ b/tutorials/source_zh_cn/orange_pi/dev_start.ipynb @@ -6,7 +6,7 @@ "source": [ "# 开发入门\n", "\n", - "[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/orange_pi/mindspore_dev_start.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/master/tutorials/zh_cn/orange_pi/mindspore_dev_start.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/orange_pi/dev_start.ipynb)\n", + 
"[![下载Notebook](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_notebook.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/orange_pi/mindspore_dev_start.ipynb) [![下载样例代码](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_download_code.svg)](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/br_base/tutorials/zh_cn/orange_pi/mindspore_dev_start.py) [![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/orange_pi/dev_start.ipynb)\n", "\n", "因开发者可能会在OrangePi AIpro(下称:香橙派开发板)进行自定义模型和案例开发,本章节通过基于MindSpore的手写数字识别案例,说明香橙派开发板中的开发注意事项。" ] @@ -25,15 +25,15 @@ "\n", "### 镜像烧录\n", "\n", - "运行该案例需要烧录香橙派官网Ubuntu镜像,参考[镜像烧录](https://www.mindspore.cn/tutorials/zh-CN/master/orange_pi/environment_setup.html#1-%E9%95%9C%E5%83%8F%E7%83%A7%E5%BD%95%E4%BB%A5windows%E7%B3%BB%E7%BB%9F%E4%B8%BA%E4%BE%8B)章节。\n", + "运行该案例需要烧录香橙派官网Ubuntu镜像,参考[镜像烧录](https://www.mindspore.cn/tutorials/zh-CN/br_base/orange_pi/environment_setup.html#1-%E9%95%9C%E5%83%8F%E7%83%A7%E5%BD%95%E4%BB%A5windows%E7%B3%BB%E7%BB%9F%E4%B8%BA%E4%BE%8B)章节。\n", "\n", "### CANN升级\n", "\n", - "参考[CANN升级](https://www.mindspore.cn/tutorials/zh-CN/master/orange_pi/environment_setup.html#3-cann%E5%8D%87%E7%BA%A7)章节。\n", + "参考[CANN升级](https://www.mindspore.cn/tutorials/zh-CN/br_base/orange_pi/environment_setup.html#3-cann%E5%8D%87%E7%BA%A7)章节。\n", "\n", "### MindSpore升级\n", "\n", - "参考[MindSpore升级](https://www.mindspore.cn/tutorials/zh-CN/master/orange_pi/environment_setup.html#4-mindspore%E5%8D%87%E7%BA%A7)章节。" + "参考[MindSpore升级](https://www.mindspore.cn/tutorials/zh-CN/br_base/orange_pi/environment_setup.html#4-mindspore%E5%8D%87%E7%BA%A7)章节。" ] }, { @@ -71,7 +71,7 @@ "source": [ "## 数据集准备与加载\n", "\n", - "MindSpore提供基于Pipeline的[数据引擎](https://www.mindspore.cn/docs/zh-CN/master/features/data_engine.html),通过[数据集(Dataset)](https://www.mindspore.cn/tutorials/zh-CN/master/beginner/dataset.html)实现高效的数据预处理。在本案例中,我们使用Mnist数据集,自动下载完成后,使用`mindspore.dataset`提供的数据变换进行预处理。\n" + "MindSpore提供基于Pipeline的[数据引擎](https://www.mindspore.cn/docs/zh-CN/br_base/features/data_engine.html),通过[数据集(Dataset)](https://www.mindspore.cn/tutorials/zh-CN/br_base/beginner/dataset.html)实现高效的数据预处理。在本案例中,我们使用Mnist数据集,自动下载完成后,使用`mindspore.dataset`提供的数据变换进行预处理。\n" ] }, { @@ -226,7 +226,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "可使用[create_tuple_iterator](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_tuple_iterator.html) 或[create_dict_iterator](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_dict_iterator.html)对数据集进行迭代访问,查看数据和标签的shape和datatype。" + "可使用[create_tuple_iterator](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_tuple_iterator.html) 或[create_dict_iterator](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset/dataset_method/iterator/mindspore.dataset.Dataset.create_dict_iterator.html)对数据集进行迭代访问,查看数据和标签的shape和datatype。" ] }, { @@ -345,8 +345,8 @@ "MindSpore使用函数式自动微分机制,因此针对上述步骤需要实现:\n", "\n", "1. 定义正向计算函数。\n", - "2. 
使用[value_and_grad](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.value_and_grad.html)通过函数变换获得梯度计算函数。\n", - "3. 定义训练函数,使用[set_train](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.set_train)设置为训练模式,执行正向计算、反向传播和参数优化。" + "2. 使用[value_and_grad](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.value_and_grad.html)通过函数变换获得梯度计算函数。\n", + "3. 定义训练函数,使用[set_train](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.set_train)设置为训练模式,执行正向计算、反向传播和参数优化。" ] }, { diff --git a/tutorials/source_zh_cn/orange_pi/environment_setup.md b/tutorials/source_zh_cn/orange_pi/environment_setup.md index e6bb16567d..301b1a544e 100644 --- a/tutorials/source_zh_cn/orange_pi/environment_setup.md +++ b/tutorials/source_zh_cn/orange_pi/environment_setup.md @@ -1,6 +1,6 @@ # 环境搭建指南 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/orange_pi/environment_setup.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/orange_pi/environment_setup.md) 本章节将介绍如何在OrangePi AIpro上烧录镜像,自定义安装CANN和MindSpore,并配置运行环境。 @@ -353,4 +353,4 @@ The result of multiplication calculation is correct, MindSpore has been installe ## 下一步建议 -此时香橙派开发板环境搭建已经完成,可以在开发板上体验基于昇思MindSpore开发的[模型在线推理](https://www.mindspore.cn/tutorials/zh-CN/master/orange_pi/model_infer.html)。 +此时香橙派开发板环境搭建已经完成,可以在开发板上体验基于昇思MindSpore开发的[模型在线推理](https://www.mindspore.cn/tutorials/zh-CN/br_base/orange_pi/model_infer.html)。 diff --git a/tutorials/source_zh_cn/orange_pi/model_infer.md b/tutorials/source_zh_cn/orange_pi/model_infer.md index b027eed2dd..dfe48e80c2 100644 --- a/tutorials/source_zh_cn/orange_pi/model_infer.md +++ b/tutorials/source_zh_cn/orange_pi/model_infer.md @@ -1,6 +1,6 @@ # 模型在线推理 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/orange_pi/model_infer.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/orange_pi/model_infer.md) 本章节将介绍如何在OrangePi AIpro(下称:香橙派开发板)下载昇思MindSpore在线推理案例,并启动Jupyter Lab界面执行推理。 @@ -76,7 +76,7 @@ ![model-infer5](./images/model_infer5.png) -文件开头说明了硬件资源(香橙派开发板)信息,以及运行样例所需的CANN和MindSpore等版本,请注意检查环境,环境的检查与搭建详见[环境搭建指南](https://www.mindspore.cn/tutorials/zh-CN/master/orange_pi/environment_setup.html)。 +文件开头说明了硬件资源(香橙派开发板)信息,以及运行样例所需的CANN和MindSpore等版本,请注意检查环境,环境的检查与搭建详见[环境搭建指南](https://www.mindspore.cn/tutorials/zh-CN/br_base/orange_pi/environment_setup.html)。 步骤4 单击⏩按钮运行样例,在弹出的对话框中单击“Restart”按钮,此时该样例开始运行。 @@ -90,4 +90,4 @@ ## 下一步建议 -具体基于昇思MindSpore的案例开发详见[开发入门](https://www.mindspore.cn/tutorials/zh-CN/master/orange_pi/dev_start.html)。 +具体基于昇思MindSpore的案例开发详见[开发入门](https://www.mindspore.cn/tutorials/zh-CN/br_base/orange_pi/dev_start.html)。 diff --git a/tutorials/source_zh_cn/orange_pi/overview.md b/tutorials/source_zh_cn/orange_pi/overview.md index 865e4f33e2..90a0a154ab 100644 --- a/tutorials/source_zh_cn/orange_pi/overview.md +++ b/tutorials/source_zh_cn/orange_pi/overview.md @@ -1,6 +1,6 @@ # 香橙派开发 
-[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/orange_pi/overview.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/orange_pi/overview.md) [OrangePi AIpro(香橙派 AIpro)](http://www.orangepi.cn/index.html)采用昇腾AI技术路线,具体为4核64位处理器和AI处理器,集成图形处理器。 @@ -12,9 +12,9 @@ | :----- |:----- |:----- | | 前置学习 | 基于昇思+香橙派开发板进行开发前,需要了解掌握的内容 | [昇思MindSpore](https://www.mindspore.cn/)
[Linux](https://www.runoob.com/linux/linux-tutorial.html)
[Jupyter](https://jupyter.org/documentation) | | 镜像获取 | 香橙派官网-官方镜像 | [8T](http://www.orangepi.cn/html/hardWare/computerAndMicrocontrollers/service-and-support/Orange-Pi-AIpro.html)
[20T](http://www.orangepi.cn/html/hardWare/computerAndMicrocontrollers/details/Orange-Pi-AIpro(20T).html) | -| 环境搭建 | 如何基于OrangePi AIpro进行自定义环境搭建,包括CANN、MindSpore、套件等版本检查与更新(以8-12 TOPS 16G开发板为例,20 TOPS开发板操作方式相同) | [环境搭建指南](https://www.mindspore.cn/tutorials/zh-CN/master/orange_pi/environment_setup.html) | -| 在线推理 | 如何在香橙派启动模型推理 | [模型在线推理](https://www.mindspore.cn/tutorials/zh-CN/master/orange_pi/model_infer.html) | -| 开发入门 | 基于MindSpore的手写数字识别案例,说明香橙派开发板中的开发注意事项 | [开发入门](https://www.mindspore.cn/tutorials/zh-CN/master/orange_pi/dev_start.html) | +| 环境搭建 | 如何基于OrangePi AIpro进行自定义环境搭建,包括CANN、MindSpore、套件等版本检查与更新(以8-12 TOPS 16G开发板为例,20 TOPS开发板操作方式相同) | [环境搭建指南](https://www.mindspore.cn/tutorials/zh-CN/br_base/orange_pi/environment_setup.html) | +| 在线推理 | 如何在香橙派启动模型推理 | [模型在线推理](https://www.mindspore.cn/tutorials/zh-CN/br_base/orange_pi/model_infer.html) | +| 开发入门 | 基于MindSpore的手写数字识别案例,说明香橙派开发板中的开发注意事项 | [开发入门](https://www.mindspore.cn/tutorials/zh-CN/br_base/orange_pi/dev_start.html) | | 精品课程 | 《昇思+昇腾开发板:
软硬结合玩转DeepSeek开发实战》课程 | [课程链接](https://www.hiascend.com/developer/courses/detail/1925362775376744449) | | 案例分享 | 昇腾开发板专区-案例分享 | [昇腾开发板专区](https://www.hiascend.com/developer/devboard) | | 昇思+香橙派案例代码仓 | orange-pi-mindspore 代码仓 | [GitHub仓链接](https://github.com/mindspore-courses/orange-pi-mindspore) | diff --git a/tutorials/source_zh_cn/parallel/comm_fusion.md b/tutorials/source_zh_cn/parallel/comm_fusion.md index f5bd3c2bfc..9a39f7e8e2 100644 --- a/tutorials/source_zh_cn/parallel/comm_fusion.md +++ b/tutorials/source_zh_cn/parallel/comm_fusion.md @@ -1,12 +1,12 @@ # 分布式训练通信融合 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/parallel/comm_fusion.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/parallel/comm_fusion.md) ## 简介 在分布式并行训练场景下训练大规模参数量的模型(如GPT-3, Pangu-$\alpha$),跨设备甚至跨节点的数据传输是制约扩展性以及算力利用率的瓶颈[1]。通信融合是一种提升网络资源利用率、加速数据传输效率的重要方法,其将相同源节点和目的节点的通信算子打包同时执行,以避免多个单算子执行带来的额外开销。 -MindSpore支持对分布式训练中三种常用通信算子([AllReduce](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.AllReduce.html)、[AllGather](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.AllGather.html)、[ReduceScatter](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ReduceScatter.html))的融合,并提供简洁易用的接口方便用户自行配置。在长稳训练任务支撑中,通信融合特性发挥了重要作用。 +MindSpore支持对分布式训练中三种常用通信算子([AllReduce](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.AllReduce.html)、[AllGather](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.AllGather.html)、[ReduceScatter](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ReduceScatter.html))的融合,并提供简洁易用的接口方便用户自行配置。在长稳训练任务支撑中,通信融合特性发挥了重要作用。 ### 基本原理 @@ -50,17 +50,17 @@ MindSpore提供两种接口来使能通信融合,下面分别进行介绍: net.comm_fusion(config=config) ``` - 在自动并行或半自动并行场景下,用户在通过`net = AutoParallel(net, parallel_mode="semi_auto")`来配置并行策略时,可以利用该顶层`AutoParallel`类提供的[comm_fusion](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.Parameter.html#mindspore.Parameter.comm_fusion)接口的参数`congfig`来设置并行策略,输入格式为{"通信类型": {"mode":str, "config": None int 或者 list}}。具体可以参考[并行配置](https://www.mindspore.cn/docs/zh-CN/master/api_python/parallel/mindspore.parallel.auto_parallel.AutoParallel.html)中的`comm_fusion`。在这种场景下,优先推荐此种配置方法。 + 在自动并行或半自动并行场景下,用户在通过`net = AutoParallel(net, parallel_mode="semi_auto")`来配置并行策略时,可以利用该顶层`AutoParallel`类提供的[comm_fusion](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.Parameter.html#mindspore.Parameter.comm_fusion)接口的参数`congfig`来设置并行策略,输入格式为{"通信类型": {"mode":str, "config": None int 或者 list}}。具体可以参考[并行配置](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/parallel/mindspore.parallel.auto_parallel.AutoParallel.html)中的`comm_fusion`。在这种场景下,优先推荐此种配置方法。 2. 
利用`Cell`提供的接口 - 无论在哪种并行模式场景下,用户都可以通过[Cell.set_comm_fusion](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.set_comm_fusion)接口为模型某layer的参数设置index,MindSpore将融合相同index的参数所对应的通信算子。 + 无论在哪种并行模式场景下,用户都可以通过[Cell.set_comm_fusion](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.set_comm_fusion)接口为模型某layer的参数设置index,MindSpore将融合相同index的参数所对应的通信算子。 ## 操作实践 ### 样例代码说明 -> 下载完整的样例代码:[distributed_comm_fusion](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/distributed_comm_fusion)。 +> 下载完整的样例代码:[distributed_comm_fusion](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/distributed_comm_fusion)。 目录结构如下: @@ -95,7 +95,7 @@ init() 为了避免上述问题,可以将网络参数进行分组融合:在下一组参数进行的计算的同时,进行上组参数的通信,使得计算和通信能够互相隐藏,可以通过限定fusion buffer的大小,或者index分区的方法进行分组融合。 -更多使用方法,可以参考MindSpore的[测试用例](https://gitee.com/mindspore/mindspore/blob/master/tests/ut/python/parallel/test_comm_fusion.py)。 +更多使用方法,可以参考MindSpore的[测试用例](https://gitee.com/mindspore/mindspore/blob/br_base/tests/ut/python/parallel/test_comm_fusion.py)。 > 用户可以自行尝试`comm_fusion`的size和index模式,本质上都是fusion buffer类的方法。 diff --git a/tutorials/source_zh_cn/parallel/data_parallel.md b/tutorials/source_zh_cn/parallel/data_parallel.md index c93abf6cac..5386671daf 100644 --- a/tutorials/source_zh_cn/parallel/data_parallel.md +++ b/tutorials/source_zh_cn/parallel/data_parallel.md @@ -1,6 +1,6 @@ # 数据并行 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/parallel/data_parallel.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/parallel/data_parallel.md) ## 简介 @@ -10,7 +10,7 @@ ## 样例代码说明 -> 下载完整的样例代码:[distributed_data_parallel](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/distributed_data_parallel)。 +> 下载完整的样例代码:[distributed_data_parallel](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/distributed_data_parallel)。 目录结构如下: @@ -53,7 +53,7 @@ rank_size = get_group_size() dataset = ds.MnistDataset(dataset_path, num_shards=rank_size, shard_id=rank_id) ``` -其中,与单卡不同的是,在数据集接口需要传入`num_shards`和`shard_id`参数,分别对应卡的数量和逻辑序号,建议通过[mindspore.communication](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore.communication.html)模块的以下接口获取: +其中,与单卡不同的是,在数据集接口需要传入`num_shards`和`shard_id`参数,分别对应卡的数量和逻辑序号,建议通过[mindspore.communication](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore.communication.html)模块的以下接口获取: - `get_rank`:获取当前设备在集群中的ID。 - `get_group_size`:获取集群数量。 @@ -115,7 +115,7 @@ net = Network() ## 训练网络 -在这一步,我们需要定义损失函数、优化器以及训练过程。与单卡模型不同的地方在于,数据并行模式还需要增加[mindspore.nn.DistributedGradReducer()](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.DistributedGradReducer.html)接口,来对所有卡的梯度进行聚合,该接口第一个参数为需要更新的网络参数: +在这一步,我们需要定义损失函数、优化器以及训练过程。与单卡模型不同的地方在于,数据并行模式还需要增加[mindspore.nn.DistributedGradReducer()](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.DistributedGradReducer.html)接口,来对所有卡的梯度进行聚合,该接口第一个参数为需要更新的网络参数: ```python from mindspore import nn @@ -143,7 +143,7 @@ for epoch in range(10): i += 1 ``` -> 此处也可以用[Model.train](https://www.mindspore.cn/docs/zh-CN/master/api_python/train/mindspore.train.Model.html#mindspore.train.Model.train)的方式进行训练。 +> 
此处也可以用[Model.train](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/train/mindspore.train.Model.html#mindspore.train.Model.train)的方式进行训练。 ## 运行单机8卡脚本 @@ -175,4 +175,4 @@ epoch: 0 step: 150, loss is 2.2822685 ... ``` -其他启动方式如`mpirun`、`rank table`的启动可参考[启动方式](https://www.mindspore.cn/tutorials/zh-CN/master/parallel/startup_method.html)。 \ No newline at end of file +其他启动方式如`mpirun`、`rank table`的启动可参考[启动方式](https://www.mindspore.cn/tutorials/zh-CN/br_base/parallel/startup_method.html)。 \ No newline at end of file diff --git a/tutorials/source_zh_cn/parallel/dataset_slice.md b/tutorials/source_zh_cn/parallel/dataset_slice.md index fcedd8e414..ffe0991dfc 100644 --- a/tutorials/source_zh_cn/parallel/dataset_slice.md +++ b/tutorials/source_zh_cn/parallel/dataset_slice.md @@ -1,6 +1,6 @@ # 数据集切分 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/parallel/dataset_slice.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/parallel/dataset_slice.md) ## 简介 @@ -10,9 +10,9 @@ ### 相关接口 -1. `mindspore.dataset.vision.SlicePatches(num_height=1, num_width=1)`:在水平和垂直方向上将Tensor切片为多个块。适合于Tensor高宽较大的使用场景。其中`num_height`为垂直方向的切块数量,`num_width`为水平方向的切块数量。更多参数可以参考[SlicePatches](https://www.mindspore.cn/docs/zh-CN/master/api_python/dataset_vision/mindspore.dataset.vision.SlicePatches.html)。 +1. `mindspore.dataset.vision.SlicePatches(num_height=1, num_width=1)`:在水平和垂直方向上将Tensor切片为多个块。适合于Tensor高宽较大的使用场景。其中`num_height`为垂直方向的切块数量,`num_width`为水平方向的切块数量。更多参数可以参考[SlicePatches](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/dataset_vision/mindspore.dataset.vision.SlicePatches.html)。 -2. `dataset_strategy(config=((1, 1, 1, 8), (8,)))`:表示数据集分片策略,具体可以参考[AutoParallel并行配置](https://www.mindspore.cn/docs/zh-CN/master/api_python/parallel/mindspore.parallel.auto_parallel.AutoParallel.html)。`dataset_strategy`接口有以下几点限制: +2. `dataset_strategy(config=((1, 1, 1, 8), (8,)))`:表示数据集分片策略,具体可以参考[AutoParallel并行配置](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/parallel/mindspore.parallel.auto_parallel.AutoParallel.html)。`dataset_strategy`接口有以下几点限制: - 每个输入至多允许在一维进行切分。如支持`dataset_strategy(config=((1, 1, 1, 8), (8,)))`或者`config=((1, 1, 1, 8), (1,))`,每个输入至多切分了一维;但是不支持`config=((1, 1, 4, 2), (1,))`,其第一个输入切分了两维。 @@ -22,7 +22,7 @@ ### 样例代码说明 -> 下载完整的样例代码:[dataset_slice](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/dataset_slice)。 +> 下载完整的样例代码:[dataset_slice](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/dataset_slice)。 目录结构如下: @@ -85,7 +85,7 @@ data_set = create_dataset(32) ### 网络定义 -此处网络定义与单卡模型一致,并通过[no_init_parameters](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.utils.no_init_parameters.html)接口延后初始化网络参数和优化器参数: +此处网络定义与单卡模型一致,并通过[no_init_parameters](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.utils.no_init_parameters.html)接口延后初始化网络参数和优化器参数: ```python from mindspore import nn diff --git a/tutorials/source_zh_cn/parallel/distributed_case.rst b/tutorials/source_zh_cn/parallel/distributed_case.rst index 42542720e2..18c1bbb71d 100644 --- a/tutorials/source_zh_cn/parallel/distributed_case.rst +++ b/tutorials/source_zh_cn/parallel/distributed_case.rst @@ -1,8 +1,8 @@ 分布式高阶配置案例 ======================== -.. 
image:: https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg - :target: https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/parallel/distributed_case.rst +.. image:: https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg + :target: https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/parallel/distributed_case.rst :alt: 查看源文件 .. toctree:: diff --git a/tutorials/source_zh_cn/parallel/distributed_gradient_accumulation.md b/tutorials/source_zh_cn/parallel/distributed_gradient_accumulation.md index fec5c01686..0761a555eb 100644 --- a/tutorials/source_zh_cn/parallel/distributed_gradient_accumulation.md +++ b/tutorials/source_zh_cn/parallel/distributed_gradient_accumulation.md @@ -1,6 +1,6 @@ # 梯度累加 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/parallel/distributed_gradient_accumulation.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/parallel/distributed_gradient_accumulation.md) ## 简介 @@ -22,7 +22,7 @@ ### 相关接口 -[mindspore.parallel.GradAccumulation(network, micro_size)](https://www.mindspore.cn/docs/zh-CN/master/api_python/parallel/mindspore.parallel.nn.GradAccumulation.html):用更细粒度的MicroBatch包装网络。`micro_size`是MicroBatch的大小。 +[mindspore.parallel.GradAccumulation(network, micro_size)](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/parallel/mindspore.parallel.nn.GradAccumulation.html):用更细粒度的MicroBatch包装网络。`micro_size`是MicroBatch的大小。 > - 在梯度累加场景下,推荐使用lazy_inline装饰器来缩短编译时间,并且仅支持将lazy_inline装饰器配置在最外层的Cell上。 @@ -32,7 +32,7 @@ ### 样例代码说明 -> 下载完整的样例代码:[distributed_gradient_accumulation](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/distributed_gradient_accumulation)。 +> 下载完整的样例代码:[distributed_gradient_accumulation](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/distributed_gradient_accumulation)。 目录结构如下: @@ -60,7 +60,7 @@ init() ### 数据集加载与网络定义 -此处数据集加载和网络定义与单卡模型一致,通过[no_init_parameters](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.utils.no_init_parameters.html)接口延后初始化网络参数和优化器参数。代码如下: +此处数据集加载和网络定义与单卡模型一致,通过[no_init_parameters](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.utils.no_init_parameters.html)接口延后初始化网络参数和优化器参数。代码如下: ```python import os @@ -109,9 +109,9 @@ with no_init_parameters(): ### 训练网络 -在这一步,我们需要定义损失函数以及训练过程,通过顶层 [AutoParallel](https://www.mindspore.cn/docs/zh-CN/master/api_python/parallel/mindspore.parallel.auto_parallel.AutoParallel.html) 接口设置并行模式为半自动并行模式和优化器并行,调用两个接口来配置梯度累加: +在这一步,我们需要定义损失函数以及训练过程,通过顶层 [AutoParallel](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/parallel/mindspore.parallel.auto_parallel.AutoParallel.html) 接口设置并行模式为半自动并行模式和优化器并行,调用两个接口来配置梯度累加: -- 首先需要定义LossCell,本例中调用了[nn.WithLossCell](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.WithLossCell.html)接口封装网络和损失函数。 +- 首先需要定义LossCell,本例中调用了[nn.WithLossCell](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.WithLossCell.html)接口封装网络和损失函数。 - 然后需要在LossCell外包一层`GradAccumulation`,并指定MicroBatch的size为4。详细请参考本章概述中的相关接口。 ```python diff --git a/tutorials/source_zh_cn/parallel/dynamic_cluster.md b/tutorials/source_zh_cn/parallel/dynamic_cluster.md 
index 60c3383b6a..81ceefcd90 100644 --- a/tutorials/source_zh_cn/parallel/dynamic_cluster.md +++ b/tutorials/source_zh_cn/parallel/dynamic_cluster.md @@ -1,6 +1,6 @@ # 动态组网启动 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/parallel/dynamic_cluster.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/parallel/dynamic_cluster.md) ## 概述 @@ -157,7 +157,7 @@ MindSpore**动态组网**特性通过**复用Parameter Server模式训练架构* 动态组网启动脚本在各硬件平台下一致,下面以Ascend为例演示如何编写启动脚本: -> 您可以在这里下载完整的样例代码:[startup_method](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/startup_method)。 +> 您可以在这里下载完整的样例代码:[startup_method](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/startup_method)。 目录结构如下: @@ -260,7 +260,7 @@ for epoch in range(10): #### 单机多卡 -单机多卡启动脚本内容[run_dynamic_cluster.sh](https://gitee.com/mindspore/docs/blob/master/docs/sample_code/startup_method/run_dynamic_cluster.sh)如下,以单机8卡为例: +单机多卡启动脚本内容[run_dynamic_cluster.sh](https://gitee.com/mindspore/docs/blob/br_base/docs/sample_code/startup_method/run_dynamic_cluster.sh)如下,以单机8卡为例: ```bash EXEC_PATH=$(pwd) @@ -317,7 +317,7 @@ epoch: 0, step: 30, loss is 1.0437132 多机训练场景下,需拆分启动脚本。下面以执行双机8卡训练为例,每台机器执行启动4个Worker: -脚本[run_dynamic_cluster_1.sh](https://gitee.com/mindspore/docs/blob/master/docs/sample_code/startup_method/run_dynamic_cluster_1.sh)在节点1上启动1个`Scheduler`进程以及4个`Worker`进程: +脚本[run_dynamic_cluster_1.sh](https://gitee.com/mindspore/docs/blob/br_base/docs/sample_code/startup_method/run_dynamic_cluster_1.sh)在节点1上启动1个`Scheduler`进程以及4个`Worker`进程: ```bash EXEC_PATH=$(pwd) @@ -352,7 +352,7 @@ export MS_ROLE=MS_SCHED # 设置启动的进程为MS_SCHED角 python ./net.py > device/scheduler.log 2>&1 & # 启动训练脚本 ``` -脚本[run_dynamic_cluster_2.sh](https://gitee.com/mindspore/docs/blob/master/docs/sample_code/startup_method/run_dynamic_cluster_2.sh)在节点2上启动`Worker5`到`Worker8`(无需执行Scheduler): +脚本[run_dynamic_cluster_2.sh](https://gitee.com/mindspore/docs/blob/br_base/docs/sample_code/startup_method/run_dynamic_cluster_2.sh)在节点2上启动`Worker5`到`Worker8`(无需执行Scheduler): ```bash EXEC_PATH=$(pwd) @@ -380,7 +380,7 @@ do done ``` -> 在多机器任务中,需要为每个主机节点设置不同的主机名,否则会出现报错`device id`越界。可参考[FAQ](https://www.mindspore.cn/docs/zh-CN/master/faq/distributed_parallel.html#q-多机场景使用动态组网或msrun启动分布式任务时报错device-id越界如何解决)。 +> 在多机器任务中,需要为每个主机节点设置不同的主机名,否则会出现报错`device id`越界。可参考[FAQ](https://www.mindspore.cn/docs/zh-CN/br_base/faq/distributed_parallel.html#q-多机场景使用动态组网或msrun启动分布式任务时报错device-id越界如何解决)。 > > 在多机任务中,`MS_WORKER_NUM`应当为集群中Worker节点总数。 > @@ -430,4 +430,4 @@ bash run_dynamic_cluster_2.sh - `cipher_list`:密码套件(支持的SSL加密类型列表)。 - `cert_expire_warning_time_in_day`:证书过期的告警时间。 -p12文件中的秘钥为密文存储,在启动时需要传入密码,具体参数请参考Python API [mindspore.set_ps_context](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.set_ps_context.html#mindspore.set_ps_context)中的`client_password`以及`server_password`字段。 +p12文件中的秘钥为密文存储,在启动时需要传入密码,具体参数请参考Python API [mindspore.set_ps_context](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.set_ps_context.html#mindspore.set_ps_context)中的`client_password`以及`server_password`字段。 diff --git a/tutorials/source_zh_cn/parallel/high_dimension_tensor_parallel.md b/tutorials/source_zh_cn/parallel/high_dimension_tensor_parallel.md index 104c534b22..40d20766eb 100644 --- 
a/tutorials/source_zh_cn/parallel/high_dimension_tensor_parallel.md +++ b/tutorials/source_zh_cn/parallel/high_dimension_tensor_parallel.md @@ -1,6 +1,6 @@ # 高维张量并行 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/parallel/high_dimension_tensor_parallel.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/parallel/high_dimension_tensor_parallel.md) ## 简介 @@ -51,8 +51,8 @@ ### 相关接口 -1. [mindspore.ops.MatMul().add_prim_attr("enable_nd_tp", True)](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.MatMul.html):开启采用AllGather、MatMul和ReduceScatter的2D/3D通信/计算模式,必须使用Layout配置MatMul的shard切分。 -2. [mindspore.ops.BatchMatMul().add_prim_attr("enable_nd_tp", True)](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.BatchMatMul.html): 开启采用AllGather、MatMul和ReduceScatter的2D/3D通信/计算模式,必须使用Layout配置MatMul的shard切分。 +1. [mindspore.ops.MatMul().add_prim_attr("enable_nd_tp", True)](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.MatMul.html):开启采用AllGather、MatMul和ReduceScatter的2D/3D通信/计算模式,必须使用Layout配置MatMul的shard切分。 +2. [mindspore.ops.BatchMatMul().add_prim_attr("enable_nd_tp", True)](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.BatchMatMul.html): 开启采用AllGather、MatMul和ReduceScatter的2D/3D通信/计算模式,必须使用Layout配置MatMul的shard切分。 开启上述开关后,shard切分根据不同的in_strategy决定采用2D或3D并行模式: @@ -69,7 +69,7 @@ ### 样例代码说明 -> 下载完整的样例代码:[high_dimension_tensor_parallel](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/high_dimension_tensor_parallel)。 +> 下载完整的样例代码:[high_dimension_tensor_parallel](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/high_dimension_tensor_parallel)。 目录结构如下: @@ -97,7 +97,7 @@ init() ### 构造网络并计算 -算子定义中需调用add_prim_attr方法指定MatMul算子打开高维TP,并通过Layout指定Matmul算子切分方式。由 [no_init_parameters](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.utils.no_init_parameters.html) 接口延后初始化网络参数,并通过`AutoParallel` 包裹 `net` 设置并行模式为半自动并行模式。代码如下: +算子定义中需调用add_prim_attr方法指定MatMul算子打开高维TP,并通过Layout指定Matmul算子切分方式。由 [no_init_parameters](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.utils.no_init_parameters.html) 接口延后初始化网络参数,并通过`AutoParallel` 包裹 `net` 设置并行模式为半自动并行模式。代码如下: ```python # 示例代码 diff --git a/tutorials/source_zh_cn/parallel/host_device_training.md b/tutorials/source_zh_cn/parallel/host_device_training.md index 63386c8a08..cd9b07ed9b 100644 --- a/tutorials/source_zh_cn/parallel/host_device_training.md +++ b/tutorials/source_zh_cn/parallel/host_device_training.md @@ -1,6 +1,6 @@ # Host&Device异构 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/parallel/host_device_training.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/parallel/host_device_training.md) ## 概述 @@ -24,9 +24,9 @@ ### 相关接口 -1. [mindspore.ops.Primitive.set_device()](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Primitive.html#mindspore.ops.Primitive.set_device):设置Primitive执行后端。 +1. 
[mindspore.ops.Primitive.set_device()](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Primitive.html#mindspore.ops.Primitive.set_device):设置Primitive执行后端。 -2. [mindspore.nn.Optimizer.target](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.Optimizer.html#mindspore.nn.Optimizer.target):该属性用于指定在主机(host)上还是设备(device)上更新参数。输入类型为str,只能是"CPU","Ascend"。 +2. [mindspore.nn.Optimizer.target](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.Optimizer.html#mindspore.nn.Optimizer.target):该属性用于指定在主机(host)上还是设备(device)上更新参数。输入类型为str,只能是"CPU","Ascend"。 ## 操作实践 @@ -34,7 +34,7 @@ ### 样例代码说明 -> 下载完整的样例代码:[host_device](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/host_device)。 +> 下载完整的样例代码:[host_device](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/host_device)。 目录结构如下: @@ -50,7 +50,7 @@ ### 配置分布式环境 -首先通过context接口指定并行模式为[数据并行](https://www.mindspore.cn/docs/zh-CN/master/features/parallel/data_parallel.html)模式,并通过init初始化通信。 +首先通过context接口指定并行模式为[数据并行](https://www.mindspore.cn/docs/zh-CN/br_base/features/parallel/data_parallel.html)模式,并通过init初始化通信。 ```python import mindspore as ms @@ -93,7 +93,7 @@ data_set = create_dataset(32) ### 网络定义 -网络定义与单卡网络区别在于,配置[ops.Add()](https://www.mindspore.cn/docs/en/master/api_python/ops/mindspore.ops.Add.html)算子在主机端运行,代码如下: +网络定义与单卡网络区别在于,配置[ops.Add()](https://www.mindspore.cn/docs/en/br_base/api_python/ops/mindspore.ops.Add.html)算子在主机端运行,代码如下: ```python import mindspore as ms @@ -144,7 +144,7 @@ net.layer3.add.set_device("CPU") ### 训练网络 -损失函数、优化器以及训练过程与数据并行一致,用[mindspore.nn.DistributedGradReducer()](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.DistributedGradReducer.html)接口来对所有卡的梯度进行聚合,代码如下: +损失函数、优化器以及训练过程与数据并行一致,用[mindspore.nn.DistributedGradReducer()](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.DistributedGradReducer.html)接口来对所有卡的梯度进行聚合,代码如下: ```python from mindspore import nn diff --git a/tutorials/source_zh_cn/parallel/mpirun.md b/tutorials/source_zh_cn/parallel/mpirun.md index 3f4f153454..5499bc99a5 100644 --- a/tutorials/source_zh_cn/parallel/mpirun.md +++ b/tutorials/source_zh_cn/parallel/mpirun.md @@ -1,6 +1,6 @@ # mpirun启动 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/parallel/mpirun.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/parallel/mpirun.md) ## 概述 @@ -32,7 +32,7 @@ OpenMPI(Open Message Passing Interface)是一个开源的、高性能的消 `mpirun`启动脚本在Ascend和GPU硬件平台下一致,下面以Ascend为例演示如何编写启动脚本: -> 您可以在这里下载完整的样例代码:[startup_method](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/startup_method)。 +> 您可以在这里下载完整的样例代码:[startup_method](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/startup_method)。 目录结构如下: diff --git a/tutorials/source_zh_cn/parallel/msrun_launcher.md b/tutorials/source_zh_cn/parallel/msrun_launcher.md index c0e48a2291..dacdd59c7d 100644 --- a/tutorials/source_zh_cn/parallel/msrun_launcher.md +++ b/tutorials/source_zh_cn/parallel/msrun_launcher.md @@ -1,10 +1,10 @@ # msrun启动 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/parallel/msrun_launcher.md) 
+[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/parallel/msrun_launcher.md) ## 概述 -`msrun`是[动态组网](https://www.mindspore.cn/tutorials/zh-CN/master/parallel/dynamic_cluster.html)启动方式的封装,用户可使用`msrun`,以单个命令行指令的方式在各节点拉起多进程分布式任务,并且无需手动设置[动态组网环境变量](https://www.mindspore.cn/tutorials/zh-CN/master/parallel/dynamic_cluster.html)。`msrun`同时支持`Ascend`,`GPU`和`CPU`后端。与`动态组网`启动方式一样,`msrun`无需依赖第三方库以及配置文件。 +`msrun`是[动态组网](https://www.mindspore.cn/tutorials/zh-CN/br_base/parallel/dynamic_cluster.html)启动方式的封装,用户可使用`msrun`,以单个命令行指令的方式在各节点拉起多进程分布式任务,并且无需手动设置[动态组网环境变量](https://www.mindspore.cn/tutorials/zh-CN/br_base/parallel/dynamic_cluster.html)。`msrun`同时支持`Ascend`,`GPU`和`CPU`后端。与`动态组网`启动方式一样,`msrun`无需依赖第三方库以及配置文件。 > - `msrun`在用户安装MindSpore后即可使用,可使用指令`msrun --help`查看支持参数。 > - `msrun`支持`图模式`以及`PyNative模式`。 @@ -87,7 +87,7 @@
@@ -186,13 +186,13 @@
设置模拟编译等级。 Integer 默认为-1,即关闭模拟编译功能。若用户配置此参数,msrun只会拉起进程的模拟编译,不做算子执行。
此功能通常用于调试大规模分布式训练并行策略,在编译阶段提前发现内存和策略问题。
模拟编译等级的设置可参考文档:DryRun
--sim_rank_id
-msrun作为动态组网启动方式的封装,所有用户可自定义配置的环境变量可参考[动态组网环境变量](https://www.mindspore.cn/tutorials/zh-CN/master/parallel/dynamic_cluster.html)。 +msrun作为动态组网启动方式的封装,所有用户可自定义配置的环境变量可参考[动态组网环境变量](https://www.mindspore.cn/tutorials/zh-CN/br_base/parallel/dynamic_cluster.html)。 ## 启动分布式任务 启动脚本在各硬件平台下一致,下面以Ascend为例演示如何编写启动脚本: -> 您可以在这里下载完整的样例代码:[startup_method](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/startup_method)。 +> 您可以在这里下载完整的样例代码:[startup_method](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/startup_method)。 目录结构如下: @@ -299,7 +299,7 @@ for epoch in range(10): 下面以执行单机8卡训练为例: -脚本[msrun_single.sh](https://gitee.com/mindspore/docs/blob/master/docs/sample_code/startup_method/msrun_single.sh)使用msrun指令在当前节点拉起1个`Scheduler`进程以及8个`Worker`进程(无需设置`master_addr`,默认为`127.0.0.1`;单机无需设置`node_rank`): +脚本[msrun_single.sh](https://gitee.com/mindspore/docs/blob/br_base/docs/sample_code/startup_method/msrun_single.sh)使用msrun指令在当前节点拉起1个`Scheduler`进程以及8个`Worker`进程(无需设置`master_addr`,默认为`127.0.0.1`;单机无需设置`node_rank`): ```bash EXEC_PATH=$(pwd) @@ -338,7 +338,7 @@ epoch: 0, step: 30, loss is 1.0437132 下面以执行2机8卡训练,每台机器执行启动4个Worker为例: -脚本[msrun_1.sh](https://gitee.com/mindspore/docs/blob/master/docs/sample_code/startup_method/msrun_1.sh)在节点1上执行,使用msrun指令拉起1个`Scheduler`进程以及4个`Worker`进程,配置`master_addr`为节点1的IP地址(msrun会自动检测到当前节点IP与`master_addr`匹配而拉起`Scheduler`进程),通过`node_rank`设置当前节点为0号节点: +脚本[msrun_1.sh](https://gitee.com/mindspore/docs/blob/br_base/docs/sample_code/startup_method/msrun_1.sh)在节点1上执行,使用msrun指令拉起1个`Scheduler`进程以及4个`Worker`进程,配置`master_addr`为节点1的IP地址(msrun会自动检测到当前节点IP与`master_addr`匹配而拉起`Scheduler`进程),通过`node_rank`设置当前节点为0号节点: ```bash EXEC_PATH=$(pwd) @@ -357,7 +357,7 @@ echo "start training" msrun --worker_num=8 --local_worker_num=4 --master_addr= --master_port=8118 --node_rank=0 --log_dir=msrun_log --join=True --cluster_time_out=300 net.py ``` -脚本[msrun_2.sh](https://gitee.com/mindspore/docs/blob/master/docs/sample_code/startup_method/msrun_2.sh)在节点2上执行,使用msrun指令拉起4个`Worker`进程,配置`master_addr`为节点1的IP地址,通过`node_rank`设置当前节点为1号节点: +脚本[msrun_2.sh](https://gitee.com/mindspore/docs/blob/br_base/docs/sample_code/startup_method/msrun_2.sh)在节点2上执行,使用msrun指令拉起4个`Worker`进程,配置`master_addr`为节点1的IP地址,通过`node_rank`设置当前节点为1号节点: ```bash EXEC_PATH=$(pwd) @@ -453,9 +453,9 @@ if get_rank() == 7: ms.set_seed(1) ``` -> [mindspore.communication.get_rank()](https://www.mindspore.cn/docs/zh-CN/master/api_python/communication/mindspore.communication.get_rank.html)接口需要在调用[mindspore.communication.init()](https://www.mindspore.cn/docs/zh-CN/master/api_python/communication/mindspore.communication.init.html)接口完成分布式初始化后才能正常获取rank信息,否则`get_rank()`默认返回0。 +> [mindspore.communication.get_rank()](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/communication/mindspore.communication.get_rank.html)接口需要在调用[mindspore.communication.init()](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/communication/mindspore.communication.init.html)接口完成分布式初始化后才能正常获取rank信息,否则`get_rank()`默认返回0。 -在对某一rank进行断点操作之后,会导致该rank进程执行停止在断点处等待后续交互操作,而其他未断点rank进程会继续运行,这样可能会导致快慢卡的情况,所以可以使用[mindspore.communication.comm_func.barrier()](https://www.mindspore.cn/docs/zh-CN/master/api_python/communication/mindspore.communication.comm_func.barrier.html)算子和[mindspore.runtime.synchronize()](https://www.mindspore.cn/docs/zh-CN/master/api_python/runtime/mindspore.runtime.synchronize.html)来同步所有rank的运行,确保其他rank阻塞等待,且一旦调试的rank继续运行则其他rank的停止会被释放。比如在单机八卡任务中,仅针对rank 7进行断点调试且阻塞所有其他rank: 
+在对某一rank进行断点操作之后,会导致该rank进程执行停止在断点处等待后续交互操作,而其他未断点rank进程会继续运行,这样可能会导致快慢卡的情况,所以可以使用[mindspore.communication.comm_func.barrier()](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/communication/mindspore.communication.comm_func.barrier.html)算子和[mindspore.runtime.synchronize()](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/runtime/mindspore.runtime.synchronize.html)来同步所有rank的运行,确保其他rank阻塞等待,且一旦调试的rank继续运行则其他rank的停止会被释放。比如在单机八卡任务中,仅针对rank 7进行断点调试且阻塞所有其他rank: ```python import pdb diff --git a/tutorials/source_zh_cn/parallel/multiple_copy.md b/tutorials/source_zh_cn/parallel/multiple_copy.md index a9b72b3522..7a04952251 100644 --- a/tutorials/source_zh_cn/parallel/multiple_copy.md +++ b/tutorials/source_zh_cn/parallel/multiple_copy.md @@ -1,6 +1,6 @@ # 多副本并行 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/parallel/multiple_copy.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/parallel/multiple_copy.md) ## 简介 @@ -16,7 +16,7 @@ ### 相关接口 -- [mindspore.parallel.nn.MicroBatchInterleaved(cell_network, interleave_num=2)](https://www.mindspore.cn/docs/zh-CN/master/api_python/parallel/mindspore.parallel.nn.MicroBatchInterleaved.html):这个函数的作用是将输入在第零维度拆成 `interleave_num`份,然后执行包裹的cell的计算。 +- [mindspore.parallel.nn.MicroBatchInterleaved(cell_network, interleave_num=2)](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/parallel/mindspore.parallel.nn.MicroBatchInterleaved.html):这个函数的作用是将输入在第零维度拆成 `interleave_num`份,然后执行包裹的cell的计算。 ## 操作实践 @@ -24,7 +24,7 @@ ### 样例代码说明 -> 下载完整的样例代码:[multiple_copy](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/multiple_copy)。 +> 下载完整的样例代码:[multiple_copy](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/multiple_copy)。 目录结构如下: @@ -53,7 +53,7 @@ init() ### 数据集加载与网络定义 此处数据集加载和网络定义与单卡模型一致。 -通过 [no_init_parameters](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.utils.no_init_parameters.html) 接口延后初始化网络参数和优化器参数。 +通过 [no_init_parameters](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.utils.no_init_parameters.html) 接口延后初始化网络参数和优化器参数。 ```python import os @@ -103,8 +103,8 @@ with no_init_parameters(): 在这一步,我们需要定义损失函数、训练过程,调用两个接口来配置多副本并行: -- 首先需要定义LossCell,本例中调用了[nn.WithLossCell](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.WithLossCell.html)接口封装网络和损失函数。 -- 然后需要在LossCell外包一层[mindspore.parallel.nn.MicroBatchInterleaved](https://www.mindspore.cn/docs/zh-CN/master/api_python/parallel/mindspore.parallel.nn.MicroBatchInterleaved.html),并指定interleave_num的size为2。详细请参考本章概述中的相关接口。 +- 首先需要定义LossCell,本例中调用了[nn.WithLossCell](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.WithLossCell.html)接口封装网络和损失函数。 +- 然后需要在LossCell外包一层[mindspore.parallel.nn.MicroBatchInterleaved](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/parallel/mindspore.parallel.nn.MicroBatchInterleaved.html),并指定interleave_num的size为2。详细请参考本章概述中的相关接口。 最后,`AutoParallel` 包裹 `net` 设置并行模式为半自动并行模式。 diff --git a/tutorials/source_zh_cn/parallel/multiple_mixed.md b/tutorials/source_zh_cn/parallel/multiple_mixed.md index 7a9752b2b9..17ca52dc3b 100644 --- a/tutorials/source_zh_cn/parallel/multiple_mixed.md +++ b/tutorials/source_zh_cn/parallel/multiple_mixed.md @@ -1,6 +1,6 @@ # 基于双递归搜索的多维混合并行案例 
-[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/parallel/multiple_mixed.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/parallel/multiple_mixed.md) ## 概述 @@ -12,7 +12,7 @@ ### 样例代码说明 -> 下载完整的样例代码:[multiple_mix](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/multiple_mix)。 +> 下载完整的样例代码:[multiple_mix](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/multiple_mix)。 目录结构如下: @@ -107,7 +107,7 @@ data_set = create_dataset(32) ### 训练网络 -这部分与流水线并行的训练代码一致。在单机训练代码基础上需要调用两个额外的接口:[nn.WithLossCell](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.WithLossCell.html)用于封装网络和损失函数、[parallel.nn.Pipeline](https://www.mindspore.cn/docs/zh-CN/master/api_python/parallel/mindspore.parallel.nn.Pipeline.html)用于封装LossCell和配置MicroBatch大小。通过[Autoparallel](https://www.mindspore.cn/docs/zh-CN/master/api_python/parallel/mindspore.parallel.auto_parallel.AutoParallel.html)接口指定运行模式、运行设备、运行卡号等,与单卡脚本不同,并行脚本还需指定并行模式`parallel_mode`为双递归策略搜索模式`recursive_programming`,用于自动切分数据并行和模型并行,`stages`为流水线并行中stage的数量,`hsdp`用于开启优化器并行。代码如下: +这部分与流水线并行的训练代码一致。在单机训练代码基础上需要调用两个额外的接口:[nn.WithLossCell](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.WithLossCell.html)用于封装网络和损失函数、[parallel.nn.Pipeline](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/parallel/mindspore.parallel.nn.Pipeline.html)用于封装LossCell和配置MicroBatch大小。通过[Autoparallel](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/parallel/mindspore.parallel.auto_parallel.AutoParallel.html)接口指定运行模式、运行设备、运行卡号等,与单卡脚本不同,并行脚本还需指定并行模式`parallel_mode`为双递归策略搜索模式`recursive_programming`,用于自动切分数据并行和模型并行,`stages`为流水线并行中stage的数量,`hsdp`用于开启优化器并行。代码如下: ```python import mindspore as ms diff --git a/tutorials/source_zh_cn/parallel/operator_parallel.md b/tutorials/source_zh_cn/parallel/operator_parallel.md index 783970e7ae..9122cfb015 100644 --- a/tutorials/source_zh_cn/parallel/operator_parallel.md +++ b/tutorials/source_zh_cn/parallel/operator_parallel.md @@ -1,6 +1,6 @@ # 算子级并行 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/parallel/operator_parallel.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/parallel/operator_parallel.md) ## 简介 @@ -16,7 +16,7 @@ MindSpore提供两种粒度的算子级并行能力:算子级并行和高阶 #### 样例代码说明 -> 下载完整的样例代码:[distributed_operator_parallel](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/distributed_operator_parallel)。 +> 下载完整的样例代码:[distributed_operator_parallel](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/distributed_operator_parallel)。 目录结构如下: @@ -107,7 +107,7 @@ class Network(nn.Cell): #### 训练网络定义 -在这一步,我们需要定义损失函数、优化器以及训练过程。需要注意的是,由于大模型的参数量巨大,在单卡上定义网络时如果进行参数初始化,显存将远远不够。因此在定义网络时需要配合[mindspore.nn.utils.no_init_parameters](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.utils.no_init_parameters.html)接口进行延迟初始化,将参数初始化延迟到并行多卡阶段。这里包括网络和优化器的定义都需要延后初始化。 
+在这一步,我们需要定义损失函数、优化器以及训练过程。需要注意的是,由于大模型的参数量巨大,在单卡上定义网络时如果进行参数初始化,显存将远远不够。因此在定义网络时需要配合[mindspore.nn.utils.no_init_parameters](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.utils.no_init_parameters.html)接口进行延迟初始化,将参数初始化延迟到并行多卡阶段。这里包括网络和优化器的定义都需要延后初始化。 ```python from mindspore.nn.utils import no_init_parameters @@ -186,7 +186,7 @@ epoch: 0 step: 50, loss is 1.8051043 ... ``` -其他启动方式如`mpirun`、`rank table`的启动可参考[启动方式](https://www.mindspore.cn/tutorials/zh-CN/master/parallel/startup_method.html)。 +其他启动方式如`mpirun`、`rank table`的启动可参考[启动方式](https://www.mindspore.cn/tutorials/zh-CN/br_base/parallel/startup_method.html)。 ### mint算子并行实践 @@ -194,7 +194,7 @@ epoch: 0 step: 50, loss is 1.8051043 #### 样例代码说明 -> 下载完整的样例代码:[distributed_operator_parallel](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/distributed_operator_parallel)。 +> 下载完整的样例代码:[distributed_operator_parallel](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/distributed_operator_parallel)。 目录结构如下: @@ -250,7 +250,7 @@ data_set = create_dataset(32) #### 定义网络 -在当前mint算子并行模式下,需要用mint算子定义网络。由于mint算子作为函数式接口,并不直接对外暴露其算子类型原语(Primitive),因此无法直接为算子配置并行策略,而需要用户在单卡网络的基础上使用[mindspore.parallel.shard](https://www.mindspore.cn/docs/zh-CN/master/api_python/parallel/mindspore.parallel.shard.html)接口手动配置mint算子的切分策略,例如配置策略后的网络结构为: +在当前mint算子并行模式下,需要用mint算子定义网络。由于mint算子作为函数式接口,并不直接对外暴露其算子类型原语(Primitive),因此无法直接为算子配置并行策略,而需要用户在单卡网络的基础上使用[mindspore.parallel.shard](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/parallel/mindspore.parallel.shard.html)接口手动配置mint算子的切分策略,例如配置策略后的网络结构为: ```python import mindspore as ms @@ -338,7 +338,7 @@ epoch: 0 step: 50, forward_sum is 0.96655 ... ``` -其他启动方式如`mpirun`、`rank table`的启动可参考[启动方式](https://www.mindspore.cn/tutorials/zh-CN/master/parallel/startup_method.html)。 +其他启动方式如`mpirun`、`rank table`的启动可参考[启动方式](https://www.mindspore.cn/tutorials/zh-CN/br_base/parallel/startup_method.html)。 ## 高阶算子级并行实践 @@ -348,7 +348,7 @@ epoch: 0 step: 50, forward_sum is 0.96655 #### 样例代码说明 -> 下载完整的样例代码:[distributed_operator_parallel](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/distributed_operator_parallel)。 +> 下载完整的样例代码:[distributed_operator_parallel](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/distributed_operator_parallel)。 目录结构如下: @@ -462,7 +462,7 @@ epoch: 0 step: 50, loss is 1.8051043 ... ``` -其他启动方式如`mpirun`、`rank table`的启动可参考[启动方式](https://www.mindspore.cn/tutorials/zh-CN/master/parallel/startup_method.html)。 +其他启动方式如`mpirun`、`rank table`的启动可参考[启动方式](https://www.mindspore.cn/tutorials/zh-CN/br_base/parallel/startup_method.html)。 ### 高阶mint算子并行实践 @@ -470,7 +470,7 @@ epoch: 0 step: 50, loss is 1.8051043 #### 样例代码说明 -> 下载完整的样例代码:[distributed_operator_parallel](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/distributed_operator_parallel)。 +> 下载完整的样例代码:[distributed_operator_parallel](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/distributed_operator_parallel)。 目录结构如下: @@ -604,4 +604,4 @@ epoch: 0 step: 50, forward_sum is 0.96655 ... 
``` -其他启动方式如`mpirun`、`rank table`的启动可参考[启动方式](https://www.mindspore.cn/tutorials/zh-CN/master/parallel/startup_method.html)。 +其他启动方式如`mpirun`、`rank table`的启动可参考[启动方式](https://www.mindspore.cn/tutorials/zh-CN/br_base/parallel/startup_method.html)。 diff --git a/tutorials/source_zh_cn/parallel/optimize_technique.rst b/tutorials/source_zh_cn/parallel/optimize_technique.rst index 4a4df80d3f..cefa4a1252 100644 --- a/tutorials/source_zh_cn/parallel/optimize_technique.rst +++ b/tutorials/source_zh_cn/parallel/optimize_technique.rst @@ -1,8 +1,8 @@ 并行优化策略 ======================== -.. image:: https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg - :target: https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/parallel/optimize_technique.rst +.. image:: https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg + :target: https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/parallel/optimize_technique.rst :alt: 查看源文件 .. toctree:: @@ -23,18 +23,18 @@ - 并行策略优化:并行策略优化主要包括并行策略的选择、算子级并行下的切分技巧以及多副本技巧。 - - `策略选择 `_:根据模型规模和数据量大小,可以选择不同的并行策略,以提高训练效率和资源利用率。 - - `切分技巧 `_:切分技巧是指通过手动配置某些关键算子的切分策略,减少张量重排布来提升训练效率。 - - `多副本 `_:多副本是指在一个迭代步骤中,将一个训练batch拆分成多个,将模型并行通信与计算进行并发,提升资源利用率。 - - `高维张量并行 `_:高维张量并行是指对于模型并行中的MatMul计算中的激活、权重张量进行多维度切分,通过优化切分策略降低通信量,提高训练效率。 + - `策略选择 `_:根据模型规模和数据量大小,可以选择不同的并行策略,以提高训练效率和资源利用率。 + - `切分技巧 `_:切分技巧是指通过手动配置某些关键算子的切分策略,减少张量重排布来提升训练效率。 + - `多副本 `_:多副本是指在一个迭代步骤中,将一个训练batch拆分成多个,将模型并行通信与计算进行并发,提升资源利用率。 + - `高维张量并行 `_:高维张量并行是指对于模型并行中的MatMul计算中的激活、权重张量进行多维度切分,通过优化切分策略降低通信量,提高训练效率。 - 内存优化:内存优化包括梯度累加、重计算、数据集切分、Host&Device异构和异构存储,主要目标是节省内存空间。 - - `梯度累加 `_:梯度累加通过在多个MicroBatch上计算梯度并将它们累加起来,然后一次性应用这个累加梯度来更新神经网络的参数。通过这种方法少量设备也能训练大Batch,有效减低内存峰值。 - - `重计算 `_:重计算是一种以时间换空间的技术,通过不保存某些正向算子的计算结果,以节省内存空间,在计算反向算子时,需要用到正向结果再重新计算正向算子。 - - `数据集切分 `_:数据集单个数据过大甚至无法加载到单个设备的时候,可以对数据进行切分,进行分布式训练。数据集切分配合模型并行是有效降低显存占用的方式。 - - `Host&Device异构 `_:在遇到参数量超过Device内存上限的时候,可以把一些内存占用量大且计算量少的算子放在Host端,这样能同时利用Host端内存大,Device端计算快的特性,提升了设备的利用率。 + - `梯度累加 `_:梯度累加通过在多个MicroBatch上计算梯度并将它们累加起来,然后一次性应用这个累加梯度来更新神经网络的参数。通过这种方法少量设备也能训练大Batch,有效减低内存峰值。 + - `重计算 `_:重计算是一种以时间换空间的技术,通过不保存某些正向算子的计算结果,以节省内存空间,在计算反向算子时,需要用到正向结果再重新计算正向算子。 + - `数据集切分 `_:数据集单个数据过大甚至无法加载到单个设备的时候,可以对数据进行切分,进行分布式训练。数据集切分配合模型并行是有效降低显存占用的方式。 + - `Host&Device异构 `_:在遇到参数量超过Device内存上限的时候,可以把一些内存占用量大且计算量少的算子放在Host端,这样能同时利用Host端内存大,Device端计算快的特性,提升了设备的利用率。 - 通信优化:通信优化包括通信融合和通信子图提取与复用,主要目标是减少通信延时,提升性能。 - - `通信融合 `_:通信融合可以将相同源节点和目标节点的通信算子合并到一次通信过程,避免多次通信带来额外开销。 + - `通信融合 `_:通信融合可以将相同源节点和目标节点的通信算子合并到一次通信过程,避免多次通信带来额外开销。 diff --git a/tutorials/source_zh_cn/parallel/optimizer_parallel.md b/tutorials/source_zh_cn/parallel/optimizer_parallel.md index c04d4ae854..3957933c33 100644 --- a/tutorials/source_zh_cn/parallel/optimizer_parallel.md +++ b/tutorials/source_zh_cn/parallel/optimizer_parallel.md @@ -1,6 +1,6 @@ # 优化器并行 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/parallel/optimizer_parallel.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/parallel/optimizer_parallel.md) ## 简介 @@ -10,7 +10,7 @@ ## 样例代码说明 -> 下载完整的样例代码:[distributed_optimizer_parallel](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/distributed_optimizer_parallel)。 
+> 下载完整的样例代码:[distributed_optimizer_parallel](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/distributed_optimizer_parallel)。 目录结构如下: @@ -97,7 +97,7 @@ net.layer2.set_comm_fusion(1) net.layer3.set_comm_fusion(2) ``` -> 这里为了减少通信成本,为不同层配置了通信融合,详细可以参考[通信算子融合](https://www.mindspore.cn/tutorials/zh-CN/master/parallel/comm_fusion.html)。 +> 这里为了减少通信成本,为不同层配置了通信融合,详细可以参考[通信算子融合](https://www.mindspore.cn/tutorials/zh-CN/br_base/parallel/comm_fusion.html)。 ## 训练网络定义 @@ -186,5 +186,5 @@ epoch: 0, step: 100, loss is 0.6854114 ... ``` -其他启动方式如`mpirun`、`rank table`的启动可参考[启动方式](https://www.mindspore.cn/tutorials/zh-CN/master/parallel/startup_method.html)。 +其他启动方式如`mpirun`、`rank table`的启动可参考[启动方式](https://www.mindspore.cn/tutorials/zh-CN/br_base/parallel/startup_method.html)。 diff --git a/tutorials/source_zh_cn/parallel/overview.md b/tutorials/source_zh_cn/parallel/overview.md index 3da1bc2418..8e06a748e4 100644 --- a/tutorials/source_zh_cn/parallel/overview.md +++ b/tutorials/source_zh_cn/parallel/overview.md @@ -1,6 +1,6 @@ # 分布式并行概述 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/parallel/overview.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/parallel/overview.md) 在深度学习中,当数据集和参数量的规模越来越大,训练所需的时间和硬件资源会随之增加,最后会变成制约训练的瓶颈。分布式并行训练,可以降低对内存、计算性能等硬件的需求,是进行训练的重要优化手段。此外,分布式并行对大模型训练和推理有着重要的意义,它为处理大规模数据和复杂模型提供了强大的计算能力和性能优势。 @@ -15,13 +15,13 @@ MindSpore目前支持四种启动方式: - **mpirun**:通过多进程通信库OpenMPI启动,支持Ascend/GPU。 - **rank table**:配置rank_table表后,通过脚本启动和卡数对应的进程,支持Ascend。 -详细可参考[分布式并行启动方式](https://www.mindspore.cn/tutorials/zh-CN/master/parallel/startup_method.html)章节。 +详细可参考[分布式并行启动方式](https://www.mindspore.cn/tutorials/zh-CN/br_base/parallel/startup_method.html)章节。 ## 数据并行 数据并行是最常用的并行训练方式,用于加速模型训练和处理大规模数据集。在数据并行模式下,训练数据被划分成多份,然后将每份数据分配到不同的计算节点上,例如多卡或者多台设备。每个节点独立地处理自己的数据子集,并使用相同的模型进行前向传播和反向传播,最终对所有节点的梯度进行同步后,进行模型参数更新。 -详细可参考[数据并行](https://www.mindspore.cn/tutorials/zh-CN/master/parallel/data_parallel.html)章节。 +详细可参考[数据并行](https://www.mindspore.cn/tutorials/zh-CN/br_base/parallel/data_parallel.html)章节。 ## 算子级并行 @@ -29,19 +29,19 @@ MindSpore目前支持四种启动方式: MindSpore提供两种粒度的算子级并行能力:算子级并行和高阶算子级并行。算子级并行通过简单切分策略描述张量维度分布,满足大多数场景需求。高阶算子级并行通过开放设备排布描述,支持复杂切分场景。 -详细可参考[算子级并行](https://www.mindspore.cn/tutorials/zh-CN/master/parallel/operator_parallel.html)章节。 +详细可参考[算子级并行](https://www.mindspore.cn/tutorials/zh-CN/br_base/parallel/operator_parallel.html)章节。 ## 优化器并行 在进行数据并行训练时,模型的参数更新部分在各卡间存在冗余计算,优化器并行通过将优化器的计算量分散到数据并行维度的卡上,在大规模网络上(比如Bert、GPT)可以有效减少内存消耗并提升网络性能。 -详细可参考[优化器并行](https://www.mindspore.cn/tutorials/zh-CN/master/parallel/optimizer_parallel.html)章节。 +详细可参考[优化器并行](https://www.mindspore.cn/tutorials/zh-CN/br_base/parallel/optimizer_parallel.html)章节。 ## 流水线并行 近年来,神经网络的规模几乎是呈指数型增长。受单卡内存的限制,训练这些大模型用到的设备数量也在不断增加。受server间通信带宽低的影响,传统数据并行叠加模型并行的这种混合并行模式的性能表现欠佳,需要引入流水线并行。流水线并行能够将模型在空间上按阶段(Stage)进行切分,每个Stage只需执行网络的一部分,大大节省了内存开销,同时缩小了通信域,缩短了通信时间。MindSpore能够根据用户的配置,将单机模型自动地转换成流水线并行模式去执行。 -详细可参考[流水线并行](https://www.mindspore.cn/tutorials/zh-CN/master/parallel/pipeline_parallel.html)章节。 +详细可参考[流水线并行](https://www.mindspore.cn/tutorials/zh-CN/br_base/parallel/pipeline_parallel.html)章节。 ## 并行优化策略 @@ -49,20 +49,20 @@ MindSpore提供两种粒度的算子级并行能力:算子级并行和高阶 - **并行策略优化**: - - 
**策略选择**:根据您的模型规模和数据量大小,您可以参考[策略选择](https://www.mindspore.cn/tutorials/zh-CN/master/parallel/strategy_select.html)教程来选择不同的并行策略,以提高训练效率和资源利用率。 - - **切分技巧**:切分技巧也是实现高效并行计算的关键,在[切分技巧](https://www.mindspore.cn/tutorials/zh-CN/master/parallel/split_technique.html)教程中,您可以通过具体案例了解到如何应用各种切分技巧来提升效率。 - - **多副本并行**:在现有的单副本模式下,某些底层算子在进行通信的时候,无法同时进行计算,从而导致资源浪费。多副本并行通过对数据按照Batch Size维度进行切分为多个副本,可以使一个副本在通信时,另一副本进行计算操作,提升了资源利用率,详细可参考[多副本并行](https://www.mindspore.cn/tutorials/zh-CN/master/parallel/multiple_copy.html)教程。 - - **高维张量并行**:高维张量并行是指对于模型并行中的MatMul计算中的激活、权重张量进行多维度切分,通过优化切分策略降低通信量,提高训练效率,详细可参考[高维张量并行](https://www.mindspore.cn/tutorials/zh-CN/master/parallel/high_dimension_tensor_parallel.html)教程。 + - **策略选择**:根据您的模型规模和数据量大小,您可以参考[策略选择](https://www.mindspore.cn/tutorials/zh-CN/br_base/parallel/strategy_select.html)教程来选择不同的并行策略,以提高训练效率和资源利用率。 + - **切分技巧**:切分技巧也是实现高效并行计算的关键,在[切分技巧](https://www.mindspore.cn/tutorials/zh-CN/br_base/parallel/split_technique.html)教程中,您可以通过具体案例了解到如何应用各种切分技巧来提升效率。 + - **多副本并行**:在现有的单副本模式下,某些底层算子在进行通信的时候,无法同时进行计算,从而导致资源浪费。多副本并行通过对数据按照Batch Size维度进行切分为多个副本,可以使一个副本在通信时,另一副本进行计算操作,提升了资源利用率,详细可参考[多副本并行](https://www.mindspore.cn/tutorials/zh-CN/br_base/parallel/multiple_copy.html)教程。 + - **高维张量并行**:高维张量并行是指对于模型并行中的MatMul计算中的激活、权重张量进行多维度切分,通过优化切分策略降低通信量,提高训练效率,详细可参考[高维张量并行](https://www.mindspore.cn/tutorials/zh-CN/br_base/parallel/high_dimension_tensor_parallel.html)教程。 - **内存优化**: - - **梯度累加**:梯度累加通过在多个MicroBatch上计算梯度并将它们累加起来,然后一次性应用这个累加梯度来更新神经网络的参数。通过这种方法少量设备也能训练大Batch Size,有效减低内存峰值,详细可参考[梯度累加](https://www.mindspore.cn/tutorials/zh-CN/master/parallel/distributed_gradient_accumulation.html)教程。 - - **重计算**:重计算通过不保存某些正向算子的计算结果,以节省内存空间,在计算反向算子时,需要用到正向结果再重新计算正向算子。详细可参考[重计算](https://www.mindspore.cn/tutorials/zh-CN/master/parallel/recompute.html)教程。 - - **数据集切分**:数据集单个数据过大的时候,可以对数据进行切分,进行分布式训练。数据集切分配合模型并行是有效降低显存占用的方式。详细可参考[数据集切分](https://www.mindspore.cn/tutorials/zh-CN/master/parallel/dataset_slice.html)教程。 - - **Host&Device异构**:在遇到参数量超过Device内存上限的时候,可以把一些内存占用量大且计算量少的算子放在Host端,这样能同时利用Host端内存大,Device端计算快的特性,提升了设备的利用率。详细可参考[Host&Device异构](https://www.mindspore.cn/tutorials/zh-CN/master/parallel/host_device_training.html)教程。 + - **梯度累加**:梯度累加通过在多个MicroBatch上计算梯度并将它们累加起来,然后一次性应用这个累加梯度来更新神经网络的参数。通过这种方法少量设备也能训练大Batch Size,有效减低内存峰值,详细可参考[梯度累加](https://www.mindspore.cn/tutorials/zh-CN/br_base/parallel/distributed_gradient_accumulation.html)教程。 + - **重计算**:重计算通过不保存某些正向算子的计算结果,以节省内存空间,在计算反向算子时,需要用到正向结果再重新计算正向算子。详细可参考[重计算](https://www.mindspore.cn/tutorials/zh-CN/br_base/parallel/recompute.html)教程。 + - **数据集切分**:数据集单个数据过大的时候,可以对数据进行切分,进行分布式训练。数据集切分配合模型并行是有效降低显存占用的方式。详细可参考[数据集切分](https://www.mindspore.cn/tutorials/zh-CN/br_base/parallel/dataset_slice.html)教程。 + - **Host&Device异构**:在遇到参数量超过Device内存上限的时候,可以把一些内存占用量大且计算量少的算子放在Host端,这样能同时利用Host端内存大,Device端计算快的特性,提升了设备的利用率。详细可参考[Host&Device异构](https://www.mindspore.cn/tutorials/zh-CN/br_base/parallel/host_device_training.html)教程。 - **通信优化**: - - **通信融合**:通信融合可以将相同源节点和目标节点的通信算子合并到一次通信过程,避免多次通信带来额外开销。详细可参考[通信融合](https://www.mindspore.cn/tutorials/zh-CN/master/parallel/comm_fusion.html)。 + - **通信融合**:通信融合可以将相同源节点和目标节点的通信算子合并到一次通信过程,避免多次通信带来额外开销。详细可参考[通信融合](https://www.mindspore.cn/tutorials/zh-CN/br_base/parallel/comm_fusion.html)。 ## 分布式高阶配置案例 -- **基于双递归搜索的多维混合并行案例**:基于双递归搜索的多维混合并行是指用户可以配置重计算、优化器并行、流水线并行等优化方法,在用户配置的基础上,通过双递归策略搜索算法进行算子级策略自动搜索,进而生成最优的并行策略。详细可参考[基于双递归搜索的多维混合并行案例](https://www.mindspore.cn/tutorials/zh-CN/master/parallel/multiple_mixed.html)。 \ No newline at end of file +- 
**基于双递归搜索的多维混合并行案例**:基于双递归搜索的多维混合并行是指用户可以配置重计算、优化器并行、流水线并行等优化方法,在用户配置的基础上,通过双递归策略搜索算法进行算子级策略自动搜索,进而生成最优的并行策略。详细可参考[基于双递归搜索的多维混合并行案例](https://www.mindspore.cn/tutorials/zh-CN/br_base/parallel/multiple_mixed.html)。 \ No newline at end of file diff --git a/tutorials/source_zh_cn/parallel/pipeline_parallel.md b/tutorials/source_zh_cn/parallel/pipeline_parallel.md index e38b87ae28..d9581a74f7 100644 --- a/tutorials/source_zh_cn/parallel/pipeline_parallel.md +++ b/tutorials/source_zh_cn/parallel/pipeline_parallel.md @@ -1,6 +1,6 @@ # 流水线并行 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/parallel/pipeline_parallel.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/parallel/pipeline_parallel.md) ## 简介 @@ -12,7 +12,7 @@ ### 样例代码说明 -> 下载完整的样例代码:[distributed_pipeline_parallel](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/distributed_pipeline_parallel)。 +> 下载完整的样例代码:[distributed_pipeline_parallel](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/distributed_pipeline_parallel)。 目录结构如下: @@ -122,11 +122,11 @@ class Network(nn.Cell): ### 训练网络定义 -在这一步,我们需要定义损失函数、优化器以及训练过程。需要注意的是,这里对网络和优化器的定义都需要延后初始化。除此之外,还需要增加 [mindspore.parallel.nn.PipelineGradReducer](https://www.mindspore.cn/docs/zh-CN/master/api_python/parallel/mindspore.parallel.nn.PipelineGradReducer.html) 接口,用于处理流水线并行下的梯度,该接口的第一个参数为需要更新的网络参数,第二个为是否使用优化器并行。 +在这一步,我们需要定义损失函数、优化器以及训练过程。需要注意的是,这里对网络和优化器的定义都需要延后初始化。除此之外,还需要增加 [mindspore.parallel.nn.PipelineGradReducer](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/parallel/mindspore.parallel.nn.PipelineGradReducer.html) 接口,用于处理流水线并行下的梯度,该接口的第一个参数为需要更新的网络参数,第二个为是否使用优化器并行。 与单卡模型不同,在这部分需要调用两个接口来配置流水线并行: -- 首先需要定义LossCell,本例中调用了[mindspore.nn.WithLossCell](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.WithLossCell.html)接口封装网络和损失函数。 +- 首先需要定义LossCell,本例中调用了[mindspore.nn.WithLossCell](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.WithLossCell.html)接口封装网络和损失函数。 - 然后需要在LossCell外包一层`Pipeline`,并指定MicroBatch的size,并通过`stage_config`配置每个包含训练参数的`Cell`的`pipeline_stage`。 ```python @@ -248,7 +248,7 @@ Tensor(shape=[8, 512], dtype=Float32, value= [ 4.89746094e-01 3.56689453e-01 -4.90966797e-01 ... 
-3.30078125e-e01 -2.38525391e-01 7.33398438e-01]]) ``` -其他启动方式如`mpirun`、`rank table`的启动可参考[启动方式](https://www.mindspore.cn/tutorials/zh-CN/master/parallel/startup_method.html)。 +其他启动方式如`mpirun`、`rank table`的启动可参考[启动方式](https://www.mindspore.cn/tutorials/zh-CN/br_base/parallel/startup_method.html)。 ## 推理操作实践 @@ -256,7 +256,7 @@ Tensor(shape=[8, 512], dtype=Float32, value= ### 样例代码说明 -> 下载完整的样例代码:[distributed_pipeline_parallel](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/distributed_pipeline_parallel)。 +> 下载完整的样例代码:[distributed_pipeline_parallel](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/distributed_pipeline_parallel)。 目录结构如下: @@ -353,7 +353,7 @@ net.head.pipeline_stage = 3 我们需要进一步设置并行有关的配置,用`AutoParallel`再包裹一次network,指定并行模式`semi_auto`为半自动并行模式,此外,还需开启流水线并行,配置`pipeline`,并通过配置`stages`数来指定stage的总数。此处不设置`device_target`会自动指定为MindSpore包对应的后端硬件设备(默认为Ascend)。`output_broadcast=True`表示流水线并行推理时,将最后一个stage的结果广播给其余stage,可以用于自回归推理场景。 -在执行推理前,先编译计算图`parallel_net.compile()`,再调用[mindspore.parallel.sync_pipeline_shared_parameters(parallel_net)](https://www.mindspore.cn/docs/zh-CN/master/api_python/parallel/mindspore.parallel.sync_pipeline_shared_parameters.html)接口,框架自动同步stage间的共享权重。 +在执行推理前,先编译计算图`parallel_net.compile()`,再调用[mindspore.parallel.sync_pipeline_shared_parameters(parallel_net)](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/parallel/mindspore.parallel.sync_pipeline_shared_parameters.html)接口,框架自动同步stage间的共享权重。 ```python diff --git a/tutorials/source_zh_cn/parallel/rank_table.md b/tutorials/source_zh_cn/parallel/rank_table.md index 44e1ee1990..2a64e05802 100644 --- a/tutorials/source_zh_cn/parallel/rank_table.md +++ b/tutorials/source_zh_cn/parallel/rank_table.md @@ -1,6 +1,6 @@ # rank table启动 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/parallel/rank_table.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/parallel/rank_table.md) ## 概述 @@ -37,7 +37,7 @@ ## 操作实践 -> 您可以在这里下载完整的样例代码:[startup_method](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/startup_method)。 +> 您可以在这里下载完整的样例代码:[startup_method](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/startup_method)。 目录结构如下: diff --git a/tutorials/source_zh_cn/parallel/recompute.md b/tutorials/source_zh_cn/parallel/recompute.md index e0aca6900c..6b84d0adb3 100644 --- a/tutorials/source_zh_cn/parallel/recompute.md +++ b/tutorials/source_zh_cn/parallel/recompute.md @@ -1,6 +1,6 @@ # 重计算 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/parallel/recompute.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/parallel/recompute.md) ## 简介 @@ -28,11 +28,11 @@ MindSpore采用反向模式的自动微分,根据正向图计算流程来自 ### 相关接口 -1. `mindspore.nn.Cell.recompute()`:调用`Cell`的[recompute接口](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.recompute),调用该接口之后,在计算反向部分时,除了该Cell的输出算子,Cell里面其他的所有算子以及子Cell里面的所有算子都会被重新计算。PyNative模式和Graph模式都支持。 +1. 
`mindspore.nn.Cell.recompute()`:调用`Cell`的[recompute接口](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.recompute),调用该接口之后,在计算反向部分时,除了该Cell的输出算子,Cell里面其他的所有算子以及子Cell里面的所有算子都会被重新计算。PyNative模式和Graph模式都支持。 -2. `mindspore.ops.Primitive.recompute()`:调用`Primitive`的[recompute接口](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Primitive.html#mindspore.ops.Primitive.recompute),调用该接口之后,在计算反向部分时,该算子会被重新计算。只支持Graph模式。 +2. `mindspore.ops.Primitive.recompute()`:调用`Primitive`的[recompute接口](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Primitive.html#mindspore.ops.Primitive.recompute),调用该接口之后,在计算反向部分时,该算子会被重新计算。只支持Graph模式。 -3. `mindspore.recompute()`:调用`mindspore`的[recompute接口](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.recompute.html#mindspore.recompute),调用该接口之后,网络模块会被重新计算。只支持PyNative模式。 +3. `mindspore.recompute()`:调用`mindspore`的[recompute接口](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.recompute.html#mindspore.recompute),调用该接口之后,网络模块会被重新计算。只支持PyNative模式。 ## 操作实践 @@ -40,7 +40,7 @@ MindSpore采用反向模式的自动微分,根据正向图计算流程来自 ### 样例代码说明 -> 下载完整的样例代码:[recompute](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/recompute)。 +> 下载完整的样例代码:[recompute](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/recompute)。 目录结构如下: @@ -132,7 +132,7 @@ class Grad(Cell): ### 执行网络 -在这一步,我们需要定义网络输入,通过[no_init_parameters](https://www.mindspore.cn/docs/zh-CN/master/api_python/nn/mindspore.nn.utils.no_init_parameters.html)接口延后初始化网络参数和优化器参数,然后调用`Grad`以获取导数,通过顶层 `AutoParallel` 接口设置并行模式为半自动并行模式,代码如下: +在这一步,我们需要定义网络输入,通过[no_init_parameters](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/nn/mindspore.nn.utils.no_init_parameters.html)接口延后初始化网络参数和优化器参数,然后调用`Grad`以获取导数,通过顶层 `AutoParallel` 接口设置并行模式为半自动并行模式,代码如下: ```python import numpy as np diff --git a/tutorials/source_zh_cn/parallel/split_technique.md b/tutorials/source_zh_cn/parallel/split_technique.md index a6f1ef019c..b8cf65ccb2 100644 --- a/tutorials/source_zh_cn/parallel/split_technique.md +++ b/tutorials/source_zh_cn/parallel/split_technique.md @@ -1,6 +1,6 @@ # 切分技巧 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/parallel/split_technique.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/parallel/split_technique.md) ## 概述 @@ -14,7 +14,7 @@ ### 配置维度改变/轴改变的算子 -深度学习框架的算子大致可以分为两类:语义简单的维度保持的算子;会改变输入张量维度的算子。对于维度保持算子,策略传播算法可以较容易地将切分策略传播出去。但是,对于维度改变算子,显式地配置切分策略才能更好地表达用户的初始想法,避免策略传播算法推导出非用户期望的切分策略。常见的维度改变/轴改变算子有:[ReduceMean](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ReduceMean.html)、[ReduceSum](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.ReduceSum.html)、[Transpose](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Transpose.html)、[StridedSlice](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.StridedSlice.html)、[MatMul](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.MatMul.html)与[BatchMatMul](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.BatchMatMul.html)。在下图的例子中,ReduceMean和MatMul是维度改变算子,它们被配置了切分策略。 
+深度学习框架的算子大致可以分为两类:语义简单的维度保持的算子;会改变输入张量维度的算子。对于维度保持算子,策略传播算法可以较容易地将切分策略传播出去。但是,对于维度改变算子,显式地配置切分策略才能更好地表达用户的初始想法,避免策略传播算法推导出非用户期望的切分策略。常见的维度改变/轴改变算子有:[ReduceMean](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ReduceMean.html)、[ReduceSum](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.ReduceSum.html)、[Transpose](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.Transpose.html)、[StridedSlice](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.StridedSlice.html)、[MatMul](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.MatMul.html)与[BatchMatMul](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.BatchMatMul.html)。在下图的例子中,ReduceMean和MatMul是维度改变算子,它们被配置了切分策略。 ![sp_case2_zh](./images/sp_case2_zh.png "配置维度改变的算子") @@ -26,7 +26,7 @@ ### 配置融合算子 -对于融合大算子,如[FlashAttentionScore](https://www.mindspore.cn/lite/api/zh-CN/master/generate/classmindspore_ops_FlashAttentionScore.html#exhale-class-classmindspore-ops-flashattentionscore)、[rms_norm](https://www.mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.rms_norm.html),也是需要用户手动配置策略的算子,融合算子的输入与输出逻辑相对复杂,传播出的没有重排的策略并不一定是用户所期望的策略,这些算子也需要显式配置算子级策略。 +对于融合大算子,如[FlashAttentionScore](https://www.mindspore.cn/lite/api/zh-CN/master/generate/classmindspore_ops_FlashAttentionScore.html#exhale-class-classmindspore-ops-flashattentionscore)、[rms_norm](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/ops/mindspore.ops.rms_norm.html),也是需要用户手动配置策略的算子,融合算子的输入与输出逻辑相对复杂,传播出的没有重排的策略并不一定是用户所期望的策略,这些算子也需要显式配置算子级策略。 用户在用策略传播时不仅需要对其传播算法本身有一定的了解,还要对要训练的模型的并行方式有一定的理解。如果存在某个由策略传播算法决定的算子的并行策略不符合用户的期望,那总可以通过多配置一个算子并行策略的方式解决。实际中,对于一个新模型,确实需要尝试几次才能获得性能较优的整体并行配置。 diff --git a/tutorials/source_zh_cn/parallel/startup_method.rst b/tutorials/source_zh_cn/parallel/startup_method.rst index 07ebce78e9..a18c9cb9b1 100644 --- a/tutorials/source_zh_cn/parallel/startup_method.rst +++ b/tutorials/source_zh_cn/parallel/startup_method.rst @@ -1,8 +1,8 @@ 分布式并行启动方式 ============================ -.. image:: https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg - :target: https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/parallel/startup_method.rst +.. image:: https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg + :target: https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/parallel/startup_method.rst :alt: 查看源文件 .. 
toctree:: @@ -19,10 +19,10 @@ 目前GPU、Ascend和CPU分别支持多种启动方式。主要有\ ``msrun``\、动态组网、\ ``mpirun``\和\ ``rank table``\四种方式: -- `msrun `_: `msrun` 是动态组网的封装,允许用户使用单命令行指令在各节点拉起分布式任务,安装MindSpore后即可使用。此方式不依赖第三方库以及配置文件,具有容灾恢复功能,安全性较好,支持三种硬件平台。建议用户优先使用此种启动方式。 -- `动态组网 `_:动态组网需要用户手动拉起多进程以及导出环境变量,是 `msrun` 的具体实现,Parameter Server训练模式建议使用此方式,其余分布式场景建议使用 `msrun` 。 -- `mpirun `_:此方式依赖开源库OpenMPI,启动命令简单,多机需要保证两两之间免密登录,推荐有OpenMPI使用经验的用户使用此种启动方式。 -- `rank table `_:此方式需要在Ascend硬件平台使用,不依赖第三方库。手动配置rank_table文件后,就可以通过脚本启动并行程序,多机脚本一致,方便批量部署。 +- `msrun `_: `msrun` 是动态组网的封装,允许用户使用单命令行指令在各节点拉起分布式任务,安装MindSpore后即可使用。此方式不依赖第三方库以及配置文件,具有容灾恢复功能,安全性较好,支持三种硬件平台。建议用户优先使用此种启动方式。 +- `动态组网 `_:动态组网需要用户手动拉起多进程以及导出环境变量,是 `msrun` 的具体实现,Parameter Server训练模式建议使用此方式,其余分布式场景建议使用 `msrun` 。 +- `mpirun `_:此方式依赖开源库OpenMPI,启动命令简单,多机需要保证两两之间免密登录,推荐有OpenMPI使用经验的用户使用此种启动方式。 +- `rank table `_:此方式需要在Ascend硬件平台使用,不依赖第三方库。手动配置rank_table文件后,就可以通过脚本启动并行程序,多机脚本一致,方便批量部署。 四种启动方式的硬件支持情况如下表: diff --git a/tutorials/source_zh_cn/parallel/strategy_select.md b/tutorials/source_zh_cn/parallel/strategy_select.md index 10dc2f9091..e99897b47a 100644 --- a/tutorials/source_zh_cn/parallel/strategy_select.md +++ b/tutorials/source_zh_cn/parallel/strategy_select.md @@ -1,18 +1,18 @@ # 策略选择 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/parallel/strategy_select.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/parallel/strategy_select.md) ## 概述 在分布式模型训练中,针对不同的模型规模和数据量大小,可以选择不同的并行策略来提高训练效率和资源利用率。以下是不同并行策略的解释和适用情况: -1. [数据并行](https://www.mindspore.cn/tutorials/zh-CN/master/parallel/data_parallel.html):数据并行是指在训练过程中,将不同的训练样本分布到不同的设备上,每个设备计算其分配的样本的梯度。然后通过梯度的平均或累加来更新模型的参数。数据并行适用于数据量较大,而模型参数量较少,可以在单个设备上加载的情况。数据并行能够充分利用多个设备的计算能力,加速训练过程。 +1. [数据并行](https://www.mindspore.cn/tutorials/zh-CN/br_base/parallel/data_parallel.html):数据并行是指在训练过程中,将不同的训练样本分布到不同的设备上,每个设备计算其分配的样本的梯度。然后通过梯度的平均或累加来更新模型的参数。数据并行适用于数据量较大,而模型参数量较少,可以在单个设备上加载的情况。数据并行能够充分利用多个设备的计算能力,加速训练过程。 -2. [算子级并行](https://www.mindspore.cn/tutorials/zh-CN/master/parallel/operator_parallel.html):算子级并行是指以算子为单位,把输入张量和模型参数切分到多台设备上进行计算,每个设备负责计算模型的一部分,提升整体速度。算子级并行又分为需要手动配置切分策略的半自动并行模式以及只需配置少部分甚至无需配置切分策略的自动并行模式。算子级并行适用于模型架构较大,无法完全载入单个设备内存的情况。 +2. [算子级并行](https://www.mindspore.cn/tutorials/zh-CN/br_base/parallel/operator_parallel.html):算子级并行是指以算子为单位,把输入张量和模型参数切分到多台设备上进行计算,每个设备负责计算模型的一部分,提升整体速度。算子级并行又分为需要手动配置切分策略的半自动并行模式以及只需配置少部分甚至无需配置切分策略的自动并行模式。算子级并行适用于模型架构较大,无法完全载入单个设备内存的情况。 -3. [优化器并行](https://www.mindspore.cn/tutorials/zh-CN/master/parallel/optimizer_parallel.html):优化器并行通过将优化器的计算量分散到数据并行维度的卡上,在大规模网络上(比如LLAMA、DeepSeek)可以有效减少内存消耗并提升网络性能,推荐并行训练时开启。 +3. [优化器并行](https://www.mindspore.cn/tutorials/zh-CN/br_base/parallel/optimizer_parallel.html):优化器并行通过将优化器的计算量分散到数据并行维度的卡上,在大规模网络上(比如LLAMA、DeepSeek)可以有效减少内存消耗并提升网络性能,推荐并行训练时开启。 -4. [流水线并行](https://www.mindspore.cn/tutorials/zh-CN/master/parallel/pipeline_parallel.html):流水线并行将整个训练过程分成多个阶段,每个阶段的计算在不同的设备上进行。数据在不同阶段之间流动,类似于流水线。这种策略适用于网络模型较大,单卡无法载入,且网络可以较为平均地分为多个阶段的计算,并且每个阶段的计算时间较长,从而可以最大限度地重叠计算和通信。 +4. 
[流水线并行](https://www.mindspore.cn/tutorials/zh-CN/br_base/parallel/pipeline_parallel.html):流水线并行将整个训练过程分成多个阶段,每个阶段的计算在不同的设备上进行。数据在不同阶段之间流动,类似于流水线。这种策略适用于网络模型较大,单卡无法载入,且网络可以较为平均地分为多个阶段的计算,并且每个阶段的计算时间较长,从而可以最大限度地重叠计算和通信。 选择适当的并行策略取决于具体的训练任务和资源配置。通常情况下,可以根据以下指导原则进行选择: diff --git a/tutorials/source_zh_cn/train_availability/fault_recover.md b/tutorials/source_zh_cn/train_availability/fault_recover.md index 20976d5428..7b04b13e2a 100644 --- a/tutorials/source_zh_cn/train_availability/fault_recover.md +++ b/tutorials/source_zh_cn/train_availability/fault_recover.md @@ -1,6 +1,6 @@ # 故障恢复 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/train_availability/fault_recover.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/train_availability/fault_recover.md) ## 概述 @@ -104,7 +104,7 @@ model = Model(net, loss_fn=loss, optimizer=optim) # Model封装 ### 配置CheckpointConfig -[mindspore.train.CheckpointConfig](https://www.mindspore.cn/docs/zh-CN/master/api_python/train/mindspore.train.CheckpointConfig.html) 支持根据迭代次数进行配置,主要参数如下: +[mindspore.train.CheckpointConfig](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/train/mindspore.train.CheckpointConfig.html) 支持根据迭代次数进行配置,主要参数如下: - `save_checkpoint_steps`:表示每隔多少个step保存一个Checkpoint文件,默认值为1。 - `keep_checkpoint_max`:表示最多保存多少个Checkpoint文件,默认值为5。 @@ -166,7 +166,7 @@ ckpt_file = ckpt_path + "/" + ckptnames[-1] ### 加载Checkpoint文件 -使用 [load_checkpoint](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.load_checkpoint.html) 和 [load_param_into_net](https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/mindspore.load_param_into_net.html) 方法加载最新保存的Checkpoint文件。 +使用 [load_checkpoint](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.load_checkpoint.html) 和 [load_param_into_net](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/mindspore/mindspore.load_param_into_net.html) 方法加载最新保存的Checkpoint文件。 - `load_checkpoint` 方法会把Checkpoint文件中的网络参数加载到字典param_dict中。 - `load_param_into_net` 方法会把字典param_dict中的参数加载到网络或者优化器中,加载后网络中的参数就是Checkpoint文件中保存的。 diff --git a/tutorials/source_zh_cn/train_availability/graceful_exit.md b/tutorials/source_zh_cn/train_availability/graceful_exit.md index 9b5a3360ff..7c2c47f5e8 100644 --- a/tutorials/source_zh_cn/train_availability/graceful_exit.md +++ b/tutorials/source_zh_cn/train_availability/graceful_exit.md @@ -1,12 +1,12 @@ # 进程优雅退出 -[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/master/tutorials/source_zh_cn/train_availability/graceful_exit.md) +[![查看源文件](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/br_base/resource/_static/logo_source.svg)](https://gitee.com/mindspore/docs/blob/br_base/tutorials/source_zh_cn/train_availability/graceful_exit.md) ## 概述 当训练集群中存在亚健康设备时,如果能在亚健康设备发生故障之前完成 checkpoint 保存并结束集群训练进程,可以有效避免集群损坏时的权重数据丢失问题。同时,这也可以避免训练恢复时的数据回滚和 checkpoint 加载回滚等问题,从而减少训练资源的浪费。 -> 本文档介绍进程优雅退出功能的使用方法。为了说明具体使用方式,以在第一个训练step时检测到退出配置信息并提前结束训练进程为例。您可以在这里下载完整代码:[process_graceful_exit](https://gitee.com/mindspore/docs/tree/master/docs/sample_code/graceful_exit/)。 +> 
本文档介绍进程优雅退出功能的使用方法。为了说明具体使用方式,以在第一个训练step时检测到退出配置信息并提前结束训练进程为例。您可以在这里下载完整代码:[process_graceful_exit](https://gitee.com/mindspore/docs/tree/br_base/docs/sample_code/graceful_exit/)。 其中,`graceful_exit.py` 为训练脚本,`train.sh` 为 `msrun` 启动脚本,`graceful_exit.json` 为优雅退出配置文件。 @@ -159,7 +159,7 @@ config_json = r"./graceful_exit.json" cb = OnRequestExit(file_name="LeNet", config_file=config_json) ``` -另外,在配置 `OnRequestExit` callback函数时,保存mindir、保存checkpoint以及其他配置参数可以根据需要自行配置,详情参见[OnRequestExit](https://www.mindspore.cn/docs/zh-CN/master/api_python/train/mindspore.train.OnRequestExit.html)。 +另外,在配置 `OnRequestExit` callback函数时,保存mindir、保存checkpoint以及其他配置参数可以根据需要自行配置,详情参见[OnRequestExit](https://www.mindspore.cn/docs/zh-CN/br_base/api_python/train/mindspore.train.OnRequestExit.html)。 ```python def graceful_exit_case(): -- Gitee
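补充一个最小示意,演示如何把上文进程优雅退出章节中的 `OnRequestExit` 回调注册到 `Model.train` 训练流程中。其中占位网络、损失函数、优化器以及 `create_dataset()` 均为示意用的假设,并非样例代码 `graceful_exit.py` 的真实内容,实际使用请以该样例脚本和 `mindspore.train.OnRequestExit` 的接口文档为准:

```python
from mindspore import nn
from mindspore.train import Model, OnRequestExit

# 假设:网络、损失函数与优化器按常规方式构建,此处仅用占位结构示意
net = nn.Dense(32, 10)  # 占位网络,实际可替换为 LeNet 等训练网络
loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
optim = nn.Momentum(net.trainable_params(), learning_rate=0.01, momentum=0.9)
model = Model(net, loss_fn=loss, optimizer=optim)

# 与上文一致:通过 config_file 指定优雅退出配置文件 graceful_exit.json
config_json = r"./graceful_exit.json"
cb = OnRequestExit(file_name="LeNet", config_file=config_json)

# 假设 create_dataset() 返回训练数据集(示意,非真实接口),
# 将回调传入 model.train 后,训练中检测到退出配置即保存 checkpoint 并优雅退出:
# dataset = create_dataset()
# model.train(epoch=10, train_dataset=dataset, callbacks=[cb])
```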