From b598550a564d8b80d9a73567a8bc76200e7ec7c5 Mon Sep 17 00:00:00 2001
From: "hongliang.yuan"
Date: Fri, 16 May 2025 09:46:42 +0800
Subject: [PATCH] sync resnet satrn t5 tacotron2

---
 audio/speech_synthesis/tacotron2/pytorch/requirements.txt | 2 +-
 .../resnet101/pytorch/dataloader/dali_classification.py   | 6 +++---
 .../resnet50/pytorch/dataloader/dali_classification.py    | 6 +++---
 cv/ocr/satrn/pytorch/base/requirements.txt                | 4 ++--
 nlp/translation/t5/pytorch/train.py                       | 4 ++--
 5 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/audio/speech_synthesis/tacotron2/pytorch/requirements.txt b/audio/speech_synthesis/tacotron2/pytorch/requirements.txt
index 77c33962..71435a5d 100644
--- a/audio/speech_synthesis/tacotron2/pytorch/requirements.txt
+++ b/audio/speech_synthesis/tacotron2/pytorch/requirements.txt
@@ -1,4 +1,4 @@
-matplotlib==3.7.0
+matplotlib==3.8.0
 numpy
 inflect==0.2.5
 scipy
diff --git a/cv/classification/resnet101/pytorch/dataloader/dali_classification.py b/cv/classification/resnet101/pytorch/dataloader/dali_classification.py
index 4c92283b..faf9c8cb 100644
--- a/cv/classification/resnet101/pytorch/dataloader/dali_classification.py
+++ b/cv/classification/resnet101/pytorch/dataloader/dali_classification.py
@@ -14,7 +14,7 @@ import nvidia.dali.ops as ops
 import nvidia.dali.types as types
 from nvidia.dali.pipeline import Pipeline
 from nvidia.dali.plugin.pytorch import DALIClassificationIterator, DALIGenericIterator
-
+from nvidia.dali.plugin.base_iterator import LastBatchPolicy
 class HybridTrainPipe(Pipeline):
     def __init__(self, batch_size, num_threads, device_id, data_dir, size):
         super(HybridTrainPipe, self).__init__(batch_size, num_threads, device_id)
@@ -66,14 +66,14 @@ def get_imagenet_iter_dali(type, image_dir, batch_size, num_threads, device_id,
                                     data_dir = os.path.join(image_dir, "train"),
                                     size=size)
         pip_train.build()
-        dali_iter_train = DALIClassificationIterator(pip_train, size=pip_train.epoch_size("Reader"))
+        dali_iter_train = DALIClassificationIterator(pip_train, size=pip_train.epoch_size("Reader"), last_batch_policy = LastBatchPolicy.DROP)
         return dali_iter_train
     elif type == 'val':
         pip_val = HybridValPipe(batch_size=batch_size, num_threads=num_threads, device_id=device_id,
                                 data_dir = os.path.join(image_dir, "val"),
                                 size=size)
         pip_val.build()
-        dali_iter_val = DALIClassificationIterator(pip_val, size=pip_val.epoch_size("Reader"))
+        dali_iter_val = DALIClassificationIterator(pip_val, size=pip_val.epoch_size("Reader"), last_batch_policy = LastBatchPolicy.DROP)
         return dali_iter_val
diff --git a/cv/classification/resnet50/pytorch/dataloader/dali_classification.py b/cv/classification/resnet50/pytorch/dataloader/dali_classification.py
index 4c92283b..faf9c8cb 100644
--- a/cv/classification/resnet50/pytorch/dataloader/dali_classification.py
+++ b/cv/classification/resnet50/pytorch/dataloader/dali_classification.py
@@ -14,7 +14,7 @@ import nvidia.dali.ops as ops
 import nvidia.dali.types as types
 from nvidia.dali.pipeline import Pipeline
 from nvidia.dali.plugin.pytorch import DALIClassificationIterator, DALIGenericIterator
-
+from nvidia.dali.plugin.base_iterator import LastBatchPolicy
 class HybridTrainPipe(Pipeline):
     def __init__(self, batch_size, num_threads, device_id, data_dir, size):
         super(HybridTrainPipe, self).__init__(batch_size, num_threads, device_id)
@@ -66,14 +66,14 @@ def get_imagenet_iter_dali(type, image_dir, batch_size, num_threads, device_id,
                                     data_dir = os.path.join(image_dir, "train"),
                                     size=size)
         pip_train.build()
-        dali_iter_train = DALIClassificationIterator(pip_train, size=pip_train.epoch_size("Reader"))
+        dali_iter_train = DALIClassificationIterator(pip_train, size=pip_train.epoch_size("Reader"), last_batch_policy = LastBatchPolicy.DROP)
         return dali_iter_train
     elif type == 'val':
         pip_val = HybridValPipe(batch_size=batch_size, num_threads=num_threads, device_id=device_id,
                                 data_dir = os.path.join(image_dir, "val"),
                                 size=size)
         pip_val.build()
-        dali_iter_val = DALIClassificationIterator(pip_val, size=pip_val.epoch_size("Reader"))
+        dali_iter_val = DALIClassificationIterator(pip_val, size=pip_val.epoch_size("Reader"), last_batch_policy = LastBatchPolicy.DROP)
         return dali_iter_val
diff --git a/cv/ocr/satrn/pytorch/base/requirements.txt b/cv/ocr/satrn/pytorch/base/requirements.txt
index a6ff6227..b3390ac4 100755
--- a/cv/ocr/satrn/pytorch/base/requirements.txt
+++ b/cv/ocr/satrn/pytorch/base/requirements.txt
@@ -1,5 +1,5 @@
 addict==2.4.0
-yapf==0.32.0
+yapf==0.40.1
 lmdb==1.3.0
-rapidfuzz==2.0.7
+rapidfuzz==2.2.0
 shapely
diff --git a/nlp/translation/t5/pytorch/train.py b/nlp/translation/t5/pytorch/train.py
index 047438c3..ea62996f 100644
--- a/nlp/translation/t5/pytorch/train.py
+++ b/nlp/translation/t5/pytorch/train.py
@@ -18,8 +18,8 @@ from transformers import (HfArgumentParser,
                           default_data_collator,
                           DataCollatorForSeq2Seq,
                           )
-from trainer_seq2seq import Seq2SeqTrainer
-from training_args_seq2seq import Seq2SeqTrainingArguments
+from transformers.trainer_seq2seq import Seq2SeqTrainer
+from transformers.training_args_seq2seq import Seq2SeqTrainingArguments
 
 logger = logging.getLogger(__name__)
-- 
Gitee
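
A minimal usage sketch for the patched DALI loader, assuming NVIDIA DALI is installed and image_dir points at an ImageNet-style layout with train/ and val/ subdirectories. get_imagenet_iter_dali and its argument names come from dali_classification.py above; the import path, dataset path, and batch settings below are illustrative assumptions, not part of this patch.

# Sketch only: exercises the patched get_imagenet_iter_dali(); the module
# path, data directory, and batch settings here are assumptions.
from dataloader.dali_classification import get_imagenet_iter_dali

# With last_batch_policy=LastBatchPolicy.DROP set inside the loader, the final
# partial batch of an epoch is discarded, so every batch has exactly batch_size samples.
train_loader = get_imagenet_iter_dali(
    type='train',                 # 'train' or 'val', as handled in the patched function
    image_dir='/data/imagenet',   # hypothetical dataset root containing train/ and val/
    batch_size=256,
    num_threads=4,
    device_id=0,
    size=224,
)

for batch in train_loader:
    images = batch[0]["data"]     # DALIClassificationIterator yields dicts with "data" and "label"
    labels = batch[0]["label"]
    # ... forward/backward pass ...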