From 20266db654c1ef2ae535da0f23ee0c0e4078be20 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=90=89=E5=AE=8F=E6=A2=85?= <591861959@qq.com> Date: Thu, 7 Apr 2022 06:13:52 +0000 Subject: [PATCH] Use non_blocking=True for host-to-device tensor transfers in train_ctc --- .../nlp/LSTM_ID0468_for_PyTorch/timit/steps/train_ctc.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/PyTorch/built-in/nlp/LSTM_ID0468_for_PyTorch/timit/steps/train_ctc.py b/PyTorch/built-in/nlp/LSTM_ID0468_for_PyTorch/timit/steps/train_ctc.py index 0b434d30bb..98e3f7ae16 100644 --- a/PyTorch/built-in/nlp/LSTM_ID0468_for_PyTorch/timit/steps/train_ctc.py +++ b/PyTorch/built-in/nlp/LSTM_ID0468_for_PyTorch/timit/steps/train_ctc.py @@ -75,10 +75,10 @@ def run_epoch(epoch_id, model, data_iter, loss_fn, device, optimizer=None, print for i, data in enumerate(data_iter): start_time = time.time() inputs, input_sizes, targets, target_sizes, utt_list = data - inputs = inputs.to(device) - input_sizes = input_sizes.to(device) - targets = targets.to(device) - target_sizes = target_sizes.to(device) + inputs = inputs.to(device,non_blocking=True) + input_sizes = input_sizes.to(device,non_blocking=True) + targets = targets.to(device,non_blocking=True) + target_sizes = target_sizes.to(device,non_blocking=True) out = model(inputs) out_len, batch_size, _ = out.size() -- Gitee