From d7618af332353479a9fcba230257c18b957c12f5 Mon Sep 17 00:00:00 2001
From: YueJiang <1206052386@qq.com>
Date: Tue, 23 May 2023 13:34:12 +0000
Subject: [PATCH] update
 TensorFlow/built-in/recommendation/DIN_ID0190_for_TensorFlow/examples/din_demo.py.
 Fix several editing errors
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Signed-off-by: YueJiang <1206052386@qq.com>
---
 .../DIN_ID0190_for_TensorFlow/examples/din_demo.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/TensorFlow/built-in/recommendation/DIN_ID0190_for_TensorFlow/examples/din_demo.py b/TensorFlow/built-in/recommendation/DIN_ID0190_for_TensorFlow/examples/din_demo.py
index f85958820..843bb5137 100644
--- a/TensorFlow/built-in/recommendation/DIN_ID0190_for_TensorFlow/examples/din_demo.py
+++ b/TensorFlow/built-in/recommendation/DIN_ID0190_for_TensorFlow/examples/din_demo.py
@@ -52,7 +52,7 @@ def input_fn(filenames, is_train, batch_size=1024):
     return dataset
 
 class TimeHistory(keras.callbacks.Callback):
-    def on_train_begin(self, log={}):
+    def on_train_begin(self, logs={}):
         self.init_time = time.time()
         self.batch_train_time = []
         self.batch_valid_time = []
@@ -95,7 +95,7 @@ class TimeHistory(keras.callbacks.Callback):
             self.epoch_train_samples_accum += batch_size_global
             self.hist_tr_samples += batch_size_global
 
-    def on_test_batch_brgin(self, batch,logs={}):
+    def on_test_batch_begin(self, batch,logs={}):
         self.eval_batch_start = time.time()
 
     def on_test_batch_end(self, batch,logs={}):
@@ -131,7 +131,7 @@ class TimeHistory(keras.callbacks.Callback):
 
         self.times["hist_va_time"].append(np.sum(self.batch_valid_time))
         self.times["hist_tr_fps"].append(self.hist_tr_samples / self.times["hist_tr_time"][-1])
-        self.tiems["epoch_tr_fps"].append(self.epoch_train_samples_accum / self.times["epoch_tr_time"][-1])
+        self.times["epoch_tr_fps"].append(self.epoch_train_samples_accum / self.times["epoch_tr_time"][-1])
         self.times["epoch_total_fps"].append(self.epoch_train_samples_accum / epoch_time)
         self.times["hist_total_fps"].append(self.hist_tr_samples / self.times["hist_total_time"][-1])
         self.times["epoch_max_fps"].append(np.max(self.batch_train_fps))
@@ -169,12 +169,12 @@ if __name__ == "__main__":
     model.fit(x=input_fn(filename, True), epochs=5, verbose=1, validation_data=input_fn(r"./data/test.tfrecords.gz", False), validation_steps=5406, callbacks=callbacks)
     proc_total_time = time.time() - process_init_time
 
-    timing_items = sorted(time_callback.time.keys())
+    timing_items = sorted(time_callback.times.keys())
     epochs = len(time_callback.times[timing_items[0]])
 
     print("Epoch, ", end="")
     for k in timing_items:
-        print(f"{k} ", end="")
+        print(f"{k}, ", end="")
     print("E2E total time")
 
     for i in range(epochs):
@@ -184,4 +184,4 @@ if __name__ == "__main__":
             val = time_callback.times[k][i]
             print(f"{val:.4f}, ", end="")
 
-    print(f"{proc_total_time:4f}")
\ No newline at end of file
+    print(f"{proc_total_time:.4f}")
\ No newline at end of file
--
Gitee