From 5f542a1ddfea912a44e451bab046cee55330e086 Mon Sep 17 00:00:00 2001
From: fuchao <1501312275@qq.com>
Date: Fri, 23 May 2025 10:38:11 +0800
Subject: [PATCH] =?UTF-8?q?=E4=BC=98=E5=8C=96=E6=8F=92=E5=85=A5=E5=8F=8D?=
 =?UTF-8?q?=E5=90=91TensorDump=E7=AE=97=E5=AD=90=E9=80=BB=E8=BE=91?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../mindspore/dump/cell_dump_process.py       | 25 ++++++++++++-------------
 1 file changed, 12 insertions(+), 13 deletions(-)

diff --git a/debug/accuracy_tools/msprobe/mindspore/dump/cell_dump_process.py b/debug/accuracy_tools/msprobe/mindspore/dump/cell_dump_process.py
index 1f17dc491..f4e0bf58c 100644
--- a/debug/accuracy_tools/msprobe/mindspore/dump/cell_dump_process.py
+++ b/debug/accuracy_tools/msprobe/mindspore/dump/cell_dump_process.py
@@ -58,6 +58,12 @@ if (ms.__version__ >= "2.5.0"):
     td_in = ops.TensorDump("in")
 else:
     td_in = ops.TensorDump()
+dump_gradient_op_existed = False
+if hasattr(ops, 'DumpGradient'):
+    gd = ops.DumpGradient()
+    dump_gradient_op_existed = True
+else:
+    logger.warning('The operator "DumpGradient" does not exist. Cell dump can not work in graph mode.')
 td.add_prim_attr(KEY_SIDE_EFFECT, False)
 td_in.add_prim_attr(KEY_SIDE_EFFECT, False)
 td.add_prim_attr(KEY_TD_FLAG, True)
@@ -147,7 +153,8 @@ def cell_construct_wrapper(func, self):
         for index, item in enumerate(args):
             if self.data_mode == "backward" or self.data_mode == "all":
                 if ops.is_tensor(item):
-                    item = self.output_clips[index](item)
+                    item = gd(gen_file_path(self.dump_path, self.cell_prefix, KEY_BACKWARD, KEY_OUTPUT, index),
+                              item, "out")
             if self.data_mode == "forward" or self.data_mode == "all":
                 if ops.is_tensor(item):
                     if need_tensordump_in(self, 'input_dump_mode'):
@@ -170,7 +177,8 @@ def cell_construct_wrapper(func, self):
             for index, item in enumerate(out):
                 if self.data_mode == "backward" or self.data_mode == "all":
                     if ops.is_tensor(item):
-                        item = self.input_clips[index](item)
+                        item = gd(gen_file_path(self.dump_path, self.cell_prefix, KEY_BACKWARD, KEY_INPUT, index),
+                                  item, "in")
                 if self.data_mode == "forward" or self.data_mode == "all":
                     if ops.is_tensor(item):
                         if need_tensordump_in(self, 'output_dump_mode'):
@@ -191,7 +199,8 @@ def cell_construct_wrapper(func, self):
             return out_list
         else:
             if self.data_mode == "backward" or self.data_mode == "all":
-                out = self.input_clips[0](out)
+                out = gd(gen_file_path(self.dump_path, self.cell_prefix, KEY_BACKWARD, KEY_INPUT, 0),
+                         out, "in")
             if self.data_mode == "forward" or self.data_mode == "all":
                 if ops.is_tensor(out):
                     if need_tensordump_in(self, 'output_dump_mode'):
@@ -831,16 +840,6 @@ def start(config: CellDumpConfig):
             logger.info(f"Cell {name}: construct function is wrapped!")
             cell.dump_path = dump_path
             cell.data_mode = data_mode
-            cell.input_clips = []
-            cell.output_clips = []
-            # It is assumed that each cell has a maximum of 50 outputs and 50 inputs.
-            for i in range(50):
-                cell.input_clips.append(
-                    ops.InsertGradientOf(partial_func(clip_gradient, cell.dump_path, cell.cell_prefix, i, KEY_INPUT))
-                )
-                cell.output_clips.append(
-                    ops.InsertGradientOf(partial_func(clip_gradient, cell.dump_path, cell.cell_prefix, i, KEY_OUTPUT))
-                )
 
     logger.info(f"==========The cell_dump_process_start phase is Finished!==========")
 
     if dump_task == CoreConst.TENSOR:
-- 
Gitee