From 97c30f16a898d9908a73d9f030ab8e8955639552 Mon Sep 17 00:00:00 2001
From: wuyulong11
Date: Mon, 8 May 2023 19:53:08 +0800
Subject: [PATCH] [Modification description] Kernel View UI changes
 [Modified by] wuyulong 30031080
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../tb_plugin/fe/src/api/generated/api.ts     | 25 +++++++++++--
 tb_plugins/profiling/tb_plugin/fe/src/app.tsx |  8 +++--
 .../tb_plugin/fe/src/components/Kernel.tsx    |  2 +-
 .../fe/src/components/TooltipDescriptions.ts  |  2 +-
 .../tb_plugin/torch_tb_profiler/plugin.py     |  6 +++-
 .../torch_tb_profiler/profiler/loader.py      |  2 +-
 .../profiler/run_generator.py                 | 35 ++++++-------------
 .../tb_plugin/torch_tb_profiler/run.py        |  2 ++
 .../static/trace_viewer_full.html             |  1 +
 9 files changed, 50 insertions(+), 33 deletions(-)

diff --git a/tb_plugins/profiling/tb_plugin/fe/src/api/generated/api.ts b/tb_plugins/profiling/tb_plugin/fe/src/api/generated/api.ts
index 37a5eb3127d..755e3357a42 100644
--- a/tb_plugins/profiling/tb_plugin/fe/src/api/generated/api.ts
+++ b/tb_plugins/profiling/tb_plugin/fe/src/api/generated/api.ts
@@ -90,7 +90,7 @@ export class RequiredError extends Error {
  * @export
  * @interface CallStackTableData
  */
-export interface CallStackTableData extends Array {}
+export interface CallStackTableData extends Array { }
 /**
  *
  * @export
@@ -862,7 +862,7 @@ export interface OpStats {
  * @export
  * @interface OperationTableData
  */
-export interface OperationTableData extends Array {}
+export interface OperationTableData extends Array { }
 /**
  *
  * @export
@@ -1173,6 +1173,25 @@ export interface ValueAndFormat {
   */
  f: string
 }
+/**
+ *
+ * @exports
+ * @interface Views
+ */
+export interface Views {
+  /**
+   *
+   * @type {string}
+   * @memberof Views
+   */
+  device_target: string
+  /**
+   *
+   * @type {Array<string>}
+   * @memberof Views
+   */
+  views: Array<string>
+}
 /**
  * DefaultApi - fetch parameter creator
  * @export
@@ -3524,7 +3543,7 @@ export const DefaultApiFp = function (configuration?: Configuration) {
     viewsGet(
       run: string,
       options?: any
-    ): (fetch?: FetchAPI, basePath?: string) => Promise<Array<string>> {
+    ): (fetch?: FetchAPI, basePath?: string) => Promise<Views> {
       const localVarFetchArgs = DefaultApiFetchParamCreator(
         configuration
       ).viewsGet(run, options)
diff --git a/tb_plugins/profiling/tb_plugin/fe/src/app.tsx b/tb_plugins/profiling/tb_plugin/fe/src/app.tsx
index a42d31c85f0..aa5ce17284a 100644
--- a/tb_plugins/profiling/tb_plugin/fe/src/app.tsx
+++ b/tb_plugins/profiling/tb_plugin/fe/src/app.tsx
@@ -159,6 +159,7 @@ export const App = () => {
   const [view, setView] = React.useState('')
   const [loaded, setLoaded] = React.useState(false)
   const iframeRef = React.useRef(null)
+  const [deviceTarget, setDeviceTarget] = React.useState('GPU')
   const [diffLeftWorkerOptions, setDiffLeftWorkerOptions] = React.useState<
     string[]
   >([])
@@ -260,9 +261,10 @@ export const App = () => {
   React.useEffect(() => {
     if (run) {
       api.defaultApi.viewsGet(run).then((rawViews) => {
-        const views = rawViews
+        const views = rawViews.views
           .map((v) => Views[Views[v as Views]])
           .filter(Boolean)
+        setDeviceTarget(rawViews.device_target)
         setViews(views)
       })
     }
@@ -503,7 +505,9 @@ export const App = () => {
diff --git a/tb_plugins/profiling/tb_plugin/fe/src/components/Kernel.tsx b/tb_plugins/profiling/tb_plugin/fe/src/components/Kernel.tsx
index 296f8aebec8..aecb86b3ea3 100644
--- a/tb_plugins/profiling/tb_plugin/fe/src/components/Kernel.tsx
+++ b/tb_plugins/profiling/tb_plugin/fe/src/components/Kernel.tsx
@@ -163,7 +163,7 @@ export const Kernel: React.FC = (props) => {
   const TensorCoresTitle = React.useMemo(
     () =>
       deviceTarget === 'Ascend' ?
         chartHeaderRenderer(
-          'AI Cores Utilization',
+          'Accelerator Core Utilization',
           TensorCoresPieChartTooltipAscend
         ) :
diff --git a/tb_plugins/profiling/tb_plugin/fe/src/components/TooltipDescriptions.ts b/tb_plugins/profiling/tb_plugin/fe/src/components/TooltipDescriptions.ts
index 596e918eb82..8c60bfb05a0 100644
--- a/tb_plugins/profiling/tb_plugin/fe/src/components/TooltipDescriptions.ts
+++ b/tb_plugins/profiling/tb_plugin/fe/src/components/TooltipDescriptions.ts
@@ -23,7 +23,7 @@ export const GPUKernelTotalTimeTooltip = `The accumulated time of all calls of t
 
 export const TensorCoresPieChartTooltip = `The accumulated time of all kernels using or not using Tensor Cores.`
 
-export const TensorCoresPieChartTooltipAscend = `The accumulated time of all kernels using or not using AI Cores.`
+export const TensorCoresPieChartTooltipAscend = `The accumulated time of all kernels group by Accelerator Core.`
 
 export const DistributedGpuInfoTableTooltip = `Information about GPU hardware used during the run.`
diff --git a/tb_plugins/profiling/tb_plugin/torch_tb_profiler/plugin.py b/tb_plugins/profiling/tb_plugin/torch_tb_profiler/plugin.py
index 7fa352261de..798fc5fb84f 100644
--- a/tb_plugins/profiling/tb_plugin/torch_tb_profiler/plugin.py
+++ b/tb_plugins/profiling/tb_plugin/torch_tb_profiler/plugin.py
@@ -138,7 +138,11 @@ class TorchProfilerPlugin(base_plugin.TBPlugin):
         self._validate(run=name)
         run = self._get_run(name)
         views_list = [view.display_name for view in run.views]
-        return self.respond_as_json(views_list)
+        data = {
+            'device_target': run.device_target,
+            'views': views_list
+        }
+        return self.respond_as_json(data)
 
     @wrappers.Request.application
     def workers_route(self, request: werkzeug.Request):
diff --git a/tb_plugins/profiling/tb_plugin/torch_tb_profiler/profiler/loader.py b/tb_plugins/profiling/tb_plugin/torch_tb_profiler/profiler/loader.py
index 5a42ddaecc8..2d45753ba84 100644
--- a/tb_plugins/profiling/tb_plugin/torch_tb_profiler/profiler/loader.py
+++ b/tb_plugins/profiling/tb_plugin/torch_tb_profiler/profiler/loader.py
@@ -118,7 +118,7 @@ class RunLoader(object):
             sys.exit(1)
         except Exception as ex:
             if self.device_target == 'Ascend':
-                worker_name = f'{worker}_{span_name}'
+                worker_name = f'{worker}_{span_name}_ascend_pt'
             else:
                 worker_name = worker
             logger.warning('Failed to parse profile data for Run %s on %s. Exception=%s',
diff --git a/tb_plugins/profiling/tb_plugin/torch_tb_profiler/profiler/run_generator.py b/tb_plugins/profiling/tb_plugin/torch_tb_profiler/profiler/run_generator.py
index 23131a943c8..9107947a5ea 100644
--- a/tb_plugins/profiling/tb_plugin/torch_tb_profiler/profiler/run_generator.py
+++ b/tb_plugins/profiling/tb_plugin/torch_tb_profiler/profiler/run_generator.py
@@ -438,13 +438,11 @@ class RunGenerator(object):
     def _generate_kernel_pie_npu(self):
         pie = {'columns': [{'type': 'string', 'name': 'name'}, {'type': 'number', 'name': 'value'}], 'rows': []}
-        with open(self.profile_data.kernel_file_path, encoding="utf-8") as f:
-            reader = csv.DictReader(f)
-            for row in reader:
-                data = []
-                data.append(row.get('Name'))
-                data.append(float(row.get('Duration(us)')))
-                pie['rows'].append(data)
+        for key, val in self.statistic_data.items():
+            data = []
+            data.append(key)
+            data.append(float(val['Total']))
+            pie['rows'].append(data)
         datas = {'total': pie, 'device_target': self.device_target}
         return datas
@@ -465,24 +463,17 @@ class RunGenerator(object):
         for idx, column in enumerate(datas[0]):
             if column == 'Name':
                 self.name_idx = idx
-            if column == 'Duration(us)':
+            elif column == 'Duration(us)':
                 self.duration_idx = idx
-            if column == 'Accelerator Core':
+            elif column == 'Accelerator Core':
                 self.core_type_idx = idx
-            if column == 'Start Time':
-                self.start_time_idx = idx
-            if column == 'Wait Time(us)':
-                self.wait_time_idx = idx
-            if column == 'Block Dim':
-                self.block_dim = idx
             if column in display_columns:
                 display_idxs.append(idx)
-                if column == 'Start Time' or column == 'Duration(us)' \
-                        or column == 'Wait Time(us)' or column == 'Block Dim':
+                if column in ('Duration(us)', 'Start Time', 'Wait Time(us)', 'Block Dim'):
                     table['columns'].append({'type': 'number', 'name': column})
-                    continue
-                table['columns'].append({'type': 'string', 'name': column})
+                else:
+                    table['columns'].append({'type': 'string', 'name': column})
         table['rows'] = [self._handle_kernel_table_rows(display_idxs, ls) for idx, ls in enumerate(datas) if idx != 0]
         return result
@@ -529,11 +520,7 @@ class RunGenerator(object):
     def _handle_kernel_table_rows(self, ids, row):
         display_row = []
         for idx in ids:
-            if idx == self.start_time_idx or idx == self.duration_idx \
-                    or idx == self.wait_time_idx or idx == self.block_dim:
-                display_row.append(float(row[idx]))
-            else:
-                display_row.append(row[idx])
+            display_row.append(row[idx])
         call_name = row[self.name_idx]
         call_duration = float(row[self.duration_idx])
         call_type = row[self.core_type_idx]
diff --git a/tb_plugins/profiling/tb_plugin/torch_tb_profiler/run.py b/tb_plugins/profiling/tb_plugin/torch_tb_profiler/run.py
index 3cc7ef7fd56..f148e4ccbf1 100644
--- a/tb_plugins/profiling/tb_plugin/torch_tb_profiler/run.py
+++ b/tb_plugins/profiling/tb_plugin/torch_tb_profiler/run.py
@@ -57,6 +57,8 @@ class Run(object):
 
     def get_spans(self, worker=None):
         if worker is not None:
+            if self.span_view.get(worker) is None:
+                return None
             spans = self.span_view[worker]
         else:
             spans = [s for _, s in self.profiles.keys()]
diff --git a/tb_plugins/profiling/tb_plugin/torch_tb_profiler/static/trace_viewer_full.html b/tb_plugins/profiling/tb_plugin/torch_tb_profiler/static/trace_viewer_full.html
index 15169a4572b..4aac8735da1 100644
--- a/tb_plugins/profiling/tb_plugin/torch_tb_profiler/static/trace_viewer_full.html
+++ b/tb_plugins/profiling/tb_plugin/torch_tb_profiler/static/trace_viewer_full.html
@@ -138,6 +138,7 @@
 tr > td {
   padding: 2px 4px 2px 4px;
   vertical-align: top;
+  line-height: 16px;
 }
 
 table > tbody:focus {
--
Gitee
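
For reference, a minimal TypeScript sketch of how a client might consume the reshaped /views response introduced by this patch (an object carrying device_target plus the view names, instead of a bare string array). It is not part of the commit: fetchViews and the relative endpoint path are illustrative assumptions, and the real frontend goes through the generated api.defaultApi.viewsGet wrapper updated above.

// Sketch only: mirrors the Views shape added to api.ts in this patch.
interface Views {
  device_target: string
  views: Array<string>
}

// Hypothetical helper standing in for api.defaultApi.viewsGet(run); the
// endpoint path is illustrative, not the plugin's actual route.
async function fetchViews(run: string): Promise<Views> {
  const resp = await fetch(`./views?run=${encodeURIComponent(run)}`)
  return (await resp.json()) as Views
}

async function loadViews(run: string): Promise<void> {
  const rawViews = await fetchViews(run)
  // device_target selects GPU- or Ascend-specific rendering (e.g. the
  // 'Accelerator Core Utilization' title), views lists the views to display.
  console.log('device target:', rawViews.device_target)
  console.log('available views:', rawViews.views.join(', '))
}

Returning device_target in the same payload lets the frontend choose GPU or Ascend rendering without an extra round trip before the first view loads.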