diff --git a/tb_plugins/profiling/tb_plugin/fe/src/api/generated/api.ts b/tb_plugins/profiling/tb_plugin/fe/src/api/generated/api.ts
index 37a5eb3127d0f4bd65bac16d527617070f2f4990..755e3357a42c4c845d4d02a35d1bbf228bc9c8a7 100644
--- a/tb_plugins/profiling/tb_plugin/fe/src/api/generated/api.ts
+++ b/tb_plugins/profiling/tb_plugin/fe/src/api/generated/api.ts
@@ -90,7 +90,7 @@ export class RequiredError extends Error {
  * @export
  * @interface CallStackTableData
  */
-export interface CallStackTableData extends Array<CallStackTableDataInner> {}
+export interface CallStackTableData extends Array<CallStackTableDataInner> { }
 /**
  *
  * @export
@@ -862,7 +862,7 @@ export interface OpStats {
  * @export
  * @interface OperationTableData
  */
-export interface OperationTableData extends Array<OperationTableDataInner> {}
+export interface OperationTableData extends Array<OperationTableDataInner> { }
 /**
  *
  * @export
@@ -1173,6 +1173,25 @@ export interface ValueAndFormat {
    */
   f: string
 }
+/**
+ *
+ * @export
+ * @interface Views
+ */
+export interface Views {
+  /**
+   *
+   * @type {string}
+   * @memberof Views
+   */
+  device_target: string
+  /**
+   *
+   * @type {Array<string>}
+   * @memberof Views
+   */
+  views: Array<string>
+}
 /**
  * DefaultApi - fetch parameter creator
  * @export
@@ -3524,7 +3543,7 @@ export const DefaultApiFp = function (configuration?: Configuration) {
     viewsGet(
       run: string,
       options?: any
-    ): (fetch?: FetchAPI, basePath?: string) => Promise<Array<string>> {
+    ): (fetch?: FetchAPI, basePath?: string) => Promise<Views> {
       const localVarFetchArgs = DefaultApiFetchParamCreator(
         configuration
       ).viewsGet(run, options)
diff --git a/tb_plugins/profiling/tb_plugin/fe/src/app.tsx b/tb_plugins/profiling/tb_plugin/fe/src/app.tsx
index a42d31c85f01043006ea2217165c8b4e2e2b99ed..aa5ce17284a1748fb76d6fc9cabfee888ef55250 100644
--- a/tb_plugins/profiling/tb_plugin/fe/src/app.tsx
+++ b/tb_plugins/profiling/tb_plugin/fe/src/app.tsx
@@ -159,6 +159,7 @@ export const App = () => {
   const [view, setView] = React.useState('')
   const [loaded, setLoaded] = React.useState(false)
   const iframeRef = React.useRef(null)
+  const [deviceTarget, setDeviceTarget] = React.useState('GPU')
   const [diffLeftWorkerOptions, setDiffLeftWorkerOptions] = React.useState<
     string[]
   >([])
@@ -260,9 +261,10 @@
   React.useEffect(() => {
     if (run) {
       api.defaultApi.viewsGet(run).then((rawViews) => {
-        const views = rawViews
+        const views = rawViews.views
           .map((v) => Views[Views[v as Views]])
           .filter(Boolean)
+        setDeviceTarget(rawViews.device_target)
         setViews(views)
       })
     }
@@ -503,7 +505,9 @@ export const App = () => {
diff --git a/tb_plugins/profiling/tb_plugin/fe/src/components/Kernel.tsx b/tb_plugins/profiling/tb_plugin/fe/src/components/Kernel.tsx
index 296f8aebec829db0c3fddb76858941d696c77026..aecb86b3ea3cf531498b8ae0e0867ac672d11c4c 100644
--- a/tb_plugins/profiling/tb_plugin/fe/src/components/Kernel.tsx
+++ b/tb_plugins/profiling/tb_plugin/fe/src/components/Kernel.tsx
@@ -163,7 +163,7 @@ export const Kernel: React.FC = (props) => {
   const TensorCoresTitle = React.useMemo(
     () =>
       deviceTarget === 'Ascend' ?
        chartHeaderRenderer(
-          'AI Cores Utilization',
+          'Accelerator Core Utilization',
           TensorCoresPieChartTooltipAscend
         ) :
diff --git a/tb_plugins/profiling/tb_plugin/fe/src/components/TooltipDescriptions.ts b/tb_plugins/profiling/tb_plugin/fe/src/components/TooltipDescriptions.ts
index 596e918eb82dbf96830db7f1938959cf219a626e..8c60bfb05a0fa8ebdf899c5fc6141a42f6626c2e 100644
--- a/tb_plugins/profiling/tb_plugin/fe/src/components/TooltipDescriptions.ts
+++ b/tb_plugins/profiling/tb_plugin/fe/src/components/TooltipDescriptions.ts
@@ -23,7 +23,7 @@ export const GPUKernelTotalTimeTooltip = `The accumulated time of all calls of t
 
 export const TensorCoresPieChartTooltip = `The accumulated time of all kernels using or not using Tensor Cores.`
 
-export const TensorCoresPieChartTooltipAscend = `The accumulated time of all kernels using or not using AI Cores.`
+export const TensorCoresPieChartTooltipAscend = `The accumulated time of all kernels grouped by Accelerator Core.`
 
 export const DistributedGpuInfoTableTooltip = `Information about GPU hardware used during the run.`
 
diff --git a/tb_plugins/profiling/tb_plugin/torch_tb_profiler/plugin.py b/tb_plugins/profiling/tb_plugin/torch_tb_profiler/plugin.py
index 7fa352261de88a952867e838bdbbc31c114dcb70..798fc5fb84fb0d5abeccb1bcc1a68204cebf237a 100644
--- a/tb_plugins/profiling/tb_plugin/torch_tb_profiler/plugin.py
+++ b/tb_plugins/profiling/tb_plugin/torch_tb_profiler/plugin.py
@@ -138,7 +138,11 @@ class TorchProfilerPlugin(base_plugin.TBPlugin):
         self._validate(run=name)
         run = self._get_run(name)
         views_list = [view.display_name for view in run.views]
-        return self.respond_as_json(views_list)
+        data = {
+            'device_target': run.device_target,
+            'views': views_list
+        }
+        return self.respond_as_json(data)
 
     @wrappers.Request.application
     def workers_route(self, request: werkzeug.Request):
diff --git a/tb_plugins/profiling/tb_plugin/torch_tb_profiler/profiler/loader.py b/tb_plugins/profiling/tb_plugin/torch_tb_profiler/profiler/loader.py
index 5a42ddaecc822af26186796be3819d0fbbe47530..2d45753ba8480cacae1e41929426afa533976e58 100644
--- a/tb_plugins/profiling/tb_plugin/torch_tb_profiler/profiler/loader.py
+++ b/tb_plugins/profiling/tb_plugin/torch_tb_profiler/profiler/loader.py
@@ -118,7 +118,7 @@ class RunLoader(object):
             sys.exit(1)
         except Exception as ex:
             if self.device_target == 'Ascend':
-                worker_name = f'{worker}_{span_name}'
+                worker_name = f'{worker}_{span_name}_ascend_pt'
             else:
                 worker_name = worker
             logger.warning('Failed to parse profile data for Run %s on %s. Exception=%s',
diff --git a/tb_plugins/profiling/tb_plugin/torch_tb_profiler/profiler/run_generator.py b/tb_plugins/profiling/tb_plugin/torch_tb_profiler/profiler/run_generator.py
index 23131a943c85e5591ce042eef4e9068620372fa5..9107947a5ea88cefc7d59ad6bd0dd79bd08588e8 100644
--- a/tb_plugins/profiling/tb_plugin/torch_tb_profiler/profiler/run_generator.py
+++ b/tb_plugins/profiling/tb_plugin/torch_tb_profiler/profiler/run_generator.py
@@ -438,13 +438,11 @@ class RunGenerator(object):
 
     def _generate_kernel_pie_npu(self):
         pie = {'columns': [{'type': 'string', 'name': 'name'}, {'type': 'number', 'name': 'value'}], 'rows': []}
-        with open(self.profile_data.kernel_file_path, encoding="utf-8") as f:
-            reader = csv.DictReader(f)
-            for row in reader:
-                data = []
-                data.append(row.get('Name'))
-                data.append(float(row.get('Duration(us)')))
-                pie['rows'].append(data)
+        for key, val in self.statistic_data.items():
+            data = []
+            data.append(key)
+            data.append(float(val['Total']))
+            pie['rows'].append(data)
         datas = {'total': pie, 'device_target': self.device_target}
         return datas
 
@@ -465,24 +463,17 @@ class RunGenerator(object):
         for idx, column in enumerate(datas[0]):
             if column == 'Name':
                 self.name_idx = idx
-            if column == 'Duration(us)':
+            elif column == 'Duration(us)':
                 self.duration_idx = idx
-            if column == 'Accelerator Core':
+            elif column == 'Accelerator Core':
                 self.core_type_idx = idx
-            if column == 'Start Time':
-                self.start_time_idx = idx
-            if column == 'Wait Time(us)':
-                self.wait_time_idx = idx
-            if column == 'Block Dim':
-                self.block_dim = idx
 
             if column in display_columns:
                 display_idxs.append(idx)
-                if column == 'Start Time' or column == 'Duration(us)' \
-                        or column == 'Wait Time(us)' or column == 'Block Dim':
+                if column in ('Duration(us)', 'Start Time', 'Wait Time(us)', 'Block Dim'):
                     table['columns'].append({'type': 'number', 'name': column})
-                    continue
-                table['columns'].append({'type': 'string', 'name': column})
+                else:
+                    table['columns'].append({'type': 'string', 'name': column})
         table['rows'] = [self._handle_kernel_table_rows(display_idxs, ls)
                          for idx, ls in enumerate(datas) if idx != 0]
         return result
@@ -529,11 +520,7 @@ class RunGenerator(object):
     def _handle_kernel_table_rows(self, ids, row):
         display_row = []
         for idx in ids:
-            if idx == self.start_time_idx or idx == self.duration_idx \
-                    or idx == self.wait_time_idx or idx == self.block_dim:
-                display_row.append(float(row[idx]))
-            else:
-                display_row.append(row[idx])
+            display_row.append(row[idx])
         call_name = row[self.name_idx]
         call_duration = float(row[self.duration_idx])
         call_type = row[self.core_type_idx]
diff --git a/tb_plugins/profiling/tb_plugin/torch_tb_profiler/run.py b/tb_plugins/profiling/tb_plugin/torch_tb_profiler/run.py
index 3cc7ef7fd5699351b991465249845bb82e37f9f2..f148e4ccbf110dec99e55bb808c10233b2e28154 100644
--- a/tb_plugins/profiling/tb_plugin/torch_tb_profiler/run.py
+++ b/tb_plugins/profiling/tb_plugin/torch_tb_profiler/run.py
@@ -57,6 +57,8 @@ class Run(object):
 
     def get_spans(self, worker=None):
         if worker is not None:
+            if self.span_view.get(worker) is None:
+                return None
             spans = self.span_view[worker]
         else:
             spans = [s for _, s in self.profiles.keys()]
diff --git a/tb_plugins/profiling/tb_plugin/torch_tb_profiler/static/trace_viewer_full.html b/tb_plugins/profiling/tb_plugin/torch_tb_profiler/static/trace_viewer_full.html
index 15169a4572b546a7ba0f35e870bae528de913773..4aac8735da109e1fc17c532e767fad21f85527f0 100644
--- a/tb_plugins/profiling/tb_plugin/torch_tb_profiler/static/trace_viewer_full.html
+++ b/tb_plugins/profiling/tb_plugin/torch_tb_profiler/static/trace_viewer_full.html
@@ -138,6 +138,7 @@ tr > td {
   padding: 2px 4px 2px 4px;
   vertical-align: top;
+  line-height: 16px;
 }
 
 table > tbody:focus {